diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/changelog cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/changelog --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/changelog 2016-11-07 10:03:18.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/changelog 2016-12-23 10:35:58.000000000 +0000 @@ -1,5 +1,5 @@ -cabal-install-1.24 (1.24.0.1+git20161105.0.b8f7afb-2~16.04) xenial; urgency=medium +cabal-install-1.24 (1.24.0.2+git20161208.0.c5ebf12-3~16.04) xenial; urgency=medium * Initial release - -- Herbert Valerio Riedel Mon, 07 Nov 2016 11:03:18 +0100 + -- Herbert Valerio Riedel Fri, 23 Dec 2016 11:35:58 +0100 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/control cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/control --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/control 2016-11-07 10:03:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/control 2016-12-23 10:35:42.000000000 +0000 @@ -2,7 +2,7 @@ Section: universe/haskell Priority: extra Maintainer: Herbert Valerio Riedel -Build-Depends: debhelper (>= 8.0.0), zlib1g-dev, ghc-7.10.3 +Build-Depends: debhelper (>= 8.0.0), zlib1g-dev, ghc-8.0.1 Standards-Version: 3.9.4 Homepage: http://www.haskell.org/ghc/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/rules cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/rules --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/debian/rules 2016-11-07 10:03:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/debian/rules 2016-12-23 10:35:42.000000000 +0000 @@ -6,7 +6,7 @@ dh $@ override_dh_auto_install: - PATH=/opt/ghc/7.10.3/bin:$$PATH PREFIX=$(CURDIR)/debian/cabal-install-1.24/opt/cabal/1.24 $(CURDIR)/build.sh + PATH=/opt/ghc/8.0.1/bin:$$PATH PREFIX=$(CURDIR)/debian/cabal-install-1.24/opt/cabal/1.24 $(CURDIR)/build.sh mkdir -p $(CURDIR)/debian/cabal-install-1.24/opt/ghc/bin mkdir -p $(CURDIR)/debian/cabal-install-1.24/opt/cabal/bin rm -rf $(CURDIR)/debian/cabal-install-1.24/opt/cabal/1.24/lib diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/async.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/async.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/async.cabal 2016-11-07 10:02:57.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/async.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,64 +0,0 @@ -name: async -version: 2.1.0 --- don't forget to update ./changelog.md! -synopsis: Run IO operations asynchronously and wait for their results - -description: - This package provides a higher-level interface over - threads, in which an @Async a@ is a concurrent - thread that will eventually deliver a value of - type @a@. The package provides ways to create - @Async@ computations, wait for their results, and - cancel them. - . - Using @Async@ is safer than using threads in two - ways: - . - * When waiting for a thread to return a result, - if the thread dies with an exception then the - caller must either re-throw the exception - ('wait') or handle it ('waitCatch'); the - exception cannot be ignored. - . - * The API makes it possible to build a tree of - threads that are automatically killed when - their parent dies (see 'withAsync'). 
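The safety claims in the package description above (an exception from a child thread must either be re-thrown by 'wait' or handled via 'waitCatch') can be illustrated with a minimal, self-contained sketch; this is an editorial example, not code taken from the packaged sources:

import Control.Concurrent.Async
import Control.Exception (SomeException, evaluate, try)

main :: IO ()
main = do
  -- 'wait' re-throws the child's exception in the caller,
  -- so a failed child cannot be silently ignored.
  a <- async (evaluate (div 1 0 :: Int))
  r <- try (wait a) :: IO (Either SomeException Int)
  print r                        -- Left divide by zero
  -- 'waitCatch' hands back the exception instead of re-throwing it.
  b <- async (return (42 :: Int))
  waitCatch b >>= print          -- Right 42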
- -license: BSD3 -license-file: LICENSE -author: Simon Marlow -maintainer: Simon Marlow -copyright: (c) Simon Marlow 2012 -category: Concurrency -build-type: Simple -cabal-version: >=1.10 -homepage: https://github.com/simonmar/async -bug-reports: https://github.com/simonmar/async/issues -tested-with: GHC==7.11.*, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2, GHC==7.2.2, GHC==7.0.4 - -extra-source-files: - changelog.md - bench/race.hs - -source-repository head - type: git - location: https://github.com/simonmar/async.git - -library - default-language: Haskell2010 - other-extensions: CPP, MagicHash, RankNTypes, UnboxedTuples - if impl(ghc>=7.1) - other-extensions: Trustworthy - exposed-modules: Control.Concurrent.Async - build-depends: base >= 4.3 && < 4.10, stm >= 2.2 && < 2.5 - -test-suite test-async - default-language: Haskell2010 - type: exitcode-stdio-1.0 - hs-source-dirs: test - main-is: test-async.hs - build-depends: base >= 4.3 && < 4.10, - async, - test-framework, - test-framework-hunit, - HUnit diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/bench/race.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/bench/race.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/bench/race.hs 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/bench/race.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -import Control.Concurrent.Async -import System.Environment -import Control.Monad -import Control.Concurrent - -main = runInUnboundThread $ do - [n] <- fmap (fmap read) getArgs - replicateM_ n $ race (return 1) (return 2) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/changelog.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/changelog.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/changelog.md 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/changelog.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -## Changes in 2.1.0: - - - Bump base dependency to allow 4.10 - - Remove invalid Monad instance for `Concurrently` - - Add `Monoid` and `Semigroup` instances for `Concurrently` - - Add `forConcurrently` (flipped version of `mapConcurrently`) - - Add STM version of all applicable IO functions: - `waitAnySTM`, `waitAnyCatchSTM`, `waitEitherSTM`, - `waitEitherCatchSTM`, `waitEitherSTM_`, and `waitBothSTM`. - -## Changes in 2.0.2: - - - Add a Monad instance for `Concurrently` - - Bump base dependency to allow 4.9 - -## Changes in 2.0.1.6: - - - Add workaround to waitCatch for #14 - -## Changes in 2.0.1.5: - - - Bump `base` dependencies for GHC 7.8 - -## Changes in 2.0.1.4: - - - Bump `base` dependency of test suite - -## Changes in 2.0.1.3: - - - Bump `base` dependency to allow 4.6 - -## Changes in 2.0.1.2: - - - Bump `stm` dependency to 2.4 - -## Changes in 2.0.1.1: - - - Safe Haskell support: `Control.Concurrent.Async` is now `Trustworthy` - -## Changes in 2.0.1.0: - - - Added a `Functor` instance for `Async` - - Added `asyncBound`, `asyncOn`, `asyncWithUnmask`, `asyncOnWithUnmask`, `withAsyncBound`, `withAsyncOn`, `withAsyncWithUnmask`, `withAsyncOnWithUnmask`. 
- - Added `mapConcurrently` - - Added `Concurrently` (with `Applicative` and `Alternative` instances) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/Control/Concurrent/Async.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/Control/Concurrent/Async.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/Control/Concurrent/Async.hs 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/Control/Concurrent/Async.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,684 +0,0 @@ -{-# LANGUAGE CPP, MagicHash, UnboxedTuples, RankNTypes #-} -#if __GLASGOW_HASKELL__ >= 701 -{-# LANGUAGE Trustworthy #-} -#endif -{-# OPTIONS -Wall #-} - ------------------------------------------------------------------------------ --- | --- Module : Control.Concurrent.Async --- Copyright : (c) Simon Marlow 2012 --- License : BSD3 (see the file LICENSE) --- --- Maintainer : Simon Marlow --- Stability : provisional --- Portability : non-portable (requires concurrency) --- --- This module provides a set of operations for running IO operations --- asynchronously and waiting for their results. It is a thin layer --- over the basic concurrency operations provided by --- "Control.Concurrent". The main additional functionality it --- provides is the ability to wait for the return value of a thread, --- but the interface also provides some additional safety and --- robustness over using threads and @MVar@ directly. --- --- The basic type is @'Async' a@, which represents an asynchronous --- @IO@ action that will return a value of type @a@, or die with an --- exception. An @Async@ corresponds to a thread, and its 'ThreadId' --- can be obtained with 'asyncThreadId', although that should rarely --- be necessary. --- --- For example, to fetch two web pages at the same time, we could do --- this (assuming a suitable @getURL@ function): --- --- > do a1 <- async (getURL url1) --- > a2 <- async (getURL url2) --- > page1 <- wait a1 --- > page2 <- wait a2 --- > ... --- --- where 'async' starts the operation in a separate thread, and --- 'wait' waits for and returns the result. If the operation --- throws an exception, then that exception is re-thrown by --- 'wait'. This is one of the ways in which this library --- provides some additional safety: it is harder to accidentally --- forget about exceptions thrown in child threads. --- --- A slight improvement over the previous example is this: --- --- > withAsync (getURL url1) $ \a1 -> do --- > withAsync (getURL url2) $ \a2 -> do --- > page1 <- wait a1 --- > page2 <- wait a2 --- > ... --- --- 'withAsync' is like 'async', except that the 'Async' is --- automatically killed (using 'cancel') if the enclosing IO operation --- returns before it has completed. Consider the case when the first --- 'wait' throws an exception; then the second 'Async' will be --- automatically killed rather than being left to run in the --- background, possibly indefinitely. This is the second way that the --- library provides additional safety: using 'withAsync' means we can --- avoid accidentally leaving threads running. Furthermore, --- 'withAsync' allows a tree of threads to be built, such that --- children are automatically killed if their parents die for any --- reason. 
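The tree-of-threads behaviour described in the comment above can be sketched as follows; 'fetchA' and 'fetchB' are hypothetical stand-ins for real IO actions:

import Control.Concurrent.Async
import Control.Concurrent (threadDelay)

-- If the outer action exits early (for example because the first
-- 'wait' throws), both children are cancelled automatically.
fetchBoth :: IO a -> IO b -> IO (a, b)
fetchBoth fetchA fetchB =
  withAsync fetchA $ \a ->
    withAsync fetchB $ \b -> do
      ra <- wait a
      rb <- wait b
      return (ra, rb)

main :: IO ()
main = do
  (x, y) <- fetchBoth (return (1 :: Int)) (threadDelay 1000 >> return (2 :: Int))
  print (x, y)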
--- --- The pattern of performing two IO actions concurrently and waiting --- for their results is packaged up in a combinator 'concurrently', so --- we can further shorten the above example to: --- --- > (page1, page2) <- concurrently (getURL url1) (getURL url2) --- > ... --- --- The 'Functor' instance can be used to change the result of an --- 'Async'. For example: --- --- > ghci> a <- async (return 3) --- > ghci> wait a --- > 3 --- > ghci> wait (fmap (+1) a) --- > 4 - ------------------------------------------------------------------------------ - -module Control.Concurrent.Async ( - - -- * Asynchronous actions - Async, - -- ** Spawning - async, asyncBound, asyncOn, asyncWithUnmask, asyncOnWithUnmask, - - -- ** Spawning with automatic 'cancel'ation - withAsync, withAsyncBound, withAsyncOn, withAsyncWithUnmask, withAsyncOnWithUnmask, - - -- ** Querying 'Async's - wait, poll, waitCatch, cancel, cancelWith, - asyncThreadId, - - -- ** STM operations - waitSTM, pollSTM, waitCatchSTM, - - -- ** Waiting for multiple 'Async's - waitAny, waitAnyCatch, waitAnyCancel, waitAnyCatchCancel, - waitEither, waitEitherCatch, waitEitherCancel, waitEitherCatchCancel, - waitEither_, - waitBoth, - - -- ** Waiting for multiple 'Async's in STM - waitAnySTM, waitAnyCatchSTM, - waitEitherSTM, waitEitherCatchSTM, - waitEitherSTM_, - waitBothSTM, - - -- ** Linking - link, link2, - - -- * Convenient utilities - race, race_, concurrently, mapConcurrently, forConcurrently, - Concurrently(..), - - ) where - -import Control.Concurrent.STM -import Control.Exception -import Control.Concurrent -#if !MIN_VERSION_base(4,6,0) -import Prelude hiding (catch) -#endif -import Control.Monad -import Control.Applicative -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(mempty,mappend)) -import Data.Traversable -#endif -#if MIN_VERSION_base(4,9,0) -import Data.Semigroup (Semigroup((<>))) -#endif - - -import GHC.Exts -import GHC.IO hiding (finally, onException) -import GHC.Conc - --- ----------------------------------------------------------------------------- --- STM Async API - - --- | An asynchronous action spawned by 'async' or 'withAsync'. --- Asynchronous actions are executed in a separate thread, and --- operations are provided for waiting for asynchronous actions to --- complete and obtaining their results (see e.g. 'wait'). --- -data Async a = Async { asyncThreadId :: {-# UNPACK #-} !ThreadId - -- ^ Returns the 'ThreadId' of the thread running the given 'Async'. - , _asyncWait :: STM (Either SomeException a) } - -instance Eq (Async a) where - Async a _ == Async b _ = a == b - -instance Ord (Async a) where - Async a _ `compare` Async b _ = a `compare` b - -instance Functor Async where - fmap f (Async a w) = Async a (fmap (fmap f) w) - --- | Spawn an asynchronous action in a separate thread. -async :: IO a -> IO (Async a) -async = inline asyncUsing rawForkIO - --- | Like 'async' but using 'forkOS' internally. -asyncBound :: IO a -> IO (Async a) -asyncBound = asyncUsing forkOS - --- | Like 'async' but using 'forkOn' internally. -asyncOn :: Int -> IO a -> IO (Async a) -asyncOn = asyncUsing . rawForkOn - --- | Like 'async' but using 'forkIOWithUnmask' internally. --- The child thread is passed a function that can be used to unmask asynchronous exceptions. -asyncWithUnmask :: ((forall b . IO b -> IO b) -> IO a) -> IO (Async a) -asyncWithUnmask actionWith = asyncUsing rawForkIO (actionWith unsafeUnmask) - --- | Like 'asyncOn' but using 'forkOnWithUnmask' internally. 
--- The child thread is passed a function that can be used to unmask asynchronous exceptions. -asyncOnWithUnmask :: Int -> ((forall b . IO b -> IO b) -> IO a) -> IO (Async a) -asyncOnWithUnmask cpu actionWith = asyncUsing (rawForkOn cpu) (actionWith unsafeUnmask) - -asyncUsing :: (IO () -> IO ThreadId) - -> IO a -> IO (Async a) -asyncUsing doFork = \action -> do - var <- newEmptyTMVarIO - -- t <- forkFinally action (\r -> atomically $ putTMVar var r) - -- slightly faster: - t <- mask $ \restore -> - doFork $ try (restore action) >>= atomically . putTMVar var - return (Async t (readTMVar var)) - --- | Spawn an asynchronous action in a separate thread, and pass its --- @Async@ handle to the supplied function. When the function returns --- or throws an exception, 'cancel' is called on the @Async@. --- --- > withAsync action inner = bracket (async action) cancel inner --- --- This is a useful variant of 'async' that ensures an @Async@ is --- never left running unintentionally. --- --- Since 'cancel' may block, 'withAsync' may also block; see 'cancel' --- for details. --- -withAsync :: IO a -> (Async a -> IO b) -> IO b -withAsync = inline withAsyncUsing rawForkIO - --- | Like 'withAsync' but uses 'forkOS' internally. -withAsyncBound :: IO a -> (Async a -> IO b) -> IO b -withAsyncBound = withAsyncUsing forkOS - --- | Like 'withAsync' but uses 'forkOn' internally. -withAsyncOn :: Int -> IO a -> (Async a -> IO b) -> IO b -withAsyncOn = withAsyncUsing . rawForkOn - --- | Like 'withAsync' but uses 'forkIOWithUnmask' internally. --- The child thread is passed a function that can be used to unmask asynchronous exceptions. -withAsyncWithUnmask :: ((forall c. IO c -> IO c) -> IO a) -> (Async a -> IO b) -> IO b -withAsyncWithUnmask actionWith = withAsyncUsing rawForkIO (actionWith unsafeUnmask) - --- | Like 'withAsyncOn' but uses 'forkOnWithUnmask' internally. --- The child thread is passed a function that can be used to unmask asynchronous exceptions -withAsyncOnWithUnmask :: Int -> ((forall c. IO c -> IO c) -> IO a) -> (Async a -> IO b) -> IO b -withAsyncOnWithUnmask cpu actionWith = withAsyncUsing (rawForkOn cpu) (actionWith unsafeUnmask) - -withAsyncUsing :: (IO () -> IO ThreadId) - -> IO a -> (Async a -> IO b) -> IO b --- The bracket version works, but is slow. We can do better by --- hand-coding it: -withAsyncUsing doFork = \action inner -> do - var <- newEmptyTMVarIO - mask $ \restore -> do - t <- doFork $ try (restore action) >>= atomically . putTMVar var - let a = Async t (readTMVar var) - r <- restore (inner a) `catchAll` \e -> do cancel a; throwIO e - cancel a - return r - --- | Wait for an asynchronous action to complete, and return its --- value. If the asynchronous action threw an exception, then the --- exception is re-thrown by 'wait'. --- --- > wait = atomically . waitSTM --- -{-# INLINE wait #-} -wait :: Async a -> IO a -wait = atomically . waitSTM - --- | Wait for an asynchronous action to complete, and return either --- @Left e@ if the action raised an exception @e@, or @Right a@ if it --- returned a value @a@. --- --- > waitCatch = atomically . waitCatchSTM --- -{-# INLINE waitCatch #-} -waitCatch :: Async a -> IO (Either SomeException a) -waitCatch = tryAgain . atomically . waitCatchSTM - where - -- See: https://github.com/simonmar/async/issues/14 - tryAgain f = f `catch` \BlockedIndefinitelyOnSTM -> f - --- | Check whether an 'Async' has completed yet. 
If it has not --- completed yet, then the result is @Nothing@, otherwise the result --- is @Just e@ where @e@ is @Left x@ if the @Async@ raised an --- exception @x@, or @Right a@ if it returned a value @a@. --- --- > poll = atomically . pollSTM --- -{-# INLINE poll #-} -poll :: Async a -> IO (Maybe (Either SomeException a)) -poll = atomically . pollSTM - --- | A version of 'wait' that can be used inside an STM transaction. --- -waitSTM :: Async a -> STM a -waitSTM a = do - r <- waitCatchSTM a - either throwSTM return r - --- | A version of 'waitCatch' that can be used inside an STM transaction. --- -{-# INLINE waitCatchSTM #-} -waitCatchSTM :: Async a -> STM (Either SomeException a) -waitCatchSTM (Async _ w) = w - --- | A version of 'poll' that can be used inside an STM transaction. --- -{-# INLINE pollSTM #-} -pollSTM :: Async a -> STM (Maybe (Either SomeException a)) -pollSTM (Async _ w) = (Just <$> w) `orElse` return Nothing - --- | Cancel an asynchronous action by throwing the @ThreadKilled@ --- exception to it. Has no effect if the 'Async' has already --- completed. --- --- > cancel a = throwTo (asyncThreadId a) ThreadKilled --- --- Note that 'cancel' is synchronous in the same sense as 'throwTo'. --- It does not return until the exception has been thrown in the --- target thread, or the target thread has completed. In particular, --- if the target thread is making a foreign call, the exception will --- not be thrown until the foreign call returns, and in this case --- 'cancel' may block indefinitely. An asynchronous 'cancel' can --- of course be obtained by wrapping 'cancel' itself in 'async'. --- -{-# INLINE cancel #-} -cancel :: Async a -> IO () -cancel (Async t _) = throwTo t ThreadKilled - --- | Cancel an asynchronous action by throwing the supplied exception --- to it. --- --- > cancelWith a x = throwTo (asyncThreadId a) x --- --- The notes about the synchronous nature of 'cancel' also apply to --- 'cancelWith'. -cancelWith :: Exception e => Async a -> e -> IO () -cancelWith (Async t _) e = throwTo t e - --- | Wait for any of the supplied asynchronous operations to complete. --- The value returned is a pair of the 'Async' that completed, and the --- result that would be returned by 'wait' on that 'Async'. --- --- If multiple 'Async's complete or have completed, then the value --- returned corresponds to the first completed 'Async' in the list. --- -{-# INLINE waitAnyCatch #-} -waitAnyCatch :: [Async a] -> IO (Async a, Either SomeException a) -waitAnyCatch = atomically . waitAnyCatchSTM - --- | A version of 'waitAnyCatch' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitAnyCatchSTM :: [Async a] -> STM (Async a, Either SomeException a) -waitAnyCatchSTM asyncs = - foldr orElse retry $ - map (\a -> do r <- waitCatchSTM a; return (a, r)) asyncs - --- | Like 'waitAnyCatch', but also cancels the other asynchronous --- operations as soon as one has completed. --- -waitAnyCatchCancel :: [Async a] -> IO (Async a, Either SomeException a) -waitAnyCatchCancel asyncs = - waitAnyCatch asyncs `finally` mapM_ cancel asyncs - --- | Wait for any of the supplied @Async@s to complete. If the first --- to complete throws an exception, then that exception is re-thrown --- by 'waitAny'. --- --- If multiple 'Async's complete or have completed, then the value --- returned corresponds to the first completed 'Async' in the list. --- -{-# INLINE waitAny #-} -waitAny :: [Async a] -> IO (Async a, a) -waitAny = atomically . 
waitAnySTM - --- | A version of 'waitAny' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitAnySTM :: [Async a] -> STM (Async a, a) -waitAnySTM asyncs = - foldr orElse retry $ - map (\a -> do r <- waitSTM a; return (a, r)) asyncs - --- | Like 'waitAny', but also cancels the other asynchronous --- operations as soon as one has completed. --- -waitAnyCancel :: [Async a] -> IO (Async a, a) -waitAnyCancel asyncs = - waitAny asyncs `finally` mapM_ cancel asyncs - --- | Wait for the first of two @Async@s to finish. -{-# INLINE waitEitherCatch #-} -waitEitherCatch :: Async a -> Async b - -> IO (Either (Either SomeException a) - (Either SomeException b)) -waitEitherCatch left right = atomically (waitEitherCatchSTM left right) - --- | A version of 'waitEitherCatch' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitEitherCatchSTM :: Async a -> Async b - -> STM (Either (Either SomeException a) - (Either SomeException b)) -waitEitherCatchSTM left right = - (Left <$> waitCatchSTM left) - `orElse` - (Right <$> waitCatchSTM right) - --- | Like 'waitEitherCatch', but also 'cancel's both @Async@s before --- returning. --- -waitEitherCatchCancel :: Async a -> Async b - -> IO (Either (Either SomeException a) - (Either SomeException b)) -waitEitherCatchCancel left right = - waitEitherCatch left right `finally` (cancel left >> cancel right) - --- | Wait for the first of two @Async@s to finish. If the @Async@ --- that finished first raised an exception, then the exception is --- re-thrown by 'waitEither'. --- -{-# INLINE waitEither #-} -waitEither :: Async a -> Async b -> IO (Either a b) -waitEither left right = atomically (waitEitherSTM left right) - --- | A version of 'waitEither' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitEitherSTM :: Async a -> Async b -> STM (Either a b) -waitEitherSTM left right = - (Left <$> waitSTM left) - `orElse` - (Right <$> waitSTM right) - --- | Like 'waitEither', but the result is ignored. --- -{-# INLINE waitEither_ #-} -waitEither_ :: Async a -> Async b -> IO () -waitEither_ left right = atomically (waitEitherSTM_ left right) - --- | A version of 'waitEither_' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitEitherSTM_:: Async a -> Async b -> STM () -waitEitherSTM_ left right = - (void $ waitSTM left) - `orElse` - (void $ waitSTM right) - --- | Like 'waitEither', but also 'cancel's both @Async@s before --- returning. --- -waitEitherCancel :: Async a -> Async b -> IO (Either a b) -waitEitherCancel left right = - waitEither left right `finally` (cancel left >> cancel right) - --- | Waits for both @Async@s to finish, but if either of them throws --- an exception before they have both finished, then the exception is --- re-thrown by 'waitBoth'. --- -{-# INLINE waitBoth #-} -waitBoth :: Async a -> Async b -> IO (a,b) -waitBoth left right = atomically (waitBothSTM left right) - --- | A version of 'waitBoth' that can be used inside an STM transaction. --- --- @since 2.1.0 -waitBothSTM :: Async a -> Async b -> STM (a,b) -waitBothSTM left right = do - a <- waitSTM left - `orElse` - (waitSTM right >> retry) - b <- waitSTM right - return (a,b) - - --- | Link the given @Async@ to the current thread, such that if the --- @Async@ raises an exception, that exception will be re-thrown in --- the current thread. 
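A short usage sketch for 'link' as documented above; the failing worker body is an assumption made up for illustration:

import Control.Concurrent.Async
import Control.Concurrent (threadDelay)
import Control.Exception (ErrorCall (..), throwIO)

main :: IO ()
main = do
  -- A background worker that fails shortly after starting.
  worker <- async (threadDelay 10000 >> throwIO (ErrorCall "worker died") :: IO ())
  -- After 'link', the worker's exception is re-thrown in this thread,
  -- so the failure cannot go unnoticed.
  link worker
  threadDelay 1000000   -- interrupted by the re-thrown ErrorCall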
--- -link :: Async a -> IO () -link (Async _ w) = do - me <- myThreadId - void $ forkRepeat $ do - r <- atomically $ w - case r of - Left e -> throwTo me e - _ -> return () - --- | Link two @Async@s together, such that if either raises an --- exception, the same exception is re-thrown in the other @Async@. --- -link2 :: Async a -> Async b -> IO () -link2 left@(Async tl _) right@(Async tr _) = - void $ forkRepeat $ do - r <- waitEitherCatch left right - case r of - Left (Left e) -> throwTo tr e - Right (Left e) -> throwTo tl e - _ -> return () - - --- ----------------------------------------------------------------------------- - --- | Run two @IO@ actions concurrently, and return the first to --- finish. The loser of the race is 'cancel'led. --- --- > race left right = --- > withAsync left $ \a -> --- > withAsync right $ \b -> --- > waitEither a b --- -race :: IO a -> IO b -> IO (Either a b) - --- | Like 'race', but the result is ignored. --- -race_ :: IO a -> IO b -> IO () - --- | Run two @IO@ actions concurrently, and return both results. If --- either action throws an exception at any time, then the other --- action is 'cancel'led, and the exception is re-thrown by --- 'concurrently'. --- --- > concurrently left right = --- > withAsync left $ \a -> --- > withAsync right $ \b -> --- > waitBoth a b -concurrently :: IO a -> IO b -> IO (a,b) - -#define USE_ASYNC_VERSIONS 0 - -#if USE_ASYNC_VERSIONS - -race left right = - withAsync left $ \a -> - withAsync right $ \b -> - waitEither a b - -race_ left right = - withAsync left $ \a -> - withAsync right $ \b -> - waitEither_ a b - -concurrently left right = - withAsync left $ \a -> - withAsync right $ \b -> - waitBoth a b - -#else - --- MVar versions of race/concurrently --- More ugly than the Async versions, but quite a bit faster. - --- race :: IO a -> IO b -> IO (Either a b) -race left right = concurrently' left right collect - where - collect m = do - e <- takeMVar m - case e of - Left ex -> throwIO ex - Right r -> return r - --- race_ :: IO a -> IO b -> IO () -race_ left right = void $ race left right - --- concurrently :: IO a -> IO b -> IO (a,b) -concurrently left right = concurrently' left right (collect []) - where - collect [Left a, Right b] _ = return (a,b) - collect [Right b, Left a] _ = return (a,b) - collect xs m = do - e <- takeMVar m - case e of - Left ex -> throwIO ex - Right r -> collect (r:xs) m - -concurrently' :: IO a -> IO b - -> (MVar (Either SomeException (Either a b)) -> IO r) - -> IO r -concurrently' left right collect = do - done <- newEmptyMVar - mask $ \restore -> do - lid <- forkIO $ restore (left >>= putMVar done . Right . Left) - `catchAll` (putMVar done . Left) - rid <- forkIO $ restore (right >>= putMVar done . Right . Right) - `catchAll` (putMVar done . Left) - let stop = killThread rid >> killThread lid - -- kill right before left, to match the semantics of - -- the version using withAsync. (#27) - r <- restore (collect done) `onException` stop - stop - return r - -#endif - --- | maps an @IO@-performing function over any @Traversable@ data --- type, performing all the @IO@ actions concurrently, and returning --- the original data structure with the arguments replaced by the --- results. --- --- For example, @mapConcurrently@ works with lists: --- --- > pages <- mapConcurrently getURL ["url1", "url2", "url3"] --- -mapConcurrently :: Traversable t => (a -> IO b) -> t a -> IO (t b) -mapConcurrently f = runConcurrently . traverse (Concurrently . 
f) - --- | `forConcurrently` is `mapConcurrently` with its arguments flipped --- --- > pages <- forConcurrently ["url1", "url2", "url3"] $ \url -> getURL url --- --- @since 2.1.0 -forConcurrently :: Traversable t => t a -> (a -> IO b)-> IO (t b) -forConcurrently = flip mapConcurrently - --- ----------------------------------------------------------------------------- - --- | A value of type @Concurrently a@ is an @IO@ operation that can be --- composed with other @Concurrently@ values, using the @Applicative@ --- and @Alternative@ instances. --- --- Calling @runConcurrently@ on a value of type @Concurrently a@ will --- execute the @IO@ operations it contains concurrently, before --- delivering the result of type @a@. --- --- For example --- --- > (page1, page2, page3) --- > <- runConcurrently $ (,,) --- > <$> Concurrently (getURL "url1") --- > <*> Concurrently (getURL "url2") --- > <*> Concurrently (getURL "url3") --- -newtype Concurrently a = Concurrently { runConcurrently :: IO a } - -instance Functor Concurrently where - fmap f (Concurrently a) = Concurrently $ f <$> a - -instance Applicative Concurrently where - pure = Concurrently . return - Concurrently fs <*> Concurrently as = - Concurrently $ (\(f, a) -> f a) <$> concurrently fs as - -instance Alternative Concurrently where - empty = Concurrently $ forever (threadDelay maxBound) - Concurrently as <|> Concurrently bs = - Concurrently $ either id id <$> race as bs - -#if MIN_VERSION_base(4,9,0) --- | Only defined by @async@ for @base >= 4.9@ --- --- @since 2.1.0 -instance Semigroup a => Semigroup (Concurrently a) where - (<>) = liftA2 (<>) - --- | @since 2.1.0 -instance (Semigroup a, Monoid a) => Monoid (Concurrently a) where - mempty = pure mempty - mappend = (<>) -#else --- | @since 2.1.0 -instance Monoid a => Monoid (Concurrently a) where - mempty = pure mempty - mappend = liftA2 mappend -#endif - --- ---------------------------------------------------------------------------- - --- | Fork a thread that runs the supplied action, and if it raises an --- exception, re-runs the action. The thread terminates only when the --- action runs to completion without raising an exception. -forkRepeat :: IO a -> IO ThreadId -forkRepeat action = - mask $ \restore -> - let go = do r <- tryAll (restore action) - case r of - Left _ -> go - _ -> return () - in forkIO go - -catchAll :: IO a -> (SomeException -> IO a) -> IO a -catchAll = catch - -tryAll :: IO a -> IO (Either SomeException a) -tryAll = try - --- A version of forkIO that does not include the outer exception --- handler: saves a bit of time when we will be installing our own --- exception handler. -{-# INLINE rawForkIO #-} -rawForkIO :: IO () -> IO ThreadId -rawForkIO action = IO $ \ s -> - case (fork# action s) of (# s1, tid #) -> (# s1, ThreadId tid #) - -{-# INLINE rawForkOn #-} -rawForkOn :: Int -> IO () -> IO ThreadId -rawForkOn (I# cpu) action = IO $ \ s -> - case (forkOn# cpu action s) of (# s1, tid #) -> (# s1, ThreadId tid #) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/LICENSE 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -Copyright (c) 2012, Simon Marlow - -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Simon Marlow nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/Setup.hs 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/test/test-async.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/test/test-async.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.0/test/test-async.hs 2016-01-05 16:42:20.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.0/test/test-async.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,117 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables,DeriveDataTypeable #-} -module Main where - -import Test.Framework (defaultMain, testGroup) -import Test.Framework.Providers.HUnit - -import Test.HUnit - -import Control.Concurrent.Async -import Control.Exception -import Data.Typeable -import Control.Concurrent -import Control.Monad -import Data.Maybe - -import Prelude hiding (catch) - -main = defaultMain tests - -tests = [ - testCase "async_wait" async_wait - , testCase "async_waitCatch" async_waitCatch - , testCase "async_exwait" async_exwait - , testCase "async_exwaitCatch" async_exwaitCatch - , testCase "withasync_waitCatch" withasync_waitCatch - , testCase "withasync_wait2" withasync_wait2 - , testGroup "async_cancel_rep" $ - replicate 1000 $ - testCase "async_cancel" async_cancel - , testCase "async_poll" async_poll - , testCase "async_poll2" async_poll2 - , testCase "withasync_waitCatch_blocked" withasync_waitCatch_blocked - ] - -value = 42 :: Int - -data TestException = TestException deriving (Eq,Show,Typeable) -instance Exception TestException - -async_waitCatch :: Assertion -async_waitCatch = do - a <- async (return value) - r <- waitCatch a - case r of - Left _ -> assertFailure "" - 
Right e -> e @?= value - -async_wait :: Assertion -async_wait = do - a <- async (return value) - r <- wait a - assertEqual "async_wait" r value - -async_exwaitCatch :: Assertion -async_exwaitCatch = do - a <- async (throwIO TestException) - r <- waitCatch a - case r of - Left e -> fromException e @?= Just TestException - Right _ -> assertFailure "" - -async_exwait :: Assertion -async_exwait = do - a <- async (throwIO TestException) - (wait a >> assertFailure "") `catch` \e -> e @?= TestException - -withasync_waitCatch :: Assertion -withasync_waitCatch = do - withAsync (return value) $ \a -> do - r <- waitCatch a - case r of - Left _ -> assertFailure "" - Right e -> e @?= value - -withasync_wait2 :: Assertion -withasync_wait2 = do - a <- withAsync (threadDelay 1000000) $ return - r <- waitCatch a - case r of - Left e -> fromException e @?= Just ThreadKilled - Right _ -> assertFailure "" - -async_cancel :: Assertion -async_cancel = do - a <- async (return value) - cancelWith a TestException - r <- waitCatch a - case r of - Left e -> fromException e @?= Just TestException - Right r -> r @?= value - -async_poll :: Assertion -async_poll = do - a <- async (threadDelay 1000000) - r <- poll a - when (isJust r) $ assertFailure "" - r <- poll a -- poll twice, just to check we don't deadlock - when (isJust r) $ assertFailure "" - -async_poll2 :: Assertion -async_poll2 = do - a <- async (return value) - wait a - r <- poll a - when (isNothing r) $ assertFailure "" - r <- poll a -- poll twice, just to check we don't deadlock - when (isNothing r) $ assertFailure "" - -withasync_waitCatch_blocked :: Assertion -withasync_waitCatch_blocked = do - r <- withAsync (newEmptyMVar >>= takeMVar) waitCatch - case r of - Left e -> - case fromException e of - Just BlockedIndefinitelyOnMVar -> return () - Nothing -> assertFailure $ show e - Right () -> assertFailure "" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/async.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/async.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/async.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/async.cabal 2016-12-23 10:35:39.000000000 +0000 @@ -0,0 +1,64 @@ +name: async +version: 2.1.1 +-- don't forget to update ./changelog.md! +synopsis: Run IO operations asynchronously and wait for their results + +description: + This package provides a higher-level interface over + threads, in which an @Async a@ is a concurrent + thread that will eventually deliver a value of + type @a@. The package provides ways to create + @Async@ computations, wait for their results, and + cancel them. + . + Using @Async@ is safer than using threads in two + ways: + . + * When waiting for a thread to return a result, + if the thread dies with an exception then the + caller must either re-throw the exception + ('wait') or handle it ('waitCatch'); the + exception cannot be ignored. + . + * The API makes it possible to build a tree of + threads that are automatically killed when + their parent dies (see 'withAsync'). 
+ +license: BSD3 +license-file: LICENSE +author: Simon Marlow +maintainer: Simon Marlow +copyright: (c) Simon Marlow 2012 +category: Concurrency +build-type: Simple +cabal-version: >=1.10 +homepage: https://github.com/simonmar/async +bug-reports: https://github.com/simonmar/async/issues +tested-with: GHC==7.11.*, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2, GHC==7.2.2, GHC==7.0.4 + +extra-source-files: + changelog.md + bench/race.hs + +source-repository head + type: git + location: https://github.com/simonmar/async.git + +library + default-language: Haskell2010 + other-extensions: CPP, MagicHash, RankNTypes, UnboxedTuples + if impl(ghc>=7.1) + other-extensions: Trustworthy + exposed-modules: Control.Concurrent.Async + build-depends: base >= 4.3 && < 4.10, stm >= 2.2 && < 2.5 + +test-suite test-async + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: test-async.hs + build-depends: base >= 4.3 && < 4.10, + async, + test-framework, + test-framework-hunit, + HUnit diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/bench/race.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/bench/race.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/bench/race.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/bench/race.hs 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,8 @@ +import Control.Concurrent.Async +import System.Environment +import Control.Monad +import Control.Concurrent + +main = runInUnboundThread $ do + [n] <- fmap (fmap read) getArgs + replicateM_ n $ race (return 1) (return 2) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/changelog.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/changelog.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/changelog.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/changelog.md 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,57 @@ +## Changes in 2.1.1: + + - Add `concurrently_` + - Add `replicateConcurrently` + - Add `replicateConcurrently_` + - Fix incorrect argument order in `forConcurrently_` + - Generalize `mapConcurrently_` and `forConcurrently_` to `Foldable` + - `withAsync` now reliably kills the thread, by using an + uninterruptible cancel + - Make `cancel` wait for the thread to finish, and adjust + 'concurrently' to match + +## Changes in 2.1.0: + + - Bump base dependency to allow 4.10 + - Remove invalid Monad instance for `Concurrently` + - Add `Monoid` and `Semigroup` instances for `Concurrently` + - Add `forConcurrently` (flipped version of `mapConcurrently`) + - Add STM version of all applicable IO functions: + `waitAnySTM`, `waitAnyCatchSTM`, `waitEitherSTM`, + `waitEitherCatchSTM`, `waitEitherSTM_`, and `waitBothSTM`. 
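The 2.1.1 additions listed at the top of this changelog ('concurrently_', 'mapConcurrently_', 'replicateConcurrently' and friends) can be exercised with a small sketch; the strings below are placeholders, not taken from the package:

import Control.Concurrent.Async

main :: IO ()
main = do
  -- Run two actions concurrently and discard both results.
  concurrently_ (putStrLn "left") (putStrLn "right")
  -- Foldable variant of 'mapConcurrently' that discards the results.
  mapConcurrently_ (\u -> putStrLn ("fetching " ++ u)) ["url1", "url2", "url3"]
  -- Run the same action in several threads and collect the results.
  xs <- replicateConcurrently 3 (return (1 :: Int))
  print (sum xs)                 -- 3
  replicateConcurrently_ 2 (putStrLn "ping")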
+ +## Changes in 2.0.2: + + - Add a Monad instance for `Concurrently` + - Bump base dependency to allow 4.9 + +## Changes in 2.0.1.6: + + - Add workaround to waitCatch for #14 + +## Changes in 2.0.1.5: + + - Bump `base` dependencies for GHC 7.8 + +## Changes in 2.0.1.4: + + - Bump `base` dependency of test suite + +## Changes in 2.0.1.3: + + - Bump `base` dependency to allow 4.6 + +## Changes in 2.0.1.2: + + - Bump `stm` dependency to 2.4 + +## Changes in 2.0.1.1: + + - Safe Haskell support: `Control.Concurrent.Async` is now `Trustworthy` + +## Changes in 2.0.1.0: + + - Added a `Functor` instance for `Async` + - Added `asyncBound`, `asyncOn`, `asyncWithUnmask`, `asyncOnWithUnmask`, `withAsyncBound`, `withAsyncOn`, `withAsyncWithUnmask`, `withAsyncOnWithUnmask`. + - Added `mapConcurrently` + - Added `Concurrently` (with `Applicative` and `Alternative` instances) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/Control/Concurrent/Async.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/Control/Concurrent/Async.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/Control/Concurrent/Async.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/Control/Concurrent/Async.hs 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,761 @@ +{-# LANGUAGE CPP, MagicHash, UnboxedTuples, RankNTypes #-} +#if __GLASGOW_HASKELL__ >= 701 +{-# LANGUAGE Trustworthy #-} +#endif +{-# OPTIONS -Wall #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Control.Concurrent.Async +-- Copyright : (c) Simon Marlow 2012 +-- License : BSD3 (see the file LICENSE) +-- +-- Maintainer : Simon Marlow +-- Stability : provisional +-- Portability : non-portable (requires concurrency) +-- +-- This module provides a set of operations for running IO operations +-- asynchronously and waiting for their results. It is a thin layer +-- over the basic concurrency operations provided by +-- "Control.Concurrent". The main additional functionality it +-- provides is the ability to wait for the return value of a thread, +-- but the interface also provides some additional safety and +-- robustness over using threads and @MVar@ directly. +-- +-- The basic type is @'Async' a@, which represents an asynchronous +-- @IO@ action that will return a value of type @a@, or die with an +-- exception. An @Async@ corresponds to a thread, and its 'ThreadId' +-- can be obtained with 'asyncThreadId', although that should rarely +-- be necessary. +-- +-- For example, to fetch two web pages at the same time, we could do +-- this (assuming a suitable @getURL@ function): +-- +-- > do a1 <- async (getURL url1) +-- > a2 <- async (getURL url2) +-- > page1 <- wait a1 +-- > page2 <- wait a2 +-- > ... +-- +-- where 'async' starts the operation in a separate thread, and +-- 'wait' waits for and returns the result. If the operation +-- throws an exception, then that exception is re-thrown by +-- 'wait'. This is one of the ways in which this library +-- provides some additional safety: it is harder to accidentally +-- forget about exceptions thrown in child threads. +-- +-- A slight improvement over the previous example is this: +-- +-- > withAsync (getURL url1) $ \a1 -> do +-- > withAsync (getURL url2) $ \a2 -> do +-- > page1 <- wait a1 +-- > page2 <- wait a2 +-- > ... 
+-- +-- 'withAsync' is like 'async', except that the 'Async' is +-- automatically killed (using 'uninterruptibleCancel') if the +-- enclosing IO opercation returns before it has completed. Consider +-- the case when the first 'wait' throws an exception; then the second +-- 'Async' will be automatically killed rather than being left to run +-- in the background, possibly indefinitely. This is the second way +-- that the library provides additional safety: using 'withAsync' +-- means we can avoid accidentally leaving threads running. +-- Furthermore, 'withAsync' allows a tree of threads to be built, such +-- that children are automatically killed if their parents die for any +-- reason. +-- +-- The pattern of performing two IO actions concurrently and waiting +-- for their results is packaged up in a combinator 'concurrently', so +-- we can further shorten the above example to: +-- +-- > (page1, page2) <- concurrently (getURL url1) (getURL url2) +-- > ... +-- +-- The 'Functor' instance can be used to change the result of an +-- 'Async'. For example: +-- +-- > ghci> a <- async (return 3) +-- > ghci> wait a +-- > 3 +-- > ghci> wait (fmap (+1) a) +-- > 4 + +----------------------------------------------------------------------------- + +module Control.Concurrent.Async ( + + -- * Asynchronous actions + Async, + -- ** Spawning + async, asyncBound, asyncOn, asyncWithUnmask, asyncOnWithUnmask, + + -- ** Spawning with automatic 'cancel'ation + withAsync, withAsyncBound, withAsyncOn, withAsyncWithUnmask, + withAsyncOnWithUnmask, + + -- ** Querying 'Async's + wait, poll, waitCatch, cancel, uninterruptibleCancel, cancelWith, + asyncThreadId, + + -- ** STM operations + waitSTM, pollSTM, waitCatchSTM, + + -- ** Waiting for multiple 'Async's + waitAny, waitAnyCatch, waitAnyCancel, waitAnyCatchCancel, + waitEither, waitEitherCatch, waitEitherCancel, waitEitherCatchCancel, + waitEither_, + waitBoth, + + -- ** Waiting for multiple 'Async's in STM + waitAnySTM, waitAnyCatchSTM, + waitEitherSTM, waitEitherCatchSTM, + waitEitherSTM_, + waitBothSTM, + + -- ** Linking + link, link2, + + -- * Convenient utilities + race, race_, + concurrently, concurrently_, + mapConcurrently, forConcurrently, + mapConcurrently_, forConcurrently_, + replicateConcurrently, replicateConcurrently_, + Concurrently(..), + + ) where + +import Control.Concurrent.STM +import Control.Exception +import Control.Concurrent +import qualified Data.Foldable as F +#if !MIN_VERSION_base(4,6,0) +import Prelude hiding (catch) +#endif +import Control.Monad +import Control.Applicative +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid (Monoid(mempty,mappend)) +import Data.Traversable +#endif +#if MIN_VERSION_base(4,9,0) +import Data.Semigroup (Semigroup((<>))) +#endif + +import Data.IORef + +import GHC.Exts +import GHC.IO hiding (finally, onException) +import GHC.Conc + +-- ----------------------------------------------------------------------------- +-- STM Async API + + +-- | An asynchronous action spawned by 'async' or 'withAsync'. +-- Asynchronous actions are executed in a separate thread, and +-- operations are provided for waiting for asynchronous actions to +-- complete and obtaining their results (see e.g. 'wait'). +-- +data Async a = Async + { asyncThreadId :: {-# UNPACK #-} !ThreadId + -- ^ Returns the 'ThreadId' of the thread running + -- the given 'Async'. 
+ , _asyncWait :: STM (Either SomeException a) + } + +instance Eq (Async a) where + Async a _ == Async b _ = a == b + +instance Ord (Async a) where + Async a _ `compare` Async b _ = a `compare` b + +instance Functor Async where + fmap f (Async a w) = Async a (fmap (fmap f) w) + +-- | Spawn an asynchronous action in a separate thread. +async :: IO a -> IO (Async a) +async = inline asyncUsing rawForkIO + +-- | Like 'async' but using 'forkOS' internally. +asyncBound :: IO a -> IO (Async a) +asyncBound = asyncUsing forkOS + +-- | Like 'async' but using 'forkOn' internally. +asyncOn :: Int -> IO a -> IO (Async a) +asyncOn = asyncUsing . rawForkOn + +-- | Like 'async' but using 'forkIOWithUnmask' internally. The child +-- thread is passed a function that can be used to unmask asynchronous +-- exceptions. +asyncWithUnmask :: ((forall b . IO b -> IO b) -> IO a) -> IO (Async a) +asyncWithUnmask actionWith = asyncUsing rawForkIO (actionWith unsafeUnmask) + +-- | Like 'asyncOn' but using 'forkOnWithUnmask' internally. The +-- child thread is passed a function that can be used to unmask +-- asynchronous exceptions. +asyncOnWithUnmask :: Int -> ((forall b . IO b -> IO b) -> IO a) -> IO (Async a) +asyncOnWithUnmask cpu actionWith = + asyncUsing (rawForkOn cpu) (actionWith unsafeUnmask) + +asyncUsing :: (IO () -> IO ThreadId) + -> IO a -> IO (Async a) +asyncUsing doFork = \action -> do + var <- newEmptyTMVarIO + -- t <- forkFinally action (\r -> atomically $ putTMVar var r) + -- slightly faster: + t <- mask $ \restore -> + doFork $ try (restore action) >>= atomically . putTMVar var + return (Async t (readTMVar var)) + +-- | Spawn an asynchronous action in a separate thread, and pass its +-- @Async@ handle to the supplied function. When the function returns +-- or throws an exception, 'uninterruptibleCancel' is called on the @Async@. +-- +-- > withAsync action inner = bracket (async action) uninterruptibleCancel inner +-- +-- This is a useful variant of 'async' that ensures an @Async@ is +-- never left running unintentionally. +-- +withAsync :: IO a -> (Async a -> IO b) -> IO b +withAsync = inline withAsyncUsing rawForkIO + +-- | Like 'withAsync' but uses 'forkOS' internally. +withAsyncBound :: IO a -> (Async a -> IO b) -> IO b +withAsyncBound = withAsyncUsing forkOS + +-- | Like 'withAsync' but uses 'forkOn' internally. +withAsyncOn :: Int -> IO a -> (Async a -> IO b) -> IO b +withAsyncOn = withAsyncUsing . rawForkOn + +-- | Like 'withAsync' but uses 'forkIOWithUnmask' internally. The +-- child thread is passed a function that can be used to unmask +-- asynchronous exceptions. +withAsyncWithUnmask + :: ((forall c. IO c -> IO c) -> IO a) -> (Async a -> IO b) -> IO b +withAsyncWithUnmask actionWith = + withAsyncUsing rawForkIO (actionWith unsafeUnmask) + +-- | Like 'withAsyncOn' but uses 'forkOnWithUnmask' internally. The +-- child thread is passed a function that can be used to unmask +-- asynchronous exceptions +withAsyncOnWithUnmask + :: Int -> ((forall c. IO c -> IO c) -> IO a) -> (Async a -> IO b) -> IO b +withAsyncOnWithUnmask cpu actionWith = + withAsyncUsing (rawForkOn cpu) (actionWith unsafeUnmask) + +withAsyncUsing :: (IO () -> IO ThreadId) + -> IO a -> (Async a -> IO b) -> IO b +-- The bracket version works, but is slow. We can do better by +-- hand-coding it: +withAsyncUsing doFork = \action inner -> do + var <- newEmptyTMVarIO + mask $ \restore -> do + t <- doFork $ try (restore action) >>= atomically . 
putTMVar var + let a = Async t (readTMVar var) + r <- restore (inner a) `catchAll` \e -> do + uninterruptibleCancel a + throwIO e + uninterruptibleCancel a + return r + +-- | Wait for an asynchronous action to complete, and return its +-- value. If the asynchronous action threw an exception, then the +-- exception is re-thrown by 'wait'. +-- +-- > wait = atomically . waitSTM +-- +{-# INLINE wait #-} +wait :: Async a -> IO a +wait = atomically . waitSTM + +-- | Wait for an asynchronous action to complete, and return either +-- @Left e@ if the action raised an exception @e@, or @Right a@ if it +-- returned a value @a@. +-- +-- > waitCatch = atomically . waitCatchSTM +-- +{-# INLINE waitCatch #-} +waitCatch :: Async a -> IO (Either SomeException a) +waitCatch = tryAgain . atomically . waitCatchSTM + where + -- See: https://github.com/simonmar/async/issues/14 + tryAgain f = f `catch` \BlockedIndefinitelyOnSTM -> f + +-- | Check whether an 'Async' has completed yet. If it has not +-- completed yet, then the result is @Nothing@, otherwise the result +-- is @Just e@ where @e@ is @Left x@ if the @Async@ raised an +-- exception @x@, or @Right a@ if it returned a value @a@. +-- +-- > poll = atomically . pollSTM +-- +{-# INLINE poll #-} +poll :: Async a -> IO (Maybe (Either SomeException a)) +poll = atomically . pollSTM + +-- | A version of 'wait' that can be used inside an STM transaction. +-- +waitSTM :: Async a -> STM a +waitSTM a = do + r <- waitCatchSTM a + either throwSTM return r + +-- | A version of 'waitCatch' that can be used inside an STM transaction. +-- +{-# INLINE waitCatchSTM #-} +waitCatchSTM :: Async a -> STM (Either SomeException a) +waitCatchSTM (Async _ w) = w + +-- | A version of 'poll' that can be used inside an STM transaction. +-- +{-# INLINE pollSTM #-} +pollSTM :: Async a -> STM (Maybe (Either SomeException a)) +pollSTM (Async _ w) = (Just <$> w) `orElse` return Nothing + +-- | Cancel an asynchronous action by throwing the @ThreadKilled@ +-- exception to it, and waiting for the `Async` thread to quit. +-- Has no effect if the 'Async' has already completed. +-- +-- > cancel a = throwTo (asyncThreadId a) ThreadKilled <* waitCatch w +-- +-- Note that 'cancel' will not terminate until the thread the 'Async' +-- refers to has terminated. This means that 'cancel' will block for +-- as long said thread blocks when receiving an asynchronous exception. +-- +-- For example, it could block if: +-- +-- * It's executing a foreign call, and thus cannot receive the asynchronous +-- exception; +-- * It's executing some cleanup handler after having received the exception, +-- and the handler is blocking. +{-# INLINE cancel #-} +cancel :: Async a -> IO () +cancel a@(Async t _) = throwTo t ThreadKilled <* waitCatch a + +-- | Cancel an asynchronous action +-- +-- This is a variant of `cancel`, but it is not interruptible. +{-# INLINE uninterruptibleCancel #-} +uninterruptibleCancel :: Async a -> IO () +uninterruptibleCancel = uninterruptibleMask_ . cancel + +-- | Cancel an asynchronous action by throwing the supplied exception +-- to it. +-- +-- > cancelWith a x = throwTo (asyncThreadId a) x +-- +-- The notes about the synchronous nature of 'cancel' also apply to +-- 'cancelWith'. +cancelWith :: Exception e => Async a -> e -> IO () +cancelWith (Async t _) e = throwTo t e + +-- | Wait for any of the supplied asynchronous operations to complete. +-- The value returned is a pair of the 'Async' that completed, and the +-- result that would be returned by 'wait' on that 'Async'. 
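The stricter 'cancel' semantics documented above (it now returns only after the cancelled thread has finished) can be sketched as follows; the five-second worker is an assumption for illustration:

import Control.Concurrent.Async
import Control.Concurrent (threadDelay)
import Control.Exception (AsyncException (ThreadKilled), fromException)

main :: IO ()
main = do
  a <- async (threadDelay 5000000)   -- a worker that would run for five seconds
  cancel a                           -- blocks until the worker thread is really gone
  -- 'uninterruptibleCancel' behaves the same but cannot itself be interrupted
  -- while it waits for the thread to die.
  r <- waitCatch a
  case r of
    Left e  -> print (fromException e == Just ThreadKilled)   -- True
    Right _ -> putStrLn "finished normally"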
+-- +-- If multiple 'Async's complete or have completed, then the value +-- returned corresponds to the first completed 'Async' in the list. +-- +{-# INLINE waitAnyCatch #-} +waitAnyCatch :: [Async a] -> IO (Async a, Either SomeException a) +waitAnyCatch = atomically . waitAnyCatchSTM + +-- | A version of 'waitAnyCatch' that can be used inside an STM transaction. +-- +-- @since 2.1.0 +waitAnyCatchSTM :: [Async a] -> STM (Async a, Either SomeException a) +waitAnyCatchSTM asyncs = + foldr orElse retry $ + map (\a -> do r <- waitCatchSTM a; return (a, r)) asyncs + +-- | Like 'waitAnyCatch', but also cancels the other asynchronous +-- operations as soon as one has completed. +-- +waitAnyCatchCancel :: [Async a] -> IO (Async a, Either SomeException a) +waitAnyCatchCancel asyncs = + waitAnyCatch asyncs `finally` mapM_ cancel asyncs + +-- | Wait for any of the supplied @Async@s to complete. If the first +-- to complete throws an exception, then that exception is re-thrown +-- by 'waitAny'. +-- +-- If multiple 'Async's complete or have completed, then the value +-- returned corresponds to the first completed 'Async' in the list. +-- +{-# INLINE waitAny #-} +waitAny :: [Async a] -> IO (Async a, a) +waitAny = atomically . waitAnySTM + +-- | A version of 'waitAny' that can be used inside an STM transaction. +-- +-- @since 2.1.0 +waitAnySTM :: [Async a] -> STM (Async a, a) +waitAnySTM asyncs = + foldr orElse retry $ + map (\a -> do r <- waitSTM a; return (a, r)) asyncs + +-- | Like 'waitAny', but also cancels the other asynchronous +-- operations as soon as one has completed. +-- +waitAnyCancel :: [Async a] -> IO (Async a, a) +waitAnyCancel asyncs = + waitAny asyncs `finally` mapM_ cancel asyncs + +-- | Wait for the first of two @Async@s to finish. +{-# INLINE waitEitherCatch #-} +waitEitherCatch :: Async a -> Async b + -> IO (Either (Either SomeException a) + (Either SomeException b)) +waitEitherCatch left right = atomically (waitEitherCatchSTM left right) + +-- | A version of 'waitEitherCatch' that can be used inside an STM transaction. +-- +-- @since 2.1.0 +waitEitherCatchSTM :: Async a -> Async b + -> STM (Either (Either SomeException a) + (Either SomeException b)) +waitEitherCatchSTM left right = + (Left <$> waitCatchSTM left) + `orElse` + (Right <$> waitCatchSTM right) + +-- | Like 'waitEitherCatch', but also 'cancel's both @Async@s before +-- returning. +-- +waitEitherCatchCancel :: Async a -> Async b + -> IO (Either (Either SomeException a) + (Either SomeException b)) +waitEitherCatchCancel left right = + waitEitherCatch left right `finally` (cancel left >> cancel right) + +-- | Wait for the first of two @Async@s to finish. If the @Async@ +-- that finished first raised an exception, then the exception is +-- re-thrown by 'waitEither'. +-- +{-# INLINE waitEither #-} +waitEither :: Async a -> Async b -> IO (Either a b) +waitEither left right = atomically (waitEitherSTM left right) + +-- | A version of 'waitEither' that can be used inside an STM transaction. +-- +-- @since 2.1.0 +waitEitherSTM :: Async a -> Async b -> STM (Either a b) +waitEitherSTM left right = + (Left <$> waitSTM left) + `orElse` + (Right <$> waitSTM right) + +-- | Like 'waitEither', but the result is ignored. +-- +{-# INLINE waitEither_ #-} +waitEither_ :: Async a -> Async b -> IO () +waitEither_ left right = atomically (waitEitherSTM_ left right) + +-- | A version of 'waitEither_' that can be used inside an STM transaction. 
+-- +-- @since 2.1.0 +waitEitherSTM_:: Async a -> Async b -> STM () +waitEitherSTM_ left right = + (void $ waitSTM left) + `orElse` + (void $ waitSTM right) + +-- | Like 'waitEither', but also 'cancel's both @Async@s before +-- returning. +-- +waitEitherCancel :: Async a -> Async b -> IO (Either a b) +waitEitherCancel left right = + waitEither left right `finally` (cancel left >> cancel right) + +-- | Waits for both @Async@s to finish, but if either of them throws +-- an exception before they have both finished, then the exception is +-- re-thrown by 'waitBoth'. +-- +{-# INLINE waitBoth #-} +waitBoth :: Async a -> Async b -> IO (a,b) +waitBoth left right = atomically (waitBothSTM left right) + +-- | A version of 'waitBoth' that can be used inside an STM transaction. +-- +-- @since 2.1.0 +waitBothSTM :: Async a -> Async b -> STM (a,b) +waitBothSTM left right = do + a <- waitSTM left + `orElse` + (waitSTM right >> retry) + b <- waitSTM right + return (a,b) + + +-- | Link the given @Async@ to the current thread, such that if the +-- @Async@ raises an exception, that exception will be re-thrown in +-- the current thread. +-- +link :: Async a -> IO () +link (Async _ w) = do + me <- myThreadId + void $ forkRepeat $ do + r <- atomically $ w + case r of + Left e -> throwTo me e + _ -> return () + +-- | Link two @Async@s together, such that if either raises an +-- exception, the same exception is re-thrown in the other @Async@. +-- +link2 :: Async a -> Async b -> IO () +link2 left@(Async tl _) right@(Async tr _) = + void $ forkRepeat $ do + r <- waitEitherCatch left right + case r of + Left (Left e) -> throwTo tr e + Right (Left e) -> throwTo tl e + _ -> return () + + +-- ----------------------------------------------------------------------------- + +-- | Run two @IO@ actions concurrently, and return the first to +-- finish. The loser of the race is 'cancel'led. +-- +-- > race left right = +-- > withAsync left $ \a -> +-- > withAsync right $ \b -> +-- > waitEither a b +-- +race :: IO a -> IO b -> IO (Either a b) + +-- | Like 'race', but the result is ignored. +-- +race_ :: IO a -> IO b -> IO () + +-- | Run two @IO@ actions concurrently, and return both results. If +-- either action throws an exception at any time, then the other +-- action is 'cancel'led, and the exception is re-thrown by +-- 'concurrently'. +-- +-- > concurrently left right = +-- > withAsync left $ \a -> +-- > withAsync right $ \b -> +-- > waitBoth a b +concurrently :: IO a -> IO b -> IO (a,b) + +#define USE_ASYNC_VERSIONS 0 + +#if USE_ASYNC_VERSIONS + +race left right = + withAsync left $ \a -> + withAsync right $ \b -> + waitEither a b + +race_ left right = + withAsync left $ \a -> + withAsync right $ \b -> + waitEither_ a b + +concurrently left right = + withAsync left $ \a -> + withAsync right $ \b -> + waitBoth a b + +#else + +-- MVar versions of race/concurrently +-- More ugly than the Async versions, but quite a bit faster. 
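-- A brief usage sketch of 'race' and 'concurrently' as documented above;
-- 'fetchA' and 'fetchB' are hypothetical placeholder actions (in a real
-- program they might be network requests or file reads).
--
-- > fetchA, fetchB :: IO String
-- > fetchA = threadDelay 200000 >> return "A"
-- > fetchB = threadDelay 100000 >> return "B"
-- >
-- > demo :: IO ()
-- > demo = do
-- >   winner <- race fetchA fetchB        -- Right "B": the faster action
-- >   print winner                        -- wins, the loser is cancelled
-- >   both <- concurrently fetchA fetchB  -- ("A","B"): both run at once
-- >   print both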
+ +-- race :: IO a -> IO b -> IO (Either a b) +race left right = concurrently' left right collect + where + collect m = do + e <- m + case e of + Left ex -> throwIO ex + Right r -> return r + +-- race_ :: IO a -> IO b -> IO () +race_ left right = void $ race left right + +-- concurrently :: IO a -> IO b -> IO (a,b) +concurrently left right = concurrently' left right (collect []) + where + collect [Left a, Right b] _ = return (a,b) + collect [Right b, Left a] _ = return (a,b) + collect xs m = do + e <- m + case e of + Left ex -> throwIO ex + Right r -> collect (r:xs) m + +concurrently' :: IO a -> IO b + -> (IO (Either SomeException (Either a b)) -> IO r) + -> IO r +concurrently' left right collect = do + done <- newEmptyMVar + mask $ \restore -> do + lid <- forkIO $ restore (left >>= putMVar done . Right . Left) + `catchAll` (putMVar done . Left) + rid <- forkIO $ restore (right >>= putMVar done . Right . Right) + `catchAll` (putMVar done . Left) + + count <- newIORef (2 :: Int) + let takeDone = do + -- Decrement the counter so we know how many takes are left. + -- Since only the parent thread is calling this, we can + -- use non-atomic modifications. + modifyIORef count (subtract 1) + + takeMVar done + + let stop = do + -- kill right before left, to match the semantics of + -- the version using withAsync. (#27) + uninterruptibleMask_ $ do + killThread rid >> killThread lid + -- ensure the children are really dead + count' <- readIORef count + replicateM_ count' (takeMVar done) + r <- restore (collect takeDone) `onException` stop + stop + return r + +#endif + +-- | maps an @IO@-performing function over any @Traversable@ data +-- type, performing all the @IO@ actions concurrently, and returning +-- the original data structure with the arguments replaced by the +-- results. +-- +-- For example, @mapConcurrently@ works with lists: +-- +-- > pages <- mapConcurrently getURL ["url1", "url2", "url3"] +-- +mapConcurrently :: Traversable t => (a -> IO b) -> t a -> IO (t b) +mapConcurrently f = runConcurrently . traverse (Concurrently . f) + +-- | `forConcurrently` is `mapConcurrently` with its arguments flipped +-- +-- > pages <- forConcurrently ["url1", "url2", "url3"] $ \url -> getURL url +-- +-- @since 2.1.0 +forConcurrently :: Traversable t => t a -> (a -> IO b)-> IO (t b) +forConcurrently = flip mapConcurrently + +-- | `mapConcurrently_` is `mapConcurrently` with the return value discarded, +-- just like @mapM_ +mapConcurrently_ :: F.Foldable f => (a -> IO b) -> f a -> IO () +mapConcurrently_ f = runConcurrently . F.foldMap (Concurrently . void . f) + +-- | `forConcurrently_` is `forConcurrently` with the return value discarded, +-- just like @forM_ +forConcurrently_ :: F.Foldable f => f a -> (a -> IO b) -> IO () +forConcurrently_ = flip mapConcurrently_ + +-- | 'concurrently', but ignore the result values +-- +-- @since 2.1.1 +concurrently_ :: IO a -> IO b -> IO () +concurrently_ left right = concurrently' left right (collect 0) + where + collect 2 _ = return () + collect i m = do + e <- m + case e of + Left ex -> throwIO ex + Right _ -> collect (i + 1 :: Int) m + +-- | Perform the action in the given number of threads. +-- +-- @since 2.1.1 +replicateConcurrently :: Int -> IO a -> IO [a] +replicateConcurrently cnt = runConcurrently . sequenceA . replicate cnt . Concurrently + +-- | Same as 'replicateConcurrently', but ignore the results. +-- +-- @since 2.1.1 +replicateConcurrently_ :: Int -> IO a -> IO () +replicateConcurrently_ cnt = runConcurrently . F.fold . replicate cnt . Concurrently . 
void + +-- ----------------------------------------------------------------------------- + +-- | A value of type @Concurrently a@ is an @IO@ operation that can be +-- composed with other @Concurrently@ values, using the @Applicative@ +-- and @Alternative@ instances. +-- +-- Calling @runConcurrently@ on a value of type @Concurrently a@ will +-- execute the @IO@ operations it contains concurrently, before +-- delivering the result of type @a@. +-- +-- For example +-- +-- > (page1, page2, page3) +-- > <- runConcurrently $ (,,) +-- > <$> Concurrently (getURL "url1") +-- > <*> Concurrently (getURL "url2") +-- > <*> Concurrently (getURL "url3") +-- +newtype Concurrently a = Concurrently { runConcurrently :: IO a } + +instance Functor Concurrently where + fmap f (Concurrently a) = Concurrently $ f <$> a + +instance Applicative Concurrently where + pure = Concurrently . return + Concurrently fs <*> Concurrently as = + Concurrently $ (\(f, a) -> f a) <$> concurrently fs as + +instance Alternative Concurrently where + empty = Concurrently $ forever (threadDelay maxBound) + Concurrently as <|> Concurrently bs = + Concurrently $ either id id <$> race as bs + +#if MIN_VERSION_base(4,9,0) +-- | Only defined by @async@ for @base >= 4.9@ +-- +-- @since 2.1.0 +instance Semigroup a => Semigroup (Concurrently a) where + (<>) = liftA2 (<>) + +-- | @since 2.1.0 +instance (Semigroup a, Monoid a) => Monoid (Concurrently a) where + mempty = pure mempty + mappend = (<>) +#else +-- | @since 2.1.0 +instance Monoid a => Monoid (Concurrently a) where + mempty = pure mempty + mappend = liftA2 mappend +#endif + +-- ---------------------------------------------------------------------------- + +-- | Fork a thread that runs the supplied action, and if it raises an +-- exception, re-runs the action. The thread terminates only when the +-- action runs to completion without raising an exception. +forkRepeat :: IO a -> IO ThreadId +forkRepeat action = + mask $ \restore -> + let go = do r <- tryAll (restore action) + case r of + Left _ -> go + _ -> return () + in forkIO go + +catchAll :: IO a -> (SomeException -> IO a) -> IO a +catchAll = catch + +tryAll :: IO a -> IO (Either SomeException a) +tryAll = try + +-- A version of forkIO that does not include the outer exception +-- handler: saves a bit of time when we will be installing our own +-- exception handler. +{-# INLINE rawForkIO #-} +rawForkIO :: IO () -> IO ThreadId +rawForkIO action = IO $ \ s -> + case (fork# action s) of (# s1, tid #) -> (# s1, ThreadId tid #) + +{-# INLINE rawForkOn #-} +rawForkOn :: Int -> IO () -> IO ThreadId +rawForkOn (I# cpu) action = IO $ \ s -> + case (forkOn# cpu action s) of (# s1, tid #) -> (# s1, ThreadId tid #) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/LICENSE 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,30 @@ +Copyright (c) 2012, Simon Marlow + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Simon Marlow nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/Setup.hs 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/test/test-async.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/test/test-async.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/async-2.1.1/test/test-async.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/async-2.1.1/test/test-async.hs 2016-11-18 14:09:22.000000000 +0000 @@ -0,0 +1,238 @@ +{-# LANGUAGE ScopedTypeVariables,DeriveDataTypeable #-} +module Main where + +import Test.Framework (defaultMain, testGroup) +import Test.Framework.Providers.HUnit + +import Test.HUnit + +import Control.Concurrent.Async +import Control.Exception +import Data.IORef +import Data.Typeable +import Control.Concurrent +import Control.Monad +import Data.List (sort) +import Data.Maybe + +import Prelude hiding (catch) + +main = defaultMain tests + +tests = [ + testCase "async_wait" async_wait + , testCase "async_waitCatch" async_waitCatch + , testCase "async_exwait" async_exwait + , testCase "async_exwaitCatch" async_exwaitCatch + , testCase "withasync_waitCatch" withasync_waitCatch + , testCase "withasync_wait2" withasync_wait2 + , testGroup "async_cancel_rep" $ + replicate 1000 $ + testCase "async_cancel" async_cancel + , testCase "async_poll" async_poll + , testCase "async_poll2" async_poll2 + , testCase "withasync_waitCatch_blocked" withasync_waitCatch_blocked + , testGroup "children surviving too long" + [ testCase "concurrently+success" concurrently_success + , testCase "concurrently+failure" concurrently_failure + , testCase "race+success" race_success + , testCase "race+failure" race_failure + , testCase "cancel" cancel_survive + , testCase "withAsync" withasync_survive + ] + , testCase "concurrently_" case_concurrently_ + , testCase "replicateConcurrently_" case_replicateConcurrently + , testCase "replicateConcurrently" case_replicateConcurrently_ + ] 
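-- A sketch of how one more assertion in the same HUnit style might look;
-- 'async_waitEither' is hypothetical and not part of this suite (running it
-- would also require a matching 'testCase' entry in the list above).
--
-- > async_waitEither :: Assertion
-- > async_waitEither = do
-- >   a <- async (threadDelay 1000000 >> return value)
-- >   b <- async (return value)
-- >   r <- waitEither a b
-- >   r @?= Right value               -- 'b' finishes first; 'a' still sleeping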
+ +value = 42 :: Int + +data TestException = TestException deriving (Eq,Show,Typeable) +instance Exception TestException + +async_waitCatch :: Assertion +async_waitCatch = do + a <- async (return value) + r <- waitCatch a + case r of + Left _ -> assertFailure "" + Right e -> e @?= value + +async_wait :: Assertion +async_wait = do + a <- async (return value) + r <- wait a + assertEqual "async_wait" r value + +async_exwaitCatch :: Assertion +async_exwaitCatch = do + a <- async (throwIO TestException) + r <- waitCatch a + case r of + Left e -> fromException e @?= Just TestException + Right _ -> assertFailure "" + +async_exwait :: Assertion +async_exwait = do + a <- async (throwIO TestException) + (wait a >> assertFailure "") `catch` \e -> e @?= TestException + +withasync_waitCatch :: Assertion +withasync_waitCatch = do + withAsync (return value) $ \a -> do + r <- waitCatch a + case r of + Left _ -> assertFailure "" + Right e -> e @?= value + +withasync_wait2 :: Assertion +withasync_wait2 = do + a <- withAsync (threadDelay 1000000) $ return + r <- waitCatch a + case r of + Left e -> fromException e @?= Just ThreadKilled + Right _ -> assertFailure "" + +async_cancel :: Assertion +async_cancel = do + a <- async (return value) + cancelWith a TestException + r <- waitCatch a + case r of + Left e -> fromException e @?= Just TestException + Right r -> r @?= value + +async_poll :: Assertion +async_poll = do + a <- async (threadDelay 1000000) + r <- poll a + when (isJust r) $ assertFailure "" + r <- poll a -- poll twice, just to check we don't deadlock + when (isJust r) $ assertFailure "" + +async_poll2 :: Assertion +async_poll2 = do + a <- async (return value) + wait a + r <- poll a + when (isNothing r) $ assertFailure "" + r <- poll a -- poll twice, just to check we don't deadlock + when (isNothing r) $ assertFailure "" + +withasync_waitCatch_blocked :: Assertion +withasync_waitCatch_blocked = do + r <- withAsync (newEmptyMVar >>= takeMVar) waitCatch + case r of + Left e -> + case fromException e of + Just BlockedIndefinitelyOnMVar -> return () + Nothing -> assertFailure $ show e + Right () -> assertFailure "" + +concurrently_success :: Assertion +concurrently_success = do + finalRes <- newIORef "never filled" + baton <- newEmptyMVar + let quick = return () + slow = threadDelay 10000 `finally` do + threadDelay 10000 + writeIORef finalRes "slow" + putMVar baton () + _ <- concurrently quick slow + writeIORef finalRes "parent" + takeMVar baton + res <- readIORef finalRes + res @?= "parent" + +concurrently_failure :: Assertion +concurrently_failure = do + finalRes <- newIORef "never filled" + let quick = error "a quick death" + slow = threadDelay 10000 `finally` do + threadDelay 10000 + writeIORef finalRes "slow" + _ :: Either SomeException ((), ()) <- try (concurrently quick slow) + writeIORef finalRes "parent" + threadDelay 1000000 -- not using the baton, can lead to deadlock detection + res <- readIORef finalRes + res @?= "parent" + +race_success :: Assertion +race_success = do + finalRes <- newIORef "never filled" + let quick = return () + slow = threadDelay 10000 `finally` do + threadDelay 10000 + writeIORef finalRes "slow" + race_ quick slow + writeIORef finalRes "parent" + threadDelay 1000000 -- not using the baton, can lead to deadlock detection + res <- readIORef finalRes + res @?= "parent" + +race_failure :: Assertion +race_failure = do + finalRes <- newIORef "never filled" + baton <- newEmptyMVar + let quick = error "a quick death" + slow restore = restore (threadDelay 10000) `finally` do + 
threadDelay 10000 + writeIORef finalRes "slow" + putMVar baton () + _ :: Either SomeException () <- + try $ mask $ \restore -> + race_ quick (slow restore) + writeIORef finalRes "parent" + takeMVar baton + res <- readIORef finalRes + res @?= "parent" + +cancel_survive :: Assertion +cancel_survive = do + finalRes <- newIORef "never filled" + a <- async $ threadDelay 10000 `finally` do + threadDelay 10000 + writeIORef finalRes "child" + cancel a + writeIORef finalRes "parent" + threadDelay 1000000 -- not using the baton, can lead to deadlock detection + res <- readIORef finalRes + res @?= "parent" + +withasync_survive :: Assertion +withasync_survive = do + finalRes <- newIORef "never filled" + let child = threadDelay 10000 `finally` do + threadDelay 10000 + writeIORef finalRes "child" + withAsync child (\_ -> return ()) + writeIORef finalRes "parent" + threadDelay 1000000 -- not using the baton, can lead to deadlock detection + res <- readIORef finalRes + res @?= "parent" + +case_concurrently_ :: Assertion +case_concurrently_ = do + ref <- newIORef 0 + () <- concurrently_ + (atomicModifyIORef ref (\x -> (x + 1, True))) + (atomicModifyIORef ref (\x -> (x + 2, 'x'))) + res <- readIORef ref + res @?= 3 + +case_replicateConcurrently :: Assertion +case_replicateConcurrently = do + ref <- newIORef 0 + let action = atomicModifyIORef ref (\x -> (x + 1, x + 1)) + resList <- replicateConcurrently 100 action + resVal <- readIORef ref + resVal @?= 100 + sort resList @?= [1..100] + +case_replicateConcurrently_ :: Assertion +case_replicateConcurrently_ = do + ref <- newIORef 0 + let action = atomicModifyIORef ref (\x -> (x + 1, x + 1)) + () <- replicateConcurrently_ 100 action + resVal <- readIORef ref + resVal @?= 100 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/buildplan.lst cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/buildplan.lst --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/buildplan.lst 2016-11-07 10:02:50.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/buildplan.lst 2016-12-23 10:35:33.000000000 +0000 @@ -1,4 +1,4 @@ -Cabal-1.24.1.0 +Cabal-1.24.2.0 base16-bytestring-0.1.1.6 base64-bytestring-1.0.0.1 cryptohash-sha256-0.11.100.1 @@ -10,10 +10,10 @@ tar-0.5.0.3 text-1.2.2.1 zlib-0.6.1.2 -async-2.1.0 +async-2.1.1 parsec-3.1.11 hashable-1.2.4.0 network-uri-2.6.1.0 hackage-security-0.5.2.2 HTTP-4000.3.3 -cabal-install-1.24.0.1 +cabal-install-1.24.0.2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Cabal.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,371 +0,0 @@ -name: Cabal -version: 1.24.1.0 -copyright: 2003-2006, Isaac Jones - 2005-2011, Duncan Coutts -license: BSD3 -license-file: LICENSE -author: Isaac Jones - Duncan Coutts -maintainer: cabal-devel@haskell.org -homepage: http://www.haskell.org/cabal/ -bug-reports: https://github.com/haskell/cabal/issues -synopsis: A framework for packaging Haskell software -description: - The Haskell Common Architecture for Building Applications and - Libraries: a framework defining a common interface for authors to more - easily build their Haskell applications in a portable way. - . 
- The Haskell Cabal is part of a larger infrastructure for distributing, - organizing, and cataloging Haskell libraries and tools. -category: Distribution -cabal-version: >=1.10 -build-type: Simple --- If we use a new Cabal feature, this needs to be changed to Custom so --- we can bootstrap. - -extra-source-files: - README.md tests/README.md changelog - doc/Cabal.css doc/developing-packages.markdown doc/index.markdown - doc/installing-packages.markdown - doc/misc.markdown - - -- Generated with 'misc/gen-extra-source-files.sh' - -- Do NOT edit this section manually; instead, run the script. - -- BEGIN gen-extra-source-files - tests/PackageTests/AllowNewer/AllowNewer.cabal - tests/PackageTests/AllowNewer/benchmarks/Bench.hs - tests/PackageTests/AllowNewer/src/Foo.hs - tests/PackageTests/AllowNewer/tests/Test.hs - tests/PackageTests/BenchmarkExeV10/Foo.hs - tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs - tests/PackageTests/BenchmarkExeV10/my.cabal - tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal - tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs - tests/PackageTests/BenchmarkStanza/my.cabal - tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal - tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs - tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal - tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal - tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs - tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs - tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal - tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs - tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal - tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs - tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal - tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal - tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs - tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs - 
tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal - tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs - tests/PackageTests/BuildableField/BuildableField.cabal - tests/PackageTests/BuildableField/Main.hs - tests/PackageTests/CMain/Bar.hs - tests/PackageTests/CMain/foo.c - tests/PackageTests/CMain/my.cabal - tests/PackageTests/DeterministicAr/Lib.hs - tests/PackageTests/DeterministicAr/my.cabal - tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal - tests/PackageTests/DuplicateModuleName/src/Foo.hs - tests/PackageTests/DuplicateModuleName/tests/Foo.hs - tests/PackageTests/DuplicateModuleName/tests2/Foo.hs - tests/PackageTests/EmptyLib/empty/empty.cabal - tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/SameDirectory/ghc - tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg - tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 - tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 - tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 - tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 - tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc - tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg - tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 - tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 - tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal - tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 - tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 - tests/PackageTests/Haddock/CPP.hs - tests/PackageTests/Haddock/Literate.lhs - tests/PackageTests/Haddock/NoCPP.hs - tests/PackageTests/Haddock/Simple.hs - tests/PackageTests/Haddock/my.cabal - tests/PackageTests/HaddockNewline/A.hs - tests/PackageTests/HaddockNewline/HaddockNewline.cabal - tests/PackageTests/HaddockNewline/Setup.hs - tests/PackageTests/Options.hs - tests/PackageTests/OrderFlags/Foo.hs - tests/PackageTests/OrderFlags/my.cabal - tests/PackageTests/PathsModule/Executable/Main.hs - tests/PackageTests/PathsModule/Executable/my.cabal - tests/PackageTests/PathsModule/Library/my.cabal - tests/PackageTests/PreProcess/Foo.hsc - tests/PackageTests/PreProcess/Main.hs - tests/PackageTests/PreProcess/my.cabal - tests/PackageTests/PreProcessExtraSources/Foo.hsc - tests/PackageTests/PreProcessExtraSources/Main.hs - tests/PackageTests/PreProcessExtraSources/my.cabal - tests/PackageTests/ReexportedModules/ReexportedModules.cabal - tests/PackageTests/TemplateHaskell/dynamic/Exe.hs - tests/PackageTests/TemplateHaskell/dynamic/Lib.hs - tests/PackageTests/TemplateHaskell/dynamic/TH.hs - tests/PackageTests/TemplateHaskell/dynamic/my.cabal - tests/PackageTests/TemplateHaskell/profiling/Exe.hs - tests/PackageTests/TemplateHaskell/profiling/Lib.hs - tests/PackageTests/TemplateHaskell/profiling/TH.hs - tests/PackageTests/TemplateHaskell/profiling/my.cabal - tests/PackageTests/TemplateHaskell/vanilla/Exe.hs - tests/PackageTests/TemplateHaskell/vanilla/Lib.hs - tests/PackageTests/TemplateHaskell/vanilla/TH.hs - tests/PackageTests/TemplateHaskell/vanilla/my.cabal - tests/PackageTests/TestNameCollision/child/Child.hs - tests/PackageTests/TestNameCollision/child/child.cabal - 
tests/PackageTests/TestNameCollision/child/tests/Test.hs - tests/PackageTests/TestNameCollision/parent/Parent.hs - tests/PackageTests/TestNameCollision/parent/parent.cabal - tests/PackageTests/TestOptions/TestOptions.cabal - tests/PackageTests/TestOptions/test-TestOptions.hs - tests/PackageTests/TestStanza/my.cabal - tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs - tests/PackageTests/TestSuiteTests/ExeV10/my.cabal - tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs - tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs - tests/PackageTests/TestSuiteTests/LibV09/Lib.hs - tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal - tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs - tests/PackageTests/Tests.hs - tests/PackageTests/UniqueIPID/P1/M.hs - tests/PackageTests/UniqueIPID/P1/my.cabal - tests/PackageTests/UniqueIPID/P2/M.hs - tests/PackageTests/UniqueIPID/P2/my.cabal - tests/PackageTests/multInst/my.cabal - tests/Setup.hs - tests/hackage/check.sh - tests/hackage/download.sh - tests/hackage/unpack.sh - tests/misc/ghc-supported-languages.hs - -- END gen-extra-source-files - -source-repository head - type: git - location: https://github.com/haskell/cabal/ - subdir: Cabal - -flag bundled-binary-generic - default: False - -library - build-depends: - array >= 0.1 && < 0.6, - base >= 4.5 && < 5, - bytestring >= 0.9 && < 1, - containers >= 0.4 && < 0.6, - deepseq >= 1.3 && < 1.5, - directory >= 1.1 && < 1.3, - filepath >= 1.3 && < 1.5, - pretty >= 1.1 && < 1.2, - process >= 1.1.0.1 && < 1.5, - time >= 1.4 && < 1.7 - - if flag(bundled-binary-generic) - build-depends: binary >= 0.5 && < 0.7 - else - build-depends: binary >= 0.7 && < 0.9 - - -- Needed for GHC.Generics before GHC 7.6 - if impl(ghc < 7.6) - build-depends: ghc-prim >= 0.2 && < 0.3 - - if !os(windows) - build-depends: - unix >= 2.5 && < 2.8 - - if os(windows) - build-depends: - Win32 >= 2.2 && < 2.4 - - ghc-options: -Wall -fno-ignore-asserts -fwarn-tabs - if impl(ghc >= 8.0) - ghc-options: -Wcompat -Wnoncanonical-monad-instances - -Wnoncanonical-monadfail-instances - - exposed-modules: - Distribution.Compat.CreatePipe - Distribution.Compat.Environment - Distribution.Compat.Exception - Distribution.Compat.Internal.TempFile - Distribution.Compat.ReadP - Distribution.Compat.Semigroup - Distribution.Compiler - Distribution.InstalledPackageInfo - Distribution.License - Distribution.Make - Distribution.ModuleName - Distribution.Package - Distribution.PackageDescription - Distribution.PackageDescription.Check - Distribution.PackageDescription.Configuration - Distribution.PackageDescription.Parse - Distribution.PackageDescription.PrettyPrint - Distribution.PackageDescription.Utils - Distribution.ParseUtils - Distribution.ReadE - Distribution.Simple - Distribution.Simple.Bench - Distribution.Simple.Build - Distribution.Simple.Build.Macros - Distribution.Simple.Build.PathsModule - Distribution.Simple.BuildPaths - Distribution.Simple.BuildTarget - Distribution.Simple.CCompiler - Distribution.Simple.Command - Distribution.Simple.Compiler - Distribution.Simple.Configure - Distribution.Simple.GHC - Distribution.Simple.GHCJS - Distribution.Simple.Haddock - Distribution.Simple.HaskellSuite - Distribution.Simple.Hpc - Distribution.Simple.Install - Distribution.Simple.InstallDirs - Distribution.Simple.JHC - Distribution.Simple.LHC - Distribution.Simple.LocalBuildInfo - Distribution.Simple.PackageIndex - Distribution.Simple.PreProcess - Distribution.Simple.PreProcess.Unlit - Distribution.Simple.Program - 
Distribution.Simple.Program.Ar - Distribution.Simple.Program.Builtin - Distribution.Simple.Program.Db - Distribution.Simple.Program.Find - Distribution.Simple.Program.GHC - Distribution.Simple.Program.HcPkg - Distribution.Simple.Program.Hpc - Distribution.Simple.Program.Internal - Distribution.Simple.Program.Ld - Distribution.Simple.Program.Run - Distribution.Simple.Program.Script - Distribution.Simple.Program.Strip - Distribution.Simple.Program.Types - Distribution.Simple.Register - Distribution.Simple.Setup - Distribution.Simple.SrcDist - Distribution.Simple.Test - Distribution.Simple.Test.ExeV10 - Distribution.Simple.Test.LibV09 - Distribution.Simple.Test.Log - Distribution.Simple.UHC - Distribution.Simple.UserHooks - Distribution.Simple.Utils - Distribution.System - Distribution.TestSuite - Distribution.Text - Distribution.Utils.NubList - Distribution.Verbosity - Distribution.Version - Language.Haskell.Extension - Distribution.Compat.Binary - - other-modules: - Distribution.Compat.CopyFile - Distribution.Compat.GetShortPathName - Distribution.Compat.MonadFail - Distribution.GetOpt - Distribution.Lex - Distribution.Simple.GHC.Internal - Distribution.Simple.GHC.IPI642 - Distribution.Simple.GHC.IPIConvert - Distribution.Simple.GHC.ImplInfo - Paths_Cabal - - if flag(bundled-binary-generic) - other-modules: - Distribution.Compat.Binary.Class - Distribution.Compat.Binary.Generic - - default-language: Haskell98 - --- Small, fast running tests. -test-suite unit-tests - type: exitcode-stdio-1.0 - hs-source-dirs: tests - other-modules: - Test.Laws - Test.QuickCheck.Utils - UnitTests.Distribution.Compat.CreatePipe - UnitTests.Distribution.Compat.ReadP - UnitTests.Distribution.Simple.Program.Internal - UnitTests.Distribution.Simple.Utils - UnitTests.Distribution.System - UnitTests.Distribution.Utils.NubList - UnitTests.Distribution.Version - main-is: UnitTests.hs - build-depends: - base, - directory, - tasty, - tasty-hunit, - tasty-quickcheck, - pretty, - QuickCheck >= 2.7 && < 2.10, - Cabal - ghc-options: -Wall - default-language: Haskell98 - --- Large, system tests that build packages. -test-suite package-tests - type: exitcode-stdio-1.0 - main-is: PackageTests.hs - other-modules: - PackageTests.BenchmarkStanza.Check - PackageTests.TestStanza.Check - PackageTests.DeterministicAr.Check - PackageTests.TestSuiteTests.ExeV10.Check - PackageTests.PackageTester - hs-source-dirs: tests - build-depends: - base, - containers, - tagged, - tasty, - tasty-hunit, - transformers, - Cabal, - process, - directory, - filepath, - bytestring, - regex-posix, - old-time - if !os(windows) - build-depends: unix, exceptions - ghc-options: -Wall -rtsopts - default-extensions: CPP - default-language: Haskell98 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/changelog cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/changelog --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/changelog 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/changelog 1970-01-01 00:00:00.000000000 +0000 @@ -1,479 +0,0 @@ --*-change-log-*- -1.24.1.0 Ryan Thomas October 2016 - * API addition: 'differenceVersionRanges' (#3519). - * Fixed reexported-modules display mangling (#3928). - * Check that the correct cabal-version is specified when the - extra-doc-files field is present (#3825). - * Fixed an incorrect invocation of GetShortPathName that was - causing build failures on Windows (#3649). 
- * Linker flags are now set correctly on GHC >= 7.8 (#3443). - -1.24.0.0 Ryan Thomas May 2016 - * Support GHC 8. - * Deal with extra C sources from preprocessors (#238). - * Include cabal_macros.h when running c2hs (#2600). - * Don't recompile C sources unless needed (#2601). - * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable - * Add '--profiling-detail=$level' flag with a default for libraries - and executables of 'exported-functions' and 'toplevel-functions' - respetively (GHC's '-fprof-auto-{exported,top}' flags) (#193). - * New 'custom-setup' stanza to specify setup deps. Setup is also built - with the cabal_macros.h style macros, for conditional compilation. - * Support Haddock response files (#2746). - * Fixed a bug in the Text instance for Platform (#2862). - * New 'setup haddock' option: '--for-hackage' (#2852). - * New --show-detail=direct; like streaming, but allows the test - program to detect that is connected to a terminal, and works - reliable with a non-threaded runtime (#2911, and serves as a - work-around for #2398) - * Library support for multi-instance package DBs (#2948). - * Improved the './Setup configure' solver (#3082, #3076). - * The '--allow-newer' option can be now used with './Setup - configure' (#3163). - * Added a way to specify extra locations to find OS X frameworks - in ('extra-framework-dirs'). Can be used both in .cabal files and - as an argument to './Setup configure' (#3158). - * Macros 'VERSION_$pkgname' and 'MIN_VERSION_$pkgname' are now - also generated for the current package. (#3235). - -1.22.0.0 Johan Tibell January 2015 - * Support GHC 7.10. - * Experimental support for emitting DWARF debug info. - * Preliminary support for relocatable packages. - * Allow cabal to be used inside cabal exec enviroments. - * hpc: support mutliple "ways" (e.g. profiling and vanilla). - * Support GHCJS. - * Improved command line documentation. - * Add '-none' constraint syntax for version ranges (#2093). - * Make the default doc index file path compiler/arch/os-dependent - (#2136). - * Warn instead of dying when generating documentation and hscolour - isn't installed (455f51622fa38347db62197a04bb0fa5b928ff17). - * Support the new BinaryLiterals extension - (1f25ab3c5eff311ada73c6c987061b80e9bbebd9). - * Warn about 'ghc-prof-options: -auto-all' in 'cabal check' (#2162). - * Add preliminary support for multiple instances of the same package - version installed side-by-side (#2002). - * New binary build config format - faster build times (#2076). - * Support module thinning and renaming (#2038). - * Add a new license type: UnspecifiedLicense (#2141). - * Remove support for Hugs and nhc98 (#2168). - * Invoke 'tar' with '--formar ustar' if possible in 'sdist' (#1903). - * Replace --enable-library-coverage with --enable-coverage, which - enables program coverage for all components (#1945). - * Suggest that `ExitFailure 9` is probably due to memory - exhaustion (#1522). - * Drop support for Haddock < 2.0 (#1808, #1718). - * Make 'cabal test'/'cabal bench' build only what's needed for - running tests/benchmarks (#1821). - * Build shared libraries by default when linking executables dynamically. - * Build profiled libraries by default when profiling executables. - -1.20.0.1 Johan Tibell May 2014 - * Fix streaming test output. 
- -1.20.0.0 Johan Tibell April 2014 - * Rewrite user guide - * Fix repl Ctrl+C handling - * Add haskell-suite compiler support - * Add __HADDOCK_VERSION__ define - * Allow specifying exact dependency version using hash - * Rename extra-html-files to extra-doc-files - * Add parallel build support for GHC 7.8 and later - * Don't call ranlib on OS X - * Avoid re-linking executables, test suites, and benchmarks - unnecessarily, shortening build times - * Add --allow-newer which allows upper version bounds to be - ignored - * Add --enable-library-stripping - * Add command for freezing dependencies - * Allow repl to be used outside Cabal packages - * Add --require-sandbox - * Don't use --strip-unneeded on OS X or iOS - * Add new license-files field got additional licenses - * Fix if(solaris) on some Solaris versions - * Don't use -dylib-install-name on OS X with GHC > 7.8 - * Add DragonFly as a known OS - * Improve pretty-printing of Cabal files - * Add test flag --show-details=streaming for real-time test output - * Add exec command - -1.10.2.0 Duncan Coutts June 2011 - * Include test suites in cabal sdist - * Fix for conditionals in test suite stanzas in .cabal files - * Fix permissions of directories created during install - * Fix for global builds when $HOME env var is not set - -1.10.1.0 Duncan Coutts February 2011 - * Improved error messages when test suites are not enabled - * Template parameters allowed in test --test-option(s) flag - * Improved documentation of the test feature - * Relaxed QA check on cabal-version when using test-suite sections - * haddock command now allows both --hoogle and --html at the same time - * Find ghc-version-specific instances of the hsc2hs program - * Preserve file executable permissions in sdist tarballs - * Pass gcc location and flags to ./configure scripts - * Get default gcc flags from ghc - -1.10.0.0 Duncan Coutts November 2010 - * New cabal test feature - * Initial support for UHC - * New default-language and other-languages fields (e.g. 
Haskell98/2010) - * New default-extensions and other-extensions fields - * Deprecated extensions field (for packages using cabal-version >=1.10) - * Cabal-version field must now only be of the form ">= x.y" - * Removed deprecated --copy-prefix= feature - * Auto-reconfigure when .cabal file changes - * Workaround for haddock overwriting .hi and .o files when using TH - * Extra cpp flags used with hsc2hs and c2hs (-D${os}_BUILD_OS etc) - * New cpp define VERSION_ gives string version of dependencies - * User guide source now in markdown format for easier editing - * Improved checks and error messages for C libraries and headers - * Removed BSD4 from the list of suggested licenses - * Updated list of known language extensions - * Fix for include paths to allow C code to import FFI stub.h files - * Fix for intra-package dependencies on OSX - * Stricter checks on various bits of .cabal file syntax - * Minor fixes for c2hs - -1.8.0.6 Duncan Coutts June 2010 - * Fix 'register --global/--user' - -1.8.0.4 Duncan Coutts March 2010 - * Set dylib-install-name for dynalic libs on OSX - * Stricter configure check that compiler supports a package's extensions - * More configure-time warnings - * Hugs can compile Cabal lib again - * Default datadir now follows prefix on Windows - * Support for finding installed packages for hugs - * Cabal version macros now have proper parenthesis - * Reverted change to filter out deps of non-buildable components - * Fix for registering implace when using a specific package db - * Fix mismatch between $os and $arch path template variables - * Fix for finding ar.exe on Windows, always pick ghc's version - * Fix for intra-package dependencies with ghc-6.12 - -1.8.0.2 Duncan Coutts December 2009 - * Support for GHC-6.12 - * New unique installed package IDs which use a package hash - * Allow executables to depend on the lib within the same package - * Dependencies for each component apply only to that component - (previously applied to all the other components too) - * Added new known license MIT and versioned GPL and LGPL - * More liberal package version range syntax - * Package registration files are now UTF8 - * Support for LHC and JHC-0.7.2 - * Deprecated RecordPuns extension in favour of NamedFieldPuns - * Deprecated PatternSignatures extension in favor of ScopedTypeVariables - * New VersionRange semantic view as a sequence of intervals - * Improved package quality checks - * Minor simplification in a couple Setup.hs hooks - * Beginnings of a unit level testsuite using QuickCheck - * Various bug fixes - * Various internal cleanups - -1.6.0.2 Duncan Coutts February 2009 - * New configure-time check for C headers and libraries - * Added language extensions present in ghc-6.10 - * Added support for NamedFieldPuns extension in ghc-6.8 - * Fix in configure step for ghc-6.6 on Windows - * Fix warnings in Path_pkgname.hs module on Windows - * Fix for exotic flags in ld-options field - * Fix for using pkg-config in a package with a lib and an executable - * Fix for building haddock docs for exes that use the Paths module - * Fix for installing header files in subdirectories - * Fix for the case of building profiling libs but not ordinary libs - * Fix read-only attribute of installed files on Windows - * Ignore ghc -threaded flag when profiling in ghc-6.8 and older - -1.6.0.1 Duncan Coutts October 2008 - * Export a compat function to help alex and happy - -1.6.0.0 Duncan Coutts October 2008 - * Support for ghc-6.10 - * Source control repositories can now be specified in .cabal 
files - * Bug report URLs can be now specified in .cabal files - * Wildcards now allowed in data-files and extra-source-files fields - * New syntactic sugar for dependencies "build-depends: foo ==1.2.*" - * New cabal_macros.h provides macros to test versions of dependencies - * Relocatable bindists now possible on unix via env vars - * New 'exposed' field allows packages to be not exposed by default - * Install dir flags can now use $os and $arch variables - * New --builddir flag allows multiple builds from a single sources dir - * cc-options now only apply to .c files, not for -fvia-C - * cc-options are not longer propagated to dependent packages - * The cpp/cc/ld-options fields no longer use ',' as a separator - * hsc2hs is now called using gcc instead of using ghc as gcc - * New api for manipulating sets and graphs of packages - * Internal api improvements and code cleanups - * Minor improvements to the user guide - * Miscellaneous minor bug fixes - -1.4.0.2 Duncan Coutts August 2008 - * Fix executable stripping default - * Fix striping exes on OSX that export dynamic symbols (like ghc) - * Correct the order of arguments given by --prog-options= - * Fix corner case with overlapping user and global packages - * Fix for modules that use pre-processing and .hs-boot files - * Clarify some points in the user guide and readme text - * Fix verbosity flags passed to sub-command like haddock - * Fix sdist --snapshot - * Allow meta-packages that contain no modules or C code - * Make the generated Paths module -Wall clean on Windows - -1.4.0.1 Duncan Coutts June 2008 - * Fix a bug which caused '.' to always be in the sources search path - * Haddock-2.2 and later do now support the --hoogle flag - -1.4.0.0 Duncan Coutts June 2008 - * Rewritten command line handling support - * Command line completion with bash - * Better support for Haddock 2 - * Improved support for nhc98 - * Removed support for ghc-6.2 - * Haddock markup in .lhs files now supported - * Default colour scheme for highlighted source code - * Default prefix for --user installs is now $HOME/.cabal - * All .cabal files are treaded as UTF-8 and must be valid - * Many checks added for common mistakes - * New --package-db= option for specific package databases - * Many internal changes to support cabal-install - * Stricter parsing for version strings, eg dissalows "1.05" - * Improved user guide introduction - * Programatica support removed - * New options --program-prefix/suffix allows eg versioned programs - * Support packages that use .hs-boot files - * Fix sdist for Main modules that require preprocessing - * New configure -O flag with optimisation level 0--2 - * Provide access to "x-" extension fields through the Cabal api - * Added check for broken installed packages - * Added warning about using inconsistent versions of dependencies - * Strip binary executable files by default with an option to disable - * New options to add site-specific include and library search paths - * Lift the restriction that libraries must have exposed-modules - * Many bugs fixed. 
- * Many internal structural improvements and code cleanups - -1.2.4.0 Duncan Coutts June 2008 - * Released with GHC 6.8.3 - * Backported several fixes and minor improvements from Cabal-1.4 - * Use a default colour scheme for sources with hscolour >=1.9 - * Support --hyperlink-source for Haddock >= 2.0 - * Fix for running in a non-writable directory - * Add OSX -framework arguments when linking executables - * Updates to the user guide - * Allow build-tools names to include + and _ - * Export autoconfUserHooks and simpleUserHooks - * Export ccLdOptionsBuildInfo for Setup.hs scripts - * Export unionBuildInfo and make BuildInfo an instance of Monoid - * Fix to allow the 'main-is' module to use a pre-processor - -1.2.3.0 Duncan Coutts Nov 2007 - * Released with GHC 6.8.2 - * Includes full list of GHC language extensions - * Fix infamous "dist/conftest.c" bug - * Fix configure --interfacedir= - * Find ld.exe on Windows correctly - * Export PreProcessor constructor and mkSimplePreProcessor - * Fix minor bug in unlit code - * Fix some markup in the haddock docs - -1.2.2.0 Duncan Coutts Nov 2007 - * Released with GHC 6.8.1 - * Support haddock-2.0 - * Support building DSOs with GHC - * Require reconfiguring if the .cabal file has changed - * Fix os(windows) configuration test - * Fix building documentation - * Fix building packages on Solaris - * Other minor bug fixes - -1.2.1 Duncan Coutts Oct 2007 - * To be included in GHC 6.8.1 - * New field "cpp-options" used when preprocessing Haskell modules - * Fixes for hsc2hs when using ghc - * C source code gets compiled with -O2 by default - * OS aliases, to allow os(windows) rather than requiring os(mingw32) - * Fix cleaning of 'stub' files - * Fix cabal-setup, command line ui that replaces "runhaskell Setup.hs" - * Build docs even when dependent packages docs are missing - * Allow the --html-dir to be specified at configure time - * Fix building with ghc-6.2 - * Other minor bug fixes and build fixes - -1.2.0 Duncan Coutts Sept 2007 - * To be included in GHC 6.8.x - * New configurations feature - * Can make haddock docs link to hilighted sources (with hscolour) - * New flag to allow linking to haddock docs on the web - * Supports pkg-config - * New field "build-tools" for tool dependencies - * Improved c2hs support - * Preprocessor output no longer clutters source dirs - * Separate "includes" and "install-includes" fields - * Makefile command to generate makefiles for building libs with GHC - * New --docdir configure flag - * Generic --with-prog --prog-args configure flags - * Better default installation paths on Windows - * Install paths can be specified relative to each other - * License files now installed - * Initial support for NHC (incomplete) - * Consistent treatment of verbosity - * Reduced verbosity of configure step by default - * Improved helpfulness of output messages - * Help output now clearer and fits in 80 columns - * New setup register --gen-pkg-config flag for distros - * Major internal refactoring, hooks api has changed - * Dozens of bug fixes - -1.1.6.2 Duncan Coutts May 2007 - * Released with GHC 6.6.1 - * Handle windows text file encoding for .cabal files - * Fix compiling a executable for profiling that uses Template Haskell - * Other minor bug fixes and user guide clarifications - -1.1.6.1 Duncan Coutts Oct 2006 - * fix unlit code - * fix escaping in register.sh - -1.1.6 Duncan Coutts Oct 2006 - * Released with GHC 6.6 - * Added support for hoogle - * Allow profiling and normal builds of libs to be chosen indepentantly - * 
Default installation directories on Win32 changed - * Register haddock docs with ghc-pkg - * Get haddock to make hyperlinks to dependent package docs - * Added BangPatterns language extension - * Various bug fixes - -1.1.4 Duncan Coutts May 2006 - * Released with GHC 6.4.2 - * Better support for packages that need to install header files - * cabal-setup added, but not installed by default yet - * Implemented "setup register --inplace" - * Have packages exposed by default with ghc-6.2 - * It is no longer necessary to run 'configure' before 'clean' or 'sdist' - * Added support for ghc's -split-objs - * Initial support for JHC - * Ignore extension fields in .cabal files (fields begining with "x-") - * Some changes to command hooks API to improve consistency - * Hugs support improvements - * Added GeneralisedNewtypeDeriving language extension - * Added cabal-version field - * Support hidden modules with haddock - * Internal code refactoring - * More bug fixes - -1.1.3 Isaac Jones Sept 2005 - * WARNING: Interfaces not documented in the user's guide may - change in future releases. - * Move building of GHCi .o libs to the build phase rather than - register phase. (from Duncan Coutts) - * Use .tar.gz for source package extension - * Uses GHC instead of cpphs if the latter is not available - * Added experimental "command hooks" which completely override the - default behavior of a command. - * Some bugfixes - -1.1.1 Isaac Jones July 2005 - * WARNING: Interfaces not documented in the user's guide may - change in future releases. - * Handles recursive modules for GHC 6.2 and GHC 6.4. - * Added "setup test" command (Used with UserHook) - * implemented handling of _stub.{c,h,o} files - * Added support for profiling - * Changed install prefix of libraries (pref/pkgname-version - to prefix/pkgname-version/compname-version) - * Added pattern guards as a language extension - * Moved some functionality to Language.Haskell.Extension - * Register / unregister .bat files for windows - * Exposed more of the API - * Added support for the hide-all-packages flag in GHC > 6.4 - * Several bug fixes - -1.0 Isaac Jones March 11 2005 - * Released with GHC 6.4, Hugs March 2005, and nhc98 1.18 - * Some sanity checking - -0.5 Isaac Jones Wed Feb 19 2005 - * WARNING: this is a pre-release and the interfaces are still - likely to change until we reach a 1.0 release. - * Hooks interfaces changed - * Added preprocessors to user hooks - * No more executable-modules or hidden-modules. Use - "other-modules" instead. - * Certain fields moved into BuildInfo, much refactoring - * extra-libs -> extra-libraries - * Added --gen-script to configure and unconfigure. - * modules-ghc (etc) now ghc-modules (etc) - * added new fields including "synopsis" - * Lots of bug fixes - * spaces can sometimes be used instead of commas - * A user manual has appeared (Thanks, ross!) - * for ghc 6.4, configures versionsed depends properly - * more features to ./setup haddock - -0.4 Isaac Jones Sun Jan 16 2005 - - * Much thanks to all the awesome fptools hackers who have been - working hard to build the Haskell Cabal! - - * Interface Changes: - - ** WARNING: this is a pre-release and the interfaces are still - likely to change until we reach a 1.0 release. - - ** Instead of Package.description, you should name your - description files .cabal. In particular, we suggest - that you name it .cabal, but this is not enforced - (yet). Multiple .cabal files in the same directory is an error, - at least for now. - - ** ./setup install --install-prefix is gone. 
Use ./setup copy - --copy-prefix instead. - - ** The "Modules" field is gone. Use "hidden-modules", - "exposed-modules", and "executable-modules". - - ** Build-depends is now a package-only field, and can't go into - executable stanzas. Build-depends is a package-to-package - relationship. - - ** Some new fields. Use the Source. - - * New Features - - ** Cabal is now included as a package in the CVS version of - fptools. That means it'll be released as "-package Cabal" in - future versions of the compilers, and if you are a bleeding-edge - user, you can grab it from the CVS repository with the compilers. - - ** Hugs compatibility and NHC98 compatibility should both be - improved. - - ** Hooks Interface / Autoconf compatibility: Most of the hooks - interface is hidden for now, because it's not finalized. I have - exposed only "defaultMainWithHooks" and "defaultUserHooks". This - allows you to use a ./configure script to preprocess - "foo.buildinfo", which gets merged with "foo.cabal". In future - releases, we'll expose UserHooks, but we're definitely going to - change the interface to those. The interface to the two functions - I've exposed should stay the same, though. - - ** ./setup haddock is a baby feature which pre-processes the - source code with hscpp and runs haddock on it. This is brand new - and hardly tested, so you get to knock it around and see what you - think. - - ** Some commands now actually implement verbosity. - - ** The preprocessors have been tested a bit more, and seem to work - OK. Please give feedback if you use these. - -0.3 Isaac Jones Sun Jan 16 2005 - * Unstable snapshot release - * From now on, stable releases are even. - -0.2 Isaac Jones - - * Adds more HUGS support and preprocessor support. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Class.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Class.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Class.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Class.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,518 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DefaultSignatures #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Binary.Class --- Copyright : Lennart Kolmodin --- License : BSD3-style (see LICENSE) --- --- Maintainer : Lennart Kolmodin --- Stability : unstable --- Portability : portable to Hugs and GHC. Requires the FFI and some flexible instances --- --- Typeclass and instances for binary serialization. --- ------------------------------------------------------------------------------ - -module Distribution.Compat.Binary.Class ( - - -- * The Binary class - Binary(..) - - -- * Support for generics - , GBinary(..) 
- - ) where - -import Data.Word - -import Data.Binary.Put -import Data.Binary.Get - -import Control.Monad -import Foreign - -import Data.ByteString.Lazy (ByteString) -import qualified Data.ByteString.Lazy as L - -import Data.Char (chr,ord) -import Data.List (unfoldr) - --- And needed for the instances: -import qualified Data.ByteString as B -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.IntMap as IntMap -import qualified Data.IntSet as IntSet -import qualified Data.Ratio as R - -import qualified Data.Tree as T - -import Data.Array.Unboxed - -import GHC.Generics - -import qualified Data.Sequence as Seq -import qualified Data.Foldable as Fold - ------------------------------------------------------------------------- - -class GBinary f where - gput :: f t -> Put - gget :: Get (f t) - --- | The 'Binary' class provides 'put' and 'get', methods to encode and --- decode a Haskell value to a lazy 'ByteString'. It mirrors the 'Read' and --- 'Show' classes for textual representation of Haskell types, and is --- suitable for serialising Haskell values to disk, over the network. --- --- For decoding and generating simple external binary formats (e.g. C --- structures), Binary may be used, but in general is not suitable --- for complex protocols. Instead use the 'Put' and 'Get' primitives --- directly. --- --- Instances of Binary should satisfy the following property: --- --- > decode . encode == id --- --- That is, the 'get' and 'put' methods should be the inverse of each --- other. A range of instances are provided for basic Haskell types. --- -class Binary t where - -- | Encode a value in the Put monad. - put :: t -> Put - -- | Decode a value in the Get monad - get :: Get t - - default put :: (Generic t, GBinary (Rep t)) => t -> Put - put = gput . from - - default get :: (Generic t, GBinary (Rep t)) => Get t - get = to `fmap` gget - ------------------------------------------------------------------------- --- Simple instances - --- The () type need never be written to disk: values of singleton type --- can be reconstructed from the type alone -instance Binary () where - put () = return () - get = return () - --- Bools are encoded as a byte in the range 0 .. 1 -instance Binary Bool where - put = putWord8 . fromIntegral . fromEnum - get = liftM (toEnum . fromIntegral) getWord8 - --- Values of type 'Ordering' are encoded as a byte in the range 0 .. 2 -instance Binary Ordering where - put = putWord8 . fromIntegral . fromEnum - get = liftM (toEnum . fromIntegral) getWord8 - ------------------------------------------------------------------------- --- Words and Ints - --- Words8s are written as bytes -instance Binary Word8 where - put = putWord8 - get = getWord8 - --- Words16s are written as 2 bytes in big-endian (network) order -instance Binary Word16 where - put = putWord16be - get = getWord16be - --- Words32s are written as 4 bytes in big-endian (network) order -instance Binary Word32 where - put = putWord32be - get = getWord32be - --- Words64s are written as 8 bytes in big-endian (network) order -instance Binary Word64 where - put = putWord64be - get = getWord64be - --- Int8s are written as a single byte. 
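-- A minimal sketch of a hand-written instance of the 'Binary' class described
-- above; 'Point' is a hypothetical example type, not part of this module. The
-- instance should satisfy the documented law that decoding undoes encoding.
--
-- > data Point = Point Int Int
-- >
-- > instance Binary Point where
-- >   put (Point x y) = put x >> put y
-- >   get = do
-- >     x <- get
-- >     y <- get
-- >     return (Point x y)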
-instance Binary Int8 where - put i = put (fromIntegral i :: Word8) - get = liftM fromIntegral (get :: Get Word8) - --- Int16s are written as a 2 bytes in big endian format -instance Binary Int16 where - put i = put (fromIntegral i :: Word16) - get = liftM fromIntegral (get :: Get Word16) - --- Int32s are written as a 4 bytes in big endian format -instance Binary Int32 where - put i = put (fromIntegral i :: Word32) - get = liftM fromIntegral (get :: Get Word32) - --- Int64s are written as a 4 bytes in big endian format -instance Binary Int64 where - put i = put (fromIntegral i :: Word64) - get = liftM fromIntegral (get :: Get Word64) - ------------------------------------------------------------------------- - --- Words are are written as Word64s, that is, 8 bytes in big endian format -instance Binary Word where - put i = put (fromIntegral i :: Word64) - get = liftM fromIntegral (get :: Get Word64) - --- Ints are are written as Int64s, that is, 8 bytes in big endian format -instance Binary Int where - put i = put (fromIntegral i :: Int64) - get = liftM fromIntegral (get :: Get Int64) - ------------------------------------------------------------------------- --- --- Portable, and pretty efficient, serialisation of Integer --- - --- Fixed-size type for a subset of Integer -type SmallInt = Int32 - --- Integers are encoded in two ways: if they fit inside a SmallInt, --- they're written as a byte tag, and that value. If the Integer value --- is too large to fit in a SmallInt, it is written as a byte array, --- along with a sign and length field. - -instance Binary Integer where - - {-# INLINE put #-} - put n | n >= lo && n <= hi = do - putWord8 0 - put (fromIntegral n :: SmallInt) -- fast path - where - lo = fromIntegral (minBound :: SmallInt) :: Integer - hi = fromIntegral (maxBound :: SmallInt) :: Integer - - put n = do - putWord8 1 - put sign - put (unroll (abs n)) -- unroll the bytes - where - sign = fromIntegral (signum n) :: Word8 - - {-# INLINE get #-} - get = do - tag <- get :: Get Word8 - case tag of - 0 -> liftM fromIntegral (get :: Get SmallInt) - _ -> do sign <- get - bytes <- get - let v = roll bytes - return $! if sign == (1 :: Word8) then v else - v - --- --- Fold and unfold an Integer to and from a list of its bytes --- -unroll :: Integer -> [Word8] -unroll = unfoldr step - where - step 0 = Nothing - step i = Just (fromIntegral i, i `shiftR` 8) - -roll :: [Word8] -> Integer -roll = foldr unstep 0 - where - unstep b a = a `shiftL` 8 .|. fromIntegral b - -{- - --- --- An efficient, raw serialisation for Integer (GHC only) --- - --- TODO This instance is not architecture portable. GMP stores numbers as --- arrays of machine sized words, so the byte format is not portable across --- architectures with different endianness and word size. - -import Data.ByteString.Base (toForeignPtr,unsafePackAddress, memcpy) -import GHC.Base hiding (ord, chr) -import GHC.Prim -import GHC.Ptr (Ptr(..)) -import GHC.IOBase (IO(..)) - -instance Binary Integer where - put (S# i) = putWord8 0 >> put (I# i) - put (J# s ba) = do - putWord8 1 - put (I# s) - put (BA ba) - - get = do - b <- getWord8 - case b of - 0 -> do (I# i#) <- get - return (S# i#) - _ -> do (I# s#) <- get - (BA a#) <- get - return (J# s# a#) - -instance Binary ByteArray where - - -- Pretty safe. - put (BA ba) = - let sz = sizeofByteArray# ba -- (primitive) in *bytes* - addr = byteArrayContents# ba - bs = unsafePackAddress (I# sz) addr - in put bs -- write as a ByteString. easy, yay! - - -- Pretty scary. 
Should be quick though - get = do - (fp, off, n@(I# sz)) <- liftM toForeignPtr get -- so decode a ByteString - assert (off == 0) $ return $ unsafePerformIO $ do - (MBA arr) <- newByteArray sz -- and copy it into a ByteArray# - let to = byteArrayContents# (unsafeCoerce# arr) -- urk, is this safe? - withForeignPtr fp $ \from -> memcpy (Ptr to) from (fromIntegral n) - freezeByteArray arr - --- wrapper for ByteArray# -data ByteArray = BA {-# UNPACK #-} !ByteArray# -data MBA = MBA {-# UNPACK #-} !(MutableByteArray# RealWorld) - -newByteArray :: Int# -> IO MBA -newByteArray sz = IO $ \s -> - case newPinnedByteArray# sz s of { (# s', arr #) -> - (# s', MBA arr #) } - -freezeByteArray :: MutableByteArray# RealWorld -> IO ByteArray -freezeByteArray arr = IO $ \s -> - case unsafeFreezeByteArray# arr s of { (# s', arr' #) -> - (# s', BA arr' #) } - --} - -instance (Binary a,Integral a) => Binary (R.Ratio a) where - put r = put (R.numerator r) >> put (R.denominator r) - get = liftM2 (R.%) get get - ------------------------------------------------------------------------- - --- Char is serialised as UTF-8 -instance Binary Char where - put a | c <= 0x7f = put (fromIntegral c :: Word8) - | c <= 0x7ff = do put (0xc0 .|. y) - put (0x80 .|. z) - | c <= 0xffff = do put (0xe0 .|. x) - put (0x80 .|. y) - put (0x80 .|. z) - | c <= 0x10ffff = do put (0xf0 .|. w) - put (0x80 .|. x) - put (0x80 .|. y) - put (0x80 .|. z) - | otherwise = error "Not a valid Unicode code point" - where - c = ord a - z, y, x, w :: Word8 - z = fromIntegral (c .&. 0x3f) - y = fromIntegral (shiftR c 6 .&. 0x3f) - x = fromIntegral (shiftR c 12 .&. 0x3f) - w = fromIntegral (shiftR c 18 .&. 0x7) - - get = do - let getByte = liftM (fromIntegral :: Word8 -> Int) get - shiftL6 = flip shiftL 6 :: Int -> Int - w <- getByte - r <- case () of - _ | w < 0x80 -> return w - | w < 0xe0 -> do - x <- liftM (xor 0x80) getByte - return (x .|. shiftL6 (xor 0xc0 w)) - | w < 0xf0 -> do - x <- liftM (xor 0x80) getByte - y <- liftM (xor 0x80) getByte - return (y .|. shiftL6 (x .|. shiftL6 - (xor 0xe0 w))) - | otherwise -> do - x <- liftM (xor 0x80) getByte - y <- liftM (xor 0x80) getByte - z <- liftM (xor 0x80) getByte - return (z .|. shiftL6 (y .|. shiftL6 - (x .|. shiftL6 (xor 0xf0 w)))) - return $! 
chr r - ------------------------------------------------------------------------- --- Instances for the first few tuples - -instance (Binary a, Binary b) => Binary (a,b) where - put (a,b) = put a >> put b - get = liftM2 (,) get get - -instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where - put (a,b,c) = put a >> put b >> put c - get = liftM3 (,,) get get get - -instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where - put (a,b,c,d) = put a >> put b >> put c >> put d - get = liftM4 (,,,) get get get get - -instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d,e) where - put (a,b,c,d,e) = put a >> put b >> put c >> put d >> put e - get = liftM5 (,,,,) get get get get get - --- --- and now just recurse: --- - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f) - => Binary (a,b,c,d,e,f) where - put (a,b,c,d,e,f) = put (a,(b,c,d,e,f)) - get = do (a,(b,c,d,e,f)) <- get ; return (a,b,c,d,e,f) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f, Binary g) - => Binary (a,b,c,d,e,f,g) where - put (a,b,c,d,e,f,g) = put (a,(b,c,d,e,f,g)) - get = do (a,(b,c,d,e,f,g)) <- get ; return (a,b,c,d,e,f,g) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h) - => Binary (a,b,c,d,e,f,g,h) where - put (a,b,c,d,e,f,g,h) = put (a,(b,c,d,e,f,g,h)) - get = do (a,(b,c,d,e,f,g,h)) <- get ; return (a,b,c,d,e,f,g,h) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h, Binary i) - => Binary (a,b,c,d,e,f,g,h,i) where - put (a,b,c,d,e,f,g,h,i) = put (a,(b,c,d,e,f,g,h,i)) - get = do (a,(b,c,d,e,f,g,h,i)) <- get ; return (a,b,c,d,e,f,g,h,i) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h, Binary i, Binary j) - => Binary (a,b,c,d,e,f,g,h,i,j) where - put (a,b,c,d,e,f,g,h,i,j) = put (a,(b,c,d,e,f,g,h,i,j)) - get = do (a,(b,c,d,e,f,g,h,i,j)) <- get ; return (a,b,c,d,e,f,g,h,i,j) - ------------------------------------------------------------------------- --- Container types - -instance Binary a => Binary [a] where - put l = put (length l) >> mapM_ put l - get = do n <- get :: Get Int - getMany n - --- | 'getMany n' get 'n' elements in order, without blowing the stack. -getMany :: Binary a => Int -> Get [a] -getMany n = go [] n - where - go xs 0 = return $! 
reverse xs - go xs i = do x <- get - -- we must seq x to avoid stack overflows due to laziness in - -- (>>=) - x `seq` go (x:xs) (i-1) -{-# INLINE getMany #-} - -instance (Binary a) => Binary (Maybe a) where - put Nothing = putWord8 0 - put (Just x) = putWord8 1 >> put x - get = do - w <- getWord8 - case w of - 0 -> return Nothing - _ -> liftM Just get - -instance (Binary a, Binary b) => Binary (Either a b) where - put (Left a) = putWord8 0 >> put a - put (Right b) = putWord8 1 >> put b - get = do - w <- getWord8 - case w of - 0 -> liftM Left get - _ -> liftM Right get - ------------------------------------------------------------------------- --- ByteStrings (have specially efficient instances) - -instance Binary B.ByteString where - put bs = do put (B.length bs) - putByteString bs - get = get >>= getByteString - --- --- Using old versions of fps, this is a type synonym, and non portable --- --- Requires 'flexible instances' --- -instance Binary ByteString where - put bs = do put (fromIntegral (L.length bs) :: Int) - putLazyByteString bs - get = get >>= getLazyByteString - ------------------------------------------------------------------------- --- Maps and Sets - -instance (Binary a) => Binary (Set.Set a) where - put s = put (Set.size s) >> mapM_ put (Set.toAscList s) - get = liftM Set.fromDistinctAscList get - -instance (Binary k, Binary e) => Binary (Map.Map k e) where - put m = put (Map.size m) >> mapM_ put (Map.toAscList m) - get = liftM Map.fromDistinctAscList get - -instance Binary IntSet.IntSet where - put s = put (IntSet.size s) >> mapM_ put (IntSet.toAscList s) - get = liftM IntSet.fromDistinctAscList get - -instance (Binary e) => Binary (IntMap.IntMap e) where - put m = put (IntMap.size m) >> mapM_ put (IntMap.toAscList m) - get = liftM IntMap.fromDistinctAscList get - ------------------------------------------------------------------------- --- Queues and Sequences - -instance (Binary e) => Binary (Seq.Seq e) where - put s = put (Seq.length s) >> Fold.mapM_ put s - get = do n <- get :: Get Int - rep Seq.empty n get - where rep xs 0 _ = return $! xs - rep xs n g = xs `seq` n `seq` do - x <- g - rep (xs Seq.|> x) (n-1) g - ------------------------------------------------------------------------- --- Floating point - -instance Binary Double where - put d = put (decodeFloat d) - get = liftM2 encodeFloat get get - -instance Binary Float where - put f = put (decodeFloat f) - get = liftM2 encodeFloat get get - ------------------------------------------------------------------------- --- Trees - -instance (Binary e) => Binary (T.Tree e) where - put (T.Node r s) = put r >> put s - get = liftM2 T.Node get get - ------------------------------------------------------------------------- --- Arrays - -instance (Binary i, Ix i, Binary e) => Binary (Array i e) where - put a = do - put (bounds a) - put (rangeSize $ bounds a) -- write the length - mapM_ put (elems a) -- now the elems. - get = do - bs <- get - n <- get -- read the length - xs <- getMany n -- now the elems. - return (listArray bs xs) - --- --- The IArray UArray e constraint is non portable. 
Requires flexible instances --- -instance (Binary i, Ix i, Binary e, IArray UArray e) => Binary (UArray i e) where - put a = do - put (bounds a) - put (rangeSize $ bounds a) -- now write the length - mapM_ put (elems a) - get = do - bs <- get - n <- get - xs <- getMany n - return (listArray bs xs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Generic.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Generic.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Generic.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary/Generic.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -{-# LANGUAGE BangPatterns, CPP, FlexibleInstances, KindSignatures, - ScopedTypeVariables, Trustworthy, TypeOperators, TypeSynonymInstances #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Binary.Generic --- Copyright : Bryan O'Sullivan --- License : BSD3-style (see LICENSE) --- --- Maintainer : Bryan O'Sullivan --- Stability : unstable --- Portability : Only works with GHC 7.2 and newer --- --- Instances for supporting GHC generics. --- ------------------------------------------------------------------------------ -module Distribution.Compat.Binary.Generic - ( - ) where - -import Control.Applicative -import Distribution.Compat.Binary.Class -import Data.Binary.Get -import Data.Binary.Put -import Data.Bits -import Data.Word -import GHC.Generics - --- Type without constructors -instance GBinary V1 where - gput _ = return () - gget = return undefined - --- Constructor without arguments -instance GBinary U1 where - gput U1 = return () - gget = return U1 - --- Product: constructor with parameters -instance (GBinary a, GBinary b) => GBinary (a :*: b) where - gput (x :*: y) = gput x >> gput y - gget = (:*:) <$> gget <*> gget - --- Metadata (constructor name, etc) -instance GBinary a => GBinary (M1 i c a) where - gput = gput . unM1 - gget = M1 <$> gget - --- Constants, additional parameters, and rank-1 recursion -instance Binary a => GBinary (K1 i a) where - gput = put . unK1 - gget = K1 <$> get - --- Borrowed from the cereal package. - --- The following GBinary instance for sums has support for serializing --- types with up to 2^64-1 constructors. It will use the minimal --- number of bytes needed to encode the constructor. For example when --- a type has 2^8 constructors or less it will use a single byte to --- encode the constructor. If it has 2^16 constructors or less it will --- use two bytes, and so on till 2^64-1. 
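The sum machinery that follows is what makes Binary derivable through GHC generics: the default put/get in the class above walk a type's Rep, and the constructor index is written with the smallest word that can hold it, exactly as the borrowed comment describes. A hedged sketch of how a user-defined type picks this up, written against the plain Data.Binary API (which the compat layer re-exports on newer GHCs); Colour is an invented example type:

    {-# LANGUAGE DeriveGeneric #-}

    import Data.Binary (Binary, decode, encode)
    import GHC.Generics (Generic)

    -- Three constructors: the tag fits in a Word8 (3 - 1 <= maxBound :: Word8),
    -- so the derived encoding spends a single byte on the constructor index.
    data Colour = Red | Green | Blue
      deriving (Show, Eq, Generic)

    -- No methods supplied: put and get fall back to the Generic-based defaults.
    instance Binary Colour

    -- decode (encode Blue) :: Colour  evaluates to  Blue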
- -#define GUARD(WORD) (size - 1) <= fromIntegral (maxBound :: WORD) -#define PUTSUM(WORD) GUARD(WORD) = putSum (0 :: WORD) (fromIntegral size) -#define GETSUM(WORD) GUARD(WORD) = (get :: Get WORD) >>= checkGetSum (fromIntegral size) - -instance ( GSum a, GSum b - , GBinary a, GBinary b - , SumSize a, SumSize b) => GBinary (a :+: b) where - gput | PUTSUM(Word8) | PUTSUM(Word16) | PUTSUM(Word32) | PUTSUM(Word64) - | otherwise = sizeError "encode" size - where - size = unTagged (sumSize :: Tagged (a :+: b) Word64) - - gget | GETSUM(Word8) | GETSUM(Word16) | GETSUM(Word32) | GETSUM(Word64) - | otherwise = sizeError "decode" size - where - size = unTagged (sumSize :: Tagged (a :+: b) Word64) - -sizeError :: Show size => String -> size -> error -sizeError s size = - error $ "Can't " ++ s ++ " a type with " ++ show size ++ " constructors" - ------------------------------------------------------------------------- - -checkGetSum :: (Ord word, Num word, Bits word, GSum f) - => word -> word -> Get (f a) -checkGetSum size code | code < size = getSum code size - | otherwise = fail "Unknown encoding for constructor" -{-# INLINE checkGetSum #-} - -class GSum f where - getSum :: (Ord word, Num word, Bits word) => word -> word -> Get (f a) - putSum :: (Num w, Bits w, Binary w) => w -> w -> f a -> Put - -instance (GSum a, GSum b, GBinary a, GBinary b) => GSum (a :+: b) where - getSum !code !size | code < sizeL = L1 <$> getSum code sizeL - | otherwise = R1 <$> getSum (code - sizeL) sizeR - where - sizeL = size `shiftR` 1 - sizeR = size - sizeL - - putSum !code !size s = case s of - L1 x -> putSum code sizeL x - R1 x -> putSum (code + sizeL) sizeR x - where - sizeL = size `shiftR` 1 - sizeR = size - sizeL - -instance GBinary a => GSum (C1 c a) where - getSum _ _ = gget - - putSum !code _ x = put code *> gput x - ------------------------------------------------------------------------- - -class SumSize f where - sumSize :: Tagged f Word64 - -newtype Tagged (s :: * -> *) b = Tagged {unTagged :: b} - -instance (SumSize a, SumSize b) => SumSize (a :+: b) where - sumSize = Tagged $ unTagged (sumSize :: Tagged a Word64) + - unTagged (sumSize :: Tagged b Word64) - -instance SumSize (C1 c a) where - sumSize = Tagged 1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Binary.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Binary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 711 -{-# LANGUAGE PatternSynonyms #-} -#endif - -#ifndef MIN_VERSION_binary -#define MIN_VERSION_binary(x, y, z) 0 -#endif - -module Distribution.Compat.Binary - ( decodeOrFailIO -#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) - , module Data.Binary -#else - , Binary(..) 
- , decode, encode -#endif - ) where - -import Control.Exception (catch, evaluate) -#if __GLASGOW_HASKELL__ >= 711 -import Control.Exception (pattern ErrorCall) -#else -import Control.Exception (ErrorCall(..)) -#endif -import Data.ByteString.Lazy (ByteString) - -#if __GLASGOW_HASKELL__ < 706 -import Prelude hiding (catch) -#endif - -#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) - -import Data.Binary - -#else - -import Data.Binary.Get -import Data.Binary.Put - -import Distribution.Compat.Binary.Class -import Distribution.Compat.Binary.Generic () - --- | Decode a value from a lazy ByteString, reconstructing the original structure. --- -decode :: Binary a => ByteString -> a -decode = runGet get - --- | Encode a value using binary serialisation to a lazy ByteString. --- -encode :: Binary a => a -> ByteString -encode = runPut . put -{-# INLINE encode #-} - -#endif - -decodeOrFailIO :: Binary a => ByteString -> IO (Either String a) -decodeOrFailIO bs = - catch (evaluate (decode bs) >>= return . Right) - $ \(ErrorCall str) -> return $ Left str diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/CopyFile.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/CopyFile.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/CopyFile.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/CopyFile.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Compat.CopyFile ( - copyFile, - copyFileChanged, - filesEqual, - copyOrdinaryFile, - copyExecutableFile, - setFileOrdinary, - setFileExecutable, - setDirOrdinary, - ) where - -import Distribution.Compat.Exception -import Distribution.Compat.Internal.TempFile - -import Control.Monad - ( when, unless ) -import Control.Exception - ( bracketOnError, throwIO ) -import qualified Data.ByteString.Lazy as BSL -import System.IO.Error - ( ioeSetLocation ) -import System.Directory - ( doesFileExist, renameFile, removeFile ) -import System.FilePath - ( takeDirectory ) -import System.IO - ( IOMode(ReadMode), hClose, hGetBuf, hPutBuf - , withBinaryFile ) -import Foreign - ( allocaBytes ) - -#ifndef mingw32_HOST_OS -import System.Posix.Internals (withFilePath) -import System.Posix.Types - ( FileMode ) -import System.Posix.Internals - ( c_chmod ) -import Foreign.C - ( throwErrnoPathIfMinus1_ ) -#endif /* mingw32_HOST_OS */ - -copyOrdinaryFile, copyExecutableFile :: FilePath -> FilePath -> IO () -copyOrdinaryFile src dest = copyFile src dest >> setFileOrdinary dest -copyExecutableFile src dest = copyFile src dest >> setFileExecutable dest - -setFileOrdinary, setFileExecutable, setDirOrdinary :: FilePath -> IO () -#ifndef mingw32_HOST_OS -setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- -setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x - -setFileMode :: FilePath -> FileMode -> IO () -setFileMode name m = - withFilePath name $ \s -> do - throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) -#else -setFileOrdinary _ = return () -setFileExecutable _ = return () -#endif --- This happens to be true on Unix and currently on Windows too: -setDirOrdinary = setFileExecutable - --- | Copies a file to a new destination. --- Often you should use `copyFileChanged` instead. 
-copyFile :: FilePath -> FilePath -> IO () -copyFile fromFPath toFPath = - copy - `catchIO` (\ioe -> throwIO (ioeSetLocation ioe "copyFile")) - where copy = withBinaryFile fromFPath ReadMode $ \hFrom -> - bracketOnError openTmp cleanTmp $ \(tmpFPath, hTmp) -> - do allocaBytes bufferSize $ copyContents hFrom hTmp - hClose hTmp - renameFile tmpFPath toFPath - openTmp = openBinaryTempFile (takeDirectory toFPath) ".copyFile.tmp" - cleanTmp (tmpFPath, hTmp) = do - hClose hTmp `catchIO` \_ -> return () - removeFile tmpFPath `catchIO` \_ -> return () - bufferSize = 4096 - - copyContents hFrom hTo buffer = do - count <- hGetBuf hFrom buffer bufferSize - when (count > 0) $ do - hPutBuf hTo buffer count - copyContents hFrom hTo buffer - --- | Like `copyFile`, but does not touch the target if source and destination --- are already byte-identical. This is recommended as it is useful for --- time-stamp based recompilation avoidance. -copyFileChanged :: FilePath -> FilePath -> IO () -copyFileChanged src dest = do - equal <- filesEqual src dest - unless equal $ copyFile src dest - --- | Checks if two files are byte-identical. --- Returns False if either of the files do not exist. -filesEqual :: FilePath -> FilePath -> IO Bool -filesEqual f1 f2 = do - ex1 <- doesFileExist f1 - ex2 <- doesFileExist f2 - if not (ex1 && ex2) then return False else - withBinaryFile f1 ReadMode $ \h1 -> - withBinaryFile f2 ReadMode $ \h2 -> do - c1 <- BSL.hGetContents h1 - c2 <- BSL.hGetContents h2 - return $! c1 == c2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/CreatePipe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/CreatePipe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/CreatePipe.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,62 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} -module Distribution.Compat.CreatePipe (createPipe) where - -import System.IO (Handle, hSetEncoding, localeEncoding) - --- The mingw32_HOST_OS CPP macro is GHC-specific -#if mingw32_HOST_OS -import Control.Exception (onException) -import Foreign.C.Error (throwErrnoIfMinus1_) -import Foreign.C.Types (CInt(..), CUInt(..)) -import Foreign.Ptr (Ptr) -import Foreign.Marshal.Array (allocaArray) -import Foreign.Storable (peek, peekElemOff) -import GHC.IO.FD (mkFD) -import GHC.IO.Device (IODeviceType(Stream)) -import GHC.IO.Handle.FD (mkHandleFromFD) -import System.IO (IOMode(ReadMode, WriteMode)) -#elif ghcjs_HOST_OS -#else -import System.Posix.IO (fdToHandle) -import qualified System.Posix.IO as Posix -#endif - -createPipe :: IO (Handle, Handle) --- The mingw32_HOST_OS CPP macro is GHC-specific -#if mingw32_HOST_OS -createPipe = do - (readfd, writefd) <- allocaArray 2 $ \ pfds -> do - throwErrnoIfMinus1_ "_pipe" $ c__pipe pfds 2 ({- _O_BINARY -} 32768) - readfd <- peek pfds - writefd <- peekElemOff pfds 1 - return (readfd, writefd) - (do readh <- fdToHandle readfd ReadMode - writeh <- fdToHandle writefd WriteMode - hSetEncoding readh localeEncoding - hSetEncoding writeh localeEncoding - return (readh, writeh)) `onException` (close readfd >> close writefd) - where - fdToHandle :: CInt -> IOMode -> IO Handle - fdToHandle fd mode = do - (fd', deviceType) <- mkFD fd mode (Just (Stream, 0, 0)) False False - mkHandleFromFD fd' deviceType "" mode False Nothing - - 
close :: CInt -> IO () - close = throwErrnoIfMinus1_ "_close" . c__close - -foreign import ccall "io.h _pipe" c__pipe :: - Ptr CInt -> CUInt -> CInt -> IO CInt - -foreign import ccall "io.h _close" c__close :: - CInt -> IO CInt -#elif ghcjs_HOST_OS -createPipe = error "createPipe" -#else -createPipe = do - (readfd, writefd) <- Posix.createPipe - readh <- fdToHandle readfd - writeh <- fdToHandle writefd - hSetEncoding readh localeEncoding - hSetEncoding writeh localeEncoding - return (readh, writeh) -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Environment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Environment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Environment.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Environment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE ForeignFunctionInterface #-} -{-# OPTIONS_HADDOCK hide #-} - -module Distribution.Compat.Environment - ( getEnvironment, lookupEnv, setEnv ) - where - -import qualified System.Environment as System -#if __GLASGOW_HASKELL__ >= 706 -import System.Environment (lookupEnv) -#else -import Distribution.Compat.Exception (catchIO) -#endif - -#ifdef mingw32_HOST_OS -import Control.Monad -import qualified Data.Char as Char (toUpper) -import Foreign.C -import GHC.Windows -#else -import Foreign.C.Types -import Foreign.C.String -import Foreign.C.Error (throwErrnoIfMinus1_) -import System.Posix.Internals ( withFilePath ) -#endif /* mingw32_HOST_OS */ - -getEnvironment :: IO [(String, String)] -#ifdef mingw32_HOST_OS --- On Windows, the names of environment variables are case-insensitive, but are --- often given in mixed-case (e.g. "PATH" is "Path"), so we have to normalise --- them. -getEnvironment = fmap upcaseVars System.getEnvironment - where - upcaseVars = map upcaseVar - upcaseVar (var, val) = (map Char.toUpper var, val) -#else -getEnvironment = System.getEnvironment -#endif - -#if __GLASGOW_HASKELL__ < 706 --- | @lookupEnv var@ returns the value of the environment variable @var@, or --- @Nothing@ if there is no such value. -lookupEnv :: String -> IO (Maybe String) -lookupEnv name = (Just `fmap` System.getEnv name) `catchIO` const (return Nothing) -#endif /* __GLASGOW_HASKELL__ < 706 */ - --- | @setEnv name value@ sets the specified environment variable to @value@. --- --- Throws `Control.Exception.IOException` if either @name@ or @value@ is the --- empty string or contains an equals sign. -setEnv :: String -> String -> IO () -setEnv key value_ - | null value = error "Distribuiton.Compat.setEnv: empty string" - | otherwise = setEnv_ key value - where - -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. We - -- still strip it manually so that the null check above succeeds if a value - -- starts with NUL. 
- value = takeWhile (/= '\NUL') value_ - -setEnv_ :: String -> String -> IO () - -#ifdef mingw32_HOST_OS - -setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do - success <- c_SetEnvironmentVariable k v - unless success (throwGetLastError "setEnv") - -# if defined(i386_HOST_ARCH) -# define WINDOWS_CCONV stdcall -# elif defined(x86_64_HOST_ARCH) -# define WINDOWS_CCONV ccall -# else -# error Unknown mingw32 arch -# endif /* i386_HOST_ARCH */ - -foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" - c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> IO Bool -#else -setEnv_ key value = do - withFilePath key $ \ keyP -> - withFilePath value $ \ valueP -> - throwErrnoIfMinus1_ "setenv" $ - c_setenv keyP valueP (fromIntegral (fromEnum True)) - -foreign import ccall unsafe "setenv" - c_setenv :: CString -> CString -> CInt -> IO CInt -#endif /* mingw32_HOST_OS */ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Exception.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Exception.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Exception.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Exception.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -module Distribution.Compat.Exception ( - catchIO, - catchExit, - tryIO, - ) where - -import System.Exit -import qualified Control.Exception as Exception - -tryIO :: IO a -> IO (Either Exception.IOException a) -tryIO = Exception.try - -catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a -catchIO = Exception.catch - -catchExit :: IO a -> (ExitCode -> IO a) -> IO a -catchExit = Exception.catch diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/GetShortPathName.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/GetShortPathName.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/GetShortPathName.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/GetShortPathName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,56 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.GetShortPathName --- --- Maintainer : cabal-devel@haskell.org --- Portability : Windows-only --- --- Win32 API 'GetShortPathName' function. - -module Distribution.Compat.GetShortPathName ( getShortPathName ) - where - -#ifdef mingw32_HOST_OS -import Control.Monad (void) - -import qualified System.Win32 as Win32 -import System.Win32 (LPCTSTR, LPTSTR, DWORD) -import Foreign.Marshal.Array (allocaArray) - -#ifdef x86_64_HOST_ARCH -#define WINAPI ccall -#else -#define WINAPI stdcall -#endif - -foreign import WINAPI unsafe "windows.h GetShortPathNameW" - c_GetShortPathName :: LPCTSTR -> LPTSTR -> DWORD -> IO DWORD - --- | On Windows, retrieves the short path form of the specified path. On --- non-Windows, does nothing. See https://github.com/haskell/cabal/issues/3185. --- --- From MS's GetShortPathName docs: --- --- Passing NULL for [the second] parameter and zero for cchBuffer --- will always return the required buffer size for a --- specified lpszLongPath. 
--- -getShortPathName :: FilePath -> IO FilePath -getShortPathName path = - Win32.withTString path $ \c_path -> do - c_len <- Win32.failIfZero "GetShortPathName #1 failed!" $ - c_GetShortPathName c_path Win32.nullPtr 0 - let arr_len = fromIntegral c_len - allocaArray arr_len $ \c_out -> do - void $ Win32.failIfZero "GetShortPathName #2 failed!" $ - c_GetShortPathName c_path c_out c_len - Win32.peekTString c_out - -#else - -getShortPathName :: FilePath -> IO FilePath -getShortPathName path = return path - -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Internal/TempFile.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Internal/TempFile.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Internal/TempFile.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Internal/TempFile.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Compat.Internal.TempFile ( - openTempFile, - openBinaryTempFile, - openNewBinaryFile, - createTempDirectory, - ) where - -import Distribution.Compat.Exception - -import System.FilePath (()) -import Foreign.C (CInt, eEXIST, getErrno, errnoToIOError) - -import System.IO (Handle, openTempFile, openBinaryTempFile) -import Data.Bits ((.|.)) -import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR, - o_BINARY, o_NONBLOCK, o_NOCTTY, - withFilePath, c_getpid) -import System.IO.Error (isAlreadyExistsError) -import GHC.IO.Handle.FD (fdToHandle) -import Control.Exception (onException) - -#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) -import System.Directory ( createDirectory ) -#else -import qualified System.Posix -#endif - --- ------------------------------------------------------------ --- * temporary files --- ------------------------------------------------------------ - --- This is here for Haskell implementations that do not come with --- System.IO.openTempFile. This includes nhc-1.20, hugs-2006.9. --- TODO: Not sure about JHC --- TODO: This file should probably be removed. - --- This is a copy/paste of the openBinaryTempFile definition, but --- if uses 666 rather than 600 for the permissions. The base library --- needs to be changed to make this better. -openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle) -openNewBinaryFile dir template = do - pid <- c_getpid - findTempName pid - where - -- We split off the last extension, so we can use .foo.ext files - -- for temporary files (hidden on Unix OSes). Unfortunately we're - -- below file path in the hierarchy here. - (prefix,suffix) = - case break (== '.') $ reverse template of - -- First case: template contains no '.'s. Just re-reverse it. - (rev_suffix, "") -> (reverse rev_suffix, "") - -- Second case: template contains at least one '.'. Strip the - -- dot from the prefix and prepend it to the suffix (if we don't - -- do this, the unique number will get added after the '.' and - -- thus be part of the extension, which is wrong.) - (rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix) - -- Otherwise, something is wrong, because (break (== '.')) should - -- always return a pair with either the empty string or a string - -- beginning with '.' as the second component. - _ -> error "bug in System.IO.openTempFile" - - oflags = rw_flags .|. o_EXCL .|. 
o_BINARY - - findTempName x = do - fd <- withFilePath filepath $ \ f -> - c_open f oflags 0o666 - if fd < 0 - then do - errno <- getErrno - if errno == eEXIST - then findTempName (x+1) - else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir)) - else do - -- TODO: We want to tell fdToHandle what the file path is, - -- as any exceptions etc will only be able to report the - -- FD currently - h <- fdToHandle fd `onException` c_close fd - return (filepath, h) - where - filename = prefix ++ show x ++ suffix - filepath = dir `combine` filename - - -- FIXME: bits copied from System.FilePath - combine a b - | null b = a - | null a = b - | last a == pathSeparator = a ++ b - | otherwise = a ++ [pathSeparator] ++ b - --- FIXME: Should use System.FilePath library -pathSeparator :: Char -#ifdef mingw32_HOST_OS -pathSeparator = '\\' -#else -pathSeparator = '/' -#endif - --- FIXME: Copied from GHC.Handle -std_flags, output_flags, rw_flags :: CInt -std_flags = o_NONBLOCK .|. o_NOCTTY -output_flags = std_flags .|. o_CREAT -rw_flags = output_flags .|. o_RDWR - -createTempDirectory :: FilePath -> String -> IO FilePath -createTempDirectory dir template = do - pid <- c_getpid - findTempName pid - where - findTempName x = do - let dirpath = dir template ++ "-" ++ show x - r <- tryIO $ mkPrivateDir dirpath - case r of - Right _ -> return dirpath - Left e | isAlreadyExistsError e -> findTempName (x+1) - | otherwise -> ioError e - -mkPrivateDir :: String -> IO () -#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) -mkPrivateDir s = createDirectory s -#else -mkPrivateDir s = System.Posix.createDirectory s 0o700 -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/MonadFail.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/MonadFail.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/MonadFail.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/MonadFail.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -{-# LANGUAGE CPP #-} - --- | Compatibility layer for "Control.Monad.Fail" -module Distribution.Compat.MonadFail ( MonadFail(fail) ) where -#if __GLASGOW_HASKELL__ >= 800 --- provided by base-4.9.0.0 and later -import Control.Monad.Fail (MonadFail(fail)) -#else --- the following code corresponds to --- http://hackage.haskell.org/package/fail-4.9.0.0 -import qualified Prelude as P -import Prelude hiding (fail) - -import Text.ParserCombinators.ReadP -import Text.ParserCombinators.ReadPrec - -class Monad m => MonadFail m where - fail :: String -> m a - --- instances provided by base-4.9 - -instance MonadFail Maybe where - fail _ = Nothing - -instance MonadFail [] where - fail _ = [] - -instance MonadFail IO where - fail = P.fail - -instance MonadFail ReadPrec where - fail = P.fail -- = P (\_ -> fail s) - -instance MonadFail ReadP where - fail = P.fail -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/ReadP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/ReadP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/ReadP.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,402 +0,0 @@ 
------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.ReadP --- Copyright : (c) The University of Glasgow 2002 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This is a library of parser combinators, originally written by Koen Claessen. --- It parses all alternatives in parallel, so it never keeps hold of --- the beginning of the input string, a common source of space leaks with --- other parsers. The '(+++)' choice combinator is genuinely commutative; --- it makes no difference which branch is \"shorter\". --- --- See also Koen's paper /Parallel Parsing Processes/ --- (). --- --- This version of ReadP has been locally hacked to make it H98, by --- Martin Sjögren --- --- The unit tests have been moved to UnitTest.Distribution.Compat.ReadP, by --- Mark Lentczner ------------------------------------------------------------------------------ - -module Distribution.Compat.ReadP - ( - -- * The 'ReadP' type - ReadP, -- :: * -> *; instance Functor, Monad, MonadPlus - - -- * Primitive operations - get, -- :: ReadP Char - look, -- :: ReadP String - (+++), -- :: ReadP a -> ReadP a -> ReadP a - (<++), -- :: ReadP a -> ReadP a -> ReadP a - gather, -- :: ReadP a -> ReadP (String, a) - - -- * Other operations - pfail, -- :: ReadP a - satisfy, -- :: (Char -> Bool) -> ReadP Char - char, -- :: Char -> ReadP Char - string, -- :: String -> ReadP String - munch, -- :: (Char -> Bool) -> ReadP String - munch1, -- :: (Char -> Bool) -> ReadP String - skipSpaces, -- :: ReadP () - choice, -- :: [ReadP a] -> ReadP a - count, -- :: Int -> ReadP a -> ReadP [a] - between, -- :: ReadP open -> ReadP close -> ReadP a -> ReadP a - option, -- :: a -> ReadP a -> ReadP a - optional, -- :: ReadP a -> ReadP () - many, -- :: ReadP a -> ReadP [a] - many1, -- :: ReadP a -> ReadP [a] - skipMany, -- :: ReadP a -> ReadP () - skipMany1, -- :: ReadP a -> ReadP () - sepBy, -- :: ReadP a -> ReadP sep -> ReadP [a] - sepBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] - endBy, -- :: ReadP a -> ReadP sep -> ReadP [a] - endBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] - chainr, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a - chainl, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a - chainl1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a - chainr1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a - manyTill, -- :: ReadP a -> ReadP end -> ReadP [a] - - -- * Running a parser - ReadS, -- :: *; = String -> [(a,String)] - readP_to_S, -- :: ReadP a -> ReadS a - readS_to_P -- :: ReadS a -> ReadP a - ) - where - -import qualified Distribution.Compat.MonadFail as Fail - -import Control.Monad( MonadPlus(..), liftM, liftM2, replicateM, ap, (>=>) ) -import Data.Char (isSpace) -import Control.Applicative as AP (Applicative(..), Alternative(empty, (<|>))) - -infixr 5 +++, <++ - --- --------------------------------------------------------------------------- --- The P type --- is representation type -- should be kept abstract - -data P s a - = Get (s -> P s a) - | Look ([s] -> P s a) - | Fail - | Result a (P s a) - | Final [(a,[s])] -- invariant: list is non-empty! 
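To show how the combinators exported above fit together, here is a hedged sketch of a parser for dotted version strings, the kind of input the Distribution.Text instances deal with; digits and versionP are made-up names, and the example assumes Distribution.Compat.ReadP (or any ReadP providing these combinators) is importable:

    import Distribution.Compat.ReadP
    import Data.Char (isDigit)

    -- One or more decimal digits.
    digits :: ReadP r String
    digits = munch1 isDigit

    -- Digit groups separated by dots, e.g. "1.24.0.2".
    versionP :: ReadP r [Int]
    versionP = fmap (map read) (sepBy1 digits (char '.'))

    -- Run the parser and keep only complete parses (empty remainder):
    --   [ v | (v, "") <- readP_to_S versionP "1.24.0.2" ]  ==  [[1,24,0,2]]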
- --- Monad, MonadPlus - -instance Functor (P s) where - fmap = liftM - -instance Applicative (P s) where - pure x = Result x Fail - (<*>) = ap - -instance Monad (P s) where - return = AP.pure - - (Get f) >>= k = Get (f >=> k) - (Look f) >>= k = Look (f >=> k) - Fail >>= _ = Fail - (Result x p) >>= k = k x `mplus` (p >>= k) - (Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s] - - fail = Fail.fail - -instance Fail.MonadFail (P s) where - fail _ = Fail - -instance Alternative (P s) where - empty = mzero - (<|>) = mplus - -instance MonadPlus (P s) where - mzero = Fail - - -- most common case: two gets are combined - Get f1 `mplus` Get f2 = Get (\c -> f1 c `mplus` f2 c) - - -- results are delivered as soon as possible - Result x p `mplus` q = Result x (p `mplus` q) - p `mplus` Result x q = Result x (p `mplus` q) - - -- fail disappears - Fail `mplus` p = p - p `mplus` Fail = p - - -- two finals are combined - -- final + look becomes one look and one final (=optimization) - -- final + sthg else becomes one look and one final - Final r `mplus` Final t = Final (r ++ t) - Final r `mplus` Look f = Look (\s -> Final (r ++ run (f s) s)) - Final r `mplus` p = Look (\s -> Final (r ++ run p s)) - Look f `mplus` Final r = Look (\s -> Final (run (f s) s ++ r)) - p `mplus` Final r = Look (\s -> Final (run p s ++ r)) - - -- two looks are combined (=optimization) - -- look + sthg else floats upwards - Look f `mplus` Look g = Look (\s -> f s `mplus` g s) - Look f `mplus` p = Look (\s -> f s `mplus` p) - p `mplus` Look f = Look (\s -> p `mplus` f s) - --- --------------------------------------------------------------------------- --- The ReadP type - -newtype Parser r s a = R ((a -> P s r) -> P s r) -type ReadP r a = Parser r Char a - --- Functor, Monad, MonadPlus - -instance Functor (Parser r s) where - fmap h (R f) = R (\k -> f (k . h)) - -instance Applicative (Parser r s) where - pure x = R (\k -> k x) - (<*>) = ap - -instance Monad (Parser r s) where - return = AP.pure - fail = Fail.fail - R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k)) - -instance Fail.MonadFail (Parser r s) where - fail _ = R (const Fail) - ---instance MonadPlus (Parser r s) where --- mzero = pfail --- mplus = (+++) - --- --------------------------------------------------------------------------- --- Operations over P - -final :: [(a,[s])] -> P s a --- Maintains invariant for Final constructor -final [] = Fail -final r = Final r - -run :: P c a -> ([c] -> [(a, [c])]) -run (Get f) (c:s) = run (f c) s -run (Look f) s = run (f s) s -run (Result x p) s = (x,s) : run p s -run (Final r) _ = r -run _ _ = [] - --- --------------------------------------------------------------------------- --- Operations over ReadP - -get :: ReadP r Char --- ^ Consumes and returns the next character. --- Fails if there is no input left. -get = R Get - -look :: ReadP r String --- ^ Look-ahead: returns the part of the input that is left, without --- consuming it. -look = R Look - -pfail :: ReadP r a --- ^ Always fails. -pfail = R (const Fail) - -(+++) :: ReadP r a -> ReadP r a -> ReadP r a --- ^ Symmetric choice. -R f1 +++ R f2 = R (\k -> f1 k `mplus` f2 k) - -(<++) :: ReadP a a -> ReadP r a -> ReadP r a --- ^ Local, exclusive, left-biased choice: If left parser --- locally produces any result at all, then right parser is --- not used. 
-R f <++ q = - do s <- look - probe (f return) s 0 - where - probe (Get f') (c:s) n = probe (f' c) s (n+1 :: Int) - probe (Look f') s n = probe (f' s) s n - probe p@(Result _ _) _ n = discard n >> R (p >>=) - probe (Final r) _ _ = R (Final r >>=) - probe _ _ _ = q - - discard 0 = return () - discard n = get >> discard (n-1 :: Int) - -gather :: ReadP (String -> P Char r) a -> ReadP r (String, a) --- ^ Transforms a parser into one that does the same, but --- in addition returns the exact characters read. --- IMPORTANT NOTE: 'gather' gives a runtime error if its first argument --- is built using any occurrences of readS_to_P. -gather (R m) = - R (\k -> gath id (m (\a -> return (\s -> k (s,a))))) - where - gath l (Get f) = Get (\c -> gath (l.(c:)) (f c)) - gath _ Fail = Fail - gath l (Look f) = Look (gath l . f) - gath l (Result k p) = k (l []) `mplus` gath l p - gath _ (Final _) = error "do not use readS_to_P in gather!" - --- --------------------------------------------------------------------------- --- Derived operations - -satisfy :: (Char -> Bool) -> ReadP r Char --- ^ Consumes and returns the next character, if it satisfies the --- specified predicate. -satisfy p = do c <- get; if p c then return c else pfail - -char :: Char -> ReadP r Char --- ^ Parses and returns the specified character. -char c = satisfy (c ==) - -string :: String -> ReadP r String --- ^ Parses and returns the specified string. -string this = do s <- look; scan this s - where - scan [] _ = return this - scan (x:xs) (y:ys) | x == y = get >> scan xs ys - scan _ _ = pfail - -munch :: (Char -> Bool) -> ReadP r String --- ^ Parses the first zero or more characters satisfying the predicate. -munch p = - do s <- look - scan s - where - scan (c:cs) | p c = do _ <- get; s <- scan cs; return (c:s) - scan _ = do return "" - -munch1 :: (Char -> Bool) -> ReadP r String --- ^ Parses the first one or more characters satisfying the predicate. -munch1 p = - do c <- get - if p c then do s <- munch p; return (c:s) - else pfail - -choice :: [ReadP r a] -> ReadP r a --- ^ Combines all parsers in the specified list. -choice [] = pfail -choice [p] = p -choice (p:ps) = p +++ choice ps - -skipSpaces :: ReadP r () --- ^ Skips all whitespace. -skipSpaces = - do s <- look - skip s - where - skip (c:s) | isSpace c = do _ <- get; skip s - skip _ = do return () - -count :: Int -> ReadP r a -> ReadP r [a] --- ^ @ count n p @ parses @n@ occurrences of @p@ in sequence. A list of --- results is returned. -count n p = replicateM n p - -between :: ReadP r open -> ReadP r close -> ReadP r a -> ReadP r a --- ^ @ between open close p @ parses @open@, followed by @p@ and finally --- @close@. Only the value of @p@ is returned. -between open close p = do _ <- open - x <- p - _ <- close - return x - -option :: a -> ReadP r a -> ReadP r a --- ^ @option x p@ will either parse @p@ or return @x@ without consuming --- any input. -option x p = p +++ return x - -optional :: ReadP r a -> ReadP r () --- ^ @optional p@ optionally parses @p@ and always returns @()@. -optional p = (p >> return ()) +++ return () - -many :: ReadP r a -> ReadP r [a] --- ^ Parses zero or more occurrences of the given parser. -many p = return [] +++ many1 p - -many1 :: ReadP r a -> ReadP r [a] --- ^ Parses one or more occurrences of the given parser. -many1 p = liftM2 (:) p (many p) - -skipMany :: ReadP r a -> ReadP r () --- ^ Like 'many', but discards the result. -skipMany p = many p >> return () - -skipMany1 :: ReadP r a -> ReadP r () --- ^ Like 'many1', but discards the result. 
-skipMany1 p = p >> skipMany p - -sepBy :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @sepBy p sep@ parses zero or more occurrences of @p@, separated by @sep@. --- Returns a list of values returned by @p@. -sepBy p sep = sepBy1 p sep +++ return [] - -sepBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @sepBy1 p sep@ parses one or more occurrences of @p@, separated by @sep@. --- Returns a list of values returned by @p@. -sepBy1 p sep = liftM2 (:) p (many (sep >> p)) - -endBy :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @endBy p sep@ parses zero or more occurrences of @p@, separated and ended --- by @sep@. -endBy p sep = many (do x <- p ; _ <- sep ; return x) - -endBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @endBy p sep@ parses one or more occurrences of @p@, separated and ended --- by @sep@. -endBy1 p sep = many1 (do x <- p ; _ <- sep ; return x) - -chainr :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a --- ^ @chainr p op x@ parses zero or more occurrences of @p@, separated by @op@. --- Returns a value produced by a /right/ associative application of all --- functions returned by @op@. If there are no occurrences of @p@, @x@ is --- returned. -chainr p op x = chainr1 p op +++ return x - -chainl :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a --- ^ @chainl p op x@ parses zero or more occurrences of @p@, separated by @op@. --- Returns a value produced by a /left/ associative application of all --- functions returned by @op@. If there are no occurrences of @p@, @x@ is --- returned. -chainl p op x = chainl1 p op +++ return x - -chainr1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a --- ^ Like 'chainr', but parses one or more occurrences of @p@. -chainr1 p op = scan - where scan = p >>= rest - rest x = do f <- op - y <- scan - return (f x y) - +++ return x - -chainl1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a --- ^ Like 'chainl', but parses one or more occurrences of @p@. -chainl1 p op = p >>= rest - where rest x = do f <- op - y <- p - rest (f x y) - +++ return x - -manyTill :: ReadP r a -> ReadP [a] end -> ReadP r [a] --- ^ @manyTill p end@ parses zero or more occurrences of @p@, until @end@ --- succeeds. Returns a list of values returned by @p@. -manyTill p end = scan - where scan = (end >> return []) <++ (liftM2 (:) p scan) - --- --------------------------------------------------------------------------- --- Converting between ReadP and Read - -readP_to_S :: ReadP a a -> ReadS a --- ^ Converts a parser into a Haskell ReadS-style function. --- This is the main way in which you can \"run\" a 'ReadP' parser: --- the expanded type is --- @ readP_to_S :: ReadP a -> String -> [(a,String)] @ -readP_to_S (R f) = run (f return) - -readS_to_P :: ReadS a -> ReadP r a --- ^ Converts a Haskell ReadS-style function into a parser. --- Warning: This introduces local backtracking in the resulting --- parser, and therefore a possible inefficiency. 
-readS_to_P r = - R (\k -> Look (\s -> final [bs'' | (a,s') <- r s, bs'' <- run (k a) s'])) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Semigroup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Semigroup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compat/Semigroup.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compat/Semigroup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE TypeOperators #-} - --- | Compatibility layer for "Data.Semigroup" -module Distribution.Compat.Semigroup - ( Semigroup((<>)) - , Mon.Monoid(..) - , All(..) - , Any(..) - - , Last'(..) - - , gmappend - , gmempty - ) where - -import Distribution.Compat.Binary (Binary) - -import Control.Applicative as App -import GHC.Generics -#if __GLASGOW_HASKELL__ >= 711 --- Data.Semigroup is available since GHC 8.0/base-4.9 -import Data.Semigroup -import qualified Data.Monoid as Mon -#else --- provide internal simplified non-exposed class for older GHCs -import Data.Monoid as Mon (Monoid(..), All(..), Any(..), Dual(..)) --- containers -import Data.Set (Set) -import Data.IntSet (IntSet) -import Data.Map (Map) -import Data.IntMap (IntMap) - - -class Semigroup a where - (<>) :: a -> a -> a - --- several primitive instances -instance Semigroup () where - _ <> _ = () - -instance Semigroup [a] where - (<>) = (++) - -instance Semigroup a => Semigroup (Dual a) where - Dual a <> Dual b = Dual (b <> a) - -instance Semigroup a => Semigroup (Maybe a) where - Nothing <> b = b - a <> Nothing = a - Just a <> Just b = Just (a <> b) - -instance Semigroup (Either a b) where - Left _ <> b = b - a <> _ = a - -instance Semigroup Ordering where - LT <> _ = LT - EQ <> y = y - GT <> _ = GT - -instance Semigroup b => Semigroup (a -> b) where - f <> g = \a -> f a <> g a - -instance Semigroup All where - All a <> All b = All (a && b) - -instance Semigroup Any where - Any a <> Any b = Any (a || b) - -instance (Semigroup a, Semigroup b) => Semigroup (a, b) where - (a,b) <> (a',b') = (a<>a',b<>b') - -instance (Semigroup a, Semigroup b, Semigroup c) - => Semigroup (a, b, c) where - (a,b,c) <> (a',b',c') = (a<>a',b<>b',c<>c') - -instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d) - => Semigroup (a, b, c, d) where - (a,b,c,d) <> (a',b',c',d') = (a<>a',b<>b',c<>c',d<>d') - -instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d, Semigroup e) - => Semigroup (a, b, c, d, e) where - (a,b,c,d,e) <> (a',b',c',d',e') = (a<>a',b<>b',c<>c',d<>d',e<>e') - --- containers instances -instance Semigroup IntSet where - (<>) = mappend - -instance Ord a => Semigroup (Set a) where - (<>) = mappend - -instance Semigroup (IntMap v) where - (<>) = mappend - -instance Ord k => Semigroup (Map k v) where - (<>) = mappend -#endif - --- | Cabal's own 'Data.Monoid.Last' copy to avoid requiring an orphan --- 'Binary' instance. --- --- Once the oldest `binary` version we support provides a 'Binary' --- instance for 'Data.Monoid.Last' we can remove this one here. 
--- --- NB: 'Data.Semigroup.Last' is defined differently and not a 'Monoid' -newtype Last' a = Last' { getLast' :: Maybe a } - deriving (Eq, Ord, Read, Show, Binary, - Functor, App.Applicative, Generic) - -instance Semigroup (Last' a) where - x <> Last' Nothing = x - _ <> x = x - -instance Monoid (Last' a) where - mempty = Last' Nothing - mappend = (<>) - -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- --- Stolen from Edward Kmett's BSD3-licensed `semigroups` package - --- | Generically generate a 'Semigroup' ('<>') operation for any type --- implementing 'Generic'. This operation will append two values --- by point-wise appending their component fields. It is only defined --- for product types. --- --- @ --- 'gmappend' a ('gmappend' b c) = 'gmappend' ('gmappend' a b) c --- @ -gmappend :: (Generic a, GSemigroup (Rep a)) => a -> a -> a -gmappend x y = to (gmappend' (from x) (from y)) - -class GSemigroup f where - gmappend' :: f p -> f p -> f p - -instance Semigroup a => GSemigroup (K1 i a) where - gmappend' (K1 x) (K1 y) = K1 (x <> y) - -instance GSemigroup f => GSemigroup (M1 i c f) where - gmappend' (M1 x) (M1 y) = M1 (gmappend' x y) - -instance (GSemigroup f, GSemigroup g) => GSemigroup (f :*: g) where - gmappend' (x1 :*: x2) (y1 :*: y2) = gmappend' x1 y1 :*: gmappend' x2 y2 - --- | Generically generate a 'Monoid' 'mempty' for any product-like type --- implementing 'Generic'. --- --- It is only defined for product types. --- --- @ --- 'gmappend' 'gmempty' a = a = 'gmappend' a 'gmempty' --- @ - -gmempty :: (Generic a, GMonoid (Rep a)) => a -gmempty = to gmempty' - -class GSemigroup f => GMonoid f where - gmempty' :: f p - -instance (Semigroup a, Monoid a) => GMonoid (K1 i a) where - gmempty' = K1 mempty - -instance GMonoid f => GMonoid (M1 i c f) where - gmempty' = M1 gmempty' - -instance (GMonoid f, GMonoid g) => GMonoid (f :*: g) where - gmempty' = gmempty' :*: gmempty' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compiler.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Compiler.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,204 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compiler --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This has an enumeration of the various compilers that Cabal knows about. It --- also specifies the default compiler. Sadly you'll often see code that does --- case analysis on this compiler flavour enumeration like: --- --- > case compilerFlavor comp of --- > GHC -> GHC.getInstalledPackages verbosity packageDb progconf --- > JHC -> JHC.getInstalledPackages verbosity packageDb progconf --- --- Obviously it would be better to use the proper 'Compiler' abstraction --- because that would keep all the compiler-specific code together. 
--- Unfortunately we cannot make this change yet without breaking the --- 'UserHooks' api, which would break all custom @Setup.hs@ files, so for the --- moment we just have to live with this deficiency. If you're interested, see --- ticket #57. - -module Distribution.Compiler ( - -- * Compiler flavor - CompilerFlavor(..), - buildCompilerId, - buildCompilerFlavor, - defaultCompilerFlavor, - parseCompilerFlavorCompat, - - -- * Compiler id - CompilerId(..), - - -- * Compiler info - CompilerInfo(..), - unknownCompilerInfo, - AbiTag(..), abiTagString - ) where - -import Distribution.Compat.Binary -import Language.Haskell.Extension - -import Data.Data (Data) -import Data.Typeable (Typeable) -import Data.Maybe (fromMaybe) -import Distribution.Version (Version(..)) -import GHC.Generics (Generic) - -import qualified System.Info (compilerName, compilerVersion) -import Distribution.Text (Text(..), display) -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ((<>)) - -import qualified Data.Char as Char (toLower, isDigit, isAlphaNum) -import Control.Monad (when) - -data CompilerFlavor = GHC | GHCJS | NHC | YHC | Hugs | HBC | Helium | JHC | LHC | UHC - | HaskellSuite String -- string is the id of the actual compiler - | OtherCompiler String - deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) - -instance Binary CompilerFlavor - -knownCompilerFlavors :: [CompilerFlavor] -knownCompilerFlavors = [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC] - -instance Text CompilerFlavor where - disp (OtherCompiler name) = Disp.text name - disp (HaskellSuite name) = Disp.text name - disp NHC = Disp.text "nhc98" - disp other = Disp.text (lowercase (show other)) - - parse = do - comp <- Parse.munch1 Char.isAlphaNum - when (all Char.isDigit comp) Parse.pfail - return (classifyCompilerFlavor comp) - -classifyCompilerFlavor :: String -> CompilerFlavor -classifyCompilerFlavor s = - fromMaybe (OtherCompiler s) $ lookup (lowercase s) compilerMap - where - compilerMap = [ (display compiler, compiler) - | compiler <- knownCompilerFlavors ] - - ---TODO: In some future release, remove 'parseCompilerFlavorCompat' and use --- ordinary 'parse'. Also add ("nhc", NHC) to the above 'compilerMap'. - --- | Like 'classifyCompilerFlavor' but compatible with the old ReadS parser. --- --- It is compatible in the sense that it accepts only the same strings, --- eg "GHC" but not "ghc". However other strings get mapped to 'OtherCompiler'. --- The point of this is that we do not allow extra valid values that would --- upset older Cabal versions that had a stricter parser however we cope with --- new values more gracefully so that we'll be able to introduce new value in --- future without breaking things so much. --- -parseCompilerFlavorCompat :: Parse.ReadP r CompilerFlavor -parseCompilerFlavorCompat = do - comp <- Parse.munch1 Char.isAlphaNum - when (all Char.isDigit comp) Parse.pfail - case lookup comp compilerMap of - Just compiler -> return compiler - Nothing -> return (OtherCompiler comp) - where - compilerMap = [ (show compiler, compiler) - | compiler <- knownCompilerFlavors - , compiler /= YHC ] - -buildCompilerFlavor :: CompilerFlavor -buildCompilerFlavor = classifyCompilerFlavor System.Info.compilerName - -buildCompilerVersion :: Version -buildCompilerVersion = System.Info.compilerVersion - -buildCompilerId :: CompilerId -buildCompilerId = CompilerId buildCompilerFlavor buildCompilerVersion - --- | The default compiler flavour to pick when compiling stuff. 
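For illustration, a small sketch of what the 'Text' instance above means in practice: flavour names are classified case-insensitively, and unrecognised names fall back to 'OtherCompiler' rather than failing to parse. The name "mycompiler" is made up, and the sketch assumes the Cabal modules from this source tree are importable.

module Main where

import Distribution.Compiler (CompilerFlavor (..), buildCompilerId,
                              defaultCompilerFlavor)
import Distribution.Text (display, simpleParse)

main :: IO ()
main = do
  print (simpleParse "ghc"        :: Maybe CompilerFlavor)  -- Just GHC
  print (simpleParse "GHC"        :: Maybe CompilerFlavor)  -- Just GHC
  print (simpleParse "mycompiler" :: Maybe CompilerFlavor)  -- Just (OtherCompiler "mycompiler")
  -- The compiler this library was built with, and the default flavour
  -- derived from it (Nothing when the build compiler is not recognised).
  putStrLn (display buildCompilerId)
  print defaultCompilerFlavor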
This defaults --- to the compiler used to build the Cabal lib. --- --- However if it's not a recognised compiler then it's 'Nothing' and the user --- will have to specify which compiler they want. --- -defaultCompilerFlavor :: Maybe CompilerFlavor -defaultCompilerFlavor = case buildCompilerFlavor of - OtherCompiler _ -> Nothing - _ -> Just buildCompilerFlavor - --- ------------------------------------------------------------ --- * Compiler Id --- ------------------------------------------------------------ - -data CompilerId = CompilerId CompilerFlavor Version - deriving (Eq, Generic, Ord, Read, Show) - -instance Binary CompilerId - -instance Text CompilerId where - disp (CompilerId f (Version [] _)) = disp f - disp (CompilerId f v) = disp f <> Disp.char '-' <> disp v - - parse = do - flavour <- parse - version <- (Parse.char '-' >> parse) Parse.<++ return (Version [] []) - return (CompilerId flavour version) - -lowercase :: String -> String -lowercase = map Char.toLower - --- ------------------------------------------------------------ --- * Compiler Info --- ------------------------------------------------------------ - --- | Compiler information used for resolving configurations. Some fields can be --- set to Nothing to indicate that the information is unknown. - -data CompilerInfo = CompilerInfo { - compilerInfoId :: CompilerId, - -- ^ Compiler flavour and version. - compilerInfoAbiTag :: AbiTag, - -- ^ Tag for distinguishing incompatible ABI's on the same architecture/os. - compilerInfoCompat :: Maybe [CompilerId], - -- ^ Other implementations that this compiler claims to be compatible with, if known. - compilerInfoLanguages :: Maybe [Language], - -- ^ Supported language standards, if known. - compilerInfoExtensions :: Maybe [Extension] - -- ^ Supported extensions, if known. - } - deriving (Generic, Show, Read) - -instance Binary CompilerInfo - -data AbiTag - = NoAbiTag - | AbiTag String - deriving (Eq, Generic, Show, Read) - -instance Binary AbiTag - -instance Text AbiTag where - disp NoAbiTag = Disp.empty - disp (AbiTag tag) = Disp.text tag - - parse = do - tag <- Parse.munch (\c -> Char.isAlphaNum c || c == '_') - if null tag then return NoAbiTag else return (AbiTag tag) - -abiTagString :: AbiTag -> String -abiTagString NoAbiTag = "" -abiTagString (AbiTag tag) = tag - --- | Make a CompilerInfo of which only the known information is its CompilerId, --- its AbiTag and that it does not claim to be compatible with other --- compiler id's. -unknownCompilerInfo :: CompilerId -> AbiTag -> CompilerInfo -unknownCompilerInfo compilerId abiTag = - CompilerInfo compilerId abiTag (Just []) Nothing Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/GetOpt.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/GetOpt.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/GetOpt.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/GetOpt.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,335 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.GetOpt --- Copyright : (c) Sven Panne 2002-2005 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This library provides facilities for parsing the command-line options --- in a standalone program. 
It is essentially a Haskell port of the GNU --- @getopt@ library. --- ------------------------------------------------------------------------------ - -{- -Sven Panne Oct. 1996 (small -changes Dec. 1997) - -Two rather obscure features are missing: The Bash 2.0 non-option hack -(if you don't already know it, you probably don't want to hear about -it...) and the recognition of long options with a single dash -(e.g. '-help' is recognised as '--help', as long as there is no short -option 'h'). - -Other differences between GNU's getopt and this implementation: - -* To enforce a coherent description of options and arguments, there - are explanation fields in the option/argument descriptor. - -* Error messages are now more informative, but no longer POSIX - compliant... :-( - -And a final Haskell advertisement: The GNU C implementation uses well -over 1100 lines, we need only 195 here, including a 46 line example! -:-) --} - -{-# OPTIONS_HADDOCK hide #-} -module Distribution.GetOpt ( - -- * GetOpt - getOpt, getOpt', - usageInfo, - ArgOrder(..), - OptDescr(..), - ArgDescr(..), - - -- * Example - - -- $example -) where - -import Data.List ( isPrefixOf, intercalate, find ) -import Data.Maybe ( isJust ) - --- |What to do with options following non-options -data ArgOrder a - = RequireOrder -- ^ no option processing after first non-option - | Permute -- ^ freely intersperse options and non-options - | ReturnInOrder (String -> a) -- ^ wrap non-options into options - -{-| -Each 'OptDescr' describes a single option. - -The arguments to 'Option' are: - -* list of short option characters - -* list of long option strings (without \"--\") - -* argument descriptor - -* explanation of option for user --} -data OptDescr a = -- description of a single options: - Option [Char] -- list of short option characters - [String] -- list of long option strings (without "--") - (ArgDescr a) -- argument descriptor - String -- explanation of option for user - --- |Describes whether an option takes an argument or not, and if so --- how the argument is injected into a value of type @a@. -data ArgDescr a - = NoArg a -- ^ no argument expected - | ReqArg (String -> a) String -- ^ option requires argument - | OptArg (Maybe String -> a) String -- ^ optional argument - -data OptKind a -- kind of cmd line arg (internal use only): - = Opt a -- an option - | UnreqOpt String -- an un-recognized option - | NonOpt String -- a non-option - | EndOfOpts -- end-of-options marker (i.e. "--") - | OptErr String -- something went wrong... - --- | Return a string describing the usage of a command, derived from --- the header (first argument) and the options described by the --- second argument. -usageInfo :: String -- header - -> [OptDescr a] -- option descriptors - -> String -- nicely formatted decription of options -usageInfo header optDescr = unlines (header:table) - where (ss,ls,ds) = unzip3 [ (intercalate ", " (map (fmtShort ad) sos) - ,concatMap (fmtLong ad) (take 1 los) - ,d) - | Option sos los ad d <- optDescr ] - ssWidth = (maximum . map length) ss - lsWidth = (maximum . 
map length) ls - dsWidth = 30 `max` (80 - (ssWidth + lsWidth + 3)) - table = [ " " ++ padTo ssWidth so' ++ - " " ++ padTo lsWidth lo' ++ - " " ++ d' - | (so,lo,d) <- zip3 ss ls ds - , (so',lo',d') <- fmtOpt dsWidth so lo d ] - padTo n x = take n (x ++ repeat ' ') - -fmtOpt :: Int -> String -> String -> String -> [(String, String, String)] -fmtOpt descrWidth so lo descr = - case wrapText descrWidth descr of - [] -> [(so,lo,"")] - (d:ds) -> (so,lo,d) : [ ("","",d') | d' <- ds ] - -fmtShort :: ArgDescr a -> Char -> String -fmtShort (NoArg _ ) so = "-" ++ [so] -fmtShort (ReqArg _ _) so = "-" ++ [so] -fmtShort (OptArg _ _) so = "-" ++ [so] - -fmtLong :: ArgDescr a -> String -> String -fmtLong (NoArg _ ) lo = "--" ++ lo -fmtLong (ReqArg _ ad) lo = "--" ++ lo ++ "=" ++ ad -fmtLong (OptArg _ ad) lo = "--" ++ lo ++ "[=" ++ ad ++ "]" - -wrapText :: Int -> String -> [String] -wrapText width = map unwords . wrap 0 [] . words - where wrap :: Int -> [String] -> [String] -> [[String]] - wrap 0 [] (w:ws) - | length w + 1 > width - = wrap (length w) [w] ws - wrap col line (w:ws) - | col + length w + 1 > width - = reverse line : wrap 0 [] (w:ws) - wrap col line (w:ws) - = let col' = col + length w + 1 - in wrap col' (w:line) ws - wrap _ [] [] = [] - wrap _ line [] = [reverse line] - -{-| -Process the command-line, and return the list of values that matched -(and those that didn\'t). The arguments are: - -* The order requirements (see 'ArgOrder') - -* The option descriptions (see 'OptDescr') - -* The actual command line arguments (presumably got from - 'System.Environment.getArgs'). - -'getOpt' returns a triple consisting of the option arguments, a list -of non-options, and a list of error messages. --} -getOpt :: ArgOrder a -- non-option handling - -> [OptDescr a] -- option descriptors - -> [String] -- the command-line arguments - -> ([a],[String],[String]) -- (options,non-options,error messages) -getOpt ordering optDescr args = (os,xs,es ++ map errUnrec us) - where (os,xs,us,es) = getOpt' ordering optDescr args - -{-| -This is almost the same as 'getOpt', but returns a quadruple -consisting of the option arguments, a list of non-options, a list of -unrecognized options, and a list of error messages. 
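For illustration, a brief sketch of the difference described here, using a hypothetical two-option table: 'getOpt' turns "--unknown" into an error message, while 'getOpt'' returns it in the separate list of unrecognized options.

module Main where

import Distribution.GetOpt

data Flag = Verbose | Output String deriving Show

options :: [OptDescr Flag]
options =
  [ Option ['v'] ["verbose"] (NoArg Verbose)        "chatty output"
  , Option ['o'] ["output"]  (ReqArg Output "FILE") "write output to FILE"
  ]

main :: IO ()
main = do
  print (getOpt  Permute options ["-v", "--unknown", "input.hs"])
  -- ([Verbose],["input.hs"],["unrecognized option `--unknown'\n"])
  print (getOpt' Permute options ["-v", "--unknown", "input.hs"])
  -- ([Verbose],["input.hs"],["--unknown"],[])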
--} -getOpt' :: ArgOrder a -- non-option handling - -> [OptDescr a] -- option descriptors - -> [String] -- the command-line arguments - -> ([a],[String], [String] ,[String]) -- (options,non-options,unrecognized,error messages) -getOpt' _ _ [] = ([],[],[],[]) -getOpt' ordering optDescr (arg:args) = procNextOpt opt ordering - where procNextOpt (Opt o) _ = (o:os,xs,us,es) - procNextOpt (UnreqOpt u) _ = (os,xs,u:us,es) - procNextOpt (NonOpt x) RequireOrder = ([],x:rest,[],[]) - procNextOpt (NonOpt x) Permute = (os,x:xs,us,es) - procNextOpt (NonOpt x) (ReturnInOrder f) = (f x :os, xs,us,es) - procNextOpt EndOfOpts RequireOrder = ([],rest,[],[]) - procNextOpt EndOfOpts Permute = ([],rest,[],[]) - procNextOpt EndOfOpts (ReturnInOrder f) = (map f rest,[],[],[]) - procNextOpt (OptErr e) _ = (os,xs,us,e:es) - - (opt,rest) = getNext arg args optDescr - (os,xs,us,es) = getOpt' ordering optDescr rest - --- take a look at the next cmd line arg and decide what to do with it -getNext :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -getNext ('-':'-':[]) rest _ = (EndOfOpts,rest) -getNext ('-':'-':xs) rest optDescr = longOpt xs rest optDescr -getNext ('-': x :xs) rest optDescr = shortOpt x xs rest optDescr -getNext a rest _ = (NonOpt a,rest) - --- handle long option -longOpt :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -longOpt ls rs optDescr = long ads arg rs - where (opt,arg) = break (=='=') ls - getWith p = [ o | o@(Option _ xs _ _) <- optDescr - , isJust (find (p opt) xs)] - exact = getWith (==) - options = if null exact then getWith isPrefixOf else exact - ads = [ ad | Option _ _ ad _ <- options ] - optStr = "--" ++ opt - - long (_:_:_) _ rest = (errAmbig options optStr,rest) - long [NoArg a ] [] rest = (Opt a,rest) - long [NoArg _ ] ('=':_) rest = (errNoArg optStr,rest) - long [ReqArg _ d] [] [] = (errReq d optStr,[]) - long [ReqArg f _] [] (r:rest) = (Opt (f r),rest) - long [ReqArg f _] ('=':xs) rest = (Opt (f xs),rest) - long [OptArg f _] [] rest = (Opt (f Nothing),rest) - long [OptArg f _] ('=':xs) rest = (Opt (f (Just xs)),rest) - long _ _ rest = (UnreqOpt ("--"++ls),rest) - --- handle short option -shortOpt :: Char -> String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -shortOpt y ys rs optDescr = short ads ys rs - where options = [ o | o@(Option ss _ _ _) <- optDescr, s <- ss, y == s ] - ads = [ ad | Option _ _ ad _ <- options ] - optStr = '-':[y] - - short (_:_:_) _ rest = (errAmbig options optStr,rest) - short (NoArg a :_) [] rest = (Opt a,rest) - short (NoArg a :_) xs rest = (Opt a,('-':xs):rest) - short (ReqArg _ d:_) [] [] = (errReq d optStr,[]) - short (ReqArg f _:_) [] (r:rest) = (Opt (f r),rest) - short (ReqArg f _:_) xs rest = (Opt (f xs),rest) - short (OptArg f _:_) [] rest = (Opt (f Nothing),rest) - short (OptArg f _:_) xs rest = (Opt (f (Just xs)),rest) - short [] [] rest = (UnreqOpt optStr,rest) - short [] xs rest = (UnreqOpt (optStr++xs),rest) - --- miscellaneous error formatting - -errAmbig :: [OptDescr a] -> String -> OptKind a -errAmbig ods optStr = OptErr (usageInfo header ods) - where header = "option `" ++ optStr ++ "' is ambiguous; could be one of:" - -errReq :: String -> String -> OptKind a -errReq d optStr = OptErr ("option `" ++ optStr ++ "' requires an argument " ++ d ++ "\n") - -errUnrec :: String -> String -errUnrec optStr = "unrecognized option `" ++ optStr ++ "'\n" - -errNoArg :: String -> OptKind a -errNoArg optStr = OptErr ("option `" ++ optStr ++ "' doesn't allow an argument\n") - -{- 
------------------------------------------------------------------------------------------ --- and here a small and hopefully enlightening example: - -data Flag = Verbose | Version | Name String | Output String | Arg String deriving Show - -options :: [OptDescr Flag] -options = - [Option ['v'] ["verbose"] (NoArg Verbose) "verbosely list files", - Option ['V','?'] ["version","release"] (NoArg Version) "show version info", - Option ['o'] ["output"] (OptArg out "FILE") "use FILE for dump", - Option ['n'] ["name"] (ReqArg Name "USER") "only dump USER's files"] - -out :: Maybe String -> Flag -out Nothing = Output "stdout" -out (Just o) = Output o - -test :: ArgOrder Flag -> [String] -> String -test order cmdline = case getOpt order options cmdline of - (o,n,[] ) -> "options=" ++ show o ++ " args=" ++ show n ++ "\n" - (_,_,errs) -> concat errs ++ usageInfo header options - where header = "Usage: foobar [OPTION...] files..." - --- example runs: --- putStr (test RequireOrder ["foo","-v"]) --- ==> options=[] args=["foo", "-v"] --- putStr (test Permute ["foo","-v"]) --- ==> options=[Verbose] args=["foo"] --- putStr (test (ReturnInOrder Arg) ["foo","-v"]) --- ==> options=[Arg "foo", Verbose] args=[] --- putStr (test Permute ["foo","--","-v"]) --- ==> options=[] args=["foo", "-v"] --- putStr (test Permute ["-?o","--name","bar","--na=baz"]) --- ==> options=[Version, Output "stdout", Name "bar", Name "baz"] args=[] --- putStr (test Permute ["--ver","foo"]) --- ==> option `--ver' is ambiguous; could be one of: --- -v --verbose verbosely list files --- -V, -? --version, --release show version info --- Usage: foobar [OPTION...] files... --- -v --verbose verbosely list files --- -V, -? --version, --release show version info --- -o[FILE] --output[=FILE] use FILE for dump --- -n USER --name=USER only dump USER's files ------------------------------------------------------------------------------------------ --} - -{- $example - -To hopefully illuminate the role of the different data -structures, here\'s the command-line options for a (very simple) -compiler: - -> module Opts where -> -> import Distribution.GetOpt -> import Data.Maybe ( fromMaybe ) -> -> data Flag -> = Verbose | Version -> | Input String | Output String | LibDir String -> deriving Show -> -> options :: [OptDescr Flag] -> options = -> [ Option ['v'] ["verbose"] (NoArg Verbose) "chatty output on stderr" -> , Option ['V','?'] ["version"] (NoArg Version) "show version number" -> , Option ['o'] ["output"] (OptArg outp "FILE") "output FILE" -> , Option ['c'] [] (OptArg inp "FILE") "input FILE" -> , Option ['L'] ["libdir"] (ReqArg LibDir "DIR") "library directory" -> ] -> -> inp,outp :: Maybe String -> Flag -> outp = Output . fromMaybe "stdout" -> inp = Input . fromMaybe "stdin" -> -> compilerOpts :: [String] -> IO ([Flag], [String]) -> compilerOpts argv = -> case getOpt Permute options argv of -> (o,n,[] ) -> return (o,n) -> (_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options)) -> where header = "Usage: ic [OPTION...] files..." 
- --} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/InstalledPackageInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/InstalledPackageInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/InstalledPackageInfo.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,372 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.InstalledPackageInfo --- Copyright : (c) The University of Glasgow 2004 --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This is the information about an /installed/ package that --- is communicated to the @ghc-pkg@ program in order to register --- a package. @ghc-pkg@ now consumes this package format (as of version --- 6.4). This is specific to GHC at the moment. --- --- The @.cabal@ file format is for describing a package that is not yet --- installed. It has a lot of flexibility, like conditionals and dependency --- ranges. As such, that format is not at all suitable for describing a package --- that has already been built and installed. By the time we get to that stage, --- we have resolved all conditionals and resolved dependency version --- constraints to exact versions of dependent packages. So, this module defines --- the 'InstalledPackageInfo' data structure that contains all the info we keep --- about an installed package. There is a parser and pretty printer. The --- textual format is rather simpler than the @.cabal@ format: there are no --- sections, for example. - --- This module is meant to be local-only to Distribution... 
- -module Distribution.InstalledPackageInfo ( - InstalledPackageInfo(..), - installedComponentId, - installedPackageId, - OriginalModule(..), ExposedModule(..), - ParseResult(..), PError(..), PWarning, - emptyInstalledPackageInfo, - parseInstalledPackageInfo, - showInstalledPackageInfo, - showInstalledPackageInfoField, - showSimpleInstalledPackageInfoField, - fieldsInstalledPackageInfo, - ) where - -import Distribution.ParseUtils -import Distribution.License -import Distribution.Package hiding (installedUnitId, installedPackageId) -import qualified Distribution.Package as Package -import Distribution.ModuleName -import Distribution.Version -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.Binary - -import Text.PrettyPrint as Disp -import Data.Maybe (fromMaybe) -import GHC.Generics (Generic) - --- ----------------------------------------------------------------------------- --- The InstalledPackageInfo type - --- For BC reasons, we continue to name this record an InstalledPackageInfo; --- but it would more accurately be called an InstalledUnitInfo with Backpack -data InstalledPackageInfo - = InstalledPackageInfo { - -- these parts are exactly the same as PackageDescription - sourcePackageId :: PackageId, - installedUnitId :: UnitId, - compatPackageKey :: String, - license :: License, - copyright :: String, - maintainer :: String, - author :: String, - stability :: String, - homepage :: String, - pkgUrl :: String, - synopsis :: String, - description :: String, - category :: String, - -- these parts are required by an installed package only: - abiHash :: AbiHash, - exposed :: Bool, - exposedModules :: [ExposedModule], - hiddenModules :: [ModuleName], - trusted :: Bool, - importDirs :: [FilePath], - libraryDirs :: [FilePath], - libraryDynDirs :: [FilePath], - dataDir :: FilePath, - hsLibraries :: [String], - extraLibraries :: [String], - extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi - includeDirs :: [FilePath], - includes :: [String], - depends :: [UnitId], - ccOptions :: [String], - ldOptions :: [String], - frameworkDirs :: [FilePath], - frameworks :: [String], - haddockInterfaces :: [FilePath], - haddockHTMLs :: [FilePath], - pkgRoot :: Maybe FilePath - } - deriving (Eq, Generic, Read, Show) - -installedComponentId :: InstalledPackageInfo -> ComponentId -installedComponentId ipi = case installedUnitId ipi of - SimpleUnitId cid -> cid - -{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} --- | Backwards compatibility with Cabal pre-1.24. 
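For illustration, a minimal sketch of driving the parser and pretty-printer mentioned in the module header above. The package name, id and module in the fragment are invented; the sketch only shows that a well-formed fragment parses and can be printed back in the same flat, section-less format.

module Main where

import Distribution.InstalledPackageInfo
         (ParseResult (..), parseInstalledPackageInfo, showInstalledPackageInfo)

main :: IO ()
main = do
  -- A made-up fragment in the text format that ghc-pkg consumes.
  let sample = unlines
        [ "name: example"
        , "version: 1.0"
        , "id: example-1.0-abc123"
        , "exposed: True"
        , "exposed-modules: Example"
        ]
  case parseInstalledPackageInfo sample of
    ParseFailed err       -> print err
    ParseOk _warnings ipi -> putStr (showInstalledPackageInfo ipi)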
-installedPackageId :: InstalledPackageInfo -> UnitId -installedPackageId = installedUnitId - -instance Binary InstalledPackageInfo - -instance Package.Package InstalledPackageInfo where - packageId = sourcePackageId - -instance Package.HasUnitId InstalledPackageInfo where - installedUnitId = installedUnitId - -instance Package.PackageInstalled InstalledPackageInfo where - installedDepends = depends - -emptyInstalledPackageInfo :: InstalledPackageInfo -emptyInstalledPackageInfo - = InstalledPackageInfo { - sourcePackageId = PackageIdentifier (PackageName "") (Version [] []), - installedUnitId = mkUnitId "", - compatPackageKey = "", - license = UnspecifiedLicense, - copyright = "", - maintainer = "", - author = "", - stability = "", - homepage = "", - pkgUrl = "", - synopsis = "", - description = "", - category = "", - abiHash = AbiHash "", - exposed = False, - exposedModules = [], - hiddenModules = [], - trusted = False, - importDirs = [], - libraryDirs = [], - libraryDynDirs = [], - dataDir = "", - hsLibraries = [], - extraLibraries = [], - extraGHCiLibraries= [], - includeDirs = [], - includes = [], - depends = [], - ccOptions = [], - ldOptions = [], - frameworkDirs = [], - frameworks = [], - haddockInterfaces = [], - haddockHTMLs = [], - pkgRoot = Nothing - } - --- ----------------------------------------------------------------------------- --- Exposed modules - -data OriginalModule - = OriginalModule { - originalPackageId :: UnitId, - originalModuleName :: ModuleName - } - deriving (Generic, Eq, Read, Show) - -data ExposedModule - = ExposedModule { - exposedName :: ModuleName, - exposedReexport :: Maybe OriginalModule - } - deriving (Eq, Generic, Read, Show) - -instance Text OriginalModule where - disp (OriginalModule ipi m) = - disp ipi <> Disp.char ':' <> disp m - parse = do - ipi <- parse - _ <- Parse.char ':' - m <- parse - return (OriginalModule ipi m) - -instance Text ExposedModule where - disp (ExposedModule m reexport) = - Disp.hsep [ disp m - , case reexport of - Just m' -> Disp.hsep [Disp.text "from", disp m'] - Nothing -> Disp.empty - ] - parse = do - m <- parseModuleNameQ - Parse.skipSpaces - reexport <- Parse.option Nothing $ do - _ <- Parse.string "from" - Parse.skipSpaces - fmap Just parse - return (ExposedModule m reexport) - - -instance Binary OriginalModule - -instance Binary ExposedModule - --- To maintain backwards-compatibility, we accept both comma/non-comma --- separated variants of this field. You SHOULD use the comma syntax if you --- use any new functions, although actually it's unambiguous due to a quirk --- of the fact that modules must start with capital letters. 
- -showExposedModules :: [ExposedModule] -> Disp.Doc -showExposedModules xs - | all isExposedModule xs = fsep (map disp xs) - | otherwise = fsep (Disp.punctuate comma (map disp xs)) - where isExposedModule (ExposedModule _ Nothing) = True - isExposedModule _ = False - -parseExposedModules :: Parse.ReadP r [ExposedModule] -parseExposedModules = parseOptCommaList parse - --- ----------------------------------------------------------------------------- --- Parsing - -parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo -parseInstalledPackageInfo = - parseFieldsFlat (fieldsInstalledPackageInfo ++ deprecatedFieldDescrs) - emptyInstalledPackageInfo - --- ----------------------------------------------------------------------------- --- Pretty-printing - -showInstalledPackageInfo :: InstalledPackageInfo -> String -showInstalledPackageInfo = showFields fieldsInstalledPackageInfo - -showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) -showInstalledPackageInfoField = showSingleNamedField fieldsInstalledPackageInfo - -showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) -showSimpleInstalledPackageInfoField = showSimpleSingleNamedField fieldsInstalledPackageInfo - --- ----------------------------------------------------------------------------- --- Description of the fields, for parsing/printing - -fieldsInstalledPackageInfo :: [FieldDescr InstalledPackageInfo] -fieldsInstalledPackageInfo = basicFieldDescrs ++ installedFieldDescrs - -basicFieldDescrs :: [FieldDescr InstalledPackageInfo] -basicFieldDescrs = - [ simpleField "name" - disp parsePackageNameQ - packageName (\name pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgName=name}}) - , simpleField "version" - disp parseOptVersion - packageVersion (\ver pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgVersion=ver}}) - , simpleField "id" - disp parse - installedUnitId (\pk pkg -> pkg{installedUnitId=pk}) - -- NB: parse these as component IDs - , simpleField "key" - (disp . 
ComponentId) (fmap (\(ComponentId s) -> s) parse) - compatPackageKey (\pk pkg -> pkg{compatPackageKey=pk}) - , simpleField "license" - disp parseLicenseQ - license (\l pkg -> pkg{license=l}) - , simpleField "copyright" - showFreeText parseFreeText - copyright (\val pkg -> pkg{copyright=val}) - , simpleField "maintainer" - showFreeText parseFreeText - maintainer (\val pkg -> pkg{maintainer=val}) - , simpleField "stability" - showFreeText parseFreeText - stability (\val pkg -> pkg{stability=val}) - , simpleField "homepage" - showFreeText parseFreeText - homepage (\val pkg -> pkg{homepage=val}) - , simpleField "package-url" - showFreeText parseFreeText - pkgUrl (\val pkg -> pkg{pkgUrl=val}) - , simpleField "synopsis" - showFreeText parseFreeText - synopsis (\val pkg -> pkg{synopsis=val}) - , simpleField "description" - showFreeText parseFreeText - description (\val pkg -> pkg{description=val}) - , simpleField "category" - showFreeText parseFreeText - category (\val pkg -> pkg{category=val}) - , simpleField "author" - showFreeText parseFreeText - author (\val pkg -> pkg{author=val}) - ] - -installedFieldDescrs :: [FieldDescr InstalledPackageInfo] -installedFieldDescrs = [ - boolField "exposed" - exposed (\val pkg -> pkg{exposed=val}) - , simpleField "exposed-modules" - showExposedModules parseExposedModules - exposedModules (\xs pkg -> pkg{exposedModules=xs}) - , listField "hidden-modules" - disp parseModuleNameQ - hiddenModules (\xs pkg -> pkg{hiddenModules=xs}) - , simpleField "abi" - disp parse - abiHash (\abi pkg -> pkg{abiHash=abi}) - , boolField "trusted" - trusted (\val pkg -> pkg{trusted=val}) - , listField "import-dirs" - showFilePath parseFilePathQ - importDirs (\xs pkg -> pkg{importDirs=xs}) - , listField "library-dirs" - showFilePath parseFilePathQ - libraryDirs (\xs pkg -> pkg{libraryDirs=xs}) - , listField "dynamic-library-dirs" - showFilePath parseFilePathQ - libraryDynDirs (\xs pkg -> pkg{libraryDynDirs=xs}) - , simpleField "data-dir" - showFilePath (parseFilePathQ Parse.<++ return "") - dataDir (\val pkg -> pkg{dataDir=val}) - , listField "hs-libraries" - showFilePath parseTokenQ - hsLibraries (\xs pkg -> pkg{hsLibraries=xs}) - , listField "extra-libraries" - showToken parseTokenQ - extraLibraries (\xs pkg -> pkg{extraLibraries=xs}) - , listField "extra-ghci-libraries" - showToken parseTokenQ - extraGHCiLibraries (\xs pkg -> pkg{extraGHCiLibraries=xs}) - , listField "include-dirs" - showFilePath parseFilePathQ - includeDirs (\xs pkg -> pkg{includeDirs=xs}) - , listField "includes" - showFilePath parseFilePathQ - includes (\xs pkg -> pkg{includes=xs}) - , listField "depends" - disp parse - depends (\xs pkg -> pkg{depends=xs}) - , listField "cc-options" - showToken parseTokenQ - ccOptions (\path pkg -> pkg{ccOptions=path}) - , listField "ld-options" - showToken parseTokenQ - ldOptions (\path pkg -> pkg{ldOptions=path}) - , listField "framework-dirs" - showFilePath parseFilePathQ - frameworkDirs (\xs pkg -> pkg{frameworkDirs=xs}) - , listField "frameworks" - showToken parseTokenQ - frameworks (\xs pkg -> pkg{frameworks=xs}) - , listField "haddock-interfaces" - showFilePath parseFilePathQ - haddockInterfaces (\xs pkg -> pkg{haddockInterfaces=xs}) - , listField "haddock-html" - showFilePath parseFilePathQ - haddockHTMLs (\xs pkg -> pkg{haddockHTMLs=xs}) - , simpleField "pkgroot" - (const Disp.empty) parseFilePathQ - (fromMaybe "" . 
pkgRoot) (\xs pkg -> pkg{pkgRoot=Just xs}) - ] - -deprecatedFieldDescrs :: [FieldDescr InstalledPackageInfo] -deprecatedFieldDescrs = [ - listField "hugs-options" - showToken parseTokenQ - (const []) (const id) - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Lex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Lex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Lex.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Lex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -{-# LANGUAGE PatternGuards #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Lex --- Copyright : Ben Gamari 2015-2019 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains a simple lexer supporting quoted strings - -module Distribution.Lex ( - tokenizeQuotedWords - ) where - -import Data.Char (isSpace) -import Distribution.Compat.Semigroup as Semi - -newtype DList a = DList ([a] -> [a]) - -runDList :: DList a -> [a] -runDList (DList run) = run [] - -singleton :: a -> DList a -singleton a = DList (a:) - -instance Monoid (DList a) where - mempty = DList id - mappend = (Semi.<>) - -instance Semigroup (DList a) where - DList a <> DList b = DList (a . b) - -tokenizeQuotedWords :: String -> [String] -tokenizeQuotedWords = filter (not . null) . go False mempty - where - go :: Bool -- ^ in quoted region - -> DList Char -- ^ accumulator - -> String -- ^ string to be parsed - -> [String] -- ^ parse result - go _ accum [] - | [] <- accum' = [] - | otherwise = [accum'] - where accum' = runDList accum - - go False accum (c:cs) - | isSpace c = runDList accum : go False mempty cs - | c == '"' = go True accum cs - - go True accum (c:cs) - | c == '"' = go False accum cs - - go quoted accum (c:cs) - = go quoted (accum `mappend` singleton c) cs - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/License.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/License.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/License.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/License.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,177 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.License --- Description : The License data type. --- Copyright : Isaac Jones 2003-2005 --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Package descriptions contain fields for specifying the name of a software --- license and the name of the file containing the text of that license. While --- package authors may choose any license they like, Cabal provides an --- enumeration of a small set of common free and open source software licenses. --- This is done so that Hackage can recognise licenses, so that tools can detect --- , --- and to deter --- . --- --- It is recommended that all package authors use the @license-file@ or --- @license-files@ fields in their package descriptions. Further information --- about these fields can be found in the --- . 
--- --- = Additional resources --- --- The following websites provide information about free and open source --- software licenses: --- --- * --- --- * --- --- = Disclaimer --- --- The descriptions of software licenses provided by this documentation are --- intended for informational purposes only and in no way constitute legal --- advice. Please read the text of the licenses and consult a lawyer for any --- advice regarding software licensing. - -module Distribution.License ( - License(..), - knownLicenses, - ) where - -import Distribution.Version -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.Binary - -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ((<>)) -import qualified Data.Char as Char (isAlphaNum) -import Data.Data (Data) -import Data.Typeable (Typeable) -import GHC.Generics (Generic) - --- | Indicates the license under which a package's source code is released. --- Versions of the licenses not listed here will be rejected by Hackage and --- cause @cabal check@ to issue a warning. -data License = - -- TODO: * remove BSD4 - - -- | GNU General Public License, - -- or - -- . - GPL (Maybe Version) - - -- | . - | AGPL (Maybe Version) - - -- | GNU Lesser General Public License, - -- or - -- . - | LGPL (Maybe Version) - - -- | . - | BSD2 - - -- | . - | BSD3 - - -- | . - -- This license has not been approved by the OSI and is incompatible with - -- the GNU GPL. It is provided for historical reasons and should be avoided. - | BSD4 - - -- | . - | MIT - - -- | - | ISC - - -- | . - | MPL Version - - -- | . - | Apache (Maybe Version) - - -- | The author of a package disclaims any copyright to its source code and - -- dedicates it to the public domain. This is not a software license. Please - -- note that it is not possible to dedicate works to the public domain in - -- every jurisdiction, nor is a work that is in the public domain in one - -- jurisdiction necessarily in the public domain elsewhere. - | PublicDomain - - -- | Explicitly 'All Rights Reserved', eg for proprietary software. The - -- package may not be legally modified or redistributed by anyone but the - -- rightsholder. - | AllRightsReserved - - -- | No license specified which legally defaults to 'All Rights Reserved'. - -- The package may not be legally modified or redistributed by anyone but - -- the rightsholder. - | UnspecifiedLicense - - -- | Any other software license. - | OtherLicense - - -- | Indicates an erroneous license name. - | UnknownLicense String - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary License - --- | The list of all currently recognised licenses. 
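For illustration, a short sketch of how these license names behave through the 'Text' instance that follows: known names round-trip, versioned licenses are written with a dash, and anything unrecognised is preserved as 'UnknownLicense'. The name "MyLicense-1.0" is made up.

module Main where

import Distribution.License (License (..), knownLicenses)
import Distribution.Text (display, simpleParse)

main :: IO ()
main = do
  -- Textual forms of every recognised license, e.g. "GPL", "GPL-2", "BSD3", "MPL-2.0".
  putStrLn (unwords (map display knownLicenses))
  print (simpleParse "GPL-3"         :: Maybe License)  -- Just (GPL (Just ...))
  print (simpleParse "BSD3"          :: Maybe License)  -- Just BSD3
  print (simpleParse "MyLicense-1.0" :: Maybe License)  -- Just (UnknownLicense "MyLicense-1.0")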
-knownLicenses :: [License] -knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3]) - , LGPL unversioned, LGPL (version [2, 1]), LGPL (version [3]) - , AGPL unversioned, AGPL (version [3]) - , BSD2, BSD3, MIT, ISC - , MPL (Version [2, 0] []) - , Apache unversioned, Apache (version [2, 0]) - , PublicDomain, AllRightsReserved, OtherLicense] - where - unversioned = Nothing - version v = Just (Version v []) - -instance Text License where - disp (GPL version) = Disp.text "GPL" <> dispOptVersion version - disp (LGPL version) = Disp.text "LGPL" <> dispOptVersion version - disp (AGPL version) = Disp.text "AGPL" <> dispOptVersion version - disp (MPL version) = Disp.text "MPL" <> dispVersion version - disp (Apache version) = Disp.text "Apache" <> dispOptVersion version - disp (UnknownLicense other) = Disp.text other - disp other = Disp.text (show other) - - parse = do - name <- Parse.munch1 (\c -> Char.isAlphaNum c && c /= '-') - version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse) - return $! case (name, version :: Maybe Version) of - ("GPL", _ ) -> GPL version - ("LGPL", _ ) -> LGPL version - ("AGPL", _ ) -> AGPL version - ("BSD2", Nothing) -> BSD2 - ("BSD3", Nothing) -> BSD3 - ("BSD4", Nothing) -> BSD4 - ("ISC", Nothing) -> ISC - ("MIT", Nothing) -> MIT - ("MPL", Just version') -> MPL version' - ("Apache", _ ) -> Apache version - ("PublicDomain", Nothing) -> PublicDomain - ("AllRightsReserved", Nothing) -> AllRightsReserved - ("OtherLicense", Nothing) -> OtherLicense - _ -> UnknownLicense $ name ++ - maybe "" (('-':) . display) version - -dispOptVersion :: Maybe Version -> Disp.Doc -dispOptVersion Nothing = Disp.empty -dispOptVersion (Just v) = dispVersion v - -dispVersion :: Version -> Disp.Doc -dispVersion v = Disp.char '-' <> disp v diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Make.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Make.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Make.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Make.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,181 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Make --- Copyright : Martin Sjögren 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is an alternative build system that delegates everything to the @make@ --- program. All the commands just end up calling @make@ with appropriate --- arguments. The intention was to allow preexisting packages that used --- makefiles to be wrapped into Cabal packages. In practice essentially all --- such packages were converted over to the \"Simple\" build system instead. --- Consequently this module is not used much and it certainly only sees cursory --- maintenance and no testing. Perhaps at some point we should stop pretending --- that it works. --- --- Uses the parsed command-line from "Distribution.Simple.Setup" in order to build --- Haskell tools using a back-end build system based on make. Obviously we --- assume that there is a configure script, and that after the ConfigCmd has --- been run, there is a Makefile. Further assumptions: --- --- [ConfigCmd] We assume the configure script accepts --- @--with-hc@, --- @--with-hc-pkg@, --- @--prefix@, --- @--bindir@, --- @--libdir@, --- @--libexecdir@, --- @--datadir@. 
--- --- [BuildCmd] We assume that the default Makefile target will build everything. --- --- [InstallCmd] We assume there is an @install@ target. Note that we assume that --- this does *not* register the package! --- --- [CopyCmd] We assume there is a @copy@ target, and a variable @$(destdir)@. --- The @copy@ target should probably just invoke @make install@ --- recursively (e.g. @$(MAKE) install prefix=$(destdir)\/$(prefix) --- bindir=$(destdir)\/$(bindir)@. The reason we can\'t invoke @make --- install@ directly here is that we don\'t know the value of @$(prefix)@. --- --- [SDistCmd] We assume there is a @dist@ target. --- --- [RegisterCmd] We assume there is a @register@ target and a variable @$(user)@. --- --- [UnregisterCmd] We assume there is an @unregister@ target. --- --- [HaddockCmd] We assume there is a @docs@ or @doc@ target. - - --- copy : --- $(MAKE) install prefix=$(destdir)/$(prefix) \ --- bindir=$(destdir)/$(bindir) \ - -module Distribution.Make ( - module Distribution.Package, - License(..), Version(..), - defaultMain, defaultMainArgs, defaultMainNoRead - ) where - --- local -import Distribution.Compat.Exception -import Distribution.Package -import Distribution.Simple.Program -import Distribution.PackageDescription -import Distribution.Simple.Setup -import Distribution.Simple.Command - -import Distribution.Simple.Utils - -import Distribution.License -import Distribution.Version -import Distribution.Text - -import System.Environment (getArgs, getProgName) -import System.Exit - -defaultMain :: IO () -defaultMain = getArgs >>= defaultMainArgs - -defaultMainArgs :: [String] -> IO () -defaultMainArgs = defaultMainHelper - -{-# DEPRECATED defaultMainNoRead "it ignores its PackageDescription arg" #-} -defaultMainNoRead :: PackageDescription -> IO () -defaultMainNoRead = const defaultMain - -defaultMainHelper :: [String] -> IO () -defaultMainHelper args = - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (flags, commandParse) -> - case commandParse of - _ | fromFlag (globalVersion flags) -> printVersion - | fromFlag (globalNumericVersion flags) -> printNumericVersion - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo action -> action - - where - printHelp help = getProgName >>= putStr . help - printOptionsList = putStr . 
unlines - printErrors errs = do - putStr (intercalate "\n" errs) - exitWith (ExitFailure 1) - printNumericVersion = putStrLn $ display cabalVersion - printVersion = putStrLn $ "Cabal library version " - ++ display cabalVersion - - progs = defaultProgramConfiguration - commands = - [configureCommand progs `commandAddAction` configureAction - ,buildCommand progs `commandAddAction` buildAction - ,installCommand `commandAddAction` installAction - ,copyCommand `commandAddAction` copyAction - ,haddockCommand `commandAddAction` haddockAction - ,cleanCommand `commandAddAction` cleanAction - ,sdistCommand `commandAddAction` sdistAction - ,registerCommand `commandAddAction` registerAction - ,unregisterCommand `commandAddAction` unregisterAction - ] - -configureAction :: ConfigFlags -> [String] -> IO () -configureAction flags args = do - noExtraFlags args - let verbosity = fromFlag (configVerbosity flags) - rawSystemExit verbosity "sh" $ - "configure" - : configureArgs backwardsCompatHack flags - where backwardsCompatHack = True - -copyAction :: CopyFlags -> [String] -> IO () -copyAction flags args = do - noExtraFlags args - let destArgs = case fromFlag $ copyDest flags of - NoCopyDest -> ["install"] - CopyTo path -> ["copy", "destdir=" ++ path] - rawSystemExit (fromFlag $ copyVerbosity flags) "make" destArgs - -installAction :: InstallFlags -> [String] -> IO () -installAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ installVerbosity flags) "make" ["install"] - rawSystemExit (fromFlag $ installVerbosity flags) "make" ["register"] - -haddockAction :: HaddockFlags -> [String] -> IO () -haddockAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["docs"] - `catchIO` \_ -> - rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["doc"] - -buildAction :: BuildFlags -> [String] -> IO () -buildAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ buildVerbosity flags) "make" [] - -cleanAction :: CleanFlags -> [String] -> IO () -cleanAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ cleanVerbosity flags) "make" ["clean"] - -sdistAction :: SDistFlags -> [String] -> IO () -sdistAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ sDistVerbosity flags) "make" ["dist"] - -registerAction :: RegisterFlags -> [String] -> IO () -registerAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ regVerbosity flags) "make" ["register"] - -unregisterAction :: RegisterFlags -> [String] -> IO () -unregisterAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ regVerbosity flags) "make" ["unregister"] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ModuleName.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ModuleName.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ModuleName.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ModuleName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,109 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.ModuleName --- Copyright : Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Data type for Haskell module names. 
- -module Distribution.ModuleName ( - ModuleName, - fromString, - components, - toFilePath, - main, - simple, - ) where - -import Distribution.Text -import Distribution.Compat.Binary -import qualified Distribution.Compat.ReadP as Parse - -import qualified Data.Char as Char - ( isAlphaNum, isUpper ) -import Data.Data (Data) -import Data.Typeable (Typeable) -import qualified Text.PrettyPrint as Disp -import Data.List - ( intercalate, intersperse ) -import GHC.Generics (Generic) -import System.FilePath - ( pathSeparator ) - --- | A valid Haskell module name. --- -newtype ModuleName = ModuleName [String] - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary ModuleName - -instance Text ModuleName where - disp (ModuleName ms) = - Disp.hcat (intersperse (Disp.char '.') (map Disp.text ms)) - - parse = do - ms <- Parse.sepBy1 component (Parse.char '.') - return (ModuleName ms) - - where - component = do - c <- Parse.satisfy Char.isUpper - cs <- Parse.munch validModuleChar - return (c:cs) - -validModuleChar :: Char -> Bool -validModuleChar c = Char.isAlphaNum c || c == '_' || c == '\'' - -validModuleComponent :: String -> Bool -validModuleComponent [] = False -validModuleComponent (c:cs) = Char.isUpper c - && all validModuleChar cs - -{-# DEPRECATED simple "use ModuleName.fromString instead" #-} -simple :: String -> ModuleName -simple str = ModuleName [str] - --- | Construct a 'ModuleName' from a valid module name 'String'. --- --- This is just a convenience function intended for valid module strings. It is --- an error if it is used with a string that is not a valid module name. If you --- are parsing user input then use 'Distribution.Text.simpleParse' instead. --- -fromString :: String -> ModuleName -fromString string - | all validModuleComponent components' = ModuleName components' - | otherwise = error badName - - where - components' = split string - badName = "ModuleName.fromString: invalid module name " ++ show string - - split cs = case break (=='.') cs of - (chunk,[]) -> chunk : [] - (chunk,_:rest) -> chunk : split rest - --- | The module name @Main@. --- -main :: ModuleName -main = ModuleName ["Main"] - --- | The individual components of a hierarchical module name. For example --- --- > components (fromString "A.B.C") = ["A", "B", "C"] --- -components :: ModuleName -> [String] -components (ModuleName ms) = ms - --- | Convert a module name to a file path, but without any file extension. --- For example: --- --- > toFilePath (fromString "A.B.C") = "A/B/C" --- -toFilePath :: ModuleName -> FilePath -toFilePath = intercalate [pathSeparator] . components diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Check.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1737 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Check --- Copyright : Lennart Kolmodin 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This has code for checking for various problems in packages. 
There is one --- set of checks that just looks at a 'PackageDescription' in isolation and --- another set of checks that also looks at files in the package. Some of the --- checks are basic sanity checks, others are portability standards that we'd --- like to encourage. There is a 'PackageCheck' type that distinguishes the --- different kinds of check so we can see which ones are appropriate to report --- in different situations. This code gets uses when configuring a package when --- we consider only basic problems. The higher standard is uses when when --- preparing a source tarball and by Hackage when uploading new packages. The --- reason for this is that we want to hold packages that are expected to be --- distributed to a higher standard than packages that are only ever expected --- to be used on the author's own environment. - -module Distribution.PackageDescription.Check ( - -- * Package Checking - PackageCheck(..), - checkPackage, - checkConfiguredPackage, - - -- ** Checking package contents - checkPackageFiles, - checkPackageContent, - CheckPackageContentOps(..), - checkPackageFileNames, - ) where - -import Distribution.PackageDescription -import Distribution.PackageDescription.Configuration -import Distribution.Compiler -import Distribution.System -import Distribution.License -import Distribution.Simple.CCompiler -import Distribution.Simple.Utils hiding (findPackageDesc, notice) -import Distribution.Version -import Distribution.Package -import Distribution.Text -import Language.Haskell.Extension - -import Data.Maybe - ( isNothing, isJust, catMaybes, mapMaybe, maybeToList, fromMaybe ) -import Data.List (sort, group, isPrefixOf, nub, find) -import Control.Monad - ( filterM, liftM ) -import qualified System.Directory as System - ( doesFileExist, doesDirectoryExist ) -import qualified Data.Map as Map - -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ((<>), (<+>)) - -import qualified System.Directory (getDirectoryContents) -import System.IO (openBinaryFile, IOMode(ReadMode), hGetContents) -import System.FilePath - ( (), takeExtension, isRelative, isAbsolute - , splitDirectories, splitPath, splitExtension ) -import System.FilePath.Windows as FilePath.Windows - ( isValid ) - --- | Results of some kind of failed package check. --- --- There are a range of severities, from merely dubious to totally insane. --- All of them come with a human readable explanation. In future we may augment --- them with more machine readable explanations, for example to help an IDE --- suggest automatic corrections. --- -data PackageCheck = - - -- | This package description is no good. There's no way it's going to - -- build sensibly. This should give an error at configure time. - PackageBuildImpossible { explanation :: String } - - -- | A problem that is likely to affect building the package, or an - -- issue that we'd like every package author to be aware of, even if - -- the package is never distributed. - | PackageBuildWarning { explanation :: String } - - -- | An issue that might not be a problem for the package author but - -- might be annoying or detrimental when the package is distributed to - -- users. We should encourage distributed packages to be free from these - -- issues, but occasionally there are justifiable reasons so we cannot - -- ban them entirely. - | PackageDistSuspicious { explanation :: String } - - -- | Like PackageDistSuspicious but will only display warnings - -- rather than causing abnormal exit when you run 'cabal check'. 
- | PackageDistSuspiciousWarn { explanation :: String } - - -- | An issue that is OK in the author's environment but is almost - -- certain to be a portability problem for other environments. We can - -- quite legitimately refuse to publicly distribute packages with these - -- problems. - | PackageDistInexcusable { explanation :: String } - deriving (Eq) - -instance Show PackageCheck where - show notice = explanation notice - -check :: Bool -> PackageCheck -> Maybe PackageCheck -check False _ = Nothing -check True pc = Just pc - -checkSpecVersion :: PackageDescription -> [Int] -> Bool -> PackageCheck - -> Maybe PackageCheck -checkSpecVersion pkg specver cond pc - | specVersion pkg >= Version specver [] = Nothing - | otherwise = check cond pc - --- ------------------------------------------------------------ --- * Standard checks --- ------------------------------------------------------------ - --- | Check for common mistakes and problems in package descriptions. --- --- This is the standard collection of checks covering all aspects except --- for checks that require looking at files within the package. For those --- see 'checkPackageFiles'. --- --- It requires the 'GenericPackageDescription' and optionally a particular --- configuration of that package. If you pass 'Nothing' then we just check --- a version of the generic description using 'flattenPackageDescription'. --- -checkPackage :: GenericPackageDescription - -> Maybe PackageDescription - -> [PackageCheck] -checkPackage gpkg mpkg = - checkConfiguredPackage pkg - ++ checkConditionals gpkg - ++ checkPackageVersions gpkg - ++ checkDevelopmentOnlyFlags gpkg - where - pkg = fromMaybe (flattenPackageDescription gpkg) mpkg - ---TODO: make this variant go away --- we should always know the GenericPackageDescription -checkConfiguredPackage :: PackageDescription -> [PackageCheck] -checkConfiguredPackage pkg = - checkSanity pkg - ++ checkFields pkg - ++ checkLicense pkg - ++ checkSourceRepos pkg - ++ checkGhcOptions pkg - ++ checkCCOptions pkg - ++ checkCPPOptions pkg - ++ checkPaths pkg - ++ checkCabalVersion pkg - - --- ------------------------------------------------------------ --- * Basic sanity checks --- ------------------------------------------------------------ - --- | Check that this package description is sane. --- -checkSanity :: PackageDescription -> [PackageCheck] -checkSanity pkg = - catMaybes [ - - check (null . (\(PackageName n) -> n) . packageName $ pkg) $ - PackageBuildImpossible "No 'name' field." - - , check (null . versionBranch . packageVersion $ pkg) $ - PackageBuildImpossible "No 'version' field." - - , check (all ($ pkg) [ null . executables - , null . testSuites - , null . benchmarks - , isNothing . library ]) $ - PackageBuildImpossible - "No executables, libraries, tests, or benchmarks found. Nothing to do." - - , check (not (null duplicateNames)) $ - PackageBuildImpossible $ "Duplicate sections: " ++ commaSep duplicateNames - ++ ". The name of every executable, test suite, and benchmark section in" - ++ " the package must be unique." - ] - --TODO: check for name clashes case insensitively: windows file systems cannot - --cope. 
- - ++ maybe [] (checkLibrary pkg) (library pkg) - ++ concatMap (checkExecutable pkg) (executables pkg) - ++ concatMap (checkTestSuite pkg) (testSuites pkg) - ++ concatMap (checkBenchmark pkg) (benchmarks pkg) - - ++ catMaybes [ - - check (specVersion pkg > cabalVersion) $ - PackageBuildImpossible $ - "This package description follows version " - ++ display (specVersion pkg) ++ " of the Cabal specification. This " - ++ "tool only supports up to version " ++ display cabalVersion ++ "." - ] - where - exeNames = map exeName $ executables pkg - testNames = map testName $ testSuites pkg - bmNames = map benchmarkName $ benchmarks pkg - duplicateNames = dups $ exeNames ++ testNames ++ bmNames - -checkLibrary :: PackageDescription -> Library -> [PackageCheck] -checkLibrary pkg lib = - catMaybes [ - - check (not (null moduleDuplicates)) $ - PackageBuildImpossible $ - "Duplicate modules in library: " - ++ commaSep (map display moduleDuplicates) - - -- check use of required-signatures/exposed-signatures sections - , checkVersion [1,21] (not (null (requiredSignatures lib))) $ - PackageDistInexcusable $ - "To use the 'required-signatures' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.21'." - - , checkVersion [1,21] (not (null (exposedSignatures lib))) $ - PackageDistInexcusable $ - "To use the 'exposed-signatures' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.21'." - ] - - where - checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck - checkVersion ver cond pc - | specVersion pkg >= Version ver [] = Nothing - | otherwise = check cond pc - - moduleDuplicates = dups (libModules lib ++ - map moduleReexportName (reexportedModules lib)) - -checkExecutable :: PackageDescription -> Executable -> [PackageCheck] -checkExecutable pkg exe = - catMaybes [ - - check (null (modulePath exe)) $ - PackageBuildImpossible $ - "No 'main-is' field found for executable " ++ exeName exe - - , check (not (null (modulePath exe)) - && (not $ fileExtensionSupportedLanguage $ modulePath exe)) $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor), " - ++ "or it may specify a C/C++/obj-C source file." - - , checkSpecVersion pkg [1,17] - (fileExtensionSupportedLanguage (modulePath exe) - && takeExtension (modulePath exe) `notElem` [".hs", ".lhs"]) $ - PackageDistInexcusable $ - "The package uses a C/C++/obj-C source file for the 'main-is' field. " - ++ "To use this feature you must specify 'cabal-version: >= 1.18'." - - , check (not (null moduleDuplicates)) $ - PackageBuildImpossible $ - "Duplicate modules in executable '" ++ exeName exe ++ "': " - ++ commaSep (map display moduleDuplicates) - ] - where - moduleDuplicates = dups (exeModules exe) - -checkTestSuite :: PackageDescription -> TestSuite -> [PackageCheck] -checkTestSuite pkg test = - catMaybes [ - - case testInterface test of - TestSuiteUnsupported tt@(TestTypeUnknown _ _) -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a known type of test suite. " - ++ "The known test suite types are: " - ++ commaSep (map display knownTestTypes) - - TestSuiteUnsupported tt -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a supported test suite version. 
" - ++ "The known test suite types are: " - ++ commaSep (map display knownTestTypes) - _ -> Nothing - - , check (not $ null moduleDuplicates) $ - PackageBuildImpossible $ - "Duplicate modules in test suite '" ++ testName test ++ "': " - ++ commaSep (map display moduleDuplicates) - - , check mainIsWrongExt $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor), " - ++ "or it may specify a C/C++/obj-C source file." - - , checkSpecVersion pkg [1,17] (mainIsNotHsExt && not mainIsWrongExt) $ - PackageDistInexcusable $ - "The package uses a C/C++/obj-C source file for the 'main-is' field. " - ++ "To use this feature you must specify 'cabal-version: >= 1.18'." - ] - where - moduleDuplicates = dups $ testModules test - - mainIsWrongExt = case testInterface test of - TestSuiteExeV10 _ f -> not $ fileExtensionSupportedLanguage f - _ -> False - - mainIsNotHsExt = case testInterface test of - TestSuiteExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] - _ -> False - -checkBenchmark :: PackageDescription -> Benchmark -> [PackageCheck] -checkBenchmark _pkg bm = - catMaybes [ - - case benchmarkInterface bm of - BenchmarkUnsupported tt@(BenchmarkTypeUnknown _ _) -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a known type of benchmark. " - ++ "The known benchmark types are: " - ++ commaSep (map display knownBenchmarkTypes) - - BenchmarkUnsupported tt -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a supported benchmark version. " - ++ "The known benchmark types are: " - ++ commaSep (map display knownBenchmarkTypes) - _ -> Nothing - - , check (not $ null moduleDuplicates) $ - PackageBuildImpossible $ - "Duplicate modules in benchmark '" ++ benchmarkName bm ++ "': " - ++ commaSep (map display moduleDuplicates) - - , check mainIsWrongExt $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor)." - ] - where - moduleDuplicates = dups $ benchmarkModules bm - - mainIsWrongExt = case benchmarkInterface bm of - BenchmarkExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] - _ -> False - --- ------------------------------------------------------------ --- * Additional pure checks --- ------------------------------------------------------------ - -checkFields :: PackageDescription -> [PackageCheck] -checkFields pkg = - catMaybes [ - - check (not . FilePath.Windows.isValid . display . packageName $ pkg) $ - PackageDistInexcusable $ - "Unfortunately, the package name '" ++ display (packageName pkg) - ++ "' is one of the reserved system file names on Windows. Many tools " - ++ "need to convert package names to file names so using this name " - ++ "would cause problems." - - , check (isNothing (buildType pkg)) $ - PackageBuildWarning $ - "No 'build-type' specified. If you do not need a custom Setup.hs or " - ++ "./configure script then use 'build-type: Simple'." - - , case buildType pkg of - Just (UnknownBuildType unknown) -> Just $ - PackageBuildWarning $ - quote unknown ++ " is not a known 'build-type'. " - ++ "The known build types are: " - ++ commaSep (map display knownBuildTypes) - _ -> Nothing - - , check (isJust (setupBuildInfo pkg) && buildType pkg /= Just Custom) $ - PackageBuildWarning $ - "Ignoring the 'custom-setup' section because the 'build-type' is " - ++ "not 'Custom'. Use 'build-type: Custom' if you need to use a " - ++ "custom Setup.hs script." 
- - , check (not (null unknownCompilers)) $ - PackageBuildWarning $ - "Unknown compiler " ++ commaSep (map quote unknownCompilers) - ++ " in 'tested-with' field." - - , check (not (null unknownLanguages)) $ - PackageBuildWarning $ - "Unknown languages: " ++ commaSep unknownLanguages - - , check (not (null unknownExtensions)) $ - PackageBuildWarning $ - "Unknown extensions: " ++ commaSep unknownExtensions - - , check (not (null languagesUsedAsExtensions)) $ - PackageBuildWarning $ - "Languages listed as extensions: " - ++ commaSep languagesUsedAsExtensions - ++ ". Languages must be specified in either the 'default-language' " - ++ " or the 'other-languages' field." - - , check (not (null ourDeprecatedExtensions)) $ - PackageDistSuspicious $ - "Deprecated extensions: " - ++ commaSep (map (quote . display . fst) ourDeprecatedExtensions) - ++ ". " ++ unwords - [ "Instead of '" ++ display ext - ++ "' use '" ++ display replacement ++ "'." - | (ext, Just replacement) <- ourDeprecatedExtensions ] - - , check (null (category pkg)) $ - PackageDistSuspicious "No 'category' field." - - , check (null (maintainer pkg)) $ - PackageDistSuspicious "No 'maintainer' field." - - , check (null (synopsis pkg) && null (description pkg)) $ - PackageDistInexcusable "No 'synopsis' or 'description' field." - - , check (null (description pkg) && not (null (synopsis pkg))) $ - PackageDistSuspicious "No 'description' field." - - , check (null (synopsis pkg) && not (null (description pkg))) $ - PackageDistSuspicious "No 'synopsis' field." - - --TODO: recommend the bug reports URL, author and homepage fields - --TODO: recommend not using the stability field - --TODO: recommend specifying a source repo - - , check (length (synopsis pkg) >= 80) $ - PackageDistSuspicious - "The 'synopsis' field is rather long (max 80 chars is recommended)." - - -- check use of impossible constraints "tested-with: GHC== 6.10 && ==6.12" - , check (not (null testedWithImpossibleRanges)) $ - PackageDistInexcusable $ - "Invalid 'tested-with' version range: " - ++ commaSep (map display testedWithImpossibleRanges) - ++ ". To indicate that you have tested a package with multiple " - ++ "different versions of the same compiler use multiple entries, " - ++ "for example 'tested-with: GHC==6.10.4, GHC==6.12.3' and not " - ++ "'tested-with: GHC==6.10.4 && ==6.12.3'." - ] - where - unknownCompilers = [ name | (OtherCompiler name, _) <- testedWith pkg ] - unknownLanguages = [ name | bi <- allBuildInfo pkg - , UnknownLanguage name <- allLanguages bi ] - unknownExtensions = [ name | bi <- allBuildInfo pkg - , UnknownExtension name <- allExtensions bi - , name `notElem` map display knownLanguages ] - ourDeprecatedExtensions = nub $ catMaybes - [ find ((==ext) . fst) deprecatedExtensions - | bi <- allBuildInfo pkg - , ext <- allExtensions bi ] - languagesUsedAsExtensions = - [ name | bi <- allBuildInfo pkg - , UnknownExtension name <- allExtensions bi - , name `elem` map display knownLanguages ] - - testedWithImpossibleRanges = - [ Dependency (PackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , isNoVersion vr ] - - -checkLicense :: PackageDescription -> [PackageCheck] -checkLicense pkg = - catMaybes [ - - check (license pkg == UnspecifiedLicense) $ - PackageDistInexcusable - "The 'license' field is missing." - - , check (license pkg == AllRightsReserved) $ - PackageDistSuspicious - "The 'license' is AllRightsReserved. Is that really what you want?" 
- , case license pkg of - UnknownLicense l -> Just $ - PackageBuildWarning $ - quote ("license: " ++ l) ++ " is not a recognised license. The " - ++ "known licenses are: " - ++ commaSep (map display knownLicenses) - _ -> Nothing - - , check (license pkg == BSD4) $ - PackageDistSuspicious $ - "Using 'license: BSD4' is almost always a misunderstanding. 'BSD4' " - ++ "refers to the old 4-clause BSD license with the advertising " - ++ "clause. 'BSD3' refers the new 3-clause BSD license." - - , case unknownLicenseVersion (license pkg) of - Just knownVersions -> Just $ - PackageDistSuspicious $ - "'license: " ++ display (license pkg) ++ "' is not a known " - ++ "version of that license. The known versions are " - ++ commaSep (map display knownVersions) - ++ ". If this is not a mistake and you think it should be a known " - ++ "version then please file a ticket." - _ -> Nothing - - , check (license pkg `notElem` [ AllRightsReserved - , UnspecifiedLicense, PublicDomain] - -- AllRightsReserved and PublicDomain are not strictly - -- licenses so don't need license files. - && null (licenseFiles pkg)) $ - PackageDistSuspicious "A 'license-file' is not specified." - ] - where - unknownLicenseVersion (GPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | GPL (Just v') <- knownLicenses ] - unknownLicenseVersion (LGPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | LGPL (Just v') <- knownLicenses ] - unknownLicenseVersion (AGPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | AGPL (Just v') <- knownLicenses ] - unknownLicenseVersion (Apache (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | Apache (Just v') <- knownLicenses ] - unknownLicenseVersion _ = Nothing - -checkSourceRepos :: PackageDescription -> [PackageCheck] -checkSourceRepos pkg = - catMaybes $ concat [[ - - case repoKind repo of - RepoKindUnknown kind -> Just $ PackageDistInexcusable $ - quote kind ++ " is not a recognised kind of source-repository. " - ++ "The repo kind is usually 'head' or 'this'" - _ -> Nothing - - , check (isNothing (repoType repo)) $ - PackageDistInexcusable - "The source-repository 'type' is a required field." - - , check (isNothing (repoLocation repo)) $ - PackageDistInexcusable - "The source-repository 'location' is a required field." - - , check (repoType repo == Just CVS && isNothing (repoModule repo)) $ - PackageDistInexcusable - "For a CVS source-repository, the 'module' is a required field." - - , check (repoKind repo == RepoThis && isNothing (repoTag repo)) $ - PackageDistInexcusable $ - "For the 'this' kind of source-repository, the 'tag' is a required " - ++ "field. It should specify the tag corresponding to this version " - ++ "or release of the package." - - , check (maybe False System.FilePath.isAbsolute (repoSubdir repo)) $ - PackageDistInexcusable - "The 'subdir' field of a source-repository must be a relative path." - ] - | repo <- sourceRepos pkg ] - ---TODO: check location looks like a URL for some repo types. - -checkGhcOptions :: PackageDescription -> [PackageCheck] -checkGhcOptions pkg = - catMaybes [ - - checkFlags ["-fasm"] $ - PackageDistInexcusable $ - "'ghc-options: -fasm' is unnecessary and will not work on CPU " - ++ "architectures other than x86, x86-64, ppc or sparc." - - , checkFlags ["-fvia-C"] $ - PackageDistSuspicious $ - "'ghc-options: -fvia-C' is usually unnecessary. 
If your package " - ++ "needs -via-C for correctness rather than performance then it " - ++ "is using the FFI incorrectly and will probably not work with GHC " - ++ "6.10 or later." - - , checkFlags ["-fhpc"] $ - PackageDistInexcusable $ - "'ghc-options: -fhpc' is not necessary. Use the configure flag " - ++ "--enable-coverage instead." - - , checkFlags ["-prof"] $ - PackageBuildWarning $ - "'ghc-options: -prof' is not necessary and will lead to problems " - ++ "when used on a library. Use the configure flag " - ++ "--enable-library-profiling and/or --enable-profiling." - - , checkFlags ["-o"] $ - PackageBuildWarning $ - "'ghc-options: -o' is not needed. " - ++ "The output files are named automatically." - - , checkFlags ["-hide-package"] $ - PackageBuildWarning $ - "'ghc-options: -hide-package' is never needed. " - ++ "Cabal hides all packages." - - , checkFlags ["--make"] $ - PackageBuildWarning $ - "'ghc-options: --make' is never needed. Cabal uses this automatically." - - , checkFlags ["-main-is"] $ - PackageDistSuspicious $ - "'ghc-options: -main-is' is not portable." - - , checkFlags ["-O0", "-Onot"] $ - PackageDistSuspicious $ - "'ghc-options: -O0' is not needed. " - ++ "Use the --disable-optimization configure flag." - - , checkFlags [ "-O", "-O1"] $ - PackageDistInexcusable $ - "'ghc-options: -O' is not needed. " - ++ "Cabal automatically adds the '-O' flag. " - ++ "Setting it yourself interferes with the --disable-optimization flag." - - , checkFlags ["-O2"] $ - PackageDistSuspiciousWarn $ - "'ghc-options: -O2' is rarely needed. " - ++ "Check that it is giving a real benefit " - ++ "and not just imposing longer compile times on your users." - - , checkFlags ["-split-objs"] $ - PackageBuildWarning $ - "'ghc-options: -split-objs' is not needed. " - ++ "Use the --enable-split-objs configure flag." - - , checkFlags ["-optl-Wl,-s", "-optl-s"] $ - PackageDistInexcusable $ - "'ghc-options: -optl-Wl,-s' is not needed and is not portable to all" - ++ " operating systems. Cabal 1.4 and later automatically strip" - ++ " executables. Cabal also has a flag --disable-executable-stripping" - ++ " which is necessary when building packages for some Linux" - ++ " distributions and using '-optl-Wl,-s' prevents that from working." - - , checkFlags ["-fglasgow-exts"] $ - PackageDistSuspicious $ - "Instead of 'ghc-options: -fglasgow-exts' it is preferable to use " - ++ "the 'extensions' field." - - , check ("-threaded" `elem` lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -threaded' has no effect for libraries. It should " - ++ "only be used for executables." - - , check ("-rtsopts" `elem` lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -rtsopts' has no effect for libraries. It should " - ++ "only be used for executables." - - , check (any (\opt -> "-with-rtsopts" `isPrefixOf` opt) lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -with-rtsopts' has no effect for libraries. It " - ++ "should only be used for executables." 
- - , checkAlternatives "ghc-options" "extensions" - [ (flag, display extension) | flag <- all_ghc_options - , Just extension <- [ghcExtension flag] ] - - , checkAlternatives "ghc-options" "extensions" - [ (flag, extension) | flag@('-':'X':extension) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "cpp-options" $ - [ (flag, flag) | flag@('-':'D':_) <- all_ghc_options ] - ++ [ (flag, flag) | flag@('-':'U':_) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "extra-lib-dirs" - [ (flag, dir) | flag@('-':'L':dir) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "frameworks" - [ (flag, fmwk) | (flag@"-framework", fmwk) <- - zip all_ghc_options (safeTail all_ghc_options) ] - - , checkAlternatives "ghc-options" "extra-framework-dirs" - [ (flag, dir) | (flag@"-framework-path", dir) <- - zip all_ghc_options (safeTail all_ghc_options) ] - ] - - where - all_ghc_options = concatMap get_ghc_options (allBuildInfo pkg) - lib_ghc_options = maybe [] (get_ghc_options . libBuildInfo) (library pkg) - get_ghc_options bi = hcOptions GHC bi ++ hcProfOptions GHC bi - ++ hcSharedOptions GHC bi - - checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkFlags flags = check (any (`elem` flags) all_ghc_options) - - ghcExtension ('-':'f':name) = case name of - "allow-overlapping-instances" -> enable OverlappingInstances - "no-allow-overlapping-instances" -> disable OverlappingInstances - "th" -> enable TemplateHaskell - "no-th" -> disable TemplateHaskell - "ffi" -> enable ForeignFunctionInterface - "no-ffi" -> disable ForeignFunctionInterface - "fi" -> enable ForeignFunctionInterface - "no-fi" -> disable ForeignFunctionInterface - "monomorphism-restriction" -> enable MonomorphismRestriction - "no-monomorphism-restriction" -> disable MonomorphismRestriction - "mono-pat-binds" -> enable MonoPatBinds - "no-mono-pat-binds" -> disable MonoPatBinds - "allow-undecidable-instances" -> enable UndecidableInstances - "no-allow-undecidable-instances" -> disable UndecidableInstances - "allow-incoherent-instances" -> enable IncoherentInstances - "no-allow-incoherent-instances" -> disable IncoherentInstances - "arrows" -> enable Arrows - "no-arrows" -> disable Arrows - "generics" -> enable Generics - "no-generics" -> disable Generics - "implicit-prelude" -> enable ImplicitPrelude - "no-implicit-prelude" -> disable ImplicitPrelude - "implicit-params" -> enable ImplicitParams - "no-implicit-params" -> disable ImplicitParams - "bang-patterns" -> enable BangPatterns - "no-bang-patterns" -> disable BangPatterns - "scoped-type-variables" -> enable ScopedTypeVariables - "no-scoped-type-variables" -> disable ScopedTypeVariables - "extended-default-rules" -> enable ExtendedDefaultRules - "no-extended-default-rules" -> disable ExtendedDefaultRules - _ -> Nothing - ghcExtension "-cpp" = enable CPP - ghcExtension _ = Nothing - - enable e = Just (EnableExtension e) - disable e = Just (DisableExtension e) - -checkCCOptions :: PackageDescription -> [PackageCheck] -checkCCOptions pkg = - catMaybes [ - - checkAlternatives "cc-options" "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_ccOptions ] - - , checkAlternatives "cc-options" "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_ccOptions ] - - , checkAlternatives "cc-options" "extra-lib-dirs" - [ (flag, dir) | 
flag@('-':'L':dir) <- all_ccOptions ] - - , checkAlternatives "ld-options" "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_ldOptions ] - - , checkAlternatives "ld-options" "extra-lib-dirs" - [ (flag, dir) | flag@('-':'L':dir) <- all_ldOptions ] - - , checkCCFlags [ "-O", "-Os", "-O0", "-O1", "-O2", "-O3" ] $ - PackageDistSuspicious $ - "'cc-options: -O[n]' is generally not needed. When building with " - ++ " optimisations Cabal automatically adds '-O2' for C code. " - ++ "Setting it yourself interferes with the --disable-optimization " - ++ "flag." - ] - - where all_ccOptions = [ opts | bi <- allBuildInfo pkg - , opts <- ccOptions bi ] - all_ldOptions = [ opts | bi <- allBuildInfo pkg - , opts <- ldOptions bi ] - - checkCCFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkCCFlags flags = check (any (`elem` flags) all_ccOptions) - -checkCPPOptions :: PackageDescription -> [PackageCheck] -checkCPPOptions pkg = - catMaybes [ - checkAlternatives "cpp-options" "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_cppOptions] - ] - where all_cppOptions = [ opts | bi <- allBuildInfo pkg - , opts <- cppOptions bi ] - -checkAlternatives :: String -> String -> [(String, String)] -> Maybe PackageCheck -checkAlternatives badField goodField flags = - check (not (null badFlags)) $ - PackageBuildWarning $ - "Instead of " ++ quote (badField ++ ": " ++ unwords badFlags) - ++ " use " ++ quote (goodField ++ ": " ++ unwords goodFlags) - - where (badFlags, goodFlags) = unzip flags - -checkPaths :: PackageDescription -> [PackageCheck] -checkPaths pkg = - [ PackageBuildWarning $ - quote (kind ++ ": " ++ path) - ++ " is a relative path outside of the source tree. " - ++ "This will not work when generating a tarball with 'sdist'." - | (path, kind) <- relPaths ++ absPaths - , isOutsideTree path ] - ++ - [ PackageDistInexcusable $ - quote (kind ++ ": " ++ path) ++ " is an absolute path." - | (path, kind) <- relPaths - , isAbsolute path ] - ++ - [ PackageDistInexcusable $ - quote (kind ++ ": " ++ path) ++ " points inside the 'dist' " - ++ "directory. This is not reliable because the location of this " - ++ "directory is configurable by the user (or package manager). In " - ++ "addition the layout of the 'dist' directory is subject to change " - ++ "in future versions of Cabal." - | (path, kind) <- relPaths ++ absPaths - , isInsideDist path ] - ++ - [ PackageDistInexcusable $ - "The 'ghc-options' contains the path '" ++ path ++ "' which points " - ++ "inside the 'dist' directory. This is not reliable because the " - ++ "location of this directory is configurable by the user (or package " - ++ "manager). In addition the layout of the 'dist' directory is subject " - ++ "to change in future versions of Cabal." 
- | bi <- allBuildInfo pkg - , (GHC, flags) <- options bi - , path <- flags - , isInsideDist path ] - where - isOutsideTree path = case splitDirectories path of - "..":_ -> True - ".":"..":_ -> True - _ -> False - isInsideDist path = case map lowercase (splitDirectories path) of - "dist" :_ -> True - ".":"dist":_ -> True - _ -> False - -- paths that must be relative - relPaths = - [ (path, "extra-src-files") | path <- extraSrcFiles pkg ] - ++ [ (path, "extra-tmp-files") | path <- extraTmpFiles pkg ] - ++ [ (path, "extra-doc-files") | path <- extraDocFiles pkg ] - ++ [ (path, "data-files") | path <- dataFiles pkg ] - ++ [ (path, "data-dir") | path <- [dataDir pkg]] - ++ [ (path, "license-file") | path <- licenseFiles pkg ] - ++ concat - [ [ (path, "c-sources") | path <- cSources bi ] - ++ [ (path, "js-sources") | path <- jsSources bi ] - ++ [ (path, "install-includes") | path <- installIncludes bi ] - ++ [ (path, "hs-source-dirs") | path <- hsSourceDirs bi ] - | bi <- allBuildInfo pkg ] - -- paths that are allowed to be absolute - absPaths = concat - [ [ (path, "includes") | path <- includes bi ] - ++ [ (path, "include-dirs") | path <- includeDirs bi ] - ++ [ (path, "extra-lib-dirs") | path <- extraLibDirs bi ] - | bi <- allBuildInfo pkg ] - ---TODO: check sets of paths that would be interpreted differently between Unix --- and windows, ie case-sensitive or insensitive. Things that might clash, or --- conversely be distinguished. - ---TODO: use the tar path checks on all the above paths - --- | Check that the package declares the version in the @\"cabal-version\"@ --- field correctly. --- -checkCabalVersion :: PackageDescription -> [PackageCheck] -checkCabalVersion pkg = - catMaybes [ - - -- check syntax of cabal-version field - check (specVersion pkg >= Version [1,10] [] - && not simpleSpecVersionRangeSyntax) $ - PackageBuildWarning $ - "Packages relying on Cabal 1.10 or later must only specify a " - ++ "version range of the form 'cabal-version: >= x.y'. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." - - -- check syntax of cabal-version field - , check (specVersion pkg < Version [1,9] [] - && not simpleSpecVersionRangeSyntax) $ - PackageDistSuspicious $ - "It is recommended that the 'cabal-version' field only specify a " - ++ "version range of the form '>= x.y'. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'. " - ++ "Tools based on Cabal 1.10 and later will ignore upper bounds." - - -- check syntax of cabal-version field - , checkVersion [1,12] simpleSpecVersionSyntax $ - PackageBuildWarning $ - "With Cabal 1.10 or earlier, the 'cabal-version' field must use " - ++ "range syntax rather than a simple version number. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." - - -- check use of test suite sections - , checkVersion [1,8] (not (null $ testSuites pkg)) $ - PackageDistInexcusable $ - "The 'test-suite' section is new in Cabal 1.10. " - ++ "Unfortunately it messes up the parser in older Cabal versions " - ++ "so you must specify at least 'cabal-version: >= 1.8', but note " - ++ "that only Cabal 1.10 and later can actually run such test suites." - - -- check use of default-language field - -- note that we do not need to do an equivalent check for the - -- other-language field since that one does not change behaviour - , checkVersion [1,10] (any isJust (buildInfoField defaultLanguage)) $ - PackageBuildWarning $ - "To use the 'default-language' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.10'." 
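For a concrete sense of the path checks above, here is a rough, hand-evaluated sketch (not part of the removed module, and only meaningful inside it, since checkPaths is not exported; emptyPackageDescription comes from Distribution.PackageDescription, which this file already imports):

    pathExamples :: [PackageCheck]
    pathExamples = checkPaths
      (emptyPackageDescription
         { dataFiles = [ "../shared/data.txt" -- relative path outside the source tree
                       , "/usr/share/foo.txt" -- absolute path in a relative-only field
                       , "dist/generated.txt" -- points inside the 'dist' directory
                       ] })
    -- expected: one PackageBuildWarning and two PackageDistInexcusable results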
- - , check (specVersion pkg >= Version [1,10] [] - && (any isNothing (buildInfoField defaultLanguage))) $ - PackageBuildWarning $ - "Packages using 'cabal-version: >= 1.10' must specify the " - ++ "'default-language' field for each component (e.g. Haskell98 or " - ++ "Haskell2010). If a component uses different languages in " - ++ "different modules then list the other ones in the " - ++ "'other-languages' field." - - -- check use of reexported-modules sections - , checkVersion [1,21] - (maybe False (not.null.reexportedModules) (library pkg)) $ - PackageDistInexcusable $ - "To use the 'reexported-modules' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.21'." - - -- check use of thinning and renaming - , checkVersion [1,21] (not (null depsUsingThinningRenamingSyntax)) $ - PackageDistInexcusable $ - "The package uses " - ++ "thinning and renaming in the 'build-depends' field: " - ++ commaSep (map display depsUsingThinningRenamingSyntax) - ++ ". To use this new syntax, the package needs to specify at least " - ++ "'cabal-version: >= 1.21'." - - -- check use of 'extra-framework-dirs' field - , checkVersion [1,23] (any (not . null) (buildInfoField extraFrameworkDirs)) $ - -- Just a warning, because this won't break on old Cabal versions. - PackageDistSuspiciousWarn $ - "To use the 'extra-framework-dirs' field the package needs to specify" - ++ " at least 'cabal-version: >= 1.23'." - - -- check use of default-extensions field - -- don't need to do the equivalent check for other-extensions - , checkVersion [1,10] (any (not . null) (buildInfoField defaultExtensions)) $ - PackageBuildWarning $ - "To use the 'default-extensions' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.10'." - - -- check use of extensions field - , check (specVersion pkg >= Version [1,10] [] - && (any (not . null) (buildInfoField oldExtensions))) $ - PackageBuildWarning $ - "For packages using 'cabal-version: >= 1.10' the 'extensions' " - ++ "field is deprecated. The new 'default-extensions' field lists " - ++ "extensions that are used in all modules in the component, while " - ++ "the 'other-extensions' field lists extensions that are used in " - ++ "some modules, e.g. via the {-# LANGUAGE #-} pragma." - - -- check use of "foo (>= 1.0 && < 1.4) || >=1.8 " version-range syntax - , checkVersion [1,8] (not (null versionRangeExpressions)) $ - PackageDistInexcusable $ - "The package uses full version-range expressions " - ++ "in a 'build-depends' field: " - ++ commaSep (map displayRawDependency versionRangeExpressions) - ++ ". To use this new syntax the package needs to specify at least " - ++ "'cabal-version: >= 1.8'. Alternatively, if broader compatibility " - ++ "is important, then convert to conjunctive normal form, and use " - ++ "multiple 'build-depends:' lines, one conjunct per line." - - -- check use of "build-depends: foo == 1.*" syntax - , checkVersion [1,6] (not (null depsUsingWildcardSyntax)) $ - PackageDistInexcusable $ - "The package uses wildcard syntax in the 'build-depends' field: " - ++ commaSep (map display depsUsingWildcardSyntax) - ++ ". To use this new syntax the package needs to specify at least " - ++ "'cabal-version: >= 1.6'. 
Alternatively, if broader compatibility " - ++ "is important then use: " ++ commaSep - [ display (Dependency name (eliminateWildcardSyntax versionRange)) - | Dependency name versionRange <- depsUsingWildcardSyntax ] - - -- check use of "tested-with: GHC (>= 1.0 && < 1.4) || >=1.8 " syntax - , checkVersion [1,8] (not (null testedWithVersionRangeExpressions)) $ - PackageDistInexcusable $ - "The package uses full version-range expressions " - ++ "in a 'tested-with' field: " - ++ commaSep (map displayRawDependency testedWithVersionRangeExpressions) - ++ ". To use this new syntax the package needs to specify at least " - ++ "'cabal-version: >= 1.8'." - - -- check use of "tested-with: GHC == 6.12.*" syntax - , checkVersion [1,6] (not (null testedWithUsingWildcardSyntax)) $ - PackageDistInexcusable $ - "The package uses wildcard syntax in the 'tested-with' field: " - ++ commaSep (map display testedWithUsingWildcardSyntax) - ++ ". To use this new syntax the package need to specify at least " - ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " - ++ "is important then use: " ++ commaSep - [ display (Dependency name (eliminateWildcardSyntax versionRange)) - | Dependency name versionRange <- testedWithUsingWildcardSyntax ] - - -- check use of "data-files: data/*.txt" syntax - , checkVersion [1,6] (not (null dataFilesUsingGlobSyntax)) $ - PackageDistInexcusable $ - "Using wildcards like " - ++ commaSep (map quote $ take 3 dataFilesUsingGlobSyntax) - ++ " in the 'data-files' field requires 'cabal-version: >= 1.6'. " - ++ "Alternatively if you require compatibility with earlier Cabal " - ++ "versions then list all the files explicitly." - - -- check use of "extra-source-files: mk/*.in" syntax - , checkVersion [1,6] (not (null extraSrcFilesUsingGlobSyntax)) $ - PackageDistInexcusable $ - "Using wildcards like " - ++ commaSep (map quote $ take 3 extraSrcFilesUsingGlobSyntax) - ++ " in the 'extra-source-files' field requires " - ++ "'cabal-version: >= 1.6'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then list all the files " - ++ "explicitly." - - -- check use of "source-repository" section - , checkVersion [1,6] (not (null (sourceRepos pkg))) $ - PackageDistInexcusable $ - "The 'source-repository' section is new in Cabal 1.6. " - ++ "Unfortunately it messes up the parser in earlier Cabal versions " - ++ "so you need to specify 'cabal-version: >= 1.6'." - - -- check for new licenses - , checkVersion [1,4] (license pkg `notElem` compatLicenses) $ - PackageDistInexcusable $ - "Unfortunately the license " ++ quote (display (license pkg)) - ++ " messes up the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then use 'OtherLicense'." - - -- check for new language extensions - , checkVersion [1,2,3] (not (null mentionedExtensionsThatNeedCabal12)) $ - PackageDistInexcusable $ - "Unfortunately the language extensions " - ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal12) - ++ " break the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.2.3'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then you may be able to " - ++ "use an equivalent compiler-specific flag." - - , checkVersion [1,4] (not (null mentionedExtensionsThatNeedCabal14)) $ - PackageDistInexcusable $ - "Unfortunately the language extensions " - ++ commaSep (map (quote . 
display) mentionedExtensionsThatNeedCabal14) - ++ " break the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then you may be able to " - ++ "use an equivalent compiler-specific flag." - - , check (specVersion pkg >= Version [1,23] [] - && isNothing (setupBuildInfo pkg) - && buildType pkg == Just Custom) $ - PackageBuildWarning $ - "Packages using 'cabal-version: >= 1.23' with 'build-type: Custom' " - ++ "must use a 'custom-setup' section with a 'setup-depends' field " - ++ "that specifies the dependencies of the Setup.hs script itself. " - ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " - ++ "so a simple example would be 'setup-depends: base, Cabal'." - - , check (specVersion pkg < Version [1,23] [] - && isNothing (setupBuildInfo pkg) - && buildType pkg == Just Custom) $ - PackageDistSuspiciousWarn $ - "From version 1.23 cabal supports specifiying explicit dependencies " - ++ "for Custom setup scripts. Consider using cabal-version >= 1.23 and " - ++ "adding a 'custom-setup' section with a 'setup-depends' field " - ++ "that specifies the dependencies of the Setup.hs script itself. " - ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " - ++ "so a simple example would be 'setup-depends: base, Cabal'." - ] - where - -- Perform a check on packages that use a version of the spec less than - -- the version given. This is for cases where a new Cabal version adds - -- a new feature and we want to check that it is not used prior to that - -- version. - checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck - checkVersion ver cond pc - | specVersion pkg >= Version ver [] = Nothing - | otherwise = check cond pc - - buildInfoField field = map field (allBuildInfo pkg) - dataFilesUsingGlobSyntax = filter usesGlobSyntax (dataFiles pkg) - extraSrcFilesUsingGlobSyntax = filter usesGlobSyntax (extraSrcFiles pkg) - usesGlobSyntax str = case parseFileGlob str of - Just (FileGlob _ _) -> True - _ -> False - - versionRangeExpressions = - [ dep | dep@(Dependency _ vr) <- buildDepends pkg - , usesNewVersionRangeSyntax vr ] - - testedWithVersionRangeExpressions = - [ Dependency (PackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , usesNewVersionRangeSyntax vr ] - - simpleSpecVersionRangeSyntax = - either (const True) - (foldVersionRange' - True - (\_ -> False) - (\_ -> False) (\_ -> False) - (\_ -> True) -- >= - (\_ -> False) - (\_ _ -> False) - (\_ _ -> False) (\_ _ -> False) - id) - (specVersionRaw pkg) - - -- is the cabal-version field a simple version number, rather than a range - simpleSpecVersionSyntax = - either (const True) (const False) (specVersionRaw pkg) - - usesNewVersionRangeSyntax :: VersionRange -> Bool - usesNewVersionRangeSyntax = - (> 2) -- uses the new syntax if depth is more than 2 - . 
foldVersionRange' - (1 :: Int) - (const 1) - (const 1) (const 1) - (const 1) (const 1) - (const (const 1)) - (+) (+) - (const 3) -- uses new ()'s syntax - - depsUsingWildcardSyntax = [ dep | dep@(Dependency _ vr) <- buildDepends pkg - , usesWildcardSyntax vr ] - - -- TODO: If the user writes build-depends: foo with (), this is - -- indistinguishable from build-depends: foo, so there won't be an - -- error even though there should be - depsUsingThinningRenamingSyntax = - [ name - | bi <- allBuildInfo pkg - , (name, _) <- Map.toList (targetBuildRenaming bi) ] - - testedWithUsingWildcardSyntax = - [ Dependency (PackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , usesWildcardSyntax vr ] - - usesWildcardSyntax :: VersionRange -> Bool - usesWildcardSyntax = - foldVersionRange' - False (const False) - (const False) (const False) - (const False) (const False) - (\_ _ -> True) -- the wildcard case - (||) (||) id - - eliminateWildcardSyntax = - foldVersionRange' - anyVersion thisVersion - laterVersion earlierVersion - orLaterVersion orEarlierVersion - (\v v' -> intersectVersionRanges (orLaterVersion v) (earlierVersion v')) - intersectVersionRanges unionVersionRanges id - - compatLicenses = [ GPL Nothing, LGPL Nothing, AGPL Nothing, BSD3, BSD4 - , PublicDomain, AllRightsReserved - , UnspecifiedLicense, OtherLicense ] - - mentionedExtensions = [ ext | bi <- allBuildInfo pkg - , ext <- allExtensions bi ] - mentionedExtensionsThatNeedCabal12 = - nub (filter (`elem` compatExtensionsExtra) mentionedExtensions) - - -- As of Cabal-1.4 we can add new extensions without worrying about - -- breaking old versions of cabal. - mentionedExtensionsThatNeedCabal14 = - nub (filter (`notElem` compatExtensions) mentionedExtensions) - - -- The known extensions in Cabal-1.2.3 - compatExtensions = - map EnableExtension - [ OverlappingInstances, UndecidableInstances, IncoherentInstances - , RecursiveDo, ParallelListComp, MultiParamTypeClasses - , FunctionalDependencies, Rank2Types - , RankNTypes, PolymorphicComponents, ExistentialQuantification - , ScopedTypeVariables, ImplicitParams, FlexibleContexts - , FlexibleInstances, EmptyDataDecls, CPP, BangPatterns - , TypeSynonymInstances, TemplateHaskell, ForeignFunctionInterface - , Arrows, Generics, NamedFieldPuns, PatternGuards - , GeneralizedNewtypeDeriving, ExtensibleRecords, RestrictedTypeSynonyms - , HereDocuments] ++ - map DisableExtension - [MonomorphismRestriction, ImplicitPrelude] ++ - compatExtensionsExtra - - -- The extra known extensions in Cabal-1.2.3 vs Cabal-1.1.6 - -- (Cabal-1.1.6 came with ghc-6.6. Cabal-1.2 came with ghc-6.8) - compatExtensionsExtra = - map EnableExtension - [ KindSignatures, MagicHash, TypeFamilies, StandaloneDeriving - , UnicodeSyntax, PatternSignatures, UnliftedFFITypes, LiberalTypeSynonyms - , TypeOperators, RecordWildCards, RecordPuns, DisambiguateRecordFields - , OverloadedStrings, GADTs, RelaxedPolyRec - , ExtendedDefaultRules, UnboxedTuples, DeriveDataTypeable - , ConstrainedClassMethods - ] ++ - map DisableExtension - [MonoPatBinds] - --- | A variation on the normal 'Text' instance, shows any ()'s in the original --- textual syntax. We need to show these otherwise it's confusing to users when --- we complain of their presence but do not pretty print them! --- -displayRawVersionRange :: VersionRange -> String -displayRawVersionRange = - Disp.render - . fst - . 
foldVersionRange' -- precedence: - -- All the same as the usual pretty printer, except for the parens - ( Disp.text "-any" , 0 :: Int) - (\v -> (Disp.text "==" <> disp v , 0)) - (\v -> (Disp.char '>' <> disp v , 0)) - (\v -> (Disp.char '<' <> disp v , 0)) - (\v -> (Disp.text ">=" <> disp v , 0)) - (\v -> (Disp.text "<=" <> disp v , 0)) - (\v _ -> (Disp.text "==" <> dispWild v , 0)) - (\(r1, p1) (r2, p2) -> - (punct 2 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2)) - (\(r1, p1) (r2, p2) -> - (punct 1 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1)) - (\(r, _ ) -> (Disp.parens r, 0)) -- parens - - where - dispWild (Version b _) = - Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) - <> Disp.text ".*" - punct p p' | p < p' = Disp.parens - | otherwise = id - -displayRawDependency :: Dependency -> String -displayRawDependency (Dependency pkg vr) = - display pkg ++ " " ++ displayRawVersionRange vr - - --- ------------------------------------------------------------ --- * Checks on the GenericPackageDescription --- ------------------------------------------------------------ - --- | Check the build-depends fields for any weirdness or bad practise. --- -checkPackageVersions :: GenericPackageDescription -> [PackageCheck] -checkPackageVersions pkg = - catMaybes [ - - -- Check that the version of base is bounded above. - -- For example this bans "build-depends: base >= 3". - -- It should probably be "build-depends: base >= 3 && < 4" - -- which is the same as "build-depends: base == 3.*" - check (not (boundedAbove baseDependency)) $ - PackageDistInexcusable $ - "The dependency 'build-depends: base' does not specify an upper " - ++ "bound on the version number. Each major release of the 'base' " - ++ "package changes the API in various ways and most packages will " - ++ "need some changes to compile with it. The recommended practise " - ++ "is to specify an upper bound on the version of the 'base' " - ++ "package. This ensures your package will continue to build when a " - ++ "new major version of the 'base' package is released. If you are " - ++ "not sure what upper bound to use then use the next major " - ++ "version. For example if you have tested your package with 'base' " - ++ "version 4.5 and 4.6 then use 'build-depends: base >= 4.5 && < 4.7'." - - ] - where - -- TODO: What we really want to do is test if there exists any - -- configuration in which the base version is unbounded above. - -- However that's a bit tricky because there are many possible - -- configurations. As a cheap easy and safe approximation we will - -- pick a single "typical" configuration and check if that has an - -- open upper bound. To get a typical configuration we finalise - -- using no package index and the current platform. - finalised = finalizePackageDescription - [] (const True) buildPlatform - (unknownCompilerInfo - (CompilerId buildCompilerFlavor (Version [] [])) NoAbiTag) - [] pkg - baseDependency = case finalised of - Right (pkg', _) | not (null baseDeps) -> - foldr intersectVersionRanges anyVersion baseDeps - where - baseDeps = - [ vr | Dependency (PackageName "base") vr <- buildDepends pkg' ] - - -- Just in case finalizePackageDescription fails for any reason, - -- or if the package doesn't depend on the base package at all, - -- then we will just skip the check, since boundedAbove noVersion = True - _ -> noVersion - - boundedAbove :: VersionRange -> Bool - boundedAbove vr = case asVersionIntervals vr of - [] -> True -- this is the inconsistent version range. 
- intervals -> case last intervals of - (_, UpperBound _ _) -> True - (_, NoUpperBound ) -> False - - -checkConditionals :: GenericPackageDescription -> [PackageCheck] -checkConditionals pkg = - catMaybes [ - - check (not $ null unknownOSs) $ - PackageDistInexcusable $ - "Unknown operating system name " - ++ commaSep (map quote unknownOSs) - - , check (not $ null unknownArches) $ - PackageDistInexcusable $ - "Unknown architecture name " - ++ commaSep (map quote unknownArches) - - , check (not $ null unknownImpls) $ - PackageDistInexcusable $ - "Unknown compiler name " - ++ commaSep (map quote unknownImpls) - ] - where - unknownOSs = [ os | OS (OtherOS os) <- conditions ] - unknownArches = [ arch | Arch (OtherArch arch) <- conditions ] - unknownImpls = [ impl | Impl (OtherCompiler impl) _ <- conditions ] - conditions = maybe [] fvs (condLibrary pkg) - ++ concatMap (fvs . snd) (condExecutables pkg) - fvs (CondNode _ _ ifs) = concatMap compfv ifs -- free variables - compfv (c, ct, mct) = condfv c ++ fvs ct ++ maybe [] fvs mct - condfv c = case c of - Var v -> [v] - Lit _ -> [] - CNot c1 -> condfv c1 - COr c1 c2 -> condfv c1 ++ condfv c2 - CAnd c1 c2 -> condfv c1 ++ condfv c2 - -checkDevelopmentOnlyFlagsBuildInfo :: BuildInfo -> [PackageCheck] -checkDevelopmentOnlyFlagsBuildInfo bi = - catMaybes [ - - check has_WerrorWall $ - PackageDistInexcusable $ - "'ghc-options: -Wall -Werror' makes the package very easy to " - ++ "break with future GHC versions because new GHC versions often " - ++ "add new warnings. Use just 'ghc-options: -Wall' instead." - ++ extraExplanation - - , check (not has_WerrorWall && has_Werror) $ - PackageDistInexcusable $ - "'ghc-options: -Werror' makes the package easy to " - ++ "break with future GHC versions because new GHC versions often " - ++ "add new warnings. " - ++ extraExplanation - - , checkFlags ["-fdefer-type-errors"] $ - PackageDistInexcusable $ - "'ghc-options: -fdefer-type-errors' is fine during development but " - ++ "is not appropriate for a distributed package. " - ++ extraExplanation - - -- -dynamic is not a debug flag - , check (any (\opt -> "-d" `isPrefixOf` opt && opt /= "-dynamic") - ghc_options) $ - PackageDistInexcusable $ - "'ghc-options: -d*' debug flags are not appropriate " - ++ "for a distributed package. " - ++ extraExplanation - - , checkFlags ["-fprof-auto", "-fprof-auto-top", "-fprof-auto-calls", - "-fprof-cafs", "-fno-prof-count-entries", - "-auto-all", "-auto", "-caf-all"] $ - PackageDistSuspicious $ - "'ghc-options: -fprof*' profiling flags are typically not " - ++ "appropriate for a distributed library package. These flags are " - ++ "useful to profile this package, but when profiling other packages " - ++ "that use this one these flags clutter the profile output with " - ++ "excessive detail. If you think other packages really want to see " - ++ "cost centres from this package then use '-fprof-auto-exported' " - ++ "which puts cost centres only on exported functions. " - ++ extraExplanation - ] - where - extraExplanation = - " Alternatively, if you want to use this, make it conditional based " - ++ "on a Cabal configuration flag (with 'manual: True' and 'default: " - ++ "False') and enable that flag during development." 
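The development-only-flags checks above can be exercised on a single BuildInfo; a minimal sketch (not part of the removed module, relying on checkDevelopmentOnlyFlagsBuildInfo, whose helper definitions continue just below; emptyBuildInfo, options and GHC come from modules this file already imports):

    -- A component carrying 'ghc-options: -Wall -Werror' should trip the
    -- -Wall/-Werror warning above.
    werrorExample :: [PackageCheck]
    werrorExample =
      checkDevelopmentOnlyFlagsBuildInfo
        (emptyBuildInfo { options = [(GHC, ["-Wall", "-Werror"])] })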
- - has_WerrorWall = has_Werror && ( has_Wall || has_W ) - has_Werror = "-Werror" `elem` ghc_options - has_Wall = "-Wall" `elem` ghc_options - has_W = "-W" `elem` ghc_options - ghc_options = hcOptions GHC bi ++ hcProfOptions GHC bi - ++ hcSharedOptions GHC bi - - checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkFlags flags = check (any (`elem` flags) ghc_options) - -checkDevelopmentOnlyFlags :: GenericPackageDescription -> [PackageCheck] -checkDevelopmentOnlyFlags pkg = - concatMap checkDevelopmentOnlyFlagsBuildInfo - [ bi - | (conditions, bi) <- allConditionalBuildInfo - , not (any guardedByManualFlag conditions) ] - where - guardedByManualFlag = definitelyFalse - - -- We've basically got three-valued logic here: True, False or unknown - -- hence this pattern to propagate the unknown cases properly. - definitelyFalse (Var (Flag n)) = maybe False not (Map.lookup n manualFlags) - definitelyFalse (Var _) = False - definitelyFalse (Lit b) = not b - definitelyFalse (CNot c) = definitelyTrue c - definitelyFalse (COr c1 c2) = definitelyFalse c1 && definitelyFalse c2 - definitelyFalse (CAnd c1 c2) = definitelyFalse c1 || definitelyFalse c2 - - definitelyTrue (Var (Flag n)) = fromMaybe False (Map.lookup n manualFlags) - definitelyTrue (Var _) = False - definitelyTrue (Lit b) = b - definitelyTrue (CNot c) = definitelyFalse c - definitelyTrue (COr c1 c2) = definitelyTrue c1 || definitelyTrue c2 - definitelyTrue (CAnd c1 c2) = definitelyTrue c1 && definitelyTrue c2 - - manualFlags = Map.fromList - [ (flagName flag, flagDefault flag) - | flag <- genPackageFlags pkg - , flagManual flag ] - - allConditionalBuildInfo :: [([Condition ConfVar], BuildInfo)] - allConditionalBuildInfo = - concatMap (collectCondTreePaths libBuildInfo) - (maybeToList (condLibrary pkg)) - - ++ concatMap (collectCondTreePaths buildInfo . snd) - (condExecutables pkg) - - ++ concatMap (collectCondTreePaths testBuildInfo . snd) - (condTestSuites pkg) - - ++ concatMap (collectCondTreePaths benchmarkBuildInfo . snd) - (condBenchmarks pkg) - - -- get all the leaf BuildInfo, paired up with the path (in the tree sense) - -- of if-conditions that guard it - collectCondTreePaths :: (a -> b) - -> CondTree v c a - -> [([Condition v], b)] - collectCondTreePaths mapData = go [] - where - go conditions condNode = - -- the data at this level in the tree: - (reverse conditions, mapData (condTreeData condNode)) - - : concat - [ go (condition:conditions) ifThen - | (condition, ifThen, _) <- condTreeComponents condNode ] - - ++ concat - [ go (condition:conditions) elseThen - | (condition, _, Just elseThen) <- condTreeComponents condNode ] - - --- ------------------------------------------------------------ --- * Checks involving files in the package --- ------------------------------------------------------------ - --- | Sanity check things that require IO. It looks at the files in the --- package and expects to find the package unpacked at the given file path. --- -checkPackageFiles :: PackageDescription -> FilePath -> IO [PackageCheck] -checkPackageFiles pkg root = checkPackageContent checkFilesIO pkg - where - checkFilesIO = CheckPackageContentOps { - doesFileExist = System.doesFileExist . relative, - doesDirectoryExist = System.doesDirectoryExist . relative, - getDirectoryContents = System.Directory.getDirectoryContents . relative, - getFileContents = \f -> openBinaryFile (relative f) ReadMode >>= hGetContents - } - relative path = root </> path - --- | A record of operations needed to check the contents of packages. 
--- Used by 'checkPackageContent'. --- -data CheckPackageContentOps m = CheckPackageContentOps { - doesFileExist :: FilePath -> m Bool, - doesDirectoryExist :: FilePath -> m Bool, - getDirectoryContents :: FilePath -> m [FilePath], - getFileContents :: FilePath -> m String - } - --- | Sanity check things that require looking at files in the package. --- This is a generalised version of 'checkPackageFiles' that can work in any --- monad for which you can provide 'CheckPackageContentOps' operations. --- --- The point of this extra generality is to allow doing checks in some virtual --- file system, for example a tarball in memory. --- -checkPackageContent :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkPackageContent ops pkg = do - cabalBomError <- checkCabalFileBOM ops - licenseErrors <- checkLicensesExist ops pkg - setupError <- checkSetupExists ops pkg - configureError <- checkConfigureExists ops pkg - localPathErrors <- checkLocalPathsExist ops pkg - vcsLocation <- checkMissingVcsInfo ops pkg - - return $ licenseErrors - ++ catMaybes [cabalBomError, setupError, configureError] - ++ localPathErrors - ++ vcsLocation - -checkCabalFileBOM :: Monad m => CheckPackageContentOps m - -> m (Maybe PackageCheck) -checkCabalFileBOM ops = do - epdfile <- findPackageDesc ops - case epdfile of - Left pc -> return $ Just pc - Right pdfile -> (flip check pc . startsWithBOM . fromUTF8) `liftM` (getFileContents ops pdfile) - where pc = PackageDistInexcusable $ - pdfile ++ " starts with a Unicode byte order mark (BOM). This may cause problems with older cabal versions." - --- |Find a package description file in the given directory. Looks for --- @.cabal@ files. Like 'Distribution.Simple.Utils.findPackageDesc', --- but generalized over monads. -findPackageDesc :: Monad m => CheckPackageContentOps m - -> m (Either PackageCheck FilePath) -- ^<pkgname>.cabal -findPackageDesc ops - = do let dir = "." - files <- getDirectoryContents ops dir - -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal - -- file we filter to exclude dirs and null base file names: - cabalFiles <- filterM (doesFileExist ops) - [ dir </> file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == ".cabal" ] - case cabalFiles of - [] -> return (Left $ PackageBuildImpossible noDesc) - [cabalFile] -> return (Right cabalFile) - multiple -> return (Left $ PackageBuildImpossible $ multiDesc multiple) - - where - noDesc :: String - noDesc = "No cabal file found.\n" - ++ "Please create a package description file <pkgname>.cabal" - - multiDesc :: [String] -> String - multiDesc l = "Multiple cabal files found.\n" - ++ "Please use only one of: " - ++ intercalate ", " l - -checkLicensesExist :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkLicensesExist ops pkg = do - exists <- mapM (doesFileExist ops) (licenseFiles pkg) - return - [ PackageBuildWarning $ - "The '" ++ fieldname ++ "' field refers to the file " - ++ quote file ++ " which does not exist." 
- | (file, False) <- zip (licenseFiles pkg) exists ] - where - fieldname | length (licenseFiles pkg) == 1 = "license-file" - | otherwise = "license-files" - -checkSetupExists :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m (Maybe PackageCheck) -checkSetupExists ops pkg = do - let simpleBuild = buildType pkg == Just Simple - hsexists <- doesFileExist ops "Setup.hs" - lhsexists <- doesFileExist ops "Setup.lhs" - return $ check (not simpleBuild && not hsexists && not lhsexists) $ - PackageDistInexcusable $ - "The package is missing a Setup.hs or Setup.lhs script." - -checkConfigureExists :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m (Maybe PackageCheck) -checkConfigureExists ops PackageDescription { buildType = Just Configure } = do - exists <- doesFileExist ops "configure" - return $ check (not exists) $ - PackageBuildWarning $ - "The 'build-type' is 'Configure' but there is no 'configure' script. " - ++ "You probably need to run 'autoreconf -i' to generate it." -checkConfigureExists _ _ = return Nothing - -checkLocalPathsExist :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkLocalPathsExist ops pkg = do - let dirs = [ (dir, kind) - | bi <- allBuildInfo pkg - , (dir, kind) <- - [ (dir, "extra-lib-dirs") | dir <- extraLibDirs bi ] - ++ [ (dir, "extra-framework-dirs") - | dir <- extraFrameworkDirs bi ] - ++ [ (dir, "include-dirs") | dir <- includeDirs bi ] - ++ [ (dir, "hs-source-dirs") | dir <- hsSourceDirs bi ] - , isRelative dir ] - missing <- filterM (liftM not . doesDirectoryExist ops . fst) dirs - return [ PackageBuildWarning { - explanation = quote (kind ++ ": " ++ dir) - ++ " directory does not exist." - } - | (dir, kind) <- missing ] - -checkMissingVcsInfo :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkMissingVcsInfo ops pkg | null (sourceRepos pkg) = do - vcsInUse <- liftM or $ mapM (doesDirectoryExist ops) repoDirnames - if vcsInUse - then return [ PackageDistSuspicious message ] - else return [] - where - repoDirnames = [ dirname | repo <- knownRepoTypes - , dirname <- repoTypeDirname repo ] - message = "When distributing packages it is encouraged to specify source " - ++ "control information in the .cabal file using one or more " - ++ "'source-repository' sections. See the Cabal user guide for " - ++ "details." - -checkMissingVcsInfo _ _ = return [] - -repoTypeDirname :: RepoType -> [FilePath] -repoTypeDirname Darcs = ["_darcs"] -repoTypeDirname Git = [".git"] -repoTypeDirname SVN = [".svn"] -repoTypeDirname CVS = ["CVS"] -repoTypeDirname Mercurial = [".hg"] -repoTypeDirname GnuArch = [".arch-params"] -repoTypeDirname Bazaar = [".bzr"] -repoTypeDirname Monotone = ["_MTN"] -repoTypeDirname _ = [] - --- ------------------------------------------------------------ --- * Checks involving files in the package --- ------------------------------------------------------------ - --- | Check the names of all files in a package for portability problems. This --- should be done for example when creating or validating a package tarball. --- -checkPackageFileNames :: [FilePath] -> [PackageCheck] -checkPackageFileNames files = - (take 1 . mapMaybe checkWindowsPath $ files) - ++ (take 1 . mapMaybe checkTarPath $ files) - -- If we get any of these checks triggering then we're likely to get - -- many, and that's probably not helpful, so return at most one. 
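The CheckPackageContentOps record above is what lets these content checks run outside IO; a minimal sketch of that idea (illustrative and simplified, not part of the removed module: the name inMemoryOps and the Map-of-files layout are made up), driving checkPackageContent over an in-memory file map:

    import qualified Data.Map as Map
    import Data.List (isPrefixOf)
    import Data.Functor.Identity (Identity, runIdentity)
    import Distribution.PackageDescription.Check

    -- Very simplified: every key in the map is a file, directories are inferred
    -- from path prefixes, and directory listings just return every key.
    inMemoryOps :: Map.Map FilePath String -> CheckPackageContentOps Identity
    inMemoryOps files = CheckPackageContentOps
      { doesFileExist        = \p -> return (p `Map.member` files)
      , doesDirectoryExist   = \p -> return (any ((p ++ "/") `isPrefixOf`) (Map.keys files))
      , getDirectoryContents = \_ -> return (Map.keys files)
      , getFileContents      = \p -> return (Map.findWithDefault "" p files)
      }

    -- e.g. runIdentity (checkPackageContent (inMemoryOps fs) pkg)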
- -checkWindowsPath :: FilePath -> Maybe PackageCheck -checkWindowsPath path = - check (not $ FilePath.Windows.isValid path') $ - PackageDistInexcusable $ - "Unfortunately, the file " ++ quote path ++ " is not a valid file " - ++ "name on Windows which would cause portability problems for this " - ++ "package. Windows file names cannot contain any of the characters " - ++ "\":*?<>|\" and there are a few reserved names including \"aux\", " - ++ "\"nul\", \"con\", \"prn\", \"com1-9\", \"lpt1-9\" and \"clock$\"." - where - path' = ".\\" ++ path - -- force a relative name to catch invalid file names like "f:oo" which - -- otherwise parse as file "oo" in the current directory on the 'f' drive. - --- | Check a file name is valid for the portable POSIX tar format. --- --- The POSIX tar format has a restriction on the length of file names. It is --- unfortunately not a simple restriction like a maximum length. The exact --- restriction is that either the whole path be 100 characters or less, or it --- be possible to split the path on a directory separator such that the first --- part is 155 characters or less and the second part 100 characters or less. --- -checkTarPath :: FilePath -> Maybe PackageCheck -checkTarPath path - | length path > 255 = Just longPath - | otherwise = case pack nameMax (reverse (splitPath path)) of - Left err -> Just err - Right [] -> Nothing - Right (first:rest) -> case pack prefixMax remainder of - Left err -> Just err - Right [] -> Nothing - Right (_:_) -> Just noSplit - where - -- drop the '/' between the name and prefix: - remainder = init first : rest - - where - nameMax, prefixMax :: Int - nameMax = 100 - prefixMax = 155 - - pack _ [] = Left emptyName - pack maxLen (c:cs) - | n > maxLen = Left longName - | otherwise = Right (pack' maxLen n cs) - where n = length c - - pack' maxLen n (c:cs) - | n' <= maxLen = pack' maxLen n' cs - where n' = n + length c - pack' _ _ cs = cs - - longPath = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. The maximum length is 255 ASCII characters.\n" - ++ "The file in question is:\n " ++ path - longName = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. The maximum length for the name part (including " - ++ "extension) is 100 ASCII characters. The maximum length for any " - ++ "individual directory component is 155.\n" - ++ "The file in question is:\n " ++ path - noSplit = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. While the total length is less than 255 ASCII " - ++ "characters, there are unfortunately further restrictions. It has to " - ++ "be possible to split the file path on a directory separator into " - ++ "two parts such that the first part fits in 155 characters or less " - ++ "and the second part fits in 100 characters or less. Basically you " - ++ "have to make the file name or directory names shorter, or you could " - ++ "split a long directory name into nested subdirectories with shorter " - ++ "names.\nThe file in question is:\n " ++ path - emptyName = PackageDistInexcusable $ - "Encountered a file with an empty name, something is very wrong! " - ++ "Files with an empty name cannot be stored in a tar archive or in " - ++ "standard file systems." 
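A worked illustration of the tar-path rule explained above (hand-evaluated from the definitions, not part of the removed module): a 241-character path that splits as a 150-character directory plus a 90-character file name fits the 155/100 prefix/name limits, while a single 120-character file name cannot be split at all and trips the 100-character name limit.

    import Distribution.PackageDescription.Check (PackageCheck, checkPackageFileNames)

    okPath, badPath :: [PackageCheck]
    okPath  = checkPackageFileNames [replicate 150 'd' ++ "/" ++ replicate 90 'f']
    -- expected: []
    badPath = checkPackageFileNames [replicate 120 'x']
    -- expected: a single PackageDistInexcusable about the name-part limit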
- --- ------------------------------------------------------------ --- * Utils --- ------------------------------------------------------------ - -quote :: String -> String -quote s = "'" ++ s ++ "'" - -commaSep :: [String] -> String -commaSep = intercalate ", " - -dups :: Ord a => [a] -> [a] -dups xs = [ x | (x:_:_) <- group (sort xs) ] - -fileExtensionSupportedLanguage :: FilePath -> Bool -fileExtensionSupportedLanguage path = - isHaskell || isC - where - extension = takeExtension path - isHaskell = extension `elem` [".hs", ".lhs"] - isC = isJust (filenameCDialect extension) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Configuration.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Configuration.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Configuration.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Configuration.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,759 +0,0 @@ -{-# LANGUAGE CPP #-} --- -fno-warn-deprecations for use of Map.foldWithKey -{-# OPTIONS_GHC -fno-warn-deprecations #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Configuration --- Copyright : Thomas Schilling, 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is about the cabal configurations feature. It exports --- 'finalizePackageDescription' and 'flattenPackageDescription' which are --- functions for converting 'GenericPackageDescription's down to --- 'PackageDescription's. It has code for working with the tree of conditions --- and resolving or flattening conditions. - -module Distribution.PackageDescription.Configuration ( - finalizePackageDescription, - flattenPackageDescription, - - -- Utils - parseCondition, - freeVars, - extractCondition, - extractConditions, - addBuildableCondition, - mapCondTree, - mapTreeData, - mapTreeConds, - mapTreeConstrs, - transformAllBuildInfos, - transformAllBuildDepends, - ) where - -import Control.Applicative -- 7.10 -Werror workaround. -import Prelude - -import Distribution.Package -import Distribution.PackageDescription -import Distribution.PackageDescription.Utils -import Distribution.Version -import Distribution.Compiler -import Distribution.System -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Compat.ReadP as ReadP hiding ( char ) -import qualified Distribution.Compat.ReadP as ReadP ( char ) -import Distribution.Compat.Semigroup as Semi - -import Control.Arrow (first) -import Data.Char ( isAlphaNum ) -import Data.Maybe ( mapMaybe, maybeToList ) -import Data.Map ( Map, fromListWith, toList ) -import qualified Data.Map as Map -import Data.Tree ( Tree(Node) ) - ------------------------------------------------------------------------------- - --- | Simplify the condition and return its free variables. -simplifyCondition :: Condition c - -> (c -> Either d Bool) -- ^ (partial) variable assignment - -> (Condition d, [d]) -simplifyCondition cond i = fv . 
walk $ cond - where - walk cnd = case cnd of - Var v -> either Var Lit (i v) - Lit b -> Lit b - CNot c -> case walk c of - Lit True -> Lit False - Lit False -> Lit True - c' -> CNot c' - COr c d -> case (walk c, walk d) of - (Lit False, d') -> d' - (Lit True, _) -> Lit True - (c', Lit False) -> c' - (_, Lit True) -> Lit True - (c',d') -> COr c' d' - CAnd c d -> case (walk c, walk d) of - (Lit False, _) -> Lit False - (Lit True, d') -> d' - (_, Lit False) -> Lit False - (c', Lit True) -> c' - (c',d') -> CAnd c' d' - -- gather free vars - fv c = (c, fv' c) - fv' c = case c of - Var v -> [v] - Lit _ -> [] - CNot c' -> fv' c' - COr c1 c2 -> fv' c1 ++ fv' c2 - CAnd c1 c2 -> fv' c1 ++ fv' c2 - --- | Simplify a configuration condition using the OS and arch names. Returns --- the names of all the flags occurring in the condition. -simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar - -> (Condition FlagName, [FlagName]) -simplifyWithSysParams os arch cinfo cond = (cond', flags) - where - (cond', flags) = simplifyCondition cond interp - interp (OS os') = Right $ os' == os - interp (Arch arch') = Right $ arch' == arch - interp (Impl comp vr) - | matchImpl (compilerInfoId cinfo) = Right True - | otherwise = case compilerInfoCompat cinfo of - -- fixme: treat Nothing as unknown, rather than empty list once we - -- support partial resolution of system parameters - Nothing -> Right False - Just compat -> Right (any matchImpl compat) - where - matchImpl (CompilerId c v) = comp == c && v `withinRange` vr - interp (Flag f) = Left f - --- TODO: Add instances and check --- --- prop_sC_idempotent cond a o = cond' == cond'' --- where --- cond' = simplifyCondition cond a o --- cond'' = simplifyCondition cond' a o --- --- prop_sC_noLits cond a o = isLit res || not (hasLits res) --- where --- res = simplifyCondition cond a o --- hasLits (Lit _) = True --- hasLits (CNot c) = hasLits c --- hasLits (COr l r) = hasLits l || hasLits r --- hasLits (CAnd l r) = hasLits l || hasLits r --- hasLits _ = False --- - --- | Parse a configuration condition from a string. -parseCondition :: ReadP r (Condition ConfVar) -parseCondition = condOr - where - condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr - condAnd = sepBy1 cond (oper "&&")>>= return . foldl1 CAnd - cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond - +++ archCond +++ flagCond +++ implCond ) - inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp) - notCond = ReadP.char '!' >> sp >> cond >>= return . CNot - osCond = string "os" >> sp >> inparens osIdent >>= return . Var - archCond = string "arch" >> sp >> inparens archIdent >>= return . Var - flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var - implCond = string "impl" >> sp >> inparens implIdent >>= return . Var - boolLiteral = fmap Lit parse - archIdent = fmap Arch parse - osIdent = fmap OS parse - flagIdent = fmap (Flag . FlagName . 
lowercase) (munch1 isIdentChar) - isIdentChar c = isAlphaNum c || c == '_' || c == '-' - oper s = sp >> string s >> sp - sp = skipSpaces - implIdent = do i <- parse - vr <- sp >> option anyVersion parse - return $ Impl i vr - ------------------------------------------------------------------------------- - -mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w) - -> CondTree v c a -> CondTree w d b -mapCondTree fa fc fcnd (CondNode a c ifs) = - CondNode (fa a) (fc c) (map g ifs) - where - g (cnd, t, me) = (fcnd cnd, mapCondTree fa fc fcnd t, - fmap (mapCondTree fa fc fcnd) me) - -mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a -mapTreeConstrs f = mapCondTree id f id - -mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a -mapTreeConds f = mapCondTree id id f - -mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b -mapTreeData f = mapCondTree f id id - --- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for --- clarity. -data DepTestRslt d = DepOk | MissingDeps d - -instance Semigroup d => Monoid (DepTestRslt d) where - mempty = DepOk - mappend = (Semi.<>) - -instance Semigroup d => Semigroup (DepTestRslt d) where - DepOk <> x = x - x <> DepOk = x - (MissingDeps d) <> (MissingDeps d') = MissingDeps (d <> d') - - --- | Try to find a flag assignment that satisfies the constraints of all trees. --- --- Returns either the missing dependencies, or a tuple containing the --- resulting data, the associated dependencies, and the chosen flag --- assignments. --- --- In case of failure, the union of the dependencies that led to backtracking --- on all branches is returned. --- [TODO: Could also be specified with a function argument.] --- --- TODO: The current algorithm is rather naive. A better approach would be to: --- --- * Rule out possible paths, by taking a look at the associated dependencies. --- --- * Infer the required values for the conditions of these paths, and --- calculate the required domains for the variables used in these --- conditions. Then picking a flag assignment would be linear (I guess). --- --- This would require some sort of SAT solving, though, thus it's not --- implemented unless we really need it. --- -resolveWithFlags :: - [(FlagName,[Bool])] - -- ^ Domain for each flag name, will be tested in order. - -> OS -- ^ OS as returned by Distribution.System.buildOS - -> Arch -- ^ Arch as returned by Distribution.System.buildArch - -> CompilerInfo -- ^ Compiler information - -> [Dependency] -- ^ Additional constraints - -> [CondTree ConfVar [Dependency] PDTagged] - -> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function. - -> Either [Dependency] (TargetSet PDTagged, FlagAssignment) - -- ^ Either the missing dependencies (error case), or a pair of - -- (set of build targets with dependencies, chosen flag assignments) -resolveWithFlags dom os arch impl constrs trees checkDeps = - either (Left . fromDepMapUnion) Right $ explore (build [] dom) - where - extraConstrs = toDepMap constrs - - -- simplify trees by (partially) evaluating all conditions and converting - -- dependencies to dependency maps. - simplifiedTrees :: [CondTree FlagName DependencyMap PDTagged] - simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps - . addBuildableCondition pdTaggedBuildInfo - . mapTreeConds (fst . simplifyWithSysParams os arch impl)) - trees - - -- @explore@ searches a tree of assignments, backtracking whenever a flag - -- introduces a dependency that cannot be satisfied. 
If there is no - -- solution, @explore@ returns the union of all dependencies that caused - -- it to backtrack. Since the tree is constructed lazily, we avoid some - -- computation overhead in the successful case. - explore :: Tree FlagAssignment - -> Either DepMapUnion (TargetSet PDTagged, FlagAssignment) - explore (Node flags ts) = - let targetSet = TargetSet $ flip map simplifiedTrees $ - -- apply additional constraints to all dependencies - first (`constrainBy` extraConstrs) . - simplifyCondTree (env flags) - deps = overallDependencies targetSet - in case checkDeps (fromDepMap deps) of - DepOk | null ts -> Right (targetSet, flags) - | otherwise -> tryAll $ map explore ts - MissingDeps mds -> Left (toDepMapUnion mds) - - -- Builds a tree of all possible flag assignments. Internal nodes - -- have only partial assignments. - build :: FlagAssignment -> [(FlagName, [Bool])] -> Tree FlagAssignment - build assigned [] = Node assigned [] - build assigned ((fn, vals) : unassigned) = - Node assigned $ map (\v -> build ((fn, v) : assigned) unassigned) vals - - tryAll :: [Either DepMapUnion a] -> Either DepMapUnion a - tryAll = foldr mp mz - - -- special version of `mplus' for our local purposes - mp :: Either DepMapUnion a -> Either DepMapUnion a -> Either DepMapUnion a - mp m@(Right _) _ = m - mp _ m@(Right _) = m - mp (Left xs) (Left ys) = - let union = Map.foldrWithKey (Map.insertWith' combine) - (unDepMapUnion xs) (unDepMapUnion ys) - combine x y = simplifyVersionRange $ unionVersionRanges x y - in union `seq` Left (DepMapUnion union) - - -- `mzero' - mz :: Either DepMapUnion a - mz = Left (DepMapUnion Map.empty) - - env :: FlagAssignment -> FlagName -> Either FlagName Bool - env flags flag = (maybe (Left flag) Right . lookup flag) flags - - pdTaggedBuildInfo :: PDTagged -> BuildInfo - pdTaggedBuildInfo (Lib l) = libBuildInfo l - pdTaggedBuildInfo (Exe _ e) = buildInfo e - pdTaggedBuildInfo (Test _ t) = testBuildInfo t - pdTaggedBuildInfo (Bench _ b) = benchmarkBuildInfo b - pdTaggedBuildInfo PDNull = mempty - --- | Transforms a 'CondTree' by putting the input under the "then" branch of a --- conditional that is True when Buildable is True. If 'addBuildableCondition' --- can determine that Buildable is always True, it returns the input unchanged. --- If Buildable is always False, it returns the empty 'CondTree'. -addBuildableCondition :: (Eq v, Monoid a, Monoid c) => (a -> BuildInfo) - -> CondTree v c a - -> CondTree v c a -addBuildableCondition getInfo t = - case extractCondition (buildable . getInfo) t of - Lit True -> t - Lit False -> CondNode mempty mempty [] - c -> CondNode mempty mempty [(c, t, Nothing)] - --- Note: extracting buildable conditions. --- -------------------------------------- --- --- If the conditions in a cond tree lead to Buildable being set to False, then --- none of the dependencies for this cond tree should actually be taken into --- account. On the other hand, some of the flags may only be decided in the --- solver, so we cannot necessarily make the decision whether a component is --- Buildable or not prior to solving. --- --- What we are doing here is to partially evaluate a condition tree in order to --- extract the condition under which Buildable is True. The predicate determines --- whether data under a 'CondTree' is buildable. - - --- | Extract the condition matched by the given predicate from a cond tree. --- --- We use this mainly for extracting buildable conditions (see the Note above), --- but the function is in fact more general. 
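
As an invented illustration of the extraction described above, assuming the Cabal 1.24 types used throughout this module: a component whose only branch sets buildable to False under os(windows) yields, after simplification by the function defined just below, a condition equivalent to the negation of that test.

import Distribution.Package (Dependency)
import Distribution.PackageDescription
         (BuildInfo(..), CondTree(..), Condition(..), ConfVar(..), emptyBuildInfo)
import Distribution.System (OS(Windows))

-- A tree whose data is plain BuildInfo and whose only branch disables the
-- component on Windows.
notOnWindows :: CondTree ConfVar [Dependency] BuildInfo
notOnWindows =
  CondNode emptyBuildInfo []
    [ ( Var (OS Windows)
      , CondNode (emptyBuildInfo { buildable = False }) [] []
      , Nothing
      ) ]

-- extractCondition buildable notOnWindows
--   evaluates to a condition equivalent to CNot (Var (OS Windows))
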
-extractCondition :: Eq v => (a -> Bool) -> CondTree v c a -> Condition v -extractCondition p = go - where - go (CondNode x _ cs) | not (p x) = Lit False - | otherwise = goList cs - - goList [] = Lit True - goList ((c, t, e) : cs) = - let - ct = go t - ce = maybe (Lit True) go e - in - ((c `cAnd` ct) `cOr` (CNot c `cAnd` ce)) `cAnd` goList cs - --- | Extract conditions matched by the given predicate from all cond trees in a --- 'GenericPackageDescription'. -extractConditions :: (BuildInfo -> Bool) -> GenericPackageDescription - -> [Condition ConfVar] -extractConditions f gpkg = - concat [ - maybeToList $ extractCondition (f . libBuildInfo) <$> condLibrary gpkg - , extractCondition (f . buildInfo) . snd <$> condExecutables gpkg - , extractCondition (f . testBuildInfo) . snd <$> condTestSuites gpkg - , extractCondition (f . benchmarkBuildInfo) . snd <$> condBenchmarks gpkg - ] - - --- | A map of dependencies that combines version ranges using 'unionVersionRanges'. -newtype DepMapUnion = DepMapUnion { unDepMapUnion :: Map PackageName VersionRange } - -toDepMapUnion :: [Dependency] -> DepMapUnion -toDepMapUnion ds = - DepMapUnion $ fromListWith unionVersionRanges [ (p,vr) | Dependency p vr <- ds ] - -fromDepMapUnion :: DepMapUnion -> [Dependency] -fromDepMapUnion m = [ Dependency p vr | (p,vr) <- toList (unDepMapUnion m) ] - --- | A map of dependencies. Newtyped since the default monoid instance is not --- appropriate. The monoid instance uses 'intersectVersionRanges'. -newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange } - deriving (Show, Read) - -instance Monoid DependencyMap where - mempty = DependencyMap Map.empty - mappend = (Semi.<>) - -instance Semigroup DependencyMap where - (DependencyMap a) <> (DependencyMap b) = - DependencyMap (Map.unionWith intersectVersionRanges a b) - -toDepMap :: [Dependency] -> DependencyMap -toDepMap ds = - DependencyMap $ fromListWith intersectVersionRanges [ (p,vr) | Dependency p vr <- ds ] - -fromDepMap :: DependencyMap -> [Dependency] -fromDepMap m = [ Dependency p vr | (p,vr) <- toList (unDependencyMap m) ] - --- | Flattens a CondTree using a partial flag assignment. When a condition --- cannot be evaluated, both branches are ignored. -simplifyCondTree :: (Monoid a, Monoid d) => - (v -> Either v Bool) - -> CondTree v d a - -> (d, a) -simplifyCondTree env (CondNode a d ifs) = - mconcat $ (d, a) : mapMaybe simplifyIf ifs - where - simplifyIf (cnd, t, me) = - case simplifyCondition cnd env of - (Lit True, _) -> Just $ simplifyCondTree env t - (Lit False, _) -> fmap (simplifyCondTree env) me - _ -> Nothing - --- | Flatten a CondTree. This will resolve the CondTree by taking all --- possible paths into account. Note that since branches represent exclusive --- choices this may not result in a \"sane\" result. 
-ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c) -ignoreConditions (CondNode a c ifs) = (a, c) `mappend` mconcat (concatMap f ifs) - where f (_, t, me) = ignoreConditions t - : maybeToList (fmap ignoreConditions me) - -freeVars :: CondTree ConfVar c a -> [FlagName] -freeVars t = [ f | Flag f <- freeVars' t ] - where - freeVars' (CondNode _ _ ifs) = concatMap compfv ifs - compfv (c, ct, mct) = condfv c ++ freeVars' ct ++ maybe [] freeVars' mct - condfv c = case c of - Var v -> [v] - Lit _ -> [] - CNot c' -> condfv c' - COr c1 c2 -> condfv c1 ++ condfv c2 - CAnd c1 c2 -> condfv c1 ++ condfv c2 - - ------------------------------------------------------------------------------- - --- | A set of targets with their package dependencies -newtype TargetSet a = TargetSet [(DependencyMap, a)] - --- | Combine the target-specific dependencies in a TargetSet to give the --- dependencies for the package as a whole. -overallDependencies :: TargetSet PDTagged -> DependencyMap -overallDependencies (TargetSet targets) = mconcat depss - where - (depss, _) = unzip $ filter (removeDisabledSections . snd) targets - removeDisabledSections :: PDTagged -> Bool - removeDisabledSections (Lib l) = buildable (libBuildInfo l) - removeDisabledSections (Exe _ e) = buildable (buildInfo e) - removeDisabledSections (Test _ t) = testEnabled t && buildable (testBuildInfo t) - removeDisabledSections (Bench _ b) = benchmarkEnabled b && buildable (benchmarkBuildInfo b) - removeDisabledSections PDNull = True - --- Apply extra constraints to a dependency map. --- Combines dependencies where the result will only contain keys from the left --- (first) map. If a key also exists in the right map, both constraints will --- be intersected. -constrainBy :: DependencyMap -- ^ Input map - -> DependencyMap -- ^ Extra constraints - -> DependencyMap -constrainBy left extra = - DependencyMap $ - Map.foldWithKey tightenConstraint (unDependencyMap left) - (unDependencyMap extra) - where tightenConstraint n c l = - case Map.lookup n l of - Nothing -> l - Just vr -> Map.insert n (intersectVersionRanges vr c) l - --- | Collect up the targets in a TargetSet of tagged targets, storing the --- dependencies as we go. -flattenTaggedTargets :: TargetSet PDTagged -> - (Maybe Library, [(String, Executable)], [(String, TestSuite)] - , [(String, Benchmark)]) -flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, [], [], []) targets - where - untag (_, Lib _) (Just _, _, _, _) = userBug "Only one library expected" - untag (deps, Lib l) (Nothing, exes, tests, bms) = - (Just l', exes, tests, bms) - where - l' = l { - libBuildInfo = (libBuildInfo l) { targetBuildDepends = fromDepMap deps } - } - untag (deps, Exe n e) (mlib, exes, tests, bms) - | any ((== n) . fst) exes = - userBug $ "There exist several exes with the same name: '" ++ n ++ "'" - | any ((== n) . fst) tests = - userBug $ "There exists a test with the same name as an exe: '" ++ n ++ "'" - | any ((== n) . fst) bms = - userBug $ "There exists a benchmark with the same name as an exe: '" ++ n ++ "'" - | otherwise = (mlib, (n, e'):exes, tests, bms) - where - e' = e { - buildInfo = (buildInfo e) { targetBuildDepends = fromDepMap deps } - } - untag (deps, Test n t) (mlib, exes, tests, bms) - | any ((== n) . fst) tests = - userBug $ "There exist several tests with the same name: '" ++ n ++ "'" - | any ((== n) . fst) exes = - userBug $ "There exists an exe with the same name as the test: '" ++ n ++ "'" - | any ((== n) . 
fst) bms = - userBug $ "There exists a benchmark with the same name as the test: '" ++ n ++ "'" - | otherwise = (mlib, exes, (n, t'):tests, bms) - where - t' = t { - testBuildInfo = (testBuildInfo t) - { targetBuildDepends = fromDepMap deps } - } - untag (deps, Bench n b) (mlib, exes, tests, bms) - | any ((== n) . fst) bms = - userBug $ "There exist several benchmarks with the same name: '" ++ n ++ "'" - | any ((== n) . fst) exes = - userBug $ "There exists an exe with the same name as the benchmark: '" ++ n ++ "'" - | any ((== n) . fst) tests = - userBug $ "There exists a test with the same name as the benchmark: '" ++ n ++ "'" - | otherwise = (mlib, exes, tests, (n, b'):bms) - where - b' = b { - benchmarkBuildInfo = (benchmarkBuildInfo b) - { targetBuildDepends = fromDepMap deps } - } - untag (_, PDNull) x = x -- actually this should not happen, but let's be liberal - - ------------------------------------------------------------------------------- --- Convert GenericPackageDescription to PackageDescription --- - -data PDTagged = Lib Library - | Exe String Executable - | Test String TestSuite - | Bench String Benchmark - | PDNull - deriving Show - -instance Monoid PDTagged where - mempty = PDNull - mappend = (Semi.<>) - -instance Semigroup PDTagged where - PDNull <> x = x - x <> PDNull = x - Lib l <> Lib l' = Lib (l <> l') - Exe n e <> Exe n' e' | n == n' = Exe n (e <> e') - Test n t <> Test n' t' | n == n' = Test n (t <> t') - Bench n b <> Bench n' b' | n == n' = Bench n (b <> b') - _ <> _ = cabalBug "Cannot combine incompatible tags" - --- | Create a package description with all configurations resolved. --- --- This function takes a `GenericPackageDescription` and several environment --- parameters and tries to generate `PackageDescription` by finding a flag --- assignment that result in satisfiable dependencies. --- --- It takes as inputs a not necessarily complete specifications of flags --- assignments, an optional package index as well as platform parameters. If --- some flags are not assigned explicitly, this function will try to pick an --- assignment that causes this function to succeed. The package index is --- optional since on some platforms we cannot determine which packages have --- been installed before. When no package index is supplied, every dependency --- is assumed to be satisfiable, therefore all not explicitly assigned flags --- will get their default values. --- --- This function will fail if it cannot find a flag assignment that leads to --- satisfiable dependencies. (It will not try alternative assignments for --- explicitly specified flags.) In case of failure it will return the missing --- dependencies that it encountered when trying different flag assignments. --- On success, it will return the package description and the full flag --- assignment chosen. --- -finalizePackageDescription :: - FlagAssignment -- ^ Explicitly specified flag assignments - -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of - -- available packages? If this is unknown then use - -- True. - -> Platform -- ^ The 'Arch' and 'OS' - -> CompilerInfo -- ^ Compiler information - -> [Dependency] -- ^ Additional constraints - -> GenericPackageDescription - -> Either [Dependency] - (PackageDescription, FlagAssignment) - -- ^ Either missing dependencies or the resolved package - -- description along with the flag assignments chosen. 
-finalizePackageDescription userflags satisfyDep - (Platform arch os) impl constraints - (GenericPackageDescription pkg flags mlib0 exes0 tests0 bms0) = - case resolveFlags of - Right ((mlib, exes', tests', bms'), targetSet, flagVals) -> - Right ( pkg { library = mlib - , executables = exes' - , testSuites = tests' - , benchmarks = bms' - , buildDepends = fromDepMap (overallDependencies targetSet) - } - , flagVals ) - - Left missing -> Left missing - where - -- Combine lib, exes, and tests into one list of @CondTree@s with tagged data - condTrees = maybeToList (fmap (mapTreeData Lib) mlib0 ) - ++ map (\(name,tree) -> mapTreeData (Exe name) tree) exes0 - ++ map (\(name,tree) -> mapTreeData (Test name) tree) tests0 - ++ map (\(name,tree) -> mapTreeData (Bench name) tree) bms0 - - resolveFlags = - case resolveWithFlags flagChoices os arch impl constraints condTrees check of - Right (targetSet, fs) -> - let (mlib, exes, tests, bms) = flattenTaggedTargets targetSet in - Right ( (fmap libFillInDefaults mlib, - map (\(n,e) -> (exeFillInDefaults e) { exeName = n }) exes, - map (\(n,t) -> (testFillInDefaults t) { testName = n }) tests, - map (\(n,b) -> (benchFillInDefaults b) { benchmarkName = n }) bms), - targetSet, fs) - Left missing -> Left missing - - flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags - d2c manual n b = case lookup n userflags of - Just val -> [val] - Nothing - | manual -> [b] - | otherwise -> [b, not b] - --flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices - check ds = let missingDeps = filter (not . satisfyDep) ds - in if null missingDeps - then DepOk - else MissingDeps missingDeps - -{- -let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] []) -let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] []) - -let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])] -let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index -let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds -resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ... -resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ... --} - --- | Flatten a generic package description by ignoring all conditions and just --- join the field descriptors into on package description. Note, however, --- that this may lead to inconsistent field values, since all values are --- joined into one field, which may not be possible in the original package --- description, due to the use of exclusive choices (if ... else ...). --- --- TODO: One particularly tricky case is defaulting. In the original package --- description, e.g., the source directory might either be the default or a --- certain, explicitly set path. Since defaults are filled in only after the --- package has been resolved and when no explicit value has been set, the --- default path will be missing from the package description returned by this --- function. 
-flattenPackageDescription :: GenericPackageDescription -> PackageDescription -flattenPackageDescription (GenericPackageDescription pkg _ mlib0 exes0 tests0 bms0) = - pkg { library = mlib - , executables = reverse exes - , testSuites = reverse tests - , benchmarks = reverse bms - , buildDepends = ldeps ++ reverse edeps ++ reverse tdeps ++ reverse bdeps - } - where - (mlib, ldeps) = case mlib0 of - Just lib -> let (l,ds) = ignoreConditions lib in - (Just (libFillInDefaults l), ds) - Nothing -> (Nothing, []) - (exes, edeps) = foldr flattenExe ([],[]) exes0 - (tests, tdeps) = foldr flattenTst ([],[]) tests0 - (bms, bdeps) = foldr flattenBm ([],[]) bms0 - flattenExe (n, t) (es, ds) = - let (e, ds') = ignoreConditions t in - ( (exeFillInDefaults $ e { exeName = n }) : es, ds' ++ ds ) - flattenTst (n, t) (es, ds) = - let (e, ds') = ignoreConditions t in - ( (testFillInDefaults $ e { testName = n }) : es, ds' ++ ds ) - flattenBm (n, t) (es, ds) = - let (e, ds') = ignoreConditions t in - ( (benchFillInDefaults $ e { benchmarkName = n }) : es, ds' ++ ds ) - --- This is in fact rather a hack. The original version just overrode the --- default values, however, when adding conditions we had to switch to a --- modifier-based approach. There, nothing is ever overwritten, but only --- joined together. --- --- This is the cleanest way i could think of, that doesn't require --- changing all field parsing functions to return modifiers instead. -libFillInDefaults :: Library -> Library -libFillInDefaults lib@(Library { libBuildInfo = bi }) = - lib { libBuildInfo = biFillInDefaults bi } - -exeFillInDefaults :: Executable -> Executable -exeFillInDefaults exe@(Executable { buildInfo = bi }) = - exe { buildInfo = biFillInDefaults bi } - -testFillInDefaults :: TestSuite -> TestSuite -testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) = - tst { testBuildInfo = biFillInDefaults bi } - -benchFillInDefaults :: Benchmark -> Benchmark -benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) = - bm { benchmarkBuildInfo = biFillInDefaults bi } - -biFillInDefaults :: BuildInfo -> BuildInfo -biFillInDefaults bi = - if null (hsSourceDirs bi) - then bi { hsSourceDirs = [currentDir] } - else bi - --- Walk a 'GenericPackageDescription' and apply @onBuildInfo@/@onSetupBuildInfo@ --- to all nested 'BuildInfo'/'SetupBuildInfo' values. -transformAllBuildInfos :: (BuildInfo -> BuildInfo) - -> (SetupBuildInfo -> SetupBuildInfo) - -> GenericPackageDescription - -> GenericPackageDescription -transformAllBuildInfos onBuildInfo onSetupBuildInfo gpd = gpd' - where - onLibrary lib = lib { libBuildInfo = onBuildInfo $ libBuildInfo lib } - onExecutable exe = exe { buildInfo = onBuildInfo $ buildInfo exe } - onTestSuite tst = tst { testBuildInfo = onBuildInfo $ testBuildInfo tst } - onBenchmark bmk = bmk { benchmarkBuildInfo = - onBuildInfo $ benchmarkBuildInfo bmk } - - pd = packageDescription gpd - pd' = pd { - library = fmap onLibrary (library pd), - executables = map onExecutable (executables pd), - testSuites = map onTestSuite (testSuites pd), - benchmarks = map onBenchmark (benchmarks pd), - setupBuildInfo = fmap onSetupBuildInfo (setupBuildInfo pd) - } - - gpd' = transformAllCondTrees onLibrary onExecutable - onTestSuite onBenchmark id - $ gpd { packageDescription = pd' } - --- | Walk a 'GenericPackageDescription' and apply @f@ to all nested --- @build-depends@ fields. 
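
A hypothetical usage sketch for the traversal declared just below, assuming the Cabal 1.24 API; the package name "foo" and the helper name relaxFoo are made up:

import Distribution.Package (Dependency(..), PackageName(..))
import Distribution.PackageDescription (GenericPackageDescription)
import Distribution.PackageDescription.Configuration (transformAllBuildDepends)
import Distribution.Version (anyVersion)

-- Rewrite every build-depends entry on the (made-up) package "foo" so that it
-- accepts any version; all other dependencies are left untouched.
relaxFoo :: GenericPackageDescription -> GenericPackageDescription
relaxFoo = transformAllBuildDepends relax
  where
    relax d@(Dependency pn _)
      | pn == PackageName "foo" = Dependency pn anyVersion
      | otherwise               = d
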
-transformAllBuildDepends :: (Dependency -> Dependency) - -> GenericPackageDescription - -> GenericPackageDescription -transformAllBuildDepends f gpd = gpd' - where - onBI bi = bi { targetBuildDepends = map f $ targetBuildDepends bi } - onSBI stp = stp { setupDepends = map f $ setupDepends stp } - onPD pd = pd { buildDepends = map f $ buildDepends pd } - - pd' = onPD $ packageDescription gpd - gpd' = transformAllCondTrees id id id id (map f) - . transformAllBuildInfos onBI onSBI - $ gpd { packageDescription = pd' } - --- | Walk all 'CondTree's inside a 'GenericPackageDescription' and apply --- appropriate transformations to all nodes. Helper function used by --- 'transformAllBuildDepends' and 'transformAllBuildInfos'. -transformAllCondTrees :: (Library -> Library) - -> (Executable -> Executable) - -> (TestSuite -> TestSuite) - -> (Benchmark -> Benchmark) - -> ([Dependency] -> [Dependency]) - -> GenericPackageDescription -> GenericPackageDescription -transformAllCondTrees onLibrary onExecutable - onTestSuite onBenchmark onDepends gpd = gpd' - where - gpd' = gpd { - condLibrary = condLib', - condExecutables = condExes', - condTestSuites = condTests', - condBenchmarks = condBenchs' - } - - condLib = condLibrary gpd - condExes = condExecutables gpd - condTests = condTestSuites gpd - condBenchs = condBenchmarks gpd - - condLib' = fmap (onCondTree onLibrary) condLib - condExes' = map (mapSnd $ onCondTree onExecutable) condExes - condTests' = map (mapSnd $ onCondTree onTestSuite) condTests - condBenchs' = map (mapSnd $ onCondTree onBenchmark) condBenchs - - mapSnd :: (a -> b) -> (c,a) -> (c,b) - mapSnd = fmap - - onCondTree :: (a -> b) -> CondTree v [Dependency] a - -> CondTree v [Dependency] b - onCondTree g = mapCondTree g onDepends id diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Parse.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Parse.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Parse.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Parse.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1282 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Parse --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defined parsers and partial pretty printers for the @.cabal@ format. --- Some of the complexity in this module is due to the fact that we have to be --- backwards compatible with old @.cabal@ files, so there's code to translate --- into the newer structure. 
- -module Distribution.PackageDescription.Parse ( - -- * Package descriptions - readPackageDescription, - writePackageDescription, - parsePackageDescription, - showPackageDescription, - - -- ** Parsing - ParseResult(..), - FieldDescr(..), - LineNo, - - -- ** Supplementary build information - readHookedBuildInfo, - parseHookedBuildInfo, - writeHookedBuildInfo, - showHookedBuildInfo, - - pkgDescrFieldDescrs, - libFieldDescrs, - executableFieldDescrs, - binfoFieldDescrs, - sourceRepoFieldDescrs, - testSuiteFieldDescrs, - flagFieldDescrs - ) where - -import Distribution.ParseUtils hiding (parseFields) -import Distribution.PackageDescription -import Distribution.PackageDescription.Utils -import Distribution.Package -import Distribution.ModuleName -import Distribution.Version -import Distribution.Verbosity -import Distribution.Compiler -import Distribution.PackageDescription.Configuration -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Compat.ReadP hiding (get) - -import Data.Char (isSpace) -import Data.Foldable (traverse_) -import Data.Maybe (listToMaybe, isJust) -import Data.List (nub, unfoldr, partition, (\\)) -import Control.Monad (liftM, foldM, when, unless, ap) -#if __GLASGOW_HASKELL__ < 710 -import Data.Monoid (Monoid(..)) -import Control.Applicative (Applicative(..)) -#endif -import Control.Arrow (first) -import System.Directory (doesFileExist) -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - -import Text.PrettyPrint - - --- ----------------------------------------------------------------------------- --- The PackageDescription type - -pkgDescrFieldDescrs :: [FieldDescr PackageDescription] -pkgDescrFieldDescrs = - [ simpleField "name" - disp parse - packageName (\name pkg -> pkg{package=(package pkg){pkgName=name}}) - , simpleField "version" - disp parse - packageVersion (\ver pkg -> pkg{package=(package pkg){pkgVersion=ver}}) - , simpleField "cabal-version" - (either disp disp) (liftM Left parse +++ liftM Right parse) - specVersionRaw (\v pkg -> pkg{specVersionRaw=v}) - , simpleField "build-type" - (maybe empty disp) (fmap Just parse) - buildType (\t pkg -> pkg{buildType=t}) - , simpleField "license" - disp parseLicenseQ - license (\l pkg -> pkg{license=l}) - -- We have both 'license-file' and 'license-files' fields. - -- Rather than declaring license-file to be deprecated, we will continue - -- to allow both. The 'license-file' will continue to only allow single - -- tokens, while 'license-files' allows multiple. On pretty-printing, we - -- will use 'license-file' if there's just one, and use 'license-files' - -- otherwise. 
- , simpleField "license-file" - showFilePath parseFilePathQ - (\pkg -> case licenseFiles pkg of - [x] -> x - _ -> "") - (\l pkg -> pkg{licenseFiles=licenseFiles pkg ++ [l]}) - , listField "license-files" - showFilePath parseFilePathQ - (\pkg -> case licenseFiles pkg of - [_] -> [] - xs -> xs) - (\ls pkg -> pkg{licenseFiles=ls}) - , simpleField "copyright" - showFreeText parseFreeText - copyright (\val pkg -> pkg{copyright=val}) - , simpleField "maintainer" - showFreeText parseFreeText - maintainer (\val pkg -> pkg{maintainer=val}) - , simpleField "stability" - showFreeText parseFreeText - stability (\val pkg -> pkg{stability=val}) - , simpleField "homepage" - showFreeText parseFreeText - homepage (\val pkg -> pkg{homepage=val}) - , simpleField "package-url" - showFreeText parseFreeText - pkgUrl (\val pkg -> pkg{pkgUrl=val}) - , simpleField "bug-reports" - showFreeText parseFreeText - bugReports (\val pkg -> pkg{bugReports=val}) - , simpleField "synopsis" - showFreeText parseFreeText - synopsis (\val pkg -> pkg{synopsis=val}) - , simpleField "description" - showFreeText parseFreeText - description (\val pkg -> pkg{description=val}) - , simpleField "category" - showFreeText parseFreeText - category (\val pkg -> pkg{category=val}) - , simpleField "author" - showFreeText parseFreeText - author (\val pkg -> pkg{author=val}) - , listField "tested-with" - showTestedWith parseTestedWithQ - testedWith (\val pkg -> pkg{testedWith=val}) - , listFieldWithSep vcat "data-files" - showFilePath parseFilePathQ - dataFiles (\val pkg -> pkg{dataFiles=val}) - , simpleField "data-dir" - showFilePath parseFilePathQ - dataDir (\val pkg -> pkg{dataDir=val}) - , listFieldWithSep vcat "extra-source-files" - showFilePath parseFilePathQ - extraSrcFiles (\val pkg -> pkg{extraSrcFiles=val}) - , listFieldWithSep vcat "extra-tmp-files" - showFilePath parseFilePathQ - extraTmpFiles (\val pkg -> pkg{extraTmpFiles=val}) - , listFieldWithSep vcat "extra-doc-files" - showFilePath parseFilePathQ - extraDocFiles (\val pkg -> pkg{extraDocFiles=val}) - ] - --- | Store any fields beginning with "x-" in the customFields field of --- a PackageDescription. All other fields will generate a warning. 
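
For instance, in an invented package header like the one below, the x-commit-id line is kept verbatim in customFieldsPD, while an unknown field without the x- prefix would instead produce a warning:

name:        example-pkg
version:     0.1.0.0
-- The next field is a custom field and is stored in customFieldsPD as-is.
x-commit-id: 0123456789abcdef
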
-storeXFieldsPD :: UnrecFieldParser PackageDescription -storeXFieldsPD (f@('x':'-':_),val) pkg = - Just pkg{ customFieldsPD = - customFieldsPD pkg ++ [(f,val)]} -storeXFieldsPD _ _ = Nothing - --- --------------------------------------------------------------------------- --- The Library type - -libFieldDescrs :: [FieldDescr Library] -libFieldDescrs = - [ listFieldWithSep vcat "exposed-modules" disp parseModuleNameQ - exposedModules (\mods lib -> lib{exposedModules=mods}) - - , commaListFieldWithSep vcat "reexported-modules" disp parse - reexportedModules (\mods lib -> lib{reexportedModules=mods}) - - , listFieldWithSep vcat "required-signatures" disp parseModuleNameQ - requiredSignatures (\mods lib -> lib{requiredSignatures=mods}) - - , listFieldWithSep vcat "exposed-signatures" disp parseModuleNameQ - exposedSignatures (\mods lib -> lib{exposedSignatures=mods}) - - , boolField "exposed" - libExposed (\val lib -> lib{libExposed=val}) - ] ++ map biToLib binfoFieldDescrs - where biToLib = liftField libBuildInfo (\bi lib -> lib{libBuildInfo=bi}) - -storeXFieldsLib :: UnrecFieldParser Library -storeXFieldsLib (f@('x':'-':_), val) l@(Library { libBuildInfo = bi }) = - Just $ l {libBuildInfo = - bi{ customFieldsBI = customFieldsBI bi ++ [(f,val)]}} -storeXFieldsLib _ _ = Nothing - --- --------------------------------------------------------------------------- --- The Executable type - - -executableFieldDescrs :: [FieldDescr Executable] -executableFieldDescrs = - [ -- note ordering: configuration must come first, for - -- showPackageDescription. - simpleField "executable" - showToken parseTokenQ - exeName (\xs exe -> exe{exeName=xs}) - , simpleField "main-is" - showFilePath parseFilePathQ - modulePath (\xs exe -> exe{modulePath=xs}) - ] - ++ map biToExe binfoFieldDescrs - where biToExe = liftField buildInfo (\bi exe -> exe{buildInfo=bi}) - -storeXFieldsExe :: UnrecFieldParser Executable -storeXFieldsExe (f@('x':'-':_), val) e@(Executable { buildInfo = bi }) = - Just $ e {buildInfo = bi{ customFieldsBI = (f,val):customFieldsBI bi}} -storeXFieldsExe _ _ = Nothing - --- --------------------------------------------------------------------------- --- The TestSuite type - --- | An intermediate type just used for parsing the test-suite stanza. --- After validation it is converted into the proper 'TestSuite' type. 
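
For reference, a stanza of the exitcode-stdio-1.0 kind that this intermediate type is meant to capture (the names are illustrative): type and main-is are required by the validation that follows, and adding a test-module field to it would only trigger the extraField warning.

test-suite example-tests
  type:             exitcode-stdio-1.0
  main-is:          Tests.hs
  hs-source-dirs:   test
  build-depends:    base
  default-language: Haskell2010
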
-data TestSuiteStanza = TestSuiteStanza { - testStanzaTestType :: Maybe TestType, - testStanzaMainIs :: Maybe FilePath, - testStanzaTestModule :: Maybe ModuleName, - testStanzaBuildInfo :: BuildInfo - } - -emptyTestStanza :: TestSuiteStanza -emptyTestStanza = TestSuiteStanza Nothing Nothing Nothing mempty - -testSuiteFieldDescrs :: [FieldDescr TestSuiteStanza] -testSuiteFieldDescrs = - [ simpleField "type" - (maybe empty disp) (fmap Just parse) - testStanzaTestType (\x suite -> suite { testStanzaTestType = x }) - , simpleField "main-is" - (maybe empty showFilePath) (fmap Just parseFilePathQ) - testStanzaMainIs (\x suite -> suite { testStanzaMainIs = x }) - , simpleField "test-module" - (maybe empty disp) (fmap Just parseModuleNameQ) - testStanzaTestModule (\x suite -> suite { testStanzaTestModule = x }) - ] - ++ map biToTest binfoFieldDescrs - where - biToTest = liftField testStanzaBuildInfo - (\bi suite -> suite { testStanzaBuildInfo = bi }) - -storeXFieldsTest :: UnrecFieldParser TestSuiteStanza -storeXFieldsTest (f@('x':'-':_), val) t@(TestSuiteStanza { testStanzaBuildInfo = bi }) = - Just $ t {testStanzaBuildInfo = bi{ customFieldsBI = (f,val):customFieldsBI bi}} -storeXFieldsTest _ _ = Nothing - -validateTestSuite :: LineNo -> TestSuiteStanza -> ParseResult TestSuite -validateTestSuite line stanza = - case testStanzaTestType stanza of - Nothing -> return $ - emptyTestSuite { testBuildInfo = testStanzaBuildInfo stanza } - - Just tt@(TestTypeUnknown _ _) -> - return emptyTestSuite { - testInterface = TestSuiteUnsupported tt, - testBuildInfo = testStanzaBuildInfo stanza - } - - Just tt | tt `notElem` knownTestTypes -> - return emptyTestSuite { - testInterface = TestSuiteUnsupported tt, - testBuildInfo = testStanzaBuildInfo stanza - } - - Just tt@(TestTypeExe ver) -> - case testStanzaMainIs stanza of - Nothing -> syntaxError line (missingField "main-is" tt) - Just file -> do - when (isJust (testStanzaTestModule stanza)) $ - warning (extraField "test-module" tt) - return emptyTestSuite { - testInterface = TestSuiteExeV10 ver file, - testBuildInfo = testStanzaBuildInfo stanza - } - - Just tt@(TestTypeLib ver) -> - case testStanzaTestModule stanza of - Nothing -> syntaxError line (missingField "test-module" tt) - Just module_ -> do - when (isJust (testStanzaMainIs stanza)) $ - warning (extraField "main-is" tt) - return emptyTestSuite { - testInterface = TestSuiteLibV09 ver module_, - testBuildInfo = testStanzaBuildInfo stanza - } - - where - missingField name tt = "The '" ++ name ++ "' field is required for the " - ++ display tt ++ " test suite type." - - extraField name tt = "The '" ++ name ++ "' field is not used for the '" - ++ display tt ++ "' test suite type." - - --- --------------------------------------------------------------------------- --- The Benchmark type - --- | An intermediate type just used for parsing the benchmark stanza. --- After validation it is converted into the proper 'Benchmark' type. 
-data BenchmarkStanza = BenchmarkStanza { - benchmarkStanzaBenchmarkType :: Maybe BenchmarkType, - benchmarkStanzaMainIs :: Maybe FilePath, - benchmarkStanzaBenchmarkModule :: Maybe ModuleName, - benchmarkStanzaBuildInfo :: BuildInfo - } - -emptyBenchmarkStanza :: BenchmarkStanza -emptyBenchmarkStanza = BenchmarkStanza Nothing Nothing Nothing mempty - -benchmarkFieldDescrs :: [FieldDescr BenchmarkStanza] -benchmarkFieldDescrs = - [ simpleField "type" - (maybe empty disp) (fmap Just parse) - benchmarkStanzaBenchmarkType - (\x suite -> suite { benchmarkStanzaBenchmarkType = x }) - , simpleField "main-is" - (maybe empty showFilePath) (fmap Just parseFilePathQ) - benchmarkStanzaMainIs - (\x suite -> suite { benchmarkStanzaMainIs = x }) - ] - ++ map biToBenchmark binfoFieldDescrs - where - biToBenchmark = liftField benchmarkStanzaBuildInfo - (\bi suite -> suite { benchmarkStanzaBuildInfo = bi }) - -storeXFieldsBenchmark :: UnrecFieldParser BenchmarkStanza -storeXFieldsBenchmark (f@('x':'-':_), val) - t@(BenchmarkStanza { benchmarkStanzaBuildInfo = bi }) = - Just $ t {benchmarkStanzaBuildInfo = - bi{ customFieldsBI = (f,val):customFieldsBI bi}} -storeXFieldsBenchmark _ _ = Nothing - -validateBenchmark :: LineNo -> BenchmarkStanza -> ParseResult Benchmark -validateBenchmark line stanza = - case benchmarkStanzaBenchmarkType stanza of - Nothing -> return $ - emptyBenchmark { benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza } - - Just tt@(BenchmarkTypeUnknown _ _) -> - return emptyBenchmark { - benchmarkInterface = BenchmarkUnsupported tt, - benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza - } - - Just tt | tt `notElem` knownBenchmarkTypes -> - return emptyBenchmark { - benchmarkInterface = BenchmarkUnsupported tt, - benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza - } - - Just tt@(BenchmarkTypeExe ver) -> - case benchmarkStanzaMainIs stanza of - Nothing -> syntaxError line (missingField "main-is" tt) - Just file -> do - when (isJust (benchmarkStanzaBenchmarkModule stanza)) $ - warning (extraField "benchmark-module" tt) - return emptyBenchmark { - benchmarkInterface = BenchmarkExeV10 ver file, - benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza - } - - where - missingField name tt = "The '" ++ name ++ "' field is required for the " - ++ display tt ++ " benchmark type." - - extraField name tt = "The '" ++ name ++ "' field is not used for the '" - ++ display tt ++ "' benchmark type." 
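
Analogously, a benchmark stanza that validateBenchmark above accepts (the names are illustrative). exitcode-stdio-1.0 is the only benchmark type handled with a dedicated interface here, and supplying a benchmark-module field as well would merely trigger the extraField warning.

benchmark example-bench
  type:             exitcode-stdio-1.0
  main-is:          Bench.hs
  hs-source-dirs:   bench
  build-depends:    base
  default-language: Haskell2010
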
- --- --------------------------------------------------------------------------- --- The BuildInfo type - - -binfoFieldDescrs :: [FieldDescr BuildInfo] -binfoFieldDescrs = - [ boolField "buildable" - buildable (\val binfo -> binfo{buildable=val}) - , commaListField "build-tools" - disp parseBuildTool - buildTools (\xs binfo -> binfo{buildTools=xs}) - , commaListFieldWithSep vcat "build-depends" - disp parse - targetBuildDepends (\xs binfo -> binfo{targetBuildDepends=xs}) - , spaceListField "cpp-options" - showToken parseTokenQ' - cppOptions (\val binfo -> binfo{cppOptions=val}) - , spaceListField "cc-options" - showToken parseTokenQ' - ccOptions (\val binfo -> binfo{ccOptions=val}) - , spaceListField "ld-options" - showToken parseTokenQ' - ldOptions (\val binfo -> binfo{ldOptions=val}) - , commaListField "pkgconfig-depends" - disp parsePkgconfigDependency - pkgconfigDepends (\xs binfo -> binfo{pkgconfigDepends=xs}) - , listField "frameworks" - showToken parseTokenQ - frameworks (\val binfo -> binfo{frameworks=val}) - , listField "extra-framework-dirs" - showToken parseFilePathQ - extraFrameworkDirs (\val binfo -> binfo{extraFrameworkDirs=val}) - , listFieldWithSep vcat "c-sources" - showFilePath parseFilePathQ - cSources (\paths binfo -> binfo{cSources=paths}) - , listFieldWithSep vcat "js-sources" - showFilePath parseFilePathQ - jsSources (\paths binfo -> binfo{jsSources=paths}) - , simpleField "default-language" - (maybe empty disp) (option Nothing (fmap Just parseLanguageQ)) - defaultLanguage (\lang binfo -> binfo{defaultLanguage=lang}) - , listField "other-languages" - disp parseLanguageQ - otherLanguages (\langs binfo -> binfo{otherLanguages=langs}) - , listField "default-extensions" - disp parseExtensionQ - defaultExtensions (\exts binfo -> binfo{defaultExtensions=exts}) - , listField "other-extensions" - disp parseExtensionQ - otherExtensions (\exts binfo -> binfo{otherExtensions=exts}) - , listField "extensions" - disp parseExtensionQ - oldExtensions (\exts binfo -> binfo{oldExtensions=exts}) - - , listFieldWithSep vcat "extra-libraries" - showToken parseTokenQ - extraLibs (\xs binfo -> binfo{extraLibs=xs}) - , listFieldWithSep vcat "extra-ghci-libraries" - showToken parseTokenQ - extraGHCiLibs (\xs binfo -> binfo{extraGHCiLibs=xs}) - , listField "extra-lib-dirs" - showFilePath parseFilePathQ - extraLibDirs (\xs binfo -> binfo{extraLibDirs=xs}) - , listFieldWithSep vcat "includes" - showFilePath parseFilePathQ - includes (\paths binfo -> binfo{includes=paths}) - , listFieldWithSep vcat "install-includes" - showFilePath parseFilePathQ - installIncludes (\paths binfo -> binfo{installIncludes=paths}) - , listField "include-dirs" - showFilePath parseFilePathQ - includeDirs (\paths binfo -> binfo{includeDirs=paths}) - , listField "hs-source-dirs" - showFilePath parseFilePathQ - hsSourceDirs (\paths binfo -> binfo{hsSourceDirs=paths}) - , listFieldWithSep vcat "other-modules" - disp parseModuleNameQ - otherModules (\val binfo -> binfo{otherModules=val}) - , optsField "ghc-prof-options" GHC - profOptions (\val binfo -> binfo{profOptions=val}) - , optsField "ghcjs-prof-options" GHCJS - profOptions (\val binfo -> binfo{profOptions=val}) - , optsField "ghc-shared-options" GHC - sharedOptions (\val binfo -> binfo{sharedOptions=val}) - , optsField "ghcjs-shared-options" GHCJS - sharedOptions (\val binfo -> binfo{sharedOptions=val}) - , optsField "ghc-options" GHC - options (\path binfo -> binfo{options=path}) - , optsField "ghcjs-options" GHCJS - options (\path binfo -> binfo{options=path}) - 
, optsField "jhc-options" JHC - options (\path binfo -> binfo{options=path}) - - -- NOTE: Hugs and NHC are not supported anymore, but these fields are kept - -- around for backwards compatibility. - , optsField "hugs-options" Hugs - options (const id) - , optsField "nhc98-options" NHC - options (const id) - ] - -storeXFieldsBI :: UnrecFieldParser BuildInfo -storeXFieldsBI (f@('x':'-':_),val) bi = Just bi{ customFieldsBI = (f,val):customFieldsBI bi } -storeXFieldsBI _ _ = Nothing - ------------------------------------------------------------------------------- - -flagFieldDescrs :: [FieldDescr Flag] -flagFieldDescrs = - [ simpleField "description" - showFreeText parseFreeText - flagDescription (\val fl -> fl{ flagDescription = val }) - , boolField "default" - flagDefault (\val fl -> fl{ flagDefault = val }) - , boolField "manual" - flagManual (\val fl -> fl{ flagManual = val }) - ] - ------------------------------------------------------------------------------- - -sourceRepoFieldDescrs :: [FieldDescr SourceRepo] -sourceRepoFieldDescrs = - [ simpleField "type" - (maybe empty disp) (fmap Just parse) - repoType (\val repo -> repo { repoType = val }) - , simpleField "location" - (maybe empty showFreeText) (fmap Just parseFreeText) - repoLocation (\val repo -> repo { repoLocation = val }) - , simpleField "module" - (maybe empty showToken) (fmap Just parseTokenQ) - repoModule (\val repo -> repo { repoModule = val }) - , simpleField "branch" - (maybe empty showToken) (fmap Just parseTokenQ) - repoBranch (\val repo -> repo { repoBranch = val }) - , simpleField "tag" - (maybe empty showToken) (fmap Just parseTokenQ) - repoTag (\val repo -> repo { repoTag = val }) - , simpleField "subdir" - (maybe empty showFilePath) (fmap Just parseFilePathQ) - repoSubdir (\val repo -> repo { repoSubdir = val }) - ] - ------------------------------------------------------------------------------- - -setupBInfoFieldDescrs :: [FieldDescr SetupBuildInfo] -setupBInfoFieldDescrs = - [ commaListFieldWithSep vcat "setup-depends" - disp parse - setupDepends (\xs binfo -> binfo{setupDepends=xs}) - ] - --- --------------------------------------------------------------- --- Parsing - --- | Given a parser and a filename, return the parse of the file, --- after checking if the file exists. -readAndParseFile :: (FilePath -> (String -> IO a) -> IO a) - -> (String -> ParseResult a) - -> Verbosity - -> FilePath -> IO a -readAndParseFile withFileContents' parser verbosity fpath = do - exists <- doesFileExist fpath - unless exists - (die $ "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue.") - withFileContents' fpath $ \str -> case parser str of - ParseFailed e -> do - let (line, message) = locatedErrorMsg e - dieWithLocation fpath line message - ParseOk warnings x -> do - mapM_ (warn verbosity . showPWarning fpath) $ reverse warnings - return x - -readHookedBuildInfo :: Verbosity -> FilePath -> IO HookedBuildInfo -readHookedBuildInfo = - readAndParseFile withFileContents parseHookedBuildInfo - --- |Parse the given package file. 
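
A hypothetical usage sketch combining the entry point defined just below with flattenPackageDescription from the Configuration module removed earlier in this diff; loadFlat is an invented name:

import Distribution.PackageDescription (PackageDescription)
import Distribution.PackageDescription.Configuration (flattenPackageDescription)
import Distribution.PackageDescription.Parse (readPackageDescription)
import Distribution.Verbosity (normal)

-- Read a .cabal file and collapse all conditionals, ignoring flags entirely.
loadFlat :: FilePath -> IO PackageDescription
loadFlat path = do
  gpd <- readPackageDescription normal path
  return (flattenPackageDescription gpd)
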
-readPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription -readPackageDescription = - readAndParseFile withUTF8FileContents parsePackageDescription - -stanzas :: [Field] -> [[Field]] -stanzas [] = [] -stanzas (f:fields) = (f:this) : stanzas rest - where - (this, rest) = break isStanzaHeader fields - -isStanzaHeader :: Field -> Bool -isStanzaHeader (F _ f _) = f == "executable" -isStanzaHeader _ = False - ------------------------------------------------------------------------------- - - -mapSimpleFields :: (Field -> ParseResult Field) -> [Field] - -> ParseResult [Field] -mapSimpleFields f = mapM walk - where - walk fld@F{} = f fld - walk (IfBlock l c fs1 fs2) = do - fs1' <- mapM walk fs1 - fs2' <- mapM walk fs2 - return (IfBlock l c fs1' fs2') - walk (Section ln n l fs1) = do - fs1' <- mapM walk fs1 - return (Section ln n l fs1') - --- prop_isMapM fs = mapSimpleFields return fs == return fs - - --- names of fields that represents dependencies --- TODO: maybe build-tools should go here too? -constraintFieldNames :: [String] -constraintFieldNames = ["build-depends"] - --- Possible refactoring would be to have modifiers be explicit about what --- they add and define an accessor that specifies what the dependencies --- are. This way we would completely reuse the parsing knowledge from the --- field descriptor. -parseConstraint :: Field -> ParseResult [Dependency] -parseConstraint (F l n v) - | n `elem` constraintFieldNames = runP l n (parseCommaList parse) v -parseConstraint f = userBug $ "Constraint was expected (got: " ++ show f ++ ")" - -{- -headerFieldNames :: [String] -headerFieldNames = filter (\n -> not (n `elem` constraintFieldNames)) - . map fieldName $ pkgDescrFieldDescrs --} - -libFieldNames :: [String] -libFieldNames = map fieldName libFieldDescrs - ++ buildInfoNames ++ constraintFieldNames - --- exeFieldNames :: [String] --- exeFieldNames = map fieldName executableFieldDescrs --- ++ buildInfoNames - -buildInfoNames :: [String] -buildInfoNames = map fieldName binfoFieldDescrs - ++ map fst deprecatedFieldsBuildInfo - --- A minimal implementation of the StateT monad transformer to avoid depending --- on the 'mtl' package. -newtype StT s m a = StT { runStT :: s -> m (a,s) } - -instance Functor f => Functor (StT s f) where - fmap g (StT f) = StT $ fmap (first g) . f - -#if __GLASGOW_HASKELL__ >= 710 -instance (Monad m) => Applicative (StT s m) where -#else -instance (Monad m, Functor m) => Applicative (StT s m) where -#endif - pure a = StT (\s -> return (a,s)) - (<*>) = ap - - -instance Monad m => Monad (StT s m) where -#if __GLASGOW_HASKELL__ < 710 - return a = StT (\s -> return (a,s)) -#endif - StT f >>= g = StT $ \s -> do - (a,s') <- f s - runStT (g a) s' - -get :: Monad m => StT s m s -get = StT $ \s -> return (s, s) - -modify :: Monad m => (s -> s) -> StT s m () -modify f = StT $ \s -> return ((),f s) - -lift :: Monad m => m a -> StT s m a -lift m = StT $ \s -> m >>= \a -> return (a,s) - -evalStT :: Monad m => StT s m a -> s -> m a -evalStT st s = liftM fst $ runStT st s - --- Our monad for parsing a list/tree of fields. --- --- The state represents the remaining fields to be processed. -type PM a = StT [Field] ParseResult a - - - --- return look-ahead field or nothing if we're at the end of the file -peekField :: PM (Maybe Field) -peekField = liftM listToMaybe get - --- Unconditionally discard the first field in our state. Will error when it --- reaches end of file. (Yes, that's evil.) 
-skipField :: PM () -skipField = modify tail - ---FIXME: this should take a ByteString, not a String. We have to be able to --- decode UTF8 and handle the BOM. - --- | Parses the given file into a 'GenericPackageDescription'. --- --- In Cabal 1.2 the syntax for package descriptions was changed to a format --- with sections and possibly indented property descriptions. -parsePackageDescription :: String -> ParseResult GenericPackageDescription -parsePackageDescription file = do - - -- This function is quite complex because it needs to be able to parse - -- both pre-Cabal-1.2 and post-Cabal-1.2 files. Additionally, it contains - -- a lot of parser-related noise since we do not want to depend on Parsec. - -- - -- If we detect an pre-1.2 file we implicitly convert it to post-1.2 - -- style. See 'sectionizeFields' below for details about the conversion. - - fields0 <- readFields file `catchParseError` \err -> - let tabs = findIndentTabs file in - case err of - -- In case of a TabsError report them all at once. - TabsError tabLineNo -> reportTabsError - -- but only report the ones including and following - -- the one that caused the actual error - [ t | t@(lineNo',_) <- tabs - , lineNo' >= tabLineNo ] - _ -> parseFail err - - let cabalVersionNeeded = - head $ [ minVersionBound versionRange - | Just versionRange <- [ simpleParse v - | F _ "cabal-version" v <- fields0 ] ] - ++ [Version [0] []] - minVersionBound versionRange = - case asVersionIntervals versionRange of - [] -> Version [0] [] - ((LowerBound version _, _):_) -> version - - handleFutureVersionParseFailure cabalVersionNeeded $ do - - let sf = sectionizeFields fields0 -- ensure 1.2 format - - -- figure out and warn about deprecated stuff (warnings are collected - -- inside our parsing monad) - fields <- mapSimpleFields deprecField sf - - -- Our parsing monad takes the not-yet-parsed fields as its state. - -- After each successful parse we remove the field from the state - -- ('skipField') and move on to the next one. - -- - -- Things are complicated a bit, because fields take a tree-like - -- structure -- they can be sections or "if"/"else" conditionals. - - flip evalStT fields $ do - - -- The header consists of all simple fields up to the first section - -- (flag, library, executable). - header_fields <- getHeader [] - - -- Parses just the header fields and stores them in a - -- 'PackageDescription'. Note that our final result is a - -- 'GenericPackageDescription'; for pragmatic reasons we just store - -- the partially filled-out 'PackageDescription' inside the - -- 'GenericPackageDescription'. - pkg <- lift $ parseFields pkgDescrFieldDescrs - storeXFieldsPD - emptyPackageDescription - header_fields - - -- 'getBody' assumes that the remaining fields only consist of - -- flags, lib and exe sections. - (repos, flags, mcsetup, mlib, exes, tests, bms) <- getBody - warnIfRest -- warn if getBody did not parse up to the last field. 
- -- warn about using old/new syntax with wrong cabal-version: - maybeWarnCabalVersion (not $ oldSyntax fields0) pkg - checkForUndefinedFlags flags mlib exes tests - return $ GenericPackageDescription - pkg { sourceRepos = repos, setupBuildInfo = mcsetup } - flags mlib exes tests bms - - where - oldSyntax = all isSimpleField - reportTabsError tabs = - syntaxError (fst (head tabs)) $ - "Do not use tabs for indentation (use spaces instead)\n" - ++ " Tabs were used at (line,column): " ++ show tabs - - maybeWarnCabalVersion newsyntax pkg - | newsyntax && specVersion pkg < Version [1,2] [] - = lift $ warning $ - "A package using section syntax must specify at least\n" - ++ "'cabal-version: >= 1.2'." - - maybeWarnCabalVersion newsyntax pkg - | not newsyntax && specVersion pkg >= Version [1,2] [] - = lift $ warning $ - "A package using 'cabal-version: " - ++ displaySpecVersion (specVersionRaw pkg) - ++ "' must use section syntax. See the Cabal user guide for details." - where - displaySpecVersion (Left version) = display version - displaySpecVersion (Right versionRange) = - case asVersionIntervals versionRange of - [] {- impossible -} -> display versionRange - ((LowerBound version _, _):_) -> display (orLaterVersion version) - - maybeWarnCabalVersion _ _ = return () - - - handleFutureVersionParseFailure cabalVersionNeeded parseBody = - (unless versionOk (warning message) >> parseBody) - `catchParseError` \parseError -> case parseError of - TabsError _ -> parseFail parseError - _ | versionOk -> parseFail parseError - | otherwise -> fail message - where versionOk = cabalVersionNeeded <= cabalVersion - message = "This package requires at least Cabal version " - ++ display cabalVersionNeeded - - -- "Sectionize" an old-style Cabal file. A sectionized file has: - -- - -- * all global fields at the beginning, followed by - -- - -- * all flag declarations, followed by - -- - -- * an optional library section, and an arbitrary number of executable - -- sections (in any order). - -- - -- The current implementation just gathers all library-specific fields - -- in a library section and wraps all executable stanzas in an executable - -- section. - sectionizeFields :: [Field] -> [Field] - sectionizeFields fs - | oldSyntax fs = - let - -- "build-depends" is a local field now. To be backwards - -- compatible, we still allow it as a global field in old-style - -- package description files and translate it to a local field by - -- adding it to every non-empty section - (hdr0, exes0) = break ((=="executable") . fName) fs - (hdr, libfs0) = partition (not . (`elem` libFieldNames) . fName) hdr0 - - (deps, libfs) = partition ((== "build-depends") . fName) - libfs0 - - exes = unfoldr toExe exes0 - toExe [] = Nothing - toExe (F l e n : r) - | e == "executable" = - let (efs, r') = break ((=="executable") . fName) r - in Just (Section l "executable" n (deps ++ efs), r') - toExe _ = cabalBug "unexpected input to 'toExe'" - in - hdr ++ - (if null libfs then [] - else [Section (lineNo (head libfs)) "library" "" (deps ++ libfs)]) - ++ exes - | otherwise = fs - - isSimpleField F{} = True - isSimpleField _ = False - - -- warn if there's something at the end of the file - warnIfRest :: PM () - warnIfRest = do - s <- get - case s of - [] -> return () - _ -> lift $ warning "Ignoring trailing declarations." -- add line no. 
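
To make the pre-1.2 conversion concrete, here is a toy version of what sectionizeFields above does: gather library-specific fields into a library section, wrap each executable stanza in its own section, and copy the global build-depends into every generated section. The Field type, the libFields list, and the handling of an empty library section are all simplified relative to the real code:

data Field
  = F String String         -- a simple "name: value" field
  | Section String [Field]  -- a named section with sub-fields
  deriving Show

sectionize :: [Field] -> [Field]
sectionize fs = hdr ++ [Section "library" (deps ++ libfs)] ++ exes
  where
    libFields = ["exposed-modules", "build-depends", "hs-source-dirs"]  -- illustrative only
    isExeHeader (F n _) = n == "executable"
    isExeHeader _       = False
    (beforeExe, exeFs) = break isExeHeader fs
    hdr   = [ f | f@(F n _) <- beforeExe, n `notElem` libFields ]
    libfs = [ f | f@(F n _) <- beforeExe, n `elem` libFields, n /= "build-depends" ]
    deps  = [ f | f@(F n _) <- beforeExe, n == "build-depends" ]
    exes  = wrap exeFs
    wrap (F "executable" name : rest) =
      let (body, rest') = break isExeHeader rest
      in Section ("executable " ++ name) (deps ++ body) : wrap rest'
    wrap _ = []

main :: IO ()
main = mapM_ print $ sectionize
  [ F "name" "foo"
  , F "build-depends" "base"
  , F "exposed-modules" "Foo"
  , F "executable" "foo-exe"
  , F "main-is" "Main.hs"
  ]
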
- - -- all simple fields at the beginning of the file are (considered) header - -- fields - getHeader :: [Field] -> PM [Field] - getHeader acc = peekField >>= \mf -> case mf of - Just f@F{} -> skipField >> getHeader (f:acc) - _ -> return (reverse acc) - - -- - -- body ::= { repo | flag | library | executable | test }+ -- at most one lib - -- - -- The body consists of an optional sequence of declarations of flags and - -- an arbitrary number of executables and at most one library. - getBody :: PM ([SourceRepo], [Flag] - ,Maybe SetupBuildInfo - ,Maybe (CondTree ConfVar [Dependency] Library) - ,[(String, CondTree ConfVar [Dependency] Executable)] - ,[(String, CondTree ConfVar [Dependency] TestSuite)] - ,[(String, CondTree ConfVar [Dependency] Benchmark)]) - getBody = peekField >>= \mf -> case mf of - Just (Section line_no sec_type sec_label sec_fields) - | sec_type == "executable" -> do - when (null sec_label) $ lift $ syntaxError line_no - "'executable' needs one argument (the executable's name)" - exename <- lift $ runP line_no "executable" parseTokenQ sec_label - flds <- collectFields parseExeFields sec_fields - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - return (repos, flags, csetup, lib, (exename, flds): exes, tests, bms) - - | sec_type == "test-suite" -> do - when (null sec_label) $ lift $ syntaxError line_no - "'test-suite' needs one argument (the test suite's name)" - testname <- lift $ runP line_no "test" parseTokenQ sec_label - flds <- collectFields (parseTestFields line_no) sec_fields - - -- Check that a valid test suite type has been chosen. A type - -- field may be given inside a conditional block, so we must - -- check for that before complaining that a type field has not - -- been given. The test suite must always have a valid type, so - -- we need to check both the 'then' and 'else' blocks, though - -- the blocks need not have the same type. - let checkTestType ts ct = - let ts' = mappend ts $ condTreeData ct - -- If a conditional has only a 'then' block and no - -- 'else' block, then it cannot have a valid type - -- in every branch, unless the type is specified at - -- a higher level in the tree. - checkComponent (_, _, Nothing) = False - -- If a conditional has a 'then' block and an 'else' - -- block, both must specify a test type, unless the - -- type is specified higher in the tree. - checkComponent (_, t, Just e) = - checkTestType ts' t && checkTestType ts' e - -- Does the current node specify a test type? - hasTestType = testInterface ts' - /= testInterface emptyTestSuite - -- If the current level of the tree specifies a type, - -- then we are done. If not, then one of the conditional - -- branches below the current node must specify a type. - -- Each node may have multiple immediate children; we - -- only one need one to specify a type because the - -- configure step uses 'mappend' to join together the - -- results of flag resolution. - in hasTestType || any checkComponent (condTreeComponents ct) - if checkTestType emptyTestSuite flds - then do - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - return (repos, flags, csetup, lib, exes, - (testname, flds) : tests, bms) - else lift $ syntaxError line_no $ - "Test suite \"" ++ testname - ++ "\" is missing required field \"type\" or the field " - ++ "is not present in all conditional branches. 
The " - ++ "available test types are: " - ++ intercalate ", " (map display knownTestTypes) - - | sec_type == "benchmark" -> do - when (null sec_label) $ lift $ syntaxError line_no - "'benchmark' needs one argument (the benchmark's name)" - benchname <- lift $ runP line_no "benchmark" parseTokenQ sec_label - flds <- collectFields (parseBenchmarkFields line_no) sec_fields - - -- Check that a valid benchmark type has been chosen. A type - -- field may be given inside a conditional block, so we must - -- check for that before complaining that a type field has not - -- been given. The benchmark must always have a valid type, so - -- we need to check both the 'then' and 'else' blocks, though - -- the blocks need not have the same type. - let checkBenchmarkType ts ct = - let ts' = mappend ts $ condTreeData ct - -- If a conditional has only a 'then' block and no - -- 'else' block, then it cannot have a valid type - -- in every branch, unless the type is specified at - -- a higher level in the tree. - checkComponent (_, _, Nothing) = False - -- If a conditional has a 'then' block and an 'else' - -- block, both must specify a benchmark type, unless the - -- type is specified higher in the tree. - checkComponent (_, t, Just e) = - checkBenchmarkType ts' t && checkBenchmarkType ts' e - -- Does the current node specify a benchmark type? - hasBenchmarkType = benchmarkInterface ts' - /= benchmarkInterface emptyBenchmark - -- If the current level of the tree specifies a type, - -- then we are done. If not, then one of the conditional - -- branches below the current node must specify a type. - -- Each node may have multiple immediate children; we - -- only one need one to specify a type because the - -- configure step uses 'mappend' to join together the - -- results of flag resolution. - in hasBenchmarkType || any checkComponent (condTreeComponents ct) - if checkBenchmarkType emptyBenchmark flds - then do - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - return (repos, flags, csetup, lib, exes, - tests, (benchname, flds) : bms) - else lift $ syntaxError line_no $ - "Benchmark \"" ++ benchname - ++ "\" is missing required field \"type\" or the field " - ++ "is not present in all conditional branches. The " - ++ "available benchmark types are: " - ++ intercalate ", " (map display knownBenchmarkTypes) - - | sec_type == "library" -> do - unless (null sec_label) $ lift $ - syntaxError line_no "'library' expects no argument" - flds <- collectFields parseLibFields sec_fields - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - when (isJust lib) $ lift $ syntaxError line_no - "There can only be one library section in a package description." 
- return (repos, flags, csetup, Just flds, exes, tests, bms) - - | sec_type == "flag" -> do - when (null sec_label) $ lift $ - syntaxError line_no "'flag' needs one argument (the flag's name)" - flag <- lift $ parseFields - flagFieldDescrs - warnUnrec - (MkFlag (FlagName (lowercase sec_label)) "" True False) - sec_fields - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - return (repos, flag:flags, csetup, lib, exes, tests, bms) - - | sec_type == "source-repository" -> do - when (null sec_label) $ lift $ syntaxError line_no $ - "'source-repository' needs one argument, " - ++ "the repo kind which is usually 'head' or 'this'" - kind <- case simpleParse sec_label of - Just kind -> return kind - Nothing -> lift $ syntaxError line_no $ - "could not parse repo kind: " ++ sec_label - repo <- lift $ parseFields - sourceRepoFieldDescrs - warnUnrec - SourceRepo { - repoKind = kind, - repoType = Nothing, - repoLocation = Nothing, - repoModule = Nothing, - repoBranch = Nothing, - repoTag = Nothing, - repoSubdir = Nothing - } - sec_fields - skipField - (repos, flags, csetup, lib, exes, tests, bms) <- getBody - return (repo:repos, flags, csetup, lib, exes, tests, bms) - - | sec_type == "custom-setup" -> do - unless (null sec_label) $ lift $ - syntaxError line_no "'setup' expects no argument" - flds <- lift $ parseFields - setupBInfoFieldDescrs - warnUnrec - mempty - sec_fields - skipField - (repos, flags, csetup0, lib, exes, tests, bms) <- getBody - when (isJust csetup0) $ lift $ syntaxError line_no - "There can only be one 'custom-setup' section in a package description." - return (repos, flags, Just flds, lib, exes, tests, bms) - - | otherwise -> do - lift $ warning $ "Ignoring unknown section type: " ++ sec_type - skipField - getBody - Just f@(F {}) -> do - _ <- lift $ syntaxError (lineNo f) $ - "Plain fields are not allowed in between stanzas: " ++ show f - skipField - getBody - Just f@(IfBlock {}) -> do - _ <- lift $ syntaxError (lineNo f) $ - "If-blocks are not allowed in between stanzas: " ++ show f - skipField - getBody - Nothing -> return ([], [], Nothing, Nothing, [], [], []) - - -- Extracts all fields in a block and returns a 'CondTree'. - -- - -- We have to recurse down into conditionals and we treat fields that - -- describe dependencies specially. - collectFields :: ([Field] -> PM a) -> [Field] - -> PM (CondTree ConfVar [Dependency] a) - collectFields parser allflds = do - - let simplFlds = [ F l n v | F l n v <- allflds ] - condFlds = [ f | f@IfBlock{} <- allflds ] - sections = [ s | s@Section{} <- allflds ] - - mapM_ - (\(Section l n _ _) -> lift . warning $ - "Unexpected section '" ++ n ++ "' on line " ++ show l) - sections - - a <- parser simplFlds - - -- Dependencies must be treated specially: when we - -- parse into a CondTree, not only do we parse them into - -- the targetBuildDepends/etc field inside the - -- PackageDescription, but we also have to put the - -- combined dependencies into CondTree. - -- - -- This information is, in principle, redundant, but - -- putting it here makes it easier for the constraint - -- solver to pick a flag assignment which supports - -- all of the dependencies (because it only has - -- to check the CondTree, rather than grovel everywhere - -- inside the conditional bits). - deps <- liftM concat - . mapM (lift . parseConstraint) - . 
filter isConstraint - $ simplFlds - - ifs <- mapM processIfs condFlds - - return (CondNode a deps ifs) - where - isConstraint (F _ n _) = n `elem` constraintFieldNames - isConstraint _ = False - - processIfs (IfBlock l c t e) = do - cnd <- lift $ runP l "if" parseCondition c - t' <- collectFields parser t - e' <- case e of - [] -> return Nothing - es -> do fs <- collectFields parser es - return (Just fs) - return (cnd, t', e') - processIfs _ = cabalBug "processIfs called with wrong field type" - - parseLibFields :: [Field] -> PM Library - parseLibFields = lift . parseFields libFieldDescrs storeXFieldsLib emptyLibrary - - -- Note: we don't parse the "executable" field here, hence the tail hack. - parseExeFields :: [Field] -> PM Executable - parseExeFields = lift . parseFields (tail executableFieldDescrs) - storeXFieldsExe emptyExecutable - - parseTestFields :: LineNo -> [Field] -> PM TestSuite - parseTestFields line fields = do - x <- lift $ parseFields testSuiteFieldDescrs storeXFieldsTest - emptyTestStanza fields - lift $ validateTestSuite line x - - parseBenchmarkFields :: LineNo -> [Field] -> PM Benchmark - parseBenchmarkFields line fields = do - x <- lift $ parseFields benchmarkFieldDescrs storeXFieldsBenchmark - emptyBenchmarkStanza fields - lift $ validateBenchmark line x - - checkForUndefinedFlags :: - [Flag] -> - Maybe (CondTree ConfVar [Dependency] Library) -> - [(String, CondTree ConfVar [Dependency] Executable)] -> - [(String, CondTree ConfVar [Dependency] TestSuite)] -> - PM () - checkForUndefinedFlags flags mlib exes tests = do - let definedFlags = map flagName flags - traverse_ (checkCondTreeFlags definedFlags) mlib - mapM_ (checkCondTreeFlags definedFlags . snd) exes - mapM_ (checkCondTreeFlags definedFlags . snd) tests - - checkCondTreeFlags :: [FlagName] -> CondTree ConfVar c a -> PM () - checkCondTreeFlags definedFlags ct = do - let fv = nub $ freeVars ct - unless (all (`elem` definedFlags) fv) $ - fail $ "These flags are used without having been defined: " - ++ intercalate ", " [ n | FlagName n <- fv \\ definedFlags ] - - --- | Parse a list of fields, given a list of field descriptions, --- a structure to accumulate the parsed fields, and a function --- that can decide what to do with fields which don't match any --- of the field descriptions. -parseFields :: [FieldDescr a] -- ^ descriptions of fields we know how to - -- parse - -> UnrecFieldParser a -- ^ possibly do something with - -- unrecognized fields - -> a -- ^ accumulator - -> [Field] -- ^ fields to be parsed - -> ParseResult a -parseFields descrs unrec ini fields = - do (a, unknowns) <- foldM (parseField descrs unrec) (ini, []) fields - unless (null unknowns) $ warning $ render $ - text "Unknown fields:" <+> - commaSep (map (\(l,u) -> u ++ " (line " ++ show l ++ ")") - (reverse unknowns)) - $+$ - text "Fields allowed in this section:" $$ - nest 4 (commaSep $ map fieldName descrs) - return a - where - commaSep = fsep . punctuate comma . 
map text - -parseField :: [FieldDescr a] -- ^ list of parseable fields - -> UnrecFieldParser a -- ^ possibly do something with - -- unrecognized fields - -> (a,[(Int,String)]) -- ^ accumulated result and warnings - -> Field -- ^ the field to be parsed - -> ParseResult (a, [(Int,String)]) -parseField (FieldDescr name _ parser : fields) unrec (a, us) (F line f val) - | name == f = parser line val a >>= \a' -> return (a',us) - | otherwise = parseField fields unrec (a,us) (F line f val) -parseField [] unrec (a,us) (F l f val) = return $ - case unrec (f,val) a of -- no fields matched, see if the 'unrec' - Just a' -> (a',us) -- function wants to do anything with it - Nothing -> (a, (l,f):us) -parseField _ _ _ _ = cabalBug "'parseField' called on a non-field" - -deprecatedFields :: [(String,String)] -deprecatedFields = - deprecatedFieldsPkgDescr ++ deprecatedFieldsBuildInfo - -deprecatedFieldsPkgDescr :: [(String,String)] -deprecatedFieldsPkgDescr = [ ("other-files", "extra-source-files") ] - -deprecatedFieldsBuildInfo :: [(String,String)] -deprecatedFieldsBuildInfo = [ ("hs-source-dir","hs-source-dirs") ] - --- Handle deprecated fields -deprecField :: Field -> ParseResult Field -deprecField (F line fld val) = do - fld' <- case lookup fld deprecatedFields of - Nothing -> return fld - Just newName -> do - warning $ "The field \"" ++ fld - ++ "\" is deprecated, please use \"" ++ newName ++ "\"" - return newName - return (F line fld' val) -deprecField _ = cabalBug "'deprecField' called on a non-field" - - -parseHookedBuildInfo :: String -> ParseResult HookedBuildInfo -parseHookedBuildInfo inp = do - fields <- readFields inp - let ss@(mLibFields:exes) = stanzas fields - mLib <- parseLib mLibFields - biExes <- mapM parseExe (maybe ss (const exes) mLib) - return (mLib, biExes) - where - parseLib :: [Field] -> ParseResult (Maybe BuildInfo) - parseLib (bi@(F _ inFieldName _:_)) - | lowercase inFieldName /= "executable" = liftM Just (parseBI bi) - parseLib _ = return Nothing - - parseExe :: [Field] -> ParseResult (String, BuildInfo) - parseExe (F line inFieldName mName:bi) - | lowercase inFieldName == "executable" - = do bis <- parseBI bi - return (mName, bis) - | otherwise = syntaxError line "expecting 'executable' at top of stanza" - parseExe (_:_) = cabalBug "`parseExe' called on a non-field" - parseExe [] = syntaxError 0 "error in parsing buildinfo file. 
Expected executable stanza" - - parseBI st = parseFields binfoFieldDescrs storeXFieldsBI emptyBuildInfo st - --- --------------------------------------------------------------------------- --- Pretty printing - -writePackageDescription :: FilePath -> PackageDescription -> IO () -writePackageDescription fpath pkg = writeUTF8File fpath (showPackageDescription pkg) - ---TODO: make this use section syntax --- add equivalent for GenericPackageDescription -showPackageDescription :: PackageDescription -> String -showPackageDescription pkg = render $ - ppPackage pkg - $$ ppCustomFields (customFieldsPD pkg) - $$ (case library pkg of - Nothing -> empty - Just lib -> ppLibrary lib) - $$ vcat [ space $$ ppExecutable exe | exe <- executables pkg ] - where - ppPackage = ppFields pkgDescrFieldDescrs - ppLibrary = ppFields libFieldDescrs - ppExecutable = ppFields executableFieldDescrs - -ppCustomFields :: [(String,String)] -> Doc -ppCustomFields flds = vcat (map ppCustomField flds) - -ppCustomField :: (String,String) -> Doc -ppCustomField (name,val) = text name <> colon <+> showFreeText val - -writeHookedBuildInfo :: FilePath -> HookedBuildInfo -> IO () -writeHookedBuildInfo fpath = writeFileAtomic fpath . BS.Char8.pack - . showHookedBuildInfo - -showHookedBuildInfo :: HookedBuildInfo -> String -showHookedBuildInfo (mb_lib_bi, ex_bis) = render $ - (case mb_lib_bi of - Nothing -> empty - Just bi -> ppBuildInfo bi) - $$ vcat [ space - $$ text "executable:" <+> text name - $$ ppBuildInfo bi - | (name, bi) <- ex_bis ] - where - ppBuildInfo bi = ppFields binfoFieldDescrs bi - $$ ppCustomFields (customFieldsBI bi) - --- replace all tabs used as indentation with whitespace, also return where --- tabs were found -findIndentTabs :: String -> [(Int,Int)] -findIndentTabs = concatMap checkLine - . zip [1..] - . lines - where - checkLine (lineno, l) = - let (indent, _content) = span isSpace l - tabCols = map fst . filter ((== '\t') . snd) . zip [0..] 
- addLineNo = map (\col -> (lineno,col)) - in addLineNo (tabCols indent) - ---test_findIndentTabs = findIndentTabs $ unlines $ --- [ "foo", " bar", " \t baz", "\t biz\t", "\t\t \t mib" ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/PrettyPrint.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/PrettyPrint.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/PrettyPrint.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/PrettyPrint.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,252 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.PrettyPrint --- Copyright : Jürgen Nicklisch-Franken 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Pretty printing for cabal files --- ------------------------------------------------------------------------------ - -module Distribution.PackageDescription.PrettyPrint ( - writeGenericPackageDescription, - showGenericPackageDescription, -) where - -import Distribution.PackageDescription -import Distribution.Simple.Utils -import Distribution.ParseUtils -import Distribution.PackageDescription.Parse -import Distribution.Package -import Distribution.Text - -import Data.Monoid as Mon (Monoid(mempty)) -import Data.Maybe (isJust) -import Text.PrettyPrint - (hsep, parens, char, nest, empty, isEmpty, ($$), (<+>), - colon, (<>), text, vcat, ($+$), Doc, render) - --- | Recompile with false for regression testing -simplifiedPrinting :: Bool -simplifiedPrinting = False - --- | Writes a .cabal file from a generic package description -writeGenericPackageDescription :: FilePath -> GenericPackageDescription -> IO () -writeGenericPackageDescription fpath pkg = writeUTF8File fpath (showGenericPackageDescription pkg) - --- | Writes a generic package description to a string -showGenericPackageDescription :: GenericPackageDescription -> String -showGenericPackageDescription = render . ppGenericPackageDescription - -ppGenericPackageDescription :: GenericPackageDescription -> Doc -ppGenericPackageDescription gpd = - ppPackageDescription (packageDescription gpd) - $+$ ppGenPackageFlags (genPackageFlags gpd) - $+$ ppLibrary (condLibrary gpd) - $+$ ppExecutables (condExecutables gpd) - $+$ ppTestSuites (condTestSuites gpd) - $+$ ppBenchmarks (condBenchmarks gpd) - -ppPackageDescription :: PackageDescription -> Doc -ppPackageDescription pd = ppFields pkgDescrFieldDescrs pd - $+$ ppCustomFields (customFieldsPD pd) - $+$ ppSourceRepos (sourceRepos pd) - -ppSourceRepos :: [SourceRepo] -> Doc -ppSourceRepos [] = empty -ppSourceRepos (hd:tl) = ppSourceRepo hd $+$ ppSourceRepos tl - -ppSourceRepo :: SourceRepo -> Doc -ppSourceRepo repo = - emptyLine $ text "source-repository" <+> disp (repoKind repo) $+$ - (nest indentWith (ppFields sourceRepoFieldDescrs' repo)) - where - sourceRepoFieldDescrs' = [fd | fd <- sourceRepoFieldDescrs, fieldName fd /= "kind"] - --- TODO: this is a temporary hack. Ideally, fields containing default values --- would be filtered out when the @FieldDescr a@ list is generated. 
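
A standalone illustration of the default-filtering idea that ppFieldsFiltered below implements: emit a field only when its rendered value differs from the recorded default. The Lib record and the (name, getter) pairs are invented for the example and are not Cabal's FieldDescr type:

-- Hypothetical record and field list, just to show the filtering.
data Lib = Lib { exposed :: Bool, ghcOptions :: [String] }

fields :: [(String, Lib -> String)]
fields = [ ("exposed",     show . exposed)
         , ("ghc-options", unwords . ghcOptions) ]

defaults :: [(String, String)]
defaults = [("exposed", "True"), ("ghc-options", "")]

ppNonDefault :: Lib -> [String]
ppNonDefault lib =
  [ name ++ ": " ++ value
  | (name, getter) <- fields
  , let value = getter lib
  , lookup name defaults /= Just value   -- drop fields still at their default
  ]

main :: IO ()
main = mapM_ putStrLn (ppNonDefault (Lib { exposed = True, ghcOptions = ["-Wall"] }))
-- prints only:  ghc-options: -Wall
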
-ppFieldsFiltered :: [(String, String)] -> [FieldDescr a] -> a -> Doc -ppFieldsFiltered removable fields x = ppFields (filter nondefault fields) x - where - nondefault (FieldDescr name getter _) = - maybe True (render (getter x) /=) (lookup name removable) - -binfoDefaults :: [(String, String)] -binfoDefaults = [("buildable", "True")] - -libDefaults :: [(String, String)] -libDefaults = ("exposed", "True") : binfoDefaults - -flagDefaults :: [(String, String)] -flagDefaults = [("default", "True"), ("manual", "False")] - -ppDiffFields :: [FieldDescr a] -> a -> a -> Doc -ppDiffFields fields x y = - vcat [ ppField name (getter x) - | FieldDescr name getter _ <- fields - , render (getter x) /= render (getter y) - ] - -ppCustomFields :: [(String,String)] -> Doc -ppCustomFields flds = vcat [ppCustomField f | f <- flds] - -ppCustomField :: (String,String) -> Doc -ppCustomField (name,val) = text name <> colon <+> showFreeText val - -ppGenPackageFlags :: [Flag] -> Doc -ppGenPackageFlags flds = vcat [ppFlag f | f <- flds] - -ppFlag :: Flag -> Doc -ppFlag flag@(MkFlag name _ _ _) = - emptyLine $ text "flag" <+> ppFlagName name $+$ nest indentWith fields - where - fields = ppFieldsFiltered flagDefaults flagFieldDescrs flag - -ppLibrary :: (Maybe (CondTree ConfVar [Dependency] Library)) -> Doc -ppLibrary Nothing = empty -ppLibrary (Just condTree) = - emptyLine $ text "library" $+$ nest indentWith (ppCondTree condTree Nothing ppLib) - where - ppLib lib Nothing = ppFieldsFiltered libDefaults libFieldDescrs lib - $$ ppCustomFields (customFieldsBI (libBuildInfo lib)) - ppLib lib (Just plib) = ppDiffFields libFieldDescrs lib plib - $$ ppCustomFields (customFieldsBI (libBuildInfo lib)) - -ppExecutables :: [(String, CondTree ConfVar [Dependency] Executable)] -> Doc -ppExecutables exes = - vcat [emptyLine $ text ("executable " ++ n) - $+$ nest indentWith (ppCondTree condTree Nothing ppExe)| (n,condTree) <- exes] - where - ppExe (Executable _ modulePath' buildInfo') Nothing = - (if modulePath' == "" then empty else text "main-is:" <+> text modulePath') - $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs buildInfo' - $+$ ppCustomFields (customFieldsBI buildInfo') - ppExe (Executable _ modulePath' buildInfo') - (Just (Executable _ modulePath2 buildInfo2)) = - (if modulePath' == "" || modulePath' == modulePath2 - then empty else text "main-is:" <+> text modulePath') - $+$ ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 - $+$ ppCustomFields (customFieldsBI buildInfo') - -ppTestSuites :: [(String, CondTree ConfVar [Dependency] TestSuite)] -> Doc -ppTestSuites suites = - emptyLine $ vcat [ text ("test-suite " ++ n) - $+$ nest indentWith (ppCondTree condTree Nothing ppTestSuite) - | (n,condTree) <- suites] - where - ppTestSuite testsuite Nothing = - maybe empty (\t -> text "type:" <+> disp t) - maybeTestType - $+$ maybe empty (\f -> text "main-is:" <+> text f) - (testSuiteMainIs testsuite) - $+$ maybe empty (\m -> text "test-module:" <+> disp m) - (testSuiteModule testsuite) - $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs (testBuildInfo testsuite) - $+$ ppCustomFields (customFieldsBI (testBuildInfo testsuite)) - where - maybeTestType | testInterface testsuite == mempty = Nothing - | otherwise = Just (testType testsuite) - - ppTestSuite (TestSuite _ _ buildInfo' _) - (Just (TestSuite _ _ buildInfo2 _)) = - ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 - $+$ ppCustomFields (customFieldsBI buildInfo') - - testSuiteMainIs test = case testInterface test of - TestSuiteExeV10 _ f -> Just f - _ -> Nothing - - 
testSuiteModule test = case testInterface test of - TestSuiteLibV09 _ m -> Just m - _ -> Nothing - -ppBenchmarks :: [(String, CondTree ConfVar [Dependency] Benchmark)] -> Doc -ppBenchmarks suites = - emptyLine $ vcat [ text ("benchmark " ++ n) - $+$ nest indentWith (ppCondTree condTree Nothing ppBenchmark) - | (n,condTree) <- suites] - where - ppBenchmark benchmark Nothing = - maybe empty (\t -> text "type:" <+> disp t) - maybeBenchmarkType - $+$ maybe empty (\f -> text "main-is:" <+> text f) - (benchmarkMainIs benchmark) - $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs (benchmarkBuildInfo benchmark) - $+$ ppCustomFields (customFieldsBI (benchmarkBuildInfo benchmark)) - where - maybeBenchmarkType | benchmarkInterface benchmark == mempty = Nothing - | otherwise = Just (benchmarkType benchmark) - - ppBenchmark (Benchmark _ _ buildInfo' _) - (Just (Benchmark _ _ buildInfo2 _)) = - ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 - $+$ ppCustomFields (customFieldsBI buildInfo') - - benchmarkMainIs benchmark = case benchmarkInterface benchmark of - BenchmarkExeV10 _ f -> Just f - _ -> Nothing - -ppCondition :: Condition ConfVar -> Doc -ppCondition (Var x) = ppConfVar x -ppCondition (Lit b) = text (show b) -ppCondition (CNot c) = char '!' <> (ppCondition c) -ppCondition (COr c1 c2) = parens (hsep [ppCondition c1, text "||" - <+> ppCondition c2]) -ppCondition (CAnd c1 c2) = parens (hsep [ppCondition c1, text "&&" - <+> ppCondition c2]) -ppConfVar :: ConfVar -> Doc -ppConfVar (OS os) = text "os" <> parens (disp os) -ppConfVar (Arch arch) = text "arch" <> parens (disp arch) -ppConfVar (Flag name) = text "flag" <> parens (ppFlagName name) -ppConfVar (Impl c v) = text "impl" <> parens (disp c <+> disp v) - -ppFlagName :: FlagName -> Doc -ppFlagName (FlagName name) = text name - -ppCondTree :: CondTree ConfVar [Dependency] a -> Maybe a -> (a -> Maybe a -> Doc) -> Doc -ppCondTree ct@(CondNode it _ ifs) mbIt ppIt = - let res = (vcat $ map ppIf ifs) - $+$ ppIt it mbIt - in if isJust mbIt && isEmpty res - then ppCondTree ct Nothing ppIt - else res - where - -- TODO: this ends up printing trailing spaces when combined with nest. 
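
For reference, the condition syntax that ppCondition and ppConfVar above emit looks like this; the sketch mirrors their structure but builds a plain String rather than a Doc, with a simplified Cond type:

-- Simplified condition type and renderer (not Cabal's Condition/ConfVar).
data Cond
  = Var String          -- e.g. os(windows), flag(debug)
  | Lit Bool
  | CNot Cond
  | CAnd Cond Cond
  | COr  Cond Cond

renderCond :: Cond -> String
renderCond (Var v)    = v
renderCond (Lit b)    = show b
renderCond (CNot c)   = "!" ++ renderCond c
renderCond (CAnd a b) = "(" ++ renderCond a ++ " && " ++ renderCond b ++ ")"
renderCond (COr  a b) = "(" ++ renderCond a ++ " || " ++ renderCond b ++ ")"

main :: IO ()
main = putStrLn (renderCond (CAnd (Var "os(windows)") (CNot (Var "flag(debug)"))))
-- prints (os(windows) && !flag(debug))
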
- ppIf (c, thenTree, Just elseTree) = ppIfElse it ppIt c thenTree elseTree - ppIf (c, thenTree, Nothing) = ppIf' it ppIt c thenTree - -ppIfCondition :: (Condition ConfVar) -> Doc -ppIfCondition c = (emptyLine $ text "if" <+> ppCondition c) - -ppIf' :: a -> (a -> Maybe a -> Doc) - -> Condition ConfVar - -> CondTree ConfVar [Dependency] a - -> Doc -ppIf' it ppIt c thenTree = - if isEmpty thenDoc - then Mon.mempty - else ppIfCondition c $$ nest indentWith thenDoc - where thenDoc = ppCondTree thenTree (if simplifiedPrinting then (Just it) else Nothing) ppIt - -ppIfElse :: a -> (a -> Maybe a -> Doc) - -> Condition ConfVar - -> CondTree ConfVar [Dependency] a - -> CondTree ConfVar [Dependency] a - -> Doc -ppIfElse it ppIt c thenTree elseTree = - case (isEmpty thenDoc, isEmpty elseDoc) of - (True, True) -> Mon.mempty - (False, True) -> ppIfCondition c $$ nest indentWith thenDoc - (True, False) -> ppIfCondition (cNot c) $$ nest indentWith elseDoc - (False, False) -> (ppIfCondition c $$ nest indentWith thenDoc) - $+$ (text "else" $$ nest indentWith elseDoc) - where thenDoc = ppCondTree thenTree (if simplifiedPrinting then (Just it) else Nothing) ppIt - elseDoc = ppCondTree elseTree (if simplifiedPrinting then (Just it) else Nothing) ppIt - -emptyLine :: Doc -> Doc -emptyLine d = text "" $+$ d - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription/Utils.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Utils --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Common utils used by modules under Distribution.PackageDescription.*. - -module Distribution.PackageDescription.Utils ( - cabalBug, userBug - ) where - --- ---------------------------------------------------------------------------- --- Exception and logging utils - -userBug :: String -> a -userBug msg = error $ msg ++ ". This is a bug in your .cabal file." - -cabalBug :: String -> a -cabalBug msg = error $ msg ++ ". This is possibly a bug in Cabal.\n" - ++ "Please report it to the developers: " - ++ "https://github.com/haskell/cabal/issues/new" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/PackageDescription.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1289 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines the data structure for the @.cabal@ file format. 
There are --- several parts to this structure. It has top level info and then 'Library', --- 'Executable', 'TestSuite', and 'Benchmark' sections each of which have --- associated 'BuildInfo' data that's used to build the library, exe, test, or --- benchmark. To further complicate things there is both a 'PackageDescription' --- and a 'GenericPackageDescription'. This distinction relates to cabal --- configurations. When we initially read a @.cabal@ file we get a --- 'GenericPackageDescription' which has all the conditional sections. --- Before actually building a package we have to decide --- on each conditional. Once we've done that we get a 'PackageDescription'. --- It was done this way initially to avoid breaking too much stuff when the --- feature was introduced. It could probably do with being rationalised at some --- point to make it simpler. - -module Distribution.PackageDescription ( - -- * Package descriptions - PackageDescription(..), - emptyPackageDescription, - specVersion, - descCabalVersion, - BuildType(..), - knownBuildTypes, - - -- ** Renaming - ModuleRenaming(..), - defaultRenaming, - lookupRenaming, - - -- ** Libraries - Library(..), - ModuleReexport(..), - emptyLibrary, - withLib, - hasLibs, - libModules, - - -- ** Executables - Executable(..), - emptyExecutable, - withExe, - hasExes, - exeModules, - - -- * Tests - TestSuite(..), - TestSuiteInterface(..), - TestType(..), - testType, - knownTestTypes, - emptyTestSuite, - hasTests, - withTest, - testModules, - enabledTests, - - -- * Benchmarks - Benchmark(..), - BenchmarkInterface(..), - BenchmarkType(..), - benchmarkType, - knownBenchmarkTypes, - emptyBenchmark, - hasBenchmarks, - withBenchmark, - benchmarkModules, - enabledBenchmarks, - - -- * Build information - BuildInfo(..), - emptyBuildInfo, - allBuildInfo, - allLanguages, - allExtensions, - usedExtensions, - hcOptions, - hcProfOptions, - hcSharedOptions, - - -- ** Supplementary build information - HookedBuildInfo, - emptyHookedBuildInfo, - updatePackageDescription, - - -- * package configuration - GenericPackageDescription(..), - Flag(..), FlagName(..), FlagAssignment, - CondTree(..), ConfVar(..), Condition(..), - cNot, cAnd, cOr, - - -- * Source repositories - SourceRepo(..), - RepoKind(..), - RepoType(..), - knownRepoTypes, - - -- * Custom setup build information - SetupBuildInfo(..), - ) where - -import Distribution.Compat.Binary -import qualified Distribution.Compat.Semigroup as Semi ((<>)) -import Distribution.Compat.Semigroup as Semi (Monoid(..), Semigroup, gmempty) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP ((<++)) -import Distribution.Package -import Distribution.ModuleName -import Distribution.Version -import Distribution.License -import Distribution.Compiler -import Distribution.System -import Distribution.Text -import Language.Haskell.Extension - -import Data.Data (Data) -import Data.Foldable (traverse_) -import Data.List (nub, intercalate) -import Data.Maybe (fromMaybe, maybeToList) -import Data.Foldable as Fold (Foldable(foldMap)) -import Data.Traversable as Trav (Traversable(traverse)) -import Data.Typeable ( Typeable ) -import Control.Applicative as AP (Alternative(..), Applicative(..)) -import Control.Monad (MonadPlus(mplus,mzero), ap) -import GHC.Generics (Generic) -import Text.PrettyPrint as Disp -import qualified Data.Char as Char (isAlphaNum, isDigit, toLower) -import qualified Data.Map as Map -import Data.Map (Map) - --- ----------------------------------------------------------------------------- --- 
The PackageDescription type - --- | This data type is the internal representation of the file @pkg.cabal@. --- It contains two kinds of information about the package: information --- which is needed for all packages, such as the package name and version, and --- information which is needed for the simple build system only, such as --- the compiler options and library name. --- -data PackageDescription - = PackageDescription { - -- the following are required by all packages: - package :: PackageIdentifier, - license :: License, - licenseFiles :: [FilePath], - copyright :: String, - maintainer :: String, - author :: String, - stability :: String, - testedWith :: [(CompilerFlavor,VersionRange)], - homepage :: String, - pkgUrl :: String, - bugReports :: String, - sourceRepos :: [SourceRepo], - synopsis :: String, -- ^A one-line summary of this package - description :: String, -- ^A more verbose description of this package - category :: String, - customFieldsPD :: [(String,String)], -- ^Custom fields starting - -- with x-, stored in a - -- simple assoc-list. - - -- | YOU PROBABLY DON'T WANT TO USE THIS FIELD. This field is - -- special! Depending on how far along processing the - -- PackageDescription we are, the contents of this field are - -- either nonsense, or the collected dependencies of *all* the - -- components in this package. buildDepends is initialized by - -- 'finalizePackageDescription' and 'flattenPackageDescription'; - -- prior to that, dependency info is stored in the 'CondTree' - -- built around a 'GenericPackageDescription'. When this - -- resolution is done, dependency info is written to the inner - -- 'BuildInfo' and this field. This is all horrible, and #2066 - -- tracks progress to get rid of this field. - buildDepends :: [Dependency], - -- | The version of the Cabal spec that this package description uses. - -- For historical reasons this is specified with a version range but - -- only ranges of the form @>= v@ make sense. We are in the process of - -- transitioning to specifying just a single version, not a range. - specVersionRaw :: Either Version VersionRange, - buildType :: Maybe BuildType, - setupBuildInfo :: Maybe SetupBuildInfo, - -- components - library :: Maybe Library, - executables :: [Executable], - testSuites :: [TestSuite], - benchmarks :: [Benchmark], - dataFiles :: [FilePath], - dataDir :: FilePath, - extraSrcFiles :: [FilePath], - extraTmpFiles :: [FilePath], - extraDocFiles :: [FilePath] - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary PackageDescription - -instance Package PackageDescription where - packageId = package - --- | The version of the Cabal spec that this package should be interpreted --- against. --- --- Historically we used a version range but we are switching to using a single --- version. Currently we accept either. This function converts into a single --- version by ignoring upper bounds in the version range. --- -specVersion :: PackageDescription -> Version -specVersion pkg = case specVersionRaw pkg of - Left version -> version - Right versionRange -> case asVersionIntervals versionRange of - [] -> Version [0] [] - ((LowerBound version _, _):_) -> version - --- | The range of versions of the Cabal tools that this package is intended to --- work with. --- --- This function is deprecated and should not be used for new purposes, only to --- support old packages that rely on the old interpretation. 
--- -descCabalVersion :: PackageDescription -> VersionRange -descCabalVersion pkg = case specVersionRaw pkg of - Left version -> orLaterVersion version - Right versionRange -> versionRange -{-# DEPRECATED descCabalVersion "Use specVersion instead" #-} - -emptyPackageDescription :: PackageDescription -emptyPackageDescription - = PackageDescription { - package = PackageIdentifier (PackageName "") - (Version [] []), - license = UnspecifiedLicense, - licenseFiles = [], - specVersionRaw = Right anyVersion, - buildType = Nothing, - copyright = "", - maintainer = "", - author = "", - stability = "", - testedWith = [], - buildDepends = [], - homepage = "", - pkgUrl = "", - bugReports = "", - sourceRepos = [], - synopsis = "", - description = "", - category = "", - customFieldsPD = [], - setupBuildInfo = Nothing, - library = Nothing, - executables = [], - testSuites = [], - benchmarks = [], - dataFiles = [], - dataDir = "", - extraSrcFiles = [], - extraTmpFiles = [], - extraDocFiles = [] - } - --- | The type of build system used by this package. -data BuildType - = Simple -- ^ calls @Distribution.Simple.defaultMain@ - | Configure -- ^ calls @Distribution.Simple.defaultMainWithHooks defaultUserHooks@, - -- which invokes @configure@ to generate additional build - -- information used by later phases. - | Make -- ^ calls @Distribution.Make.defaultMain@ - | Custom -- ^ uses user-supplied @Setup.hs@ or @Setup.lhs@ (default) - | UnknownBuildType String - -- ^ a package that uses an unknown build type cannot actually - -- be built. Doing it this way rather than just giving a - -- parse error means we get better error messages and allows - -- you to inspect the rest of the package description. - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BuildType - -knownBuildTypes :: [BuildType] -knownBuildTypes = [Simple, Configure, Make, Custom] - -instance Text BuildType where - disp (UnknownBuildType other) = Disp.text other - disp other = Disp.text (show other) - - parse = do - name <- Parse.munch1 Char.isAlphaNum - return $ case name of - "Simple" -> Simple - "Configure" -> Configure - "Custom" -> Custom - "Make" -> Make - _ -> UnknownBuildType name - --- --------------------------------------------------------------------------- --- The SetupBuildInfo type - --- One can see this as a very cut-down version of BuildInfo below. --- To keep things simple for tools that compile Setup.hs we limit the --- options authors can specify to just Haskell package dependencies. - -data SetupBuildInfo = SetupBuildInfo { - setupDepends :: [Dependency], - defaultSetupDepends :: Bool - -- ^ Is this a default 'custom-setup' section added by the cabal-install - -- code (as opposed to user-provided)? This field is only used - -- internally, and doesn't correspond to anything in the .cabal - -- file. See #3199. - } - deriving (Generic, Show, Eq, Read, Typeable, Data) - -instance Binary SetupBuildInfo - -instance Semi.Monoid SetupBuildInfo where - mempty = SetupBuildInfo [] False - mappend = (Semi.<>) - -instance Semigroup SetupBuildInfo where - a <> b = SetupBuildInfo (setupDepends a Semi.<> setupDepends b) - (defaultSetupDepends a || defaultSetupDepends b) - --- --------------------------------------------------------------------------- --- Module renaming - --- | Renaming applied to the modules provided by a package. --- The boolean indicates whether or not to also include all of the --- original names of modules. 
Thus, @ModuleRenaming False []@ is --- "don't expose any modules, and @ModuleRenaming True [("Data.Bool", "Bool")]@ --- is, "expose all modules, but also expose @Data.Bool@ as @Bool@". --- -data ModuleRenaming = ModuleRenaming Bool [(ModuleName, ModuleName)] - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - -defaultRenaming :: ModuleRenaming -defaultRenaming = ModuleRenaming True [] - -lookupRenaming :: Package pkg => pkg -> Map PackageName ModuleRenaming -> ModuleRenaming -lookupRenaming = Map.findWithDefault defaultRenaming . packageName - -instance Binary ModuleRenaming where - -instance Monoid ModuleRenaming where - mempty = ModuleRenaming False [] - mappend = (Semi.<>) - -instance Semigroup ModuleRenaming where - ModuleRenaming b rns <> ModuleRenaming b' rns' - = ModuleRenaming (b || b') (rns ++ rns') -- ToDo: dedupe? - --- NB: parentheses are mandatory, because later we may extend this syntax --- to allow "hiding (A, B)" or other modifier words. -instance Text ModuleRenaming where - disp (ModuleRenaming True []) = Disp.empty - disp (ModuleRenaming b vs) = (if b then text "with" else Disp.empty) <+> dispRns - where dispRns = Disp.parens - (Disp.hsep - (Disp.punctuate Disp.comma (map dispEntry vs))) - dispEntry (orig, new) - | orig == new = disp orig - | otherwise = disp orig <+> text "as" <+> disp new - - parse = do Parse.string "with" >> Parse.skipSpaces - fmap (ModuleRenaming True) parseRns - <++ fmap (ModuleRenaming False) parseRns - <++ return (ModuleRenaming True []) - where parseRns = do - rns <- Parse.between (Parse.char '(') (Parse.char ')') parseList - Parse.skipSpaces - return rns - parseList = - Parse.sepBy parseEntry (Parse.char ',' >> Parse.skipSpaces) - parseEntry :: Parse.ReadP r (ModuleName, ModuleName) - parseEntry = do - orig <- parse - Parse.skipSpaces - (do _ <- Parse.string "as" - Parse.skipSpaces - new <- parse - Parse.skipSpaces - return (orig, new) - <++ - return (orig, orig)) - --- --------------------------------------------------------------------------- --- The Library type - -data Library = Library { - exposedModules :: [ModuleName], - reexportedModules :: [ModuleReexport], - requiredSignatures:: [ModuleName], -- ^ What sigs need implementations? - exposedSignatures:: [ModuleName], -- ^ What sigs are visible to users? - libExposed :: Bool, -- ^ Is the lib to be exposed by default? - libBuildInfo :: BuildInfo - } - deriving (Generic, Show, Eq, Read, Typeable, Data) - -instance Binary Library - -instance Monoid Library where - mempty = Library { - exposedModules = mempty, - reexportedModules = mempty, - requiredSignatures = mempty, - exposedSignatures = mempty, - libExposed = True, - libBuildInfo = mempty - } - mappend = (Semi.<>) - -instance Semigroup Library where - a <> b = Library { - exposedModules = combine exposedModules, - reexportedModules = combine reexportedModules, - requiredSignatures = combine requiredSignatures, - exposedSignatures = combine exposedSignatures, - libExposed = libExposed a && libExposed b, -- so False propagates - libBuildInfo = combine libBuildInfo - } - where combine field = field a `mappend` field b - -emptyLibrary :: Library -emptyLibrary = mempty - --- |does this package have any libraries? -hasLibs :: PackageDescription -> Bool -hasLibs p = maybe False (buildable . 
libBuildInfo) (library p) - --- |'Maybe' version of 'hasLibs' -maybeHasLibs :: PackageDescription -> Maybe Library -maybeHasLibs p = - library p >>= \lib -> if buildable (libBuildInfo lib) - then Just lib - else Nothing - --- |If the package description has a library section, call the given --- function with the library build info as argument. -withLib :: PackageDescription -> (Library -> IO ()) -> IO () -withLib pkg_descr f = - traverse_ f (maybeHasLibs pkg_descr) - --- | Get all the module names from the library (exposed and internal modules) --- which need to be compiled. (This does not include reexports, which --- do not need to be compiled.) -libModules :: Library -> [ModuleName] -libModules lib = exposedModules lib - ++ otherModules (libBuildInfo lib) - ++ exposedSignatures lib - ++ requiredSignatures lib - --- ----------------------------------------------------------------------------- --- Module re-exports - -data ModuleReexport = ModuleReexport { - moduleReexportOriginalPackage :: Maybe PackageName, - moduleReexportOriginalName :: ModuleName, - moduleReexportName :: ModuleName - } - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary ModuleReexport - -instance Text ModuleReexport where - disp (ModuleReexport mpkgname origname newname) = - maybe Disp.empty (\pkgname -> disp pkgname <> Disp.char ':') mpkgname - <> disp origname - <+> if newname == origname - then Disp.empty - else Disp.text "as" <+> disp newname - - parse = do - mpkgname <- Parse.option Nothing $ do - pkgname <- parse - _ <- Parse.char ':' - return (Just pkgname) - origname <- parse - newname <- Parse.option origname $ do - Parse.skipSpaces - _ <- Parse.string "as" - Parse.skipSpaces - parse - return (ModuleReexport mpkgname origname newname) - --- --------------------------------------------------------------------------- --- The Executable type - -data Executable = Executable { - exeName :: String, - modulePath :: FilePath, - buildInfo :: BuildInfo - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary Executable - -instance Monoid Executable where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup Executable where - a <> b = Executable{ - exeName = combine' exeName, - modulePath = combine modulePath, - buildInfo = combine buildInfo - } - where combine field = field a `mappend` field b - combine' field = case (field a, field b) of - ("","") -> "" - ("", x) -> x - (x, "") -> x - (x, y) -> error $ "Ambiguous values for executable field: '" - ++ x ++ "' and '" ++ y ++ "'" - -emptyExecutable :: Executable -emptyExecutable = mempty - --- |does this package have any executables? -hasExes :: PackageDescription -> Bool -hasExes p = any (buildable . buildInfo) (executables p) - --- | Perform the action on each buildable 'Executable' in the package --- description. -withExe :: PackageDescription -> (Executable -> IO ()) -> IO () -withExe pkg_descr f = - sequence_ [f exe | exe <- executables pkg_descr, buildable (buildInfo exe)] - --- | Get all the module names from an exe -exeModules :: Executable -> [ModuleName] -exeModules exe = otherModules (buildInfo exe) - --- --------------------------------------------------------------------------- --- The TestSuite type - --- | A \"test-suite\" stanza in a cabal file. 
--- -data TestSuite = TestSuite { - testName :: String, - testInterface :: TestSuiteInterface, - testBuildInfo :: BuildInfo, - testEnabled :: Bool - -- TODO: By having a 'testEnabled' field in the PackageDescription, we - -- are mixing build status information (i.e., arguments to 'configure') - -- with static package description information. This is undesirable, but - -- a better solution is waiting on the next overhaul to the - -- GenericPackageDescription -> PackageDescription resolution process. - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary TestSuite - --- | The test suite interfaces that are currently defined. Each test suite must --- specify which interface it supports. --- --- More interfaces may be defined in future, either new revisions or totally --- new interfaces. --- -data TestSuiteInterface = - - -- | Test interface \"exitcode-stdio-1.0\". The test-suite takes the form - -- of an executable. It returns a zero exit code for success, non-zero for - -- failure. The stdout and stderr channels may be logged. It takes no - -- command line parameters and nothing on stdin. - -- - TestSuiteExeV10 Version FilePath - - -- | Test interface \"detailed-0.9\". The test-suite takes the form of a - -- library containing a designated module that exports \"tests :: [Test]\". - -- - | TestSuiteLibV09 Version ModuleName - - -- | A test suite that does not conform to one of the above interfaces for - -- the given reason (e.g. unknown test type). - -- - | TestSuiteUnsupported TestType - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary TestSuiteInterface - -instance Monoid TestSuite where - mempty = TestSuite { - testName = mempty, - testInterface = mempty, - testBuildInfo = mempty, - testEnabled = False - } - mappend = (Semi.<>) - -instance Semigroup TestSuite where - a <> b = TestSuite { - testName = combine' testName, - testInterface = combine testInterface, - testBuildInfo = combine testBuildInfo, - testEnabled = testEnabled a || testEnabled b - } - where combine field = field a `mappend` field b - combine' f = case (f a, f b) of - ("", x) -> x - (x, "") -> x - (x, y) -> error "Ambiguous values for test field: '" - ++ x ++ "' and '" ++ y ++ "'" - -instance Monoid TestSuiteInterface where - mempty = TestSuiteUnsupported (TestTypeUnknown mempty (Version [] [])) - mappend = (Semi.<>) - -instance Semigroup TestSuiteInterface where - a <> (TestSuiteUnsupported _) = a - _ <> b = b - -emptyTestSuite :: TestSuite -emptyTestSuite = mempty - --- | Does this package have any test suites? -hasTests :: PackageDescription -> Bool -hasTests = any (buildable . testBuildInfo) . testSuites - --- | Get all the enabled test suites from a package. -enabledTests :: PackageDescription -> [TestSuite] -enabledTests = filter testEnabled . testSuites - --- | Perform an action on each buildable 'TestSuite' in a package. -withTest :: PackageDescription -> (TestSuite -> IO ()) -> IO () -withTest pkg_descr f = - mapM_ f $ filter (buildable . testBuildInfo) $ enabledTests pkg_descr - --- | Get all the module names from a test suite. -testModules :: TestSuite -> [ModuleName] -testModules test = (case testInterface test of - TestSuiteLibV09 _ m -> [m] - _ -> []) - ++ otherModules (testBuildInfo test) - --- | The \"test-type\" field in the test suite stanza. --- -data TestType = TestTypeExe Version -- ^ \"type: exitcode-stdio-x.y\" - | TestTypeLib Version -- ^ \"type: detailed-x.y\" - | TestTypeUnknown String Version -- ^ Some unknown test type e.g. 
\"type: foo\" - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary TestType - -knownTestTypes :: [TestType] -knownTestTypes = [ TestTypeExe (Version [1,0] []) - , TestTypeLib (Version [0,9] []) ] - -stdParse :: Text ver => (ver -> String -> res) -> Parse.ReadP r res -stdParse f = do - cs <- Parse.sepBy1 component (Parse.char '-') - _ <- Parse.char '-' - ver <- parse - let name = intercalate "-" cs - return $! f ver (lowercase name) - where - component = do - cs <- Parse.munch1 Char.isAlphaNum - if all Char.isDigit cs then Parse.pfail else return cs - -- each component must contain an alphabetic character, to avoid - -- ambiguity in identifiers like foo-1 (the 1 is the version number). - -instance Text TestType where - disp (TestTypeExe ver) = text "exitcode-stdio-" <> disp ver - disp (TestTypeLib ver) = text "detailed-" <> disp ver - disp (TestTypeUnknown name ver) = text name <> char '-' <> disp ver - - parse = stdParse $ \ver name -> case name of - "exitcode-stdio" -> TestTypeExe ver - "detailed" -> TestTypeLib ver - _ -> TestTypeUnknown name ver - - -testType :: TestSuite -> TestType -testType test = case testInterface test of - TestSuiteExeV10 ver _ -> TestTypeExe ver - TestSuiteLibV09 ver _ -> TestTypeLib ver - TestSuiteUnsupported testtype -> testtype - --- --------------------------------------------------------------------------- --- The Benchmark type - --- | A \"benchmark\" stanza in a cabal file. --- -data Benchmark = Benchmark { - benchmarkName :: String, - benchmarkInterface :: BenchmarkInterface, - benchmarkBuildInfo :: BuildInfo, - benchmarkEnabled :: Bool - -- TODO: See TODO for 'testEnabled'. - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary Benchmark - --- | The benchmark interfaces that are currently defined. Each --- benchmark must specify which interface it supports. --- --- More interfaces may be defined in future, either new revisions or --- totally new interfaces. --- -data BenchmarkInterface = - - -- | Benchmark interface \"exitcode-stdio-1.0\". The benchmark - -- takes the form of an executable. It returns a zero exit code - -- for success, non-zero for failure. The stdout and stderr - -- channels may be logged. It takes no command line parameters - -- and nothing on stdin. - -- - BenchmarkExeV10 Version FilePath - - -- | A benchmark that does not conform to one of the above - -- interfaces for the given reason (e.g. unknown benchmark type). 
- -- - | BenchmarkUnsupported BenchmarkType - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary BenchmarkInterface - -instance Monoid Benchmark where - mempty = Benchmark { - benchmarkName = mempty, - benchmarkInterface = mempty, - benchmarkBuildInfo = mempty, - benchmarkEnabled = False - } - mappend = (Semi.<>) - -instance Semigroup Benchmark where - a <> b = Benchmark { - benchmarkName = combine' benchmarkName, - benchmarkInterface = combine benchmarkInterface, - benchmarkBuildInfo = combine benchmarkBuildInfo, - benchmarkEnabled = benchmarkEnabled a || benchmarkEnabled b - } - where combine field = field a `mappend` field b - combine' f = case (f a, f b) of - ("", x) -> x - (x, "") -> x - (x, y) -> error "Ambiguous values for benchmark field: '" - ++ x ++ "' and '" ++ y ++ "'" - -instance Monoid BenchmarkInterface where - mempty = BenchmarkUnsupported (BenchmarkTypeUnknown mempty (Version [] [])) - mappend = (Semi.<>) - -instance Semigroup BenchmarkInterface where - a <> (BenchmarkUnsupported _) = a - _ <> b = b - -emptyBenchmark :: Benchmark -emptyBenchmark = mempty - --- | Does this package have any benchmarks? -hasBenchmarks :: PackageDescription -> Bool -hasBenchmarks = any (buildable . benchmarkBuildInfo) . benchmarks - --- | Get all the enabled benchmarks from a package. -enabledBenchmarks :: PackageDescription -> [Benchmark] -enabledBenchmarks = filter benchmarkEnabled . benchmarks - --- | Perform an action on each buildable 'Benchmark' in a package. -withBenchmark :: PackageDescription -> (Benchmark -> IO ()) -> IO () -withBenchmark pkg_descr f = - mapM_ f $ filter (buildable . benchmarkBuildInfo) $ enabledBenchmarks pkg_descr - --- | Get all the module names from a benchmark. -benchmarkModules :: Benchmark -> [ModuleName] -benchmarkModules benchmark = otherModules (benchmarkBuildInfo benchmark) - --- | The \"benchmark-type\" field in the benchmark stanza. --- -data BenchmarkType = BenchmarkTypeExe Version - -- ^ \"type: exitcode-stdio-x.y\" - | BenchmarkTypeUnknown String Version - -- ^ Some unknown benchmark type e.g. \"type: foo\" - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BenchmarkType - -knownBenchmarkTypes :: [BenchmarkType] -knownBenchmarkTypes = [ BenchmarkTypeExe (Version [1,0] []) ] - -instance Text BenchmarkType where - disp (BenchmarkTypeExe ver) = text "exitcode-stdio-" <> disp ver - disp (BenchmarkTypeUnknown name ver) = text name <> char '-' <> disp ver - - parse = stdParse $ \ver name -> case name of - "exitcode-stdio" -> BenchmarkTypeExe ver - _ -> BenchmarkTypeUnknown name ver - - -benchmarkType :: Benchmark -> BenchmarkType -benchmarkType benchmark = case benchmarkInterface benchmark of - BenchmarkExeV10 ver _ -> BenchmarkTypeExe ver - BenchmarkUnsupported benchmarktype -> benchmarktype - --- --------------------------------------------------------------------------- --- The BuildInfo type - --- Consider refactoring into executable and library versions. -data BuildInfo = BuildInfo { - buildable :: Bool, -- ^ component is buildable here - buildTools :: [Dependency], -- ^ tools needed to build this bit - cppOptions :: [String], -- ^ options for pre-processing Haskell code - ccOptions :: [String], -- ^ options for C compiler - ldOptions :: [String], -- ^ options for linker - pkgconfigDepends :: [Dependency], -- ^ pkg-config packages that are used - frameworks :: [String], -- ^support frameworks for Mac OS X - extraFrameworkDirs:: [String], -- ^ extra locations to find frameworks. 
- cSources :: [FilePath], - jsSources :: [FilePath], - hsSourceDirs :: [FilePath], -- ^ where to look for the Haskell module hierarchy - otherModules :: [ModuleName], -- ^ non-exposed or non-main modules - - defaultLanguage :: Maybe Language,-- ^ language used when not explicitly specified - otherLanguages :: [Language], -- ^ other languages used within the package - defaultExtensions :: [Extension], -- ^ language extensions used by all modules - otherExtensions :: [Extension], -- ^ other language extensions used within the package - oldExtensions :: [Extension], -- ^ the old extensions field, treated same as 'defaultExtensions' - - extraLibs :: [String], -- ^ what libraries to link with when compiling a program that uses your package - extraGHCiLibs :: [String], -- ^ if present, overrides extraLibs when package is loaded with GHCi. - extraLibDirs :: [String], - includeDirs :: [FilePath], -- ^directories to find .h files - includes :: [FilePath], -- ^ The .h files to be found in includeDirs - installIncludes :: [FilePath], -- ^ .h files to install with the package - options :: [(CompilerFlavor,[String])], - profOptions :: [(CompilerFlavor,[String])], - sharedOptions :: [(CompilerFlavor,[String])], - customFieldsBI :: [(String,String)], -- ^Custom fields starting - -- with x-, stored in a - -- simple assoc-list. - targetBuildDepends :: [Dependency], -- ^ Dependencies specific to a library or executable target - targetBuildRenaming :: Map PackageName ModuleRenaming - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BuildInfo - -instance Monoid BuildInfo where - mempty = BuildInfo { - buildable = True, - buildTools = [], - cppOptions = [], - ccOptions = [], - ldOptions = [], - pkgconfigDepends = [], - frameworks = [], - extraFrameworkDirs = [], - cSources = [], - jsSources = [], - hsSourceDirs = [], - otherModules = [], - defaultLanguage = Nothing, - otherLanguages = [], - defaultExtensions = [], - otherExtensions = [], - oldExtensions = [], - extraLibs = [], - extraGHCiLibs = [], - extraLibDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - options = [], - profOptions = [], - sharedOptions = [], - customFieldsBI = [], - targetBuildDepends = [], - targetBuildRenaming = Map.empty - } - mappend = (Semi.<>) - -instance Semigroup BuildInfo where - a <> b = BuildInfo { - buildable = buildable a && buildable b, - buildTools = combine buildTools, - cppOptions = combine cppOptions, - ccOptions = combine ccOptions, - ldOptions = combine ldOptions, - pkgconfigDepends = combine pkgconfigDepends, - frameworks = combineNub frameworks, - extraFrameworkDirs = combineNub extraFrameworkDirs, - cSources = combineNub cSources, - jsSources = combineNub jsSources, - hsSourceDirs = combineNub hsSourceDirs, - otherModules = combineNub otherModules, - defaultLanguage = combineMby defaultLanguage, - otherLanguages = combineNub otherLanguages, - defaultExtensions = combineNub defaultExtensions, - otherExtensions = combineNub otherExtensions, - oldExtensions = combineNub oldExtensions, - extraLibs = combine extraLibs, - extraGHCiLibs = combine extraGHCiLibs, - extraLibDirs = combineNub extraLibDirs, - includeDirs = combineNub includeDirs, - includes = combineNub includes, - installIncludes = combineNub installIncludes, - options = combine options, - profOptions = combine profOptions, - sharedOptions = combine sharedOptions, - customFieldsBI = combine customFieldsBI, - targetBuildDepends = combineNub targetBuildDepends, - targetBuildRenaming = combineMap targetBuildRenaming 
- } - where - combine field = field a `mappend` field b - combineNub field = nub (combine field) - combineMby field = field b `mplus` field a - combineMap field = Map.unionWith mappend (field a) (field b) - -emptyBuildInfo :: BuildInfo -emptyBuildInfo = mempty - --- | The 'BuildInfo' for the library (if there is one and it's buildable), and --- all buildable executables, test suites and benchmarks. Useful for gathering --- dependencies. -allBuildInfo :: PackageDescription -> [BuildInfo] -allBuildInfo pkg_descr = [ bi | Just lib <- [library pkg_descr] - , let bi = libBuildInfo lib - , buildable bi ] - ++ [ bi | exe <- executables pkg_descr - , let bi = buildInfo exe - , buildable bi ] - ++ [ bi | tst <- testSuites pkg_descr - , let bi = testBuildInfo tst - , buildable bi - , testEnabled tst ] - ++ [ bi | tst <- benchmarks pkg_descr - , let bi = benchmarkBuildInfo tst - , buildable bi - , benchmarkEnabled tst ] - --FIXME: many of the places where this is used, we actually want to look at - -- unbuildable bits too, probably need separate functions - --- | The 'Language's used by this component --- -allLanguages :: BuildInfo -> [Language] -allLanguages bi = maybeToList (defaultLanguage bi) - ++ otherLanguages bi - --- | The 'Extension's that are used somewhere by this component --- -allExtensions :: BuildInfo -> [Extension] -allExtensions bi = usedExtensions bi - ++ otherExtensions bi - --- | The 'Extensions' that are used by all modules in this component --- -usedExtensions :: BuildInfo -> [Extension] -usedExtensions bi = oldExtensions bi - ++ defaultExtensions bi - -type HookedBuildInfo = (Maybe BuildInfo, [(String, BuildInfo)]) - -emptyHookedBuildInfo :: HookedBuildInfo -emptyHookedBuildInfo = (Nothing, []) - --- |Select options for a particular Haskell compiler. -hcOptions :: CompilerFlavor -> BuildInfo -> [String] -hcOptions = lookupHcOptions options - -hcProfOptions :: CompilerFlavor -> BuildInfo -> [String] -hcProfOptions = lookupHcOptions profOptions - -hcSharedOptions :: CompilerFlavor -> BuildInfo -> [String] -hcSharedOptions = lookupHcOptions sharedOptions - -lookupHcOptions :: (BuildInfo -> [(CompilerFlavor,[String])]) - -> CompilerFlavor -> BuildInfo -> [String] -lookupHcOptions f hc bi = [ opt | (hc',opts) <- f bi - , hc' == hc - , opt <- opts ] - --- ------------------------------------------------------------ --- * Source repos --- ------------------------------------------------------------ - --- | Information about the source revision control system for a package. --- --- When specifying a repo it is useful to know the meaning or intention of the --- information as doing so enables automation. There are two obvious common --- purposes: one is to find the repo for the latest development version, the --- other is to find the repo for this specific release. The 'ReopKind' --- specifies which one we mean (or another custom one). --- --- A package can specify one or the other kind or both. Most will specify just --- a head repo but some may want to specify a repo to reconstruct the sources --- for this package release. --- --- The required information is the 'RepoType' which tells us if it's using --- 'Darcs', 'Git' for example. The 'repoLocation' and other details are --- interpreted according to the repo type. --- -data SourceRepo = SourceRepo { - -- | The kind of repo. This field is required. - repoKind :: RepoKind, - - -- | The type of the source repository system for this repo, eg 'Darcs' or - -- 'Git'. This field is required. 
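-- A minimal standalone sketch (not from the deleted sources): the option
-- lookup behind hcOptions/hcProfOptions/hcSharedOptions above is a single
-- list comprehension over the per-compiler association list; 'Flavor'
-- stands in for CompilerFlavor.
data Flavor = GHC | GHCJS deriving (Eq, Show)

lookupOpts :: Flavor -> [(Flavor, [String])] -> [String]
lookupOpts hc table = [ opt | (hc', opts) <- table, hc' == hc, opt <- opts ]

main :: IO ()
main = print (lookupOpts GHC [(GHC, ["-O2"]), (GHCJS, ["-dedupe"]), (GHC, ["-threaded"])])
-- expected: ["-O2","-threaded"]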
- repoType :: Maybe RepoType, - - -- | The location of the repository. For most 'RepoType's this is a URL. - -- This field is required. - repoLocation :: Maybe String, - - -- | 'CVS' can put multiple \"modules\" on one server and requires a - -- module name in addition to the location to identify a particular repo. - -- Logically this is part of the location but unfortunately has to be - -- specified separately. This field is required for the 'CVS' 'RepoType' and - -- should not be given otherwise. - repoModule :: Maybe String, - - -- | The name or identifier of the branch, if any. Many source control - -- systems have the notion of multiple branches in a repo that exist in the - -- same location. For example 'Git' and 'CVS' use this while systems like - -- 'Darcs' use different locations for different branches. This field is - -- optional but should be used if necessary to identify the sources, - -- especially for the 'RepoThis' repo kind. - repoBranch :: Maybe String, - - -- | The tag identify a particular state of the repository. This should be - -- given for the 'RepoThis' repo kind and not for 'RepoHead' kind. - -- - repoTag :: Maybe String, - - -- | Some repositories contain multiple projects in different subdirectories - -- This field specifies the subdirectory where this packages sources can be - -- found, eg the subdirectory containing the @.cabal@ file. It is interpreted - -- relative to the root of the repository. This field is optional. If not - -- given the default is \".\" ie no subdirectory. - repoSubdir :: Maybe FilePath -} - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary SourceRepo - --- | What this repo info is for, what it represents. --- -data RepoKind = - -- | The repository for the \"head\" or development version of the project. - -- This repo is where we should track the latest development activity or - -- the usual repo people should get to contribute patches. - RepoHead - - -- | The repository containing the sources for this exact package version - -- or release. For this kind of repo a tag should be given to give enough - -- information to re-create the exact sources. - | RepoThis - - | RepoKindUnknown String - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary RepoKind - --- | An enumeration of common source control systems. The fields used in the --- 'SourceRepo' depend on the type of repo. The tools and methods used to --- obtain and track the repo depend on the repo type. 
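-- A minimal standalone sketch (not from the deleted sources) of how
-- repo-type strings such as those stored in 'repoType' are classified;
-- it mirrors the classifyRepoType/repoTypeAliases definitions that
-- follow: known types match case-insensitively on their display name or
-- an alias, anything else becomes an "other" type.
import Data.Char (toLower)
import Data.Maybe (fromMaybe)

data Repo = Darcs | Git | SVN | CVS | Mercurial | Bazaar | OtherRepo String
  deriving (Eq, Show)

aliases :: Repo -> [String]
aliases Bazaar    = ["bzr"]
aliases Mercurial = ["hg"]
aliases _         = []

classify :: String -> Repo
classify s = fromMaybe (OtherRepo s) (lookup (map toLower s) table)
  where
    table = [ (name, r)
            | r <- [Darcs, Git, SVN, CVS, Mercurial, Bazaar]
            , name <- map toLower (show r) : aliases r ]

main :: IO ()
main = mapM_ (print . classify) ["git", "HG", "fossil"]
-- expected: Git, Mercurial, OtherRepo "fossil"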
--- -data RepoType = Darcs | Git | SVN | CVS - | Mercurial | GnuArch | Bazaar | Monotone - | OtherRepoType String - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary RepoType - -knownRepoTypes :: [RepoType] -knownRepoTypes = [Darcs, Git, SVN, CVS - ,Mercurial, GnuArch, Bazaar, Monotone] - -repoTypeAliases :: RepoType -> [String] -repoTypeAliases Bazaar = ["bzr"] -repoTypeAliases Mercurial = ["hg"] -repoTypeAliases GnuArch = ["arch"] -repoTypeAliases _ = [] - -instance Text RepoKind where - disp RepoHead = Disp.text "head" - disp RepoThis = Disp.text "this" - disp (RepoKindUnknown other) = Disp.text other - - parse = do - name <- ident - return $ case lowercase name of - "head" -> RepoHead - "this" -> RepoThis - _ -> RepoKindUnknown name - -instance Text RepoType where - disp (OtherRepoType other) = Disp.text other - disp other = Disp.text (lowercase (show other)) - parse = fmap classifyRepoType ident - -classifyRepoType :: String -> RepoType -classifyRepoType s = - fromMaybe (OtherRepoType s) $ lookup (lowercase s) repoTypeMap - where - repoTypeMap = [ (name, repoType') - | repoType' <- knownRepoTypes - , name <- display repoType' : repoTypeAliases repoType' ] - -ident :: Parse.ReadP r String -ident = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') - -lowercase :: String -> String -lowercase = map Char.toLower - --- ------------------------------------------------------------ --- * Utils --- ------------------------------------------------------------ - -updatePackageDescription :: HookedBuildInfo -> PackageDescription -> PackageDescription -updatePackageDescription (mb_lib_bi, exe_bi) p - = p{ executables = updateExecutables exe_bi (executables p) - , library = updateLibrary mb_lib_bi (library p) - } - where - updateLibrary :: Maybe BuildInfo -> Maybe Library -> Maybe Library - updateLibrary (Just bi) (Just lib) = Just (lib{libBuildInfo = bi `mappend` libBuildInfo lib}) - updateLibrary Nothing mb_lib = mb_lib - updateLibrary (Just _) Nothing = Nothing - - updateExecutables :: [(String, BuildInfo)] -- ^[(exeName, new buildinfo)] - -> [Executable] -- ^list of executables to update - -> [Executable] -- ^list with exeNames updated - updateExecutables exe_bi' executables' = foldr updateExecutable executables' exe_bi' - - updateExecutable :: (String, BuildInfo) -- ^(exeName, new buildinfo) - -> [Executable] -- ^list of executables to update - -> [Executable] -- ^list with exeName updated - updateExecutable _ [] = [] - updateExecutable exe_bi'@(name,bi) (exe:exes) - | exeName exe == name = exe{buildInfo = bi `mappend` buildInfo exe} : exes - | otherwise = exe : updateExecutable exe_bi' exes - --- --------------------------------------------------------------------------- --- The GenericPackageDescription type - -data GenericPackageDescription = - GenericPackageDescription { - packageDescription :: PackageDescription, - genPackageFlags :: [Flag], - condLibrary :: Maybe (CondTree ConfVar [Dependency] Library), - condExecutables :: [(String, CondTree ConfVar [Dependency] Executable)], - condTestSuites :: [(String, CondTree ConfVar [Dependency] TestSuite)], - condBenchmarks :: [(String, CondTree ConfVar [Dependency] Benchmark)] - } - deriving (Show, Eq, Typeable, Data, Generic) - -instance Package GenericPackageDescription where - packageId = packageId . 
packageDescription - -instance Binary GenericPackageDescription - --- | A flag can represent a feature to be included, or a way of linking --- a target against its dependencies, or in fact whatever you can think of. -data Flag = MkFlag - { flagName :: FlagName - , flagDescription :: String - , flagDefault :: Bool - , flagManual :: Bool - } - deriving (Show, Eq, Typeable, Data, Generic) - -instance Binary Flag - --- | A 'FlagName' is the name of a user-defined configuration flag -newtype FlagName = FlagName String - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary FlagName - --- | A 'FlagAssignment' is a total or partial mapping of 'FlagName's to --- 'Bool' flag values. It represents the flags chosen by the user or --- discovered during configuration. For example @--flags=foo --flags=-bar@ --- becomes @[("foo", True), ("bar", False)]@ --- -type FlagAssignment = [(FlagName, Bool)] - --- | A @ConfVar@ represents the variable type used. -data ConfVar = OS OS - | Arch Arch - | Flag FlagName - | Impl CompilerFlavor VersionRange - deriving (Eq, Show, Typeable, Data, Generic) - -instance Binary ConfVar - --- | A boolean expression parameterized over the variable type used. -data Condition c = Var c - | Lit Bool - | CNot (Condition c) - | COr (Condition c) (Condition c) - | CAnd (Condition c) (Condition c) - deriving (Show, Eq, Typeable, Data, Generic) - --- | Boolean negation of a 'Condition' value. -cNot :: Condition a -> Condition a -cNot (Lit b) = Lit (not b) -cNot (CNot c) = c -cNot c = CNot c - --- | Boolean AND of two 'Condtion' values. -cAnd :: Condition a -> Condition a -> Condition a -cAnd (Lit False) _ = Lit False -cAnd _ (Lit False) = Lit False -cAnd (Lit True) x = x -cAnd x (Lit True) = x -cAnd x y = CAnd x y - --- | Boolean OR of two 'Condition' values. 
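-- A minimal standalone sketch (not from the deleted sources) of how the
-- Condition algebra above (Var/Lit/CNot/COr/CAnd) is typically consumed,
-- namely by evaluating it against a FlagAssignment-style list of
-- (flag, Bool) pairs.  The 'Cond' type and 'evalCond' are illustrative
-- stand-ins, not part of the Cabal API.
data Cond v
  = Var v
  | Lit Bool
  | CNot (Cond v)
  | COr  (Cond v) (Cond v)
  | CAnd (Cond v) (Cond v)
  deriving Show

evalCond :: (v -> Bool) -> Cond v -> Bool
evalCond env (Var v)    = env v
evalCond _   (Lit b)    = b
evalCond env (CNot c)   = not (evalCond env c)
evalCond env (COr a b)  = evalCond env a || evalCond env b
evalCond env (CAnd a b) = evalCond env a && evalCond env b

main :: IO ()
main = print (evalCond env (CAnd (Var "debug") (CNot (Var "threaded"))))
  where
    assignment = [("debug", True), ("threaded", False)]
    env flag   = maybe False id (lookup flag assignment)
-- expected: True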
-cOr :: Eq v => Condition v -> Condition v -> Condition v -cOr (Lit True) _ = Lit True -cOr _ (Lit True) = Lit True -cOr (Lit False) x = x -cOr x (Lit False) = x -cOr c (CNot d) - | c == d = Lit True -cOr (CNot c) d - | c == d = Lit True -cOr x y = COr x y - -instance Functor Condition where - f `fmap` Var c = Var (f c) - _ `fmap` Lit c = Lit c - f `fmap` CNot c = CNot (fmap f c) - f `fmap` COr c d = COr (fmap f c) (fmap f d) - f `fmap` CAnd c d = CAnd (fmap f c) (fmap f d) - -instance Foldable Condition where - f `foldMap` Var c = f c - _ `foldMap` Lit _ = mempty - f `foldMap` CNot c = Fold.foldMap f c - f `foldMap` COr c d = foldMap f c `mappend` foldMap f d - f `foldMap` CAnd c d = foldMap f c `mappend` foldMap f d - -instance Traversable Condition where - f `traverse` Var c = Var `fmap` f c - _ `traverse` Lit c = pure $ Lit c - f `traverse` CNot c = CNot `fmap` Trav.traverse f c - f `traverse` COr c d = COr `fmap` traverse f c <*> traverse f d - f `traverse` CAnd c d = CAnd `fmap` traverse f c <*> traverse f d - -instance Applicative Condition where - pure = Var - (<*>) = ap - -instance Monad Condition where - return = AP.pure - -- Terminating cases - (>>=) (Lit x) _ = Lit x - (>>=) (Var x) f = f x - -- Recursing cases - (>>=) (CNot x ) f = CNot (x >>= f) - (>>=) (COr x y) f = COr (x >>= f) (y >>= f) - (>>=) (CAnd x y) f = CAnd (x >>= f) (y >>= f) - -instance Monoid (Condition a) where - mempty = Lit False - mappend = (Semi.<>) - -instance Semigroup (Condition a) where - (<>) = COr - -instance Alternative Condition where - empty = mempty - (<|>) = mappend - -instance MonadPlus Condition where - mzero = mempty - mplus = mappend - -instance Binary c => Binary (Condition c) - -data CondTree v c a = CondNode - { condTreeData :: a - , condTreeConstraints :: c - , condTreeComponents :: [( Condition v - , CondTree v c a - , Maybe (CondTree v c a))] - } - deriving (Show, Eq, Typeable, Data, Generic) - -instance (Binary v, Binary c, Binary a) => Binary (CondTree v c a) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Package.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Package.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Package.hs 2016-11-07 10:02:22.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Package.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,242 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Package --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Defines a package identifier along with a parser and pretty printer for it. --- 'PackageIdentifier's consist of a name and an exact version. It also defines --- a 'Dependency' data type. A dependency is a package name and a version --- range, like @\"foo >= 1.2 && < 2\"@. 
- -module Distribution.Package ( - -- * Package ids - PackageName(..), - PackageIdentifier(..), - PackageId, - - -- * Package keys/installed package IDs (used for linker symbols) - ComponentId(..), - UnitId(..), - mkUnitId, - mkLegacyUnitId, - getHSLibraryName, - InstalledPackageId, -- backwards compat - - -- * ABI hash - AbiHash(..), - - -- * Package source dependencies - Dependency(..), - thisPackageVersion, - notThisPackageVersion, - simplifyDependency, - - -- * Package classes - Package(..), packageName, packageVersion, - HasUnitId(..), - installedPackageId, - PackageInstalled(..), - ) where - -import Distribution.Version - ( Version(..), VersionRange, anyVersion, thisVersion - , notThisVersion, simplifyVersionRange ) - -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import Distribution.Compat.ReadP -import Distribution.Compat.Binary -import Distribution.Text - -import Control.DeepSeq (NFData(..)) -import qualified Data.Char as Char - ( isDigit, isAlphaNum, ) -import Data.Data ( Data ) -import Data.List ( intercalate ) -import Data.Typeable ( Typeable ) -import GHC.Generics (Generic) -import Text.PrettyPrint ((<>), (<+>), text) - -newtype PackageName = PackageName { unPackageName :: String } - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary PackageName - -instance Text PackageName where - disp (PackageName n) = Disp.text n - parse = do - ns <- Parse.sepBy1 component (Parse.char '-') - return (PackageName (intercalate "-" ns)) - where - component = do - cs <- Parse.munch1 Char.isAlphaNum - if all Char.isDigit cs then Parse.pfail else return cs - -- each component must contain an alphabetic character, to avoid - -- ambiguity in identifiers like foo-1 (the 1 is the version number). - -instance NFData PackageName where - rnf (PackageName pkg) = rnf pkg - --- | Type alias so we can use the shorter name PackageId. -type PackageId = PackageIdentifier - --- | The name and version of a package. -data PackageIdentifier - = PackageIdentifier { - pkgName :: PackageName, -- ^The name of this package, eg. foo - pkgVersion :: Version -- ^the version of this package, eg 1.2 - } - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary PackageIdentifier - -instance Text PackageIdentifier where - disp (PackageIdentifier n v) = case v of - Version [] _ -> disp n -- if no version, don't show version. - _ -> disp n <> Disp.char '-' <> disp v - - parse = do - n <- parse - v <- (Parse.char '-' >> parse) <++ return (Version [] []) - return (PackageIdentifier n v) - -instance NFData PackageIdentifier where - rnf (PackageIdentifier name version) = rnf name `seq` rnf version - --- ------------------------------------------------------------ --- * Component Source Hash --- ------------------------------------------------------------ - --- | A 'ComponentId' uniquely identifies the transitive source --- code closure of a component. For non-Backpack components, it also --- serves as the basis for install paths, symbols, etc. --- -data ComponentId - = ComponentId String - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -{-# DEPRECATED InstalledPackageId "Use UnitId instead" #-} -type InstalledPackageId = UnitId - -instance Binary ComponentId - -instance Text ComponentId where - disp (ComponentId str) = text str - - parse = ComponentId `fmap` Parse.munch1 abi_char - where abi_char c = Char.isAlphaNum c || c `elem` "-_." 
- -instance NFData ComponentId where - rnf (ComponentId pk) = rnf pk - --- | Returns library name prefixed with HS, suitable for filenames -getHSLibraryName :: UnitId -> String -getHSLibraryName (SimpleUnitId (ComponentId s)) = "HS" ++ s - --- | For now, there is no distinction between component IDs --- and unit IDs in Cabal. -newtype UnitId = SimpleUnitId ComponentId - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, Binary, Text, NFData) - --- | Makes a simple-style UnitId from a string. -mkUnitId :: String -> UnitId -mkUnitId = SimpleUnitId . ComponentId - --- | Make an old-style UnitId from a package identifier -mkLegacyUnitId :: PackageId -> UnitId -mkLegacyUnitId = SimpleUnitId . ComponentId . display - --- ------------------------------------------------------------ --- * Package source dependencies --- ------------------------------------------------------------ - --- | Describes a dependency on a source package (API) --- -data Dependency = Dependency PackageName VersionRange - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary Dependency - -instance Text Dependency where - disp (Dependency name ver) = - disp name <+> disp ver - - parse = do name <- parse - Parse.skipSpaces - ver <- parse <++ return anyVersion - Parse.skipSpaces - return (Dependency name ver) - -thisPackageVersion :: PackageIdentifier -> Dependency -thisPackageVersion (PackageIdentifier n v) = - Dependency n (thisVersion v) - -notThisPackageVersion :: PackageIdentifier -> Dependency -notThisPackageVersion (PackageIdentifier n v) = - Dependency n (notThisVersion v) - --- | Simplify the 'VersionRange' expression in a 'Dependency'. --- See 'simplifyVersionRange'. --- -simplifyDependency :: Dependency -> Dependency -simplifyDependency (Dependency name range) = - Dependency name (simplifyVersionRange range) - --- | Class of things that have a 'PackageIdentifier' --- --- Types in this class are all notions of a package. This allows us to have --- different types for the different phases that packages go though, from --- simple name\/id, package description, configured or installed packages. --- --- Not all kinds of packages can be uniquely identified by a --- 'PackageIdentifier'. In particular, installed packages cannot, there may be --- many installed instances of the same source package. --- -class Package pkg where - packageId :: pkg -> PackageIdentifier - -packageName :: Package pkg => pkg -> PackageName -packageName = pkgName . packageId - -packageVersion :: Package pkg => pkg -> Version -packageVersion = pkgVersion . packageId - -instance Package PackageIdentifier where - packageId = id - --- | Packages that have an installed package ID -class Package pkg => HasUnitId pkg where - installedUnitId :: pkg -> UnitId - -{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} --- | Compatibility wrapper for Cabal pre-1.24. -installedPackageId :: HasUnitId pkg => pkg -> UnitId -installedPackageId = installedUnitId - --- | Class of installed packages. --- --- The primary data type which is an instance of this package is --- 'InstalledPackageInfo', but when we are doing install plans in Cabal install --- we may have other, installed package-like things which contain more metadata. --- Installed packages have exact dependencies 'installedDepends'. 
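-- A small usage sketch (not from the deleted sources) for the identifier
-- types defined above; it assumes the Cabal 1.24 library is on the
-- package path, the package name and version are made up, and the
-- expected outputs in the comments are approximate.
import Distribution.Package
import Distribution.Text (display)
import Distribution.Version (Version (..))

main :: IO ()
main = do
  let pid = PackageIdentifier (PackageName "foo") (Version [1,2] [])
  putStrLn (display pid)                           -- foo-1.2
  putStrLn (display (thisPackageVersion pid))      -- foo ==1.2
  putStrLn (getHSLibraryName (mkLegacyUnitId pid)) -- HSfoo-1.2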
-class (HasUnitId pkg) => PackageInstalled pkg where - installedDepends :: pkg -> [UnitId] - --- ----------------------------------------------------------------------------- --- ABI hash - -newtype AbiHash = AbiHash String - deriving (Eq, Show, Read, Generic) -instance Binary AbiHash - -instance Text AbiHash where - disp (AbiHash abi) = Disp.text abi - parse = fmap AbiHash (Parse.munch Char.isAlphaNum) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ParseUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ParseUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ParseUtils.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,750 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.ParseUtils --- Copyright : (c) The University of Glasgow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Utilities for parsing 'PackageDescription' and 'InstalledPackageInfo'. --- --- The @.cabal@ file format is not trivial, especially with the introduction --- of configurations and the section syntax that goes with that. This module --- has a bunch of parsing functions that is used by the @.cabal@ parser and a --- couple others. It has the parsing framework code and also little parsers for --- many of the formats we get in various @.cabal@ file fields, like module --- names, comma separated lists etc. - --- This module is meant to be local-only to Distribution... - -{-# OPTIONS_HADDOCK hide #-} -module Distribution.ParseUtils ( - LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning, - runP, runE, ParseResult(..), catchParseError, parseFail, showPWarning, - Field(..), fName, lineNo, - FieldDescr(..), ppField, ppFields, readFields, readFieldsFlat, - showFields, showSingleNamedField, showSimpleSingleNamedField, - parseFields, parseFieldsFlat, - parseFilePathQ, parseTokenQ, parseTokenQ', - parseModuleNameQ, parseBuildTool, parsePkgconfigDependency, - parseOptVersion, parsePackageNameQ, parseVersionRangeQ, - parseTestedWithQ, parseLicenseQ, parseLanguageQ, parseExtensionQ, - parseSepList, parseCommaList, parseOptCommaList, - showFilePath, showToken, showTestedWith, showFreeText, parseFreeText, - field, simpleField, listField, listFieldWithSep, spaceListField, - commaListField, commaListFieldWithSep, commaNewLineListField, - optsField, liftField, boolField, parseQuoted, indentWith, - - UnrecFieldParser, warnUnrec, ignoreUnrec, - ) where - -import Distribution.Compiler -import Distribution.License -import Distribution.Version -import Distribution.Package -import Distribution.ModuleName -import qualified Distribution.Compat.MonadFail as Fail -import Distribution.Compat.ReadP as ReadP hiding (get) -import Distribution.ReadE -import Distribution.Text -import Distribution.Simple.Utils -import Language.Haskell.Extension - -import Text.PrettyPrint hiding (braces) -import Data.Char (isSpace, toLower, isAlphaNum, isDigit) -import Data.Maybe (fromMaybe) -import Data.Tree as Tree (Tree(..), flatten) -import qualified Data.Map as Map -import Control.Monad (foldM, ap) -import Control.Applicative as AP (Applicative(..)) -import System.FilePath (normalise) -import Data.List (sortBy) - --- 
----------------------------------------------------------------------------- - -type LineNo = Int -type Separator = ([Doc] -> Doc) - -data PError = AmbiguousParse String LineNo - | NoParse String LineNo - | TabsError LineNo - | FromString String (Maybe LineNo) - deriving (Eq, Show) - -data PWarning = PWarning String - | UTFWarning LineNo String - deriving (Eq, Show) - -showPWarning :: FilePath -> PWarning -> String -showPWarning fpath (PWarning msg) = - normalise fpath ++ ": " ++ msg -showPWarning fpath (UTFWarning line fname) = - normalise fpath ++ ":" ++ show line - ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field." - -data ParseResult a = ParseFailed PError | ParseOk [PWarning] a - deriving Show - -instance Functor ParseResult where - fmap _ (ParseFailed err) = ParseFailed err - fmap f (ParseOk ws x) = ParseOk ws $ f x - -instance Applicative ParseResult where - pure = ParseOk [] - (<*>) = ap - - -instance Monad ParseResult where - return = AP.pure - ParseFailed err >>= _ = ParseFailed err - ParseOk ws x >>= f = case f x of - ParseFailed err -> ParseFailed err - ParseOk ws' x' -> ParseOk (ws'++ws) x' - fail = Fail.fail - -instance Fail.MonadFail ParseResult where - fail s = ParseFailed (FromString s Nothing) - -catchParseError :: ParseResult a -> (PError -> ParseResult a) - -> ParseResult a -p@(ParseOk _ _) `catchParseError` _ = p -ParseFailed e `catchParseError` k = k e - -parseFail :: PError -> ParseResult a -parseFail = ParseFailed - -runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a -runP line fieldname p s = - case [ x | (x,"") <- results ] of - [a] -> ParseOk (utf8Warnings line fieldname s) a - --TODO: what is this double parse thing all about? - -- Can't we just do the all isSpace test the first time? - [] -> case [ x | (x,ys) <- results, all isSpace ys ] of - [a] -> ParseOk (utf8Warnings line fieldname s) a - [] -> ParseFailed (NoParse fieldname line) - _ -> ParseFailed (AmbiguousParse fieldname line) - _ -> ParseFailed (AmbiguousParse fieldname line) - where results = readP_to_S p s - -runE :: LineNo -> String -> ReadE a -> String -> ParseResult a -runE line fieldname p s = - case runReadE p s of - Right a -> ParseOk (utf8Warnings line fieldname s) a - Left e -> syntaxError line $ - "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s - -utf8Warnings :: LineNo -> String -> String -> [PWarning] -utf8Warnings line fieldname s = - take 1 [ UTFWarning n fieldname - | (n,l) <- zip [line..] (lines s) - , '\xfffd' `elem` l ] - -locatedErrorMsg :: PError -> (Maybe LineNo, String) -locatedErrorMsg (AmbiguousParse f n) = (Just n, - "Ambiguous parse in field '"++f++"'.") -locatedErrorMsg (NoParse f n) = (Just n, - "Parse of field '"++f++"' failed.") -locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.") -locatedErrorMsg (FromString s n) = (n, s) - -syntaxError :: LineNo -> String -> ParseResult a -syntaxError n s = ParseFailed $ FromString s (Just n) - -tabsError :: LineNo -> ParseResult a -tabsError ln = ParseFailed $ TabsError ln - -warning :: String -> ParseResult () -warning s = ParseOk [PWarning s] () - --- | Field descriptor. The parameter @a@ parameterizes over where the field's --- value is stored in. -data FieldDescr a - = FieldDescr - { fieldName :: String - , fieldGet :: a -> Doc - , fieldSet :: LineNo -> String -> a -> ParseResult a - -- ^ @fieldSet n str x@ Parses the field value from the given input - -- string @str@ and stores the result in @x@ if the parse was - -- successful. 
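-- A minimal standalone sketch (not from the deleted sources): the
-- ParseResult pattern above boils down to an Either-like type that also
-- accumulates warnings, with (>>=) concatenating warning lists (newest
-- first, as in the original's ws' ++ ws).  'Result' and 'warn' are
-- illustrative names.
data Result e w a = Failed e | Ok [w] a deriving Show

instance Functor (Result e w) where
  fmap _ (Failed e) = Failed e
  fmap f (Ok ws x)  = Ok ws (f x)

instance Applicative (Result e w) where
  pure = Ok []
  Failed e <*> _ = Failed e
  Ok ws f  <*> r = case r of
                     Failed e -> Failed e
                     Ok ws' x -> Ok (ws' ++ ws) (f x)

instance Monad (Result e w) where
  Failed e >>= _ = Failed e
  Ok ws x  >>= f = case f x of
                     Failed e  -> Failed e
                     Ok ws' x' -> Ok (ws' ++ ws) x'

warn :: w -> Result e w ()
warn w = Ok [w] ()

main :: IO ()
main = print (warn "tab used" >> warn "lowercase bool" >> return 42
                :: Result String String Int)
-- expected: Ok ["lowercase bool","tab used"] 42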
Otherwise, reports an error on line number @n@. - } - -field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a -field name showF readF = - FieldDescr name showF (\line val _st -> runP line name readF val) - --- Lift a field descriptor storing into an 'a' to a field descriptor storing --- into a 'b'. -liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b -liftField get set (FieldDescr name showF parseF) - = FieldDescr name (showF . get) - (\line str b -> do - a <- parseF line str (get b) - return (set a b)) - --- Parser combinator for simple fields. Takes a field name, a pretty printer, --- a parser function, an accessor, and a setter, returns a FieldDescr over the --- compoid structure. -simpleField :: String -> (a -> Doc) -> ReadP a a - -> (b -> a) -> (a -> b -> b) -> FieldDescr b -simpleField name showF readF get set - = liftField get set $ field name showF readF - -commaListFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaListFieldWithSep separator name showF readF get set = - liftField get set' $ - field name showF' (parseCommaList readF) - where - set' xs b = set (get b ++ xs) b - showF' = separator . punctuate comma . map showF - -commaListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaListField = commaListFieldWithSep fsep - -commaNewLineListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaNewLineListField = commaListFieldWithSep sep - -spaceListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -spaceListField name showF readF get set = - liftField get set' $ - field name showF' (parseSpaceList readF) - where - set' xs b = set (get b ++ xs) b - showF' = fsep . map showF - -listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listFieldWithSep separator name showF readF get set = - liftField get set' $ - field name showF' (parseOptCommaList readF) - where - set' xs b = set (get b ++ xs) b - showF' = separator . map showF - -listField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listField = listFieldWithSep fsep - -optsField :: String -> CompilerFlavor -> (b -> [(CompilerFlavor,[String])]) - -> ([(CompilerFlavor,[String])] -> b -> b) -> FieldDescr b -optsField name flavor get set = - liftField (fromMaybe [] . lookup flavor . get) - (\opts b -> set (reorder (update flavor opts (get b))) b) $ - field name showF (sepBy parseTokenQ' (munch1 isSpace)) - where - update _ opts l | all null opts = l --empty opts as if no opts - update f opts [] = [(f,opts)] - update f opts ((f',opts'):rest) - | f == f' = (f, opts' ++ opts) : rest - | otherwise = (f',opts') : update f opts rest - reorder = sortBy (comparing fst) - showF = hsep . map text - --- TODO: this is a bit smelly hack. It's because we want to parse bool fields --- liberally but not accept new parses. We cannot do that with ReadP --- because it does not support warnings. We need a new parser framework! -boolField :: String -> (b -> Bool) -> (Bool -> b -> b) -> FieldDescr b -boolField name get set = liftField get set (FieldDescr name showF readF) - where - showF = text . 
show - readF line str _ - | str == "True" = ParseOk [] True - | str == "False" = ParseOk [] False - | lstr == "true" = ParseOk [caseWarning] True - | lstr == "false" = ParseOk [caseWarning] False - | otherwise = ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'." - -ppFields :: [FieldDescr a] -> a -> Doc -ppFields fields x = - vcat [ ppField name (getter x) | FieldDescr name getter _ <- fields ] - -ppField :: String -> Doc -> Doc -ppField name fielddoc - | isEmpty fielddoc = empty - | name `elem` nestedFields = text name <> colon $+$ nest indentWith fielddoc - | otherwise = text name <> colon <+> fielddoc - where - nestedFields = - [ "description" - , "build-depends" - , "data-files" - , "extra-source-files" - , "extra-tmp-files" - , "exposed-modules" - , "c-sources" - , "js-sources" - , "extra-libraries" - , "includes" - , "install-includes" - , "other-modules" - , "depends" - ] - -showFields :: [FieldDescr a] -> a -> String -showFields fields = render . ($+$ text "") . ppFields fields - -showSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) -showSingleNamedField fields f = - case [ get | (FieldDescr f' get _) <- fields, f' == f ] of - [] -> Nothing - (get:_) -> Just (render . ppField f . get) - -showSimpleSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) -showSimpleSingleNamedField fields f = - case [ get | (FieldDescr f' get _) <- fields, f' == f ] of - [] -> Nothing - (get:_) -> Just (renderStyle myStyle . get) - where myStyle = style { mode = LeftMode } - -parseFields :: [FieldDescr a] -> a -> String -> ParseResult a -parseFields fields initial str = - readFields str >>= accumFields fields initial - -parseFieldsFlat :: [FieldDescr a] -> a -> String -> ParseResult a -parseFieldsFlat fields initial str = - readFieldsFlat str >>= accumFields fields initial - -accumFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a -accumFields fields = foldM setField - where - fieldMap = Map.fromList - [ (name, f) | f@(FieldDescr name _ _) <- fields ] - setField accum (F line name value) = case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value accum - Nothing -> do - warning ("Unrecognized field " ++ name ++ " on line " ++ show line) - return accum - setField accum f = do - warning ("Unrecognized stanza on line " ++ show (lineNo f)) - return accum - --- | The type of a function which, given a name-value pair of an --- unrecognized field, and the current structure being built, --- decides whether to incorporate the unrecognized field --- (by returning Just x, where x is a possibly modified version --- of the structure being built), or not (by returning Nothing). -type UnrecFieldParser a = (String,String) -> a -> Maybe a - --- | A default unrecognized field parser which simply returns Nothing, --- i.e. ignores all unrecognized fields, so warnings will be generated. -warnUnrec :: UnrecFieldParser a -warnUnrec _ _ = Nothing - --- | A default unrecognized field parser which silently (i.e. no --- warnings will be generated) ignores unrecognized fields, by --- returning the structure being built unmodified. 
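-- A minimal standalone sketch (not from the deleted sources) of the
-- FieldDescr / liftField idea above, reduced to strings: a descriptor
-- bundles a field name, a printer and a setter, and liftFieldD re-targets
-- a descriptor at a larger record through a getter/setter pair.  All
-- names here are illustrative.
data FieldD a = FieldD
  { fdName  :: String
  , fdShow  :: a -> String
  , fdParse :: String -> a -> Either String a
  }

liftFieldD :: (b -> a) -> (a -> b -> b) -> FieldD a -> FieldD b
liftFieldD get set (FieldD n sh pa) =
  FieldD n (sh . get) (\str b -> fmap (`set` b) (pa str (get b)))

data Pkg = Pkg { pkgSynopsis :: String } deriving Show

synopsisField :: FieldD Pkg
synopsisField =
  liftFieldD pkgSynopsis (\s p -> p { pkgSynopsis = s })
             (FieldD "synopsis" id (\str _ -> Right str))

main :: IO ()
main = print (fdParse synopsisField "A demo package" (Pkg ""))
-- expected: Right (Pkg {pkgSynopsis = "A demo package"})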
-ignoreUnrec :: UnrecFieldParser a -ignoreUnrec _ = Just - ------------------------------------------------------------------------------- - --- The data type for our three syntactic categories -data Field - = F LineNo String String - -- ^ A regular @: @ field - | Section LineNo String String [Field] - -- ^ A section with a name and possible parameter. The syntactic - -- structure is: - -- - -- @ - -- { - -- * - -- } - -- @ - | IfBlock LineNo String [Field] [Field] - -- ^ A conditional block with an optional else branch: - -- - -- @ - -- if { - -- * - -- } else { - -- * - -- } - -- @ - deriving (Show - ,Eq) -- for testing - -lineNo :: Field -> LineNo -lineNo (F n _ _) = n -lineNo (Section n _ _ _) = n -lineNo (IfBlock n _ _ _) = n - -fName :: Field -> String -fName (F _ n _) = n -fName (Section _ n _ _) = n -fName _ = error "fname: not a field or section" - -readFields :: String -> ParseResult [Field] -readFields input = ifelse - =<< mapM (mkField 0) - =<< mkTree tokens - - where ls = (lines . normaliseLineEndings) input - tokens = (concatMap tokeniseLine . trimLines) ls - -readFieldsFlat :: String -> ParseResult [Field] -readFieldsFlat input = mapM (mkField 0) - =<< mkTree tokens - where ls = (lines . normaliseLineEndings) input - tokens = (concatMap tokeniseLineFlat . trimLines) ls - --- attach line number and determine indentation -trimLines :: [String] -> [(LineNo, Indent, HasTabs, String)] -trimLines ls = [ (lineno, indent, hastabs, trimTrailing l') - | (lineno, l) <- zip [1..] ls - , let (sps, l') = span isSpace l - indent = length sps - hastabs = '\t' `elem` sps - , validLine l' ] - where validLine ('-':'-':_) = False -- Comment - validLine [] = False -- blank line - validLine _ = True - --- | We parse generically based on indent level and braces '{' '}'. To do that --- we split into lines and then '{' '}' tokens and other spans within a line. -data Token = - -- | The 'Line' token is for bits that /start/ a line, eg: - -- - -- > "\n blah blah { blah" - -- - -- tokenises to: - -- - -- > [Line n 2 False "blah blah", OpenBracket, Span n "blah"] - -- - -- so lines are the only ones that can have nested layout, since they - -- have a known indentation level. - -- - -- eg: we can't have this: - -- - -- > if ... { - -- > } else - -- > other - -- - -- because other cannot nest under else, since else doesn't start a line - -- so cannot have nested layout. It'd have to be: - -- - -- > if ... { - -- > } - -- > else - -- > other - -- - -- but that's not so common, people would normally use layout or - -- brackets not both in a single @if else@ construct. - -- - -- > if ... { foo : bar } - -- > else - -- > other - -- - -- this is OK - Line LineNo Indent HasTabs String - | Span LineNo String -- ^ span in a line, following brackets - | OpenBracket LineNo | CloseBracket LineNo - -type Indent = Int -type HasTabs = Bool - --- | Tokenise a single line, splitting on '{' '}' and the spans in between. --- Also trims leading & trailing space on those spans within the line. 
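-- A minimal standalone sketch (not from the deleted sources) of the
-- pre-tokenisation pass performed by trimLines above: number each line,
-- measure its indentation, flag tab characters, and drop comment and
-- blank lines.
import Data.Char (isSpace)

trim :: String -> [(Int, Int, Bool, String)]
trim input =
  [ (lineno, length sps, '\t' `elem` sps, content)
  | (lineno, l) <- zip [1 ..] (lines input)
  , let (sps, content) = span isSpace l
  , valid content ]
  where
    valid ('-':'-':_) = False   -- comment line
    valid ""          = False   -- blank line
    valid _           = True

main :: IO ()
main = mapM_ print (trim "name: foo\n  -- a comment\n\n\tbuild-depends: base\n")
-- expected: (1,0,False,"name: foo") and (4,1,True,"build-depends: base")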
-tokeniseLine :: (LineNo, Indent, HasTabs, String) -> [Token] -tokeniseLine (n0, i, t, l) = case split n0 l of - (Span _ l':ss) -> Line n0 i t l' :ss - cs -> cs - where split _ "" = [] - split n s = case span (\c -> c /='}' && c /= '{') s of - ("", '{' : s') -> OpenBracket n : split n s' - (w , '{' : s') -> mkspan n w (OpenBracket n : split n s') - ("", '}' : s') -> CloseBracket n : split n s' - (w , '}' : s') -> mkspan n w (CloseBracket n : split n s') - (w , _) -> mkspan n w [] - - mkspan n s ss | null s' = ss - | otherwise = Span n s' : ss - where s' = trimTrailing (trimLeading s) - -tokeniseLineFlat :: (LineNo, Indent, HasTabs, String) -> [Token] -tokeniseLineFlat (n0, i, t, l) - | null l' = [] - | otherwise = [Line n0 i t l'] - where - l' = trimTrailing (trimLeading l) - -trimLeading, trimTrailing :: String -> String -trimLeading = dropWhile isSpace -trimTrailing = dropWhileEndLE isSpace - - -type SyntaxTree = Tree (LineNo, HasTabs, String) - --- | Parse the stream of tokens into a tree of them, based on indent \/ layout -mkTree :: [Token] -> ParseResult [SyntaxTree] -mkTree toks = - layout 0 [] toks >>= \(trees, trailing) -> case trailing of - [] -> return trees - OpenBracket n:_ -> syntaxError n "mismatched brackets, unexpected {" - CloseBracket n:_ -> syntaxError n "mismatched brackets, unexpected }" - -- the following two should never happen: - Span n l :_ -> syntaxError n $ "unexpected span: " ++ show l - Line n _ _ l :_ -> syntaxError n $ "unexpected line: " ++ show l - - --- | Parse the stream of tokens into a tree of them, based on indent --- This parse state expect to be in a layout context, though possibly --- nested within a braces context so we may still encounter closing braces. -layout :: Indent -- ^ indent level of the parent\/previous line - -> [SyntaxTree] -- ^ accumulating param, trees in this level - -> [Token] -- ^ remaining tokens - -> ParseResult ([SyntaxTree], [Token]) - -- ^ collected trees on this level and trailing tokens -layout _ a [] = return (reverse a, []) -layout i a (s@(Line _ i' _ _):ss) | i' < i = return (reverse a, s:ss) -layout i a (Line n _ t l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - layout i (Node (n,t,l) sub:a) ss' - -layout i a (Span n l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - layout i (Node (n,False,l) sub:a) ss' - --- look ahead to see if following lines are more indented, giving a sub-tree -layout i a (Line n i' t l:ss) = do - lookahead <- layout (i'+1) [] ss - case lookahead of - ([], _) -> layout i (Node (n,t,l) [] :a) ss - (ts, ss') -> layout i (Node (n,t,l) ts :a) ss' - -layout _ _ ( OpenBracket n :_) = syntaxError n "unexpected '{'" -layout _ a (s@(CloseBracket _):ss) = return (reverse a, s:ss) -layout _ _ ( Span n l : _) = syntaxError n $ "unexpected span: " - ++ show l - --- | Parse the stream of tokens into a tree of them, based on explicit braces --- This parse state expects to find a closing bracket. 
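-- A minimal standalone sketch (not from the deleted sources) of the
-- per-line brace splitting done by tokeniseLine above, reduced to plain
-- strings; the 'Tok' type stands in for the Token type.
data Tok = Open | Close | Chunk String deriving Show

splitBraces :: String -> [Tok]
splitBraces "" = []
splitBraces s = case span (\c -> c /= '{' && c /= '}') s of
    (w, '{':s') -> chunk w (Open  : splitBraces s')
    (w, '}':s') -> chunk w (Close : splitBraces s')
    (w, _     ) -> chunk w []
  where
    chunk w rest | null w'   = rest
                 | otherwise = Chunk w' : rest
      where w' = strip w
    strip = dropWhile (== ' ') . reverse . dropWhile (== ' ') . reverse

main :: IO ()
main = print (splitBraces "if os(windows) { buildable: False }")
-- expected: [Chunk "if os(windows)",Open,Chunk "buildable: False",Close]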
-braces :: LineNo -- ^ line of the '{', used for error messages - -> [SyntaxTree] -- ^ accumulating param, trees in this level - -> [Token] -- ^ remaining tokens - -> ParseResult ([SyntaxTree],[Token]) - -- ^ collected trees on this level and trailing tokens -braces m a (Line n _ t l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - braces m (Node (n,t,l) sub:a) ss' - -braces m a (Span n l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - braces m (Node (n,False,l) sub:a) ss' - -braces m a (Line n i t l:ss) = do - lookahead <- layout (i+1) [] ss - case lookahead of - ([], _) -> braces m (Node (n,t,l) [] :a) ss - (ts, ss') -> braces m (Node (n,t,l) ts :a) ss' - -braces m a (Span n l:ss) = braces m (Node (n,False,l) []:a) ss -braces _ a (CloseBracket _:ss) = return (reverse a, ss) -braces n _ [] = syntaxError n $ "opening brace '{'" - ++ "has no matching closing brace '}'" -braces _ _ (OpenBracket n:_) = syntaxError n "unexpected '{'" - --- | Convert the parse tree into the Field AST --- Also check for dodgy uses of tabs in indentation. -mkField :: Int -> SyntaxTree -> ParseResult Field -mkField d (Node (n,t,_) _) | d >= 1 && t = tabsError n -mkField d (Node (n,_,l) ts) = case span (\c -> isAlphaNum c || c == '-') l of - ([], _) -> syntaxError n $ "unrecognised field or section: " ++ show l - (name, rest) -> case trimLeading rest of - (':':rest') -> do let followingLines = concatMap Tree.flatten ts - tabs = not (null [()| (_,True,_) <- followingLines ]) - if tabs && d >= 1 - then tabsError n - else return $ F n (map toLower name) - (fieldValue rest' followingLines) - rest' -> do ts' <- mapM (mkField (d+1)) ts - return (Section n (map toLower name) rest' ts') - where fieldValue firstLine followingLines = - let firstLine' = trimLeading firstLine - followingLines' = map (\(_,_,s) -> stripDot s) followingLines - allLines | null firstLine' = followingLines' - | otherwise = firstLine' : followingLines' - in intercalate "\n" allLines - stripDot "." 
= "" - stripDot s = s - --- | Convert if/then/else 'Section's to 'IfBlock's -ifelse :: [Field] -> ParseResult [Field] -ifelse [] = return [] -ifelse (Section n "if" cond thenpart - :Section _ "else" as elsepart:fs) - | null cond = syntaxError n "'if' with missing condition" - | null thenpart = syntaxError n "'then' branch of 'if' is empty" - | not (null as) = syntaxError n "'else' takes no arguments" - | null elsepart = syntaxError n "'else' branch of 'if' is empty" - | otherwise = do tp <- ifelse thenpart - ep <- ifelse elsepart - fs' <- ifelse fs - return (IfBlock n cond tp ep:fs') -ifelse (Section n "if" cond thenpart:fs) - | null cond = syntaxError n "'if' with missing condition" - | null thenpart = syntaxError n "'then' branch of 'if' is empty" - | otherwise = do tp <- ifelse thenpart - fs' <- ifelse fs - return (IfBlock n cond tp []:fs') -ifelse (Section n "else" _ _:_) = syntaxError n - "stray 'else' with no preceding 'if'" -ifelse (Section n s a fs':fs) = do fs'' <- ifelse fs' - fs''' <- ifelse fs - return (Section n s a fs'' : fs''') -ifelse (f:fs) = do fs' <- ifelse fs - return (f : fs') - ------------------------------------------------------------------------------- - --- |parse a module name -parseModuleNameQ :: ReadP r ModuleName -parseModuleNameQ = parseQuoted parse <++ parse - -parseFilePathQ :: ReadP r FilePath -parseFilePathQ = parseTokenQ - -- removed until normalise is no longer broken, was: - -- liftM normalise parseTokenQ - -betweenSpaces :: ReadP r a -> ReadP r a -betweenSpaces act = do skipSpaces - res <- act - skipSpaces - return res - -parseBuildTool :: ReadP r Dependency -parseBuildTool = do name <- parseBuildToolNameQ - ver <- betweenSpaces $ - parseVersionRangeQ <++ return anyVersion - return $ Dependency name ver - -parseBuildToolNameQ :: ReadP r PackageName -parseBuildToolNameQ = parseQuoted parseBuildToolName <++ parseBuildToolName - --- like parsePackageName but accepts symbols in components -parseBuildToolName :: ReadP r PackageName -parseBuildToolName = do ns <- sepBy1 component (ReadP.char '-') - return (PackageName (intercalate "-" ns)) - where component = do - cs <- munch1 (\c -> isAlphaNum c || c == '+' || c == '_') - if all isDigit cs then pfail else return cs - --- pkg-config allows versions and other letters in package names, --- eg "gtk+-2.0" is a valid pkg-config package _name_. --- It then has a package version number like 2.10.13 -parsePkgconfigDependency :: ReadP r Dependency -parsePkgconfigDependency = do name <- munch1 - (\c -> isAlphaNum c || c `elem` "+-._") - ver <- betweenSpaces $ - parseVersionRangeQ <++ return anyVersion - return $ Dependency (PackageName name) ver - -parsePackageNameQ :: ReadP r PackageName -parsePackageNameQ = parseQuoted parse <++ parse - -parseVersionRangeQ :: ReadP r VersionRange -parseVersionRangeQ = parseQuoted parse <++ parse - -parseOptVersion :: ReadP r Version -parseOptVersion = parseQuoted ver <++ ver - where ver :: ReadP r Version - ver = parse <++ return (Version [] []) - -parseTestedWithQ :: ReadP r (CompilerFlavor,VersionRange) -parseTestedWithQ = parseQuoted tw <++ tw - where - tw :: ReadP r (CompilerFlavor,VersionRange) - tw = do compiler <- parseCompilerFlavorCompat - version <- betweenSpaces $ parse <++ return anyVersion - return (compiler,version) - -parseLicenseQ :: ReadP r License -parseLicenseQ = parseQuoted parse <++ parse - --- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a --- because the "compat" version of ReadP isn't quite powerful enough. 
In --- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a --- Hence the trick above to make 'lic' polymorphic. - -parseLanguageQ :: ReadP r Language -parseLanguageQ = parseQuoted parse <++ parse - -parseExtensionQ :: ReadP r Extension -parseExtensionQ = parseQuoted parse <++ parse - -parseHaskellString :: ReadP r String -parseHaskellString = readS_to_P reads - -parseTokenQ :: ReadP r String -parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',') - -parseTokenQ' :: ReadP r String -parseTokenQ' = parseHaskellString <++ munch1 (not . isSpace) - -parseSepList :: ReadP r b - -> ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseSepList sepr p = sepBy p separator - where separator = betweenSpaces sepr - -parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseSpaceList p = sepBy p skipSpaces - -parseCommaList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseCommaList = parseSepList (ReadP.char ',') - -parseOptCommaList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseOptCommaList = parseSepList (optional (ReadP.char ',')) - -parseQuoted :: ReadP r a -> ReadP r a -parseQuoted = between (ReadP.char '"') (ReadP.char '"') - -parseFreeText :: ReadP.ReadP s String -parseFreeText = ReadP.munch (const True) - --- -------------------------------------------- --- ** Pretty printing - -showFilePath :: FilePath -> Doc -showFilePath "" = empty -showFilePath x = showToken x - -showToken :: String -> Doc -showToken str - | not (any dodgy str) && - not (null str) = text str - | otherwise = text (show str) - where dodgy c = isSpace c || c == ',' - -showTestedWith :: (CompilerFlavor,VersionRange) -> Doc -showTestedWith (compiler, version) = text (show compiler) <+> disp version - --- | Pretty-print free-format text, ensuring that it is vertically aligned, --- and with blank lines replaced by dots for correct re-parsing. -showFreeText :: String -> Doc -showFreeText "" = empty -showFreeText s = vcat [text (if null l then "." else l) | l <- lines_ s] - --- | 'lines_' breaks a string up into a list of strings at newline --- characters. The resulting strings do not contain newlines. 
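-- A minimal standalone sketch (not from the deleted sources) of the
-- comma-separated, optionally quoted token lists handled by parseTokenQ
-- and parseCommaList above, using base's ReadP in place of
-- Distribution.Compat.ReadP.
import Data.Char (isSpace)
import Text.ParserCombinators.ReadP

haskellString :: ReadP String
haskellString = readS_to_P reads

tokenQ :: ReadP String
tokenQ = haskellString <++ munch1 (\c -> not (isSpace c) && c /= ',')

commaList :: ReadP a -> ReadP [a]
commaList p = sepBy p (skipSpaces >> char ',' >> skipSpaces)

main :: IO ()
main = print [ r | (r, "") <- readP_to_S (commaList tokenQ)
                                         "base, bytestring, \"some pkg\"" ]
-- expected: [["base","bytestring","some pkg"]]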
-lines_ :: String -> [String] -lines_ [] = [""] -lines_ s = let (l, s') = break (== '\n') s - in l : case s' of - [] -> [] - (_:s'') -> lines_ s'' - --- | the indentation used for pretty printing -indentWith :: Int -indentWith = 4 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ReadE.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ReadE.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/ReadE.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/ReadE.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.ReadE --- Copyright : Jose Iborra 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Simple parsing with failure - -module Distribution.ReadE ( - -- * ReadE - ReadE(..), succeedReadE, failReadE, - -- * Projections - parseReadE, readEOrFail, - readP_to_E - ) where - -import Distribution.Compat.ReadP -import Data.Char ( isSpace ) - --- | Parser with simple error reporting -newtype ReadE a = ReadE {runReadE :: String -> Either ErrorMsg a} -type ErrorMsg = String - -instance Functor ReadE where - fmap f (ReadE p) = ReadE $ \txt -> case p txt of - Right a -> Right (f a) - Left err -> Left err - -succeedReadE :: (String -> a) -> ReadE a -succeedReadE f = ReadE (Right . f) - -failReadE :: ErrorMsg -> ReadE a -failReadE = ReadE . const . Left - -parseReadE :: ReadE a -> ReadP r a -parseReadE (ReadE p) = do - txt <- look - either fail return (p txt) - -readEOrFail :: ReadE a -> String -> a -readEOrFail r = either error id . runReadE r - -readP_to_E :: (String -> ErrorMsg) -> ReadP a a -> ReadE a -readP_to_E err r = - ReadE $ \txt -> case [ p | (p, s) <- readP_to_S r txt - , all isSpace s ] - of [] -> Left (err txt) - (p:_) -> Right p diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Bench.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Bench.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Bench.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Bench.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Bench --- Copyright : Johan Tibell 2011 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into running the benchmarks in a built --- package. It performs the \"@.\/setup bench@\" action. It runs --- benchmarks designated in the package description. 
- -module Distribution.Simple.Bench - ( bench - ) where - -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.UserHooks -import Distribution.Simple.Utils -import Distribution.Text - -import Control.Monad ( when, unless, forM ) -import System.Exit ( ExitCode(..), exitFailure, exitSuccess ) -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>) ) - --- | Perform the \"@.\/setup bench@\" action. -bench :: Args -- ^positional command-line arguments - -> PD.PackageDescription -- ^information from the .cabal file - -> LBI.LocalBuildInfo -- ^information from the configure step - -> BenchmarkFlags -- ^flags sent to benchmark - -> IO () -bench args pkg_descr lbi flags = do - let verbosity = fromFlag $ benchmarkVerbosity flags - benchmarkNames = args - pkgBenchmarks = PD.benchmarks pkg_descr - enabledBenchmarks = [ t | t <- pkgBenchmarks - , PD.benchmarkEnabled t - , PD.buildable (PD.benchmarkBuildInfo t) ] - - -- Run the benchmark - doBench :: PD.Benchmark -> IO ExitCode - doBench bm = - case PD.benchmarkInterface bm of - PD.BenchmarkExeV10 _ _ -> do - let cmd = LBI.buildDir lbi PD.benchmarkName bm - PD.benchmarkName bm <.> exeExtension - options = map (benchOption pkg_descr lbi bm) $ - benchmarkOptions flags - name = PD.benchmarkName bm - -- Check that the benchmark executable exists. - exists <- doesFileExist cmd - unless exists $ die $ - "Error: Could not find benchmark program \"" - ++ cmd ++ "\". Did you build the package first?" - - notice verbosity $ startMessage name - -- This will redirect the child process - -- stdout/stderr to the parent process. - exitcode <- rawSystemExitCode verbosity cmd options - notice verbosity $ finishMessage name exitcode - return exitcode - - _ -> do - notice verbosity $ "No support for running " - ++ "benchmark " ++ PD.benchmarkName bm ++ " of type: " - ++ show (disp $ PD.benchmarkType bm) - exitFailure - - unless (PD.hasBenchmarks pkg_descr) $ do - notice verbosity "Package has no benchmarks." - exitSuccess - - when (PD.hasBenchmarks pkg_descr && null enabledBenchmarks) $ - die $ "No benchmarks enabled. Did you remember to configure with " - ++ "\'--enable-benchmarks\'?" - - bmsToRun <- case benchmarkNames of - [] -> return enabledBenchmarks - names -> forM names $ \bmName -> - let benchmarkMap = zip enabledNames enabledBenchmarks - enabledNames = map PD.benchmarkName enabledBenchmarks - allNames = map PD.benchmarkName pkgBenchmarks - in case lookup bmName benchmarkMap of - Just t -> return t - _ | bmName `elem` allNames -> - die $ "Package configured with benchmark " - ++ bmName ++ " disabled." - | otherwise -> die $ "no such benchmark: " ++ bmName - - let totalBenchmarks = length bmsToRun - notice verbosity $ "Running " ++ show totalBenchmarks ++ " benchmarks..." - exitcodes <- mapM doBench bmsToRun - let allOk = totalBenchmarks == length (filter (== ExitSuccess) exitcodes) - unless allOk exitFailure - where - startMessage name = "Benchmark " ++ name ++ ": RUNNING...\n" - finishMessage name exitcode = "Benchmark " ++ name ++ ": " - ++ (case exitcode of - ExitSuccess -> "FINISH" - ExitFailure _ -> "ERROR") - - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
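-- A minimal standalone sketch (not from the deleted sources) of the
-- driver pattern used by 'bench' above: run each benchmark executable,
-- report a RUNNING/FINISH/ERROR status, and exit non-zero if any
-- benchmark failed.  It uses rawSystem from the process package instead
-- of Cabal's rawSystemExitCode, and the executable paths are made up.
import Control.Monad (unless)
import System.Exit (ExitCode (..), exitFailure)
import System.Process (rawSystem)

runOne :: (String, [String]) -> IO ExitCode
runOne (cmd, args) = do
  putStrLn ("Benchmark " ++ cmd ++ ": RUNNING...")
  code <- rawSystem cmd args
  putStrLn ("Benchmark " ++ cmd ++ ": " ++ status code)
  return code
  where
    status ExitSuccess     = "FINISH"
    status (ExitFailure _) = "ERROR"

main :: IO ()
main = do
  codes <- mapM runOne
             [ ("dist/build/bm1/bm1", [])
             , ("dist/build/bm2/bm2", ["-o", "report.html"]) ]
  unless (all (== ExitSuccess) codes) exitFailure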
-benchOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.Benchmark - -> PathTemplate - -> String -benchOption pkg_descr lbi bm template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(BenchmarkNameVar, toPathTemplate $ PD.benchmarkName bm)] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build/Macros.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build/Macros.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build/Macros.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build/Macros.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,110 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build.Macros --- Copyright : Simon Marlow 2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Generate cabal_macros.h - CPP macros for package version testing --- --- When using CPP you get --- --- > VERSION_ --- > MIN_VERSION_(A,B,C) --- --- for each /package/ in @build-depends@, which is true if the version of --- /package/ in use is @>= A.B.C@, using the normal ordering on version --- numbers. --- -module Distribution.Simple.Build.Macros ( - generate, - generatePackageVersionMacros, - ) where - -import Data.Maybe - ( isJust ) -import Distribution.Package - ( PackageIdentifier(PackageIdentifier) ) -import Distribution.Version - ( Version(versionBranch) ) -import Distribution.PackageDescription - ( PackageDescription ( package ) ) -import Distribution.Simple.LocalBuildInfo - ( LocalBuildInfo(withPrograms), externalPackageDeps - , localComponentId, localCompatPackageKey ) -import Distribution.Simple.Program.Db - ( configuredPrograms ) -import Distribution.Simple.Program.Types - ( ConfiguredProgram(programId, programVersion) ) -import Distribution.Text - ( display ) - --- ------------------------------------------------------------ --- * Generate cabal_macros.h --- ------------------------------------------------------------ - --- | The contents of the @cabal_macros.h@ for the given configured package. --- -generate :: PackageDescription -> LocalBuildInfo -> String -generate pkg_descr lbi = - "/* DO NOT EDIT: This file is automatically generated by Cabal */\n\n" ++ - generatePackageVersionMacros - (package pkg_descr : map snd (externalPackageDeps lbi)) ++ - generateToolVersionMacros (configuredPrograms . withPrograms $ lbi) ++ - generateComponentIdMacro lbi - --- | Helper function that generates just the @VERSION_pkg@ and @MIN_VERSION_pkg@ --- macros for a list of package ids (usually used with the specific deps of --- a configured package). --- -generatePackageVersionMacros :: [PackageIdentifier] -> String -generatePackageVersionMacros pkgids = concat - [ "/* package " ++ display pkgid ++ " */\n" - ++ generateMacros "" pkgname version - | pkgid@(PackageIdentifier name version) <- pkgids - , let pkgname = map fixchar (display name) - ] - --- | Helper function that generates just the @TOOL_VERSION_pkg@ and --- @MIN_TOOL_VERSION_pkg@ macros for a list of configured programs. 
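-- Illustrative sketch: how package code consumes the cabal_macros.h header
-- described above.  Cabal hands the generated header to the preprocessor,
-- so VERSION_<pkg> and MIN_VERSION_<pkg> are available under CPP.  The
-- module below is a hypothetical use site, not part of Cabal.
{-# LANGUAGE CPP #-}
module VersionCompat (builtAgainst) where

#if !MIN_VERSION_base(4,8,0)
-- Before base-4.8 (GHC 7.10), Applicative was not exported from Prelude.
import Control.Applicative (Applicative (..))
#endif

-- VERSION_base expands to a string literal such as "4.9.0.0".
builtAgainst :: String
builtAgainst = "base-" ++ VERSION_base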
--- -generateToolVersionMacros :: [ConfiguredProgram] -> String -generateToolVersionMacros progs = concat - [ "/* tool " ++ progid ++ " */\n" - ++ generateMacros "TOOL_" progname version - | prog <- progs - , isJust . programVersion $ prog - , let progid = programId prog ++ "-" ++ display version - progname = map fixchar (programId prog) - Just version = programVersion prog - ] - --- | Common implementation of 'generatePackageVersionMacros' and --- 'generateToolVersionMacros'. --- -generateMacros :: String -> String -> Version -> String -generateMacros prefix name version = - concat - ["#define ", prefix, "VERSION_",name," ",show (display version),"\n" - ,"#define MIN_", prefix, "VERSION_",name,"(major1,major2,minor) (\\\n" - ," (major1) < ",major1," || \\\n" - ," (major1) == ",major1," && (major2) < ",major2," || \\\n" - ," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")" - ,"\n\n" - ] - where - (major1:major2:minor:_) = map show (versionBranch version ++ repeat 0) - --- | Generate the @CURRENT_COMPONENT_ID@ definition for the component ID --- of the current package. -generateComponentIdMacro :: LocalBuildInfo -> String -generateComponentIdMacro lbi = - concat - [ "#define CURRENT_COMPONENT_ID \"" ++ display (localComponentId lbi) ++ "\"\n\n" - , "#define CURRENT_PACKAGE_KEY \"" ++ localCompatPackageKey lbi ++ "\"\n\n" - ] - -fixchar :: Char -> Char -fixchar '-' = '_' -fixchar c = c diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build/PathsModule.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build/PathsModule.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build/PathsModule.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build/PathsModule.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,325 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build.Macros --- Copyright : Isaac Jones 2003-2005, --- Ross Paterson 2006, --- Duncan Coutts 2007-2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Generating the Paths_pkgname module. --- --- This is a module that Cabal generates for the benefit of packages. It --- enables them to find their version number and find any installed data files --- at runtime. This code should probably be split off into another module. 
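-- Illustrative sketch: consuming the generated Paths_<pkgname> module
-- described above, for a hypothetical package named "mypkg" (so the
-- autogenerated module is Paths_mypkg).
module Main (main) where

import Data.Version (showVersion)
import Paths_mypkg  (getDataFileName, version)

main :: IO ()
main = do
  putStrLn ("mypkg " ++ showVersion version)
  tpl <- getDataFileName "templates/default.tpl"   -- resolved at run time
  putStrLn ("data file: " ++ tpl)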
--- -module Distribution.Simple.Build.PathsModule ( - generate, pkgPathEnvVar - ) where - -import Distribution.System -import Distribution.Simple.Compiler -import Distribution.Package -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Version - -import System.FilePath - ( pathSeparator ) -import Data.Maybe - ( fromJust, isNothing ) - --- ------------------------------------------------------------ --- * Building Paths_.hs --- ------------------------------------------------------------ - -generate :: PackageDescription -> LocalBuildInfo -> String -generate pkg_descr lbi = - let pragmas = cpp_pragma ++ ffi_pragmas ++ warning_pragmas - - cpp_pragma | supports_cpp = "{-# LANGUAGE CPP #-}\n" - | otherwise = "" - - ffi_pragmas - | absolute = "" - | supports_language_pragma = - "{-# LANGUAGE ForeignFunctionInterface #-}\n" - | otherwise = - "{-# OPTIONS_GHC -fffi #-}\n"++ - "{-# OPTIONS_JHC -fffi #-}\n" - - warning_pragmas = - "{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}\n"++ - "{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}\n" - - foreign_imports - | absolute = "" - | otherwise = - "import Foreign\n"++ - "import Foreign.C\n" - - reloc_imports - | reloc = - "import System.Environment (getExecutablePath)\n" - | otherwise = "" - - header = - pragmas++ - "module " ++ display paths_modulename ++ " (\n"++ - " version,\n"++ - " getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,\n"++ - " getDataFileName, getSysconfDir\n"++ - " ) where\n"++ - "\n"++ - foreign_imports++ - "import qualified Control.Exception as Exception\n"++ - "import Data.Version (Version(..))\n"++ - "import System.Environment (getEnv)\n"++ - reloc_imports ++ - "import Prelude\n"++ - "\n"++ - (if supports_cpp - then - ("#if defined(VERSION_base)\n"++ - "\n"++ - "#if MIN_VERSION_base(4,0,0)\n"++ - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ - "#else\n"++ - "catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a\n"++ - "#endif\n"++ - "\n"++ - "#else\n"++ - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ - "#endif\n") - else - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n")++ - "catchIO = Exception.catch\n" ++ - "\n"++ - "version :: Version"++ - "\nversion = Version " ++ show branch ++ " " ++ show tags - where Version branch tags = packageVersion pkg_descr - - body - | reloc = - "\n\nbindirrel :: FilePath\n" ++ - "bindirrel = " ++ show flat_bindirreloc ++ - "\n"++ - "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ - "getBinDir = "++mkGetEnvOrReloc "bindir" flat_bindirreloc++"\n"++ - "getLibDir = "++mkGetEnvOrReloc "libdir" flat_libdirreloc++"\n"++ - "getDynLibDir = "++mkGetEnvOrReloc "dynlibdir" flat_dynlibdirreloc++"\n"++ - "getDataDir = "++mkGetEnvOrReloc "datadir" flat_datadirreloc++"\n"++ - "getLibexecDir = "++mkGetEnvOrReloc "libexecdir" flat_libexecdirreloc++"\n"++ - "getSysconfDir = "++mkGetEnvOrReloc "sysconfdir" flat_sysconfdirreloc++"\n"++ - "\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir `joinFileName` name)\n"++ - "\n"++ - get_prefix_reloc_stuff++ - "\n"++ - filename_stuff - | absolute = - "\nbindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath\n"++ - "\nbindir = " ++ show flat_bindir ++ - "\nlibdir = " ++ show flat_libdir ++ - "\ndynlibdir = " ++ show 
flat_dynlibdir ++ - "\ndatadir = " ++ show flat_datadir ++ - "\nlibexecdir = " ++ show flat_libexecdir ++ - "\nsysconfdir = " ++ show flat_sysconfdir ++ - "\n"++ - "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ - "getBinDir = "++mkGetEnvOr "bindir" "return bindir"++"\n"++ - "getLibDir = "++mkGetEnvOr "libdir" "return libdir"++"\n"++ - "getDynLibDir = "++mkGetEnvOr "dynlibdir" "return dynlibdir"++"\n"++ - "getDataDir = "++mkGetEnvOr "datadir" "return datadir"++"\n"++ - "getLibexecDir = "++mkGetEnvOr "libexecdir" "return libexecdir"++"\n"++ - "getSysconfDir = "++mkGetEnvOr "sysconfdir" "return sysconfdir"++"\n"++ - "\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir ++ "++path_sep++" ++ name)\n" - | otherwise = - "\nprefix, bindirrel :: FilePath" ++ - "\nprefix = " ++ show flat_prefix ++ - "\nbindirrel = " ++ show (fromJust flat_bindirrel) ++ - "\n\n"++ - "getBinDir :: IO FilePath\n"++ - "getBinDir = getPrefixDirRel bindirrel\n\n"++ - "getLibDir :: IO FilePath\n"++ - "getLibDir = "++mkGetDir flat_libdir flat_libdirrel++"\n\n"++ - "getDynLibDir :: IO FilePath\n"++ - "getDynLibDir = "++mkGetDir flat_dynlibdir flat_dynlibdirrel++"\n\n"++ - "getDataDir :: IO FilePath\n"++ - "getDataDir = "++ mkGetEnvOr "datadir" - (mkGetDir flat_datadir flat_datadirrel)++"\n\n"++ - "getLibexecDir :: IO FilePath\n"++ - "getLibexecDir = "++mkGetDir flat_libexecdir flat_libexecdirrel++"\n\n"++ - "getSysconfDir :: IO FilePath\n"++ - "getSysconfDir = "++mkGetDir flat_sysconfdir flat_sysconfdirrel++"\n\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir `joinFileName` name)\n"++ - "\n"++ - get_prefix_stuff++ - "\n"++ - filename_stuff - in header++body - - where - InstallDirs { - prefix = flat_prefix, - bindir = flat_bindir, - libdir = flat_libdir, - dynlibdir = flat_dynlibdir, - datadir = flat_datadir, - libexecdir = flat_libexecdir, - sysconfdir = flat_sysconfdir - } = absoluteInstallDirs pkg_descr lbi NoCopyDest - InstallDirs { - bindir = flat_bindirrel, - libdir = flat_libdirrel, - dynlibdir = flat_dynlibdirrel, - datadir = flat_datadirrel, - libexecdir = flat_libexecdirrel, - sysconfdir = flat_sysconfdirrel - } = prefixRelativeInstallDirs (packageId pkg_descr) lbi - - flat_bindirreloc = shortRelativePath flat_prefix flat_bindir - flat_libdirreloc = shortRelativePath flat_prefix flat_libdir - flat_dynlibdirreloc = shortRelativePath flat_prefix flat_dynlibdir - flat_datadirreloc = shortRelativePath flat_prefix flat_datadir - flat_libexecdirreloc = shortRelativePath flat_prefix flat_libexecdir - flat_sysconfdirreloc = shortRelativePath flat_prefix flat_sysconfdir - - mkGetDir _ (Just dirrel) = "getPrefixDirRel " ++ show dirrel - mkGetDir dir Nothing = "return " ++ show dir - - mkGetEnvOrReloc var dirrel = "catchIO (getEnv \""++var'++"\")" ++ - " (\\_ -> getPrefixDirReloc \"" ++ dirrel ++ - "\")" - where var' = pkgPathEnvVar pkg_descr var - - mkGetEnvOr var expr = "catchIO (getEnv \""++var'++"\")"++ - " (\\_ -> "++expr++")" - where var' = pkgPathEnvVar pkg_descr var - - -- In several cases we cannot make relocatable installations - absolute = - hasLibs pkg_descr -- we can only make progs relocatable - || isNothing flat_bindirrel -- if the bin dir is an absolute path - || not (supportsRelocatableProgs (compilerFlavor (compiler lbi))) - - reloc = relocatable lbi - - supportsRelocatableProgs GHC = case 
buildOS of - Windows -> True - _ -> False - supportsRelocatableProgs GHCJS = case buildOS of - Windows -> True - _ -> False - supportsRelocatableProgs _ = False - - paths_modulename = autogenModuleName pkg_descr - - get_prefix_stuff = get_prefix_win32 buildArch - - path_sep = show [pathSeparator] - - supports_cpp = compilerFlavor (compiler lbi) == GHC - - supports_language_pragma = - (compilerFlavor (compiler lbi) == GHC && - (compilerVersion (compiler lbi) - `withinRange` orLaterVersion (Version [6,6,1] []))) || - compilerFlavor (compiler lbi) == GHCJS - --- | Generates the name of the environment variable controlling the path --- component of interest. -pkgPathEnvVar :: PackageDescription - -> String -- ^ path component; one of \"bindir\", \"libdir\", - -- \"datadir\", \"libexecdir\", or \"sysconfdir\" - -> String -- ^ environment variable name -pkgPathEnvVar pkg_descr var = - showPkgName (packageName pkg_descr) ++ "_" ++ var - where - showPkgName = map fixchar . display - fixchar '-' = '_' - fixchar c = c - -get_prefix_reloc_stuff :: String -get_prefix_reloc_stuff = - "getPrefixDirReloc :: FilePath -> IO FilePath\n"++ - "getPrefixDirReloc dirRel = do\n"++ - " exePath <- getExecutablePath\n"++ - " let (bindir,_) = splitFileName exePath\n"++ - " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n" - -get_prefix_win32 :: Arch -> String -get_prefix_win32 arch = - "getPrefixDirRel :: FilePath -> IO FilePath\n"++ - "getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.\n"++ - " where\n"++ - " try_size size = allocaArray (fromIntegral size) $ \\buf -> do\n"++ - " ret <- c_GetModuleFileName nullPtr buf size\n"++ - " case ret of\n"++ - " 0 -> return (prefix `joinFileName` dirRel)\n"++ - " _ | ret < size -> do\n"++ - " exePath <- peekCWString buf\n"++ - " let (bindir,_) = splitFileName exePath\n"++ - " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n"++ - " | otherwise -> try_size (size * 2)\n"++ - "\n"++ - "foreign import " ++ cconv ++ " unsafe \"windows.h GetModuleFileNameW\"\n"++ - " c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32\n" - where cconv = case arch of - I386 -> "stdcall" - X86_64 -> "ccall" - _ -> error "win32 supported only with I386, X86_64" - -filename_stuff :: String -filename_stuff = - "minusFileName :: FilePath -> String -> FilePath\n"++ - "minusFileName dir \"\" = dir\n"++ - "minusFileName dir \".\" = dir\n"++ - "minusFileName dir suffix =\n"++ - " minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))\n"++ - "\n"++ - "joinFileName :: String -> String -> FilePath\n"++ - "joinFileName \"\" fname = fname\n"++ - "joinFileName \".\" fname = fname\n"++ - "joinFileName dir \"\" = dir\n"++ - "joinFileName dir fname\n"++ - " | isPathSeparator (last dir) = dir++fname\n"++ - " | otherwise = dir++pathSeparator:fname\n"++ - "\n"++ - "splitFileName :: FilePath -> (String, String)\n"++ - "splitFileName p = (reverse (path2++drive), reverse fname)\n"++ - " where\n"++ - " (path,drive) = case p of\n"++ - " (c:':':p') -> (reverse p',[':',c])\n"++ - " _ -> (reverse p ,\"\")\n"++ - " (fname,path1) = break isPathSeparator path\n"++ - " path2 = case path1 of\n"++ - " [] -> \".\"\n"++ - " [_] -> path1 -- don't remove the trailing slash if \n"++ - " -- there is only one character\n"++ - " (c:path') | isPathSeparator c -> path'\n"++ - " _ -> path1\n"++ - "\n"++ - "pathSeparator :: Char\n"++ - (case buildOS of - Windows -> "pathSeparator = '\\\\'\n" - _ -> "pathSeparator = '/'\n") ++ - "\n"++ - "isPathSeparator :: 
Char -> Bool\n"++ - (case buildOS of - Windows -> "isPathSeparator c = c == '/' || c == '\\\\'\n" - _ -> "isPathSeparator c = c == '/'\n") diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Build.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Build.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,606 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build --- Copyright : Isaac Jones 2003-2005, --- Ross Paterson 2006, --- Duncan Coutts 2007-2008, 2012 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point to actually building the modules in a package. It --- doesn't actually do much itself, most of the work is delegated to --- compiler-specific actions. It does do some non-compiler specific bits like --- running pre-processors. --- - -module Distribution.Simple.Build ( - build, repl, - startInterpreter, - - initialBuildSteps, - writeAutogenFiles, - ) where - -import Distribution.Package -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.JHC as JHC -import qualified Distribution.Simple.LHC as LHC -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite - -import qualified Distribution.Simple.Build.Macros as Build.Macros -import qualified Distribution.Simple.Build.PathsModule as Build.PathsModule - -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.PackageDescription hiding (Flag) -import qualified Distribution.InstalledPackageInfo as IPI -import qualified Distribution.ModuleName as ModuleName -import Distribution.ModuleName (ModuleName) - -import Distribution.Simple.Setup -import Distribution.Simple.BuildTarget -import Distribution.Simple.PreProcess -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Db -import Distribution.Simple.BuildPaths -import Distribution.Simple.Configure -import Distribution.Simple.Register -import Distribution.Simple.Test.LibV09 -import Distribution.Simple.Utils - -import Distribution.System -import Distribution.Text -import Distribution.Verbosity - -import qualified Data.Map as Map -import qualified Data.Set as Set -import Data.Either - ( partitionEithers ) -import Data.List - ( intersect ) -import Control.Monad - ( when, unless, forM_ ) -import System.FilePath - ( (), (<.>) ) -import System.Directory - ( getCurrentDirectory ) - --- ----------------------------------------------------------------------------- --- |Build the libraries and executables in this package. 
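-- Illustrative sketch: the build code below is reached through the standard
-- Setup interface; a minimal Setup.hs for a Simple build type looks like
-- this (package-specific details omitted).
module Main (main) where

import Distribution.Simple (defaultMain)

main :: IO ()
main = defaultMain
-- e.g.  runghc Setup.hs configure  &&  runghc Setup.hs build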
- -build :: PackageDescription -- ^ Mostly information from the .cabal file - -> LocalBuildInfo -- ^ Configuration information - -> BuildFlags -- ^ Flags that the user passed to build - -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling - -> IO () -build pkg_descr lbi flags suffixes = do - let distPref = fromFlag (buildDistPref flags) - verbosity = fromFlag (buildVerbosity flags) - - targets <- readBuildTargets pkg_descr (buildArgs flags) - targets' <- checkBuildTargets verbosity pkg_descr targets - let componentsToBuild = map fst (componentsInBuildOrder lbi (map fst targets')) - info verbosity $ "Component build order: " - ++ intercalate ", " (map showComponentName componentsToBuild) - - initialBuildSteps distPref pkg_descr lbi verbosity - when (null targets) $ - -- Only bother with this message if we're building the whole package - setupMessage verbosity "Building" (packageId pkg_descr) - - internalPackageDB <- createInternalPackageDB verbosity lbi distPref - - withComponentsInBuildOrder pkg_descr lbi componentsToBuild $ \comp clbi -> - let bi = componentBuildInfo comp - progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi) - lbi' = lbi { - withPrograms = progs', - withPackageDB = withPackageDB lbi ++ [internalPackageDB] - } - in buildComponent verbosity (buildNumJobs flags) pkg_descr - lbi' suffixes comp clbi distPref - - -repl :: PackageDescription -- ^ Mostly information from the .cabal file - -> LocalBuildInfo -- ^ Configuration information - -> ReplFlags -- ^ Flags that the user passed to build - -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling - -> [String] - -> IO () -repl pkg_descr lbi flags suffixes args = do - let distPref = fromFlag (replDistPref flags) - verbosity = fromFlag (replVerbosity flags) - - targets <- readBuildTargets pkg_descr args - targets' <- case targets of - [] -> return $ take 1 [ componentName c - | c <- pkgEnabledComponents pkg_descr ] - [target] -> fmap (map fst) (checkBuildTargets verbosity pkg_descr [target]) - _ -> die $ "The 'repl' command does not support multiple targets at once." - let componentsToBuild = componentsInBuildOrder lbi targets' - componentForRepl = last componentsToBuild - debug verbosity $ "Component build order: " - ++ intercalate ", " - [ showComponentName c | (c,_) <- componentsToBuild ] - - initialBuildSteps distPref pkg_descr lbi verbosity - - internalPackageDB <- createInternalPackageDB verbosity lbi distPref - - let lbiForComponent comp lbi' = - lbi' { - withPackageDB = withPackageDB lbi ++ [internalPackageDB], - withPrograms = addInternalBuildTools pkg_descr lbi' - (componentBuildInfo comp) (withPrograms lbi') - } - - -- build any dependent components - sequence_ - [ let comp = getComponent pkg_descr cname - lbi' = lbiForComponent comp lbi - in buildComponent verbosity NoFlag - pkg_descr lbi' suffixes comp clbi distPref - | (cname, clbi) <- init componentsToBuild ] - - -- REPL for target components - let (cname, clbi) = componentForRepl - comp = getComponent pkg_descr cname - lbi' = lbiForComponent comp lbi - in replComponent verbosity pkg_descr lbi' suffixes comp clbi distPref - - --- | Start an interpreter without loading any package files. 
-startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity programDb comp platform packageDBs = - case compilerFlavor comp of - GHC -> GHC.startInterpreter verbosity programDb comp platform packageDBs - GHCJS -> GHCJS.startInterpreter verbosity programDb comp platform packageDBs - _ -> die "A REPL is not supported with this compiler." - -buildComponent :: Verbosity - -> Flag (Maybe Int) - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> Component - -> ComponentLocalBuildInfo - -> FilePath - -> IO () -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CLib lib) clbi distPref = do - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - info verbosity "Building library..." - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = addExtraCSources libbi extras } - buildLib verbosity numJobs pkg_descr lbi lib' clbi - - -- Register the library in-place, so exes can depend - -- on internally defined libraries. - pwd <- getCurrentDirectory - let -- The in place registration uses the "-inplace" suffix, not an ABI hash - installedPkgInfo = inplaceInstalledPackageInfo pwd distPref pkg_descr - (AbiHash "") lib' lbi clbi - - registerPackage verbosity (compiler lbi) (withPrograms lbi) False - (withPackageDB lbi) installedPkgInfo - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CExe exe) clbi _ = do - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - info verbosity $ "Building executable " ++ exeName exe ++ "..." - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' clbi - - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) - clbi _distPref = do - let exe = testSuiteExeV10AsExe test - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - info verbosity $ "Building test suite " ++ testName test ++ "..." - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' clbi - - -buildComponent verbosity numJobs pkg_descr lbi0 suffixes - comp@(CTest - test@TestSuite { testInterface = TestSuiteLibV09{} }) - clbi -- This ComponentLocalBuildInfo corresponds to a detailed - -- test suite and not a real component. It should not - -- be used, except to construct the CLBIs for the - -- library and stub executable that will actually be - -- built. - distPref = do - pwd <- getCurrentDirectory - let (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) = - testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - info verbosity $ "Building test suite " ++ testName test ++ "..." - buildLib verbosity numJobs pkg lbi lib libClbi - -- NB: need to enable multiple instances here, because on 7.10+ - -- the package name is the same as the library, and we still - -- want the registration to go through. 
- registerPackage verbosity (compiler lbi) (withPrograms lbi) True - (withPackageDB lbi) ipi - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' exeClbi - - -buildComponent _ _ _ _ _ - (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) - _ _ = - die $ "No support for building test suite type " ++ display tt - - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) - clbi _ = do - let (exe, exeClbi) = benchmarkExeV10asExe bm clbi - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - info verbosity $ "Building benchmark " ++ benchmarkName bm ++ "..." - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' exeClbi - - -buildComponent _ _ _ _ _ - (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) - _ _ = - die $ "No support for building benchmark type " ++ display tt - - --- | Add extra C sources generated by preprocessing to build --- information. -addExtraCSources :: BuildInfo -> [FilePath] -> BuildInfo -addExtraCSources bi extras = bi { cSources = new } - where new = Set.toList $ old `Set.union` exs - old = Set.fromList $ cSources bi - exs = Set.fromList extras - - -replComponent :: Verbosity - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> Component - -> ComponentLocalBuildInfo - -> FilePath - -> IO () -replComponent verbosity pkg_descr lbi suffixes - comp@(CLib lib) clbi _ = do - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } - replLib verbosity pkg_descr lbi lib' clbi - -replComponent verbosity pkg_descr lbi suffixes - comp@(CExe exe) clbi _ = do - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } - replExe verbosity pkg_descr lbi exe' clbi - - -replComponent verbosity pkg_descr lbi suffixes - comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) - clbi _distPref = do - let exe = testSuiteExeV10AsExe test - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } - replExe verbosity pkg_descr lbi exe' clbi - - -replComponent verbosity pkg_descr lbi0 suffixes - comp@(CTest - test@TestSuite { testInterface = TestSuiteLibV09{} }) - clbi distPref = do - pwd <- getCurrentDirectory - let (pkg, lib, libClbi, lbi, _, _, _) = - testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd - preprocessComponent pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } - replLib verbosity pkg lbi lib' libClbi - - -replComponent _ _ _ _ - (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) - _ _ = - die $ "No support for building test suite type " ++ display tt - - -replComponent verbosity pkg_descr lbi suffixes - comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) - clbi _ = do - let (exe, exeClbi) = benchmarkExeV10asExe bm clbi - preprocessComponent 
pkg_descr comp lbi False verbosity suffixes - extras <- preprocessExtras comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } - replExe verbosity pkg_descr lbi exe' exeClbi - - -replComponent _ _ _ _ - (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) - _ _ = - die $ "No support for building benchmark type " ++ display tt - ----------------------------------------------------- --- Shared code for buildComponent and replComponent --- - --- | Translate a exe-style 'TestSuite' component into an exe for building -testSuiteExeV10AsExe :: TestSuite -> Executable -testSuiteExeV10AsExe test@TestSuite { testInterface = TestSuiteExeV10 _ mainFile } = - Executable { - exeName = testName test, - modulePath = mainFile, - buildInfo = testBuildInfo test - } -testSuiteExeV10AsExe TestSuite{} = error "testSuiteExeV10AsExe: wrong kind" - --- | Translate a lib-style 'TestSuite' component into a lib + exe for building -testSuiteLibV09AsLibAndExe :: PackageDescription - -> TestSuite - -> ComponentLocalBuildInfo - -> LocalBuildInfo - -> FilePath - -> FilePath - -> (PackageDescription, - Library, ComponentLocalBuildInfo, - LocalBuildInfo, - IPI.InstalledPackageInfo, - Executable, ComponentLocalBuildInfo) -testSuiteLibV09AsLibAndExe pkg_descr - test@TestSuite { testInterface = TestSuiteLibV09 _ m } - clbi lbi distPref pwd = - (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) - where - bi = testBuildInfo test - lib = Library { - exposedModules = [ m ], - reexportedModules = [], - requiredSignatures = [], - exposedSignatures = [], - libExposed = True, - libBuildInfo = bi - } - -- NB: temporary hack; I have a refactor which solves this - cid = computeComponentId (package pkg_descr) - (CTestName (testName test)) - (map ((\(SimpleUnitId cid0) -> cid0) . fst) - (componentPackageDeps clbi)) - (flagAssignment lbi) - uid = SimpleUnitId cid - (pkg_name, compat_key) = computeCompatPackageKey - (compiler lbi) (package pkg_descr) - (CTestName (testName test)) uid - libClbi = LibComponentLocalBuildInfo - { componentPackageDeps = componentPackageDeps clbi - , componentPackageRenaming = componentPackageRenaming clbi - , componentUnitId = uid - , componentCompatPackageKey = compat_key - , componentExposedModules = [IPI.ExposedModule m Nothing] - } - pkg = pkg_descr { - package = (package pkg_descr) { pkgName = pkg_name } - , buildDepends = targetBuildDepends $ testBuildInfo test - , executables = [] - , testSuites = [] - , library = Just lib - } - ipi = inplaceInstalledPackageInfo pwd distPref pkg (AbiHash "") lib lbi libClbi - testDir = buildDir lbi stubName test - stubName test ++ "-tmp" - testLibDep = thisPackageVersion $ package pkg - exe = Executable { - exeName = stubName test, - modulePath = stubFilePath test, - buildInfo = (testBuildInfo test) { - hsSourceDirs = [ testDir ], - targetBuildDepends = testLibDep - : (targetBuildDepends $ testBuildInfo test), - targetBuildRenaming = Map.empty - } - } - -- | The stub executable needs a new 'ComponentLocalBuildInfo' - -- that exposes the relevant test suite library. 
- exeClbi = ExeComponentLocalBuildInfo { - componentPackageDeps = - (IPI.installedUnitId ipi, packageId ipi) - : (filter (\(_, x) -> let PackageName name = pkgName x - in name == "Cabal" || name == "base") - (componentPackageDeps clbi)), - componentPackageRenaming = Map.empty - } -testSuiteLibV09AsLibAndExe _ TestSuite{} _ _ _ _ = error "testSuiteLibV09AsLibAndExe: wrong kind" - - --- | Translate a exe-style 'Benchmark' component into an exe for building -benchmarkExeV10asExe :: Benchmark -> ComponentLocalBuildInfo - -> (Executable, ComponentLocalBuildInfo) -benchmarkExeV10asExe bm@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } - clbi = - (exe, exeClbi) - where - exe = Executable { - exeName = benchmarkName bm, - modulePath = f, - buildInfo = benchmarkBuildInfo bm - } - exeClbi = ExeComponentLocalBuildInfo { - componentPackageDeps = componentPackageDeps clbi, - componentPackageRenaming = componentPackageRenaming clbi - } -benchmarkExeV10asExe Benchmark{} _ = error "benchmarkExeV10asExe: wrong kind" - --- | Initialize a new package db file for libraries defined --- internally to the package. -createInternalPackageDB :: Verbosity -> LocalBuildInfo -> FilePath - -> IO PackageDB -createInternalPackageDB verbosity lbi distPref = do - existsAlready <- doesPackageDBExist dbPath - when existsAlready $ deletePackageDB dbPath - createPackageDB verbosity (compiler lbi) (withPrograms lbi) False dbPath - return (SpecificPackageDB dbPath) - where - dbPath = case compilerFlavor (compiler lbi) of - UHC -> UHC.inplacePackageDbPath lbi - _ -> distPref "package.conf.inplace" - -addInternalBuildTools :: PackageDescription -> LocalBuildInfo -> BuildInfo - -> ProgramDb -> ProgramDb -addInternalBuildTools pkg lbi bi progs = - foldr updateProgram progs internalBuildTools - where - internalBuildTools = - [ simpleConfiguredProgram toolName (FoundOnSystem toolLocation) - | toolName <- toolNames - , let toolLocation = buildDir lbi toolName toolName <.> exeExtension ] - toolNames = intersect buildToolNames internalExeNames - internalExeNames = map exeName (executables pkg) - buildToolNames = map buildToolName (buildTools bi) - where - buildToolName (Dependency (PackageName name) _ ) = name - - --- TODO: build separate libs in separate dirs so that we can build --- multiple libs, e.g. for 'LibTest' library-style test suites -buildLib :: Verbosity -> Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity numJobs pkg_descr lbi lib clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.buildLib verbosity numJobs pkg_descr lbi lib clbi - GHCJS -> GHCJS.buildLib verbosity numJobs pkg_descr lbi lib clbi - JHC -> JHC.buildLib verbosity pkg_descr lbi lib clbi - LHC -> LHC.buildLib verbosity pkg_descr lbi lib clbi - UHC -> UHC.buildLib verbosity pkg_descr lbi lib clbi - HaskellSuite {} -> HaskellSuite.buildLib verbosity pkg_descr lbi lib clbi - _ -> die "Building is not supported with this compiler." 
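-- Illustrative sketch of the tool location that addInternalBuildTools
-- (above) assigns to an executable listed in its own package's build-tools.
-- `internalToolPath` is a standalone mirror written for this note, not a
-- Cabal function; the Unix executable extension is assumed.
import System.FilePath ((<.>), (</>))

internalToolPath :: FilePath -> String -> FilePath
internalToolPath buildDirPath toolName =
  buildDirPath </> toolName </> toolName <.> ""   -- "" on Unix, "exe" on Windows

main :: IO ()
main = putStrLn (internalToolPath "dist/build" "my-preproc")
-- prints "dist/build/my-preproc/my-preproc"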
- -buildExe :: Verbosity -> Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe verbosity numJobs pkg_descr lbi exe clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.buildExe verbosity numJobs pkg_descr lbi exe clbi - GHCJS -> GHCJS.buildExe verbosity numJobs pkg_descr lbi exe clbi - JHC -> JHC.buildExe verbosity pkg_descr lbi exe clbi - LHC -> LHC.buildExe verbosity pkg_descr lbi exe clbi - UHC -> UHC.buildExe verbosity pkg_descr lbi exe clbi - _ -> die "Building is not supported with this compiler." - -replLib :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -replLib verbosity pkg_descr lbi lib clbi = - case compilerFlavor (compiler lbi) of - -- 'cabal repl' doesn't need to support 'ghc --make -j', so we just pass - -- NoFlag as the numJobs parameter. - GHC -> GHC.replLib verbosity NoFlag pkg_descr lbi lib clbi - GHCJS -> GHCJS.replLib verbosity NoFlag pkg_descr lbi lib clbi - _ -> die "A REPL is not supported for this compiler." - -replExe :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -replExe verbosity pkg_descr lbi exe clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.replExe verbosity NoFlag pkg_descr lbi exe clbi - GHCJS -> GHCJS.replExe verbosity NoFlag pkg_descr lbi exe clbi - _ -> die "A REPL is not supported for this compiler." - - -initialBuildSteps :: FilePath -- ^"dist" prefix - -> PackageDescription -- ^mostly information from the .cabal file - -> LocalBuildInfo -- ^Configuration information - -> Verbosity -- ^The verbosity to use - -> IO () -initialBuildSteps _distPref pkg_descr lbi verbosity = do - -- check that there's something to build - unless (not . null $ allBuildInfo pkg_descr) $ do - let name = display (packageId pkg_descr) - die $ "No libraries, executables, tests, or benchmarks " - ++ "are enabled for package " ++ name ++ "." - - createDirectoryIfMissingVerbose verbosity True (buildDir lbi) - - writeAutogenFiles verbosity pkg_descr lbi - --- | Generate and write out the Paths_.hs and cabal_macros.h files --- -writeAutogenFiles :: Verbosity - -> PackageDescription - -> LocalBuildInfo - -> IO () -writeAutogenFiles verbosity pkg lbi = do - createDirectoryIfMissingVerbose verbosity True (autogenModulesDir lbi) - - let pathsModulePath = autogenModulesDir lbi - ModuleName.toFilePath (autogenModuleName pkg) <.> "hs" - rewriteFile pathsModulePath (Build.PathsModule.generate pkg lbi) - - let cppHeaderPath = autogenModulesDir lbi cppHeaderName - rewriteFile cppHeaderPath (Build.Macros.generate pkg lbi) - --- | Check that the given build targets are valid in the current context. --- --- Also swizzle into a more convenient form. 
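-- Illustrative sketch of where writeAutogenFiles (above) places its two
-- outputs for a hypothetical package named "foo", assuming the default
-- "dist" prefix (the real locations come from autogenModulesDir and the
-- configured build directory).
import System.FilePath ((<.>), (</>))

autogenOutputs :: [FilePath]
autogenOutputs =
  [ "dist/build/autogen" </> "Paths_foo" <.> "hs"
  , "dist/build/autogen" </> "cabal_macros.h"
  ]

main :: IO ()
main = mapM_ putStrLn autogenOutputs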
--- -checkBuildTargets :: Verbosity -> PackageDescription -> [BuildTarget] - -> IO [(ComponentName, Maybe (Either ModuleName FilePath))] -checkBuildTargets _ pkg [] = - return [ (componentName c, Nothing) | c <- pkgEnabledComponents pkg ] - -checkBuildTargets verbosity pkg targets = do - - let (enabled, disabled) = - partitionEithers - [ case componentDisabledReason (getComponent pkg cname) of - Nothing -> Left target' - Just reason -> Right (cname, reason) - | target <- targets - , let target'@(cname,_) = swizzleTarget target ] - - case disabled of - [] -> return () - ((cname,reason):_) -> die $ formatReason (showComponentName cname) reason - - forM_ [ (c, t) | (c, Just t) <- enabled ] $ \(c, t) -> - warn verbosity $ "Ignoring '" ++ either display id t ++ ". The whole " - ++ showComponentName c ++ " will be built. (Support for " - ++ "module and file targets has not been implemented yet.)" - - return enabled - - where - swizzleTarget (BuildTargetComponent c) = (c, Nothing) - swizzleTarget (BuildTargetModule c m) = (c, Just (Left m)) - swizzleTarget (BuildTargetFile c f) = (c, Just (Right f)) - - formatReason cn DisabledComponent = - "Cannot build the " ++ cn ++ " because the component is marked " - ++ "as disabled in the .cabal file." - formatReason cn DisabledAllTests = - "Cannot build the " ++ cn ++ " because test suites are not " - ++ "enabled. Run configure with the flag --enable-tests" - formatReason cn DisabledAllBenchmarks = - "Cannot build the " ++ cn ++ " because benchmarks are not " - ++ "enabled. Re-run configure with the flag --enable-benchmarks" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/BuildPaths.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/BuildPaths.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/BuildPaths.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/BuildPaths.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,121 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.BuildPaths --- Copyright : Isaac Jones 2003-2004, --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A bunch of dirs, paths and file names used for intermediate build steps. --- - -module Distribution.Simple.BuildPaths ( - defaultDistPref, srcPref, - haddockDirName, hscolourPref, haddockPref, - autogenModulesDir, - - autogenModuleName, - cppHeaderName, - haddockName, - - mkLibName, - mkProfLibName, - mkSharedLibName, - - exeExtension, - objExtension, - dllExtension, - - ) where - - -import Distribution.Package -import Distribution.ModuleName as ModuleName -import Distribution.Compiler -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Setup -import Distribution.Text -import Distribution.System - -import System.FilePath ((), (<.>)) - --- --------------------------------------------------------------------------- --- Build directories and files - -srcPref :: FilePath -> FilePath -srcPref distPref = distPref "src" - -hscolourPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath -hscolourPref = haddockPref - --- | This is the name of the directory in which the generated haddocks --- should be stored. It does not include the @/doc/html@ prefix. 
-haddockDirName :: HaddockTarget -> PackageDescription -> FilePath -haddockDirName ForDevelopment = display . packageName -haddockDirName ForHackage = (++ "-docs") . display . packageId - --- | The directory to which generated haddock documentation should be written. -haddockPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath -haddockPref haddockTarget distPref pkg_descr - = distPref "doc" "html" haddockDirName haddockTarget pkg_descr - --- |The directory in which we put auto-generated modules -autogenModulesDir :: LocalBuildInfo -> String -autogenModulesDir lbi = buildDir lbi "autogen" - -cppHeaderName :: String -cppHeaderName = "cabal_macros.h" - --- |The name of the auto-generated module associated with a package -autogenModuleName :: PackageDescription -> ModuleName -autogenModuleName pkg_descr = - ModuleName.fromString $ - "Paths_" ++ map fixchar (display (packageName pkg_descr)) - where fixchar '-' = '_' - fixchar c = c - -haddockName :: PackageDescription -> FilePath -haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock" - --- --------------------------------------------------------------------------- --- Library file names - -mkLibName :: UnitId -> String -mkLibName lib = "lib" ++ getHSLibraryName lib <.> "a" - -mkProfLibName :: UnitId -> String -mkProfLibName lib = "lib" ++ getHSLibraryName lib ++ "_p" <.> "a" - --- Implement proper name mangling for dynamical shared objects --- libHS- --- e.g. libHSbase-2.1-ghc6.6.1.so -mkSharedLibName :: CompilerId -> UnitId -> String -mkSharedLibName (CompilerId compilerFlavor compilerVersion) lib - = "lib" ++ getHSLibraryName lib ++ "-" ++ comp <.> dllExtension - where comp = display compilerFlavor ++ display compilerVersion - --- ------------------------------------------------------------ --- * Platform file extensions --- ------------------------------------------------------------ - --- | Default extension for executable files on the current platform. --- (typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2) -exeExtension :: String -exeExtension = case buildOS of - Windows -> "exe" - _ -> "" - --- | Extension for object files. For GHC the extension is @\"o\"@. 
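-- Illustrative sketch mirroring the name mangling used by autogenModuleName
-- above: '-' in the package name becomes '_'.  `pathsModuleNameFor` is
-- written for this note only.
pathsModuleNameFor :: String -> String
pathsModuleNameFor pkgName = "Paths_" ++ map fixchar pkgName
  where
    fixchar '-' = '_'
    fixchar c   = c

main :: IO ()
main = putStrLn (pathsModuleNameFor "foo-bar")   -- prints "Paths_foo_bar"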
-objExtension :: String -objExtension = "o" - --- | Extension for dynamically linked (or shared) libraries --- (typically @\"so\"@ on Unix and @\"dll\"@ on Windows) -dllExtension :: String -dllExtension = case buildOS of - Windows -> "dll" - OSX -> "dylib" - _ -> "so" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/BuildTarget.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/BuildTarget.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/BuildTarget.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,940 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.BuildTargets --- Copyright : (c) Duncan Coutts 2012 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified build targets ------------------------------------------------------------------------------ -module Distribution.Simple.BuildTarget ( - - -- * Build targets - BuildTarget(..), - readBuildTargets, - showBuildTarget, - QualLevel(..), - buildTargetComponentName, - - -- * Parsing user build targets - UserBuildTarget, - readUserBuildTargets, - showUserBuildTarget, - UserBuildTargetProblem(..), - reportUserBuildTargetProblems, - - -- * Resolving build targets - resolveBuildTargets, - BuildTargetProblem(..), - reportBuildTargetProblems, - ) where - -import Distribution.Package -import Distribution.PackageDescription -import Distribution.ModuleName -import Distribution.Simple.LocalBuildInfo -import Distribution.Text -import Distribution.Simple.Utils - -import Distribution.Compat.Binary (Binary) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( (+++), (<++) ) - -import Data.List - ( nub, stripPrefix, sortBy, groupBy, partition ) -import Data.Maybe - ( listToMaybe, catMaybes ) -import Data.Either - ( partitionEithers ) -import GHC.Generics (Generic) -import qualified Data.Map as Map -import Control.Monad -import Control.Applicative as AP (Alternative(..), Applicative(..)) -import Data.Char - ( isSpace, isAlphaNum ) -import System.FilePath as FilePath - ( dropExtension, normalise, splitDirectories, joinPath, splitPath - , hasTrailingPathSeparator ) -import System.Directory - ( doesFileExist, doesDirectoryExist ) - --- ------------------------------------------------------------ --- * User build targets --- ------------------------------------------------------------ - --- | Various ways that a user may specify a build target. --- -data UserBuildTarget = - - -- | A target specified by a single name. This could be a component - -- module or file. - -- - -- > cabal build foo - -- > cabal build Data.Foo - -- > cabal build Data/Foo.hs Data/Foo.hsc - -- - UserBuildTargetSingle String - - -- | A target specified by a qualifier and name. This could be a component - -- name qualified by the component namespace kind, or a module or file - -- qualified by the component name. - -- - -- > cabal build lib:foo exe:foo - -- > cabal build foo:Data.Foo - -- > cabal build foo:Data/Foo.hs - -- - | UserBuildTargetDouble String String - - -- A fully qualified target, either a module or file qualified by a - -- component name with the component namespace kind. 
- -- - -- > cabal build lib:foo:Data/Foo.hs exe:foo:Data/Foo.hs - -- > cabal build lib:foo:Data.Foo exe:foo:Data.Foo - -- - | UserBuildTargetTriple String String String - deriving (Show, Eq, Ord) - - --- ------------------------------------------------------------ --- * Resolved build targets --- ------------------------------------------------------------ - --- | A fully resolved build target. --- -data BuildTarget = - - -- | A specific component - -- - BuildTargetComponent ComponentName - - -- | A specific module within a specific component. - -- - | BuildTargetModule ComponentName ModuleName - - -- | A specific file within a specific component. - -- - | BuildTargetFile ComponentName FilePath - deriving (Eq, Show, Generic) - -instance Binary BuildTarget - -buildTargetComponentName :: BuildTarget -> ComponentName -buildTargetComponentName (BuildTargetComponent cn) = cn -buildTargetComponentName (BuildTargetModule cn _) = cn -buildTargetComponentName (BuildTargetFile cn _) = cn - --- | Read a list of user-supplied build target strings and resolve them to --- 'BuildTarget's according to a 'PackageDescription'. If there are problems --- with any of the targets e.g. they don't exist or are misformatted, throw an --- 'IOException'. -readBuildTargets :: PackageDescription -> [String] -> IO [BuildTarget] -readBuildTargets pkg targetStrs = do - let (uproblems, utargets) = readUserBuildTargets targetStrs - reportUserBuildTargetProblems uproblems - - utargets' <- mapM checkTargetExistsAsFile utargets - - let (bproblems, btargets) = resolveBuildTargets pkg utargets' - reportBuildTargetProblems bproblems - - return btargets - -checkTargetExistsAsFile :: UserBuildTarget -> IO (UserBuildTarget, Bool) -checkTargetExistsAsFile t = do - fexists <- existsAsFile (fileComponentOfTarget t) - return (t, fexists) - - where - existsAsFile f = do - exists <- doesFileExist f - case splitPath f of - (d:_) | hasTrailingPathSeparator d -> doesDirectoryExist d - (d:_:_) | not exists -> doesDirectoryExist d - _ -> return exists - - fileComponentOfTarget (UserBuildTargetSingle s1) = s1 - fileComponentOfTarget (UserBuildTargetDouble _ s2) = s2 - fileComponentOfTarget (UserBuildTargetTriple _ _ s3) = s3 - - --- ------------------------------------------------------------ --- * Parsing user targets --- ------------------------------------------------------------ - -readUserBuildTargets :: [String] -> ([UserBuildTargetProblem] - ,[UserBuildTarget]) -readUserBuildTargets = partitionEithers . 
map readUserBuildTarget - -readUserBuildTarget :: String -> Either UserBuildTargetProblem - UserBuildTarget -readUserBuildTarget targetstr = - case readPToMaybe parseTargetApprox targetstr of - Nothing -> Left (UserBuildTargetUnrecognised targetstr) - Just tgt -> Right tgt - - where - parseTargetApprox :: Parse.ReadP r UserBuildTarget - parseTargetApprox = - (do a <- tokenQ - return (UserBuildTargetSingle a)) - +++ (do a <- token - _ <- Parse.char ':' - b <- tokenQ - return (UserBuildTargetDouble a b)) - +++ (do a <- token - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - return (UserBuildTargetTriple a b c)) - - token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') - tokenQ = parseHaskellString <++ token - parseHaskellString :: Parse.ReadP r String - parseHaskellString = Parse.readS_to_P reads - - readPToMaybe :: Parse.ReadP a a -> String -> Maybe a - readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str - , all isSpace s ] - -data UserBuildTargetProblem - = UserBuildTargetUnrecognised String - deriving Show - -reportUserBuildTargetProblems :: [UserBuildTargetProblem] -> IO () -reportUserBuildTargetProblems problems = do - case [ target | UserBuildTargetUnrecognised target <- problems ] of - [] -> return () - target -> - die $ unlines - [ "Unrecognised build target '" ++ name ++ "'." - | name <- target ] - ++ "Examples:\n" - ++ " - build foo -- component name " - ++ "(library, executable, test-suite or benchmark)\n" - ++ " - build Data.Foo -- module name\n" - ++ " - build Data/Foo.hsc -- file name\n" - ++ " - build lib:foo exe:foo -- component qualified by kind\n" - ++ " - build foo:Data.Foo -- module qualified by component\n" - ++ " - build foo:Data/Foo.hsc -- file qualified by component" - -showUserBuildTarget :: UserBuildTarget -> String -showUserBuildTarget = intercalate ":" . getComponents - where - getComponents (UserBuildTargetSingle s1) = [s1] - getComponents (UserBuildTargetDouble s1 s2) = [s1,s2] - getComponents (UserBuildTargetTriple s1 s2 s3) = [s1,s2,s3] - -showBuildTarget :: QualLevel -> PackageId -> BuildTarget -> String -showBuildTarget ql pkgid bt = - showUserBuildTarget (renderBuildTarget ql bt pkgid) - - --- ------------------------------------------------------------ --- * Resolving user targets to build targets --- ------------------------------------------------------------ - -{- -stargets = - [ BuildTargetComponent (CExeName "foo") - , BuildTargetModule (CExeName "foo") (mkMn "Foo") - , BuildTargetModule (CExeName "tst") (mkMn "Foo") - ] - where - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - -ex_pkgid :: PackageIdentifier -Just ex_pkgid = simpleParse "thelib" --} - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. --- -resolveBuildTargets :: PackageDescription - -> [(UserBuildTarget, Bool)] - -> ([BuildTargetProblem], [BuildTarget]) -resolveBuildTargets pkg = partitionEithers - . 
map (uncurry (resolveBuildTarget pkg)) - -resolveBuildTarget :: PackageDescription -> UserBuildTarget -> Bool - -> Either BuildTargetProblem BuildTarget -resolveBuildTarget pkg userTarget fexists = - case findMatch (matchBuildTarget pkg userTarget fexists) of - Unambiguous target -> Right target - Ambiguous targets -> Left (BuildTargetAmbiguous userTarget targets') - where targets' = disambiguateBuildTargets - (packageId pkg) userTarget - targets - None errs -> Left (classifyMatchErrors errs) - - where - classifyMatchErrors errs - | not (null expected) = let (things, got:_) = unzip expected in - BuildTargetExpected userTarget things got - | not (null nosuch) = BuildTargetNoSuch userTarget nosuch - | otherwise = error $ "resolveBuildTarget: internal error in matching" - where - expected = [ (thing, got) | MatchErrorExpected thing got <- errs ] - nosuch = [ (thing, got) | MatchErrorNoSuch thing got <- errs ] - - -data BuildTargetProblem - = BuildTargetExpected UserBuildTarget [String] String - -- ^ [expected thing] (actually got) - | BuildTargetNoSuch UserBuildTarget [(String, String)] - -- ^ [(no such thing, actually got)] - | BuildTargetAmbiguous UserBuildTarget [(UserBuildTarget, BuildTarget)] - deriving Show - - -disambiguateBuildTargets :: PackageId -> UserBuildTarget -> [BuildTarget] - -> [(UserBuildTarget, BuildTarget)] -disambiguateBuildTargets pkgid original = - disambiguate (userTargetQualLevel original) - where - disambiguate ql ts - | null amb = unamb - | otherwise = unamb ++ disambiguate (succ ql) amb - where - (amb, unamb) = step ql ts - - userTargetQualLevel (UserBuildTargetSingle _ ) = QL1 - userTargetQualLevel (UserBuildTargetDouble _ _ ) = QL2 - userTargetQualLevel (UserBuildTargetTriple _ _ _) = QL3 - - step :: QualLevel -> [BuildTarget] - -> ([BuildTarget], [(UserBuildTarget, BuildTarget)]) - step ql = (\(amb, unamb) -> (map snd $ concat amb, concat unamb)) - . partition (\g -> length g > 1) - . groupBy (equating fst) - . sortBy (comparing fst) - . map (\t -> (renderBuildTarget ql t pkgid, t)) - -data QualLevel = QL1 | QL2 | QL3 - deriving (Enum, Show) - -renderBuildTarget :: QualLevel -> BuildTarget -> PackageId -> UserBuildTarget -renderBuildTarget ql target pkgid = - case ql of - QL1 -> UserBuildTargetSingle s1 where s1 = single target - QL2 -> UserBuildTargetDouble s1 s2 where (s1, s2) = double target - QL3 -> UserBuildTargetTriple s1 s2 s3 where (s1, s2, s3) = triple target - - where - single (BuildTargetComponent cn ) = dispCName cn - single (BuildTargetModule _ m) = display m - single (BuildTargetFile _ f) = f - - double (BuildTargetComponent cn ) = (dispKind cn, dispCName cn) - double (BuildTargetModule cn m) = (dispCName cn, display m) - double (BuildTargetFile cn f) = (dispCName cn, f) - - triple (BuildTargetComponent _ ) = error "triple BuildTargetComponent" - triple (BuildTargetModule cn m) = (dispKind cn, dispCName cn, display m) - triple (BuildTargetFile cn f) = (dispKind cn, dispCName cn, f) - - dispCName = componentStringName pkgid - dispKind = showComponentKindShort . componentKind - -reportBuildTargetProblems :: [BuildTargetProblem] -> IO () -reportBuildTargetProblems problems = do - - case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Unrecognised build target '" ++ showUserBuildTarget target - ++ "'.\n" - ++ "Expected a " ++ intercalate " or " expected - ++ ", rather than '" ++ got ++ "'." 
- | (target, expected, got) <- targets ] - - case [ (t, e) | BuildTargetNoSuch t e <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Unknown build target '" ++ showUserBuildTarget target - ++ "'.\nThere is no " - ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" - | (thing, got) <- nosuch ] ++ "." - | (target, nosuch) <- targets ] - where - mungeThing "file" = "file target" - mungeThing thing = thing - - case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Ambiguous build target '" ++ showUserBuildTarget target - ++ "'. It could be:\n " - ++ unlines [ " "++ showUserBuildTarget ut ++ - " (" ++ showBuildTargetKind bt ++ ")" - | (ut, bt) <- amb ] - | (target, amb) <- targets ] - - where - showBuildTargetKind (BuildTargetComponent _ ) = "component" - showBuildTargetKind (BuildTargetModule _ _) = "module" - showBuildTargetKind (BuildTargetFile _ _) = "file" - - ----------------------------------- --- Top level BuildTarget matcher --- - -matchBuildTarget :: PackageDescription - -> UserBuildTarget -> Bool -> Match BuildTarget -matchBuildTarget pkg = \utarget fexists -> - case utarget of - UserBuildTargetSingle str1 -> - matchBuildTarget1 cinfo str1 fexists - - UserBuildTargetDouble str1 str2 -> - matchBuildTarget2 cinfo str1 str2 fexists - - UserBuildTargetTriple str1 str2 str3 -> - matchBuildTarget3 cinfo str1 str2 str3 fexists - where - cinfo = pkgComponentInfo pkg - -matchBuildTarget1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget -matchBuildTarget1 cinfo str1 fexists = - matchComponent1 cinfo str1 - `matchPlusShadowing` matchModule1 cinfo str1 - `matchPlusShadowing` matchFile1 cinfo str1 fexists - - -matchBuildTarget2 :: [ComponentInfo] -> String -> String -> Bool - -> Match BuildTarget -matchBuildTarget2 cinfo str1 str2 fexists = - matchComponent2 cinfo str1 str2 - `matchPlusShadowing` matchModule2 cinfo str1 str2 - `matchPlusShadowing` matchFile2 cinfo str1 str2 fexists - - -matchBuildTarget3 :: [ComponentInfo] -> String -> String -> String -> Bool - -> Match BuildTarget -matchBuildTarget3 cinfo str1 str2 str3 fexists = - matchModule3 cinfo str1 str2 str3 - `matchPlusShadowing` matchFile3 cinfo str1 str2 str3 fexists - - -data ComponentInfo = ComponentInfo { - cinfoName :: ComponentName, - cinfoStrName :: ComponentStringName, - cinfoSrcDirs :: [FilePath], - cinfoModules :: [ModuleName], - cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) - cinfoCFiles :: [FilePath], - cinfoJsFiles :: [FilePath] - } - -type ComponentStringName = String - -pkgComponentInfo :: PackageDescription -> [ComponentInfo] -pkgComponentInfo pkg = - [ ComponentInfo { - cinfoName = componentName c, - cinfoStrName = componentStringName pkg (componentName c), - cinfoSrcDirs = hsSourceDirs bi, - cinfoModules = componentModules c, - cinfoHsFiles = componentHsFiles c, - cinfoCFiles = cSources bi, - cinfoJsFiles = jsSources bi - } - | c <- pkgComponents pkg - , let bi = componentBuildInfo c ] - -componentStringName :: Package pkg => pkg -> ComponentName -> ComponentStringName -componentStringName pkg CLibName = display (packageName pkg) -componentStringName _ (CExeName name) = name -componentStringName _ (CTestName name) = name -componentStringName _ (CBenchName name) = name - -componentModules :: Component -> [ModuleName] -componentModules (CLib lib) = libModules lib -componentModules (CExe exe) = exeModules exe -componentModules (CTest test) = testModules test -componentModules (CBench bench) = benchmarkModules 
bench - -componentHsFiles :: Component -> [FilePath] -componentHsFiles (CExe exe) = [modulePath exe] -componentHsFiles (CTest TestSuite { - testInterface = TestSuiteExeV10 _ mainfile - }) = [mainfile] -componentHsFiles (CBench Benchmark { - benchmarkInterface = BenchmarkExeV10 _ mainfile - }) = [mainfile] -componentHsFiles _ = [] - -{- -ex_cs :: [ComponentInfo] -ex_cs = - [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) - , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) - ] - where - mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - pkgid :: PackageIdentifier - Just pkgid = simpleParse "thelib" --} - ------------------------------- --- Matching component kinds --- - -data ComponentKind = LibKind | ExeKind | TestKind | BenchKind - deriving (Eq, Ord, Show) - -componentKind :: ComponentName -> ComponentKind -componentKind CLibName = LibKind -componentKind (CExeName _) = ExeKind -componentKind (CTestName _) = TestKind -componentKind (CBenchName _) = BenchKind - -cinfoKind :: ComponentInfo -> ComponentKind -cinfoKind = componentKind . cinfoName - -matchComponentKind :: String -> Match ComponentKind -matchComponentKind s - | s `elem` ["lib", "library"] = increaseConfidence >> return LibKind - | s `elem` ["exe", "executable"] = increaseConfidence >> return ExeKind - | s `elem` ["tst", "test", "test-suite"] = increaseConfidence - >> return TestKind - | s `elem` ["bench", "benchmark"] = increaseConfidence - >> return BenchKind - | otherwise = matchErrorExpected - "component kind" s - -showComponentKind :: ComponentKind -> String -showComponentKind LibKind = "library" -showComponentKind ExeKind = "executable" -showComponentKind TestKind = "test-suite" -showComponentKind BenchKind = "benchmark" - -showComponentKindShort :: ComponentKind -> String -showComponentKindShort LibKind = "lib" -showComponentKindShort ExeKind = "exe" -showComponentKindShort TestKind = "test" -showComponentKindShort BenchKind = "bench" - ------------------------------- --- Matching component targets --- - -matchComponent1 :: [ComponentInfo] -> String -> Match BuildTarget -matchComponent1 cs = \str1 -> do - guardComponentName str1 - c <- matchComponentName cs str1 - return (BuildTargetComponent (cinfoName c)) - -matchComponent2 :: [ComponentInfo] -> String -> String -> Match BuildTarget -matchComponent2 cs = \str1 str2 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - return (BuildTargetComponent (cinfoName c)) - --- utils: - -guardComponentName :: String -> Match () -guardComponentName s - | all validComponentChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "component name" s - where - validComponentChar c = isAlphaNum c || c == '.' 
- || c == '_' || c == '-' || c == '\'' - -matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo -matchComponentName cs str = - orNoSuchThing "component" str - $ increaseConfidenceFor - $ matchInexactly caseFold - [ (cinfoStrName c, c) | c <- cs ] - str - -matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String - -> Match ComponentInfo -matchComponentKindAndName cs ckind str = - orNoSuchThing (showComponentKind ckind ++ " component") str - $ increaseConfidenceFor - $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) - [ ((cinfoKind c, cinfoStrName c), c) | c <- cs ] - (ckind, str) - - ------------------------------- --- Matching module targets --- - -matchModule1 :: [ComponentInfo] -> String -> Match BuildTarget -matchModule1 cs = \str1 -> do - guardModuleName str1 - nubMatchErrors $ do - c <- tryEach cs - let ms = cinfoModules c - m <- matchModuleName ms str1 - return (BuildTargetModule (cinfoName c) m) - -matchModule2 :: [ComponentInfo] -> String -> String -> Match BuildTarget -matchModule2 cs = \str1 str2 -> do - guardComponentName str1 - guardModuleName str2 - c <- matchComponentName cs str1 - let ms = cinfoModules c - m <- matchModuleName ms str2 - return (BuildTargetModule (cinfoName c) m) - -matchModule3 :: [ComponentInfo] -> String -> String -> String - -> Match BuildTarget -matchModule3 cs str1 str2 str3 = do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - guardModuleName str3 - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (BuildTargetModule (cinfoName c) m) - --- utils: - -guardModuleName :: String -> Match () -guardModuleName s - | all validModuleChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "module name" s - where - validModuleChar c = isAlphaNum c || c == '.' 
|| c == '_' || c == '\'' - -matchModuleName :: [ModuleName] -> String -> Match ModuleName -matchModuleName ms str = - orNoSuchThing "module" str - $ increaseConfidenceFor - $ matchInexactly caseFold - [ (display m, m) - | m <- ms ] - str - - ------------------------------- --- Matching file targets --- - -matchFile1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget -matchFile1 cs str1 exists = - nubMatchErrors $ do - c <- tryEach cs - filepath <- matchComponentFile c str1 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchFile2 :: [ComponentInfo] -> String -> String -> Bool -> Match BuildTarget -matchFile2 cs str1 str2 exists = do - guardComponentName str1 - c <- matchComponentName cs str1 - filepath <- matchComponentFile c str2 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchFile3 :: [ComponentInfo] -> String -> String -> String -> Bool - -> Match BuildTarget -matchFile3 cs str1 str2 str3 exists = do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - filepath <- matchComponentFile c str3 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchComponentFile :: ComponentInfo -> String -> Bool -> Match FilePath -matchComponentFile c str fexists = - expecting "file" str $ - matchPlus - (matchFileExists str fexists) - (matchPlusShadowing - (msum [ matchModuleFileRooted dirs ms str - , matchOtherFileRooted dirs hsFiles str ]) - (msum [ matchModuleFileUnrooted ms str - , matchOtherFileUnrooted hsFiles str - , matchOtherFileUnrooted cFiles str - , matchOtherFileUnrooted jsFiles str ])) - where - dirs = cinfoSrcDirs c - ms = cinfoModules c - hsFiles = cinfoHsFiles c - cFiles = cinfoCFiles c - jsFiles = cinfoJsFiles c - - --- utils - -matchFileExists :: FilePath -> Bool -> Match a -matchFileExists _ False = mzero -matchFileExists fname True = do increaseConfidence - matchErrorNoSuch "file" fname - -matchModuleFileUnrooted :: [ModuleName] -> String -> Match FilePath -matchModuleFileUnrooted ms str = do - let filepath = normalise str - _ <- matchModuleFileStem ms filepath - return filepath - -matchModuleFileRooted :: [FilePath] -> [ModuleName] -> String -> Match FilePath -matchModuleFileRooted dirs ms str = nubMatches $ do - let filepath = normalise str - filepath' <- matchDirectoryPrefix dirs filepath - _ <- matchModuleFileStem ms filepath' - return filepath - -matchModuleFileStem :: [ModuleName] -> FilePath -> Match ModuleName -matchModuleFileStem ms = - increaseConfidenceFor - . matchInexactly caseFold - [ (toFilePath m, m) | m <- ms ] - . dropExtension - -matchOtherFileRooted :: [FilePath] -> [FilePath] -> FilePath -> Match FilePath -matchOtherFileRooted dirs fs str = do - let filepath = normalise str - filepath' <- matchDirectoryPrefix dirs filepath - _ <- matchFile fs filepath' - return filepath - -matchOtherFileUnrooted :: [FilePath] -> FilePath -> Match FilePath -matchOtherFileUnrooted fs str = do - let filepath = normalise str - _ <- matchFile fs filepath - return filepath - -matchFile :: [FilePath] -> FilePath -> Match FilePath -matchFile fs = increaseConfidenceFor - . 
matchInexactly caseFold [ (f, f) | f <- fs ] - -matchDirectoryPrefix :: [FilePath] -> FilePath -> Match FilePath -matchDirectoryPrefix dirs filepath = - exactMatches $ - catMaybes - [ stripDirectory (normalise dir) filepath | dir <- dirs ] - where - stripDirectory :: FilePath -> FilePath -> Maybe FilePath - stripDirectory dir fp = - joinPath `fmap` stripPrefix (splitDirectories dir) (splitDirectories fp) - - ------------------------------- --- Matching monad --- - --- | A matcher embodies a way to match some input as being some recognised --- value. In particular it deals with multiple and ambiguous matches. --- --- There are various matcher primitives ('matchExactly', 'matchInexactly'), --- ways to combine matchers ('ambiguousWith', 'shadows') and finally we can --- run a matcher against an input using 'findMatch'. --- - -data Match a = NoMatch Confidence [MatchError] - | ExactMatch Confidence [a] - | InexactMatch Confidence [a] - deriving Show - -type Confidence = Int - -data MatchError = MatchErrorExpected String String - | MatchErrorNoSuch String String - deriving (Show, Eq) - - -instance Alternative Match where - empty = mzero - (<|>) = mplus - -instance MonadPlus Match where - mzero = matchZero - mplus = matchPlus - -matchZero :: Match a -matchZero = NoMatch 0 [] - --- | Combine two matchers. Exact matches are used over inexact matches --- but if we have multiple exact, or inexact then the we collect all the --- ambiguous matches. --- -matchPlus :: Match a -> Match a -> Match a -matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') = - ExactMatch (max d1 d2) (xs ++ xs') -matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a -matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a -matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b -matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') = - InexactMatch (max d1 d2) (xs ++ xs') -matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a -matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b -matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b -matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms') - | d1 > d2 = a - | d1 < d2 = b - | otherwise = NoMatch d1 (ms ++ ms') - --- | Combine two matchers. This is similar to 'ambiguousWith' with the --- difference that an exact match from the left matcher shadows any exact --- match on the right. Inexact matches are still collected however. --- -matchPlusShadowing :: Match a -> Match a -> Match a -matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a -matchPlusShadowing a b = matchPlus a b - -instance Functor Match where - fmap _ (NoMatch d ms) = NoMatch d ms - fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs) - fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs) - -instance Applicative Match where - pure a = ExactMatch 0 [a] - (<*>) = ap - -instance Monad Match where - return = AP.pure - - NoMatch d ms >>= _ = NoMatch d ms - ExactMatch d xs >>= f = addDepth d - $ foldr matchPlus matchZero (map f xs) - InexactMatch d xs >>= f = addDepth d . 
forceInexact - $ foldr matchPlus matchZero (map f xs) - -addDepth :: Confidence -> Match a -> Match a -addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs -addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs -addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs - -forceInexact :: Match a -> Match a -forceInexact (ExactMatch d ys) = InexactMatch d ys -forceInexact m = m - ------------------------------- --- Various match primitives --- - -matchErrorExpected, matchErrorNoSuch :: String -> String -> Match a -matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] -matchErrorNoSuch thing got = NoMatch 0 [MatchErrorNoSuch thing got] - -expecting :: String -> String -> Match a -> Match a -expecting thing got (NoMatch 0 _) = matchErrorExpected thing got -expecting _ _ m = m - -orNoSuchThing :: String -> String -> Match a -> Match a -orNoSuchThing thing got (NoMatch 0 _) = matchErrorNoSuch thing got -orNoSuchThing _ _ m = m - -increaseConfidence :: Match () -increaseConfidence = ExactMatch 1 [()] - -increaseConfidenceFor :: Match a -> Match a -increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r - -nubMatches :: Eq a => Match a -> Match a -nubMatches (NoMatch d msgs) = NoMatch d msgs -nubMatches (ExactMatch d xs) = ExactMatch d (nub xs) -nubMatches (InexactMatch d xs) = InexactMatch d (nub xs) - -nubMatchErrors :: Match a -> Match a -nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs) -nubMatchErrors (ExactMatch d xs) = ExactMatch d xs -nubMatchErrors (InexactMatch d xs) = InexactMatch d xs - --- | Lift a list of matches to an exact match. --- -exactMatches, inexactMatches :: [a] -> Match a - -exactMatches [] = matchZero -exactMatches xs = ExactMatch 0 xs - -inexactMatches [] = matchZero -inexactMatches xs = InexactMatch 0 xs - -tryEach :: [a] -> Match a -tryEach = exactMatches - - ------------------------------- --- Top level match runner --- - --- | Given a matcher and a key to look up, use the matcher to find all the --- possible matches. There may be 'None', a single 'Unambiguous' match or --- you may have an 'Ambiguous' match with several possibilities. --- -findMatch :: Eq b => Match b -> MaybeAmbiguous b -findMatch match = - case match of - NoMatch _ msgs -> None (nub msgs) - ExactMatch _ xs -> checkAmbiguous xs - InexactMatch _ xs -> checkAmbiguous xs - where - checkAmbiguous xs = case nub xs of - [x] -> Unambiguous x - xs' -> Ambiguous xs' - -data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous [a] - deriving Show - - ------------------------------- --- Basic matchers --- - -{- --- | A primitive matcher that looks up a value in a finite 'Map'. The --- value must match exactly. --- -matchExactly :: forall a b. Ord a => [(a, b)] -> (a -> Match b) -matchExactly xs = - \x -> case Map.lookup x m of - Nothing -> matchZero - Just ys -> ExactMatch 0 ys - where - m :: Ord a => Map a [b] - m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] --} - --- | A primitive matcher that looks up a value in a finite 'Map'. It checks --- for an exact or inexact match. We get an inexact match if the match --- is not exact, but the canonical forms match. It takes a canonicalisation --- function for this purpose. --- --- So for example if we used string case fold as the canonicalisation --- function, then we would get case insensitive matching (but it will still --- report an exact match when the case matches too). 
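As a rough, self-contained sketch of the exact-versus-canonical lookup strategy described above: the helper below (lookupInexact, with made-up sample keys) first tries the key as given and only then falls back to its canonicalised form, so with a lowercasing canonicaliser it shows the case-insensitive behaviour the comment describes.

    import qualified Data.Map as Map
    import Data.Char (toLower)

    -- Exact lookup first, then a fallback lookup on the canonicalised key.
    -- The Bool records whether the hit was exact (illustration only).
    lookupInexact :: Ord k => (k -> k) -> [(k, v)] -> k -> Maybe (Bool, [v])
    lookupInexact canon kvs x =
        case Map.lookup x m of
          Just vs -> Just (True, vs)                  -- exact match
          Nothing -> case Map.lookup (canon x) m' of
                       Just vs -> Just (False, vs)    -- inexact (canonical) match
                       Nothing -> Nothing
      where
        m  = Map.fromListWith (++) [ (k, [v]) | (k, v) <- kvs ]
        m' = Map.mapKeysWith (++) canon m

    main :: IO ()
    main = do
        let comps = [("Foo", 1 :: Int), ("bar", 2)]
        print (lookupInexact (map toLower) comps "Foo")   -- Just (True,[1])
        print (lookupInexact (map toLower) comps "foo")   -- Just (False,[1])
        print (lookupInexact (map toLower) comps "baz")   -- Nothing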
--- -matchInexactly :: (Ord a, Ord a') => - (a -> a') -> - [(a, b)] -> (a -> Match b) -matchInexactly cannonicalise xs = - \x -> case Map.lookup x m of - Just ys -> exactMatches ys - Nothing -> case Map.lookup (cannonicalise x) m' of - Just ys -> inexactMatches ys - Nothing -> matchZero - where - m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] - - -- the map of canonicalised keys to groups of inexact matches - m' = Map.mapKeysWith (++) cannonicalise m - - - ------------------------------- --- Utils --- - -caseFold :: String -> String -caseFold = lowercase diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/CCompiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/CCompiler.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/CCompiler.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/CCompiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,122 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.CCompiler --- Copyright : 2011, Dan Knapp --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This simple package provides types and functions for interacting with --- C compilers. Currently it's just a type enumerating extant C-like --- languages, which we call dialects. - -{- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} - -module Distribution.Simple.CCompiler ( - CDialect(..), - cSourceExtensions, - cDialectFilenameExtension, - filenameCDialect - ) where - -import Distribution.Compat.Semigroup as Semi - -import System.FilePath - ( takeExtension ) - - --- | Represents a dialect of C. The Monoid instance expresses backward --- compatibility, in the sense that 'mappend a b' is the least inclusive --- dialect which both 'a' and 'b' can be correctly interpreted as. 
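A small usage sketch of the "least inclusive common dialect" behaviour that the Semigroup/Monoid instance just below implements; it assumes a Cabal library of roughly this vintage is available to import from.

    import Distribution.Simple.CCompiler (CDialect (..))
    import Data.Monoid ((<>))

    main :: IO ()
    main = do
      print (C <> CPlusPlus)            -- CPlusPlus
      print (ObjectiveC <> CPlusPlus)   -- ObjectiveCPlusPlus
      print (mconcat [C, ObjectiveC])   -- ObjectiveC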
-data CDialect = C - | ObjectiveC - | CPlusPlus - | ObjectiveCPlusPlus - deriving (Eq, Show) - -instance Monoid CDialect where - mempty = C - mappend = (Semi.<>) - -instance Semigroup CDialect where - C <> anything = anything - ObjectiveC <> CPlusPlus = ObjectiveCPlusPlus - CPlusPlus <> ObjectiveC = ObjectiveCPlusPlus - _ <> ObjectiveCPlusPlus = ObjectiveCPlusPlus - ObjectiveC <> _ = ObjectiveC - CPlusPlus <> _ = CPlusPlus - ObjectiveCPlusPlus <> _ = ObjectiveCPlusPlus - --- | A list of all file extensions which are recognized as possibly containing --- some dialect of C code. Note that this list is only for source files, --- not for header files. -cSourceExtensions :: [String] -cSourceExtensions = ["c", "i", "ii", "m", "mi", "mm", "M", "mii", "cc", "cp", - "cxx", "cpp", "CPP", "c++", "C"] - - --- | Takes a dialect of C and whether code is intended to be passed through --- the preprocessor, and returns a filename extension for containing that --- code. -cDialectFilenameExtension :: CDialect -> Bool -> String -cDialectFilenameExtension C True = "c" -cDialectFilenameExtension C False = "i" -cDialectFilenameExtension ObjectiveC True = "m" -cDialectFilenameExtension ObjectiveC False = "mi" -cDialectFilenameExtension CPlusPlus True = "cpp" -cDialectFilenameExtension CPlusPlus False = "ii" -cDialectFilenameExtension ObjectiveCPlusPlus True = "mm" -cDialectFilenameExtension ObjectiveCPlusPlus False = "mii" - - --- | Infers from a filename's extension the dialect of C which it contains, --- and whether it is intended to be passed through the preprocessor. -filenameCDialect :: String -> Maybe (CDialect, Bool) -filenameCDialect filename = do - extension <- case takeExtension filename of - '.':ext -> Just ext - _ -> Nothing - case extension of - "c" -> return (C, True) - "i" -> return (C, False) - "ii" -> return (CPlusPlus, False) - "m" -> return (ObjectiveC, True) - "mi" -> return (ObjectiveC, False) - "mm" -> return (ObjectiveCPlusPlus, True) - "M" -> return (ObjectiveCPlusPlus, True) - "mii" -> return (ObjectiveCPlusPlus, False) - "cc" -> return (CPlusPlus, True) - "cp" -> return (CPlusPlus, True) - "cxx" -> return (CPlusPlus, True) - "cpp" -> return (CPlusPlus, True) - "CPP" -> return (CPlusPlus, True) - "c++" -> return (CPlusPlus, True) - "C" -> return (CPlusPlus, True) - _ -> Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Command.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Command.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Command.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Command.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,620 +0,0 @@ -{-# LANGUAGE ExistentialQuantification #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Command --- Copyright : Duncan Coutts 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : non-portable (ExistentialQuantification) --- --- This is to do with command line handling. The Cabal command line is --- organised into a number of named sub-commands (much like darcs). The --- 'CommandUI' abstraction represents one of these sub-commands, with a name, --- description, a set of flags. Commands can be associated with actions and --- run. 
It handles some common stuff automatically, like the @--help@ and --- command line completion flags. It is designed to allow other tools make --- derived commands. This feature is used heavily in @cabal-install@. - -module Distribution.Simple.Command ( - - -- * Command interface - CommandUI(..), - commandShowOptions, - CommandParse(..), - commandParseArgs, - getNormalCommandDescriptions, - helpCommandUI, - - -- ** Constructing commands - ShowOrParseArgs(..), - usageDefault, - usageAlternatives, - mkCommandUI, - hiddenCommand, - - -- ** Associating actions with commands - Command, - commandAddAction, - noExtraFlags, - - -- ** Building lists of commands - CommandType(..), - CommandSpec(..), - commandFromSpec, - - -- ** Running commands - commandsRun, - --- * Option Fields - OptionField(..), Name, - --- ** Constructing Option Fields - option, multiOption, - --- ** Liftings & Projections - liftOption, viewAsFieldDescr, - --- * Option Descriptions - OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder, - --- ** OptDescr 'smart' constructors - MkOptDescr, - reqArg, reqArg', optArg, optArg', noArg, - boolOpt, boolOpt', choiceOpt, choiceOptFromEnum - - ) where - -import qualified Distribution.GetOpt as GetOpt -import Distribution.Text -import Distribution.ParseUtils -import Distribution.ReadE -import Distribution.Simple.Utils - -import Control.Monad -import Data.Char (isAlpha, toLower) -import Data.List (sortBy) -import Data.Maybe -import Data.Monoid as Mon -import Text.PrettyPrint ( punctuate, cat, comma, text ) -import Text.PrettyPrint as PP ( empty ) - -data CommandUI flags = CommandUI { - -- | The name of the command as it would be entered on the command line. - -- For example @\"build\"@. - commandName :: String, - -- | A short, one line description of the command to use in help texts. - commandSynopsis :: String, - -- | A function that maps a program name to a usage summary for this - -- command. - commandUsage :: String -> String, - -- | Additional explanation of the command to use in help texts. - commandDescription :: Maybe (String -> String), - -- | Post-Usage notes and examples in help texts - commandNotes :: Maybe (String -> String), - -- | Initial \/ empty flags - commandDefaultFlags :: flags, - -- | All the Option fields for this command - commandOptions :: ShowOrParseArgs -> [OptionField flags] - } - -data ShowOrParseArgs = ShowArgs | ParseArgs -type Name = String -type Description = String - --- | We usually have a data type for storing configuration values, where --- every field stores a configuration option, and the user sets --- the value either via command line flags or a configuration file. --- An individual OptionField models such a field, and we usually --- build a list of options associated to a configuration data type. -data OptionField a = OptionField { - optionName :: Name, - optionDescr :: [OptDescr a] } - --- | An OptionField takes one or more OptDescrs, describing the command line --- interface for the field. 
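For illustration, a hedged sketch of a flags record with two OptionFields built from the option, noArg and reqArg' constructors that follow; DemoFlags and its field names are invented for the example.

    import Distribution.Simple.Command

    -- DemoFlags and its fields are invented for this sketch.
    data DemoFlags = DemoFlags { demoVerbose :: Bool, demoOutputs :: [String] }

    demoOptions :: ShowOrParseArgs -> [OptionField DemoFlags]
    demoOptions _ =
      [ option "v" ["verbose"] "Chatty output"
          demoVerbose (\v fl -> fl { demoVerbose = v })
          (noArg True)
      , option "o" ["output"] "Write output to FILE"
          demoOutputs (\o fl -> fl { demoOutputs = o })
          (reqArg' "FILE" (: []) id)
      ]

    main :: IO ()
    main = putStrLn (unwords (map optionName (demoOptions ParseArgs)))
           -- "verbose output"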
-data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder - (ReadE (a->a)) (a -> [String]) - - | OptArg Description OptFlags ArgPlaceHolder - (ReadE (a->a)) (a->a) (a -> [Maybe String]) - - | ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)] - - | BoolOpt Description OptFlags{-True-} OptFlags{-False-} - (Bool -> a -> a) (a-> Maybe Bool) - --- | Short command line option strings -type SFlags = [Char] --- | Long command line option strings -type LFlags = [String] -type OptFlags = (SFlags,LFlags) -type ArgPlaceHolder = String - - --- | Create an option taking a single OptDescr. --- No explicit Name is given for the Option, the name is the first LFlag given. -option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a - -> OptionField a -option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set] -option _ _ _ _ _ _ = error $ "Distribution.command.option: " - ++ "An OptionField must have at least one LFlag" - --- | Create an option taking several OptDescrs. --- You will have to give the flags and description individually to the --- OptDescr constructor. -multiOption :: Name -> get -> set - -> [get -> set -> OptDescr a] -- ^MkOptDescr constructors partially - -- applied to flags and description. - -> OptionField a -multiOption n get set args = OptionField n [arg get set | arg <- args] - -type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set - -> OptDescr a - --- | Create a string-valued command line interface. -reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -reqArg ad mkflag showflag sf lf d get set = - ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) - (showflag . get) - --- | Create a string-valued command line interface with a default value. -optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -optArg ad mkflag def showflag sf lf d get set = - OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) - (\b -> set (get b `mappend` def) b) - (showflag . get) - --- | (String -> a) variant of "reqArg" -reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -reqArg' ad mkflag showflag = - reqArg ad (succeedReadE mkflag) showflag - --- | (String -> a) variant of "optArg" -optArg' :: Mon.Monoid b => ArgPlaceHolder -> (Maybe String -> b) - -> (b -> [Maybe String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -optArg' ad mkflag showflag = - optArg ad (succeedReadE (mkflag . Just)) def showflag - where def = mkflag Nothing - -noArg :: (Eq b) => b -> MkOptDescr (a -> b) (b -> a -> a) a -noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d - -boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags - -> MkOptDescr (a -> b) (b -> a -> a) a -boolOpt g s sfT sfF _sf _lf@(n:_) d get set = - BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get) -boolOpt _ _ _ _ _ _ _ _ _ = error - "Distribution.Simple.Setup.boolOpt: unreachable" - -boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags - -> MkOptDescr (a -> b) (b -> a -> a) a -boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get) - --- | create a Choice option -choiceOpt :: Eq b => [(b,OptFlags,Description)] - -> MkOptDescr (a -> b) (b -> a -> a) a -choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts - where alts = [(d,flags, set alt, (==alt) . 
get) | (alt,flags,d) <- aa_ff] - --- | create a Choice option out of an enumeration type. --- As long flags, the Show output is used. As short flags, the first character --- which does not conflict with a previous one is used. -choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => - MkOptDescr (a -> b) (b -> a -> a) a -choiceOptFromEnum _sf _lf d get = - choiceOpt [ (x, (sf, [map toLower $ show x]), d') - | (x, sf) <- sflags' - , let d' = d ++ show x] - _sf _lf d get - where sflags' = foldl f [] [firstOne..] - f prev x = let prevflags = concatMap snd prev in - prev ++ take 1 [(x, [toLower sf]) - | sf <- show x, isAlpha sf - , toLower sf `notElem` prevflags] - firstOne = minBound `asTypeOf` get undefined - -commandGetOpts :: ShowOrParseArgs -> CommandUI flags - -> [GetOpt.OptDescr (flags -> flags)] -commandGetOpts showOrParse command = - concatMap viewAsGetOpt (commandOptions command showOrParse) - -viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)] -viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa - where - optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) = - [GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d] - where set' = readEOrFail set - optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) = - [GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d] - where set' Nothing = def - set' (Just txt) = readEOrFail set txt - optDescrToGetOpt (ChoiceOpt alts) = - [GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ] - optDescrToGetOpt (BoolOpt d (sfT, lfT) ([], []) set _) = - [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) d ] - optDescrToGetOpt (BoolOpt d ([], []) (sfF, lfF) set _) = - [ GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) d ] - optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) = - [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d) - , GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ] - --- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > --- Choice > Opt) and consider only the first one. -viewAsFieldDescr :: OptionField a -> FieldDescr a -viewAsFieldDescr (OptionField _n []) = - error "Distribution.command.viewAsFieldDescr: unexpected" -viewAsFieldDescr (OptionField n dd) = FieldDescr n get set - where - optDescr = head $ sortBy cmp dd - - cmp :: OptDescr a -> OptDescr a -> Ordering - ReqArg{} `cmp` ReqArg{} = EQ - ReqArg{} `cmp` _ = GT - BoolOpt{} `cmp` ReqArg{} = LT - BoolOpt{} `cmp` BoolOpt{} = EQ - BoolOpt{} `cmp` _ = GT - ChoiceOpt{} `cmp` ReqArg{} = LT - ChoiceOpt{} `cmp` BoolOpt{} = LT - ChoiceOpt{} `cmp` ChoiceOpt{} = EQ - ChoiceOpt{} `cmp` _ = GT - OptArg{} `cmp` OptArg{} = EQ - OptArg{} `cmp` _ = LT - --- get :: a -> Doc - get t = case optDescr of - ReqArg _ _ _ _ ppr -> - (cat . punctuate comma . map text . ppr) t - - OptArg _ _ _ _ _ ppr -> - case ppr t of [] -> PP.empty - (Nothing : _) -> text "True" - (Just a : _) -> text a - - ChoiceOpt alts -> - fromMaybe PP.empty $ listToMaybe - [ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t] - - BoolOpt _ _ _ _ enabled -> (maybe PP.empty disp . 
enabled) t - --- set :: LineNo -> String -> a -> ParseResult a - set line val a = - case optDescr of - ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val - -- We parse for a single value instead of a - -- list, as one can't really implement - -- parseList :: ReadE a -> ReadE [a] with - -- the current ReadE definition - ChoiceOpt{} -> - case getChoiceByLongFlag optDescr val of - Just f -> return (f a) - _ -> syntaxError line val - - BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val - - OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val - -- Optional arguments are parsed just like - -- required arguments here; we don't - -- provide a method to set an OptArg field - -- to the default value. - -getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b) -getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe - [ set | (_,(_sf,lf:_), set, _) <- alts - , lf == val] - -getChoiceByLongFlag _ _ = - error "Distribution.command.getChoiceByLongFlag: expected a choice option" - -getCurrentChoice :: OptDescr a -> a -> [String] -getCurrentChoice (ChoiceOpt alts) a = - [ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a] - -getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr" - - -liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b -liftOption get' set' opt = - opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt} - - -liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b -liftOptDescr get' set' (ChoiceOpt opts) = - ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get')) - | (d, ff, set, get) <- opts] - -liftOptDescr get' set' (OptArg d ff ad set def get) = - OptArg d ff ad (liftSet get' set' `fmap` set) - (liftSet get' set' def) (get . get') - -liftOptDescr get' set' (ReqArg d ff ad set get) = - ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get') - -liftOptDescr get' set' (BoolOpt d ffT ffF set get) = - BoolOpt d ffT ffF (liftSet get' set' . set) (get . 
get') - -liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b -liftSet get' set' set x = set' (set $ get' x) x - --- | Show flags in the standard long option command line format -commandShowOptions :: CommandUI flags -> flags -> [String] -commandShowOptions command v = concat - [ showOptDescr v od | o <- commandOptions command ParseArgs - , od <- optionDescr o] - where - maybePrefix [] = [] - maybePrefix (lOpt:_) = ["--" ++ lOpt] - - showOptDescr :: a -> OptDescr a -> [String] - showOptDescr x (BoolOpt _ (_,lfTs) (_,lfFs) _ enabled) - = case enabled x of - Nothing -> [] - Just True -> maybePrefix lfTs - Just False -> maybePrefix lfFs - showOptDescr x c@ChoiceOpt{} - = ["--" ++ val | val <- getCurrentChoice c x] - showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag) - = [ "--"++lf++"="++flag - | flag <- showflag x ] - showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag) - = [ case flag of - Just s -> "--"++lf++"="++s - Nothing -> "--"++lf - | flag <- showflag x ] - showOptDescr _ _ - = error "Distribution.Simple.Command.showOptDescr: unreachable" - - -commandListOptions :: CommandUI flags -> [String] -commandListOptions command = - concatMap listOption $ - addCommonFlags ShowArgs $ -- This is a slight hack, we don't want - -- "--list-options" showing up in the - -- list options output, so use ShowArgs - commandGetOpts ShowArgs command - where - listOption (GetOpt.Option shortNames longNames _ _) = - [ "-" ++ [name] | name <- shortNames ] - ++ [ "--" ++ name | name <- longNames ] - --- | The help text for this command with descriptions of all the options. -commandHelp :: CommandUI flags -> String -> String -commandHelp command pname = - commandSynopsis command - ++ "\n\n" - ++ commandUsage command pname - ++ ( case commandDescription command of - Nothing -> "" - Just desc -> '\n': desc pname) - ++ "\n" - ++ ( if cname == "" - then "Global flags:" - else "Flags for " ++ cname ++ ":" ) - ++ ( GetOpt.usageInfo "" - . addCommonFlags ShowArgs - $ commandGetOpts ShowArgs command ) - ++ ( case commandNotes command of - Nothing -> "" - Just notes -> '\n': notes pname) - where cname = commandName command - --- | Default "usage" documentation text for commands. -usageDefault :: String -> String -> String -usageDefault name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n" - ++ "Flags for " ++ name ++ ":" - --- | Create "usage" documentation from a list of parameter --- configurations. -usageAlternatives :: String -> [String] -> String -> String -usageAlternatives name strs pname = unlines - [ start ++ pname ++ " " ++ name ++ " " ++ s - | let starts = "Usage: " : repeat " or: " - , (start, s) <- zip starts strs - ] - --- | Make a Command from standard 'GetOpt' options. 
-mkCommandUI :: String -- ^ name - -> String -- ^ synopsis - -> [String] -- ^ usage alternatives - -> flags -- ^ initial\/empty flags - -> (ShowOrParseArgs -> [OptionField flags]) -- ^ options - -> CommandUI flags -mkCommandUI name synopsis usages flags options = CommandUI - { commandName = name - , commandSynopsis = synopsis - , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = usageAlternatives name usages - , commandDefaultFlags = flags - , commandOptions = options - } - --- | Common flags that apply to every command -data CommonFlag = HelpFlag | ListOptionsFlag - -commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag] -commonFlags showOrParseArgs = case showOrParseArgs of - ShowArgs -> [help] - ParseArgs -> [help, list] - where - help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag) - "Show this help text" - helpShortFlags = case showOrParseArgs of - ShowArgs -> ['h'] - ParseArgs -> ['h', '?'] - list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag) - "Print a list of command line flags" - -addCommonFlags :: ShowOrParseArgs - -> [GetOpt.OptDescr a] - -> [GetOpt.OptDescr (Either CommonFlag a)] -addCommonFlags showOrParseArgs options = - map (fmapOptDesc Left) (commonFlags showOrParseArgs) - ++ map (fmapOptDesc Right) options - where fmapOptDesc f (GetOpt.Option s l d m) = - GetOpt.Option s l (fmapArgDesc f d) m - fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a) - fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d - fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d - --- | Parse a bunch of command line arguments --- -commandParseArgs :: CommandUI flags - -> Bool -- ^ Is the command a global or subcommand? - -> [String] - -> CommandParse (flags -> flags, [String]) -commandParseArgs command global args = - let options = addCommonFlags ParseArgs - $ commandGetOpts ParseArgs command - order | global = GetOpt.RequireOrder - | otherwise = GetOpt.Permute - in case GetOpt.getOpt' order options args of - (flags, _, _, _) - | any listFlag flags -> CommandList (commandListOptions command) - | any helpFlag flags -> CommandHelp (commandHelp command) - where listFlag (Left ListOptionsFlag) = True; listFlag _ = False - helpFlag (Left HelpFlag) = True; helpFlag _ = False - (flags, opts, opts', []) - | global || null opts' -> CommandReadyToGo (accum flags, mix opts opts') - | otherwise -> CommandErrors (unrecognised opts') - (_, _, _, errs) -> CommandErrors errs - - where -- Note: It is crucial to use reverse function composition here or to - -- reverse the flags here as we want to process the flags left to right - -- but data flow in function composition is right to left. - accum flags = foldr (flip (.)) id [ f | Right f <- flags ] - unrecognised opts = [ "unrecognized " - ++ "'" ++ (commandName command) ++ "'" - ++ " option `" ++ opt ++ "'\n" - | opt <- opts ] - -- For unrecognised global flags we put them in the position just after - -- the command, if there is one. This gives us a chance to parse them - -- as sub-command rather than global flags. 
- mix [] ys = ys - mix (x:xs) ys = x:ys++xs - -data CommandParse flags = CommandHelp (String -> String) - | CommandList [String] - | CommandErrors [String] - | CommandReadyToGo flags -instance Functor CommandParse where - fmap _ (CommandHelp help) = CommandHelp help - fmap _ (CommandList opts) = CommandList opts - fmap _ (CommandErrors errs) = CommandErrors errs - fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags) - - -data CommandType = NormalCommand | HiddenCommand -data Command action = - Command String String ([String] -> CommandParse action) CommandType - --- | Mark command as hidden. Hidden commands don't show up in the 'progname --- help' or 'progname --help' output. -hiddenCommand :: Command action -> Command action -hiddenCommand (Command name synopsys f _cmdType) = - Command name synopsys f HiddenCommand - -commandAddAction :: CommandUI flags - -> (flags -> [String] -> action) - -> Command action -commandAddAction command action = - Command (commandName command) - (commandSynopsis command) - (fmap (uncurry applyDefaultArgs) . commandParseArgs command False) - NormalCommand - - where applyDefaultArgs mkflags args = - let flags = mkflags (commandDefaultFlags command) - in action flags args - -commandsRun :: CommandUI a - -> [Command action] - -> [String] - -> CommandParse (a, CommandParse action) -commandsRun globalCommand commands args = - case commandParseArgs globalCommand True args of - CommandHelp help -> CommandHelp help - CommandList opts -> CommandList (opts ++ commandNames) - CommandErrors errs -> CommandErrors errs - CommandReadyToGo (mkflags, args') -> case args' of - ("help":cmdArgs) -> handleHelpCommand cmdArgs - (name:cmdArgs) -> case lookupCommand name of - [Command _ _ action _] - -> CommandReadyToGo (flags, action cmdArgs) - _ -> CommandReadyToGo (flags, badCommand name) - [] -> CommandReadyToGo (flags, noCommand) - where flags = mkflags (commandDefaultFlags globalCommand) - - where - lookupCommand cname = [ cmd | cmd@(Command cname' _ _ _) <- commands' - , cname' == cname ] - noCommand = CommandErrors ["no command given (try --help)\n"] - badCommand cname = CommandErrors ["unrecognised command: " ++ cname - ++ " (try --help)\n"] - commands' = commands ++ [commandAddAction helpCommandUI undefined] - commandNames = [ name | (Command name _ _ NormalCommand) <- commands' ] - - -- A bit of a hack: support "prog help" as a synonym of "prog --help" - -- furthermore, support "prog help command" as "prog command --help" - handleHelpCommand cmdArgs = - case commandParseArgs helpCommandUI True cmdArgs of - CommandHelp help -> CommandHelp help - CommandList list -> CommandList (list ++ commandNames) - CommandErrors _ -> CommandHelp globalHelp - CommandReadyToGo (_,[]) -> CommandHelp globalHelp - CommandReadyToGo (_,(name:cmdArgs')) -> - case lookupCommand name of - [Command _ _ action _] -> - case action ("--help":cmdArgs') of - CommandHelp help -> CommandHelp help - CommandList _ -> CommandList [] - _ -> CommandHelp globalHelp - _ -> badCommand name - - where globalHelp = commandHelp globalCommand - --- | Utility function, many commands do not accept additional flags. This --- action fails with a helpful error message if the user supplies any extra. 
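Putting commandParseArgs together with mkCommandUI might look like the following sketch; DemoFlags and the "demo" command are invented, and it assumes this Cabal branch is available as a library.

    import Distribution.Simple.Command

    data DemoFlags = DemoFlags { demoVerbose :: Bool } deriving Show

    demoUI :: CommandUI DemoFlags
    demoUI = mkCommandUI "demo" "A demonstration command." ["[FLAGS]"]
               (DemoFlags False) opts
      where
        opts _ = [ option "v" ["verbose"] "Chatty output"
                     demoVerbose (\v fl -> fl { demoVerbose = v })
                     (noArg True) ]

    main :: IO ()
    main =
      case commandParseArgs demoUI False ["--verbose", "extra-arg"] of
        CommandReadyToGo (applyFlags, args) ->
          -- prints (DemoFlags {demoVerbose = True},["extra-arg"])
          print (applyFlags (commandDefaultFlags demoUI), args)
        CommandHelp help   -> putStr (help "demo-prog")
        CommandList names  -> mapM_ putStrLn names
        CommandErrors errs -> mapM_ putStrLn errs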
--- -noExtraFlags :: [String] -> IO () -noExtraFlags [] = return () -noExtraFlags extraFlags = - die $ "Unrecognised flags: " ++ intercalate ", " extraFlags ---TODO: eliminate this function and turn it into a variant on commandAddAction --- instead like commandAddActionNoArgs that doesn't supply the [String] - --- | Helper function for creating globalCommand description -getNormalCommandDescriptions :: [Command action] -> [(String, String)] -getNormalCommandDescriptions cmds = - [ (name, description) - | Command name description _ NormalCommand <- cmds ] - -helpCommandUI :: CommandUI () -helpCommandUI = - (mkCommandUI - "help" - "Help about commands." - ["[FLAGS]", "COMMAND [FLAGS]"] - () - (const [])) - { - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " help help\n" - ++ " Oh, appararently you already know this.\n" - } - --- | wraps a @CommandUI@ together with a function that turns it into a @Command@. --- By hiding the type of flags for the UI allows construction of a list of all UIs at the --- top level of the program. That list can then be used for generation of manual page --- as well as for executing the selected command. -data CommandSpec action - = forall flags. CommandSpec (CommandUI flags) (CommandUI flags -> Command action) CommandType - -commandFromSpec :: CommandSpec a -> Command a -commandFromSpec (CommandSpec ui action _) = action ui diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Compiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Compiler.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Compiler.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,362 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Compiler --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This should be a much more sophisticated abstraction than it is. Currently --- it's just a bit of data about the compiler, like it's flavour and name and --- version. The reason it's just data is because currently it has to be in --- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The --- only interesting bit of info it contains is a mapping between language --- extensions and compiler command line flags. This module also defines a --- 'PackageDB' type which is used to refer to package databases. Most compilers --- only know about a single global package collection but GHC has a global and --- per-user one and it lets you create arbitrary other package databases. We do --- not yet fully support this latter feature. 
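As a small sketch of the package-database stacking described above, using the PackageDB and PackageDBStack types defined further below; the in-place database name follows the example given in their documentation.

    import Distribution.Simple.Compiler
             (PackageDB (..), PackageDBStack, registrationPackageDB)

    -- A typical stack: global DB at the bottom, then the user DB, then a
    -- build-tree specific DB.
    demoStack :: PackageDBStack
    demoStack = [ GlobalPackageDB
                , UserPackageDB
                , SpecificPackageDB "package.conf.inplace" ]

    main :: IO ()
    main = print (registrationPackageDB demoStack)
           -- SpecificPackageDB "package.conf.inplace"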
- -module Distribution.Simple.Compiler ( - -- * Haskell implementations - module Distribution.Compiler, - Compiler(..), - showCompilerId, showCompilerIdWithAbi, - compilerFlavor, compilerVersion, - compilerCompatVersion, - compilerInfo, - - -- * Support for package databases - PackageDB(..), - PackageDBStack, - registrationPackageDB, - absolutePackageDBPaths, - absolutePackageDBPath, - - -- * Support for optimisation levels - OptimisationLevel(..), - flagToOptimisationLevel, - - -- * Support for debug info levels - DebugInfoLevel(..), - flagToDebugInfoLevel, - - -- * Support for language extensions - Flag, - languageToFlags, - unsupportedLanguages, - extensionsToFlags, - unsupportedExtensions, - parmakeSupported, - reexportedModulesSupported, - renamingPackageFlagsSupported, - unifiedIPIDRequired, - packageKeySupported, - unitIdSupported, - libraryDynDirSupported, - - -- * Support for profiling detail levels - ProfDetailLevel(..), - knownProfDetailLevels, - flagToProfDetailLevel, - showProfDetailLevel, - ) where - -import Distribution.Compiler -import Distribution.Version -import Distribution.Text -import Language.Haskell.Extension -import Distribution.Simple.Utils -import Distribution.Compat.Binary - -import Control.Monad (liftM) -import Data.List (nub) -import qualified Data.Map as M (Map, lookup) -import Data.Maybe (catMaybes, isNothing, listToMaybe) -import GHC.Generics (Generic) -import System.Directory (canonicalizePath) - -data Compiler = Compiler { - compilerId :: CompilerId, - -- ^ Compiler flavour and version. - compilerAbiTag :: AbiTag, - -- ^ Tag for distinguishing incompatible ABI's on the same architecture/os. - compilerCompat :: [CompilerId], - -- ^ Other implementations that this compiler claims to be compatible with. - compilerLanguages :: [(Language, Flag)], - -- ^ Supported language standards. - compilerExtensions :: [(Extension, Flag)], - -- ^ Supported extensions. - compilerProperties :: M.Map String String - -- ^ A key-value map for properties not covered by the above fields. - } - deriving (Eq, Generic, Show, Read) - -instance Binary Compiler - -showCompilerId :: Compiler -> String -showCompilerId = display . compilerId - -showCompilerIdWithAbi :: Compiler -> String -showCompilerIdWithAbi comp = - display (compilerId comp) ++ - case compilerAbiTag comp of - NoAbiTag -> [] - AbiTag xs -> '-':xs - -compilerFlavor :: Compiler -> CompilerFlavor -compilerFlavor = (\(CompilerId f _) -> f) . compilerId - -compilerVersion :: Compiler -> Version -compilerVersion = (\(CompilerId _ v) -> v) . compilerId - -compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version -compilerCompatVersion flavor comp - | compilerFlavor comp == flavor = Just (compilerVersion comp) - | otherwise = - listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ] - -compilerInfo :: Compiler -> CompilerInfo -compilerInfo c = CompilerInfo (compilerId c) - (compilerAbiTag c) - (Just . compilerCompat $ c) - (Just . map fst . compilerLanguages $ c) - (Just . map fst . compilerExtensions $ c) - --- ------------------------------------------------------------ --- * Package databases --- ------------------------------------------------------------ - --- |Some compilers have a notion of a database of available packages. --- For some there is just one global db of packages, other compilers --- support a per-user or an arbitrary db specified at some location in --- the file system. 
This can be used to build isloated environments of --- packages, for example to build a collection of related packages --- without installing them globally. --- -data PackageDB = GlobalPackageDB - | UserPackageDB - | SpecificPackageDB FilePath - deriving (Eq, Generic, Ord, Show, Read) - -instance Binary PackageDB - --- | We typically get packages from several databases, and stack them --- together. This type lets us be explicit about that stacking. For example --- typical stacks include: --- --- > [GlobalPackageDB] --- > [GlobalPackageDB, UserPackageDB] --- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"] --- --- Note that the 'GlobalPackageDB' is invariably at the bottom since it --- contains the rts, base and other special compiler-specific packages. --- --- We are not restricted to using just the above combinations. In particular --- we can use several custom package dbs and the user package db together. --- --- When it comes to writing, the top most (last) package is used. --- -type PackageDBStack = [PackageDB] - --- | Return the package that we should register into. This is the package db at --- the top of the stack. --- -registrationPackageDB :: PackageDBStack -> PackageDB -registrationPackageDB [] = error "internal error: empty package db set" -registrationPackageDB dbs = last dbs - --- | Make package paths absolute - - -absolutePackageDBPaths :: PackageDBStack -> IO PackageDBStack -absolutePackageDBPaths = mapM absolutePackageDBPath - -absolutePackageDBPath :: PackageDB -> IO PackageDB -absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB -absolutePackageDBPath UserPackageDB = return UserPackageDB -absolutePackageDBPath (SpecificPackageDB db) = - SpecificPackageDB `liftM` canonicalizePath db - --- ------------------------------------------------------------ --- * Optimisation levels --- ------------------------------------------------------------ - --- | Some compilers support optimising. Some have different levels. --- For compilers that do not the level is just capped to the level --- they do support. --- -data OptimisationLevel = NoOptimisation - | NormalOptimisation - | MaximumOptimisation - deriving (Bounded, Enum, Eq, Generic, Read, Show) - -instance Binary OptimisationLevel - -flagToOptimisationLevel :: Maybe String -> OptimisationLevel -flagToOptimisationLevel Nothing = NormalOptimisation -flagToOptimisationLevel (Just s) = case reads s of - [(i, "")] - | i >= fromEnum (minBound :: OptimisationLevel) - && i <= fromEnum (maxBound :: OptimisationLevel) - -> toEnum i - | otherwise -> error $ "Bad optimisation level: " ++ show i - ++ ". Valid values are 0..2" - _ -> error $ "Can't parse optimisation level " ++ s - --- ------------------------------------------------------------ --- * Debug info levels --- ------------------------------------------------------------ - --- | Some compilers support emitting debug info. Some have different --- levels. For compilers that do not the level is just capped to the --- level they do support. --- -data DebugInfoLevel = NoDebugInfo - | MinimalDebugInfo - | NormalDebugInfo - | MaximalDebugInfo - deriving (Bounded, Enum, Eq, Generic, Read, Show) - -instance Binary DebugInfoLevel - -flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel -flagToDebugInfoLevel Nothing = NormalDebugInfo -flagToDebugInfoLevel (Just s) = case reads s of - [(i, "")] - | i >= fromEnum (minBound :: DebugInfoLevel) - && i <= fromEnum (maxBound :: DebugInfoLevel) - -> toEnum i - | otherwise -> error $ "Bad debug info level: " ++ show i - ++ ". 
Valid values are 0..3" - _ -> error $ "Can't parse debug info level " ++ s - --- ------------------------------------------------------------ --- * Languages and Extensions --- ------------------------------------------------------------ - -unsupportedLanguages :: Compiler -> [Language] -> [Language] -unsupportedLanguages comp langs = - [ lang | lang <- langs - , isNothing (languageToFlag comp lang) ] - -languageToFlags :: Compiler -> Maybe Language -> [Flag] -languageToFlags comp = filter (not . null) - . catMaybes . map (languageToFlag comp) - . maybe [Haskell98] (\x->[x]) - -languageToFlag :: Compiler -> Language -> Maybe Flag -languageToFlag comp ext = lookup ext (compilerLanguages comp) - - --- |For the given compiler, return the extensions it does not support. -unsupportedExtensions :: Compiler -> [Extension] -> [Extension] -unsupportedExtensions comp exts = - [ ext | ext <- exts - , isNothing (extensionToFlag comp ext) ] - -type Flag = String - --- |For the given compiler, return the flags for the supported extensions. -extensionsToFlags :: Compiler -> [Extension] -> [Flag] -extensionsToFlags comp = nub . filter (not . null) - . catMaybes . map (extensionToFlag comp) - -extensionToFlag :: Compiler -> Extension -> Maybe Flag -extensionToFlag comp ext = lookup ext (compilerExtensions comp) - --- | Does this compiler support parallel --make mode? -parmakeSupported :: Compiler -> Bool -parmakeSupported = ghcSupported "Support parallel --make" - --- | Does this compiler support reexported-modules? -reexportedModulesSupported :: Compiler -> Bool -reexportedModulesSupported = ghcSupported "Support reexported-modules" - --- | Does this compiler support thinning/renaming on package flags? -renamingPackageFlagsSupported :: Compiler -> Bool -renamingPackageFlagsSupported = ghcSupported "Support thinning and renaming package flags" - --- | Does this compiler have unified IPIDs (so no package keys) -unifiedIPIDRequired :: Compiler -> Bool -unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs" - --- | Does this compiler support package keys? -packageKeySupported :: Compiler -> Bool -packageKeySupported = ghcSupported "Uses package keys" - --- | Does this compiler support unit IDs? -unitIdSupported :: Compiler -> Bool -unitIdSupported = ghcSupported "Uses unit IDs" - --- | Does this compiler support a package database entry with: --- "dynamic-library-dirs"? -libraryDynDirSupported :: Compiler -> Bool -libraryDynDirSupported comp = case compilerFlavor comp of - GHC -> compilerVersion comp >= Version [8,0,1,20161021] [] - _ -> False - --- | Utility function for GHC only features -ghcSupported :: String -> Compiler -> Bool -ghcSupported key comp = - case compilerFlavor comp of - GHC -> checkProp - GHCJS -> checkProp - _ -> False - where checkProp = - case M.lookup key (compilerProperties comp) of - Just "YES" -> True - _ -> False - --- ------------------------------------------------------------ --- * Profiling detail level --- ------------------------------------------------------------ - --- | Some compilers (notably GHC) support profiling and can instrument --- programs so the system can account costs to different functions. There are --- different levels of detail that can be used for this accounting. --- For compilers that do not support this notion or the particular detail --- levels, this is either ignored or just capped to some similar level --- they do support. 
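A brief usage sketch of the profiling-detail helpers defined below, assuming this Cabal branch as a library; the "late" value is an invented unrecognised flag that shows the fall-through case.

    import Distribution.Simple.Compiler
             (ProfDetailLevel (..), flagToProfDetailLevel, showProfDetailLevel)

    main :: IO ()
    main = do
      print (flagToProfDetailLevel "toplevel")   -- ProfDetailToplevelFunctions
      print (flagToProfDetailLevel "")           -- ProfDetailDefault
      print (flagToProfDetailLevel "late")       -- ProfDetailOther "late"
      putStrLn (showProfDetailLevel ProfDetailAllFunctions)   -- all-functions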
--- -data ProfDetailLevel = ProfDetailNone - | ProfDetailDefault - | ProfDetailExportedFunctions - | ProfDetailToplevelFunctions - | ProfDetailAllFunctions - | ProfDetailOther String - deriving (Eq, Generic, Read, Show) - -instance Binary ProfDetailLevel - -flagToProfDetailLevel :: String -> ProfDetailLevel -flagToProfDetailLevel "" = ProfDetailDefault -flagToProfDetailLevel s = - case lookup (lowercase s) - [ (name, value) - | (primary, aliases, value) <- knownProfDetailLevels - , name <- primary : aliases ] - of Just value -> value - Nothing -> ProfDetailOther s - -knownProfDetailLevels :: [(String, [String], ProfDetailLevel)] -knownProfDetailLevels = - [ ("default", [], ProfDetailDefault) - , ("none", [], ProfDetailNone) - , ("exported-functions", ["exported"], ProfDetailExportedFunctions) - , ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions) - , ("all-functions", ["all"], ProfDetailAllFunctions) - ] - -showProfDetailLevel :: ProfDetailLevel -> String -showProfDetailLevel dl = case dl of - ProfDetailNone -> "none" - ProfDetailDefault -> "default" - ProfDetailExportedFunctions -> "exported-functions" - ProfDetailToplevelFunctions -> "toplevel-functions" - ProfDetailAllFunctions -> "all-functions" - ProfDetailOther other -> other - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Configure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Configure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Configure.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Configure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2037 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Configure --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This deals with the /configure/ phase. It provides the 'configure' action --- which is given the package description and configure flags. It then tries --- to: configure the compiler; resolves any conditionals in the package --- description; resolve the package dependencies; check if all the extensions --- used by this package are supported by the compiler; check that all the build --- tools are available (including version checks if appropriate); checks for --- any required @pkg-config@ packages (updating the 'BuildInfo' with the --- results) --- --- Then based on all this it saves the info in the 'LocalBuildInfo' and writes --- it out to the @dist\/setup-config@ file. It also displays various details to --- the user, the amount of information displayed depending on the verbosity --- level. 
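A minimal sketch of reading the saved configuration back with getConfigStateFile (whose definition follows), assuming a previous configure run has produced the conventional dist/setup-config.

    import Distribution.Simple.Configure (getConfigStateFile)

    main :: IO ()
    main = do
      -- getConfigStateFile throws a ConfigStateFileError if the file is
      -- missing, unreadable, or written by a different Cabal version.
      _lbi <- getConfigStateFile "dist/setup-config"
      putStrLn "read the saved LocalBuildInfo from dist/setup-config"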
- -module Distribution.Simple.Configure (configure, - writePersistBuildConfig, - getConfigStateFile, - getPersistBuildConfig, - checkPersistBuildConfigOutdated, - tryGetPersistBuildConfig, - maybeGetPersistBuildConfig, - findDistPref, findDistPrefOrDefault, - computeComponentId, - computeCompatPackageKey, - localBuildInfoFile, - getInstalledPackages, - getInstalledPackagesMonitorFiles, - getPackageDBContents, - configCompiler, configCompilerAux, - configCompilerEx, configCompilerAuxEx, - computeEffectiveProfiling, - ccLdOptionsBuildInfo, - checkForeignDeps, - interpretPackageDbFlags, - ConfigStateFileError(..), - tryGetConfigStateFile, - platformDefines, - relaxPackageDeps, - ) - where - -import Distribution.Compiler -import Distribution.Utils.NubList -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.PreProcess -import Distribution.Package -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.InstalledPackageInfo (InstalledPackageInfo - ,emptyInstalledPackageInfo) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.ModuleName -import Distribution.PackageDescription.Configuration -import Distribution.PackageDescription.Check hiding (doesFileExist) -import Distribution.Simple.Program -import Distribution.Simple.Setup as Setup -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Version -import Distribution.Verbosity - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.JHC as JHC -import qualified Distribution.Simple.LHC as LHC -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite - --- Prefer the more generic Data.Traversable.mapM to Prelude.mapM -import Prelude hiding ( mapM ) -import Control.Exception - ( Exception, evaluate, throw, throwIO, try ) -import Control.Exception ( ErrorCall ) -import Control.Monad - ( liftM, when, unless, foldM, filterM, mplus ) -import Distribution.Compat.Binary ( decodeOrFailIO, encode ) -import GHC.Fingerprint ( Fingerprint(..), fingerprintString ) -import Data.ByteString.Lazy (ByteString) -import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy.Char8 as BLC8 -import Data.List - ( (\\), nub, partition, isPrefixOf, inits, stripPrefix ) -import Data.Maybe - ( isNothing, catMaybes, fromMaybe, mapMaybe, isJust ) -import Data.Either - ( partitionEithers ) -import qualified Data.Set as Set -import Data.Monoid as Mon ( Monoid(..) 
) -import qualified Data.Map as Map -import Data.Map (Map) -import Data.Traversable - ( mapM ) -import Data.Typeable -import Data.Char ( chr, isAlphaNum ) -import Numeric ( showIntAtBase ) -import System.Directory - ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) -import System.FilePath - ( (), isAbsolute ) -import qualified System.Info - ( compilerName, compilerVersion ) -import System.IO - ( hPutStrLn, hClose ) -import Distribution.Text - ( Text(disp), defaultStyle, display, simpleParse ) -import Text.PrettyPrint - ( Doc, (<>), (<+>), ($+$), char, comma, empty, hsep, nest - , punctuate, quotes, render, renderStyle, sep, text ) -import Distribution.Compat.Environment ( lookupEnv ) -import Distribution.Compat.Exception ( catchExit, catchIO ) - --- | The errors that can be thrown when reading the @setup-config@ file. -data ConfigStateFileError - = ConfigStateFileNoHeader -- ^ No header found. - | ConfigStateFileBadHeader -- ^ Incorrect header. - | ConfigStateFileNoParse -- ^ Cannot parse file contents. - | ConfigStateFileMissing -- ^ No file! - | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier - (Either ConfigStateFileError LocalBuildInfo) -- ^ Mismatched version. - deriving (Typeable) - --- | Format a 'ConfigStateFileError' as a user-facing error message. -dispConfigStateFileError :: ConfigStateFileError -> Doc -dispConfigStateFileError ConfigStateFileNoHeader = - text "Saved package config file header is missing." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileBadHeader = - text "Saved package config file header is corrupt." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileNoParse = - text "Saved package config file is corrupt." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileMissing = - text "Run the 'configure' command first." -dispConfigStateFileError (ConfigStateFileBadVersion oldCabal oldCompiler _) = - text "Saved package config file is outdated:" - $+$ badCabal $+$ badCompiler - $+$ text "Re-run the 'configure' command." - where - badCabal = - text "• the Cabal version changed from" - <+> disp oldCabal <+> "to" <+> disp currentCabalId - badCompiler - | oldCompiler == currentCompilerId = empty - | otherwise = - text "• the compiler changed from" - <+> disp oldCompiler <+> "to" <+> disp currentCompilerId - -instance Show ConfigStateFileError where - show = renderStyle defaultStyle . dispConfigStateFileError - -instance Exception ConfigStateFileError - --- | Read the 'localBuildInfoFile'. Throw an exception if the file is --- missing, if the file cannot be read, or if the file was created by an older --- version of Cabal. -getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. - -> IO LocalBuildInfo -getConfigStateFile filename = do - exists <- doesFileExist filename - unless exists $ throwIO ConfigStateFileMissing - -- Read the config file into a strict ByteString to avoid problems with - -- lazy I/O, then convert to lazy because the binary package needs that. 
- contents <- BS.readFile filename - let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents]) - - headerParseResult <- try $ evaluate $ parseHeader header - let (cabalId, compId) = - case headerParseResult of - Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader - Right x -> x - - let getStoredValue = do - result <- decodeOrFailIO (BLC8.tail body) - case result of - Left _ -> throw ConfigStateFileNoParse - Right x -> return x - deferErrorIfBadVersion act - | cabalId /= currentCabalId = do - eResult <- try act - throw $ ConfigStateFileBadVersion cabalId compId eResult - | otherwise = act - deferErrorIfBadVersion getStoredValue - --- | Read the 'localBuildInfoFile', returning either an error or the local build --- info. -tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. - -> IO (Either ConfigStateFileError LocalBuildInfo) -tryGetConfigStateFile = try . getConfigStateFile - --- | Try to read the 'localBuildInfoFile'. -tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO (Either ConfigStateFileError LocalBuildInfo) -tryGetPersistBuildConfig = try . getPersistBuildConfig - --- | Read the 'localBuildInfoFile'. Throw an exception if the file is --- missing, if the file cannot be read, or if the file was created by an older --- version of Cabal. -getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO LocalBuildInfo -getPersistBuildConfig = getConfigStateFile . localBuildInfoFile - --- | Try to read the 'localBuildInfoFile'. -maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO (Maybe LocalBuildInfo) -maybeGetPersistBuildConfig = - liftM (either (const Nothing) Just) . tryGetPersistBuildConfig - --- | After running configure, output the 'LocalBuildInfo' to the --- 'localBuildInfoFile'. -writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write. - -> IO () -writePersistBuildConfig distPref lbi = do - createDirectoryIfMissing False distPref - writeFileAtomic (localBuildInfoFile distPref) $ - BLC8.unlines [showHeader pkgId, encode lbi] - where - pkgId = packageId $ localPkgDescr lbi - --- | Identifier of the current Cabal package. -currentCabalId :: PackageIdentifier -currentCabalId = PackageIdentifier (PackageName "Cabal") cabalVersion - --- | Identifier of the current compiler package. -currentCompilerId :: PackageIdentifier -currentCompilerId = PackageIdentifier (PackageName System.Info.compilerName) - System.Info.compilerVersion - --- | Parse the @setup-config@ file header, returning the package identifiers --- for Cabal and the compiler. -parseHeader :: ByteString -- ^ The file contents. - -> (PackageIdentifier, PackageIdentifier) -parseHeader header = case BLC8.words header of - ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, - "using", compId] -> - fromMaybe (throw ConfigStateFileBadHeader) $ do - _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier - cabalId' <- simpleParse (BLC8.unpack cabalId) - compId' <- simpleParse (BLC8.unpack compId) - return (cabalId', compId') - _ -> throw ConfigStateFileNoHeader - --- | Generate the @setup-config@ file header. -showHeader :: PackageIdentifier -- ^ The processed package. 
- -> ByteString -showHeader pkgId = BLC8.unwords - [ "Saved", "package", "config", "for" - , BLC8.pack $ display pkgId - , "written", "by" - , BLC8.pack $ display currentCabalId - , "using" - , BLC8.pack $ display currentCompilerId - ] - --- | Check that localBuildInfoFile is up-to-date with respect to the --- .cabal file. -checkPersistBuildConfigOutdated :: FilePath -> FilePath -> IO Bool -checkPersistBuildConfigOutdated distPref pkg_descr_file = do - pkg_descr_file `moreRecentFile` (localBuildInfoFile distPref) - --- | Get the path of @dist\/setup-config@. -localBuildInfoFile :: FilePath -- ^ The @dist@ directory path. - -> FilePath -localBuildInfoFile distPref = distPref "setup-config" - --- ----------------------------------------------------------------------------- --- * Configuration --- ----------------------------------------------------------------------------- - --- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken --- from (in order of highest to lowest preference) the override prefix, the --- \"CABAL_BUILDDIR\" environment variable, or the default prefix. -findDistPref :: FilePath -- ^ default \"dist\" prefix - -> Setup.Flag FilePath -- ^ override \"dist\" prefix - -> IO FilePath -findDistPref defDistPref overrideDistPref = do - envDistPref <- liftM parseEnvDistPref (lookupEnv "CABAL_BUILDDIR") - return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref) - where - parseEnvDistPref env = - case env of - Just distPref | not (null distPref) -> toFlag distPref - _ -> NoFlag - --- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken --- from (in order of highest to lowest preference) the override prefix, the --- \"CABAL_BUILDDIR\" environment variable, or 'defaultDistPref' is used. Call --- this function to resolve a @*DistPref@ flag whenever it is not known to be --- set. (The @*DistPref@ flags are always set to a definite value before --- invoking 'UserHooks'.) -findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix - -> IO FilePath -findDistPrefOrDefault = findDistPref defaultDistPref - --- | Compute the effective value of the profiling flags --- @--enable-library-profiling@ and @--enable-executable-profiling@ --- from the specified 'ConfigFlags'. This may be useful for --- external Cabal tools which need to interact with Setup in --- a backwards-compatible way: the most predictable mechanism --- for enabling profiling across many legacy versions is to --- NOT use @--enable-profiling@ and use those two flags instead. --- --- Note that @--enable-executable-profiling@ also affects profiling --- of benchmarks and (non-detailed) test suites. -computeEffectiveProfiling :: ConfigFlags -> (Bool {- lib -}, Bool {- exe -}) -computeEffectiveProfiling cfg = - -- The --profiling flag sets the default for both libs and exes, - -- but can be overidden by --library-profiling, or the old deprecated - -- --executable-profiling flag. - -- - -- The --profiling-detail and --library-profiling-detail flags behave - -- similarly - let profEnabledBoth = fromFlagOrDefault False (configProf cfg) - profEnabledLib = fromFlagOrDefault profEnabledBoth (configProfLib cfg) - profEnabledExe = fromFlagOrDefault profEnabledBoth (configProfExe cfg) - in (profEnabledLib, profEnabledExe) - --- |Perform the \"@.\/setup configure@\" action. --- Returns the @.setup-config@ file. 
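computeEffectiveProfiling, shown just above, only combines three tri-state flags, so the precedence can be restated in isolation: --enable-profiling sets the default for both parts, and the per-library and per-executable flags override it. A standalone sketch of that rule, where Maybe Bool stands in for Cabal's Flag Bool and effectiveProfiling is an illustrative name rather than anything exported by Cabal:

import Data.Maybe (fromMaybe)

-- (library profiling, executable profiling), given the three command-line flags.
effectiveProfiling :: Maybe Bool  -- --[enable|disable]-profiling
                   -> Maybe Bool  -- --[enable|disable]-library-profiling
                   -> Maybe Bool  -- --[enable|disable]-executable-profiling
                   -> (Bool, Bool)
effectiveProfiling prof profLib profExe =
  let both = fromMaybe False prof          -- profiling is off unless requested
  in  (fromMaybe both profLib, fromMaybe both profExe)

main :: IO ()
main = do
  print (effectiveProfiling (Just True) Nothing      Nothing)     -- (True,True)
  print (effectiveProfiling (Just True) (Just False) Nothing)     -- (False,True)
  print (effectiveProfiling Nothing     Nothing      (Just True)) -- (False,True)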
-configure :: (GenericPackageDescription, HookedBuildInfo) - -> ConfigFlags -> IO LocalBuildInfo -configure (pkg_descr0', pbi) cfg = do - let pkg_descr0 = - -- Ignore '--allow-newer' when we're given '--exact-configuration'. - if fromFlagOrDefault False (configExactConfiguration cfg) - then pkg_descr0' - else relaxPackageDeps - (fromMaybe AllowNewerNone $ configAllowNewer cfg) - pkg_descr0' - - setupMessage verbosity "Configuring" (packageId pkg_descr0) - - checkDeprecatedFlags verbosity cfg - checkExactConfiguration pkg_descr0 cfg - - -- Where to build the package - let buildDir :: FilePath -- e.g. dist/build - -- fromFlag OK due to Distribution.Simple calling - -- findDistPrefOrDefault to fill it in - buildDir = fromFlag (configDistPref cfg) "build" - createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir - - -- What package database(s) to use - let packageDbs - = interpretPackageDbFlags - (fromFlag (configUserInstall cfg)) - (configPackageDBs cfg) - - -- comp: the compiler we're building with - -- compPlatform: the platform we're building for - -- programsConfig: location and args of all programs we're - -- building with - (comp, compPlatform, programsConfig) - <- configCompilerEx - (flagToMaybe (configHcFlavor cfg)) - (flagToMaybe (configHcPath cfg)) - (flagToMaybe (configHcPkg cfg)) - (mkProgramsConfig cfg (configPrograms cfg)) - (lessVerbose verbosity) - - -- The InstalledPackageIndex of all installed packages - installedPackageSet <- getInstalledPackages (lessVerbose verbosity) comp - packageDbs programsConfig - - -- The InstalledPackageIndex of all (possible) internal packages - let internalPackageSet = getInternalPackages pkg_descr0 - - -- allConstraints: The set of all 'Dependency's we have. Used ONLY - -- to 'configureFinalizedPackage'. - -- requiredDepsMap: A map from 'PackageName' to the specifically - -- required 'InstalledPackageInfo', due to --dependency - -- - -- NB: These constraints are to be applied to ALL components of - -- a package. Thus, it's not an error if allConstraints contains - -- more constraints than is necessary for a component (another - -- component might need it.) - -- - -- NB: The fact that we bundle all the constraints together means - -- that is not possible to configure a test-suite to use one - -- version of a dependency, and the executable to use another. - (allConstraints, requiredDepsMap) <- either die return $ - combinedConstraints (configConstraints cfg) - (configDependencies cfg) - installedPackageSet - - -- pkg_descr: The resolved package description, that does not contain any - -- conditionals, because we have have an assignment for - -- every flag, either picking them ourselves using a - -- simple naive algorithm, or having them be passed to - -- us by 'configConfigurationsFlags') - -- flags: The 'FlagAssignment' that the conditionals were - -- resolved with. - -- - -- NB: Why doesn't finalizing a package also tell us what the - -- dependencies are (e.g. when we run the naive algorithm, - -- we are checking if dependencies are satisfiable)? The - -- primary reason is that we may NOT have done any solving: - -- if the flags are all chosen for us, this step is a simple - -- matter of flattening according to that assignment. It's - -- cleaner to then configure the dependencies afterwards. 
- (pkg_descr, flags) - <- configureFinalizedPackage verbosity cfg - allConstraints - (dependencySatisfiable - (fromFlagOrDefault False (configExactConfiguration cfg)) - installedPackageSet - internalPackageSet - requiredDepsMap) - comp - compPlatform - pkg_descr0 - - checkCompilerProblems comp pkg_descr - checkPackageProblems verbosity pkg_descr0 - (updatePackageDescription pbi pkg_descr) - - -- The list of 'InstalledPackageInfo' recording the selected - -- dependencies... - -- internalPkgDeps: ...on internal packages (these are fake!) - -- externalPkgDeps: ...on external packages - -- - -- Invariant: For any package name, there is at most one package - -- in externalPackageDeps which has that name. - -- - -- NB: The dependency selection is global over ALL components - -- in the package (similar to how allConstraints and - -- requiredDepsMap are global over all components). In particular, - -- if *any* component (post-flag resolution) has an unsatisfiable - -- dependency, we will fail. This can sometimes be undesirable - -- for users, see #1786 (benchmark conflicts with executable), - (internalPkgDeps, externalPkgDeps) - <- configureDependencies - verbosity - internalPackageSet - installedPackageSet - requiredDepsMap - pkg_descr - - let installDeps = Map.elems -- deduplicate - . Map.fromList - . map (\v -> (Installed.installedUnitId v, v)) - $ externalPkgDeps - - packageDependsIndex <- - case PackageIndex.dependencyClosure installedPackageSet - (map Installed.installedUnitId installDeps) of - Left packageDependsIndex -> return packageDependsIndex - Right broken -> - die $ "The following installed packages are broken because other" - ++ " packages they depend on are missing. These broken " - ++ "packages must be rebuilt before they can be used.\n" - ++ unlines [ "package " - ++ display (packageId pkg) - ++ " is broken due to missing package " - ++ intercalate ", " (map display deps) - | (pkg, deps) <- broken ] - - let pseudoTopPkg = emptyInstalledPackageInfo { - Installed.installedUnitId = - mkLegacyUnitId (packageId pkg_descr), - Installed.sourcePackageId = packageId pkg_descr, - Installed.depends = - map Installed.installedUnitId installDeps - } - case PackageIndex.dependencyInconsistencies - . PackageIndex.insert pseudoTopPkg - $ packageDependsIndex of - [] -> return () - inconsistencies -> - warn verbosity $ - "This package indirectly depends on multiple versions of the same " - ++ "package. 
This is highly likely to cause a compile failure.\n" - ++ unlines [ "package " ++ display pkg ++ " requires " - ++ display (PackageIdentifier name ver) - | (name, uses) <- inconsistencies - , (pkg, ver) <- uses ] - - -- installation directories - defaultDirs <- defaultInstallDirs (compilerFlavor comp) - (fromFlag (configUserInstall cfg)) (hasLibs pkg_descr) - let installDirs = combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs cfg) - - -- check languages and extensions - let langlist = nub $ catMaybes $ map defaultLanguage - (allBuildInfo pkg_descr) - let langs = unsupportedLanguages comp langlist - when (not (null langs)) $ - die $ "The package " ++ display (packageId pkg_descr0) - ++ " requires the following languages which are not " - ++ "supported by " ++ display (compilerId comp) ++ ": " - ++ intercalate ", " (map display langs) - let extlist = nub $ concatMap allExtensions (allBuildInfo pkg_descr) - let exts = unsupportedExtensions comp extlist - when (not (null exts)) $ - die $ "The package " ++ display (packageId pkg_descr0) - ++ " requires the following language extensions which are not " - ++ "supported by " ++ display (compilerId comp) ++ ": " - ++ intercalate ", " (map display exts) - - -- configured known/required programs & external build tools - -- exclude build-tool deps on "internal" exes in the same package - let requiredBuildTools = - [ buildTool - | let exeNames = map exeName (executables pkg_descr) - , bi <- allBuildInfo pkg_descr - , buildTool@(Dependency (PackageName toolName) reqVer) - <- buildTools bi - , let isInternal = - toolName `elem` exeNames - -- we assume all internal build-tools are - -- versioned with the package: - && packageVersion pkg_descr `withinRange` reqVer - , not isInternal ] - - programsConfig' <- - configureAllKnownPrograms (lessVerbose verbosity) programsConfig - >>= configureRequiredPrograms verbosity requiredBuildTools - - (pkg_descr', programsConfig'') <- - configurePkgconfigPackages verbosity pkg_descr programsConfig' - - -- internal component graph - buildComponents <- - case mkComponentsGraph pkg_descr internalPkgDeps of - Left componentCycle -> reportComponentCycle componentCycle - Right comps -> - mkComponentsLocalBuildInfo cfg comp packageDependsIndex pkg_descr - internalPkgDeps externalPkgDeps - comps (configConfigurationsFlags cfg) - - split_objs <- - if not (fromFlag $ configSplitObjs cfg) - then return False - else case compilerFlavor comp of - GHC | compilerVersion comp >= Version [6,5] [] - -> return True - GHCJS - -> return True - _ -> do warn verbosity - ("this compiler does not support " ++ - "--enable-split-objs; ignoring") - return False - - let ghciLibByDefault = - case compilerId comp of - CompilerId GHC _ -> - -- If ghc is non-dynamic, then ghci needs object files, - -- so we build one by default. - -- - -- Technically, archive files should be sufficient for ghci, - -- but because of GHC bug #8942, it has never been safe to - -- rely on them. By the time that bug was fixed, ghci had - -- been changed to read shared libraries instead of archive - -- files (see next code block). - not (GHC.isDynamic comp) - CompilerId GHCJS _ -> - not (GHCJS.isDynamic comp) - _ -> False - - let sharedLibsByDefault - | fromFlag (configDynExe cfg) = - -- build a shared library if dynamically-linked - -- executables are requested - True - | otherwise = case compilerId comp of - CompilerId GHC _ -> - -- if ghc is dynamic, then ghci needs a shared - -- library, so we build one by default. 
- GHC.isDynamic comp - CompilerId GHCJS _ -> - GHCJS.isDynamic comp - _ -> False - withSharedLib_ = - -- build shared libraries if required by GHC or by the - -- executable linking mode, but allow the user to force - -- building only static library archives with - -- --disable-shared. - fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg - withDynExe_ = fromFlag $ configDynExe cfg - when (withDynExe_ && not withSharedLib_) $ warn verbosity $ - "Executables will use dynamic linking, but a shared library " - ++ "is not being built. Linking will fail if any executables " - ++ "depend on the library." - - let (profEnabledLib, profEnabledExe) = computeEffectiveProfiling cfg - - profDetailLibOnly <- checkProfDetail (configProfLibDetail cfg) - profDetailBoth <- liftM (fromFlagOrDefault ProfDetailDefault) - (checkProfDetail (configProfDetail cfg)) - let profDetailLib = fromFlagOrDefault profDetailBoth profDetailLibOnly - profDetailExe = profDetailBoth - - when (profEnabledExe && not profEnabledLib) $ - warn verbosity $ - "Executables will be built with profiling, but library " - ++ "profiling is disabled. Linking will fail if any executables " - ++ "depend on the library." - - let configCoverage_ = - mappend (configCoverage cfg) (configLibCoverage cfg) - - cfg' = cfg { configCoverage = configCoverage_ } - - reloc <- - if not (fromFlag $ configRelocatable cfg) - then return False - else return True - - let lbi = LocalBuildInfo { - configFlags = cfg', - flagAssignment = flags, - extraConfigArgs = [], -- Currently configure does not - -- take extra args, but if it - -- did they would go here. - installDirTemplates = installDirs, - compiler = comp, - hostPlatform = compPlatform, - buildDir = buildDir, - componentsConfigs = buildComponents, - installedPkgs = packageDependsIndex, - pkgDescrFile = Nothing, - localPkgDescr = pkg_descr', - withPrograms = programsConfig'', - withVanillaLib = fromFlag $ configVanillaLib cfg, - withProfLib = profEnabledLib, - withSharedLib = withSharedLib_, - withDynExe = withDynExe_, - withProfExe = profEnabledExe, - withProfLibDetail = profDetailLib, - withProfExeDetail = profDetailExe, - withOptimization = fromFlag $ configOptimization cfg, - withDebugInfo = fromFlag $ configDebugInfo cfg, - withGHCiLib = fromFlagOrDefault ghciLibByDefault $ - configGHCiLib cfg, - splitObjs = split_objs, - stripExes = fromFlag $ configStripExes cfg, - stripLibs = fromFlag $ configStripLibs cfg, - withPackageDB = packageDbs, - progPrefix = fromFlag $ configProgPrefix cfg, - progSuffix = fromFlag $ configProgSuffix cfg, - relocatable = reloc - } - - when reloc (checkRelocatable verbosity pkg_descr lbi) - - let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest - relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi - - unless (isAbsolute (prefix dirs)) $ die $ - "expected an absolute directory name for --prefix: " ++ prefix dirs - - info verbosity $ "Using " ++ display currentCabalId - ++ " compiled by " ++ display currentCompilerId - info verbosity $ "Using compiler: " ++ showCompilerId comp - info verbosity $ "Using install prefix: " ++ prefix dirs - - let dirinfo name dir isPrefixRelative = - info verbosity $ name ++ " installed in: " ++ dir ++ relNote - where relNote = case buildOS of - Windows | not (hasLibs pkg_descr) - && isNothing isPrefixRelative - -> " (fixed location)" - _ -> "" - - dirinfo "Binaries" (bindir dirs) (bindir relative) - dirinfo "Libraries" (libdir dirs) (libdir relative) - dirinfo "Dynamic libraries" (dynlibdir dirs) (dynlibdir relative) - dirinfo 
"Private binaries" (libexecdir dirs) (libexecdir relative) - dirinfo "Data files" (datadir dirs) (datadir relative) - dirinfo "Documentation" (docdir dirs) (docdir relative) - dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative) - - sequence_ [ reportProgram verbosity prog configuredProg - | (prog, configuredProg) <- knownPrograms programsConfig'' ] - - return lbi - - where - verbosity = fromFlag (configVerbosity cfg) - - checkProfDetail (Flag (ProfDetailOther other)) = do - warn verbosity $ - "Unknown profiling detail level '" ++ other - ++ "', using default.\n" - ++ "The profiling detail levels are: " ++ intercalate ", " - [ name | (name, _, _) <- knownProfDetailLevels ] - return (Flag ProfDetailDefault) - checkProfDetail other = return other - -mkProgramsConfig :: ConfigFlags -> ProgramConfiguration -> ProgramConfiguration -mkProgramsConfig cfg initialProgramsConfig = programsConfig - where - programsConfig = userSpecifyArgss (configProgramArgs cfg) - . userSpecifyPaths (configProgramPaths cfg) - . setProgramSearchPath searchpath - $ initialProgramsConfig - searchpath = getProgramSearchPath (initialProgramsConfig) - ++ map ProgramSearchPathDir - (fromNubList $ configProgramPathExtra cfg) - --- ----------------------------------------------------------------------------- --- Helper functions for configure - --- | Check if the user used any deprecated flags. -checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO () -checkDeprecatedFlags verbosity cfg = do - unless (configProfExe cfg == NoFlag) $ do - let enable | fromFlag (configProfExe cfg) = "enable" - | otherwise = "disable" - warn verbosity - ("The flag --" ++ enable ++ "-executable-profiling is deprecated. " - ++ "Please use --" ++ enable ++ "-profiling instead.") - - unless (configLibCoverage cfg == NoFlag) $ do - let enable | fromFlag (configLibCoverage cfg) = "enable" - | otherwise = "disable" - warn verbosity - ("The flag --" ++ enable ++ "-library-coverage is deprecated. " - ++ "Please use --" ++ enable ++ "-coverage instead.") - --- | Sanity check: if '--exact-configuration' was given, ensure that the --- complete flag assignment was specified on the command line. -checkExactConfiguration :: GenericPackageDescription -> ConfigFlags -> IO () -checkExactConfiguration pkg_descr0 cfg = do - when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do - let cmdlineFlags = map fst (configConfigurationsFlags cfg) - allFlags = map flagName . genPackageFlags $ pkg_descr0 - diffFlags = allFlags \\ cmdlineFlags - when (not . null $ diffFlags) $ - die $ "'--exact-configuration' was given, " - ++ "but the following flags were not specified: " - ++ intercalate ", " (map show diffFlags) - --- | Create a PackageIndex that makes *any libraries that might be* --- defined internally to this package look like installed packages, in --- case an executable should refer to any of them as dependencies. --- --- It must be *any libraries that might be* defined rather than the --- actual definitions, because these depend on conditionals in the .cabal --- file, and we haven't resolved them yet. finalizePackageDescription --- does the resolution of conditionals, and it takes internalPackageSet --- as part of its input. --- --- Currently a package can define no more than one library (which has --- the same name as the package) but we could extend this later. 
--- If we later allowed private internal libraries, then here we would --- need to pre-scan the conditional data to make a list of all private --- libraries that could possibly be defined by the .cabal file. -getInternalPackages :: GenericPackageDescription - -> InstalledPackageIndex -getInternalPackages pkg_descr0 = - let pid :: PackageIdentifier -- e.g. foo-0.1 - pid = packageId pkg_descr0 - internalPackage = emptyInstalledPackageInfo { - --TODO: should use a per-compiler method to map the source - -- package ID into an installed package id we can use - -- for the internal package set. The use of - -- mkLegacyUnitId here is a hack. - Installed.installedUnitId = mkLegacyUnitId pid, - Installed.sourcePackageId = pid - } - in PackageIndex.fromList [internalPackage] - - --- | Returns true if a dependency is satisfiable. This is to be passed --- to finalizePackageDescription. -dependencySatisfiable - :: Bool - -> InstalledPackageIndex -- ^ installed set - -> InstalledPackageIndex -- ^ internal set - -> Map PackageName InstalledPackageInfo -- ^ required dependencies - -> (Dependency -> Bool) -dependencySatisfiable - exact_config installedPackageSet internalPackageSet requiredDepsMap - d@(Dependency depName _) - | exact_config = - -- When we're given '--exact-configuration', we assume that all - -- dependencies and flags are exactly specified on the command - -- line. Thus we only consult the 'requiredDepsMap'. Note that - -- we're not doing the version range check, so if there's some - -- dependency that wasn't specified on the command line, - -- 'finalizePackageDescription' will fail. - -- - -- TODO: mention '--exact-configuration' in the error message - -- when this fails? - -- - -- (However, note that internal deps don't have to be - -- specified!) - (depName `Map.member` requiredDepsMap) || isInternalDep - - | otherwise = - -- Normal operation: just look up dependency in the combined - -- package index. - not . null . PackageIndex.lookupDependency pkgs $ d - where - pkgs = PackageIndex.merge internalPackageSet installedPackageSet - isInternalDep = not . null - $ PackageIndex.lookupDependency internalPackageSet d - --- | Relax the dependencies of this package if needed. -relaxPackageDeps :: AllowNewer -> GenericPackageDescription - -> GenericPackageDescription -relaxPackageDeps AllowNewerNone gpd = gpd -relaxPackageDeps AllowNewerAll gpd = transformAllBuildDepends relaxAll gpd - where - relaxAll = \(Dependency pkgName verRange) -> - Dependency pkgName (removeUpperBound verRange) -relaxPackageDeps (AllowNewerSome allowNewerDeps') gpd = - transformAllBuildDepends relaxSome gpd - where - thisPkgName = packageName gpd - allowNewerDeps = mapMaybe f allowNewerDeps' - - f (Setup.AllowNewerDep p) = Just p - f (Setup.AllowNewerDepScoped scope p) | scope == thisPkgName = Just p - | otherwise = Nothing - - relaxSome = \d@(Dependency depName verRange) -> - if depName `elem` allowNewerDeps - then Dependency depName (removeUpperBound verRange) - else d - --- | Finalize a generic package description. The workhorse is --- 'finalizePackageDescription' but there's a bit of other nattering --- about necessary. --- --- TODO: what exactly is the business with @flaggedTests@ and --- @flaggedBenchmarks@? -configureFinalizedPackage - :: Verbosity - -> ConfigFlags - -> [Dependency] - -> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable. - -- Might say it's satisfiable even when not. 
- -> Compiler - -> Platform - -> GenericPackageDescription - -> IO (PackageDescription, FlagAssignment) -configureFinalizedPackage verbosity cfg - allConstraints satisfies comp compPlatform pkg_descr0 = do - let enableTest t = t { testEnabled = fromFlag (configTests cfg) } - flaggedTests = map (\(n, t) -> (n, mapTreeData enableTest t)) - (condTestSuites pkg_descr0) - enableBenchmark bm = bm { benchmarkEnabled = - fromFlag (configBenchmarks cfg) } - flaggedBenchmarks = map (\(n, bm) -> - (n, mapTreeData enableBenchmark bm)) - (condBenchmarks pkg_descr0) - pkg_descr0'' = pkg_descr0 { condTestSuites = flaggedTests - , condBenchmarks = flaggedBenchmarks } - - (pkg_descr0', flags) <- - case finalizePackageDescription - (configConfigurationsFlags cfg) - satisfies - compPlatform - (compilerInfo comp) - allConstraints - pkg_descr0'' - of Right r -> return r - Left missing -> - die $ "Encountered missing dependencies:\n" - ++ (render . nest 4 . sep . punctuate comma - . map (disp . simplifyDependency) - $ missing) - - -- add extra include/lib dirs as specified in cfg - -- we do it here so that those get checked too - let pkg_descr = addExtraIncludeLibDirs pkg_descr0' - - when (not (null flags)) $ - info verbosity $ "Flags chosen: " - ++ intercalate ", " [ name ++ "=" ++ display value - | (FlagName name, value) <- flags ] - - return (pkg_descr, flags) - where - addExtraIncludeLibDirs pkg_descr = - let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg - , extraFrameworkDirs = configExtraFrameworkDirs cfg - , PD.includeDirs = configExtraIncludeDirs cfg} - modifyLib l = l{ libBuildInfo = libBuildInfo l - `mappend` extraBi } - modifyExecutable e = e{ buildInfo = buildInfo e - `mappend` extraBi} - in pkg_descr{ library = modifyLib `fmap` library pkg_descr - , executables = modifyExecutable `map` - executables pkg_descr} - --- | Check for use of Cabal features which require compiler support -checkCompilerProblems :: Compiler -> PackageDescription -> IO () -checkCompilerProblems comp pkg_descr = do - unless (renamingPackageFlagsSupported comp || - and [ True - | bi <- allBuildInfo pkg_descr - , _ <- Map.elems (targetBuildRenaming bi)]) $ - die $ "Your compiler does not support thinning and renaming on " - ++ "package flags. To use this feature you probably must use " - ++ "GHC 7.9 or later." - - when (maybe False (not.null.PD.reexportedModules) (PD.library pkg_descr) - && not (reexportedModulesSupported comp)) $ do - die $ "Your compiler does not support module re-exports. To use " - ++ "this feature you probably must use GHC 7.9 or later." - --- | Select dependencies for the package. -configureDependencies - :: Verbosity - -> InstalledPackageIndex -- ^ internal packages - -> InstalledPackageIndex -- ^ installed packages - -> Map PackageName InstalledPackageInfo -- ^ required deps - -> PackageDescription - -> IO ([PackageId], [InstalledPackageInfo]) -configureDependencies verbosity - internalPackageSet installedPackageSet requiredDepsMap pkg_descr = do - let selectDependencies :: [Dependency] -> - ([FailedDependency], [ResolvedDependency]) - selectDependencies = - partitionEithers - . 
map (selectDependency internalPackageSet installedPackageSet - requiredDepsMap) - - (failedDeps, allPkgDeps) = - selectDependencies (buildDepends pkg_descr) - - internalPkgDeps = [ pkgid - | InternalDependency _ pkgid <- allPkgDeps ] - externalPkgDeps = [ pkg - | ExternalDependency _ pkg <- allPkgDeps ] - - when (not (null internalPkgDeps) - && not (newPackageDepsBehaviour pkg_descr)) $ - die $ "The field 'build-depends: " - ++ intercalate ", " (map (display . packageName) internalPkgDeps) - ++ "' refers to a library which is defined within the same " - ++ "package. To use this feature the package must specify at " - ++ "least 'cabal-version: >= 1.8'." - - reportFailedDependencies failedDeps - reportSelectedDependencies verbosity allPkgDeps - - return (internalPkgDeps, externalPkgDeps) - --- ----------------------------------------------------------------------------- --- Configuring package dependencies - -reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO () -reportProgram verbosity prog Nothing - = info verbosity $ "No " ++ programName prog ++ " found" -reportProgram verbosity prog (Just configuredProg) - = info verbosity $ "Using " ++ programName prog ++ version ++ location - where location = case programLocation configuredProg of - FoundOnSystem p -> " found on system at: " ++ p - UserSpecified p -> " given by user at: " ++ p - version = case programVersion configuredProg of - Nothing -> "" - Just v -> " version " ++ display v - -hackageUrl :: String -hackageUrl = "http://hackage.haskell.org/package/" - -data ResolvedDependency = ExternalDependency Dependency InstalledPackageInfo - | InternalDependency Dependency PackageId -- should be a - -- lib name - -data FailedDependency = DependencyNotExists PackageName - | DependencyNoVersion Dependency - --- | Test for a package dependency and record the version we have installed. -selectDependency :: InstalledPackageIndex -- ^ Internally defined packages - -> InstalledPackageIndex -- ^ Installed packages - -> Map PackageName InstalledPackageInfo - -- ^ Packages for which we have been given specific deps to - -- use - -> Dependency - -> Either FailedDependency ResolvedDependency -selectDependency internalIndex installedIndex requiredDepsMap - dep@(Dependency pkgname vr) = - -- If the dependency specification matches anything in the internal package - -- index, then we prefer that match to anything in the second. - -- For example: - -- - -- Name: MyLibrary - -- Version: 0.1 - -- Library - -- .. - -- Executable my-exec - -- build-depends: MyLibrary - -- - -- We want "build-depends: MyLibrary" always to match the internal library - -- even if there is a newer installed library "MyLibrary-0.2". - -- However, "build-depends: MyLibrary >= 0.2" should match the installed one. - case PackageIndex.lookupPackageName internalIndex pkgname of - [(_,[pkg])] | packageVersion pkg `withinRange` vr - -> Right $ InternalDependency dep (packageId pkg) - - _ -> case Map.lookup pkgname requiredDepsMap of - -- If we know the exact pkg to use, then use it. - Just pkginstance -> Right (ExternalDependency dep pkginstance) - -- Otherwise we just pick an arbitrary instance of the latest version. 
- Nothing -> case PackageIndex.lookupDependency installedIndex dep of - [] -> Left $ DependencyNotExists pkgname - pkgs -> Right $ ExternalDependency dep $ - case last pkgs of - (_ver, pkginstances) -> head pkginstances - -reportSelectedDependencies :: Verbosity - -> [ResolvedDependency] -> IO () -reportSelectedDependencies verbosity deps = - info verbosity $ unlines - [ "Dependency " ++ display (simplifyDependency dep) - ++ ": using " ++ display pkgid - | resolved <- deps - , let (dep, pkgid) = case resolved of - ExternalDependency dep' pkg' -> (dep', packageId pkg') - InternalDependency dep' pkgid' -> (dep', pkgid') ] - -reportFailedDependencies :: [FailedDependency] -> IO () -reportFailedDependencies [] = return () -reportFailedDependencies failed = - die (intercalate "\n\n" (map reportFailedDependency failed)) - - where - reportFailedDependency (DependencyNotExists pkgname) = - "there is no version of " ++ display pkgname ++ " installed.\n" - ++ "Perhaps you need to download and install it from\n" - ++ hackageUrl ++ display pkgname ++ "?" - - reportFailedDependency (DependencyNoVersion dep) = - "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n" - --- | List all installed packages in the given package databases. -getInstalledPackages :: Verbosity -> Compiler - -> PackageDBStack -- ^ The stack of package databases. - -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packageDBs progconf = do - when (null packageDBs) $ - die $ "No package databases have been specified. If you use " - ++ "--package-db=clear, you must follow it with --package-db= " - ++ "with 'global', 'user' or a specific file." - - info verbosity "Reading installed packages..." - case compilerFlavor comp of - GHC -> GHC.getInstalledPackages verbosity comp packageDBs progconf - GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs progconf - JHC -> JHC.getInstalledPackages verbosity packageDBs progconf - LHC -> LHC.getInstalledPackages verbosity packageDBs progconf - UHC -> UHC.getInstalledPackages verbosity comp packageDBs progconf - HaskellSuite {} -> - HaskellSuite.getInstalledPackages verbosity packageDBs progconf - flv -> die $ "don't know how to find the installed packages for " - ++ display flv - --- | Like 'getInstalledPackages', but for a single package DB. --- --- NB: Why isn't this always a fall through to 'getInstalledPackages'? --- That is because 'getInstalledPackages' performs some sanity checks --- on the package database stack in question. However, when sandboxes --- are involved these sanity checks are not desirable. -getPackageDBContents :: Verbosity -> Compiler - -> PackageDB -> ProgramConfiguration - -> IO InstalledPackageIndex -getPackageDBContents verbosity comp packageDB progconf = do - info verbosity "Reading installed packages..." - case compilerFlavor comp of - GHC -> GHC.getPackageDBContents verbosity packageDB progconf - GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progconf - -- For other compilers, try to fall back on 'getInstalledPackages'. - _ -> getInstalledPackages verbosity comp [packageDB] progconf - - --- | A set of files (or directories) that can be monitored to detect when --- there might have been a change in the installed packages. 
--- -getInstalledPackagesMonitorFiles :: Verbosity -> Compiler - -> PackageDBStack - -> ProgramConfiguration -> Platform - -> IO [FilePath] -getInstalledPackagesMonitorFiles verbosity comp packageDBs progconf platform = - case compilerFlavor comp of - GHC -> GHC.getInstalledPackagesMonitorFiles - verbosity platform progconf packageDBs - other -> do - warn verbosity $ "don't know how to find change monitoring files for " - ++ "the installed package databases for " ++ display other - return [] - --- | The user interface specifies the package dbs to use with a combination of --- @--global@, @--user@ and @--package-db=global|user|clear|$file@. --- This function combines the global/user flag and interprets the package-db --- flag into a single package db stack. --- -interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack -interpretPackageDbFlags userInstall specificDBs = - extra initialStack specificDBs - where - initialStack | userInstall = [GlobalPackageDB, UserPackageDB] - | otherwise = [GlobalPackageDB] - - extra dbs' [] = dbs' - extra _ (Nothing:dbs) = extra [] dbs - extra dbs' (Just db:dbs) = extra (dbs' ++ [db]) dbs - -newPackageDepsBehaviourMinVersion :: Version -newPackageDepsBehaviourMinVersion = Version [1,7,1] [] - --- In older cabal versions, there was only one set of package dependencies for --- the whole package. In this version, we can have separate dependencies per --- target, but we only enable this behaviour if the minimum cabal version --- specified is >= a certain minimum. Otherwise, for compatibility we use the --- old behaviour. -newPackageDepsBehaviour :: PackageDescription -> Bool -newPackageDepsBehaviour pkg = - specVersion pkg >= newPackageDepsBehaviourMinVersion - --- We are given both --constraint="foo < 2.0" style constraints and also --- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581". --- --- When finalising the package we have to take into account the specific --- installed deps we've been given, and the finalise function expects --- constraints, so we have to translate these deps into version constraints. --- --- But after finalising we then have to make sure we pick the right specific --- deps in the end. So we still need to remember which installed packages to --- pick. -combinedConstraints :: [Dependency] -> - [(PackageName, UnitId)] -> - InstalledPackageIndex -> - Either String ([Dependency], - Map PackageName InstalledPackageInfo) -combinedConstraints constraints dependencies installedPackages = do - - when (not (null badUnitIds)) $ - Left $ render $ text "The following package dependencies were requested" - $+$ nest 4 (dispDependencies badUnitIds) - $+$ text "however the given installed package instance does not exist." - - when (not (null badNames)) $ - Left $ render $ text "The following package dependencies were requested" - $+$ nest 4 (dispDependencies badNames) - $+$ text ("however the installed package's name does not match " - ++ "the name given.") - - --TODO: we don't check that all dependencies are used! 
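A usage sketch for interpretPackageDbFlags, defined just above: the stack starts as the global database (plus the user database for per-user installs), a Nothing entry corresponds to --package-db=clear and resets the stack, and Just entries are appended in order. It assumes the PackageDB constructors from Distribution.Simple.Compiler and the export list shown earlier in this file:

import Distribution.Simple.Compiler (PackageDB(..))
import Distribution.Simple.Configure (interpretPackageDbFlags)

main :: IO ()
main = do
  print (interpretPackageDbFlags True [])
    -- [GlobalPackageDB,UserPackageDB]
  print (interpretPackageDbFlags True [Just (SpecificPackageDB "extra.db")])
    -- [GlobalPackageDB,UserPackageDB,SpecificPackageDB "extra.db"]
  print (interpretPackageDbFlags False [Nothing, Just UserPackageDB])
    -- [UserPackageDB]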
- - return (allConstraints, idConstraintMap) - - where - allConstraints :: [Dependency] - allConstraints = constraints - ++ [ thisPackageVersion (packageId pkg) - | (_, _, Just pkg) <- dependenciesPkgInfo ] - - idConstraintMap :: Map PackageName InstalledPackageInfo - idConstraintMap = Map.fromList - [ (packageName pkg, pkg) - | (_, _, Just pkg) <- dependenciesPkgInfo ] - - -- The dependencies along with the installed package info, if it exists - dependenciesPkgInfo :: [(PackageName, UnitId, - Maybe InstalledPackageInfo)] - dependenciesPkgInfo = - [ (pkgname, ipkgid, mpkg) - | (pkgname, ipkgid) <- dependencies - , let mpkg = PackageIndex.lookupUnitId - installedPackages ipkgid - ] - - -- If we looked up a package specified by an installed package id - -- (i.e. someone has written a hash) and didn't find it then it's - -- an error. - badUnitIds = - [ (pkgname, ipkgid) - | (pkgname, ipkgid, Nothing) <- dependenciesPkgInfo ] - - -- If someone has written e.g. - -- --dependency="foo=MyOtherLib-1.0-07...5bf30" then they have - -- probably made a mistake. - badNames = - [ (requestedPkgName, ipkgid) - | (requestedPkgName, ipkgid, Just pkg) <- dependenciesPkgInfo - , let foundPkgName = packageName pkg - , requestedPkgName /= foundPkgName ] - - dispDependencies deps = - hsep [ text "--dependency=" - <> quotes (disp pkgname <> char '=' <> disp ipkgid) - | (pkgname, ipkgid) <- deps ] - --- ----------------------------------------------------------------------------- --- Configuring program dependencies - -configureRequiredPrograms :: Verbosity -> [Dependency] -> ProgramConfiguration - -> IO ProgramConfiguration -configureRequiredPrograms verbosity deps conf = - foldM (configureRequiredProgram verbosity) conf deps - -configureRequiredProgram :: Verbosity -> ProgramConfiguration -> Dependency - -> IO ProgramConfiguration -configureRequiredProgram verbosity conf - (Dependency (PackageName progName) verRange) = - case lookupKnownProgram progName conf of - Nothing -> die ("Unknown build tool " ++ progName) - Just prog - -- requireProgramVersion always requires the program have a version - -- but if the user says "build-depends: foo" ie no version constraint - -- then we should not fail if we cannot discover the program version. 
- | verRange == anyVersion -> do - (_, conf') <- requireProgram verbosity prog conf - return conf' - | otherwise -> do - (_, _, conf') <- requireProgramVersion verbosity prog verRange conf - return conf' - --- ----------------------------------------------------------------------------- --- Configuring pkg-config package dependencies - -configurePkgconfigPackages :: Verbosity -> PackageDescription - -> ProgramConfiguration - -> IO (PackageDescription, ProgramConfiguration) -configurePkgconfigPackages verbosity pkg_descr conf - | null allpkgs = return (pkg_descr, conf) - | otherwise = do - (_, _, conf') <- requireProgramVersion - (lessVerbose verbosity) pkgConfigProgram - (orLaterVersion $ Version [0,9,0] []) conf - mapM_ requirePkg allpkgs - lib' <- mapM addPkgConfigBILib (library pkg_descr) - exes' <- mapM addPkgConfigBIExe (executables pkg_descr) - tests' <- mapM addPkgConfigBITest (testSuites pkg_descr) - benches' <- mapM addPkgConfigBIBench (benchmarks pkg_descr) - let pkg_descr' = pkg_descr { library = lib', executables = exes', - testSuites = tests', benchmarks = benches' } - return (pkg_descr', conf') - - where - allpkgs = concatMap pkgconfigDepends (allBuildInfo pkg_descr) - pkgconfig = rawSystemProgramStdoutConf (lessVerbose verbosity) - pkgConfigProgram conf - - requirePkg dep@(Dependency (PackageName pkg) range) = do - version <- pkgconfig ["--modversion", pkg] - `catchIO` (\_ -> die notFound) - `catchExit` (\_ -> die notFound) - case simpleParse version of - Nothing -> die "parsing output of pkg-config --modversion failed" - Just v | not (withinRange v range) -> die (badVersion v) - | otherwise -> info verbosity (depSatisfied v) - where - notFound = "The pkg-config package '" ++ pkg ++ "'" - ++ versionRequirement - ++ " is required but it could not be found." - badVersion v = "The pkg-config package '" ++ pkg ++ "'" - ++ versionRequirement - ++ " is required but the version installed on the" - ++ " system is version " ++ display v - depSatisfied v = "Dependency " ++ display dep - ++ ": using version " ++ display v - - versionRequirement - | isAnyVersion range = "" - | otherwise = " version " ++ display range - - -- Adds pkgconfig dependencies to the build info for a component - addPkgConfigBI compBI setCompBI comp = do - bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp)) - return $ setCompBI comp (compBI comp `mappend` bi) - - -- Adds pkgconfig dependencies to the build info for a library - addPkgConfigBILib = addPkgConfigBI libBuildInfo $ - \lib bi -> lib { libBuildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for an executable - addPkgConfigBIExe = addPkgConfigBI buildInfo $ - \exe bi -> exe { buildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for a test suite - addPkgConfigBITest = addPkgConfigBI testBuildInfo $ - \test bi -> test { testBuildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for a benchmark - addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $ - \bench bi -> bench { benchmarkBuildInfo = bi } - - pkgconfigBuildInfo :: [Dependency] -> IO BuildInfo - pkgconfigBuildInfo [] = return Mon.mempty - pkgconfigBuildInfo pkgdeps = do - let pkgs = nub [ display pkg | Dependency pkg _ <- pkgdeps ] - ccflags <- pkgconfig ("--cflags" : pkgs) - ldflags <- pkgconfig ("--libs" : pkgs) - return (ccLdOptionsBuildInfo (words ccflags) (words ldflags)) - --- | Makes a 'BuildInfo' from C compiler and linker flags. 
--- --- This can be used with the output from configuration programs like pkg-config --- and similar package-specific programs like mysql-config, freealut-config etc. --- For example: --- --- > ccflags <- rawSystemProgramStdoutConf verbosity prog conf ["--cflags"] --- > ldflags <- rawSystemProgramStdoutConf verbosity prog conf ["--libs"] --- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags)) --- -ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo -ccLdOptionsBuildInfo cflags ldflags = - let (includeDirs', cflags') = partition ("-I" `isPrefixOf`) cflags - (extraLibs', ldflags') = partition ("-l" `isPrefixOf`) ldflags - (extraLibDirs', ldflags'') = partition ("-L" `isPrefixOf`) ldflags' - in mempty { - PD.includeDirs = map (drop 2) includeDirs', - PD.extraLibs = map (drop 2) extraLibs', - PD.extraLibDirs = map (drop 2) extraLibDirs', - PD.ccOptions = cflags', - PD.ldOptions = ldflags'' - } - --- ----------------------------------------------------------------------------- --- Determining the compiler details - -configCompilerAuxEx :: ConfigFlags - -> IO (Compiler, Platform, ProgramConfiguration) -configCompilerAuxEx cfg = configCompilerEx (flagToMaybe $ configHcFlavor cfg) - (flagToMaybe $ configHcPath cfg) - (flagToMaybe $ configHcPkg cfg) - programsConfig - (fromFlag (configVerbosity cfg)) - where - programsConfig = mkProgramsConfig cfg defaultProgramConfiguration - -configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> Verbosity - -> IO (Compiler, Platform, ProgramConfiguration) -configCompilerEx Nothing _ _ _ _ = die "Unknown compiler" -configCompilerEx (Just hcFlavor) hcPath hcPkg conf verbosity = do - (comp, maybePlatform, programsConfig) <- case hcFlavor of - GHC -> GHC.configure verbosity hcPath hcPkg conf - GHCJS -> GHCJS.configure verbosity hcPath hcPkg conf - JHC -> JHC.configure verbosity hcPath hcPkg conf - LHC -> do (_, _, ghcConf) <- GHC.configure verbosity Nothing hcPkg conf - LHC.configure verbosity hcPath Nothing ghcConf - UHC -> UHC.configure verbosity hcPath hcPkg conf - HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg conf - _ -> die "Unknown compiler" - return (comp, fromMaybe buildPlatform maybePlatform, programsConfig) - --- Ideally we would like to not have separate configCompiler* and --- configCompiler*Ex sets of functions, but there are many custom setup scripts --- in the wild that are using them, so the versions with old types are kept for --- backwards compatibility. Platform was added to the return triple in 1.18. - -{-# DEPRECATED configCompiler - "'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-} -configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> Verbosity - -> IO (Compiler, ProgramConfiguration) -configCompiler mFlavor hcPath hcPkg conf verbosity = - fmap (\(a,_,b) -> (a,b)) $ configCompilerEx mFlavor hcPath hcPkg conf verbosity - -{-# DEPRECATED configCompilerAux - "configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-} -configCompilerAux :: ConfigFlags - -> IO (Compiler, ProgramConfiguration) -configCompilerAux = fmap (\(a,_,b) -> (a,b)) . 
configCompilerAuxEx - --- ----------------------------------------------------------------------------- --- Making the internal component graph - - -mkComponentsGraph :: PackageDescription - -> [PackageId] - -> Either [ComponentName] - [(Component, [ComponentName])] -mkComponentsGraph pkg_descr internalPkgDeps = - let graph = [ (c, componentName c, componentDeps c) - | c <- pkgEnabledComponents pkg_descr ] - in case checkComponentsCyclic graph of - Just ccycle -> Left [ cname | (_,cname,_) <- ccycle ] - Nothing -> Right [ (c, cdeps) | (c, _, cdeps) <- graph ] - where - -- The dependencies for the given component - componentDeps component = - [ CExeName toolname | Dependency (PackageName toolname) _ - <- buildTools bi - , toolname `elem` map exeName - (executables pkg_descr) ] - - ++ [ CLibName | Dependency pkgname _ <- targetBuildDepends bi - , pkgname `elem` map packageName internalPkgDeps ] - where - bi = componentBuildInfo component - -reportComponentCycle :: [ComponentName] -> IO a -reportComponentCycle cnames = - die $ "Components in the package depend on each other in a cyclic way:\n " - ++ intercalate " depends on " - [ "'" ++ showComponentName cname ++ "'" - | cname <- cnames ++ [head cnames] ] - --- | This method computes a default, "good enough" 'ComponentId' --- for a package. The intent is that cabal-install (or the user) will --- specify a more detailed IPID via the @--ipid@ flag if necessary. -computeComponentId :: PackageIdentifier - -> ComponentName - -- TODO: careful here! - -> [ComponentId] -- IPIDs of the component dependencies - -> FlagAssignment - -> ComponentId -computeComponentId pid cname dep_ipids flagAssignment = do - -- show is found to be faster than intercalate and then replacement of - -- special character used in intercalating. We cannot simply hash by - -- doubly concating list, as it just flatten out the nested list, so - -- different sources can produce same hash - let hash = hashToBase62 $ - -- For safety, include the package + version here - -- for GHC 7.10, where just the hash is used as - -- the package key - (display pid) - ++ (show $ dep_ipids) - ++ show flagAssignment - ComponentId $ - display pid - ++ "-" ++ hash - ++ (case cname of - CLibName -> "" - -- TODO: these could result in non-parseable IPIDs - -- since the component name format is very flexible - CExeName s -> "-" ++ s ++ ".exe" - CTestName s -> "-" ++ s ++ ".test" - CBenchName s -> "-" ++ s ++ ".bench") - -hashToBase62 :: String -> String -hashToBase62 s = showFingerprint $ fingerprintString s - where - showIntAtBase62 x = showIntAtBase 62 representBase62 x "" - representBase62 x - | x < 10 = chr (48 + x) - | x < 36 = chr (65 + x - 10) - | x < 62 = chr (97 + x - 36) - | otherwise = '@' - showFingerprint (Fingerprint a b) = showIntAtBase62 a ++ showIntAtBase62 b - --- | In GHC 8.0, the string we pass to GHC to use for symbol --- names for a package can be an arbitrary, IPID-compatible string. --- However, prior to GHC 8.0 there are some restrictions on what --- format this string can be (due to how ghc-pkg parsed the key): --- --- 1. In GHC 7.10, the string had either be of the form --- foo_ABCD, where foo is a non-semantic alphanumeric/hyphenated --- prefix and ABCD is two base-64 encoded 64-bit integers, --- or a GHC 7.8 style identifier. --- --- 2. In GHC 7.8, the string had to be a valid package identifier --- like foo-0.1. 
--- --- So, the problem is that Cabal, in general, has a general IPID, --- but needs to figure out a package key / package ID that the --- old ghc-pkg will actually accept. But there's an EVERY WORSE --- problem: if ghc-pkg decides to parse an identifier foo-0.1-xxx --- as if it were a package identifier, which means it will SILENTLY --- DROP the "xxx" (because it's a tag, and Cabal does not allow tags.) --- So we must CONNIVE to ensure that we don't pick something that --- looks like this. --- --- So this function attempts to define a mapping into the old formats. --- --- The mapping for GHC 7.8 and before: --- --- * For CLibName, we unconditionally use the 'PackageIdentifier'. --- --- * For sub-components, we create a new 'PackageIdentifier' which --- is encoded in the following way. The test suite "qux" in package --- "foobar-0.2" gets this package identifier "z-foobar-z-test-qux-0.2". --- These package IDs have the form: --- --- cpid ::= "z-" package-id "-z-" component-type "-" component-name --- component-type ::= "test" | "bench" | "exe" | "lib" --- package-id and component-name have "-" ( "z" + ) "-" --- segments encoded by adding an extra "z". --- --- The mapping for GHC 7.10: --- --- * For CLibName: --- If the IPID is of the form foo-0.1-ABCDEF where foo_ABCDEF would --- validly parse as a package key, we pass "ABCDEF". (NB: not --- all hashes parse this way, because GHC 7.10 mandated that --- these hashes be two base-62 encoded 64 bit integers), --- but hashes that Cabal generated using 'computeComponentId' --- are guaranteed to have this form. --- --- If it is not of this form, we rehash the IPID into the --- correct form and pass that. --- --- * For sub-components, we rehash the IPID into the correct format --- and pass that. --- -computeCompatPackageKey - :: Compiler - -> PackageIdentifier - -> ComponentName - -> UnitId - -> (PackageName, String) -computeCompatPackageKey comp pid cname uid@(SimpleUnitId (ComponentId str)) - | not (packageKeySupported comp || unitIdSupported comp) = - -- NB: the package ID in the database entry has to follow this - let zdashcode s = go s (Nothing :: Maybe Int) [] - where go [] _ r = reverse r - go ('-':z) (Just n) r | n > 0 = go z (Just 0) ('-':'z':r) - go ('-':z) _ r = go z (Just 0) ('-':r) - go ('z':z) (Just n) r = go z (Just (n+1)) ('z':r) - go (c:z) _ r = go z Nothing (c:r) - cname_str = case cname of - CLibName -> error "computeCompatPackageKey" - CTestName n -> "-z-test-" ++ zdashcode n - CBenchName n -> "-z-bench-" ++ zdashcode n - CExeName n -> "-z-exe-" ++ zdashcode n - package_name - | cname == CLibName = pkgName pid - | otherwise = PackageName $ "z-" - ++ zdashcode (display (pkgName pid)) - ++ zdashcode cname_str - old_style_key - | cname == CLibName = display pid - | otherwise = display package_name ++ "-" - ++ display (pkgVersion pid) - in (package_name, old_style_key) - | not (unifiedIPIDRequired comp) = - let mb_verbatim_key - = case simpleParse str :: Maybe PackageId of - -- Something like 'foo-0.1', use it verbatim. - -- (NB: hash tags look like tags, so they are parsed, - -- so the extra equality check tests if a tag was dropped.) 
- Just pid0 | display pid0 == str -> Just str - _ -> Nothing - mb_truncated_key - = let cand = reverse (takeWhile isAlphaNum (reverse str)) - in if length cand == 22 && all isAlphaNum cand - then Just cand - else Nothing - rehashed_key = hashToBase62 str - in (pkgName pid, fromMaybe rehashed_key - (mb_verbatim_key `mplus` mb_truncated_key)) - | otherwise = (pkgName pid, display uid) - -mkComponentsLocalBuildInfo :: ConfigFlags - -> Compiler - -> InstalledPackageIndex - -> PackageDescription - -> [PackageId] -- internal package deps - -> [InstalledPackageInfo] -- external package deps - -> [(Component, [ComponentName])] - -> FlagAssignment - -> IO [(ComponentName, ComponentLocalBuildInfo, - [ComponentName])] -mkComponentsLocalBuildInfo cfg comp installedPackages pkg_descr - internalPkgDeps externalPkgDeps - graph flagAssignment = do - -- Pre-compute library hash so we can setup internal deps - -- TODO configIPID should have name changed - let cid = case configIPID cfg of - Flag cid0 -> - -- Hack to reuse install dirs machinery - -- NB: no real IPID available at this point - let env = packageTemplateEnv (package pkg_descr) - (mkUnitId "") - str = fromPathTemplate - (InstallDirs.substPathTemplate env - (toPathTemplate cid0)) - in ComponentId str - _ -> - computeComponentId (package pkg_descr) CLibName - (getDeps CLibName) flagAssignment - uid = SimpleUnitId cid - (_, compat_key) = computeCompatPackageKey comp - (package pkg_descr) CLibName uid - sequence - [ do clbi <- componentLocalBuildInfo uid compat_key c - return (componentName c, clbi, cdeps) - | (c, cdeps) <- graph ] - where - getDeps cname = - let externalPkgs = maybe [] (\lib -> selectSubset - (componentBuildInfo lib) - externalPkgDeps) - (lookupComponent pkg_descr cname) - in map Installed.installedComponentId externalPkgs - - -- The allPkgDeps contains all the package deps for the whole package - -- but we need to select the subset for this specific component. - -- we just take the subset for the package names this component - -- needs. Note, this only works because we cannot yet depend on two - -- versions of the same package. - componentLocalBuildInfo uid compat_key component = - case component of - CLib lib -> do - let exports = map (\n -> Installed.ExposedModule n Nothing) - (PD.exposedModules lib) - let mb_reexports = resolveModuleReexports installedPackages - (packageId pkg_descr) - uid - externalPkgDeps lib - reexports <- case mb_reexports of - Left problems -> reportModuleReexportProblems problems - Right r -> return r - - return LibComponentLocalBuildInfo { - componentPackageDeps = cpds, - componentUnitId = uid, - componentCompatPackageKey = compat_key, - componentPackageRenaming = cprns, - componentExposedModules = exports ++ reexports - } - CExe _ -> - return ExeComponentLocalBuildInfo { - componentPackageDeps = cpds, - componentPackageRenaming = cprns - } - CTest _ -> - return TestComponentLocalBuildInfo { - componentPackageDeps = cpds, - componentPackageRenaming = cprns - } - CBench _ -> - return BenchComponentLocalBuildInfo { - componentPackageDeps = cpds, - componentPackageRenaming = cprns - } - where - bi = componentBuildInfo component - dedup = Map.toList . 
Map.fromList - cpds = if newPackageDepsBehaviour pkg_descr - then dedup $ - [ (Installed.installedUnitId pkg, packageId pkg) - | pkg <- selectSubset bi externalPkgDeps ] - ++ [ (uid, pkgid) - | pkgid <- selectSubset bi internalPkgDeps ] - else [ (Installed.installedUnitId pkg, packageId pkg) - | pkg <- externalPkgDeps ] - cprns = if newPackageDepsBehaviour pkg_descr - then targetBuildRenaming bi - -- Hack: if we have old package-deps behavior, it's impossible - -- for non-default renamings to be used, because the Cabal - -- version is too early. This is a good, because while all the - -- deps were bundled up in buildDepends, we didn't do this for - -- renamings, so it's not even clear how to get the merged - -- version. So just assume that all of them are the default.. - else Map.fromList (map (\(_,pid) -> - (packageName pid, defaultRenaming)) cpds) - - selectSubset :: Package pkg => BuildInfo -> [pkg] -> [pkg] - selectSubset bi pkgs = - [ pkg | pkg <- pkgs, packageName pkg `elem` names bi ] - - names bi = [ name | Dependency name _ <- targetBuildDepends bi ] - --- | Given the author-specified re-export declarations from the .cabal file, --- resolve them to the form that we need for the package database. --- --- An invariant of the package database is that we always link the re-export --- directly to its original defining location (rather than indirectly via a --- chain of re-exporting packages). --- -resolveModuleReexports :: InstalledPackageIndex - -> PackageId - -> UnitId - -> [InstalledPackageInfo] - -> Library - -> Either [(ModuleReexport, String)] -- errors - [Installed.ExposedModule] -- ok -resolveModuleReexports installedPackages srcpkgid key externalPkgDeps lib = - case partitionEithers - (map resolveModuleReexport (PD.reexportedModules lib)) of - ([], ok) -> Right ok - (errs, _) -> Left errs - where - -- A mapping from visible module names to their original defining - -- module name. We also record the package name of the package which - -- *immediately* provided the module (not the original) to handle if the - -- user explicitly says which build-depends they want to reexport from. - visibleModules :: Map ModuleName [(PackageName, Installed.ExposedModule)] - visibleModules = - Map.fromListWith (++) $ - [ (Installed.exposedName exposedModule, [(exportingPackageName, - exposedModule)]) - -- The package index here contains all the indirect deps of the - -- package we're configuring, but we want just the direct deps - | let directDeps = Set.fromList - (map Installed.installedUnitId externalPkgDeps) - , pkg <- PackageIndex.allPackages installedPackages - , Installed.installedUnitId pkg `Set.member` directDeps - , let exportingPackageName = packageName pkg - , exposedModule <- visibleModuleDetails pkg - ] - ++ [ (visibleModuleName, [(exportingPackageName, exposedModule)]) - | visibleModuleName <- PD.exposedModules lib - ++ otherModules (libBuildInfo lib) - , let exportingPackageName = packageName srcpkgid - definingModuleName = visibleModuleName - definingPackageId = key - originalModule = Installed.OriginalModule definingPackageId - definingModuleName - exposedModule = Installed.ExposedModule visibleModuleName - (Just originalModule) - ] - - -- All the modules exported from this package and their defining name and - -- package (either defined here in this package or re-exported from some - -- other package). Return an ExposedModule because we want to hold onto - -- signature information. 
- visibleModuleDetails :: InstalledPackageInfo -> [Installed.ExposedModule] - visibleModuleDetails pkg = do - exposedModule <- Installed.exposedModules pkg - case Installed.exposedReexport exposedModule of - -- The first case is the modules actually defined in this package. - -- In this case the reexport will point to this package. - Nothing -> return exposedModule { - Installed.exposedReexport = - Just (Installed.OriginalModule - (Installed.installedUnitId pkg) - (Installed.exposedName exposedModule)) } - -- On the other hand, a visible module might actually be itself - -- a re-export! In this case, the re-export info for the package - -- doing the re-export will point us to the original defining - -- module name and package, so we can reuse the entry. - Just _ -> return exposedModule - - resolveModuleReexport reexport@ModuleReexport { - moduleReexportOriginalPackage = moriginalPackageName, - moduleReexportOriginalName = originalName, - moduleReexportName = newName - } = - - let filterForSpecificPackage = - case moriginalPackageName of - Nothing -> id - Just originalPackageName -> - filter (\(pkgname, _) -> pkgname == originalPackageName) - - matches = filterForSpecificPackage - (Map.findWithDefault [] originalName visibleModules) - in - case (matches, moriginalPackageName) of - ((_, exposedModule):rest, _) - -- TODO: Refine this check for signatures - | all (\(_, exposedModule') -> - Installed.exposedReexport exposedModule - == Installed.exposedReexport exposedModule') rest - -> Right exposedModule { Installed.exposedName = newName } - - ([], Just originalPackageName) - -> Left $ (,) reexport - $ "The package " ++ display originalPackageName - ++ " does not export a module " ++ display originalName - - ([], Nothing) - -> Left $ (,) reexport - $ "The module " ++ display originalName - ++ " is not exported by any suitable package (this package " - ++ "itself nor any of its 'build-depends' dependencies)." - - (ms, _) - -> Left $ (,) reexport - $ "The module " ++ display originalName ++ " is exported " - ++ "by more than one package (" - ++ intercalate ", " [ display pkgname | (pkgname,_) <- ms ] - ++ ") and so the re-export is ambiguous. The ambiguity can " - ++ "be resolved by qualifying by the package name. The " - ++ "syntax is 'packagename:moduleName [as newname]'." - - -- Note: if in future Cabal allows directly depending on multiple - -- instances of the same package (e.g. backpack) then an additional - -- ambiguity case is possible here: (_, Just originalPackageName) - -- with the module being ambiguous despite being qualified by a - -- package name. Presumably by that time we'll have a mechanism to - -- qualify the instance we're referring to. - -reportModuleReexportProblems :: [(ModuleReexport, String)] -> IO a -reportModuleReexportProblems reexportProblems = - die $ unlines - [ "Problem with the module re-export '" ++ display reexport ++ "': " ++ msg - | (reexport, msg) <- reexportProblems ] - --- ----------------------------------------------------------------------------- --- Testing C lib and header dependencies - --- Try to build a test C program which includes every header and links every --- lib. If that fails, try to narrow it down by preprocessing (only) and linking --- with individual headers and libs. If none is the obvious culprit then give a --- generic error message. --- TODO: produce a log file from the compiler errors, if any. 
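The narrowing strategy described in the comment above (try everything at once, then grow the header list one element at a time until the check fails) can be captured by a small reusable helper. This is an illustrative sketch only; checkForeignDeps below implements the real thing via ifBuildsWith, and the compilesWith check named here is hypothetical:

import Data.List (inits)

-- Run 'check' on ever larger prefixes of the input and return the element
-- whose addition first makes the check fail (Nothing if everything passes).
firstFailing :: Monad m => ([a] -> m Bool) -> [a] -> m (Maybe a)
firstFailing check = go . drop 1 . inits
  where
    go []          = return Nothing
    go (xs : rest) = do
      ok <- check xs
      if ok then go rest else return (Just (last xs))

-- Usage sketch (compilesWith is a hypothetical IO check):
--   firstFailing compilesWith allHeaders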
-checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO () -checkForeignDeps pkg lbi verbosity = do - ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling - -- lucky - (return ()) - (do missingLibs <- findMissingLibs - missingHdr <- findOffendingHdr - explainErrors missingHdr missingLibs) - where - allHeaders = collectField PD.includes - allLibs = collectField PD.extraLibs - - ifBuildsWith headers args success failure = do - ok <- builds (makeProgram headers) args - if ok then success else failure - - findOffendingHdr = - ifBuildsWith allHeaders ccArgs - (return Nothing) - (go . tail . inits $ allHeaders) - where - go [] = return Nothing -- cannot happen - go (hdrs:hdrsInits) = - -- Try just preprocessing first - ifBuildsWith hdrs cppArgs - -- If that works, try compiling too - (ifBuildsWith hdrs ccArgs - (go hdrsInits) - (return . Just . Right . last $ hdrs)) - (return . Just . Left . last $ hdrs) - - cppArgs = "-E":commonCppArgs -- preprocess only - ccArgs = "-c":commonCcArgs -- don't try to link - - findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs) - (return []) - (filterM (fmap not . libExists) allLibs) - - libExists lib = builds (makeProgram []) (makeLdArgs [lib]) - - commonCppArgs = platformDefines lbi - ++ [ "-I" ++ autogenModulesDir lbi ] - ++ [ "-I" ++ dir | dir <- collectField PD.includeDirs ] - ++ ["-I."] - ++ collectField PD.cppOptions - ++ collectField PD.ccOptions - ++ [ "-I" ++ dir - | dep <- deps - , dir <- Installed.includeDirs dep ] - ++ [ opt - | dep <- deps - , opt <- Installed.ccOptions dep ] - - commonCcArgs = commonCppArgs - ++ collectField PD.ccOptions - ++ [ opt - | dep <- deps - , opt <- Installed.ccOptions dep ] - - commonLdArgs = [ "-L" ++ dir | dir <- collectField PD.extraLibDirs ] - ++ collectField PD.ldOptions - ++ [ "-L" ++ dir - | dep <- deps - , dir <- Installed.libraryDirs dep ] - --TODO: do we also need dependent packages' ld options? - makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs - - makeProgram hdrs = unlines $ - [ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++ - ["int main(int argc, char** argv) { return 0; }"] - - collectField f = concatMap f allBi - allBi = allBuildInfo pkg - deps = PackageIndex.topologicalOrder (installedPkgs lbi) - - builds program args = do - tempDir <- getTemporaryDirectory - withTempFile tempDir ".c" $ \cName cHnd -> - withTempFile tempDir "" $ \oNname oHnd -> do - hPutStrLn cHnd program - hClose cHnd - hClose oHnd - _ <- rawSystemProgramStdoutConf verbosity - gccProgram (withPrograms lbi) (cName:"-o":oNname:args) - return True - `catchIO` (\_ -> return False) - `catchExit` (\_ -> return False) - - explainErrors Nothing [] = return () -- should be impossible! - explainErrors _ _ - | isNothing . lookupProgram gccProgram . withPrograms $ lbi - - = die $ unlines $ - [ "No working gcc", - "This package depends on foreign library but we cannot " - ++ "find a working C compiler. If you have it in a " - ++ "non-standard location you can use the --with-gcc " - ++ "flag to specify it." 
]
-
-    explainErrors hdr libs = die $ unlines $
-         [ if plural
-             then "Missing dependencies on foreign libraries:"
-             else "Missing dependency on a foreign library:"
-         | missing ]
-      ++ case hdr of
-           Just (Left h) -> ["* Missing (or bad) header file: " ++ h ]
-           _             -> []
-      ++ case libs of
-           []    -> []
-           [lib] -> ["* Missing C library: " ++ lib]
-           _     -> ["* Missing C libraries: " ++ intercalate ", " libs]
-      ++ [if plural then messagePlural else messageSingular | missing]
-      ++ case hdr of
-           Just (Left  _) -> [ headerCppMessage ]
-           Just (Right h) -> [ (if missing then "* " else "")
-                               ++ "Bad header file: " ++ h
-                             , headerCcMessage ]
-           _              -> []
-
-      where
-        plural = length libs >= 2
-        -- Is there something missing? (as opposed to broken)
-        missing = not (null libs)
-               || case hdr of Just (Left _) -> True; _ -> False
-
-        messageSingular =
-             "This problem can usually be solved by installing the system "
-          ++ "package that provides this library (you may need the "
-          ++ "\"-dev\" version). If the library is already installed "
-          ++ "but in a non-standard location then you can use the flags "
-          ++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
-          ++ "where it is."
-        messagePlural =
-             "This problem can usually be solved by installing the system "
-          ++ "packages that provide these libraries (you may need the "
-          ++ "\"-dev\" versions). If the libraries are already installed "
-          ++ "but in a non-standard location then you can use the flags "
-          ++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
-          ++ "where they are."
-        headerCppMessage =
-             "If the header file does exist, it may contain errors that "
-          ++ "are caught by the C compiler at the preprocessing stage. "
-          ++ "In this case you can re-run configure with the verbosity "
-          ++ "flag -v3 to see the error messages."
-        headerCcMessage =
-             "The header file contains a compile error. "
-          ++ "You can re-run configure with the verbosity flag "
-          ++ "-v3 to see the error messages from the C compiler."
-
--- | Output package check warnings and errors. Exit if any errors.
-checkPackageProblems :: Verbosity
-                     -> GenericPackageDescription
-                     -> PackageDescription
-                     -> IO ()
-checkPackageProblems verbosity gpkg pkg = do
-  ioChecks <- checkPackageFiles pkg "."
-  let pureChecks = checkPackage gpkg (Just pkg)
-      errors   = [ e | PackageBuildImpossible e <- pureChecks ++ ioChecks ]
-      warnings = [ w | PackageBuildWarning  w <- pureChecks ++ ioChecks ]
-  if null errors
-    then mapM_ (warn verbosity) warnings
-    else die (intercalate "\n\n" errors)
-
--- | Perform checks to determine whether a relocatable build is allowed
-checkRelocatable :: Verbosity
-                 -> PackageDescription
-                 -> LocalBuildInfo
-                 -> IO ()
-checkRelocatable verbosity pkg lbi
-    = sequence_ [ checkOS
-                , checkCompiler
-                , packagePrefixRelative
-                , depsPrefixRelative
-                ]
-  where
-    -- Check if the OS supports relocatable builds.
- -- - -- If you add new OS' to this list, and your OS supports dynamic libraries - -- and RPATH, make sure you add your OS to RPATH-support list of: - -- Distribution.Simple.GHC.getRPaths - checkOS - = unless (os `elem` [ OSX, Linux ]) - $ die $ "Operating system: " ++ display os ++ - ", does not support relocatable builds" - where - (Platform _ os) = hostPlatform lbi - - -- Check if the Compiler support relocatable builds - checkCompiler - = unless (compilerFlavor comp `elem` [ GHC ]) - $ die $ "Compiler: " ++ show comp ++ - ", does not support relocatable builds" - where - comp = compiler lbi - - -- Check if all the install dirs are relative to same prefix - packagePrefixRelative - = unless (relativeInstallDirs installDirs) - $ die $ "Installation directories are not prefix_relative:\n" ++ - show installDirs - where - installDirs = absoluteInstallDirs pkg lbi NoCopyDest - p = prefix installDirs - relativeInstallDirs (InstallDirs {..}) = - all isJust - (fmap (stripPrefix p) - [ bindir, libdir, dynlibdir, libexecdir, includedir, datadir - , docdir, mandir, htmldir, haddockdir, sysconfdir] ) - - -- Check if the library dirs of the dependencies that are in the package - -- database to which the package is installed are relative to the - -- prefix of the package - depsPrefixRelative = do - pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi)) - mapM_ (doCheck pkgr) ipkgs - where - doCheck pkgr ipkg - | maybe False (== pkgr) (Installed.pkgRoot ipkg) - = mapM_ (\l -> when (isNothing $ stripPrefix p l) (die (msg l))) - (Installed.libraryDirs ipkg) - | otherwise - = return () - installDirs = absoluteInstallDirs pkg lbi NoCopyDest - p = prefix installDirs - ipkgs = PackageIndex.allPackages (installedPkgs lbi) - msg l = "Library directory of a dependency: " ++ show l ++ - "\nis not relative to the installation prefix:\n" ++ - show p diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/ImplInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/ImplInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/ImplInfo.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/ImplInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,109 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.ImplInfo --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains the data structure describing invocation --- details for a GHC or GHC-derived compiler, such as supported flags --- and workarounds for bugs. - -module Distribution.Simple.GHC.ImplInfo ( - GhcImplInfo(..), getImplInfo, - ghcVersionImplInfo, ghcjsVersionImplInfo, lhcVersionImplInfo - ) where - -import Distribution.Simple.Compiler -import Distribution.Version - -{- | - Information about features and quirks of a GHC-based implementation. - - Compiler flavors based on GHC behave similarly enough that some of - the support code for them is shared. Every implementation has its - own peculiarities, that may or may not be a direct result of the - underlying GHC version. This record keeps track of these differences. - - All shared code (i.e. everything not in the Distribution.Simple.FLAVOR - module) should use implementation info rather than version numbers - to test for supported features. 
--} - -data GhcImplInfo = GhcImplInfo - { hasCcOdirBug :: Bool -- ^ bug in -odir handling for C compilations. - , flagInfoLanguages :: Bool -- ^ --info and --supported-languages flags - , fakeRecordPuns :: Bool -- ^ use -XRecordPuns for NamedFieldPuns - , flagStubdir :: Bool -- ^ -stubdir flag supported - , flagOutputDir :: Bool -- ^ -outputdir flag supported - , noExtInSplitSuffix :: Bool -- ^ split-obj suffix does not contain p_o ext - , flagFfiIncludes :: Bool -- ^ -#include on command line for FFI includes - , flagBuildingCabalPkg :: Bool -- ^ -fbuilding-cabal-package flag supported - , flagPackageId :: Bool -- ^ -package-id / -package flags supported - , separateGccMingw :: Bool -- ^ mingw and gcc are in separate directories - , supportsHaskell2010 :: Bool -- ^ -XHaskell2010 and -XHaskell98 flags - , reportsNoExt :: Bool -- ^ --supported-languages gives Ext and NoExt - , alwaysNondecIndent :: Bool -- ^ NondecreasingIndentation is always on - , flagGhciScript :: Bool -- ^ -ghci-script flag supported - , flagProfAuto :: Bool -- ^ new style -fprof-auto* flags - , flagPackageConf :: Bool -- ^ use package-conf instead of package-db - , flagDebugInfo :: Bool -- ^ -g flag supported - } - -getImplInfo :: Compiler -> GhcImplInfo -getImplInfo comp = - case compilerFlavor comp of - GHC -> ghcVersionImplInfo (compilerVersion comp) - LHC -> lhcVersionImplInfo (compilerVersion comp) - GHCJS -> case compilerCompatVersion GHC comp of - Just ghcVer -> ghcjsVersionImplInfo (compilerVersion comp) ghcVer - _ -> error ("Distribution.Simple.GHC.Props.getImplProps: " ++ - "could not find GHC version for GHCJS compiler") - x -> error ("Distribution.Simple.GHC.Props.getImplProps only works" ++ - "for GHC-like compilers (GHC, GHCJS, LHC)" ++ - ", but found " ++ show x) - -ghcVersionImplInfo :: Version -> GhcImplInfo -ghcVersionImplInfo (Version v _) = GhcImplInfo - { hasCcOdirBug = v < [6,4,1] - , flagInfoLanguages = v >= [6,7] - , fakeRecordPuns = v >= [6,8] && v < [6,10] - , flagStubdir = v >= [6,8] - , flagOutputDir = v >= [6,10] - , noExtInSplitSuffix = v < [6,11] - , flagFfiIncludes = v < [6,11] - , flagBuildingCabalPkg = v >= [6,11] - , flagPackageId = v > [6,11] - , separateGccMingw = v < [6,12] - , supportsHaskell2010 = v >= [7] - , reportsNoExt = v >= [7] - , alwaysNondecIndent = v < [7,1] - , flagGhciScript = v >= [7,2] - , flagProfAuto = v >= [7,4] - , flagPackageConf = v < [7,5] - , flagDebugInfo = v >= [7,10] - } - -ghcjsVersionImplInfo :: Version -> Version -> GhcImplInfo -ghcjsVersionImplInfo _ghcjsVer _ghcVer = GhcImplInfo - { hasCcOdirBug = False - , flagInfoLanguages = True - , fakeRecordPuns = False - , flagStubdir = True - , flagOutputDir = True - , noExtInSplitSuffix = False - , flagFfiIncludes = False - , flagBuildingCabalPkg = True - , flagPackageId = True - , separateGccMingw = False - , supportsHaskell2010 = True - , reportsNoExt = True - , alwaysNondecIndent = False - , flagGhciScript = True - , flagProfAuto = True - , flagPackageConf = False - , flagDebugInfo = False - } - -lhcVersionImplInfo :: Version -> GhcImplInfo -lhcVersionImplInfo = ghcVersionImplInfo diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/Internal.hs 2016-11-07 10:02:25.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,535 +0,0 @@ -{-# LANGUAGE PatternGuards #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.Internal --- Copyright : Isaac Jones 2003-2007 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains functions shared by GHC (Distribution.Simple.GHC) --- and GHC-derived compilers. - -module Distribution.Simple.GHC.Internal ( - configureToolchain, - getLanguages, - getExtensions, - targetPlatform, - getGhcInfo, - componentCcGhcOptions, - componentGhcOptions, - mkGHCiLibName, - filterGhciFlags, - ghcLookupProperty, - getHaskellObjects, - mkGhcOptPackages, - substTopDir, - checkPackageDbEnvVar, - profDetailLevelFlag, - showArchString, - showOsString, - ) where - -import Distribution.Simple.GHC.ImplInfo -import Distribution.Package -import Distribution.InstalledPackageInfo -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Compat.Exception -import Distribution.Lex -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup -import qualified Distribution.ModuleName as ModuleName -import Distribution.Simple.Program -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Utils -import Distribution.Simple.BuildPaths -import Distribution.System -import Distribution.Text ( display, simpleParse ) -import Distribution.Utils.NubList ( toNubListR ) -import Distribution.Verbosity -import Language.Haskell.Extension - -import qualified Data.Map as M -import Data.Char ( isSpace ) -import Data.Maybe ( fromMaybe, maybeToList, isJust ) -import Control.Monad ( unless, when ) -import Data.Monoid as Mon ( Monoid(..) ) -import System.Directory ( getDirectoryContents, getTemporaryDirectory ) -import System.Environment ( getEnv ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, takeFileName) -import System.IO ( hClose, hPutStrLn ) - -targetPlatform :: [(String, String)] -> Maybe Platform -targetPlatform ghcInfo = platformFromTriple =<< lookup "Target platform" ghcInfo - --- | Adjust the way we find and configure gcc and ld --- -configureToolchain :: GhcImplInfo - -> ConfiguredProgram - -> M.Map String String - -> ProgramConfiguration - -> ProgramConfiguration -configureToolchain implInfo ghcProg ghcInfo = - addKnownProgram gccProgram { - programFindLocation = findProg gccProgramName extraGccPath, - programPostConf = configureGcc - } - . addKnownProgram ldProgram { - programFindLocation = findProg ldProgramName extraLdPath, - programPostConf = configureLd - } - . addKnownProgram arProgram { - programFindLocation = findProg arProgramName extraArPath - } - . addKnownProgram stripProgram { - programFindLocation = findProg stripProgramName extraStripPath - } - where - compilerDir = takeDirectory (programPath ghcProg) - baseDir = takeDirectory compilerDir - mingwBinDir = baseDir "mingw" "bin" - libDir = baseDir "gcc-lib" - includeDir = baseDir "include" "mingw" - isWindows = case buildOS of Windows -> True; _ -> False - binPrefix = "" - - maybeName :: Program -> Maybe FilePath -> String - maybeName prog = maybe (programName prog) (dropExeExtension . 
takeFileName) - - gccProgramName = maybeName gccProgram mbGccLocation - ldProgramName = maybeName ldProgram mbLdLocation - arProgramName = maybeName arProgram mbArLocation - stripProgramName = maybeName stripProgram mbStripLocation - - mkExtraPath :: Maybe FilePath -> FilePath -> [FilePath] - mkExtraPath mbPath mingwPath | isWindows = mbDir ++ [mingwPath] - | otherwise = mbDir - where - mbDir = maybeToList . fmap takeDirectory $ mbPath - - extraGccPath = mkExtraPath mbGccLocation windowsExtraGccDir - extraLdPath = mkExtraPath mbLdLocation windowsExtraLdDir - extraArPath = mkExtraPath mbArLocation windowsExtraArDir - extraStripPath = mkExtraPath mbStripLocation windowsExtraStripDir - - -- on Windows finding and configuring ghc's gcc & binutils is a bit special - (windowsExtraGccDir, windowsExtraLdDir, - windowsExtraArDir, windowsExtraStripDir) - | separateGccMingw implInfo = (baseDir, libDir, libDir, libDir) - | otherwise = -- GHC >= 6.12 - let b = mingwBinDir binPrefix - in (b, b, b, b) - - findProg :: String -> [FilePath] - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) - findProg progName extraPath v searchpath = - findProgramOnSearchPath v searchpath' progName - where - searchpath' = (map ProgramSearchPathDir extraPath) ++ searchpath - - -- Read tool locations from the 'ghc --info' output. Useful when - -- cross-compiling. - mbGccLocation = M.lookup "C compiler command" ghcInfo - mbLdLocation = M.lookup "ld command" ghcInfo - mbArLocation = M.lookup "ar command" ghcInfo - mbStripLocation = M.lookup "strip command" ghcInfo - - ccFlags = getFlags "C compiler flags" - -- GHC 7.8 renamed "Gcc Linker flags" to "C compiler link flags" - -- and "Ld Linker flags" to "ld flags" (GHC #4862). - gccLinkerFlags = getFlags "Gcc Linker flags" ++ getFlags "C compiler link flags" - ldLinkerFlags = getFlags "Ld Linker flags" ++ getFlags "ld flags" - - -- It appears that GHC 7.6 and earlier encode the tokenized flags as a - -- [String] in these settings whereas later versions just encode the flags as - -- String. - -- - -- We first try to parse as a [String] and if this fails then tokenize the - -- flags ourself. 
- getFlags :: String -> [String] - getFlags key = - case M.lookup key ghcInfo of - Nothing -> [] - Just flags - | (flags', ""):_ <- reads flags -> flags' - | otherwise -> tokenizeQuotedWords flags - - configureGcc :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureGcc v gccProg = do - gccProg' <- configureGcc' v gccProg - return gccProg' { - programDefaultArgs = programDefaultArgs gccProg' - ++ ccFlags ++ gccLinkerFlags - } - - configureGcc' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureGcc' - | isWindows = \_ gccProg -> case programLocation gccProg of - -- if it's found on system then it means we're using the result - -- of programFindLocation above rather than a user-supplied path - -- Pre GHC 6.12, that meant we should add these flags to tell - -- ghc's gcc where it lives and thus where gcc can find its - -- various files: - FoundOnSystem {} - | separateGccMingw implInfo -> - return gccProg { programDefaultArgs = ["-B" ++ libDir, - "-I" ++ includeDir] } - _ -> return gccProg - | otherwise = \_ gccProg -> return gccProg - - configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureLd v ldProg = do - ldProg' <- configureLd' v ldProg - return ldProg' { - programDefaultArgs = programDefaultArgs ldProg' ++ ldLinkerFlags - } - - -- we need to find out if ld supports the -x flag - configureLd' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureLd' verbosity ldProg = do - tempDir <- getTemporaryDirectory - ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> - withTempFile tempDir ".o" $ \testofile testohnd -> do - hPutStrLn testchnd "int foo() { return 0; }" - hClose testchnd; hClose testohnd - rawSystemProgram verbosity ghcProg - [ "-hide-all-packages" - , "-c", testcfile - , "-o", testofile - ] - withTempFile tempDir ".o" $ \testofile' testohnd' -> - do - hClose testohnd' - _ <- rawSystemProgramStdout verbosity ldProg - ["-x", "-r", testofile, "-o", testofile'] - return True - `catchIO` (\_ -> return False) - `catchExit` (\_ -> return False) - if ldx - then return ldProg { programDefaultArgs = ["-x"] } - else return ldProg - -getLanguages :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> IO [(Language, String)] -getLanguages _ implInfo _ - -- TODO: should be using --supported-languages rather than hard coding - | supportsHaskell2010 implInfo = return [(Haskell98, "-XHaskell98") - ,(Haskell2010, "-XHaskell2010")] - | otherwise = return [(Haskell98, "")] - -getGhcInfo :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> IO [(String, String)] -getGhcInfo verbosity implInfo ghcProg - | flagInfoLanguages implInfo = do - xs <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) - ["--info"] - case reads xs of - [(i, ss)] - | all isSpace ss -> - return i - _ -> - die "Can't parse --info output of GHC" - | otherwise = - return [] - -getExtensions :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> IO [(Extension, String)] -getExtensions verbosity implInfo ghcProg - | flagInfoLanguages implInfo = do - str <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) - ["--supported-languages"] - let extStrs = if reportsNoExt implInfo - then lines str - else -- Older GHCs only gave us either Foo or NoFoo, - -- so we have to work out the other one ourselves - [ extStr'' - | extStr <- lines str - , let extStr' = case extStr of - 'N' : 'o' : xs -> xs - _ -> "No" ++ extStr - , extStr'' <- [extStr, extStr'] - ] - let extensions0 = [ (ext, "-X" ++ display ext) - | Just ext <- map simpleParse extStrs ] - 
extensions1 = if fakeRecordPuns implInfo - then -- ghc-6.8 introduced RecordPuns however it - -- should have been NamedFieldPuns. We now - -- encourage packages to use NamedFieldPuns - -- so for compatibility we fake support for - -- it in ghc-6.8 by making it an alias for - -- the old RecordPuns extension. - (EnableExtension NamedFieldPuns, "-XRecordPuns") : - (DisableExtension NamedFieldPuns, "-XNoRecordPuns") : - extensions0 - else extensions0 - extensions2 = if alwaysNondecIndent implInfo - then -- ghc-7.2 split NondecreasingIndentation off - -- into a proper extension. Before that it - -- was always on. - (EnableExtension NondecreasingIndentation, "") : - (DisableExtension NondecreasingIndentation, "") : - extensions1 - else extensions1 - return extensions2 - - | otherwise = return oldLanguageExtensions - --- | For GHC 6.6.x and earlier, the mapping from supported extensions to flags -oldLanguageExtensions :: [(Extension, String)] -oldLanguageExtensions = - let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), - (DisableExtension f, disable)] - fglasgowExts = ("-fglasgow-exts", - "") -- This is wrong, but we don't want to turn - -- all the extensions off when asked to just - -- turn one off - fFlag flag = ("-f" ++ flag, "-fno-" ++ flag) - in concatMap doFlag - [(OverlappingInstances , fFlag "allow-overlapping-instances") - ,(TypeSynonymInstances , fglasgowExts) - ,(TemplateHaskell , fFlag "th") - ,(ForeignFunctionInterface , fFlag "ffi") - ,(MonomorphismRestriction , fFlag "monomorphism-restriction") - ,(MonoPatBinds , fFlag "mono-pat-binds") - ,(UndecidableInstances , fFlag "allow-undecidable-instances") - ,(IncoherentInstances , fFlag "allow-incoherent-instances") - ,(Arrows , fFlag "arrows") - ,(Generics , fFlag "generics") - ,(ImplicitPrelude , fFlag "implicit-prelude") - ,(ImplicitParams , fFlag "implicit-params") - ,(CPP , ("-cpp", ""{- Wrong -})) - ,(BangPatterns , fFlag "bang-patterns") - ,(KindSignatures , fglasgowExts) - ,(RecursiveDo , fglasgowExts) - ,(ParallelListComp , fglasgowExts) - ,(MultiParamTypeClasses , fglasgowExts) - ,(FunctionalDependencies , fglasgowExts) - ,(Rank2Types , fglasgowExts) - ,(RankNTypes , fglasgowExts) - ,(PolymorphicComponents , fglasgowExts) - ,(ExistentialQuantification , fglasgowExts) - ,(ScopedTypeVariables , fFlag "scoped-type-variables") - ,(FlexibleContexts , fglasgowExts) - ,(FlexibleInstances , fglasgowExts) - ,(EmptyDataDecls , fglasgowExts) - ,(PatternGuards , fglasgowExts) - ,(GeneralizedNewtypeDeriving , fglasgowExts) - ,(MagicHash , fglasgowExts) - ,(UnicodeSyntax , fglasgowExts) - ,(PatternSignatures , fglasgowExts) - ,(UnliftedFFITypes , fglasgowExts) - ,(LiberalTypeSynonyms , fglasgowExts) - ,(TypeOperators , fglasgowExts) - ,(GADTs , fglasgowExts) - ,(RelaxedPolyRec , fglasgowExts) - ,(ExtendedDefaultRules , fFlag "extended-default-rules") - ,(UnboxedTuples , fglasgowExts) - ,(DeriveDataTypeable , fglasgowExts) - ,(ConstrainedClassMethods , fglasgowExts) - ] - -componentCcGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath - -> GhcOptions -componentCcGhcOptions verbosity implInfo lbi bi clbi pref filename = - mempty { - ghcOptVerbosity = toFlag verbosity, - ghcOptMode = toFlag GhcModeCompile, - ghcOptInputFiles = toNubListR [filename], - - ghcOptCppIncludePath = toNubListR $ [autogenModulesDir lbi, odir] - ++ PD.includeDirs bi, - ghcOptHideAllPackages= toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ 
mkGhcOptPackages clbi, - ghcOptCcOptions = toNubListR $ - (case withOptimization lbi of - NoOptimisation -> [] - _ -> ["-O2"]) ++ - (case withDebugInfo lbi of - NoDebugInfo -> [] - MinimalDebugInfo -> ["-g1"] - NormalDebugInfo -> ["-g"] - MaximalDebugInfo -> ["-g3"]) ++ - PD.ccOptions bi, - ghcOptObjDir = toFlag odir - } - where - odir | hasCcOdirBug implInfo = pref takeDirectory filename - | otherwise = pref - -- ghc 6.4.0 had a bug in -odir handling for C compilations. - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - mempty { - ghcOptVerbosity = toFlag verbosity, - ghcOptCabal = toFlag True, - ghcOptThisUnitId = case clbi of - LibComponentLocalBuildInfo { componentCompatPackageKey = pk } - -> toFlag pk - _ -> Mon.mempty, - ghcOptHideAllPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, - ghcOptSplitObjs = toFlag (splitObjs lbi), - ghcOptSourcePathClear = toFlag True, - ghcOptSourcePath = toNubListR $ [odir] ++ (hsSourceDirs bi) - ++ [autogenModulesDir lbi], - ghcOptCppIncludePath = toNubListR $ [autogenModulesDir lbi, odir] - ++ PD.includeDirs bi, - ghcOptCppOptions = toNubListR $ cppOptions bi, - ghcOptCppIncludes = toNubListR $ - [autogenModulesDir lbi cppHeaderName], - ghcOptFfiIncludes = toNubListR $ PD.includes bi, - ghcOptObjDir = toFlag odir, - ghcOptHiDir = toFlag odir, - ghcOptStubDir = toFlag odir, - ghcOptOutputDir = toFlag odir, - ghcOptOptimisation = toGhcOptimisation (withOptimization lbi), - ghcOptDebugInfo = toGhcDebugInfo (withDebugInfo lbi), - ghcOptExtra = toNubListR $ hcOptions GHC bi, - ghcOptLanguage = toFlag (fromMaybe Haskell98 (defaultLanguage bi)), - -- Unsupported extensions have already been checked by configure - ghcOptExtensions = toNubListR $ usedExtensions bi, - ghcOptExtensionMap = M.fromList . compilerExtensions $ (compiler lbi) - } - where - toGhcOptimisation NoOptimisation = mempty --TODO perhaps override? - toGhcOptimisation NormalOptimisation = toFlag GhcNormalOptimisation - toGhcOptimisation MaximumOptimisation = toFlag GhcMaximumOptimisation - - -- GHC doesn't support debug info levels yet. - toGhcDebugInfo NoDebugInfo = mempty - toGhcDebugInfo MinimalDebugInfo = toFlag True - toGhcDebugInfo NormalDebugInfo = toFlag True - toGhcDebugInfo MaximalDebugInfo = toFlag True - --- | Strip out flags that are not supported in ghci -filterGhciFlags :: [String] -> [String] -filterGhciFlags = filter supported - where - supported ('-':'O':_) = False - supported "-debug" = False - supported "-threaded" = False - supported "-ticky" = False - supported "-eventlog" = False - supported "-prof" = False - supported "-unreg" = False - supported _ = True - -mkGHCiLibName :: UnitId -> String -mkGHCiLibName lib = getHSLibraryName lib <.> "o" - -ghcLookupProperty :: String -> Compiler -> Bool -ghcLookupProperty prop comp = - case M.lookup prop (compilerProperties comp) of - Just "YES" -> True - _ -> False - --- when using -split-objs, we need to search for object files in the --- Module_split directory for each module. 
-getHaskellObjects :: GhcImplInfo -> Library -> LocalBuildInfo - -> FilePath -> String -> Bool -> IO [FilePath] -getHaskellObjects implInfo lib lbi pref wanted_obj_ext allow_split_objs - | splitObjs lbi && allow_split_objs = do - let splitSuffix = if noExtInSplitSuffix implInfo - then "_split" - else "_" ++ wanted_obj_ext ++ "_split" - dirs = [ pref (ModuleName.toFilePath x ++ splitSuffix) - | x <- libModules lib ] - objss <- mapM getDirectoryContents dirs - let objs = [ dir obj - | (objs',dir) <- zip objss dirs, obj <- objs', - let obj_ext = takeExtension obj, - '.':wanted_obj_ext == obj_ext ] - return objs - | otherwise = - return [ pref ModuleName.toFilePath x <.> wanted_obj_ext - | x <- libModules lib ] - -mkGhcOptPackages :: ComponentLocalBuildInfo - -> [(UnitId, PackageId, ModuleRenaming)] -mkGhcOptPackages clbi = - map (\(i,p) -> (i,p,lookupRenaming p (componentPackageRenaming clbi))) - (componentPackageDeps clbi) - -substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo -substTopDir topDir ipo - = ipo { - InstalledPackageInfo.importDirs - = map f (InstalledPackageInfo.importDirs ipo), - InstalledPackageInfo.libraryDirs - = map f (InstalledPackageInfo.libraryDirs ipo), - InstalledPackageInfo.includeDirs - = map f (InstalledPackageInfo.includeDirs ipo), - InstalledPackageInfo.frameworkDirs - = map f (InstalledPackageInfo.frameworkDirs ipo), - InstalledPackageInfo.haddockInterfaces - = map f (InstalledPackageInfo.haddockInterfaces ipo), - InstalledPackageInfo.haddockHTMLs - = map f (InstalledPackageInfo.haddockHTMLs ipo) - } - where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest - f x = x - --- Cabal does not use the environment variable GHC{,JS}_PACKAGE_PATH; let --- users know that this is the case. See ticket #335. Simply ignoring it is --- not a good idea, since then ghc and cabal are looking at different sets --- of package DBs and chaos is likely to ensue. --- --- An exception to this is when running cabal from within a `cabal exec` --- environment. In this case, `cabal exec` will set the --- CABAL_SANDBOX_PACKAGE_PATH to the same value that it set --- GHC{,JS}_PACKAGE_PATH to. If that is the case it is OK to allow --- GHC{,JS}_PACKAGE_PATH. -checkPackageDbEnvVar :: String -> String -> IO () -checkPackageDbEnvVar compilerName packagePathEnvVar = do - mPP <- lookupEnv packagePathEnvVar - when (isJust mPP) $ do - mcsPP <- lookupEnv "CABAL_SANDBOX_PACKAGE_PATH" - unless (mPP == mcsPP) abort - where - lookupEnv :: String -> IO (Maybe String) - lookupEnv name = (Just `fmap` getEnv name) - `catchIO` const (return Nothing) - abort = - die $ "Use of " ++ compilerName ++ "'s environment variable " - ++ packagePathEnvVar ++ " is incompatible with Cabal. Use the " - ++ "flag --package-db to specify a package database (it can be " - ++ "used multiple times)." - -profDetailLevelFlag :: Bool -> ProfDetailLevel -> Flag GhcProfAuto -profDetailLevelFlag forLib mpl = - case mpl of - ProfDetailNone -> mempty - ProfDetailDefault | forLib -> toFlag GhcProfAutoExported - | otherwise -> toFlag GhcProfAutoToplevel - ProfDetailExportedFunctions -> toFlag GhcProfAutoExported - ProfDetailToplevelFunctions -> toFlag GhcProfAutoToplevel - ProfDetailAllFunctions -> toFlag GhcProfAutoAll - ProfDetailOther _ -> mempty - --- | GHC's rendering of it's host or target 'Arch' as used in its platform --- strings and certain file locations (such as user package db location). 
--- -showArchString :: Arch -> String -showArchString PPC = "powerpc" -showArchString PPC64 = "powerpc64" -showArchString other = display other - --- | GHC's rendering of it's host or target 'OS' as used in its platform --- strings and certain file locations (such as user package db location). --- -showOsString :: OS -> String -showOsString Windows = "mingw32" -showOsString OSX = "darwin" -showOsString Solaris = "solaris2" -showOsString other = display other diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPI642.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPI642.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPI642.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPI642.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.IPI642 --- Copyright : (c) The University of Glasgow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- - -module Distribution.Simple.GHC.IPI642 ( - InstalledPackageInfo(..), - toCurrent, - ) where - -import qualified Distribution.InstalledPackageInfo as Current -import qualified Distribution.Package as Current hiding (installedUnitId) -import Distribution.Simple.GHC.IPIConvert - --- | This is the InstalledPackageInfo type used by ghc-6.4.2 and later. --- --- It's here purely for the 'Read' instance so that we can read the package --- database used by those ghc versions. It is a little hacky to read the --- package db directly, but we do need the info and until ghc-6.9 there was --- no better method. --- --- In ghc-6.4.1 and before the format was slightly different. --- See "Distribution.Simple.GHC.IPI642" --- -data InstalledPackageInfo = InstalledPackageInfo { - package :: PackageIdentifier, - license :: License, - copyright :: String, - maintainer :: String, - author :: String, - stability :: String, - homepage :: String, - pkgUrl :: String, - description :: String, - category :: String, - exposed :: Bool, - exposedModules :: [String], - hiddenModules :: [String], - importDirs :: [FilePath], - libraryDirs :: [FilePath], - hsLibraries :: [String], - extraLibraries :: [String], - extraGHCiLibraries:: [String], - includeDirs :: [FilePath], - includes :: [String], - depends :: [PackageIdentifier], - hugsOptions :: [String], - ccOptions :: [String], - ldOptions :: [String], - frameworkDirs :: [FilePath], - frameworks :: [String], - haddockInterfaces :: [FilePath], - haddockHTMLs :: [FilePath] - } - deriving Read - -toCurrent :: InstalledPackageInfo -> Current.InstalledPackageInfo -toCurrent ipi@InstalledPackageInfo{} = - let pid = convertPackageId (package ipi) - mkExposedModule m = Current.ExposedModule m Nothing - in Current.InstalledPackageInfo { - Current.sourcePackageId = pid, - Current.installedUnitId = Current.mkLegacyUnitId pid, - Current.compatPackageKey = "", - Current.abiHash = Current.AbiHash "", -- bogus but old GHCs don't care. 
- Current.license = convertLicense (license ipi), - Current.copyright = copyright ipi, - Current.maintainer = maintainer ipi, - Current.author = author ipi, - Current.stability = stability ipi, - Current.homepage = homepage ipi, - Current.pkgUrl = pkgUrl ipi, - Current.synopsis = "", - Current.description = description ipi, - Current.category = category ipi, - Current.exposed = exposed ipi, - Current.exposedModules = map (mkExposedModule . convertModuleName) (exposedModules ipi), - Current.hiddenModules = map convertModuleName (hiddenModules ipi), - Current.trusted = Current.trusted Current.emptyInstalledPackageInfo, - Current.importDirs = importDirs ipi, - Current.libraryDirs = libraryDirs ipi, - Current.libraryDynDirs = [], - Current.dataDir = "", - Current.hsLibraries = hsLibraries ipi, - Current.extraLibraries = extraLibraries ipi, - Current.extraGHCiLibraries = extraGHCiLibraries ipi, - Current.includeDirs = includeDirs ipi, - Current.includes = includes ipi, - Current.depends = map (Current.mkLegacyUnitId . convertPackageId) (depends ipi), - Current.ccOptions = ccOptions ipi, - Current.ldOptions = ldOptions ipi, - Current.frameworkDirs = frameworkDirs ipi, - Current.frameworks = frameworks ipi, - Current.haddockInterfaces = haddockInterfaces ipi, - Current.haddockHTMLs = haddockHTMLs ipi, - Current.pkgRoot = Nothing - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPIConvert.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPIConvert.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPIConvert.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC/IPIConvert.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.IPI642 --- Copyright : (c) The University of Glasgow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Helper functions for 'Distribution.Simple.GHC.IPI642'. 
-module Distribution.Simple.GHC.IPIConvert ( - PackageIdentifier, convertPackageId, - License, convertLicense, - convertModuleName - ) where - -import qualified Distribution.Package as Current hiding (installedUnitId) -import qualified Distribution.License as Current - -import Distribution.Version -import Distribution.ModuleName -import Distribution.Text - -import Data.Maybe - -data PackageIdentifier = PackageIdentifier { - pkgName :: String, - pkgVersion :: Version - } - deriving Read - -convertPackageId :: PackageIdentifier -> Current.PackageIdentifier -convertPackageId PackageIdentifier { pkgName = n, pkgVersion = v } = - Current.PackageIdentifier (Current.PackageName n) v - -data License = GPL | LGPL | BSD3 | BSD4 - | PublicDomain | AllRightsReserved | OtherLicense - deriving Read - -convertModuleName :: String -> ModuleName -convertModuleName s = fromJust $ simpleParse s - -convertLicense :: License -> Current.License -convertLicense GPL = Current.GPL Nothing -convertLicense LGPL = Current.LGPL Nothing -convertLicense BSD3 = Current.BSD3 -convertLicense BSD4 = Current.BSD4 -convertLicense PublicDomain = Current.PublicDomain -convertLicense AllRightsReserved = Current.AllRightsReserved -convertLicense OtherLicense = Current.OtherLicense diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHC.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1237 +0,0 @@ -{-# LANGUAGE PatternGuards #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC --- Copyright : Isaac Jones 2003-2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is a fairly large module. It contains most of the GHC-specific code for --- configuring, building and installing packages. It also exports a function --- for finding out what packages are already installed. Configuring involves --- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions --- this version of ghc supports and returning a 'Compiler' value. --- --- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out --- what packages are installed. --- --- Building is somewhat complex as there is quite a bit of information to take --- into account. We have to build libs and programs, possibly for profiling and --- shared libs. We have to support building libraries that will be usable by --- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files --- using ghc. Linking, especially for @split-objs@ is remarkably complex, --- partly because there tend to be 1,000's of @.o@ files and this can often be --- more than we can pass to the @ld@ or @ar@ programs in one go. --- --- Installing for libs and exes involves finding the right files and copying --- them to the right places. One of the more tricky things about this module is --- remembering the layout of files in the build directory (which is not --- explicitly documented) and thus what search dirs are used for various kinds --- of files. 
- -module Distribution.Simple.GHC ( - getGhcInfo, - configure, - getInstalledPackages, - getInstalledPackagesMonitorFiles, - getPackageDBContents, - buildLib, buildExe, - replLib, replExe, - startInterpreter, - installLib, installExe, - libAbiHash, - hcPkgInfo, - registerPackage, - componentGhcOptions, - componentCcGhcOptions, - getLibDir, - isDynamic, - getGlobalPackageDB, - pkgRoot - ) where - -import Control.Applicative -- 7.10 -Werror workaround -import Prelude -- https://ghc.haskell.org/trac/ghc/wiki/Migration/7.10#GHCsaysTheimportof...isredundant - -import qualified Distribution.Simple.GHC.IPI642 as IPI642 -import qualified Distribution.Simple.GHC.Internal as Internal -import Distribution.Simple.GHC.ImplInfo -import Distribution.PackageDescription as PD -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import qualified Distribution.Simple.Hpc as Hpc -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.Simple.Program -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import qualified Distribution.Simple.Program.Ar as Ar -import qualified Distribution.Simple.Program.Ld as Ld -import qualified Distribution.Simple.Program.Strip as Strip -import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Version -import Distribution.System -import Distribution.Verbosity -import Distribution.Text -import Distribution.Utils.NubList -import Language.Haskell.Extension - -import Control.Monad ( unless, when ) -import Data.Char ( isDigit, isSpace ) -import Data.List -import qualified Data.Map as M ( fromList, lookup ) -import Data.Maybe ( catMaybes ) -import Data.Monoid as Mon ( Monoid(..) 
) -import Data.Version ( showVersion ) -import System.Directory - ( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing - , canonicalizePath ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, replaceExtension - , isRelative ) -import qualified System.Info - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration - -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity hcPath hcPkgPath conf0 = do - - (ghcProg, ghcVersion, conf1) <- - requireProgramVersion verbosity ghcProgram - (orLaterVersion (Version [6,4] [])) - (userMaybeSpecifyPath "ghc" hcPath conf0) - let implInfo = ghcVersionImplInfo ghcVersion - - -- This is slightly tricky, we have to configure ghc first, then we use the - -- location of ghc to help find ghc-pkg in the case that the user did not - -- specify the location of ghc-pkg directly: - (ghcPkgProg, ghcPkgVersion, conf2) <- - requireProgramVersion verbosity ghcPkgProgram { - programFindLocation = guessGhcPkgFromGhcPath ghcProg - } - anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf1) - - when (ghcVersion /= ghcPkgVersion) $ die $ - "Version mismatch between ghc and ghc-pkg: " - ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " " - ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion - - -- Likewise we try to find the matching hsc2hs and haddock programs. - let hsc2hsProgram' = hsc2hsProgram { - programFindLocation = guessHsc2hsFromGhcPath ghcProg - } - haddockProgram' = haddockProgram { - programFindLocation = guessHaddockFromGhcPath ghcProg - } - conf3 = addKnownProgram haddockProgram' $ - addKnownProgram hsc2hsProgram' conf2 - - languages <- Internal.getLanguages verbosity implInfo ghcProg - extensions0 <- Internal.getExtensions verbosity implInfo ghcProg - - ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg - let ghcInfoMap = M.fromList ghcInfo - extensions = -- workaround https://ghc.haskell.org/ticket/11214 - filterExt JavaScriptFFI $ - -- see 'filterExtTH' comment below - filterExtTH $ extensions0 - - -- starting with GHC 8.0, `TemplateHaskell` will be omitted from - -- `--supported-extensions` when it's not available. - -- for older GHCs we can use the "Have interpreter" property to - -- filter out `TemplateHaskell` - filterExtTH | ghcVersion < Version [8] [] - , Just "NO" <- M.lookup "Have interpreter" ghcInfoMap - = filterExt TemplateHaskell - | otherwise = id - - filterExt ext = filter ((/= EnableExtension ext) . fst) - - let comp = Compiler { - compilerId = CompilerId GHC ghcVersion, - compilerAbiTag = NoAbiTag, - compilerCompat = [], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = ghcInfoMap - } - compPlatform = Internal.targetPlatform ghcInfo - -- configure gcc and ld - conf4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap conf3 - return (comp, compPlatform, conf4) - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find --- the corresponding tool; e.g. 
if the tool is ghc-pkg, we try looking
--- for a versioned or unversioned ghc-pkg in the same dir, that is:
---
--- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
--- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
--- > /usr/local/bin/ghc-pkg(.exe)
---
-guessToolFromGhcPath :: Program -> ConfiguredProgram
-                     -> Verbosity -> ProgramSearchPath
-                     -> IO (Maybe (FilePath, [FilePath]))
-guessToolFromGhcPath tool ghcProg verbosity searchpath
-  = do let toolname          = programName tool
-           given_path        = programPath ghcProg
-           given_dir         = takeDirectory given_path
-       real_path <- canonicalizePath given_path
-       let real_dir           = takeDirectory real_path
-           versionSuffix path = takeVersionSuffix (dropExeExtension path)
-           given_suf = versionSuffix given_path
-           real_suf  = versionSuffix real_path
-           guessNormal       dir = dir </> toolname <.> exeExtension
-           guessGhcVersioned dir suf = dir </> (toolname ++ "-ghc" ++ suf)
-                                       <.> exeExtension
-           guessVersioned    dir suf = dir </> (toolname ++ suf)
-                                       <.> exeExtension
-           mkGuesses dir suf | null suf  = [guessNormal dir]
-                             | otherwise = [guessGhcVersioned dir suf,
-                                            guessVersioned dir suf,
-                                            guessNormal dir]
-           guesses = mkGuesses given_dir given_suf ++
-                     if real_path == given_path
-                     then []
-                     else mkGuesses real_dir real_suf
-       info verbosity $ "looking for tool " ++ toolname
-         ++ " near compiler in " ++ given_dir
-       debug verbosity $ "candidate locations: " ++ show guesses
-       exists <- mapM doesFileExist guesses
-       case [ file | (file, True) <- zip guesses exists ] of
-                   -- If we can't find it near ghc, fall back to the usual
-                   -- method.
-         []     -> programFindLocation tool verbosity searchpath
-         (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp
-                      let lookedAt = map fst
-                                   . takeWhile (\(_file, exist) -> not exist)
-                                   $ zip guesses exists
-                      return (Just (fp, lookedAt))
-
-  where takeVersionSuffix :: FilePath -> String
-        takeVersionSuffix = takeWhileEndLE isSuffixChar
-
-        isSuffixChar :: Char -> Bool
-        isSuffixChar c = isDigit c || c == '.'
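The lookup strategy above, restated as a minimal standalone sketch rather than Cabal's own code: the names versionSuffix, candidates and findTool are illustrative, and the canonicalizePath step and the platform's executable extension are omitted for brevity.

import Data.Char        (isDigit)
import System.Directory (doesFileExist)
import System.FilePath  ((</>), takeDirectory, takeFileName)

-- Trailing version suffix of a compiler binary, e.g. "-8.0.1" for "ghc-8.0.1"
-- and "" for a bare "ghc".
versionSuffix :: FilePath -> String
versionSuffix =
  reverse . takeWhile (\c -> isDigit c || c == '.' || c == '-') . reverse . takeFileName

-- Candidate locations for a companion tool (e.g. "ghc-pkg") next to ghc.
candidates :: String -> FilePath -> [FilePath]
candidates tool ghcPath =
  case versionSuffix ghcPath of
    ""  -> [dir </> tool]
    suf -> [ dir </> (tool ++ "-ghc" ++ suf)  -- ghc-pkg-ghc-8.0.1
           , dir </> (tool ++ suf)            -- ghc-pkg-8.0.1
           , dir </> tool ]                   -- ghc-pkg
  where dir = takeDirectory ghcPath

-- First candidate that exists on disk, if any; a real implementation would
-- fall back to an ordinary PATH search when nothing is found here.
findTool :: String -> FilePath -> IO (Maybe FilePath)
findTool tool ghcPath = go (candidates tool ghcPath)
  where
    go []       = return Nothing
    go (p : ps) = do exists <- doesFileExist p
                     if exists then return (Just p) else go ps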
|| c == '-' - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding ghc-pkg, we try looking for both a versioned and unversioned --- ghc-pkg in the same dir, that is: --- --- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg(.exe) --- -guessGhcPkgFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding hsc2hs, we try looking for both a versioned and unversioned --- hsc2hs in the same dir, that is: --- --- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe) --- > /usr/local/bin/hsc2hs-6.6.1(.exe) --- > /usr/local/bin/hsc2hs(.exe) --- -guessHsc2hsFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding haddock, we try looking for both a versioned and unversioned --- haddock in the same dir, that is: --- --- > /usr/local/bin/haddock-ghc-6.6.1(.exe) --- > /usr/local/bin/haddock-6.6.1(.exe) --- > /usr/local/bin/haddock(.exe) --- -guessHaddockFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram - -getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)] -getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg - where - Just version = programVersion ghcProg - implInfo = ghcVersionImplInfo version - --- | Given a single package DB, return all installed packages. -getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration - -> IO InstalledPackageIndex -getPackageDBContents verbosity packagedb conf = do - pkgss <- getInstalledPackages' verbosity [packagedb] conf - toPackageIndex verbosity pkgss conf - --- | Given a package DB stack, return all installed packages. -getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack - -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packagedbs conf = do - checkPackageDbEnvVar - checkPackageDbStack comp packagedbs - pkgss <- getInstalledPackages' verbosity packagedbs conf - index <- toPackageIndex verbosity pkgss conf - return $! hackRtsPackage index - - where - hackRtsPackage index = - case PackageIndex.lookupPackageName index (PackageName "rts") of - [(_,[rts])] - -> PackageIndex.insert (removeMingwIncludeDir rts) index - _ -> index -- No (or multiple) ghc rts package is registered!! - -- Feh, whatever, the ghc test suite does some crazy stuff. - --- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a --- @PackageIndex@. Helper function used by 'getPackageDBContents' and --- 'getInstalledPackages'. -toPackageIndex :: Verbosity - -> [(PackageDB, [InstalledPackageInfo])] - -> ProgramConfiguration - -> IO InstalledPackageIndex -toPackageIndex verbosity pkgss conf = do - -- On Windows, various fields have $topdir/foo rather than full - -- paths. We need to substitute the right value in so that when - -- we, for example, call gcc, we have proper paths to give it. - topDir <- getLibDir' verbosity ghcProg - let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) - | (_, pkgs) <- pkgss ] - return $! 
mconcat indices - - where - Just ghcProg = lookupProgram ghcProgram conf - -getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath -getLibDir verbosity lbi = - dropWhileEndLE isSpace `fmap` - rawSystemProgramStdoutConf verbosity ghcProgram - (withPrograms lbi) ["--print-libdir"] - -getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath -getLibDir' verbosity ghcProg = - dropWhileEndLE isSpace `fmap` - rawSystemProgramStdout verbosity ghcProg ["--print-libdir"] - - --- | Return the 'FilePath' to the global GHC package database. -getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath -getGlobalPackageDB verbosity ghcProg = - dropWhileEndLE isSpace `fmap` - rawSystemProgramStdout verbosity ghcProg ["--print-global-package-db"] - --- | Return the 'FilePath' to the per-user GHC package database. -getUserPackageDB :: Verbosity -> ConfiguredProgram -> Platform -> IO FilePath -getUserPackageDB _verbosity ghcProg (Platform arch os) = do - -- It's rather annoying that we have to reconstruct this, because ghc - -- hides this information from us otherwise. But for certain use cases - -- like change monitoring it really can't remain hidden. - appdir <- getAppUserDataDirectory "ghc" - return (appdir platformAndVersion packageConfFileName) - where - platformAndVersion = intercalate "-" [ Internal.showArchString arch - , Internal.showOsString os - , display ghcVersion ] - packageConfFileName - | ghcVersion >= Version [6,12] [] = "package.conf.d" - | otherwise = "package.conf" - Just ghcVersion = programVersion ghcProg - -checkPackageDbEnvVar :: IO () -checkPackageDbEnvVar = - Internal.checkPackageDbEnvVar "GHC" "GHC_PACKAGE_PATH" - -checkPackageDbStack :: Compiler -> PackageDBStack -> IO () -checkPackageDbStack comp = if flagPackageConf implInfo - then checkPackageDbStackPre76 - else checkPackageDbStackPost76 - where implInfo = ghcVersionImplInfo (compilerVersion comp) - -checkPackageDbStackPost76 :: PackageDBStack -> IO () -checkPackageDbStackPost76 (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStackPost76 rest - | GlobalPackageDB `elem` rest = - die $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" -checkPackageDbStackPost76 _ = return () - -checkPackageDbStackPre76 :: PackageDBStack -> IO () -checkPackageDbStackPre76 (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStackPre76 rest - | GlobalPackageDB `notElem` rest = - die $ "With current ghc versions the global package db is always used " - ++ "and must be listed first. This ghc limitation is lifted in GHC 7.6," - ++ "see http://hackage.haskell.org/trac/ghc/ticket/5977" -checkPackageDbStackPre76 _ = - die $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" - --- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This --- breaks when you want to use a different gcc, so we need to filter --- it out. -removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo -removeMingwIncludeDir pkg = - let ids = InstalledPackageInfo.includeDirs pkg - ids' = filter (not . ("mingw" `isSuffixOf`)) ids - in pkg { InstalledPackageInfo.includeDirs = ids' } - --- | Get the packages from specific PackageDBs, not cumulative. 
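The package-db stack rule enforced by checkPackageDbStackPost76 above (GHC 7.6 and later), as a small self-contained sketch; PackageDb is an illustrative stand-in for Cabal's PackageDB type.

data PackageDb = GlobalDb | UserDb | SpecificDb FilePath
  deriving (Eq, Show)

-- The global db may appear at most once, and only in the first position.
validStack :: [PackageDb] -> Either String ()
validStack (GlobalDb : rest)
  | GlobalDb `notElem` rest = Right ()
validStack stack
  | GlobalDb `elem` stack =
      Left "the global package db must be specified first and only once"
  | otherwise = Right ()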
--- -getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration - -> IO [(PackageDB, [InstalledPackageInfo])] -getInstalledPackages' verbosity packagedbs conf - | ghcVersion >= Version [6,9] [] = - sequence - [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb - return (packagedb, pkgs) - | packagedb <- packagedbs ] - - where - Just ghcProg = lookupProgram ghcProgram conf - Just ghcVersion = programVersion ghcProg - -getInstalledPackages' verbosity packagedbs conf = do - str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"] - let pkgFiles = [ init line | line <- lines str, last line == ':' ] - dbFile packagedb = case (packagedb, pkgFiles) of - (GlobalPackageDB, global:_) -> return $ Just global - (UserPackageDB, _global:user:_) -> return $ Just user - (UserPackageDB, _global:_) -> return $ Nothing - (SpecificPackageDB specific, _) -> return $ Just specific - _ -> die "cannot read ghc-pkg package listing" - pkgFiles' <- mapM dbFile packagedbs - sequence [ withFileContents file $ \content -> do - pkgs <- readPackages file content - return (db, pkgs) - | (db , Just file) <- zip packagedbs pkgFiles' ] - where - -- Depending on the version of ghc we use a different type's Read - -- instance to parse the package file and then convert. - -- It's a bit yuck. But that's what we get for using Read/Show. - readPackages - | ghcVersion >= Version [6,4,2] [] - = \file content -> case reads content of - [(pkgs, _)] -> return (map IPI642.toCurrent pkgs) - _ -> failToRead file - -- We dropped support for 6.4.2 and earlier. - | otherwise - = \file _ -> failToRead file - Just ghcProg = lookupProgram ghcProgram conf - Just ghcVersion = programVersion ghcProg - failToRead file = die $ "cannot read ghc package database " ++ file - -getInstalledPackagesMonitorFiles :: Verbosity -> Platform - -> ProgramConfiguration - -> [PackageDB] - -> IO [FilePath] -getInstalledPackagesMonitorFiles verbosity platform progdb = - mapM getPackageDBPath - where - getPackageDBPath :: PackageDB -> IO FilePath - getPackageDBPath GlobalPackageDB = - selectMonitorFile =<< getGlobalPackageDB verbosity ghcProg - - getPackageDBPath UserPackageDB = - selectMonitorFile =<< getUserPackageDB verbosity ghcProg platform - - getPackageDBPath (SpecificPackageDB path) = selectMonitorFile path - - -- GHC has old style file dbs, and new style directory dbs. - -- Note that for dir style dbs, we only need to monitor the cache file, not - -- the whole directory. The ghc program itself only reads the cache file - -- so it's safe to only monitor this one file. - selectMonitorFile path = do - isFileStyle <- doesFileExist path - if isFileStyle then return path - else return (path "package.cache") - - Just ghcProg = lookupProgram ghcProgram progdb - - --- ----------------------------------------------------------------------------- --- Building - --- | Build a library with GHC. 
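The file-versus-directory distinction used by getInstalledPackagesMonitorFiles above, as a short sketch: old GHCs keep a single package.conf file, newer ones a package.conf.d directory whose contents are summarised in package.cache, so one path per database is enough to watch. The helper name is illustrative.

import System.Directory (doesFileExist)
import System.FilePath  ((</>))

monitorFileFor :: FilePath -> IO FilePath
monitorFileFor dbPath = do
  isFileStyle <- doesFileExist dbPath
  return $ if isFileStyle
             then dbPath                      -- old single-file db
             else dbPath </> "package.cache"  -- dir-style db: watch the cache only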
--- -buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib = buildOrReplLib False -replLib = buildOrReplLib True - -buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do - let libName = componentUnitId clbi - libTargetDir - | componentUnitId clbi == localUnitId lbi = buildDir lbi - | otherwise = buildDir lbi display libName - whenVanillaLib forceVanilla = - when (forceVanilla || withVanillaLib lbi) - whenProfLib = when (withProfLib lbi) - whenSharedLib forceShared = - when (forceShared || withSharedLib lbi) - whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi) - ifReplLib = when forRepl - comp = compiler lbi - ghcVersion = compilerVersion comp - implInfo = getImplInfo comp - platform@(Platform _hostArch hostOS) = hostPlatform lbi - - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - let runGhcProg = runGHC verbosity ghcProg comp platform - - libBi <- hackThreadedFlag verbosity - comp (withProfLib lbi) (libBuildInfo lib) - - let isGhcDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi - forceVanillaLib = doingTH && not isGhcDynamic - forceSharedLib = doingTH && isGhcDynamic - -- TH always needs default libs, even when building for profiling - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. - let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi - -- Component name. Not 'libName' because that has the "HS" prefix - -- that GHC gives Haskell libraries. - cname = display $ PD.package $ localPkgDescr lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | forRepl = Mon.mempty -- HPC is not supported in ghci - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname - | otherwise = mempty - - createDirectoryIfMissingVerbose verbosity True libTargetDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? 
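A sketch of the hpcdir selection just above; Maybe stands in for Cabal's Flag type and the booleans for the LocalBuildInfo queries.

hpcMixDir :: Bool       -- building for the repl
          -> Bool       -- coverage enabled
          -> FilePath   -- per-way .mix directory
          -> Maybe FilePath
hpcMixDir forRepl coverageEnabled wayMixDir
  | forRepl         = Nothing      -- HPC is not supported in ghci
  | coverageEnabled = Just wayMixDir
  | otherwise       = Nothing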
- let cObjs = map (`replaceExtension` objExtension) (cSources libBi) - baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir - vanillaOpts = baseOpts `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptNumJobs = numJobs, - ghcOptInputModules = toNubListR $ libModules lib, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - - profOpts = vanillaOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag True - (withProfLibDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = toNubListR $ hcProfOptions GHC libBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - - sharedOpts = vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi, - ghcOptLinkLibs = toNubListR $ extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ - PD.frameworks libBi, - ghcOptLinkFrameworkDirs = toNubListR $ - PD.extraFrameworkDirs libBi, - ghcOptInputFiles = toNubListR - [libTargetDir x | x <- cObjs] - } - replOpts = vanillaOpts { - ghcOptExtra = overNubListR - Internal.filterGhciFlags $ - ghcOptExtra vanillaOpts, - ghcOptNumJobs = mempty - } - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - - vanillaSharedOpts = vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - - unless (forRepl || null (libModules lib)) $ - do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts) - shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts) - useDynToo = dynamicTooSupported && - (forceVanillaLib || withVanillaLib lbi) && - (forceSharedLib || withSharedLib lbi) && - null (hcSharedOptions GHC libBi) - if useDynToo - then do - runGhcProg vanillaSharedOpts - case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of - (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> - -- When the vanilla and shared library builds are done - -- in one pass, only one set of HPC module interfaces - -- are generated. This set should suffice for both - -- static and dynamically linked executables. We copy - -- the modules interfaces so they are available under - -- both ways. - copyDirectoryRecursive verbosity dynDir vanillaDir - _ -> return () - else if isGhcDynamic - then do shared; vanilla - else do vanilla; shared - whenProfLib (runGhcProg profOpts) - - -- build any C sources - unless (null (cSources libBi)) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo - lbi libBi clbi libTargetDir filename - vanillaCcOpts = if isGhcDynamic - -- Dynamic GHC requires C sources to be built - -- with -fPIC for REPL to work. See #2207. 
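The way-selection logic above (one -dynamic-too pass versus two separate passes, and in which order they run), restated as an illustrative decision function; the booleans are stand-ins for the compiler probes and LocalBuildInfo flags, not Cabal's API.

data LibBuildPlan = OnePassDynamicToo | SharedThenVanilla | VanillaThenShared
  deriving Show

planLibBuild :: Bool  -- GHC is dynamic by default
             -> Bool  -- GHC supports -dynamic-too
             -> Bool  -- vanilla way wanted (or forced by TH)
             -> Bool  -- shared way wanted (or forced by TH)
             -> Bool  -- per-way shared-only GHC options present
             -> LibBuildPlan
planLibBuild ghcDynamic dynTooOk vanilla shared extraSharedOpts
  | dynTooOk && vanilla && shared && not extraSharedOpts = OnePassDynamicToo
  | ghcDynamic                                           = SharedThenVanilla
  | otherwise                                            = VanillaThenShared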
- then baseCcOpts { ghcOptFPic = toFlag True } - else baseCcOpts - profCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptObjSuffix = toFlag "p_o" - } - sharedCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptObjSuffix = toFlag "dyn_o" - } - odir = fromFlag (ghcOptObjDir vanillaCcOpts) - createDirectoryIfMissingVerbose verbosity True odir - let runGhcProgIfNeeded ccOpts = do - needsRecomp <- checkNeedsRecompilation filename ccOpts - when needsRecomp $ runGhcProg ccOpts - runGhcProgIfNeeded vanillaCcOpts - unless forRepl $ - whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts) - unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts) - | filename <- cSources libBi] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - - ifReplLib $ do - when (null (libModules lib)) $ warn verbosity "No exposed modules" - ifReplLib (runGhcProg replOpts) - - -- link: - unless forRepl $ do - info verbosity "Linking..." - let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) - (cSources libBi) - cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) - (cSources libBi) - cid = compilerId (compiler lbi) - vanillaLibFilePath = libTargetDir mkLibName libName - profileLibFilePath = libTargetDir mkProfLibName libName - sharedLibFilePath = libTargetDir mkSharedLibName cid libName - ghciLibFilePath = libTargetDir Internal.mkGHCiLibName libName - libInstallPath = libdir $ absoluteInstallDirs pkg_descr lbi NoCopyDest - sharedLibInstallPath = libInstallPath mkSharedLibName cid libName - - stubObjs <- catMaybes <$> sequence - [ findFileWithExtension [objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files - , x <- libModules lib ] - stubProfObjs <- catMaybes <$> sequence - [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files - , x <- libModules lib ] - stubSharedObjs <- catMaybes <$> sequence - [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files - , x <- libModules lib ] - - hObjs <- Internal.getHaskellObjects implInfo lib lbi - libTargetDir objExtension True - hProfObjs <- - if withProfLib lbi - then Internal.getHaskellObjects implInfo lib lbi - libTargetDir ("p_" ++ objExtension) True - else return [] - hSharedObjs <- - if withSharedLib lbi - then Internal.getHaskellObjects implInfo lib lbi - libTargetDir ("dyn_" ++ objExtension) False - else return [] - - unless (null hObjs && null cObjs && null stubObjs) $ do - rpaths <- getRPaths lbi clbi - - let staticObjectFiles = - hObjs - ++ map (libTargetDir ) cObjs - ++ stubObjs - profObjectFiles = - hProfObjs - ++ map (libTargetDir ) cProfObjs - ++ stubProfObjs - ghciObjFiles = - hObjs - ++ map (libTargetDir ) cObjs - ++ stubObjs - dynamicObjectFiles = - hSharedObjs - ++ map (libTargetDir ) cSharedObjs - ++ stubSharedObjs - -- After the relocation lib is created we invoke ghc -shared - -- with the dependencies spelled out as -package arguments - -- and ghc invokes the linker with the proper library paths - ghcSharedLinkArgs = - mempty { - ghcOptShared = toFlag True, - ghcOptDynLinkMode = toFlag 
GhcDynamicOnly, - ghcOptInputFiles = toNubListR dynamicObjectFiles, - ghcOptOutputFile = toFlag sharedLibFilePath, - ghcOptExtra = toNubListR $ - hcSharedOptions GHC libBi, - -- For dynamic libs, Mac OS/X needs to know the install location - -- at build time. This only applies to GHC < 7.8 - see the - -- discussion in #1660. - ghcOptDylibName = if hostOS == OSX - && ghcVersion < Version [7,8] [] - then toFlag sharedLibInstallPath - else mempty, - ghcOptHideAllPackages = toFlag True, - ghcOptNoAutoLinkPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ - Internal.mkGhcOptPackages clbi , - ghcOptLinkLibs = toNubListR $ extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, - ghcOptLinkFrameworkDirs = - toNubListR $ PD.extraFrameworkDirs libBi, - ghcOptRPaths = rpaths - } - - info verbosity (show (ghcOptPackages ghcSharedLinkArgs)) - - whenVanillaLib False $ - Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles - - whenProfLib $ - Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles - - whenGHCiLib $ do - (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) - Ld.combineObjectFiles verbosity ldProg - ghciLibFilePath ghciObjFiles - - whenSharedLib False $ - runGhcProg ghcSharedLinkArgs - --- | Start a REPL without loading any source files. -startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity conf comp platform packageDBs = do - let replOpts = mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptPackageDBs = packageDBs - } - checkPackageDbStack comp packageDBs - (ghcProg, _) <- requireProgram verbosity ghcProgram conf - runGHC verbosity ghcProg comp platform replOpts - --- | Build an executable with GHC. --- -buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe = buildOrReplExe False -replExe = buildOrReplExe True - -buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi - exe@Executable { exeName = exeName', modulePath = modPath } clbi = do - - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - let comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - runGhcProg = runGHC verbosity ghcProg comp platform - - exeBi <- hackThreadedFlag verbosity - comp (withProfExe lbi) (buildInfo exe) - - -- exeNameReal, the name that GHC really uses (with .exe on Windows) - let exeNameReal = exeName' <.> - (if takeExtension exeName' /= ('.':exeExtension) - then exeExtension - else "") - - let targetDir = buildDir lbi exeName' - let exeDir = targetDir (exeName' ++ "-tmp") - createDirectoryIfMissingVerbose verbosity True targetDir - createDirectoryIfMissingVerbose verbosity True exeDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? FIX: what about exeName.hi-boot? - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. 
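The exeNameReal computation above, as a tiny standalone helper; here the extension string is a parameter rather than Cabal's exeExtension constant.

import System.FilePath ((<.>), takeExtension)

-- Append the platform's executable extension unless the name already carries
-- it, e.g. withExeExtension "exe" "cabal" == "cabal.exe" on Windows and
-- withExeExtension "" "cabal" == "cabal" elsewhere.
withExeExtension :: String -> FilePath -> FilePath
withExeExtension ext name
  | takeExtension name == '.' : ext = name
  | otherwise                       = name <.> ext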
- let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | forRepl = mempty -- HPC is not supported in ghci - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName' - | otherwise = mempty - - -- build executables - - srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath - rpaths <- getRPaths lbi clbi - - let isGhcDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] - cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] - cObjs = map (`replaceExtension` objExtension) cSrcs - baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) - `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptInputFiles = toNubListR - [ srcMainFile | isHaskellMain], - ghcOptInputModules = toNubListR - [ m | not isHaskellMain, m <- exeModules exe] - } - staticOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticOnly, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - profOpts = baseOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag False - (withProfExeDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = toNubListR - (hcProfOptions GHC exeBi), - ghcOptHPCDir = hpcdir Hpc.Prof - } - dynOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = toNubListR $ - hcSharedOptions GHC exeBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - dynTooOpts = staticOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi, - ghcOptLinkLibs = toNubListR $ extraLibs exeBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, - ghcOptLinkFrameworks = toNubListR $ - PD.frameworks exeBi, - ghcOptLinkFrameworkDirs = toNubListR $ - PD.extraFrameworkDirs exeBi, - ghcOptInputFiles = toNubListR - [exeDir x | x <- cObjs] - } - dynLinkerOpts = mempty { - ghcOptRPaths = rpaths - } - replOpts = baseOpts { - ghcOptExtra = overNubListR - Internal.filterGhciFlags - (ghcOptExtra baseOpts) - } - -- For a normal compile we do separate invocations of ghc for - -- compiling as for linking. But for repl we have to do just - -- the one invocation, so that one has to include all the - -- linker stuff too, like -l flags and any .o files from C - -- files etc. - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - commonOpts | withProfExe lbi = profOpts - | withDynExe lbi = dynOpts - | otherwise = staticOpts - compileOpts | useDynToo = dynTooOpts - | otherwise = commonOpts - withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) - - -- For building exe's that use TH with -prof or -dynamic we actually have - -- to build twice, once without -prof/-dynamic and then again with - -- -prof/-dynamic. This is because the code that TH needs to run at - -- compile time needs to be the vanilla ABI so it can be loaded up and run - -- by the compiler. - -- With dynamic-by-default GHC the TH object files loaded at compile-time - -- need to be .dyn_o instead of .o. 
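The "build twice for Template Haskell" rule explained in the comment above, as a sketch; the booleans are illustrative stand-ins for the LocalBuildInfo queries and the repl case is omitted.

needsSeparateTHBuild :: Bool  -- component uses TemplateHaskell
                     -> Bool  -- GHC is dynamic by default
                     -> Bool  -- this build already uses -dynamic-too
                     -> Bool  -- building a profiled executable
                     -> Bool  -- building a dynamic executable
                     -> Bool
needsSeparateTHBuild doingTH ghcDynamic usingDynToo profExe dynExe
  | not doingTH || usingDynToo = False
  -- a dynamic GHC loads .dyn_o at compile time; only profiled or static
  -- executables lack those objects and need the extra pass
  | ghcDynamic                 = profExe || staticExe
  -- a static GHC loads plain .o; only profiled or dynamic executables
  -- lack those objects and need the extra pass
  | otherwise                  = profExe || dynExe
  where staticExe = not profExe && not dynExe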
- doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi - -- Should we use -dynamic-too instead of compiling twice? - useDynToo = dynamicTooSupported && isGhcDynamic - && doingTH && withStaticExe - && null (hcSharedOptions GHC exeBi) - compileTHOpts | isGhcDynamic = dynOpts - | otherwise = staticOpts - compileForTH - | forRepl = False - | useDynToo = False - | isGhcDynamic = doingTH && (withProfExe lbi || withStaticExe) - | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) - - linkOpts = - commonOpts `mappend` - linkerOpts `mappend` - mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } `mappend` - (if withDynExe lbi then dynLinkerOpts else mempty) - - -- Build static/dynamic object files for TH, if needed. - when compileForTH $ - runGhcProg compileTHOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - unless forRepl $ - runGhcProg compileOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - -- build any C sources - unless (null cSrcs) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi - clbi exeDir filename) `mappend` mempty { - ghcOptDynLinkMode = toFlag (if withDynExe lbi - then GhcDynamicOnly - else GhcStaticOnly), - ghcOptProfilingMode = toFlag (withProfExe lbi) - } - odir = fromFlag (ghcOptObjDir opts) - createDirectoryIfMissingVerbose verbosity True odir - needsRecomp <- checkNeedsRecompilation filename opts - when needsRecomp $ - runGhcProg opts - | filename <- cSrcs ] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - when forRepl $ runGhcProg replOpts - - -- link: - unless forRepl $ do - info verbosity "Linking..." - runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } - --- | Returns True if the modification date of the given source file is newer than --- the object file we last compiled for it, or if no object file exists yet. -checkNeedsRecompilation :: FilePath -> GhcOptions -> IO Bool -checkNeedsRecompilation filename opts = filename `moreRecentFile` oname - where oname = getObjectFileName filename opts - --- | Finds the object file name of the given source file -getObjectFileName :: FilePath -> GhcOptions -> FilePath -getObjectFileName filename opts = oname - where odir = fromFlag (ghcOptObjDir opts) - oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts) - oname = odir replaceExtension filename oext - --- | Calculate the RPATHs for the component we are building. --- --- Calculates relative RPATHs when 'relocatable' is set. -getRPaths :: LocalBuildInfo - -> ComponentLocalBuildInfo -- ^ Component we are building - -> IO (NubListR FilePath) -getRPaths lbi clbi | supportRPaths hostOS = do - libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi - let hostPref = case hostOS of - OSX -> "@loader_path" - _ -> "$ORIGIN" - relPath p = if isRelative p then hostPref p else p - rpaths = toNubListR (map relPath libraryPaths) - return rpaths - where - (Platform _ hostOS) = hostPlatform lbi - - -- The list of RPath-supported operating systems below reflects the - -- platforms on which Cabal's RPATH handling is tested. It does _NOT_ - -- reflect whether the OS supports RPATH. - - -- E.g. when this comment was written, the *BSD operating systems were - -- untested with regards to Cabal RPATH handling, and were hence set to - -- 'False', while those operating systems themselves do support RPATH. 
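The relative-RPATH rewriting in getRPaths above, as a minimal sketch; RPathOS is an illustrative stand-in for Cabal's OS type, and the per-OS support table follows below.

import System.FilePath ((</>), isRelative)

data RPathOS = MacOS | OtherElf   -- stand-in for Cabal's OS type

-- The dynamic linker expands @loader_path on OS X and $ORIGIN on the ELF
-- platforms; absolute entries are kept as-is.
relativeRPath :: RPathOS -> FilePath -> FilePath
relativeRPath os p
  | isRelative p = prefix </> p
  | otherwise    = p
  where
    prefix = case os of
               MacOS    -> "@loader_path"
               OtherElf -> "$ORIGIN"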
- supportRPaths Linux   = True - supportRPaths Windows = False - supportRPaths OSX   = True - supportRPaths FreeBSD   = False - supportRPaths OpenBSD   = False - supportRPaths NetBSD   = False - supportRPaths DragonFly = False - supportRPaths Solaris = False - supportRPaths AIX = False - supportRPaths HPUX = False - supportRPaths IRIX = False - supportRPaths HaLVM = False - supportRPaths IOS = False - supportRPaths Android = False - supportRPaths Ghcjs = False - supportRPaths Hurd = False - supportRPaths (OtherOS _) = False - -- Do _not_ add a default case so that we get a warning here when a new OS - -- is added. - -getRPaths _ _ = return mempty - --- | Filter the "-threaded" flag when profiling as it does not --- work with ghc-6.8 and older. -hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo -hackThreadedFlag verbosity comp prof bi - | not mustFilterThreaded = return bi - | otherwise = do - warn verbosity $ "The ghc flag '-threaded' is not compatible with " - ++ "profiling in ghc-6.8 and older. It will be disabled." - return bi { options = filterHcOptions (/= "-threaded") (options bi) } - where - mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] [] - && "-threaded" `elem` hcOptions GHC bi - filterHcOptions p hcoptss = - [ (hc, if hc == GHC then filter p opts else opts) - | (hc, opts) <- hcoptss ] - - --- | Extracts a String representing a hash of the ABI of a built --- library. It can fail if the library has not yet been built. --- -libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO String -libAbiHash verbosity _pkg_descr lbi lib clbi = do - libBi <- hackThreadedFlag verbosity - (compiler lbi) (withProfLib lbi) (libBuildInfo lib) - let - comp = compiler lbi - platform = hostPlatform lbi - vanillaArgs = - (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) - `mappend` mempty { - ghcOptMode = toFlag GhcModeAbiHash, - ghcOptInputModules = toNubListR $ exposedModules lib - } - sharedArgs = vanillaArgs `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi - } - profArgs = vanillaArgs `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag True - (withProfLibDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = toNubListR $ hcProfOptions GHC libBi - } - ghcArgs - | withVanillaLib lbi = vanillaArgs - | withSharedLib lbi = sharedArgs - | withProfLib lbi = profArgs - | otherwise = error "libAbiHash: Can't find an enabled library way" - - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - hash <- getProgramInvocationOutput verbosity - (ghcInvocation ghcProg comp platform ghcArgs) - return (takeWhile (not . 
isSpace) hash) - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions = Internal.componentGhcOptions - -componentCcGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath - -> GhcOptions -componentCcGhcOptions verbosity lbi = - Internal.componentCcGhcOptions verbosity implInfo lbi - where - comp = compiler lbi - implInfo = getImplInfo comp - --- ----------------------------------------------------------------------------- --- Installing - --- |Install executables for GHC. -installExe :: Verbosity - -> LocalBuildInfo - -> InstallDirs FilePath -- ^Where to copy the files to - -> FilePath -- ^Build location - -> (FilePath, FilePath) -- ^Executable (prefix,suffix) - -> PackageDescription - -> Executable - -> IO () -installExe verbosity lbi installDirs buildPref - (progprefix, progsuffix) _pkg exe = do - let binDir = bindir installDirs - createDirectoryIfMissingVerbose verbosity True binDir - let exeFileName = exeName exe <.> exeExtension - fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix - installBinary dest = do - installExecutableFile verbosity - (buildPref exeName exe exeFileName) - (dest <.> exeExtension) - when (stripExes lbi) $ - Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi) - (dest <.> exeExtension) - installBinary (binDir fixedExeBaseName) - --- |Install for ghc, .hi, .a and, if --with-ghci given, .o -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do - -- copy .hi files over: - whenVanilla $ copyModuleFiles "hi" - whenProf $ copyModuleFiles "p_hi" - whenShared $ copyModuleFiles "dyn_hi" - - -- copy the built library files over: - whenVanilla $ installOrdinary builtDir targetDir vanillaLibName - whenProf $ installOrdinary builtDir targetDir profileLibName - whenGHCi $ installOrdinary builtDir targetDir ghciLibName - whenShared $ installShared builtDir dynlibTargetDir sharedLibName - - where - install isShared srcDir dstDir name = do - let src = srcDir name - dst = dstDir name - createDirectoryIfMissingVerbose verbosity True dstDir - - if isShared - then installExecutableFile verbosity src dst - else installOrdinaryFile verbosity src dst - - when (stripLibs lbi) $ Strip.stripLib verbosity - (hostPlatform lbi) (withPrograms lbi) dst - - installOrdinary = install False - installShared = install True - - copyModuleFiles ext = - findModuleFiles [builtDir] [ext] (libModules lib) - >>= installOrdinaryFiles verbosity targetDir - - cid = compilerId (compiler lbi) - libName = componentUnitId clbi - vanillaLibName = mkLibName libName - profileLibName = mkProfLibName libName - ghciLibName = Internal.mkGHCiLibName libName - sharedLibName = (mkSharedLibName cid) libName - - hasLib = not $ null (libModules lib) - && null (cSources (libBuildInfo lib)) - whenVanilla = when (hasLib && withVanillaLib lbi) - whenProf = when (hasLib && withProfLib lbi) - whenGHCi = when (hasLib && withGHCiLib lbi) - whenShared = when (hasLib && withSharedLib lbi) - --- ----------------------------------------------------------------------------- --- Registering - -hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo -hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg - 
, HcPkg.noPkgDbStack = v < [6,9] - , HcPkg.noVerboseFlag = v < [6,11] - , HcPkg.flagPackageConf = v < [7,5] - , HcPkg.supportsDirDbs = v >= [6,8] - , HcPkg.requiresDirDbs = v >= [7,10] - , HcPkg.nativeMultiInstance = v >= [7,10] - , HcPkg.recacheMultiInstance = v >= [6,12] - } - where - v = versionBranch ver - Just ghcPkgProg = lookupProgram ghcPkgProgram conf - Just ver = programVersion ghcPkgProg - -registerPackage - :: Verbosity - -> ProgramConfiguration - -> Bool - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo - | multiInstance - = HcPkg.registerMultiInstance (hcPkgInfo progdb) verbosity - packageDbs installedPkgInfo - - | otherwise - = HcPkg.reregister (hcPkgInfo progdb) verbosity - packageDbs (Right installedPkgInfo) - -pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath -pkgRoot verbosity lbi = pkgRoot' - where - pkgRoot' GlobalPackageDB = - let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi) - in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg) - pkgRoot' UserPackageDB = do - appDir <- getAppUserDataDirectory "ghc" - let ver = compilerVersion (compiler lbi) - subdir = System.Info.arch ++ '-':System.Info.os - ++ '-':showVersion ver - rootDir = appDir subdir - -- We must create the root directory for the user package database if it - -- does not yet exists. Otherwise '${pkgroot}' will resolve to a - -- directory at the time of 'ghc-pkg register', and registration will - -- fail. - createDirectoryIfMissing True rootDir - return rootDir - pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp) - --- ----------------------------------------------------------------------------- --- Utils - -isDynamic :: Compiler -> Bool -isDynamic = Internal.ghcLookupProperty "GHC Dynamic" - -supportsDynamicToo :: Compiler -> Bool -supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHCJS.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHCJS.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/GHCJS.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/GHCJS.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,879 +0,0 @@ -module Distribution.Simple.GHCJS ( - configure, getInstalledPackages, getPackageDBContents, - buildLib, buildExe, - replLib, replExe, - startInterpreter, - installLib, installExe, - libAbiHash, - hcPkgInfo, - registerPackage, - componentGhcOptions, - getLibDir, - isDynamic, - getGlobalPackageDB, - runCmd - ) where - -import Distribution.Simple.GHC.ImplInfo -import qualified Distribution.Simple.GHC.Internal as Internal -import Distribution.PackageDescription as PD -import Distribution.InstalledPackageInfo -import Distribution.Package -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import qualified Distribution.Simple.Hpc as Hpc -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Simple.Program -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import qualified Distribution.Simple.Program.Ar as Ar -import qualified Distribution.Simple.Program.Ld as Ld -import qualified Distribution.Simple.Program.Strip as Strip 
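The per-user package root computed by pkgRoot above, as a standalone sketch; in the real code arch and os come from System.Info, and userPkgRoot is an illustrative name.

import Data.List        (intercalate)
import Data.Version     (Version, showVersion)
import System.Directory (createDirectoryIfMissing, getAppUserDataDirectory)
import System.FilePath  ((</>))

-- e.g. userPkgRoot "x86_64" "linux" ghcVer yields ~/.ghc/x86_64-linux-8.0.1,
-- created on demand so that ${pkgroot} resolves at registration time.
userPkgRoot :: String -> String -> Version -> IO FilePath
userPkgRoot arch os ghcVer = do
  appDir <- getAppUserDataDirectory "ghc"
  let rootDir = appDir </> intercalate "-" [arch, os, showVersion ghcVer]
  createDirectoryIfMissing True rootDir
  return rootDir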
-import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup hiding ( Flag ) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Compiler hiding ( Flag ) -import Distribution.Version -import Distribution.System -import Distribution.Verbosity -import Distribution.Utils.NubList -import Distribution.Text -import Language.Haskell.Extension - -import Control.Monad ( unless, when ) -import Data.Char ( isSpace ) -import qualified Data.Map as M ( fromList ) -import Data.Monoid as Mon ( Monoid(..) ) -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, replaceExtension ) - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration - -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity hcPath hcPkgPath conf0 = do - (ghcjsProg, ghcjsVersion, conf1) <- - requireProgramVersion verbosity ghcjsProgram - (orLaterVersion (Version [0,1] [])) - (userMaybeSpecifyPath "ghcjs" hcPath conf0) - Just ghcjsGhcVersion <- findGhcjsGhcVersion verbosity (programPath ghcjsProg) - let implInfo = ghcjsVersionImplInfo ghcjsVersion ghcjsGhcVersion - - -- This is slightly tricky, we have to configure ghcjs first, then we use the - -- location of ghcjs to help find ghcjs-pkg in the case that the user did not - -- specify the location of ghc-pkg directly: - (ghcjsPkgProg, ghcjsPkgVersion, conf2) <- - requireProgramVersion verbosity ghcjsPkgProgram { - programFindLocation = guessGhcjsPkgFromGhcjsPath ghcjsProg - } - anyVersion (userMaybeSpecifyPath "ghcjs-pkg" hcPkgPath conf1) - - Just ghcjsPkgGhcjsVersion <- findGhcjsPkgGhcjsVersion - verbosity (programPath ghcjsPkgProg) - - when (ghcjsVersion /= ghcjsPkgGhcjsVersion) $ die $ - "Version mismatch between ghcjs and ghcjs-pkg: " - ++ programPath ghcjsProg ++ " is version " ++ display ghcjsVersion ++ " " - ++ programPath ghcjsPkgProg ++ " is version " ++ display ghcjsPkgGhcjsVersion - - when (ghcjsGhcVersion /= ghcjsPkgVersion) $ die $ - "Version mismatch between ghcjs and ghcjs-pkg: " - ++ programPath ghcjsProg - ++ " was built with GHC version " ++ display ghcjsGhcVersion ++ " " - ++ programPath ghcjsPkgProg - ++ " was built with GHC version " ++ display ghcjsPkgVersion - - -- be sure to use our versions of hsc2hs, c2hs, haddock and ghc - let hsc2hsProgram' = - hsc2hsProgram { programFindLocation = - guessHsc2hsFromGhcjsPath ghcjsProg } - c2hsProgram' = - c2hsProgram { programFindLocation = - guessC2hsFromGhcjsPath ghcjsProg } - - haddockProgram' = - haddockProgram { programFindLocation = - guessHaddockFromGhcjsPath ghcjsProg } - conf3 = addKnownPrograms [ hsc2hsProgram', c2hsProgram', haddockProgram' ] conf2 - - languages <- Internal.getLanguages verbosity implInfo ghcjsProg - extensions <- Internal.getExtensions verbosity implInfo ghcjsProg - - ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcjsProg - let ghcInfoMap = M.fromList ghcInfo - - let comp = Compiler { - compilerId = CompilerId GHCJS ghcjsVersion, - compilerAbiTag = AbiTag $ - "ghc" ++ intercalate "_" (map show . 
versionBranch $ ghcjsGhcVersion), - compilerCompat = [CompilerId GHC ghcjsGhcVersion], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = ghcInfoMap - } - compPlatform = Internal.targetPlatform ghcInfo - -- configure gcc and ld - let conf4 = if ghcjsNativeToo comp - then Internal.configureToolchain implInfo - ghcjsProg ghcInfoMap conf3 - else conf3 - return (comp, compPlatform, conf4) - -ghcjsNativeToo :: Compiler -> Bool -ghcjsNativeToo = Internal.ghcLookupProperty "Native Too" - -guessGhcjsPkgFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessGhcjsPkgFromGhcjsPath = guessToolFromGhcjsPath ghcjsPkgProgram - -guessHsc2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessHsc2hsFromGhcjsPath = guessToolFromGhcjsPath hsc2hsProgram - -guessC2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessC2hsFromGhcjsPath = guessToolFromGhcjsPath c2hsProgram - -guessHaddockFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessHaddockFromGhcjsPath = guessToolFromGhcjsPath haddockProgram - -guessToolFromGhcjsPath :: Program -> ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessToolFromGhcjsPath tool ghcjsProg verbosity searchpath - = do let toolname = programName tool - path = programPath ghcjsProg - dir = takeDirectory path - versionSuffix = takeVersionSuffix (dropExeExtension path) - guessNormal = dir toolname <.> exeExtension - guessGhcjsVersioned = dir (toolname ++ "-ghcjs" ++ versionSuffix) - <.> exeExtension - guessGhcjs = dir (toolname ++ "-ghcjs") - <.> exeExtension - guessVersioned = dir (toolname ++ versionSuffix) <.> exeExtension - guesses | null versionSuffix = [guessGhcjs, guessNormal] - | otherwise = [guessGhcjsVersioned, - guessGhcjs, - guessVersioned, - guessNormal] - info verbosity $ "looking for tool " ++ toolname - ++ " near compiler in " ++ dir - exists <- mapM doesFileExist guesses - case [ file | (file, True) <- zip guesses exists ] of - -- If we can't find it near ghc, fall back to the usual - -- method. - [] -> programFindLocation tool verbosity searchpath - (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp - let lookedAt = map fst - . takeWhile (\(_file, exist) -> not exist) - $ zip guesses exists - return (Just (fp, lookedAt)) - - where takeVersionSuffix :: FilePath -> String - takeVersionSuffix = reverse . takeWhile (`elem ` "0123456789.-") . - reverse - --- | Given a single package DB, return all installed packages. -getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration - -> IO InstalledPackageIndex -getPackageDBContents verbosity packagedb conf = do - pkgss <- getInstalledPackages' verbosity [packagedb] conf - toPackageIndex verbosity pkgss conf - --- | Given a package DB stack, return all installed packages. -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity packagedbs conf = do - checkPackageDbEnvVar - checkPackageDbStack packagedbs - pkgss <- getInstalledPackages' verbosity packagedbs conf - index <- toPackageIndex verbosity pkgss conf - return $! 
index - -toPackageIndex :: Verbosity - -> [(PackageDB, [InstalledPackageInfo])] - -> ProgramConfiguration - -> IO InstalledPackageIndex -toPackageIndex verbosity pkgss conf = do - -- On Windows, various fields have $topdir/foo rather than full - -- paths. We need to substitute the right value in so that when - -- we, for example, call gcc, we have proper paths to give it. - topDir <- getLibDir' verbosity ghcjsProg - let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) - | (_, pkgs) <- pkgss ] - return $! (mconcat indices) - - where - Just ghcjsProg = lookupProgram ghcjsProgram conf - -checkPackageDbEnvVar :: IO () -checkPackageDbEnvVar = - Internal.checkPackageDbEnvVar "GHCJS" "GHCJS_PACKAGE_PATH" - -checkPackageDbStack :: PackageDBStack -> IO () -checkPackageDbStack (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStack rest - | GlobalPackageDB `notElem` rest = - die $ "With current ghc versions the global package db is always used " - ++ "and must be listed first. This ghc limitation may be lifted in " - ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977" -checkPackageDbStack _ = - die $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" - -getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration - -> IO [(PackageDB, [InstalledPackageInfo])] -getInstalledPackages' verbosity packagedbs conf = - sequence - [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb - return (packagedb, pkgs) - | packagedb <- packagedbs ] - -getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath -getLibDir verbosity lbi = - (reverse . dropWhile isSpace . reverse) `fmap` - rawSystemProgramStdoutConf verbosity ghcjsProgram - (withPrograms lbi) ["--print-libdir"] - -getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath -getLibDir' verbosity ghcjsProg = - (reverse . dropWhile isSpace . reverse) `fmap` - rawSystemProgramStdout verbosity ghcjsProg ["--print-libdir"] - --- | Return the 'FilePath' to the global GHC package database. -getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath -getGlobalPackageDB verbosity ghcjsProg = - (reverse . dropWhile isSpace . 
reverse) `fmap` - rawSystemProgramStdout verbosity ghcjsProg ["--print-global-package-db"] - -toJSLibName :: String -> String -toJSLibName lib - | takeExtension lib `elem` [".dll",".dylib",".so"] - = replaceExtension lib "js_so" - | takeExtension lib == ".a" = replaceExtension lib "js_a" - | otherwise = lib <.> "js_a" - -buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo - -> IO () -buildLib = buildOrReplLib False -replLib = buildOrReplLib True - -buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildOrReplLib forRepl verbosity numJobs _pkg_descr lbi lib clbi = do - let libName = componentUnitId clbi - libTargetDir = buildDir lbi - whenVanillaLib forceVanilla = - when (not forRepl && (forceVanilla || withVanillaLib lbi)) - whenProfLib = when (not forRepl && withProfLib lbi) - whenSharedLib forceShared = - when (not forRepl && (forceShared || withSharedLib lbi)) - whenGHCiLib = when (not forRepl && withGHCiLib lbi && withVanillaLib lbi) - ifReplLib = when forRepl - comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - nativeToo = ghcjsNativeToo comp - - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - let runGhcjsProg = runGHC verbosity ghcjsProg comp platform - libBi = libBuildInfo lib - isGhcjsDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi - forceVanillaLib = doingTH && not isGhcjsDynamic - forceSharedLib = doingTH && isGhcjsDynamic - -- TH always needs default libs, even when building for profiling - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. - let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi - -- Component name. Not 'libName' because that has the "HS" prefix - -- that GHC gives Haskell libraries. - cname = display $ PD.package $ localPkgDescr lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname - | otherwise = Mon.mempty - - createDirectoryIfMissingVerbose verbosity True libTargetDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? 
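The toJSLibName mapping above, restated with worked examples; this sketch assumes only what the code itself encodes, namely that GHCJS expects .js_so and .js_a names alongside the native library names.

import System.FilePath ((<.>), replaceExtension, takeExtension)

-- "libfoo.so"  ==> "libfoo.js_so"
-- "libfoo.a"   ==> "libfoo.js_a"
-- "libfoo"     ==> "libfoo.js_a"
jsLibName :: FilePath -> FilePath
jsLibName lib
  | takeExtension lib `elem` [".dll", ".dylib", ".so"] = replaceExtension lib "js_so"
  | takeExtension lib == ".a"                          = replaceExtension lib "js_a"
  | otherwise                                          = lib <.> "js_a"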
- let cObjs = map (`replaceExtension` objExtension) (cSources libBi) - jsSrcs = jsSources libBi - baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir - linkJsLibOpts = mempty { - ghcOptExtra = toNubListR $ - [ "-link-js-lib" , getHSLibraryName libName - , "-js-lib-outputdir", libTargetDir ] ++ - concatMap (\x -> ["-js-lib-src",x]) jsSrcs - } - vanillaOptsNoJsLib = baseOpts `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptNumJobs = numJobs, - ghcOptInputModules = toNubListR $ libModules lib, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - vanillaOpts = vanillaOptsNoJsLib `mappend` linkJsLibOpts - - profOpts = adjustExts "p_hi" "p_o" vanillaOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptExtra = toNubListR $ - ghcjsProfOptions libBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - sharedOpts = adjustExts "dyn_hi" "dyn_o" vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptExtra = toNubListR $ - ghcjsSharedOptions libBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi, - ghcOptLinkLibs = toNubListR $ extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, - ghcOptInputFiles = - toNubListR $ [libTargetDir x | x <- cObjs] ++ jsSrcs - } - replOpts = vanillaOptsNoJsLib { - ghcOptExtra = overNubListR - Internal.filterGhciFlags - (ghcOptExtra vanillaOpts), - ghcOptNumJobs = mempty - } - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - - vanillaSharedOpts = vanillaOpts `mappend` - mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - - unless (forRepl || (null (libModules lib) && null jsSrcs && null cObjs)) $ - do let vanilla = whenVanillaLib forceVanillaLib (runGhcjsProg vanillaOpts) - shared = whenSharedLib forceSharedLib (runGhcjsProg sharedOpts) - useDynToo = dynamicTooSupported && - (forceVanillaLib || withVanillaLib lbi) && - (forceSharedLib || withSharedLib lbi) && - null (ghcjsSharedOptions libBi) - if useDynToo - then do - runGhcjsProg vanillaSharedOpts - case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of - (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do - -- When the vanilla and shared library builds are done - -- in one pass, only one set of HPC module interfaces - -- are generated. This set should suffice for both - -- static and dynamically linked executables. We copy - -- the modules interfaces so they are available under - -- both ways. - copyDirectoryRecursive verbosity dynDir vanillaDir - _ -> return () - else if isGhcjsDynamic - then do shared; vanilla - else do vanilla; shared - whenProfLib (runGhcjsProg profOpts) - - -- build any C sources - unless (null (cSources libBi) || not nativeToo) $ do - info verbosity "Building C Sources..." 
- sequence_ - [ do let vanillaCcOpts = - (Internal.componentCcGhcOptions verbosity implInfo - lbi libBi clbi libTargetDir filename) - profCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptObjSuffix = toFlag "p_o" - } - sharedCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptObjSuffix = toFlag "dyn_o" - } - odir = fromFlag (ghcOptObjDir vanillaCcOpts) - createDirectoryIfMissingVerbose verbosity True odir - runGhcjsProg vanillaCcOpts - whenSharedLib forceSharedLib (runGhcjsProg sharedCcOpts) - whenProfLib (runGhcjsProg profCcOpts) - | filename <- cSources libBi] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - unless (null (libModules lib)) $ - ifReplLib (runGhcjsProg replOpts) - - -- link: - when (nativeToo && not forRepl) $ do - info verbosity "Linking..." - let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) - (cSources libBi) - cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) - (cSources libBi) - cid = compilerId (compiler lbi) - vanillaLibFilePath = libTargetDir mkLibName libName - profileLibFilePath = libTargetDir mkProfLibName libName - sharedLibFilePath = libTargetDir mkSharedLibName cid libName - ghciLibFilePath = libTargetDir Internal.mkGHCiLibName libName - - hObjs <- Internal.getHaskellObjects implInfo lib lbi - libTargetDir objExtension True - hProfObjs <- - if (withProfLib lbi) - then Internal.getHaskellObjects implInfo lib lbi - libTargetDir ("p_" ++ objExtension) True - else return [] - hSharedObjs <- - if (withSharedLib lbi) - then Internal.getHaskellObjects implInfo lib lbi - libTargetDir ("dyn_" ++ objExtension) False - else return [] - - unless (null hObjs && null cObjs) $ do - - let staticObjectFiles = - hObjs - ++ map (libTargetDir ) cObjs - profObjectFiles = - hProfObjs - ++ map (libTargetDir ) cProfObjs - ghciObjFiles = - hObjs - ++ map (libTargetDir ) cObjs - dynamicObjectFiles = - hSharedObjs - ++ map (libTargetDir ) cSharedObjs - -- After the relocation lib is created we invoke ghc -shared - -- with the dependencies spelled out as -package arguments - -- and ghc invokes the linker with the proper library paths - ghcSharedLinkArgs = - mempty { - ghcOptShared = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptInputFiles = toNubListR dynamicObjectFiles, - ghcOptOutputFile = toFlag sharedLibFilePath, - ghcOptExtra = toNubListR $ - ghcjsSharedOptions libBi, - ghcOptNoAutoLinkPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ - Internal.mkGhcOptPackages clbi, - ghcOptLinkLibs = toNubListR $ extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi - } - - whenVanillaLib False $ do - Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles - - whenProfLib $ do - Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles - - whenGHCiLib $ do - (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) - Ld.combineObjectFiles verbosity ldProg - ghciLibFilePath ghciObjFiles - - whenSharedLib False $ - runGhcjsProg ghcSharedLinkArgs - --- | Start a REPL without loading any source files. 
-startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity conf comp platform packageDBs = do - let replOpts = mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptPackageDBs = packageDBs - } - checkPackageDbStack packageDBs - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram conf - runGHC verbosity ghcjsProg comp platform replOpts - -buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe = buildOrReplExe False -replExe = buildOrReplExe True - -buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi - exe@Executable { exeName = exeName', modulePath = modPath } clbi = do - - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - let comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - runGhcjsProg = runGHC verbosity ghcjsProg comp platform - exeBi = buildInfo exe - - -- exeNameReal, the name that GHC really uses (with .exe on Windows) - let exeNameReal = exeName' <.> - (if takeExtension exeName' /= ('.':exeExtension) - then exeExtension - else "") - - let targetDir = (buildDir lbi) exeName' - let exeDir = targetDir (exeName' ++ "-tmp") - createDirectoryIfMissingVerbose verbosity True targetDir - createDirectoryIfMissingVerbose verbosity True exeDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? FIX: what about exeName.hi-boot? - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. 
-startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> Platform
-                 -> PackageDBStack -> IO ()
-startInterpreter verbosity conf comp platform packageDBs = do
-  let replOpts = mempty {
-        ghcOptMode       = toFlag GhcModeInteractive,
-        ghcOptPackageDBs = packageDBs
-        }
-  checkPackageDbStack packageDBs
-  (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram conf
-  runGHC verbosity ghcjsProg comp platform replOpts
-
-buildExe, replExe :: Verbosity          -> Cabal.Flag (Maybe Int)
-                  -> PackageDescription -> LocalBuildInfo
-                  -> Executable         -> ComponentLocalBuildInfo -> IO ()
-buildExe = buildOrReplExe False
-replExe  = buildOrReplExe True
-
-buildOrReplExe :: Bool -> Verbosity   -> Cabal.Flag (Maybe Int)
-               -> PackageDescription  -> LocalBuildInfo
-               -> Executable          -> ComponentLocalBuildInfo -> IO ()
-buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi
-  exe@Executable { exeName = exeName', modulePath = modPath } clbi = do
-
-  (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi)
-  let comp         = compiler lbi
-      platform     = hostPlatform lbi
-      implInfo     = getImplInfo comp
-      runGhcjsProg = runGHC verbosity ghcjsProg comp platform
-      exeBi        = buildInfo exe
-
-  -- exeNameReal, the name that GHC really uses (with .exe on Windows)
-  let exeNameReal = exeName' <.>
-                    (if takeExtension exeName' /= ('.':exeExtension)
-                       then exeExtension
-                       else "")
-
-  let targetDir = (buildDir lbi) </> exeName'
-  let exeDir    = targetDir </> (exeName' ++ "-tmp")
-  createDirectoryIfMissingVerbose verbosity True targetDir
-  createDirectoryIfMissingVerbose verbosity True exeDir
-  -- TODO: do we need to put hs-boot files into place for mutually recursive
-  -- modules? FIX: what about exeName.hi-boot?
-
-  -- Determine if program coverage should be enabled and if so, what
-  -- '-hpcdir' should be.
- let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName' - | otherwise = mempty - - -- build executables - - srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath - let isGhcjsDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - buildRunner = case clbi of - ExeComponentLocalBuildInfo {} -> False - _ -> True - isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] - jsSrcs = jsSources exeBi - cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] - cObjs = map (`replaceExtension` objExtension) cSrcs - nativeToo = ghcjsNativeToo comp - baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) - `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptInputFiles = toNubListR $ - [ srcMainFile | isHaskellMain], - ghcOptInputModules = toNubListR $ - [ m | not isHaskellMain, m <- exeModules exe], - ghcOptExtra = - if buildRunner then toNubListR ["-build-runner"] - else mempty - } - staticOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticOnly, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - profOpts = adjustExts "p_hi" "p_o" baseOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptExtra = toNubListR $ ghcjsProfOptions exeBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - dynOpts = adjustExts "dyn_hi" "dyn_o" baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptExtra = toNubListR $ - ghcjsSharedOptions exeBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - dynTooOpts = adjustExts "dyn_hi" "dyn_o" staticOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi, - ghcOptLinkLibs = toNubListR $ extraLibs exeBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi, - ghcOptInputFiles = toNubListR $ - [exeDir x | x <- cObjs] ++ jsSrcs - } - replOpts = baseOpts { - ghcOptExtra = overNubListR - Internal.filterGhciFlags - (ghcOptExtra baseOpts) - } - -- For a normal compile we do separate invocations of ghc for - -- compiling as for linking. But for repl we have to do just - -- the one invocation, so that one has to include all the - -- linker stuff too, like -l flags and any .o files from C - -- files etc. - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - commonOpts | withProfExe lbi = profOpts - | withDynExe lbi = dynOpts - | otherwise = staticOpts - compileOpts | useDynToo = dynTooOpts - | otherwise = commonOpts - withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) - - -- For building exe's that use TH with -prof or -dynamic we actually have - -- to build twice, once without -prof/-dynamic and then again with - -- -prof/-dynamic. This is because the code that TH needs to run at - -- compile time needs to be the vanilla ABI so it can be loaded up and run - -- by the compiler. - -- With dynamic-by-default GHC the TH object files loaded at compile-time - -- need to be .dyn_o instead of .o. - doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi - -- Should we use -dynamic-too instead of compiling twice? 
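The per-way option sets above differ mainly in file suffixes: vanilla builds use the plain interface/object extensions, profiling builds use a p_ prefix and dynamic builds a dyn_ prefix (p_hi/p_o, dyn_hi/dyn_o), which is also how cObjs is derived from the C sources. A small sketch of that naming convention, assuming a Unix-style object extension of "o"; the cbits/ paths are made up.

import System.FilePath (replaceExtension)

objExtension :: String
objExtension = "o"

objNames :: [FilePath] -> ([FilePath], [FilePath], [FilePath])
objNames cSrcs =
  ( map (`replaceExtension` objExtension)             cSrcs   -- vanilla:   foo.o
  , map (`replaceExtension` ("p_"   ++ objExtension)) cSrcs   -- profiling: foo.p_o
  , map (`replaceExtension` ("dyn_" ++ objExtension)) cSrcs   -- dynamic:   foo.dyn_o
  )

main :: IO ()
main = print (objNames ["cbits/foo.c", "cbits/bar.c"])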
- useDynToo = dynamicTooSupported && isGhcjsDynamic - && doingTH && withStaticExe && null (ghcjsSharedOptions exeBi) - compileTHOpts | isGhcjsDynamic = dynOpts - | otherwise = staticOpts - compileForTH - | forRepl = False - | useDynToo = False - | isGhcjsDynamic = doingTH && (withProfExe lbi || withStaticExe) - | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) - - linkOpts = commonOpts `mappend` - linkerOpts `mappend` mempty { - ghcOptLinkNoHsMain = toFlag (not isHaskellMain) - } - - -- Build static/dynamic object files for TH, if needed. - when compileForTH $ - runGhcjsProg compileTHOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - unless forRepl $ - runGhcjsProg compileOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - -- build any C sources - unless (null cSrcs || not nativeToo) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi - clbi exeDir filename) `mappend` mempty { - ghcOptDynLinkMode = toFlag (if withDynExe lbi - then GhcDynamicOnly - else GhcStaticOnly), - ghcOptProfilingMode = toFlag (withProfExe lbi) - } - odir = fromFlag (ghcOptObjDir opts) - createDirectoryIfMissingVerbose verbosity True odir - runGhcjsProg opts - | filename <- cSrcs ] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - when forRepl $ runGhcjsProg replOpts - - -- link: - unless forRepl $ do - info verbosity "Linking..." - runGhcjsProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } - --- |Install for ghc, .hi, .a and, if --with-ghci given, .o -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do - whenVanilla $ copyModuleFiles "js_hi" - whenProf $ copyModuleFiles "js_p_hi" - whenShared $ copyModuleFiles "js_dyn_hi" - - whenVanilla $ installOrdinary builtDir targetDir $ toJSLibName vanillaLibName - whenProf $ installOrdinary builtDir targetDir $ toJSLibName profileLibName - whenShared $ installShared builtDir dynlibTargetDir $ toJSLibName sharedLibName - - when (ghcjsNativeToo $ compiler lbi) $ do - -- copy .hi files over: - whenVanilla $ copyModuleFiles "hi" - whenProf $ copyModuleFiles "p_hi" - whenShared $ copyModuleFiles "dyn_hi" - - -- copy the built library files over: - whenVanilla $ installOrdinaryNative builtDir targetDir vanillaLibName - whenProf $ installOrdinaryNative builtDir targetDir profileLibName - whenGHCi $ installOrdinaryNative builtDir targetDir ghciLibName - whenShared $ installSharedNative builtDir dynlibTargetDir sharedLibName - - where - install isShared isJS srcDir dstDir name = do - let src = srcDir name - dst = dstDir name - createDirectoryIfMissingVerbose verbosity True dstDir - - if isShared - then installExecutableFile verbosity src dst - else installOrdinaryFile verbosity src dst - - when (stripLibs lbi && not isJS) $ - Strip.stripLib verbosity - (hostPlatform lbi) (withPrograms lbi) dst - - installOrdinary = install False True - installShared = install True True - - installOrdinaryNative = install False False - installSharedNative = install True False - - copyModuleFiles ext = - findModuleFiles [builtDir] [ext] (libModules lib) - >>= 
installOrdinaryFiles verbosity targetDir - - cid = compilerId (compiler lbi) - libName = componentUnitId clbi - vanillaLibName = mkLibName libName - profileLibName = mkProfLibName libName - ghciLibName = Internal.mkGHCiLibName libName - sharedLibName = (mkSharedLibName cid) libName - - hasLib = not $ null (libModules lib) - && null (cSources (libBuildInfo lib)) - whenVanilla = when (hasLib && withVanillaLib lbi) - whenProf = when (hasLib && withProfLib lbi) - whenGHCi = when (hasLib && withGHCiLib lbi) - whenShared = when (hasLib && withSharedLib lbi) - -installExe :: Verbosity - -> LocalBuildInfo - -> InstallDirs FilePath -- ^Where to copy the files to - -> FilePath -- ^Build location - -> (FilePath, FilePath) -- ^Executable (prefix,suffix) - -> PackageDescription - -> Executable - -> IO () -installExe verbosity lbi installDirs buildPref - (progprefix, progsuffix) _pkg exe = do - let binDir = bindir installDirs - createDirectoryIfMissingVerbose verbosity True binDir - let exeFileName = exeName exe - fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix - installBinary dest = do - rawSystemProgramConf verbosity ghcjsProgram (withPrograms lbi) $ - [ "--install-executable" - , buildPref exeName exe exeFileName - , "-o", dest - ] ++ - case (stripExes lbi, lookupProgram stripProgram $ withPrograms lbi) of - (True, Just strip) -> ["-strip-program", programPath strip] - _ -> [] - installBinary (binDir fixedExeBaseName) - -libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO String -libAbiHash verbosity _pkg_descr lbi lib clbi = do - let - libBi = libBuildInfo lib - comp = compiler lbi - platform = hostPlatform lbi - vanillaArgs = - (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) - `mappend` mempty { - ghcOptMode = toFlag GhcModeAbiHash, - ghcOptInputModules = toNubListR $ PD.exposedModules lib - } - profArgs = adjustExts "js_p_hi" "js_p_o" vanillaArgs `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptExtra = toNubListR (ghcjsProfOptions libBi) - } - ghcArgs = if withVanillaLib lbi then vanillaArgs - else if withProfLib lbi then profArgs - else error "libAbiHash: Can't find an enabled library way" - -- - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - hash <- getProgramInvocationOutput verbosity - (ghcInvocation ghcjsProg comp platform ghcArgs) - return (takeWhile (not . 
isSpace) hash) - -adjustExts :: String -> String -> GhcOptions -> GhcOptions -adjustExts hiSuf objSuf opts = - opts `mappend` mempty { - ghcOptHiSuffix = toFlag hiSuf, - ghcOptObjSuffix = toFlag objSuf - } - -registerPackage :: Verbosity - -> ProgramConfiguration - -> Bool - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo - | multiInstance - = HcPkg.registerMultiInstance (hcPkgInfo progdb) verbosity - packageDbs installedPkgInfo - - | otherwise - = HcPkg.reregister (hcPkgInfo progdb) verbosity - packageDbs (Right installedPkgInfo) - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - let opts = Internal.componentGhcOptions verbosity lbi bi clbi odir - in opts { ghcOptExtra = ghcOptExtra opts `mappend` toNubListR - (hcOptions GHCJS bi) - } - -ghcjsProfOptions :: BuildInfo -> [String] -ghcjsProfOptions bi = - hcProfOptions GHC bi `mappend` hcProfOptions GHCJS bi - -ghcjsSharedOptions :: BuildInfo -> [String] -ghcjsSharedOptions bi = - hcSharedOptions GHC bi `mappend` hcSharedOptions GHCJS bi - -isDynamic :: Compiler -> Bool -isDynamic = Internal.ghcLookupProperty "GHC Dynamic" - -supportsDynamicToo :: Compiler -> Bool -supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" - -findGhcjsGhcVersion :: Verbosity -> FilePath -> IO (Maybe Version) -findGhcjsGhcVersion verbosity pgm = - findProgramVersion "--numeric-ghc-version" id verbosity pgm - -findGhcjsPkgGhcjsVersion :: Verbosity -> FilePath -> IO (Maybe Version) -findGhcjsPkgGhcjsVersion verbosity pgm = - findProgramVersion "--numeric-ghcjs-version" id verbosity pgm - --- ----------------------------------------------------------------------------- --- Registering - -hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo -hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcjsPkgProg - , HcPkg.noPkgDbStack = False - , HcPkg.noVerboseFlag = False - , HcPkg.flagPackageConf = False - , HcPkg.supportsDirDbs = True - , HcPkg.requiresDirDbs = v >= [7,10] - , HcPkg.nativeMultiInstance = v >= [7,10] - , HcPkg.recacheMultiInstance = True - } - where - v = versionBranch ver - Just ghcjsPkgProg = lookupProgram ghcjsPkgProgram conf - Just ver = programVersion ghcjsPkgProg - --- | Get the JavaScript file name and command and arguments to run a --- program compiled by GHCJS --- the exe should be the base program name without exe extension -runCmd :: ProgramConfiguration -> FilePath - -> (FilePath, FilePath, [String]) -runCmd conf exe = - ( script - , programPath ghcjsProg - , programDefaultArgs ghcjsProg ++ programOverrideArgs ghcjsProg ++ ["--run"] - ) - where - script = exe <.> "jsexe" "all" <.> "js" - Just ghcjsProg = lookupProgram ghcjsProgram conf diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Haddock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Haddock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Haddock.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,782 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Haddock --- Copyright : Isaac Jones 
2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module deals with the @haddock@ and @hscolour@ commands. --- It uses information about installed packages (from @ghc-pkg@) to find the --- locations of documentation for dependent packages, so it can create links. --- --- The @hscolour@ support allows generating HTML versions of the original --- source, with coloured syntax highlighting. - -module Distribution.Simple.Haddock ( - haddock, hscolour, - - haddockPackagePaths - ) where - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS - --- local -import Distribution.Compat.Semigroup as Semi -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Program.GHC -import Distribution.Simple.Program -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.Build -import Distribution.Simple.InstallDirs -import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) -import Distribution.Simple.BuildPaths -import qualified Distribution.Simple.PackageIndex as PackageIndex -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Text -import Distribution.Utils.NubList -import Distribution.Version -import Distribution.Verbosity -import Language.Haskell.Extension - - -import Control.Monad ( when, forM_ ) -import Data.Char ( isSpace ) -import Data.Either ( rights ) -import Data.Foldable ( traverse_, foldl' ) -import Data.Maybe ( fromMaybe, listToMaybe ) -import GHC.Generics ( Generic ) - -import System.Directory (doesFileExist) -import System.FilePath ( (), (<.>) - , normalise, splitPath, joinPath, isAbsolute ) -import System.IO (hClose, hPutStr, hPutStrLn, hSetEncoding, utf8) - --- ------------------------------------------------------------------------------ --- Types - --- | A record that represents the arguments to the haddock executable, a product --- monoid. -data HaddockArgs = HaddockArgs { - argInterfaceFile :: Flag FilePath, - -- ^ Path to the interface file, relative to argOutputDir, required. - argPackageName :: Flag PackageIdentifier, - -- ^ Package name, required. - argHideModules :: (All,[ModuleName.ModuleName]), - -- ^ (Hide modules ?, modules to hide) - argIgnoreExports :: Any, - -- ^ Ignore export lists in modules? - argLinkSource :: Flag (Template,Template,Template), - -- ^ (Template for modules, template for symbols, template for lines). - argCssFile :: Flag FilePath, - -- ^ Optional custom CSS file. - argContents :: Flag String, - -- ^ Optional URL to contents page. - argVerbose :: Any, - argOutput :: Flag [Output], - -- ^ HTML or Hoogle doc or both? Required. - argInterfaces :: [(FilePath, Maybe String)], - -- ^ [(Interface file, URL to the HTML docs for links)]. - argOutputDir :: Directory, - -- ^ Where to generate the documentation. - argTitle :: Flag String, - -- ^ Page title, required. - argPrologue :: Flag String, - -- ^ Prologue text, required. - argGhcOptions :: Flag (GhcOptions, Version), - -- ^ Additional flags to pass to GHC. - argGhcLibDir :: Flag FilePath, - -- ^ To find the correct GHC, required. - argTargets :: [FilePath] - -- ^ Modules to process. 
-} deriving Generic - --- | The FilePath of a directory, it's a monoid under '()'. -newtype Directory = Dir { unDir' :: FilePath } deriving (Read,Show,Eq,Ord) - -unDir :: Directory -> FilePath -unDir = joinPath . filter (\p -> p /="./" && p /= ".") . splitPath . unDir' - -type Template = String - -data Output = Html | Hoogle - --- ------------------------------------------------------------------------------ --- Haddock support - -haddock :: PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HaddockFlags - -> IO () -haddock pkg_descr _ _ haddockFlags - | not (hasLibs pkg_descr) - && not (fromFlag $ haddockExecutables haddockFlags) - && not (fromFlag $ haddockTestSuites haddockFlags) - && not (fromFlag $ haddockBenchmarks haddockFlags) = - warn (fromFlag $ haddockVerbosity haddockFlags) $ - "No documentation was generated as this package does not contain " - ++ "a library. Perhaps you want to use the --executables, --tests or" - ++ " --benchmarks flags." - -haddock pkg_descr lbi suffixes flags' = do - let verbosity = flag haddockVerbosity - comp = compiler lbi - platform = hostPlatform lbi - - flags = case haddockTarget of - ForDevelopment -> flags' - ForHackage -> flags' - { haddockHoogle = Flag True - , haddockHtml = Flag True - , haddockHtmlLocation = Flag (pkg_url ++ "/docs") - , haddockContents = Flag (toPathTemplate pkg_url) - , haddockHscolour = Flag True - } - pkg_url = "/package/$pkg-$version" - flag f = fromFlag $ f flags - - tmpFileOpts = defaultTempFileOptions - { optKeepTempFiles = flag haddockKeepTempFiles } - htmlTemplate = fmap toPathTemplate . flagToMaybe . haddockHtmlLocation - $ flags - haddockTarget = - fromFlagOrDefault ForDevelopment (haddockForHackage flags') - - setupMessage verbosity "Running Haddock for" (packageId pkg_descr) - (confHaddock, version, _) <- - requireProgramVersion verbosity haddockProgram - (orLaterVersion (Version [2,0] [])) (withPrograms lbi) - - -- various sanity checks - when ( flag haddockHoogle - && version < Version [2,2] []) $ - die "haddock 2.0 and 2.1 do not support the --hoogle flag." 
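Several of the checks above gate features on the detected tool version, for example requiring haddock 2.2 or later for --hoogle. A small sketch of that kind of Data.Version comparison; the supportsHoogle helper and the detected version are illustrative, and makeVersion [2,2] corresponds to the Version [2,2] [] literal used in the deleted code.

import Data.Version (Version, makeVersion, showVersion)

supportsHoogle :: Version -> Bool
supportsHoogle v = v >= makeVersion [2,2]

main :: IO ()
main = do
  let detected = makeVersion [2,0]    -- hypothetical detected haddock version
  putStrLn $ "haddock " ++ showVersion detected ++
             (if supportsHoogle detected
                then " supports --hoogle"
                else " does not support --hoogle")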
- - haddockGhcVersionStr <- rawSystemProgramStdout verbosity confHaddock - ["--ghc-version"] - case (simpleParse haddockGhcVersionStr, compilerCompatVersion GHC comp) of - (Nothing, _) -> die "Could not get GHC version from Haddock" - (_, Nothing) -> die "Could not get GHC version from compiler" - (Just haddockGhcVersion, Just ghcVersion) - | haddockGhcVersion == ghcVersion -> return () - | otherwise -> die $ - "Haddock's internal GHC version must match the configured " - ++ "GHC version.\n" - ++ "The GHC version is " ++ display ghcVersion ++ " but " - ++ "haddock is using GHC version " ++ display haddockGhcVersion - - -- the tools match the requests, we can proceed - - initialBuildSteps (flag haddockDistPref) pkg_descr lbi verbosity - - when (flag haddockHscolour) $ - hscolour' (warn verbosity) haddockTarget pkg_descr lbi suffixes - (defaultHscolourFlags `mappend` haddockToHscolour flags) - - libdirArgs <- getGhcLibDir verbosity lbi - let commonArgs = mconcat - [ libdirArgs - , fromFlags (haddockTemplateEnv lbi (packageId pkg_descr)) flags - , fromPackageDescription haddockTarget pkg_descr ] - - let pre c = preprocessComponent pkg_descr c lbi False verbosity suffixes - withAllComponentsInBuildOrder pkg_descr lbi $ \component clbi -> do - pre component - let - doExe com = case (compToExe com) of - Just exe -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ - \tmp -> do - exeArgs <- fromExecutable verbosity tmp lbi exe clbi htmlTemplate - version - let exeArgs' = commonArgs `mappend` exeArgs - runHaddock verbosity tmpFileOpts comp platform - confHaddock exeArgs' - Nothing -> do - warn (fromFlag $ haddockVerbosity flags) - "Unsupported component, skipping..." - return () - case component of - CLib lib -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ - \tmp -> do - libArgs <- fromLibrary verbosity tmp lbi lib clbi htmlTemplate - version - let libArgs' = commonArgs `mappend` libArgs - runHaddock verbosity tmpFileOpts comp platform confHaddock libArgs' - CExe _ -> when (flag haddockExecutables) $ doExe component - CTest _ -> when (flag haddockTestSuites) $ doExe component - CBench _ -> when (flag haddockBenchmarks) $ doExe component - - forM_ (extraDocFiles pkg_descr) $ \ fpath -> do - files <- matchFileGlob fpath - forM_ files $ copyFileTo verbosity (unDir $ argOutputDir commonArgs) - --- ------------------------------------------------------------------------------ --- Contributions to HaddockArgs. - -fromFlags :: PathTemplateEnv -> HaddockFlags -> HaddockArgs -fromFlags env flags = - mempty { - argHideModules = (maybe mempty (All . not) - $ flagToMaybe (haddockInternal flags), mempty), - argLinkSource = if fromFlag (haddockHscolour flags) - then Flag ("src/%{MODULE/./-}.html" - ,"src/%{MODULE/./-}.html#%{NAME}" - ,"src/%{MODULE/./-}.html#line-%{LINE}") - else NoFlag, - argCssFile = haddockCss flags, - argContents = fmap (fromPathTemplate . substPathTemplate env) - (haddockContents flags), - argVerbose = maybe mempty (Any . (>= deafening)) - . flagToMaybe $ haddockVerbosity flags, - argOutput = - Flag $ case [ Html | Flag True <- [haddockHtml flags] ] ++ - [ Hoogle | Flag True <- [haddockHoogle flags] ] - of [] -> [ Html ] - os -> os, - argOutputDir = maybe mempty Dir . 
flagToMaybe $ haddockDistPref flags - } - -fromPackageDescription :: HaddockTarget -> PackageDescription -> HaddockArgs -fromPackageDescription haddockTarget pkg_descr = - mempty { argInterfaceFile = Flag $ haddockName pkg_descr, - argPackageName = Flag $ packageId $ pkg_descr, - argOutputDir = Dir $ - "doc" "html" haddockDirName haddockTarget pkg_descr, - argPrologue = Flag $ if null desc then synopsis pkg_descr - else desc, - argTitle = Flag $ showPkg ++ subtitle - } - where - desc = PD.description pkg_descr - showPkg = display (packageId pkg_descr) - subtitle | null (synopsis pkg_descr) = "" - | otherwise = ": " ++ synopsis pkg_descr - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - let f = case compilerFlavor (compiler lbi) of - GHC -> GHC.componentGhcOptions - GHCJS -> GHCJS.componentGhcOptions - _ -> error $ - "Distribution.Simple.Haddock.componentGhcOptions:" ++ - "haddock only supports GHC and GHCJS" - in f verbosity lbi bi clbi odir - -fromLibrary :: Verbosity - -> FilePath - -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> IO HaddockArgs -fromLibrary verbosity tmp lbi lib clbi htmlTemplate haddockVersion = do - inFiles <- map snd `fmap` getLibSourceFiles lbi lib - ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate - let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { - -- Noooooooooo!!!!!111 - -- haddock stomps on our precious .hi - -- and .o files. Workaround by telling - -- haddock to write them elsewhere. - ghcOptObjDir = toFlag tmp, - ghcOptHiDir = toFlag tmp, - ghcOptStubDir = toFlag tmp - } `mappend` getGhcCppOpts haddockVersion bi - sharedOpts = vanillaOpts { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = - toNubListR $ hcSharedOptions GHC bi - - } - opts <- if withVanillaLib lbi - then return vanillaOpts - else if withSharedLib lbi - then return sharedOpts - else die $ "Must have vanilla or shared libraries " - ++ "enabled in order to run haddock" - ghcVersion <- maybe (die "Compiler has no GHC version") - return - (compilerCompatVersion GHC (compiler lbi)) - - return ifaceArgs { - argHideModules = (mempty,otherModules $ bi), - argGhcOptions = toFlag (opts, ghcVersion), - argTargets = inFiles - } - where - bi = libBuildInfo lib - -fromExecutable :: Verbosity - -> FilePath - -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> IO HaddockArgs -fromExecutable verbosity tmp lbi exe clbi htmlTemplate haddockVersion = do - inFiles <- map snd `fmap` getExeSourceFiles lbi exe - ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate - let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { - -- Noooooooooo!!!!!111 - -- haddock stomps on our precious .hi - -- and .o files. Workaround by telling - -- haddock to write them elsewhere. 
- ghcOptObjDir = toFlag tmp, - ghcOptHiDir = toFlag tmp, - ghcOptStubDir = toFlag tmp - } `mappend` getGhcCppOpts haddockVersion bi - sharedOpts = vanillaOpts { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = - toNubListR $ hcSharedOptions GHC bi - } - opts <- if withVanillaLib lbi - then return vanillaOpts - else if withSharedLib lbi - then return sharedOpts - else die $ "Must have vanilla or shared libraries " - ++ "enabled in order to run haddock" - ghcVersion <- maybe (die "Compiler has no GHC version") - return - (compilerCompatVersion GHC (compiler lbi)) - - return ifaceArgs { - argGhcOptions = toFlag (opts, ghcVersion), - argOutputDir = Dir (exeName exe), - argTitle = Flag (exeName exe), - argTargets = inFiles - } - where - bi = buildInfo exe - -compToExe :: Component -> Maybe Executable -compToExe comp = - case comp of - CTest test@TestSuite { testInterface = TestSuiteExeV10 _ f } -> - Just Executable { - exeName = testName test, - modulePath = f, - buildInfo = testBuildInfo test - } - CBench bench@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } -> - Just Executable { - exeName = benchmarkName bench, - modulePath = f, - buildInfo = benchmarkBuildInfo bench - } - CExe exe -> Just exe - _ -> Nothing - -getInterfaces :: Verbosity - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> IO HaddockArgs -getInterfaces verbosity lbi clbi htmlTemplate = do - (packageFlags, warnings) <- haddockPackageFlags lbi clbi htmlTemplate - traverse_ (warn verbosity) warnings - return $ mempty { - argInterfaces = packageFlags - } - -getGhcCppOpts :: Version - -> BuildInfo - -> GhcOptions -getGhcCppOpts haddockVersion bi = - mempty { - ghcOptExtensions = toNubListR [EnableExtension CPP | needsCpp], - ghcOptCppOptions = toNubListR defines - } - where - needsCpp = EnableExtension CPP `elem` usedExtensions bi - defines = [haddockVersionMacro] - haddockVersionMacro = "-D__HADDOCK_VERSION__=" - ++ show (v1 * 1000 + v2 * 10 + v3) - where - [v1, v2, v3] = take 3 $ versionBranch haddockVersion ++ [0,0] - -getGhcLibDir :: Verbosity -> LocalBuildInfo - -> IO HaddockArgs -getGhcLibDir verbosity lbi = do - l <- case compilerFlavor (compiler lbi) of - GHC -> GHC.getLibDir verbosity lbi - GHCJS -> GHCJS.getLibDir verbosity lbi - _ -> error "haddock only supports GHC and GHCJS" - return $ mempty { argGhcLibDir = Flag l } - --- ------------------------------------------------------------------------------ --- | Call haddock with the specified arguments. 
-runHaddock :: Verbosity - -> TempFileOptions - -> Compiler - -> Platform - -> ConfiguredProgram - -> HaddockArgs - -> IO () -runHaddock verbosity tmpFileOpts comp platform confHaddock args = do - let haddockVersion = fromMaybe (error "unable to determine haddock version") - (programVersion confHaddock) - renderArgs verbosity tmpFileOpts haddockVersion comp platform args $ - \(flags,result)-> do - - rawSystemProgram verbosity confHaddock flags - - notice verbosity $ "Documentation created: " ++ result - - -renderArgs :: Verbosity - -> TempFileOptions - -> Version - -> Compiler - -> Platform - -> HaddockArgs - -> (([String], FilePath) -> IO a) - -> IO a -renderArgs verbosity tmpFileOpts version comp platform args k = do - let haddockSupportsUTF8 = version >= Version [2,14,4] [] - haddockSupportsResponseFiles = version > Version [2,16,2] [] - createDirectoryIfMissingVerbose verbosity True outputDir - withTempFileEx tmpFileOpts outputDir "haddock-prologue.txt" $ - \prologueFileName h -> do - do - when haddockSupportsUTF8 (hSetEncoding h utf8) - hPutStrLn h $ fromFlag $ argPrologue args - hClose h - let pflag = "--prologue=" ++ prologueFileName - renderedArgs = pflag : renderPureArgs version comp platform args - if haddockSupportsResponseFiles - then - withTempFileEx tmpFileOpts outputDir "haddock-response.txt" $ - \responseFileName hf -> do - when haddockSupportsUTF8 (hSetEncoding hf utf8) - let responseContents = - unlines $ map escapeArg renderedArgs - hPutStr hf responseContents - hClose hf - info verbosity $ responseFileName ++ " contents: <<<" - info verbosity responseContents - info verbosity $ ">>> " ++ responseFileName - let respFile = "@" ++ responseFileName - k ([respFile], result) - else - k (renderedArgs, result) - where - outputDir = (unDir $ argOutputDir args) - result = intercalate ", " - . map (\o -> outputDir - case o of - Html -> "index.html" - Hoogle -> pkgstr <.> "txt") - $ arg argOutput - where - pkgstr = display $ packageName pkgid - pkgid = arg argPackageName - arg f = fromFlag $ f args - -- Support a gcc-like response file syntax. Each separate - -- argument and its possible parameter(s), will be separated in the - -- response file by an actual newline; all other whitespace, - -- single quotes, double quotes, and the character used for escaping - -- (backslash) are escaped. The called program will need to do a similar - -- inverse operation to de-escape and re-constitute the argument list. - escape cs c - | isSpace c - || '\\' == c - || '\'' == c - || '"' == c = c:'\\':cs -- n.b., our caller must reverse the result - | otherwise = c:cs - escapeArg = reverse . foldl' escape [] - -renderPureArgs :: Version -> Compiler -> Platform -> HaddockArgs -> [String] -renderPureArgs version comp platform args = concat - [ (:[]) . (\f -> "--dump-interface="++ unDir (argOutputDir args) f) - . fromFlag . argInterfaceFile $ args - - , if isVersion 2 16 - then (\pkg -> [ "--package-name=" ++ display (pkgName pkg) - , "--package-version="++display (pkgVersion pkg) - ]) - . fromFlag . argPackageName $ args - else [] - - , (\(All b,xs) -> bool (map (("--hide=" ++). display) xs) [] b) - . argHideModules $ args - - , bool ["--ignore-all-exports"] [] . getAny . argIgnoreExports $ args - - , maybe [] (\(m,e,l) -> - ["--source-module=" ++ m - ,"--source-entity=" ++ e] - ++ if isVersion 2 14 then ["--source-entity-line=" ++ l] - else [] - ) . flagToMaybe . argLinkSource $ args - - , maybe [] ((:[]) . ("--css="++)) . flagToMaybe . argCssFile $ args - - , maybe [] ((:[]) . ("--use-contents="++)) . 
flagToMaybe . argContents $ args - - , bool [] [verbosityFlag] . getAny . argVerbose $ args - - , map (\o -> case o of Hoogle -> "--hoogle"; Html -> "--html") - . fromFlag . argOutput $ args - - , renderInterfaces . argInterfaces $ args - - , (:[]) . ("--odir="++) . unDir . argOutputDir $ args - - , (:[]) . ("--title="++) - . (bool (++" (internal documentation)") - id (getAny $ argIgnoreExports args)) - . fromFlag . argTitle $ args - - , [ "--optghc=" ++ opt | (opts, _ghcVer) <- flagToList (argGhcOptions args) - , opt <- renderGhcOptions comp platform opts ] - - , maybe [] (\l -> ["-B"++l]) $ - flagToMaybe (argGhcLibDir args) -- error if Nothing? - - , argTargets $ args - ] - where - renderInterfaces = - map (\(i,mh) -> "--read-interface=" ++ - maybe "" (++",") mh ++ i) - bool a b c = if c then a else b - isVersion major minor = version >= Version [major,minor] [] - verbosityFlag - | isVersion 2 5 = "--verbosity=1" - | otherwise = "--verbose" - ---------------------------------------------------------------------------------- - --- | Given a list of 'InstalledPackageInfo's, return a list of interfaces and --- HTML paths, and an optional warning for packages with missing documentation. -haddockPackagePaths :: [InstalledPackageInfo] - -> Maybe (InstalledPackageInfo -> FilePath) - -> IO ([(FilePath, Maybe FilePath)], Maybe String) -haddockPackagePaths ipkgs mkHtmlPath = do - interfaces <- sequence - [ case interfaceAndHtmlPath ipkg of - Nothing -> return (Left (packageId ipkg)) - Just (interface, html) -> do - exists <- doesFileExist interface - if exists - then return (Right (interface, html)) - else return (Left pkgid) - | ipkg <- ipkgs, let pkgid = packageId ipkg - , pkgName pkgid `notElem` noHaddockWhitelist - ] - - let missing = [ pkgid | Left pkgid <- interfaces ] - warning = "The documentation for the following packages are not " - ++ "installed. No links will be generated to these packages: " - ++ intercalate ", " (map display missing) - flags = rights interfaces - - return (flags, if null missing then Nothing else Just warning) - - where - -- Don't warn about missing documentation for these packages. See #1231. - noHaddockWhitelist = map PackageName [ "rts" ] - - -- Actually extract interface and HTML paths from an 'InstalledPackageInfo'. - interfaceAndHtmlPath :: InstalledPackageInfo - -> Maybe (FilePath, Maybe FilePath) - interfaceAndHtmlPath pkg = do - interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg) - html <- case mkHtmlPath of - Nothing -> fmap fixFileUrl - (listToMaybe (InstalledPackageInfo.haddockHTMLs pkg)) - Just mkPath -> Just (mkPath pkg) - return (interface, if null html then Nothing else Just html) - where - -- The 'haddock-html' field in the hc-pkg output is often set as a - -- native path, but we need it as a URL. See #1064. 
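The response-file handling above writes one argument per line and escapes whitespace, quotes and backslashes so the called tool can reconstruct the argument list faithfully. A standalone sketch of that escaping, mirroring the escape/escapeArg helpers in the deleted renderArgs; the sample arguments are made up.

import Data.Char (isSpace)
import Data.List (foldl')

-- One argument per line; whitespace, quotes and backslashes are escaped.
escapeArg :: String -> String
escapeArg = reverse . foldl' escape []
  where
    escape cs c
      | isSpace c || c `elem` "\\'\"" = c : '\\' : cs   -- accumulator is reversed,
                                                        -- so '\\' lands before c
      | otherwise                     = c : cs

responseFileContents :: [String] -> String
responseFileContents = unlines . map escapeArg

main :: IO ()
main = putStr (responseFileContents ["--odir=dist/doc", "--title=my pkg (internal)"])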
- fixFileUrl f | isAbsolute f = "file://" ++ f - | otherwise = f - -haddockPackageFlags :: LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate - -> IO ([(FilePath, Maybe FilePath)], Maybe String) -haddockPackageFlags lbi clbi htmlTemplate = do - let allPkgs = installedPkgs lbi - directDeps = map fst (componentPackageDeps clbi) - transitiveDeps <- case PackageIndex.dependencyClosure allPkgs directDeps of - Left x -> return x - Right inf -> die $ "internal error when calculating transitive " - ++ "package dependencies.\nDebug info: " ++ show inf - haddockPackagePaths (PackageIndex.allPackages transitiveDeps) mkHtmlPath - where - mkHtmlPath = fmap expandTemplateVars htmlTemplate - expandTemplateVars tmpl pkg = - fromPathTemplate . substPathTemplate (env pkg) $ tmpl - env pkg = haddockTemplateEnv lbi (packageId pkg) - - -haddockTemplateEnv :: LocalBuildInfo -> PackageIdentifier -> PathTemplateEnv -haddockTemplateEnv lbi pkg_id = - (PrefixVar, prefix (installDirTemplates lbi)) - -- We want the legacy unit ID here, because it gives us nice paths - -- (Haddock people don't care about the dependencies) - : initialPathTemplateEnv pkg_id (mkLegacyUnitId pkg_id) (compilerInfo (compiler lbi)) - (hostPlatform lbi) - --- ------------------------------------------------------------------------------ --- hscolour support. - -hscolour :: PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HscolourFlags - -> IO () -hscolour pkg_descr lbi suffixes flags = do - -- we preprocess even if hscolour won't be found on the machine - -- will this upset someone? - initialBuildSteps distPref pkg_descr lbi verbosity - hscolour' die ForDevelopment pkg_descr lbi suffixes flags - where - verbosity = fromFlag (hscolourVerbosity flags) - distPref = fromFlag $ hscolourDistPref flags - -hscolour' :: (String -> IO ()) -- ^ Called when the 'hscolour' exe is not found. - -> HaddockTarget - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HscolourFlags - -> IO () -hscolour' onNoHsColour haddockTarget pkg_descr lbi suffixes flags = - either onNoHsColour (\(hscolourProg, _, _) -> go hscolourProg) =<< - lookupProgramVersion verbosity hscolourProgram - (orLaterVersion (Version [1,8] [])) (withPrograms lbi) - where - go :: ConfiguredProgram -> IO () - go hscolourProg = do - setupMessage verbosity "Running hscolour for" (packageId pkg_descr) - createDirectoryIfMissingVerbose verbosity True $ - hscolourPref haddockTarget distPref pkg_descr - - let pre c = preprocessComponent pkg_descr c lbi False verbosity suffixes - withAllComponentsInBuildOrder pkg_descr lbi $ \comp _ -> do - pre comp - let - doExe com = case (compToExe com) of - Just exe -> do - let outputDir = hscolourPref haddockTarget distPref pkg_descr - exeName exe "src" - runHsColour hscolourProg outputDir =<< getExeSourceFiles lbi exe - Nothing -> do - warn (fromFlag $ hscolourVerbosity flags) - "Unsupported component, skipping..." 
- return () - case comp of - CLib lib -> do - let outputDir = hscolourPref haddockTarget distPref pkg_descr "src" - runHsColour hscolourProg outputDir =<< getLibSourceFiles lbi lib - CExe _ -> when (fromFlag (hscolourExecutables flags)) $ doExe comp - CTest _ -> when (fromFlag (hscolourTestSuites flags)) $ doExe comp - CBench _ -> when (fromFlag (hscolourBenchmarks flags)) $ doExe comp - - stylesheet = flagToMaybe (hscolourCSS flags) - - verbosity = fromFlag (hscolourVerbosity flags) - distPref = fromFlag (hscolourDistPref flags) - - runHsColour prog outputDir moduleFiles = do - createDirectoryIfMissingVerbose verbosity True outputDir - - case stylesheet of -- copy the CSS file - Nothing | programVersion prog >= Just (Version [1,9] []) -> - rawSystemProgram verbosity prog - ["-print-css", "-o" ++ outputDir "hscolour.css"] - | otherwise -> return () - Just s -> copyFileVerbose verbosity s (outputDir "hscolour.css") - - forM_ moduleFiles $ \(m, inFile) -> - rawSystemProgram verbosity prog - ["-css", "-anchor", "-o" ++ outFile m, inFile] - where - outFile m = outputDir - intercalate "-" (ModuleName.components m) <.> "html" - -haddockToHscolour :: HaddockFlags -> HscolourFlags -haddockToHscolour flags = - HscolourFlags { - hscolourCSS = haddockHscolourCss flags, - hscolourExecutables = haddockExecutables flags, - hscolourTestSuites = haddockTestSuites flags, - hscolourBenchmarks = haddockBenchmarks flags, - hscolourVerbosity = haddockVerbosity flags, - hscolourDistPref = haddockDistPref flags - } ---------------------------------------------------------------------------------- --- TODO these should be moved elsewhere. - -getLibSourceFiles :: LocalBuildInfo - -> Library - -> IO [(ModuleName.ModuleName, FilePath)] -getLibSourceFiles lbi lib = getSourceFiles searchpaths modules - where - bi = libBuildInfo lib - modules = PD.exposedModules lib ++ otherModules bi - searchpaths = autogenModulesDir lbi : buildDir lbi : hsSourceDirs bi - -getExeSourceFiles :: LocalBuildInfo - -> Executable - -> IO [(ModuleName.ModuleName, FilePath)] -getExeSourceFiles lbi exe = do - moduleFiles <- getSourceFiles searchpaths modules - srcMainPath <- findFile (hsSourceDirs bi) (modulePath exe) - return ((ModuleName.main, srcMainPath) : moduleFiles) - where - bi = buildInfo exe - modules = otherModules bi - searchpaths = autogenModulesDir lbi : exeBuildDir lbi exe : hsSourceDirs bi - -getSourceFiles :: [FilePath] - -> [ModuleName.ModuleName] - -> IO [(ModuleName.ModuleName, FilePath)] -getSourceFiles dirs modules = flip mapM modules $ \m -> fmap ((,) m) $ - findFileWithExtension ["hs", "lhs"] dirs (ModuleName.toFilePath m) - >>= maybe (notFound m) (return . normalise) - where - notFound module_ = die $ "can't find source for module " ++ display module_ - --- | The directory where we put build results for an executable -exeBuildDir :: LocalBuildInfo -> Executable -> FilePath -exeBuildDir lbi exe = buildDir lbi exeName exe exeName exe ++ "-tmp" - --- ------------------------------------------------------------------------------ --- Boilerplate Monoid instance. -instance Monoid HaddockArgs where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup HaddockArgs where - (<>) = gmappend - -instance Monoid Directory where - mempty = Dir "." 
- mappend = (Semi.<>) - -instance Semigroup Directory where - Dir m <> Dir n = Dir $ m n diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/HaskellSuite.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/HaskellSuite.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/HaskellSuite.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/HaskellSuite.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,222 +0,0 @@ -module Distribution.Simple.HaskellSuite where - -import Control.Monad -import Data.Maybe -import Data.Version -import qualified Data.Map as M (empty) - -import Distribution.Simple.Program -import Distribution.Simple.Compiler as Compiler -import Distribution.Simple.Utils -import Distribution.Simple.BuildPaths -import Distribution.Verbosity -import Distribution.Text -import Distribution.Package -import Distribution.InstalledPackageInfo hiding (includeDirs) -import Distribution.Simple.PackageIndex as PackageIndex -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.System (Platform) -import Distribution.Compat.Exception -import Language.Haskell.Extension -import Distribution.Simple.Program.Builtin - -configure - :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity mbHcPath hcPkgPath conf0 = do - - -- We have no idea how a haskell-suite tool is named, so we require at - -- least some information from the user. - hcPath <- - let msg = "You have to provide name or path of a haskell-suite tool (-w PATH)" - in maybe (die msg) return mbHcPath - - when (isJust hcPkgPath) $ - warn verbosity "--with-hc-pkg option is ignored for haskell-suite" - - (comp, confdCompiler, conf1) <- configureCompiler hcPath conf0 - - -- Update our pkg tool. It uses the same executable as the compiler, but - -- all command start with "pkg" - (confdPkg, _) <- requireProgram verbosity haskellSuitePkgProgram conf1 - let conf2 = - updateProgram - confdPkg - { programLocation = programLocation confdCompiler - , programDefaultArgs = ["pkg"] - } - conf1 - - return (comp, Nothing, conf2) - - where - configureCompiler hcPath conf0' = do - let - haskellSuiteProgram' = - haskellSuiteProgram - { programFindLocation = \v p -> findProgramOnSearchPath v p hcPath } - - -- NB: cannot call requireProgram right away — it'd think that - -- the program is already configured and won't reconfigure it again. - -- Instead, call configureProgram directly first. 
- conf1 <- configureProgram verbosity haskellSuiteProgram' conf0' - (confdCompiler, conf2) <- requireProgram verbosity haskellSuiteProgram' conf1 - - extensions <- getExtensions verbosity confdCompiler - languages <- getLanguages verbosity confdCompiler - (compName, compVersion) <- - getCompilerVersion verbosity confdCompiler - - let - comp = Compiler { - compilerId = CompilerId (HaskellSuite compName) compVersion, - compilerAbiTag = Compiler.NoAbiTag, - compilerCompat = [], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = M.empty - } - - return (comp, confdCompiler, conf2) - -hstoolVersion :: Verbosity -> FilePath -> IO (Maybe Version) -hstoolVersion = findProgramVersion "--hspkg-version" id - -numericVersion :: Verbosity -> FilePath -> IO (Maybe Version) -numericVersion = findProgramVersion "--compiler-version" (last . words) - -getCompilerVersion :: Verbosity -> ConfiguredProgram -> IO (String, Version) -getCompilerVersion verbosity prog = do - output <- rawSystemStdout verbosity (programPath prog) ["--compiler-version"] - let - parts = words output - name = concat $ init parts -- there shouldn't be any spaces in the name anyway - versionStr = last parts - version <- - maybe (die "haskell-suite: couldn't determine compiler version") return $ - simpleParse versionStr - return (name, version) - -getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Compiler.Flag)] -getExtensions verbosity prog = do - extStrs <- - lines `fmap` - rawSystemStdout verbosity (programPath prog) ["--supported-extensions"] - return - [ (ext, "-X" ++ display ext) | Just ext <- map simpleParse extStrs ] - -getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Compiler.Flag)] -getLanguages verbosity prog = do - langStrs <- - lines `fmap` - rawSystemStdout verbosity (programPath prog) ["--supported-languages"] - return - [ (ext, "-G" ++ display ext) | Just ext <- map simpleParse langStrs ] - --- Other compilers do some kind of a packagedb stack check here. Not sure --- if we need something like that as well. -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity packagedbs conf = - liftM (PackageIndex.fromList . concat) $ forM packagedbs $ \packagedb -> - do str <- - getDbProgramOutput verbosity haskellSuitePkgProgram conf - ["dump", packageDbOpt packagedb] - `catchExit` \_ -> die $ "pkg dump failed" - case parsePackages str of - Right ok -> return ok - _ -> die "failed to parse output of 'pkg dump'" - - where - parsePackages str = - let parsed = map parseInstalledPackageInfo (splitPkgs str) - in case [ msg | ParseFailed msg <- parsed ] of - [] -> Right [ pkg | ParseOk _ pkg <- parsed ] - msgs -> Left msgs - - splitPkgs :: String -> [String] - splitPkgs = map unlines . splitWith ("---" ==) . lines - where - splitWith :: (a -> Bool) -> [a] -> [[a]] - splitWith p xs = ys : case zs of - [] -> [] - _:ws -> splitWith p ws - where (ys,zs) = break p xs - -buildLib - :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - -- In future, there should be a mechanism for the compiler to request any - -- number of the above parameters (or their parts) — in particular, - -- pieces of PackageDescription. - -- - -- For now, we only pass those that we know are used. 
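getInstalledPackages above parses the haskell-suite `pkg dump` output by cutting it at "---" separator lines, one chunk per package. A standalone version of that splitting; the two-package dump text is made up for the example.

splitPkgs :: String -> [String]
splitPkgs = map unlines . splitWith ("---" ==) . lines
  where
    splitWith :: (a -> Bool) -> [a] -> [[a]]
    splitWith p xs = ys : case zs of
                            []   -> []
                            _:ws -> splitWith p ws
      where (ys, zs) = break p xs

main :: IO ()
main = print (splitPkgs "name: foo\nversion: 1.0\n---\nname: bar\nversion: 2.0\n")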
- - let odir = buildDir lbi - bi = libBuildInfo lib - srcDirs = hsSourceDirs bi ++ [odir] - dbStack = withPackageDB lbi - language = fromMaybe Haskell98 (defaultLanguage bi) - conf = withPrograms lbi - pkgid = packageId pkg_descr - - runDbProgram verbosity haskellSuiteProgram conf $ - [ "compile", "--build-dir", odir ] ++ - concat [ ["-i", d] | d <- srcDirs ] ++ - concat [ ["-I", d] | d <- [autogenModulesDir lbi, odir] ++ includeDirs bi ] ++ - [ packageDbOpt pkgDb | pkgDb <- dbStack ] ++ - [ "--package-name", display pkgid ] ++ - concat [ ["--package-id", display ipkgid ] - | (ipkgid, _) <- componentPackageDeps clbi ] ++ - ["-G", display language] ++ - concat [ ["-X", display ex] | ex <- usedExtensions bi ] ++ - cppOptions (libBuildInfo lib) ++ - [ display modu | modu <- libModules lib ] - - - -installLib - :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir pkg lib _clbi = do - let conf = withPrograms lbi - runDbProgram verbosity haskellSuitePkgProgram conf $ - [ "install-library" - , "--build-dir", builtDir - , "--target-dir", targetDir - , "--dynlib-target-dir", dynlibTargetDir - , "--package-id", display $ packageId pkg - ] ++ map display (libModules lib) - -registerPackage - :: Verbosity - -> ProgramConfiguration - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity progdb packageDbs installedPkgInfo = do - (hspkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb - - runProgramInvocation verbosity $ - (programInvocation hspkg - ["update", packageDbOpt $ last packageDbs]) - { progInvokeInput = Just $ showInstalledPackageInfo installedPkgInfo } - -initPackageDB :: Verbosity -> ProgramConfiguration -> FilePath -> IO () -initPackageDB verbosity conf dbPath = - runDbProgram verbosity haskellSuitePkgProgram conf - ["init", dbPath] - -packageDbOpt :: PackageDB -> String -packageDbOpt GlobalPackageDB = "--global" -packageDbOpt UserPackageDB = "--user" -packageDbOpt (SpecificPackageDB db) = "--package-db=" ++ db diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Hpc.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Hpc.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Hpc.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,141 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Hpc --- Copyright : Thomas Tuegel 2011 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides functions for locating various HPC-related paths and --- a function for adding the necessary options to a PackageDescription to --- build test suites with HPC enabled. - -module Distribution.Simple.Hpc - ( Way(..), guessWay - , htmlDir - , mixDir - , tixDir - , tixFilePath - , markupPackage - , markupTest - ) where - -import Control.Monad ( when ) -import Distribution.ModuleName ( main ) -import Distribution.PackageDescription - ( TestSuite(..) - , testModules - ) -import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
) -import Distribution.Simple.Program - ( hpcProgram - , requireProgramVersion - ) -import Distribution.Simple.Program.Hpc ( markup, union ) -import Distribution.Simple.Utils ( notice ) -import Distribution.Version ( anyVersion ) -import Distribution.Verbosity ( Verbosity() ) -import System.Directory ( createDirectoryIfMissing, doesFileExist ) -import System.FilePath - --- ------------------------------------------------------------------------- --- Haskell Program Coverage - -data Way = Vanilla | Prof | Dyn - deriving (Bounded, Enum, Eq, Read, Show) - -hpcDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Directory containing component's HPC .mix files -hpcDir distPref way = distPref "hpc" wayDir - where - wayDir = case way of - Vanilla -> "vanilla" - Prof -> "prof" - Dyn -> "dyn" - -mixDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Directory containing test suite's .mix files -mixDir distPref way name = hpcDir distPref way "mix" name - -tixDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Directory containing test suite's .tix files -tixDir distPref way name = hpcDir distPref way "tix" name - --- | Path to the .tix file containing a test suite's sum statistics. -tixFilePath :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Path to test suite's .tix file -tixFilePath distPref way name = tixDir distPref way name name <.> "tix" - -htmlDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Path to test suite's HTML markup directory -htmlDir distPref way name = hpcDir distPref way "html" name - --- | Attempt to guess the way the test suites in this package were compiled --- and linked with the library so the correct module interfaces are found. -guessWay :: LocalBuildInfo -> Way -guessWay lbi - | withProfExe lbi = Prof - | withDynExe lbi = Dyn - | otherwise = Vanilla - --- | Generate the HTML markup for a test suite. -markupTest :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^ \"dist/\" prefix - -> String -- ^ Library name - -> TestSuite - -> IO () -markupTest verbosity lbi distPref libName suite = do - tixFileExists <- doesFileExist $ tixFilePath distPref way $ testName suite - when tixFileExists $ do - -- behaviour of 'markup' depends on version, so we need *a* version - -- but no particular one - (hpc, hpcVer, _) <- requireProgramVersion verbosity - hpcProgram anyVersion (withPrograms lbi) - let htmlDir_ = htmlDir distPref way $ testName suite - markup hpc hpcVer verbosity - (tixFilePath distPref way $ testName suite) mixDirs - htmlDir_ - (testModules suite ++ [ main ]) - notice verbosity $ "Test coverage report written to " - ++ htmlDir_ "hpc_index" <.> "html" - where - way = guessWay lbi - mixDirs = map (mixDir distPref way) [ testName suite, libName ] - --- | Generate the HTML markup for all of a package's test suites. -markupPackage :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^ \"dist/\" prefix - -> String -- ^ Library name - -> [TestSuite] - -> IO () -markupPackage verbosity lbi distPref libName suites = do - let tixFiles = map (tixFilePath distPref way . 
testName) suites - tixFilesExist <- mapM doesFileExist tixFiles - when (and tixFilesExist) $ do - -- behaviour of 'markup' depends on version, so we need *a* version - -- but no particular one - (hpc, hpcVer, _) <- requireProgramVersion verbosity - hpcProgram anyVersion (withPrograms lbi) - let outFile = tixFilePath distPref way libName - htmlDir' = htmlDir distPref way libName - excluded = concatMap testModules suites ++ [ main ] - createDirectoryIfMissing True $ takeDirectory outFile - union hpc verbosity tixFiles outFile excluded - markup hpc hpcVer verbosity outFile mixDirs htmlDir' excluded - notice verbosity $ "Package coverage report written to " - ++ htmlDir' "hpc_index.html" - where - way = guessWay lbi - mixDirs = map (mixDir distPref way) $ libName : map testName suites diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/InstallDirs.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/InstallDirs.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/InstallDirs.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/InstallDirs.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,580 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE ForeignFunctionInterface #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.InstallDirs --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This manages everything to do with where files get installed (though does --- not get involved with actually doing any installation). It provides an --- 'InstallDirs' type which is a set of directories for where to install --- things. It also handles the fact that we use templates in these install --- dirs. For example most install dirs are relative to some @$prefix@ and by --- changing the prefix all other dirs still end up changed appropriately. So it --- provides a 'PathTemplate' type and functions for substituting for these --- templates. - -module Distribution.Simple.InstallDirs ( - InstallDirs(..), - InstallDirTemplates, - defaultInstallDirs, - combineInstallDirs, - absoluteInstallDirs, - CopyDest(..), - prefixRelativeInstallDirs, - substituteInstallDirTemplates, - - PathTemplate, - PathTemplateVariable(..), - PathTemplateEnv, - toPathTemplate, - fromPathTemplate, - substPathTemplate, - initialPathTemplateEnv, - platformTemplateEnv, - compilerTemplateEnv, - packageTemplateEnv, - abiTemplateEnv, - installDirsTemplateEnv, - ) where - - -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup as Semi -import Distribution.Package -import Distribution.System -import Distribution.Compiler -import Distribution.Text - -import Data.List (isPrefixOf) -import Data.Maybe (fromMaybe) -import GHC.Generics (Generic) -import System.Directory (getAppUserDataDirectory) -import System.FilePath ((), isPathSeparator, pathSeparator) -import System.FilePath (dropDrive) - -#if mingw32_HOST_OS -import Foreign -import Foreign.C -#endif - --- --------------------------------------------------------------------------- --- Installation directories - - --- | The directories where we will install files for packages. 
--- --- We have several different directories for different types of files since --- many systems have conventions whereby different types of files in a package --- are installed in different directories. This is particularly the case on --- Unix style systems. --- -data InstallDirs dir = InstallDirs { - prefix :: dir, - bindir :: dir, - libdir :: dir, - libsubdir :: dir, - dynlibdir :: dir, - libexecdir :: dir, - includedir :: dir, - datadir :: dir, - datasubdir :: dir, - docdir :: dir, - mandir :: dir, - htmldir :: dir, - haddockdir :: dir, - sysconfdir :: dir - } deriving (Eq, Read, Show, Functor, Generic) - -instance Binary dir => Binary (InstallDirs dir) - -instance (Semigroup dir, Monoid dir) => Monoid (InstallDirs dir) where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup dir => Semigroup (InstallDirs dir) where - (<>) = gmappend - -combineInstallDirs :: (a -> b -> c) - -> InstallDirs a - -> InstallDirs b - -> InstallDirs c -combineInstallDirs combine a b = InstallDirs { - prefix = prefix a `combine` prefix b, - bindir = bindir a `combine` bindir b, - libdir = libdir a `combine` libdir b, - libsubdir = libsubdir a `combine` libsubdir b, - dynlibdir = dynlibdir a `combine` dynlibdir b, - libexecdir = libexecdir a `combine` libexecdir b, - includedir = includedir a `combine` includedir b, - datadir = datadir a `combine` datadir b, - datasubdir = datasubdir a `combine` datasubdir b, - docdir = docdir a `combine` docdir b, - mandir = mandir a `combine` mandir b, - htmldir = htmldir a `combine` htmldir b, - haddockdir = haddockdir a `combine` haddockdir b, - sysconfdir = sysconfdir a `combine` sysconfdir b - } - -appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a -appendSubdirs append dirs = dirs { - libdir = libdir dirs `append` libsubdir dirs, - datadir = datadir dirs `append` datasubdir dirs, - libsubdir = error "internal error InstallDirs.libsubdir", - datasubdir = error "internal error InstallDirs.datasubdir" - } - --- | The installation directories in terms of 'PathTemplate's that contain --- variables. --- --- The defaults for most of the directories are relative to each other, in --- particular they are all relative to a single prefix. This makes it --- convenient for the user to override the default installation directory --- by only having to specify --prefix=... rather than overriding each --- individually. This is done by allowing $-style variables in the dirs. --- These are expanded by textual substitution (see 'substPathTemplate'). --- --- A few of these installation directories are split into two components, the --- dir and subdir. The full installation path is formed by combining the two --- together with @\/@. The reason for this is compatibility with other Unix --- build systems which also support @--libdir@ and @--datadir@. We would like --- users to be able to configure @--libdir=\/usr\/lib64@ for example but --- because by default we want to support installing multiple versions of --- packages and building the same package for multiple compilers we append the --- libsubdir to get: @\/usr\/lib64\/$libname\/$compiler@. --- --- An additional complication is the need to support relocatable packages on --- systems which support such things, like Windows. 
--- -type InstallDirTemplates = InstallDirs PathTemplate - --- --------------------------------------------------------------------------- --- Default installation directories - -defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates -defaultInstallDirs comp userInstall _hasLibs = do - installPrefix <- - if userInstall - then getAppUserDataDirectory "cabal" - else case buildOS of - Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir - return (windowsProgramFilesDir "Haskell") - _ -> return "/usr/local" - installLibDir <- - case buildOS of - Windows -> return "$prefix" - _ -> case comp of - LHC | userInstall -> getAppUserDataDirectory "lhc" - _ -> return ("$prefix" "lib") - return $ fmap toPathTemplate $ InstallDirs { - prefix = installPrefix, - bindir = "$prefix" "bin", - libdir = installLibDir, - libsubdir = case comp of - JHC -> "$compiler" - LHC -> "$compiler" - UHC -> "$pkgid" - _other -> "$abi" "$libname", - dynlibdir = "$libdir" case comp of - JHC -> "$compiler" - LHC -> "$compiler" - UHC -> "$pkgid" - _other -> "$abi", - libexecdir = case buildOS of - Windows -> "$prefix" "$libname" - _other -> "$prefix" "libexec", - includedir = "$libdir" "$libsubdir" "include", - datadir = case buildOS of - Windows -> "$prefix" - _other -> "$prefix" "share", - datasubdir = "$abi" "$pkgid", - docdir = "$datadir" "doc" "$abi" "$pkgid", - mandir = "$datadir" "man", - htmldir = "$docdir" "html", - haddockdir = "$htmldir", - sysconfdir = "$prefix" "etc" - } - --- --------------------------------------------------------------------------- --- Converting directories, absolute or prefix-relative - --- | Substitute the install dir templates into each other. --- --- To prevent cyclic substitutions, only some variables are allowed in --- particular dir templates. If out of scope vars are present, they are not --- substituted for. Checking for any remaining unsubstituted vars can be done --- as a subsequent operation. --- --- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we --- can replace 'prefix' with the 'PrefixVar' and get resulting --- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it --- each to check which paths are relative to the $prefix. 
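As an illustration (this sketch is not part of the patch), the textual substitution described above can be exercised directly with the functions exported by the Distribution.Simple.InstallDirs module removed in this hunk; the /usr/local prefix below is a made-up example value.

import Distribution.Simple.InstallDirs
         ( PathTemplateVariable (PrefixVar)
         , fromPathTemplate, substPathTemplate, toPathTemplate )

-- "$prefix/bin" with $prefix bound to /usr/local renders as "/usr/local/bin";
-- variables not present in the environment are simply left in place.
bindirExample :: FilePath
bindirExample =
    fromPathTemplate (substPathTemplate env (toPathTemplate "$prefix/bin"))
  where
    env = [ (PrefixVar, toPathTemplate "/usr/local") ]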
--- -substituteInstallDirTemplates :: PathTemplateEnv - -> InstallDirTemplates -> InstallDirTemplates -substituteInstallDirTemplates env dirs = dirs' - where - dirs' = InstallDirs { - -- So this specifies exactly which vars are allowed in each template - prefix = subst prefix [], - bindir = subst bindir [prefixVar], - libdir = subst libdir [prefixVar, bindirVar], - libsubdir = subst libsubdir [], - dynlibdir = subst dynlibdir [prefixVar, bindirVar, libdirVar], - libexecdir = subst libexecdir prefixBinLibVars, - includedir = subst includedir prefixBinLibVars, - datadir = subst datadir prefixBinLibVars, - datasubdir = subst datasubdir [], - docdir = subst docdir prefixBinLibDataVars, - mandir = subst mandir (prefixBinLibDataVars ++ [docdirVar]), - htmldir = subst htmldir (prefixBinLibDataVars ++ [docdirVar]), - haddockdir = subst haddockdir (prefixBinLibDataVars ++ - [docdirVar, htmldirVar]), - sysconfdir = subst sysconfdir prefixBinLibVars - } - subst dir env' = substPathTemplate (env'++env) (dir dirs) - - prefixVar = (PrefixVar, prefix dirs') - bindirVar = (BindirVar, bindir dirs') - libdirVar = (LibdirVar, libdir dirs') - libsubdirVar = (LibsubdirVar, libsubdir dirs') - datadirVar = (DatadirVar, datadir dirs') - datasubdirVar = (DatasubdirVar, datasubdir dirs') - docdirVar = (DocdirVar, docdir dirs') - htmldirVar = (HtmldirVar, htmldir dirs') - prefixBinLibVars = [prefixVar, bindirVar, libdirVar, libsubdirVar] - prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar] - --- | Convert from abstract install directories to actual absolute ones by --- substituting for all the variables in the abstract paths, to get real --- absolute path. -absoluteInstallDirs :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> CopyDest - -> Platform - -> InstallDirs PathTemplate - -> InstallDirs FilePath -absoluteInstallDirs pkgId libname compilerId copydest platform dirs = - (case copydest of - CopyTo destdir -> fmap ((destdir ) . dropDrive) - _ -> id) - . appendSubdirs () - . fmap fromPathTemplate - $ substituteInstallDirTemplates env dirs - where - env = initialPathTemplateEnv pkgId libname compilerId platform - - --- |The location prefix for the /copy/ command. -data CopyDest - = NoCopyDest - | CopyTo FilePath - deriving (Eq, Show) - --- | Check which of the paths are relative to the installation $prefix. --- --- If any of the paths are not relative, ie they are absolute paths, then it --- prevents us from making a relocatable package (also known as a \"prefix --- independent\" package). --- -prefixRelativeInstallDirs :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> Platform - -> InstallDirTemplates - -> InstallDirs (Maybe FilePath) -prefixRelativeInstallDirs pkgId libname compilerId platform dirs = - fmap relative - . appendSubdirs combinePathTemplate - $ -- substitute the path template into each other, except that we map - -- \$prefix back to $prefix. We're trying to end up with templates that - -- mention no vars except $prefix. - substituteInstallDirTemplates env dirs { - prefix = PathTemplate [Variable PrefixVar] - } - where - env = initialPathTemplateEnv pkgId libname compilerId platform - - -- If it starts with $prefix then it's relative and produce the relative - -- path by stripping off $prefix/ or $prefix - relative dir = case dir of - PathTemplate cs -> fmap (fromPathTemplate . 
PathTemplate) (relative' cs) - relative' (Variable PrefixVar : Ordinary (s:rest) : rest') - | isPathSeparator s = Just (Ordinary rest : rest') - relative' (Variable PrefixVar : rest) = Just rest - relative' _ = Nothing - --- --------------------------------------------------------------------------- --- Path templates - --- | An abstract path, possibly containing variables that need to be --- substituted for to get a real 'FilePath'. --- -newtype PathTemplate = PathTemplate [PathComponent] - deriving (Eq, Ord, Generic) - -instance Binary PathTemplate - -data PathComponent = - Ordinary FilePath - | Variable PathTemplateVariable - deriving (Eq, Ord, Generic) - -instance Binary PathComponent - -data PathTemplateVariable = - PrefixVar -- ^ The @$prefix@ path variable - | BindirVar -- ^ The @$bindir@ path variable - | LibdirVar -- ^ The @$libdir@ path variable - | LibsubdirVar -- ^ The @$libsubdir@ path variable - | DynlibdirVar -- ^ The @$dynlibdir@ path variable - | DatadirVar -- ^ The @$datadir@ path variable - | DatasubdirVar -- ^ The @$datasubdir@ path variable - | DocdirVar -- ^ The @$docdir@ path variable - | HtmldirVar -- ^ The @$htmldir@ path variable - | PkgNameVar -- ^ The @$pkg@ package name path variable - | PkgVerVar -- ^ The @$version@ package version path variable - | PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@ - | LibNameVar -- ^ The @$libname@ path variable - | CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@ - | OSVar -- ^ The operating system name, eg @windows@ or @linux@ - | ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@ - | AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag - | AbiTagVar -- ^ The optional ABI tag for the compiler - | ExecutableNameVar -- ^ The executable name; used in shell wrappers - | TestSuiteNameVar -- ^ The name of the test suite being run - | TestSuiteResultVar -- ^ The result of the test suite being run, eg - -- @pass@, @fail@, or @error@. - | BenchmarkNameVar -- ^ The name of the benchmark being run - deriving (Eq, Ord, Generic) - -instance Binary PathTemplateVariable - -type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)] - --- | Convert a 'FilePath' to a 'PathTemplate' including any template vars. --- -toPathTemplate :: FilePath -> PathTemplate -toPathTemplate = PathTemplate . 
read - --- | Convert back to a path, any remaining vars are included --- -fromPathTemplate :: PathTemplate -> FilePath -fromPathTemplate (PathTemplate template) = show template - -combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate -combinePathTemplate (PathTemplate t1) (PathTemplate t2) = - PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2) - -substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate -substPathTemplate environment (PathTemplate template) = - PathTemplate (concatMap subst template) - - where subst component@(Ordinary _) = [component] - subst component@(Variable variable) = - case lookup variable environment of - Just (PathTemplate components) -> components - Nothing -> [component] - --- | The initial environment has all the static stuff but no paths -initialPathTemplateEnv :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> Platform - -> PathTemplateEnv -initialPathTemplateEnv pkgId libname compiler platform = - packageTemplateEnv pkgId libname - ++ compilerTemplateEnv compiler - ++ platformTemplateEnv platform - ++ abiTemplateEnv compiler platform - -packageTemplateEnv :: PackageIdentifier -> UnitId -> PathTemplateEnv -packageTemplateEnv pkgId libname = - [(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)]) - ,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)]) - ,(LibNameVar, PathTemplate [Ordinary $ display libname]) - ,(PkgIdVar, PathTemplate [Ordinary $ display pkgId]) - ] - -compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv -compilerTemplateEnv compiler = - [(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)]) - ] - -platformTemplateEnv :: Platform -> PathTemplateEnv -platformTemplateEnv (Platform arch os) = - [(OSVar, PathTemplate [Ordinary $ display os]) - ,(ArchVar, PathTemplate [Ordinary $ display arch]) - ] - -abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv -abiTemplateEnv compiler (Platform arch os) = - [(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++ - '-':display (compilerInfoId compiler) ++ - case compilerInfoAbiTag compiler of - NoAbiTag -> "" - AbiTag tag -> '-':tag]) - ,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)]) - ] - -installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv -installDirsTemplateEnv dirs = - [(PrefixVar, prefix dirs) - ,(BindirVar, bindir dirs) - ,(LibdirVar, libdir dirs) - ,(LibsubdirVar, libsubdir dirs) - ,(DynlibdirVar, dynlibdir dirs) - ,(DatadirVar, datadir dirs) - ,(DatasubdirVar, datasubdir dirs) - ,(DocdirVar, docdir dirs) - ,(HtmldirVar, htmldir dirs) - ] - - --- --------------------------------------------------------------------------- --- Parsing and showing path templates: - --- The textual format is that of an ordinary Haskell String, eg --- "$prefix/bin" --- and this gets parsed to the internal representation as a sequence of path --- spans which are either strings or variables, eg: --- PathTemplate [Variable PrefixVar, Ordinary "/bin" ] - -instance Show PathTemplateVariable where - show PrefixVar = "prefix" - show LibNameVar = "libname" - show BindirVar = "bindir" - show LibdirVar = "libdir" - show LibsubdirVar = "libsubdir" - show DynlibdirVar = "dynlibdir" - show DatadirVar = "datadir" - show DatasubdirVar = "datasubdir" - show DocdirVar = "docdir" - show HtmldirVar = "htmldir" - show PkgNameVar = "pkg" - show PkgVerVar = "version" - show PkgIdVar = "pkgid" - show CompilerVar = "compiler" - show OSVar = "os" - show ArchVar = "arch" - show 
AbiTagVar = "abitag" - show AbiVar = "abi" - show ExecutableNameVar = "executablename" - show TestSuiteNameVar = "test-suite" - show TestSuiteResultVar = "result" - show BenchmarkNameVar = "benchmark" - -instance Read PathTemplateVariable where - readsPrec _ s = - take 1 - [ (var, drop (length varStr) s) - | (varStr, var) <- vars - , varStr `isPrefixOf` s ] - -- NB: order matters! Longer strings first - where vars = [("prefix", PrefixVar) - ,("bindir", BindirVar) - ,("libdir", LibdirVar) - ,("libsubdir", LibsubdirVar) - ,("dynlibdir", DynlibdirVar) - ,("datadir", DatadirVar) - ,("datasubdir", DatasubdirVar) - ,("docdir", DocdirVar) - ,("htmldir", HtmldirVar) - ,("pkgid", PkgIdVar) - ,("libname", LibNameVar) - ,("pkgkey", LibNameVar) -- backwards compatibility - ,("pkg", PkgNameVar) - ,("version", PkgVerVar) - ,("compiler", CompilerVar) - ,("os", OSVar) - ,("arch", ArchVar) - ,("abitag", AbiTagVar) - ,("abi", AbiVar) - ,("executablename", ExecutableNameVar) - ,("test-suite", TestSuiteNameVar) - ,("result", TestSuiteResultVar) - ,("benchmark", BenchmarkNameVar)] - -instance Show PathComponent where - show (Ordinary path) = path - show (Variable var) = '$':show var - showList = foldr (\x -> (shows x .)) id - -instance Read PathComponent where - -- for some reason we collapse multiple $ symbols here - readsPrec _ = lex0 - where lex0 [] = [] - lex0 ('$':'$':s') = lex0 ('$':s') - lex0 ('$':s') = case [ (Variable var, s'') - | (var, s'') <- reads s' ] of - [] -> lex1 "$" s' - ok -> ok - lex0 s' = lex1 [] s' - lex1 "" "" = [] - lex1 acc "" = [(Ordinary (reverse acc), "")] - lex1 acc ('$':'$':s) = lex1 acc ('$':s) - lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)] - lex1 acc (c:s) = lex1 (c:acc) s - readList [] = [([],"")] - readList s = [ (component:components, s'') - | (component, s') <- reads s - , (components, s'') <- readList s' ] - -instance Show PathTemplate where - show (PathTemplate template) = show (show template) - -instance Read PathTemplate where - readsPrec p s = [ (PathTemplate template, s') - | (path, s') <- readsPrec p s - , (template, "") <- reads path ] - --- --------------------------------------------------------------------------- --- Internal utilities - -getWindowsProgramFilesDir :: IO FilePath -getWindowsProgramFilesDir = do -#if mingw32_HOST_OS - m <- shGetFolderPath csidl_PROGRAM_FILES -#else - let m = Nothing -#endif - return (fromMaybe "C:\\Program Files" m) - -#if mingw32_HOST_OS -shGetFolderPath :: CInt -> IO (Maybe FilePath) -shGetFolderPath n = - allocaArray long_path_size $ \pPath -> do - r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath - if (r /= 0) - then return Nothing - else do s <- peekCWString pPath; return (Just s) - where - long_path_size = 1024 -- MAX_PATH is 260, this should be plenty - -csidl_PROGRAM_FILES :: CInt -csidl_PROGRAM_FILES = 0x0026 --- csidl_PROGRAM_FILES_COMMON :: CInt --- csidl_PROGRAM_FILES_COMMON = 0x002b - -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW" - c_SHGetFolderPath :: Ptr () - -> CInt - -> Ptr () - -> CInt - -> CWString - -> IO CInt -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Install.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Install.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Install.hs 2016-11-07 10:02:24.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Install.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,187 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Install --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into installing a built package. Performs the --- \"@.\/setup install@\" and \"@.\/setup copy@\" actions. It moves files into --- place based on the prefix argument. It does the generic bits and then calls --- compiler-specific functions to do the rest. - -module Distribution.Simple.Install ( - install, - ) where - -import Distribution.PackageDescription -import Distribution.Package (Package(..)) -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths (haddockName, haddockPref) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose - , installDirectoryContents, installOrdinaryFile, isInSearchPath - , die, info, notice, warn, matchDirFileGlob ) -import Distribution.Simple.Compiler - ( CompilerFlavor(..), compilerFlavor ) -import Distribution.Simple.Setup (CopyFlags(..), fromFlag - ,HaddockTarget(ForDevelopment)) - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.JHC as JHC -import qualified Distribution.Simple.LHC as LHC -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite - -import Control.Monad (when, unless) -import System.Directory - ( doesDirectoryExist, doesFileExist ) -import System.FilePath - ( takeFileName, takeDirectory, (), isAbsolute ) - -import Distribution.Verbosity -import Distribution.Text - ( display ) - --- |Perform the \"@.\/setup install@\" and \"@.\/setup copy@\" --- actions. Move files into place based on the prefix argument. - -install :: PackageDescription -- ^information from the .cabal file - -> LocalBuildInfo -- ^information from the configure step - -> CopyFlags -- ^flags sent to copy or install - -> IO () -install pkg_descr lbi flags = do - let distPref = fromFlag (copyDistPref flags) - verbosity = fromFlag (copyVerbosity flags) - copydest = fromFlag (copyDest flags) - installDirs@(InstallDirs { - bindir = binPref, - libdir = libPref, - dynlibdir = dynlibPref, - datadir = dataPref, - docdir = docPref, - htmldir = htmlPref, - haddockdir = interfacePref, - includedir = incPref}) - -- Using the library clbi for binPref is a hack; - -- binPref should be computed per executable - = absoluteInstallDirs pkg_descr lbi copydest - - progPrefixPref = substPathTemplate (packageId pkg_descr) lbi (progPrefix lbi) - progSuffixPref = substPathTemplate (packageId pkg_descr) lbi (progSuffix lbi) - - unless (hasLibs pkg_descr || hasExes pkg_descr) $ - die "No executables and no library found. Nothing to do." 
- docExists <- doesDirectoryExist $ haddockPref ForDevelopment distPref pkg_descr - info verbosity ("directory " ++ haddockPref ForDevelopment distPref pkg_descr ++ - " does exist: " ++ show docExists) - - installDataFiles verbosity pkg_descr dataPref - - when docExists $ do - createDirectoryIfMissingVerbose verbosity True htmlPref - installDirectoryContents verbosity - (haddockPref ForDevelopment distPref pkg_descr) htmlPref - -- setPermissionsRecursive [Read] htmlPref - -- The haddock interface file actually already got installed - -- in the recursive copy, but now we install it where we actually - -- want it to be (normally the same place). We could remove the - -- copy in htmlPref first. - let haddockInterfaceFileSrc = haddockPref ForDevelopment distPref pkg_descr - haddockName pkg_descr - haddockInterfaceFileDest = interfacePref haddockName pkg_descr - -- We only generate the haddock interface file for libs, So if the - -- package consists only of executables there will not be one: - exists <- doesFileExist haddockInterfaceFileSrc - when exists $ do - createDirectoryIfMissingVerbose verbosity True interfacePref - installOrdinaryFile verbosity haddockInterfaceFileSrc - haddockInterfaceFileDest - - let lfiles = licenseFiles pkg_descr - unless (null lfiles) $ do - createDirectoryIfMissingVerbose verbosity True docPref - sequence_ - [ installOrdinaryFile verbosity lfile (docPref takeFileName lfile) - | lfile <- lfiles ] - - let buildPref = buildDir lbi - when (hasLibs pkg_descr) $ - notice verbosity ("Installing library in " ++ libPref) - when (hasExes pkg_descr) $ do - notice verbosity ("Installing executable(s) in " ++ binPref) - inPath <- isInSearchPath binPref - when (not inPath) $ - warn verbosity ("The directory " ++ binPref - ++ " is not in the system search path.") - - -- install include files for all compilers - they may be needed to compile - -- haskell files (using the CPP extension) - -- - when (hasLibs pkg_descr) $ installIncludeFiles verbosity pkg_descr incPref - - withLibLBI pkg_descr lbi $ - case compilerFlavor (compiler lbi) of - GHC -> GHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr - GHCJS -> GHCJS.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr - LHC -> LHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr - JHC -> JHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr - UHC -> UHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr - HaskellSuite _ -> HaskellSuite.installLib - verbosity lbi libPref dynlibPref buildPref pkg_descr - _ -> \_ _ -> die $ "installing with " - ++ display (compilerFlavor (compiler lbi)) - ++ " is not implemented" - - withExe pkg_descr $ - case compilerFlavor (compiler lbi) of - GHC -> GHC.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr - GHCJS -> GHCJS.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr - LHC -> LHC.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr - JHC -> JHC.installExe verbosity binPref buildPref (progPrefixPref, progSuffixPref) pkg_descr - UHC -> \_ -> return () - HaskellSuite {} -> \_ -> return () - _ -> \_ -> die $ "installing with " - ++ display (compilerFlavor (compiler lbi)) - ++ " is not implemented" - -- register step should be performed by caller. 
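For orientation (an editor's sketch rather than anything in the patch): the install code above is normally reached through a build-type Simple Setup.hs, where "./Setup install" and "./Setup copy --destdir=DIR" both end up in the install function, the latter with CopyTo DIR as the copy destination. A minimal such Setup.hs:

import Distribution.Simple (defaultMain)

-- Setup.hs for a package with build-type: Simple; defaultMain dispatches the
-- configure/build/copy/install commands, including the code shown above.
main :: IO ()
main = defaultMain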
- --- | Install the files listed in data-files --- -installDataFiles :: Verbosity -> PackageDescription -> FilePath -> IO () -installDataFiles verbosity pkg_descr destDataDir = - flip mapM_ (dataFiles pkg_descr) $ \ file -> do - let srcDataDir = dataDir pkg_descr - files <- matchDirFileGlob srcDataDir file - let dir = takeDirectory file - createDirectoryIfMissingVerbose verbosity True (destDataDir dir) - sequence_ [ installOrdinaryFile verbosity (srcDataDir file') - (destDataDir file') - | file' <- files ] - --- | Install the files listed in install-includes --- -installIncludeFiles :: Verbosity -> PackageDescription -> FilePath -> IO () -installIncludeFiles verbosity - PackageDescription { library = Just lib } destIncludeDir = do - - incs <- mapM (findInc relincdirs) (installIncludes lbi) - sequence_ - [ do createDirectoryIfMissingVerbose verbosity True destDir - installOrdinaryFile verbosity srcFile destFile - | (relFile, srcFile) <- incs - , let destFile = destIncludeDir relFile - destDir = takeDirectory destFile ] - where - relincdirs = "." : filter (not.isAbsolute) (includeDirs lbi) - lbi = libBuildInfo lib - - findInc [] file = die ("can't find include file " ++ file) - findInc (dir:dirs) file = do - let path = dir file - exists <- doesFileExist path - if exists then return (file, path) else findInc dirs file -installIncludeFiles _ _ _ = die "installIncludeFiles: Can't happen?" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/JHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/JHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/JHC.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/JHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,186 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.JHC --- Copyright : Isaac Jones 2003-2006 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains most of the JHC-specific code for configuring, building --- and installing packages. 
- -module Distribution.Simple.JHC ( - configure, getInstalledPackages, - buildLib, buildExe, - installLib, installExe - ) where - -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.InstalledPackageInfo -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Language.Haskell.Extension -import Distribution.Simple.Program -import Distribution.Version -import Distribution.Package -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Text - -import System.FilePath ( () ) -import Distribution.Compat.ReadP - ( readP_to_S, string, skipSpaces ) -import Distribution.System ( Platform ) - -import Data.List ( nub ) -import Data.Char ( isSpace ) -import qualified Data.Map as M ( empty ) -import Data.Maybe ( fromMaybe ) - -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity hcPath _hcPkgPath conf = do - - (jhcProg, _, conf') <- requireProgramVersion verbosity - jhcProgram (orLaterVersion (Version [0,7,2] [])) - (userMaybeSpecifyPath "jhc" hcPath conf) - - let Just version = programVersion jhcProg - comp = Compiler { - compilerId = CompilerId JHC version, - compilerAbiTag = NoAbiTag, - compilerCompat = [], - compilerLanguages = jhcLanguages, - compilerExtensions = jhcLanguageExtensions, - compilerProperties = M.empty - } - compPlatform = Nothing - return (comp, compPlatform, conf') - -jhcLanguages :: [(Language, Flag)] -jhcLanguages = [(Haskell98, "")] - --- | The flags for the supported extensions -jhcLanguageExtensions :: [(Extension, Flag)] -jhcLanguageExtensions = - [(EnableExtension TypeSynonymInstances , "") - ,(DisableExtension TypeSynonymInstances , "") - ,(EnableExtension ForeignFunctionInterface , "") - ,(DisableExtension ForeignFunctionInterface , "") - ,(EnableExtension ImplicitPrelude , "") -- Wrong - ,(DisableExtension ImplicitPrelude , "--noprelude") - ,(EnableExtension CPP , "-fcpp") - ,(DisableExtension CPP , "-fno-cpp") - ] - -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity _packageDBs conf = do - -- jhc --list-libraries lists all available libraries. - -- How shall I find out, whether they are global or local - -- without checking all files and locations? - str <- rawSystemProgramStdoutConf verbosity jhcProgram conf ["--list-libraries"] - let pCheck :: [(a, String)] -> [a] - pCheck rs = [ r | (r,s) <- rs, all isSpace s ] - let parseLine ln = - pCheck (readP_to_S - (skipSpaces >> string "Name:" >> skipSpaces >> parse) ln) - return $ - PackageIndex.fromList $ - map (\p -> emptyInstalledPackageInfo { - InstalledPackageInfo.installedUnitId = mkLegacyUnitId p, - InstalledPackageInfo.sourcePackageId = p - }) $ - concatMap parseLine $ - lines str - --- ----------------------------------------------------------------------------- --- Building - --- | Building a package for JHC. --- Currently C source files are not supported. 
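The "Name:" parsing in getInstalledPackages above can be checked in isolation. A self-contained sketch (editor's illustration with a fabricated line of jhc --list-libraries output; the imports mirror those of the removed module):

import Data.Char (isSpace)
import Distribution.Compat.ReadP (readP_to_S, skipSpaces, string)
import Distribution.Package (PackageIdentifier)
import Distribution.Text (parse)

-- Accept only parses that consume the whole line (modulo trailing spaces),
-- exactly as pCheck does in getInstalledPackages above.
parseNameLine :: String -> [PackageIdentifier]
parseNameLine ln = [ r | (r, s) <- readP_to_S p ln, all isSpace s ]
  where p = skipSpaces >> string "Name:" >> skipSpaces >> parse

-- parseNameLine "Name: containers-0.5.6.2" should yield the single package
-- id containers-0.5.6.2; a malformed line yields [].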
-buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi) - let libBi = libBuildInfo lib - let args = constructJHCCmdLine lbi libBi clbi (buildDir lbi) verbosity - let pkgid = display (packageId pkg_descr) - pfile = buildDir lbi "jhc-pkg.conf" - hlfile= buildDir lbi (pkgid ++ ".hl") - writeFileAtomic pfile . BS.Char8.pack $ jhcPkgConf pkg_descr - rawSystemProgram verbosity jhcProg $ - ["--build-hl="++pfile, "-o", hlfile] ++ - args ++ map display (libModules lib) - --- | Building an executable for JHC. --- Currently C source files are not supported. -buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe verbosity _pkg_descr lbi exe clbi = do - let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi) - let exeBi = buildInfo exe - let out = buildDir lbi exeName exe - let args = constructJHCCmdLine lbi exeBi clbi (buildDir lbi) verbosity - rawSystemProgram verbosity jhcProg (["-o",out] ++ args ++ [modulePath exe]) - -constructJHCCmdLine :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> Verbosity -> [String] -constructJHCCmdLine lbi bi clbi _odir verbosity = - (if verbosity >= deafening then ["-v"] else []) - ++ hcOptions JHC bi - ++ languageToFlags (compiler lbi) (defaultLanguage bi) - ++ extensionsToFlags (compiler lbi) (usedExtensions bi) - ++ ["--noauto","-i-"] - ++ concat [["-i", l] | l <- nub (hsSourceDirs bi)] - ++ ["-i", autogenModulesDir lbi] - ++ ["-optc" ++ opt | opt <- PD.ccOptions bi] - -- It would be better if JHC would accept package names with versions, - -- but JHC-0.7.2 doesn't accept this. - -- Thus, we have to strip the version with 'pkgName'. - ++ (concat [ ["-p", display (pkgName pkgid)] - | (_, pkgid) <- componentPackageDeps clbi ]) - -jhcPkgConf :: PackageDescription -> String -jhcPkgConf pd = - let sline name sel = name ++ ": "++sel pd - lib = fromMaybe (error "no library available") . library - comma = intercalate "," . map display - in unlines [sline "name" (display . pkgName . packageId) - ,sline "version" (display . pkgVersion . packageId) - ,sline "exposed-modules" (comma . PD.exposedModules . lib) - ,sline "hidden-modules" (comma . otherModules . libBuildInfo . 
lib) - ] - -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath - -> FilePath - -> FilePath - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verb _lbi dest _dyn_dest build_dir pkg_descr _lib _clbi = do - let p = display (packageId pkg_descr)++".hl" - createDirectoryIfMissingVerbose verb True dest - installOrdinaryFile verb (build_dir p) (dest p) - -installExe :: Verbosity -> FilePath -> FilePath -> (FilePath,FilePath) -> PackageDescription -> Executable -> IO () -installExe verb dest build_dir (progprefix,progsuffix) _ exe = do - let exe_name = exeName exe - src = exe_name exeExtension - out = (progprefix ++ exe_name ++ progsuffix) exeExtension - createDirectoryIfMissingVerbose verb True dest - installExecutableFile verb (build_dir src) (dest out) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/LHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/LHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/LHC.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/LHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,770 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.LHC --- Copyright : Isaac Jones 2003-2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is a fairly large module. It contains most of the GHC-specific code for --- configuring, building and installing packages. It also exports a function --- for finding out what packages are already installed. Configuring involves --- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions --- this version of ghc supports and returning a 'Compiler' value. --- --- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out --- what packages are installed. --- --- Building is somewhat complex as there is quite a bit of information to take --- into account. We have to build libs and programs, possibly for profiling and --- shared libs. We have to support building libraries that will be usable by --- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files --- using ghc. Linking, especially for @split-objs@ is remarkably complex, --- partly because there tend to be 1,000's of @.o@ files and this can often be --- more than we can pass to the @ld@ or @ar@ programs in one go. --- --- Installing for libs and exes involves finding the right files and copying --- them to the right places. One of the more tricky things about this module is --- remembering the layout of files in the build directory (which is not --- explicitly documented) and thus what search dirs are used for various kinds --- of files. 
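The linking caveat in the module header above (object lists that overflow a single ar or ld command line) is handled further down in buildLib with the xargs helper from Distribution.Simple.Utils and a maxCommandLineSize of 30 * 1024 bytes. The following is only a rough sketch of that batching idea, not the helper itself:

-- Split a flat argument list into batches whose rendered length stays under
-- a byte limit, so each batch can go to one ar/ld invocation.
chunkArgs :: Int -> [String] -> [[String]]
chunkArgs _     []   = []
chunkArgs limit args = batch : chunkArgs limit rest
  where
    (batch, rest) = go 0 args
    go _   []     = ([], [])
    go len (a:as)
      | len > 0 && len + length a + 1 > limit = ([], a : as)  -- start a new batch
      | otherwise = let (bs, cs) = go (len + length a + 1) as
                    in (a : bs, cs)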
- -module Distribution.Simple.LHC ( - configure, getInstalledPackages, - buildLib, buildExe, - installLib, installExe, - registerPackage, - hcPkgInfo, - ghcOptions, - ghcVerbosityOptions - ) where - -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.InstalledPackageInfo -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.Simple.PackageIndex -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.Simple.Program -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import Distribution.Simple.Compiler -import Distribution.Version -import Distribution.Verbosity -import Distribution.Text -import Distribution.Compat.Exception -import Distribution.System -import Language.Haskell.Extension - -import Control.Monad ( unless, when ) -import Data.Monoid as Mon -import Data.List -import qualified Data.Map as M ( empty ) -import Data.Maybe ( catMaybes ) -import System.Directory ( removeFile, renameFile, - getDirectoryContents, doesFileExist, - getTemporaryDirectory ) -import System.FilePath ( (), (<.>), takeExtension, - takeDirectory, replaceExtension ) -import System.IO (hClose, hPutStrLn) - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity hcPath hcPkgPath conf = do - - (lhcProg, lhcVersion, conf') <- - requireProgramVersion verbosity lhcProgram - (orLaterVersion (Version [0,7] [])) - (userMaybeSpecifyPath "lhc" hcPath conf) - - (lhcPkgProg, lhcPkgVersion, conf'') <- - requireProgramVersion verbosity lhcPkgProgram - (orLaterVersion (Version [0,7] [])) - (userMaybeSpecifyPath "lhc-pkg" hcPkgPath conf') - - when (lhcVersion /= lhcPkgVersion) $ die $ - "Version mismatch between lhc and lhc-pkg: " - ++ programPath lhcProg ++ " is version " ++ display lhcVersion ++ " " - ++ programPath lhcPkgProg ++ " is version " ++ display lhcPkgVersion - - languages <- getLanguages verbosity lhcProg - extensions <- getExtensions verbosity lhcProg - - let comp = Compiler { - compilerId = CompilerId LHC lhcVersion, - compilerAbiTag = NoAbiTag, - compilerCompat = [], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = M.empty - } - conf''' = configureToolchain lhcProg conf'' -- configure gcc and ld - compPlatform = Nothing - return (comp, compPlatform, conf''') - --- | Adjust the way we find and configure gcc and ld --- -configureToolchain :: ConfiguredProgram -> ProgramConfiguration - -> ProgramConfiguration -configureToolchain lhcProg = - addKnownProgram gccProgram { - programFindLocation = findProg gccProgram (baseDir "gcc.exe"), - programPostConf = configureGcc - } - . 
addKnownProgram ldProgram { - programFindLocation = findProg ldProgram (libDir "ld.exe"), - programPostConf = configureLd - } - where - compilerDir = takeDirectory (programPath lhcProg) - baseDir = takeDirectory compilerDir - libDir = baseDir "gcc-lib" - includeDir = baseDir "include" "mingw" - isWindows = case buildOS of Windows -> True; _ -> False - - -- on Windows finding and configuring ghc's gcc and ld is a bit special - findProg :: Program -> FilePath - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) - findProg prog location | isWindows = \verbosity searchpath -> do - exists <- doesFileExist location - if exists then return (Just (location, [])) - else do warn verbosity ("Couldn't find " ++ programName prog ++ " where I expected it. Trying the search path.") - programFindLocation prog verbosity searchpath - | otherwise = programFindLocation prog - - configureGcc :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureGcc - | isWindows = \_ gccProg -> case programLocation gccProg of - -- if it's found on system then it means we're using the result - -- of programFindLocation above rather than a user-supplied path - -- that means we should add this extra flag to tell ghc's gcc - -- where it lives and thus where gcc can find its various files: - FoundOnSystem {} -> return gccProg { - programDefaultArgs = ["-B" ++ libDir, - "-I" ++ includeDir] - } - UserSpecified {} -> return gccProg - | otherwise = \_ gccProg -> return gccProg - - -- we need to find out if ld supports the -x flag - configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureLd verbosity ldProg = do - tempDir <- getTemporaryDirectory - ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> - withTempFile tempDir ".o" $ \testofile testohnd -> do - hPutStrLn testchnd "int foo() { return 0; }" - hClose testchnd; hClose testohnd - rawSystemProgram verbosity lhcProg ["-c", testcfile, - "-o", testofile] - withTempFile tempDir ".o" $ \testofile' testohnd' -> - do - hClose testohnd' - _ <- rawSystemProgramStdout verbosity ldProg - ["-x", "-r", testofile, "-o", testofile'] - return True - `catchIO` (\_ -> return False) - `catchExit` (\_ -> return False) - if ldx - then return ldProg { programDefaultArgs = ["-x"] } - else return ldProg - -getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Flag)] -getLanguages _ _ = return [(Haskell98, "")] ---FIXME: does lhc support -XHaskell98 flag? from what version? - -getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Flag)] -getExtensions verbosity lhcProg = do - exts <- rawSystemStdout verbosity (programPath lhcProg) - ["--supported-languages"] - -- GHC has the annoying habit of inverting some of the extensions - -- so we have to try parsing ("No" ++ ghcExtensionName) first - let readExtension str = do - ext <- simpleParse ("No" ++ str) - case ext of - UnknownExtension _ -> simpleParse str - _ -> return ext - return $ [ (ext, "-X" ++ display ext) - | Just ext <- map readExtension (lines exts) ] - -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity packagedbs conf = do - checkPackageDbStack packagedbs - pkgss <- getInstalledPackages' lhcPkg verbosity packagedbs conf - let indexes = [ PackageIndex.fromList (map (substTopDir topDir) pkgs) - | (_, pkgs) <- pkgss ] - return $! (Mon.mconcat indexes) - - where - -- On Windows, various fields have $topdir/foo rather than full - -- paths. 
We need to substitute the right value in so that when - -- we, for example, call gcc, we have proper paths to give it - Just ghcProg = lookupProgram lhcProgram conf - Just lhcPkg = lookupProgram lhcPkgProgram conf - compilerDir = takeDirectory (programPath ghcProg) - topDir = takeDirectory compilerDir - -checkPackageDbStack :: PackageDBStack -> IO () -checkPackageDbStack (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStack _ = - die $ "GHC.getInstalledPackages: the global package db must be " - ++ "specified first and cannot be specified multiple times" - --- | Get the packages from specific PackageDBs, not cumulative. --- -getInstalledPackages' :: ConfiguredProgram -> Verbosity - -> [PackageDB] -> ProgramConfiguration - -> IO [(PackageDB, [InstalledPackageInfo])] -getInstalledPackages' lhcPkg verbosity packagedbs conf - = - sequence - [ do str <- rawSystemProgramStdoutConf verbosity lhcPkgProgram conf - ["dump", packageDbGhcPkgFlag packagedb] - `catchExit` \_ -> die $ "ghc-pkg dump failed" - case parsePackages str of - Left ok -> return (packagedb, ok) - _ -> die "failed to parse output of 'ghc-pkg dump'" - | packagedb <- packagedbs ] - - where - parsePackages str = - let parsed = map parseInstalledPackageInfo (splitPkgs str) - in case [ msg | ParseFailed msg <- parsed ] of - [] -> Left [ pkg | ParseOk _ pkg <- parsed ] - msgs -> Right msgs - - splitPkgs :: String -> [String] - splitPkgs = map unlines . splitWith ("---" ==) . lines - where - splitWith :: (a -> Bool) -> [a] -> [[a]] - splitWith p xs = ys : case zs of - [] -> [] - _:ws -> splitWith p ws - where (ys,zs) = break p xs - - packageDbGhcPkgFlag GlobalPackageDB = "--global" - packageDbGhcPkgFlag UserPackageDB = "--user" - packageDbGhcPkgFlag (SpecificPackageDB path) = "--" ++ packageDbFlag ++ "=" ++ path - - packageDbFlag - | programVersion lhcPkg < Just (Version [7,5] []) - = "package-conf" - | otherwise - = "package-db" - - -substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo -substTopDir topDir ipo - = ipo { - InstalledPackageInfo.importDirs - = map f (InstalledPackageInfo.importDirs ipo), - InstalledPackageInfo.libraryDirs - = map f (InstalledPackageInfo.libraryDirs ipo), - InstalledPackageInfo.includeDirs - = map f (InstalledPackageInfo.includeDirs ipo), - InstalledPackageInfo.frameworkDirs - = map f (InstalledPackageInfo.frameworkDirs ipo), - InstalledPackageInfo.haddockInterfaces - = map f (InstalledPackageInfo.haddockInterfaces ipo), - InstalledPackageInfo.haddockHTMLs - = map f (InstalledPackageInfo.haddockHTMLs ipo) - } - where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest - f x = x - --- ----------------------------------------------------------------------------- --- Building - --- | Build a library with LHC. 
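The stanza splitting used in getInstalledPackages' above is easy to sanity-check on its own. A self-contained sketch (editor's illustration; splitWith is copied from the hunk above and the dump output is fabricated):

-- splitWith cuts a list at every element matching the predicate and drops
-- the separators, as in the "---" handling above.
splitWith :: (a -> Bool) -> [a] -> [[a]]
splitWith p xs = ys : case zs of
                        []   -> []
                        _:ws -> splitWith p ws
  where (ys, zs) = break p xs

-- Two fabricated package stanzas in `ghc-pkg dump` style:
demo :: [String]
demo = map unlines (splitWith ("---" ==) (lines dumpOutput))
  where dumpOutput = "name: foo\nversion: 1.0\n---\nname: bar\nversion: 2.0"
-- demo == ["name: foo\nversion: 1.0\n", "name: bar\nversion: 2.0\n"]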
--- -buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - let libName = componentUnitId clbi - pref = buildDir lbi - pkgid = packageId pkg_descr - runGhcProg = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) - ifVanillaLib forceVanilla = when (forceVanilla || withVanillaLib lbi) - ifProfLib = when (withProfLib lbi) - ifSharedLib = when (withSharedLib lbi) - ifGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi) - - libBi <- hackThreadedFlag verbosity - (compiler lbi) (withProfLib lbi) (libBuildInfo lib) - - let libTargetDir = pref - forceVanillaLib = EnableExtension TemplateHaskell `elem` allExtensions libBi - -- TH always needs vanilla libs, even when building for profiling - - createDirectoryIfMissingVerbose verbosity True libTargetDir - -- TODO: do we need to put hs-boot files into place for mutually recursive modules? - let ghcArgs = - ["-package-name", display pkgid ] - ++ constructGHCCmdLine lbi libBi clbi libTargetDir verbosity - ++ map display (libModules lib) - lhcWrap x = ["--build-library", "--ghc-opts=" ++ unwords x] - ghcArgsProf = ghcArgs - ++ ["-prof", - "-hisuf", "p_hi", - "-osuf", "p_o" - ] - ++ hcProfOptions GHC libBi - ghcArgsShared = ghcArgs - ++ ["-dynamic", - "-hisuf", "dyn_hi", - "-osuf", "dyn_o", "-fPIC" - ] - ++ hcSharedOptions GHC libBi - unless (null (libModules lib)) $ - do ifVanillaLib forceVanillaLib (runGhcProg $ lhcWrap ghcArgs) - ifProfLib (runGhcProg $ lhcWrap ghcArgsProf) - ifSharedLib (runGhcProg $ lhcWrap ghcArgsShared) - - -- build any C sources - unless (null (cSources libBi)) $ do - info verbosity "Building C Sources..." - sequence_ [do let (odir,args) = constructCcCmdLine lbi libBi clbi pref - filename verbosity - createDirectoryIfMissingVerbose verbosity True odir - runGhcProg args - ifSharedLib (runGhcProg (args ++ ["-fPIC", "-osuf dyn_o"])) - | filename <- cSources libBi] - - -- link: - info verbosity "Linking..." 
- let cObjs = map (`replaceExtension` objExtension) (cSources libBi) - cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) (cSources libBi) - cid = compilerId (compiler lbi) - vanillaLibFilePath = libTargetDir mkLibName libName - profileLibFilePath = libTargetDir mkProfLibName libName - sharedLibFilePath = libTargetDir mkSharedLibName cid libName - ghciLibFilePath = libTargetDir mkGHCiLibName libName - - stubObjs <- fmap catMaybes $ sequence - [ findFileWithExtension [objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | x <- libModules lib ] - stubProfObjs <- fmap catMaybes $ sequence - [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | x <- libModules lib ] - stubSharedObjs <- fmap catMaybes $ sequence - [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | x <- libModules lib ] - - hObjs <- getHaskellObjects lib lbi - pref objExtension True - hProfObjs <- - if (withProfLib lbi) - then getHaskellObjects lib lbi - pref ("p_" ++ objExtension) True - else return [] - hSharedObjs <- - if (withSharedLib lbi) - then getHaskellObjects lib lbi - pref ("dyn_" ++ objExtension) False - else return [] - - unless (null hObjs && null cObjs && null stubObjs) $ do - -- first remove library files if they exists - sequence_ - [ removeFile libFilePath `catchIO` \_ -> return () - | libFilePath <- [vanillaLibFilePath, profileLibFilePath - ,sharedLibFilePath, ghciLibFilePath] ] - - let arVerbosity | verbosity >= deafening = "v" - | verbosity >= normal = "" - | otherwise = "c" - arArgs = ["q"++ arVerbosity] - ++ [vanillaLibFilePath] - arObjArgs = - hObjs - ++ map (pref ) cObjs - ++ stubObjs - arProfArgs = ["q"++ arVerbosity] - ++ [profileLibFilePath] - arProfObjArgs = - hProfObjs - ++ map (pref ) cObjs - ++ stubProfObjs - ldArgs = ["-r"] - ++ ["-o", ghciLibFilePath <.> "tmp"] - ldObjArgs = - hObjs - ++ map (pref ) cObjs - ++ stubObjs - ghcSharedObjArgs = - hSharedObjs - ++ map (pref ) cSharedObjs - ++ stubSharedObjs - -- After the relocation lib is created we invoke ghc -shared - -- with the dependencies spelled out as -package arguments - -- and ghc invokes the linker with the proper library paths - ghcSharedLinkArgs = - [ "-no-auto-link-packages", - "-shared", - "-dynamic", - "-o", sharedLibFilePath ] - ++ ghcSharedObjArgs - ++ ["-package-name", display pkgid ] - ++ ghcPackageFlags lbi clbi - ++ ["-l"++extraLib | extraLib <- extraLibs libBi] - ++ ["-L"++extraLibDir | extraLibDir <- extraLibDirs libBi] - - runLd ldLibName args = do - exists <- doesFileExist ldLibName - -- This method is called iteratively by xargs. The - -- output goes to .tmp, and any existing file - -- named is included when linking. The - -- output is renamed to . - rawSystemProgramConf verbosity ldProgram (withPrograms lbi) - (args ++ if exists then [ldLibName] else []) - renameFile (ldLibName <.> "tmp") ldLibName - - runAr = rawSystemProgramConf verbosity arProgram (withPrograms lbi) - - --TODO: discover this at configure time or runtime on Unix - -- The value is 32k on Windows and POSIX specifies a minimum of 4k - -- but all sensible Unixes use more than 4k. 
- -- we could use getSysVar ArgumentLimit but that's in the Unix lib - maxCommandLineSize = 30 * 1024 - - ifVanillaLib False $ xargs maxCommandLineSize - runAr arArgs arObjArgs - - ifProfLib $ xargs maxCommandLineSize - runAr arProfArgs arProfObjArgs - - ifGHCiLib $ xargs maxCommandLineSize - (runLd ghciLibFilePath) ldArgs ldObjArgs - - ifSharedLib $ runGhcProg ghcSharedLinkArgs - - --- | Build an executable with LHC. --- -buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe verbosity _pkg_descr lbi - exe@Executable { exeName = exeName', modulePath = modPath } clbi = do - let pref = buildDir lbi - runGhcProg = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) - - exeBi <- hackThreadedFlag verbosity - (compiler lbi) (withProfExe lbi) (buildInfo exe) - - -- exeNameReal, the name that GHC really uses (with .exe on Windows) - let exeNameReal = exeName' <.> - (if null $ takeExtension exeName' then exeExtension else "") - - let targetDir = pref exeName' - let exeDir = targetDir (exeName' ++ "-tmp") - createDirectoryIfMissingVerbose verbosity True targetDir - createDirectoryIfMissingVerbose verbosity True exeDir - -- TODO: do we need to put hs-boot files into place for mutually recursive modules? - -- FIX: what about exeName.hi-boot? - - -- build executables - unless (null (cSources exeBi)) $ do - info verbosity "Building C Sources." - sequence_ [do let (odir,args) = constructCcCmdLine lbi exeBi clbi - exeDir filename verbosity - createDirectoryIfMissingVerbose verbosity True odir - runGhcProg args - | filename <- cSources exeBi] - - srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath - - let cObjs = map (`replaceExtension` objExtension) (cSources exeBi) - let lhcWrap x = ("--ghc-opts\"":x) ++ ["\""] - let binArgs linkExe profExe = - (if linkExe - then ["-o", targetDir exeNameReal] - else ["-c"]) - ++ constructGHCCmdLine lbi exeBi clbi exeDir verbosity - ++ [exeDir x | x <- cObjs] - ++ [srcMainFile] - ++ ["-optl" ++ opt | opt <- PD.ldOptions exeBi] - ++ ["-l"++lib | lib <- extraLibs exeBi] - ++ ["-L"++libDir | libDir <- extraLibDirs exeBi] - ++ concat [["-framework", f] | f <- PD.frameworks exeBi] - ++ if profExe - then ["-prof", - "-hisuf", "p_hi", - "-osuf", "p_o" - ] ++ hcProfOptions GHC exeBi - else [] - - -- For building exe's for profiling that use TH we actually - -- have to build twice, once without profiling and the again - -- with profiling. This is because the code that TH needs to - -- run at compile time needs to be the vanilla ABI so it can - -- be loaded up and run by the compiler. - when (withProfExe lbi && EnableExtension TemplateHaskell `elem` allExtensions exeBi) - (runGhcProg $ lhcWrap (binArgs False False)) - - runGhcProg (binArgs True (withProfExe lbi)) - --- | Filter the "-threaded" flag when profiling as it does not --- work with ghc-6.8 and older. -hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo -hackThreadedFlag verbosity comp prof bi - | not mustFilterThreaded = return bi - | otherwise = do - warn verbosity $ "The ghc flag '-threaded' is not compatible with " - ++ "profiling in ghc-6.8 and older. It will be disabled." 
- return bi { options = filterHcOptions (/= "-threaded") (options bi) } - where - mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] [] - && "-threaded" `elem` hcOptions GHC bi - filterHcOptions p hcoptss = - [ (hc, if hc == GHC then filter p opts else opts) - | (hc, opts) <- hcoptss ] - --- when using -split-objs, we need to search for object files in the --- Module_split directory for each module. -getHaskellObjects :: Library -> LocalBuildInfo - -> FilePath -> String -> Bool -> IO [FilePath] -getHaskellObjects lib lbi pref wanted_obj_ext allow_split_objs - | splitObjs lbi && allow_split_objs = do - let dirs = [ pref (ModuleName.toFilePath x ++ "_split") - | x <- libModules lib ] - objss <- mapM getDirectoryContents dirs - let objs = [ dir obj - | (objs',dir) <- zip objss dirs, obj <- objs', - let obj_ext = takeExtension obj, - '.':wanted_obj_ext == obj_ext ] - return objs - | otherwise = - return [ pref ModuleName.toFilePath x <.> wanted_obj_ext - | x <- libModules lib ] - - -constructGHCCmdLine - :: LocalBuildInfo - -> BuildInfo - -> ComponentLocalBuildInfo - -> FilePath - -> Verbosity - -> [String] -constructGHCCmdLine lbi bi clbi odir verbosity = - ["--make"] - ++ ghcVerbosityOptions verbosity - -- Unsupported extensions have already been checked by configure - ++ ghcOptions lbi bi clbi odir - -ghcVerbosityOptions :: Verbosity -> [String] -ghcVerbosityOptions verbosity - | verbosity >= deafening = ["-v"] - | verbosity >= normal = [] - | otherwise = ["-w", "-v0"] - -ghcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> [String] -ghcOptions lbi bi clbi odir - = ["-hide-all-packages"] - ++ ghcPackageDbOptions lbi - ++ (if splitObjs lbi then ["-split-objs"] else []) - ++ ["-i"] - ++ ["-i" ++ odir] - ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)] - ++ ["-i" ++ autogenModulesDir lbi] - ++ ["-I" ++ autogenModulesDir lbi] - ++ ["-I" ++ odir] - ++ ["-I" ++ dir | dir <- PD.includeDirs bi] - ++ ["-optP" ++ opt | opt <- cppOptions bi] - ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi cppHeaderName) ] - ++ [ "-#include \"" ++ inc ++ "\"" | inc <- PD.includes bi ] - ++ [ "-odir", odir, "-hidir", odir ] - ++ (if compilerVersion c >= Version [6,8] [] - then ["-stubdir", odir] else []) - ++ ghcPackageFlags lbi clbi - ++ (case withOptimization lbi of - NoOptimisation -> [] - NormalOptimisation -> ["-O"] - MaximumOptimisation -> ["-O2"]) - ++ hcOptions GHC bi - ++ languageToFlags c (defaultLanguage bi) - ++ extensionsToFlags c (usedExtensions bi) - where c = compiler lbi - -ghcPackageFlags :: LocalBuildInfo -> ComponentLocalBuildInfo -> [String] -ghcPackageFlags lbi clbi - | ghcVer >= Version [6,11] [] - = concat [ ["-package-id", display ipkgid] - | (ipkgid, _) <- componentPackageDeps clbi ] - - | otherwise = concat [ ["-package", display pkgid] - | (_, pkgid) <- componentPackageDeps clbi ] - where - ghcVer = compilerVersion (compiler lbi) - -ghcPackageDbOptions :: LocalBuildInfo -> [String] -ghcPackageDbOptions lbi = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs - (GlobalPackageDB:dbs) -> ("-no-user-" ++ packageDbFlag) - : concatMap specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = [ '-':packageDbFlag, db ] - specific _ = ierror - ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) - - dbstack = withPackageDB lbi - packageDbFlag - | compilerVersion (compiler lbi) < Version [7,5] [] - = "package-conf" - | otherwise - = "package-db" - -constructCcCmdLine :: 
LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath -> Verbosity -> (FilePath,[String]) -constructCcCmdLine lbi bi clbi pref filename verbosity - = let odir | compilerVersion (compiler lbi) >= Version [6,4,1] [] = pref - | otherwise = pref takeDirectory filename - -- ghc 6.4.1 fixed a bug in -odir handling - -- for C compilations. - in - (odir, - ghcCcOptions lbi bi clbi odir - ++ (if verbosity >= deafening then ["-v"] else []) - ++ ["-c",filename]) - - -ghcCcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> [String] -ghcCcOptions lbi bi clbi odir - = ["-I" ++ dir | dir <- PD.includeDirs bi] - ++ ghcPackageDbOptions lbi - ++ ghcPackageFlags lbi clbi - ++ ["-optc" ++ opt | opt <- PD.ccOptions bi] - ++ (case withOptimization lbi of - NoOptimisation -> [] - _ -> ["-optc-O2"]) - ++ ["-odir", odir] - -mkGHCiLibName :: UnitId -> String -mkGHCiLibName lib = getHSLibraryName lib <.> "o" - --- ----------------------------------------------------------------------------- --- Installing - --- |Install executables for GHC. -installExe :: Verbosity - -> LocalBuildInfo - -> InstallDirs FilePath -- ^Where to copy the files to - -> FilePath -- ^Build location - -> (FilePath, FilePath) -- ^Executable (prefix,suffix) - -> PackageDescription - -> Executable - -> IO () -installExe verbosity lbi installDirs buildPref (progprefix, progsuffix) _pkg exe = do - let binDir = bindir installDirs - createDirectoryIfMissingVerbose verbosity True binDir - let exeFileName = exeName exe <.> exeExtension - fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix - installBinary dest = do - installExecutableFile verbosity - (buildPref exeName exe exeFileName) - (dest <.> exeExtension) - stripExe verbosity lbi exeFileName (dest <.> exeExtension) - installBinary (binDir fixedExeBaseName) - -stripExe :: Verbosity -> LocalBuildInfo -> FilePath -> FilePath -> IO () -stripExe verbosity lbi name path = when (stripExes lbi) $ - case lookupProgram stripProgram (withPrograms lbi) of - Just strip -> rawSystemProgram verbosity strip args - Nothing -> unless (buildOS == Windows) $ - -- Don't bother warning on windows, we don't expect them to - -- have the strip program anyway. - warn verbosity $ "Unable to strip executable '" ++ name - ++ "' (missing the 'strip' program)" - where - args = path : case buildOS of - OSX -> ["-x"] -- By default, stripping the ghc binary on at least - -- some OS X installations causes: - -- HSbase-3.0.o: unknown symbol `_environ'" - -- The -x flag fixes that. 
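-- A standalone sketch of the flag selection performed by ghcPackageDbOptions
-- above: the configured package-database stack is turned into GHC flags, and
-- the flag name itself depends on the compiler version (the code above uses
-- "package-conf" before GHC 7.5 and "package-db" afterwards).  The PackageDB
-- type and the plain [Int] version below are simplifications for the example,
-- not the Cabal types.

data PackageDB = GlobalDB | UserDB | SpecificDB FilePath

packageDbFlags :: [Int] -> [PackageDB] -> [String]
packageDbFlags ghcVersion stack = case stack of
  (GlobalDB : UserDB : dbs) -> concatMap specific dbs
  (GlobalDB : dbs)          -> ("-no-user-" ++ flagName) : concatMap specific dbs
  _                         -> error "unexpected package db stack"
  where
    flagName | ghcVersion < [7,5] = "package-conf"
             | otherwise          = "package-db"
    specific (SpecificDB db) = ['-' : flagName, db]
    specific _               = error "unexpected package db stack"

-- For example (with a made-up db path):
--
--   packageDbFlags [8,0,1] [GlobalDB, UserDB, SpecificDB "dist/package.conf.d"]
--     == ["-package-db", "dist/package.conf.d"]
--   packageDbFlags [7,4,2] [GlobalDB, SpecificDB "dist/package.conf.d"]
--     == ["-no-user-package-conf", "-package-conf", "dist/package.conf.d"]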
- _ -> [] - --- |Install for ghc, .hi, .a and, if --with-ghci given, .o -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do - -- copy .hi files over: - let copy src dst n = do - createDirectoryIfMissingVerbose verbosity True dst - installOrdinaryFile verbosity (src n) (dst n) - copyModuleFiles ext = - findModuleFiles [builtDir] [ext] (libModules lib) - >>= installOrdinaryFiles verbosity targetDir - ifVanilla $ copyModuleFiles "hi" - ifProf $ copyModuleFiles "p_hi" - hcrFiles <- findModuleFiles (builtDir : hsSourceDirs (libBuildInfo lib)) ["hcr"] (libModules lib) - flip mapM_ hcrFiles $ \(srcBase, srcFile) -> runLhc ["--install-library", srcBase srcFile] - - -- copy the built library files over: - ifVanilla $ copy builtDir targetDir vanillaLibName - ifProf $ copy builtDir targetDir profileLibName - ifGHCi $ copy builtDir targetDir ghciLibName - ifShared $ copy builtDir dynlibTargetDir sharedLibName - - where - cid = compilerId (compiler lbi) - libName = componentUnitId clbi - vanillaLibName = mkLibName libName - profileLibName = mkProfLibName libName - ghciLibName = mkGHCiLibName libName - sharedLibName = mkSharedLibName cid libName - - hasLib = not $ null (libModules lib) - && null (cSources (libBuildInfo lib)) - ifVanilla = when (hasLib && withVanillaLib lbi) - ifProf = when (hasLib && withProfLib lbi) - ifGHCi = when (hasLib && withGHCiLib lbi) - ifShared = when (hasLib && withSharedLib lbi) - - runLhc = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) - --- ----------------------------------------------------------------------------- --- Registering - -registerPackage - :: Verbosity - -> ProgramConfiguration - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity progdb packageDbs installedPkgInfo = - HcPkg.reregister (hcPkgInfo progdb) verbosity packageDbs - (Right installedPkgInfo) - -hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo -hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = lhcPkgProg - , HcPkg.noPkgDbStack = False - , HcPkg.noVerboseFlag = False - , HcPkg.flagPackageConf = False - , HcPkg.supportsDirDbs = True - , HcPkg.requiresDirDbs = True - , HcPkg.nativeMultiInstance = False -- ? - , HcPkg.recacheMultiInstance = False -- ? 
- } - where - Just lhcPkgProg = lookupProgram lhcPkgProgram conf diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/LocalBuildInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/LocalBuildInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/LocalBuildInfo.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,515 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.LocalBuildInfo --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Once a package has been configured we have resolved conditionals and --- dependencies, configured the compiler and other needed external programs. --- The 'LocalBuildInfo' is used to hold all this information. It holds the --- install dirs, the compiler, the exact package dependencies, the configured --- programs, the package database to use and a bunch of miscellaneous configure --- flags. It gets saved and reloaded from a file (@dist\/setup-config@). It gets --- passed in to very many subsequent build actions. - -module Distribution.Simple.LocalBuildInfo ( - LocalBuildInfo(..), - externalPackageDeps, - localComponentId, - localUnitId, - localCompatPackageKey, - - -- * Buildable package components - Component(..), - ComponentName(..), - showComponentName, - ComponentLocalBuildInfo(..), - foldComponent, - componentName, - componentBuildInfo, - componentEnabled, - componentDisabledReason, - ComponentDisabledReason(..), - pkgComponents, - pkgEnabledComponents, - lookupComponent, - getComponent, - getComponentLocalBuildInfo, - allComponentsInBuildOrder, - componentsInBuildOrder, - checkComponentsCyclic, - depLibraryPaths, - - withAllComponentsInBuildOrder, - withComponentsInBuildOrder, - withComponentsLBI, - withLibLBI, - withExeLBI, - withTestLBI, - - -- * Installation directories - module Distribution.Simple.InstallDirs, - absoluteInstallDirs, prefixRelativeInstallDirs, - substPathTemplate - ) where - - -import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, - prefixRelativeInstallDirs, - substPathTemplate, ) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.Program -import Distribution.PackageDescription -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Package -import Distribution.Simple.Compiler -import Distribution.Simple.PackageIndex -import Distribution.Simple.Setup -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.System - -import Data.Array ((!)) -import Distribution.Compat.Binary (Binary) -import Data.Graph -import Data.List (nub, find, stripPrefix) -import Data.Maybe -import Data.Tree (flatten) -import GHC.Generics (Generic) -import Data.Map (Map) - -import System.Directory (doesDirectoryExist, canonicalizePath) - --- | Data cached after configuration step. See also --- 'Distribution.Simple.Setup.ConfigFlags'. -data LocalBuildInfo = LocalBuildInfo { - configFlags :: ConfigFlags, - -- ^ Options passed to the configuration step. 
- -- Needed to re-run configuration when .cabal is out of date - flagAssignment :: FlagAssignment, - -- ^ The final set of flags which were picked for this package - extraConfigArgs :: [String], - -- ^ Extra args on the command line for the configuration step. - -- Needed to re-run configuration when .cabal is out of date - installDirTemplates :: InstallDirTemplates, - -- ^ The installation directories for the various different - -- kinds of files - --TODO: inplaceDirTemplates :: InstallDirs FilePath - compiler :: Compiler, - -- ^ The compiler we're building with - hostPlatform :: Platform, - -- ^ The platform we're building for - buildDir :: FilePath, - -- ^ Where to build the package. - componentsConfigs :: [(ComponentName, ComponentLocalBuildInfo, [ComponentName])], - -- ^ All the components to build, ordered by topological sort, and with their dependencies - -- over the intrapackage dependency graph - installedPkgs :: InstalledPackageIndex, - -- ^ All the info about the installed packages that the - -- current package depends on (directly or indirectly). - pkgDescrFile :: Maybe FilePath, - -- ^ the filename containing the .cabal file, if available - localPkgDescr :: PackageDescription, - -- ^ The resolved package description, that does not contain - -- any conditionals. - withPrograms :: ProgramConfiguration, -- ^Location and args for all programs - withPackageDB :: PackageDBStack, -- ^What package database to use, global\/user - withVanillaLib:: Bool, -- ^Whether to build normal libs. - withProfLib :: Bool, -- ^Whether to build profiling versions of libs. - withSharedLib :: Bool, -- ^Whether to build shared versions of libs. - withDynExe :: Bool, -- ^Whether to link executables dynamically - withProfExe :: Bool, -- ^Whether to build executables for profiling. - withProfLibDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. - withProfExeDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. - withOptimization :: OptimisationLevel, -- ^Whether to build with optimization (if available). - withDebugInfo :: DebugInfoLevel, -- ^Whether to emit debug info (if available). - withGHCiLib :: Bool, -- ^Whether to build libs suitable for use with GHCi. - splitObjs :: Bool, -- ^Use -split-objs with GHC, if available - stripExes :: Bool, -- ^Whether to strip executables during install - stripLibs :: Bool, -- ^Whether to strip libraries during install - progPrefix :: PathTemplate, -- ^Prefix to be prepended to installed executables - progSuffix :: PathTemplate, -- ^Suffix to be appended to installed executables - relocatable :: Bool -- ^Whether to build a relocatable package - } deriving (Generic, Read, Show) - -instance Binary LocalBuildInfo - --- | Extract the 'ComponentId' from the library component of a --- 'LocalBuildInfo' if it exists, or make a fake component ID based --- on the package ID. -localComponentId :: LocalBuildInfo -> ComponentId -localComponentId lbi - = case localUnitId lbi of - SimpleUnitId cid -> cid - --- | Extract the 'UnitId' from the library component of a --- 'LocalBuildInfo' if it exists, or make a fake unit ID based on --- the package ID. 
-localUnitId :: LocalBuildInfo -> UnitId -localUnitId lbi = - foldr go (mkLegacyUnitId (package (localPkgDescr lbi))) (componentsConfigs lbi) - where go (_, clbi, _) old_uid = case clbi of - LibComponentLocalBuildInfo { componentUnitId = uid } -> uid - _ -> old_uid - --- | Extract the compatibility 'ComponentId' from the library component of a --- 'LocalBuildInfo' if it exists, or make a fake compatibility package --- key based on the package ID. -localCompatPackageKey :: LocalBuildInfo -> String -localCompatPackageKey lbi = - foldr go (display (package (localPkgDescr lbi))) (componentsConfigs lbi) - where go (_, clbi, _) old_pk = case clbi of - LibComponentLocalBuildInfo { componentCompatPackageKey = pk } -> pk - _ -> old_pk - --- | External package dependencies for the package as a whole. This is the --- union of the individual 'componentPackageDeps', less any internal deps. -externalPackageDeps :: LocalBuildInfo -> [(UnitId, PackageId)] -externalPackageDeps lbi = - -- TODO: what about non-buildable components? - nub [ (ipkgid, pkgid) - | (_,clbi,_) <- componentsConfigs lbi - , (ipkgid, pkgid) <- componentPackageDeps clbi - , not (internal pkgid) ] - where - -- True if this dependency is an internal one (depends on the library - -- defined in the same package). - internal pkgid = pkgid == packageId (localPkgDescr lbi) - --- ----------------------------------------------------------------------------- --- Buildable components - -data Component = CLib Library - | CExe Executable - | CTest TestSuite - | CBench Benchmark - deriving (Show, Eq, Read) - -data ComponentName = CLibName -- currently only a single lib - | CExeName String - | CTestName String - | CBenchName String - deriving (Eq, Generic, Ord, Read, Show) - -instance Binary ComponentName - -showComponentName :: ComponentName -> String -showComponentName CLibName = "library" -showComponentName (CExeName name) = "executable '" ++ name ++ "'" -showComponentName (CTestName name) = "test suite '" ++ name ++ "'" -showComponentName (CBenchName name) = "benchmark '" ++ name ++ "'" - -data ComponentLocalBuildInfo - = LibComponentLocalBuildInfo { - -- | Resolved internal and external package dependencies for this component. - -- The 'BuildInfo' specifies a set of build dependencies that must be - -- satisfied in terms of version ranges. This field fixes those dependencies - -- to the specific versions available on this machine for this compiler. 
- componentPackageDeps :: [(UnitId, PackageId)], - componentUnitId :: UnitId, - componentCompatPackageKey :: String, - componentExposedModules :: [Installed.ExposedModule], - componentPackageRenaming :: Map PackageName ModuleRenaming - } - | ExeComponentLocalBuildInfo { - componentPackageDeps :: [(UnitId, PackageId)], - componentPackageRenaming :: Map PackageName ModuleRenaming - } - | TestComponentLocalBuildInfo { - componentPackageDeps :: [(UnitId, PackageId)], - componentPackageRenaming :: Map PackageName ModuleRenaming - } - | BenchComponentLocalBuildInfo { - componentPackageDeps :: [(UnitId, PackageId)], - componentPackageRenaming :: Map PackageName ModuleRenaming - } - deriving (Generic, Read, Show) - -instance Binary ComponentLocalBuildInfo - -foldComponent :: (Library -> a) - -> (Executable -> a) - -> (TestSuite -> a) - -> (Benchmark -> a) - -> Component - -> a -foldComponent f _ _ _ (CLib lib) = f lib -foldComponent _ f _ _ (CExe exe) = f exe -foldComponent _ _ f _ (CTest tst) = f tst -foldComponent _ _ _ f (CBench bch) = f bch - -componentBuildInfo :: Component -> BuildInfo -componentBuildInfo = - foldComponent libBuildInfo buildInfo testBuildInfo benchmarkBuildInfo - -componentName :: Component -> ComponentName -componentName = - foldComponent (const CLibName) - (CExeName . exeName) - (CTestName . testName) - (CBenchName . benchmarkName) - --- | All the components in the package (libs, exes, or test suites). --- -pkgComponents :: PackageDescription -> [Component] -pkgComponents pkg = - [ CLib lib | Just lib <- [library pkg] ] - ++ [ CExe exe | exe <- executables pkg ] - ++ [ CTest tst | tst <- testSuites pkg ] - ++ [ CBench bm | bm <- benchmarks pkg ] - --- | All the components in the package that are buildable and enabled. --- Thus this excludes non-buildable components and test suites or benchmarks --- that have been disabled. --- -pkgEnabledComponents :: PackageDescription -> [Component] -pkgEnabledComponents = filter componentEnabled . pkgComponents - -componentEnabled :: Component -> Bool -componentEnabled = isNothing . componentDisabledReason - -data ComponentDisabledReason = DisabledComponent - | DisabledAllTests - | DisabledAllBenchmarks - -componentDisabledReason :: Component -> Maybe ComponentDisabledReason -componentDisabledReason (CLib lib) - | not (buildable (libBuildInfo lib)) = Just DisabledComponent -componentDisabledReason (CExe exe) - | not (buildable (buildInfo exe)) = Just DisabledComponent -componentDisabledReason (CTest tst) - | not (buildable (testBuildInfo tst)) = Just DisabledComponent - | not (testEnabled tst) = Just DisabledAllTests -componentDisabledReason (CBench bm) - | not (buildable (benchmarkBuildInfo bm)) = Just DisabledComponent - | not (benchmarkEnabled bm) = Just DisabledAllBenchmarks -componentDisabledReason _ = Nothing - -lookupComponent :: PackageDescription -> ComponentName -> Maybe Component -lookupComponent pkg CLibName = - fmap CLib $ library pkg -lookupComponent pkg (CExeName name) = - fmap CExe $ find ((name ==) . exeName) (executables pkg) -lookupComponent pkg (CTestName name) = - fmap CTest $ find ((name ==) . testName) (testSuites pkg) -lookupComponent pkg (CBenchName name) = - fmap CBench $ find ((name ==) . 
benchmarkName) (benchmarks pkg) - -getComponent :: PackageDescription -> ComponentName -> Component -getComponent pkg cname = - case lookupComponent pkg cname of - Just cpnt -> cpnt - Nothing -> missingComponent - where - missingComponent = - error $ "internal error: the package description contains no " - ++ "component corresponding to " ++ show cname - - -getComponentLocalBuildInfo :: LocalBuildInfo -> ComponentName - -> ComponentLocalBuildInfo -getComponentLocalBuildInfo lbi cname = - case [ clbi - | (cname', clbi, _) <- componentsConfigs lbi - , cname == cname' ] of - [clbi] -> clbi - _ -> missingComponent - where - missingComponent = - error $ "internal error: there is no configuration data " - ++ "for component " ++ show cname - - --- |If the package description has a library section, call the given --- function with the library build info as argument. Extended version of --- 'withLib' that also gives corresponding build info. -withLibLBI :: PackageDescription -> LocalBuildInfo - -> (Library -> ComponentLocalBuildInfo -> IO ()) -> IO () -withLibLBI pkg_descr lbi f = - withLib pkg_descr $ \lib -> - f lib (getComponentLocalBuildInfo lbi CLibName) - --- | Perform the action on each buildable 'Executable' in the package --- description. Extended version of 'withExe' that also gives corresponding --- build info. -withExeLBI :: PackageDescription -> LocalBuildInfo - -> (Executable -> ComponentLocalBuildInfo -> IO ()) -> IO () -withExeLBI pkg_descr lbi f = - withExe pkg_descr $ \exe -> - f exe (getComponentLocalBuildInfo lbi (CExeName (exeName exe))) - -withTestLBI :: PackageDescription -> LocalBuildInfo - -> (TestSuite -> ComponentLocalBuildInfo -> IO ()) -> IO () -withTestLBI pkg_descr lbi f = - withTest pkg_descr $ \test -> - f test (getComponentLocalBuildInfo lbi (CTestName (testName test))) - -{-# DEPRECATED withComponentsLBI "Use withAllComponentsInBuildOrder" #-} -withComponentsLBI :: PackageDescription -> LocalBuildInfo - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withComponentsLBI = withAllComponentsInBuildOrder - --- | Perform the action on each buildable 'Library' or 'Executable' (Component) --- in the PackageDescription, subject to the build order specified by the --- 'compBuildOrder' field of the given 'LocalBuildInfo' -withAllComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withAllComponentsInBuildOrder pkg lbi f = - sequence_ - [ f (getComponent pkg cname) clbi - | (cname, clbi) <- allComponentsInBuildOrder lbi ] - -withComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo - -> [ComponentName] - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withComponentsInBuildOrder pkg lbi cnames f = - sequence_ - [ f (getComponent pkg cname') clbi - | (cname', clbi) <- componentsInBuildOrder lbi cnames ] - -allComponentsInBuildOrder :: LocalBuildInfo - -> [(ComponentName, ComponentLocalBuildInfo)] -allComponentsInBuildOrder lbi = - componentsInBuildOrder lbi - [ cname | (cname, _, _) <- componentsConfigs lbi ] - -componentsInBuildOrder :: LocalBuildInfo -> [ComponentName] - -> [(ComponentName, ComponentLocalBuildInfo)] -componentsInBuildOrder lbi cnames = - map ((\(clbi,cname,_) -> (cname,clbi)) . vertexToNode) - . postOrder graph - . 
map (\cname -> fromMaybe (noSuchComp cname) (keyToVertex cname)) - $ cnames - where - (graph, vertexToNode, keyToVertex) = - graphFromEdges (map (\(a,b,c) -> (b,a,c)) (componentsConfigs lbi)) - - noSuchComp cname = error $ "internal error: componentsInBuildOrder: " - ++ "no such component: " ++ show cname - - postOrder :: Graph -> [Vertex] -> [Vertex] - postOrder g vs = postorderF (dfs g vs) [] - - postorderF :: Forest a -> [a] -> [a] - postorderF ts = foldr (.) id $ map postorderT ts - - postorderT :: Tree a -> [a] -> [a] - postorderT (Node a ts) = postorderF ts . (a :) - -checkComponentsCyclic :: Ord key => [(node, key, [key])] - -> Maybe [(node, key, [key])] -checkComponentsCyclic es = - let (graph, vertexToNode, _) = graphFromEdges es - cycles = [ flatten c | c <- scc graph, isCycle c ] - isCycle (Node v []) = selfCyclic v - isCycle _ = True - selfCyclic v = v `elem` graph ! v - in case cycles of - [] -> Nothing - (c:_) -> Just (map vertexToNode c) - --- | Determine the directories containing the dynamic libraries of the --- transitive dependencies of the component we are building. --- --- When wanted, and possible, returns paths relative to the installDirs 'prefix' -depLibraryPaths :: Bool -- ^ Building for inplace? - -> Bool -- ^ Generate prefix-relative library paths - -> LocalBuildInfo - -> ComponentLocalBuildInfo -- ^ Component that is being built - -> IO [FilePath] -depLibraryPaths inplace relative lbi clbi = do - let pkgDescr = localPkgDescr lbi - installDirs = absoluteInstallDirs pkgDescr lbi NoCopyDest - executable = case clbi of - ExeComponentLocalBuildInfo {} -> True - _ -> False - relDir | executable = bindir installDirs - | otherwise = libdir installDirs - - let hasInternalDeps = not $ null - $ [ pkgid - | (_,pkgid) <- componentPackageDeps clbi - , internal pkgid - ] - - let ipkgs = allPackages (installedPkgs lbi) - -- First look for dynamic libraries in `dynamic-library-dirs`, and use - -- `library-dirs` as a fall back. - getDynDir pkg = case Installed.libraryDynDirs pkg of - [] -> Installed.libraryDirs pkg - d -> d - allDepLibDirs = concatMap getDynDir ipkgs - internalLib - | inplace = buildDir lbi - | otherwise = dynlibdir installDirs - allDepLibDirs' = if hasInternalDeps - then internalLib : allDepLibDirs - else allDepLibDirs - allDepLibDirsC <- mapM canonicalizePathNoFail allDepLibDirs' - - let p = prefix installDirs - prefixRelative l = isJust (stripPrefix p l) - libPaths - | relative && - prefixRelative relDir = map (\l -> - if prefixRelative l - then shortRelativePath relDir l - else l - ) allDepLibDirsC - | otherwise = allDepLibDirsC - - return libPaths - where - internal pkgid = pkgid == packageId (localPkgDescr lbi) - -- 'canonicalizePath' fails on UNIX when the directory does not exists. - -- So just don't canonicalize when it doesn't exist. 
- canonicalizePathNoFail p = do - exists <- doesDirectoryExist p - if exists - then canonicalizePath p - else return p - - --- ----------------------------------------------------------------------------- --- Wrappers for a couple functions from InstallDirs - --- |See 'InstallDirs.absoluteInstallDirs' -absoluteInstallDirs :: PackageDescription -> LocalBuildInfo -> CopyDest - -> InstallDirs FilePath -absoluteInstallDirs pkg lbi copydest = - InstallDirs.absoluteInstallDirs - (packageId pkg) - (localUnitId lbi) - (compilerInfo (compiler lbi)) - copydest - (hostPlatform lbi) - (installDirTemplates lbi) - --- |See 'InstallDirs.prefixRelativeInstallDirs' -prefixRelativeInstallDirs :: PackageId -> LocalBuildInfo - -> InstallDirs (Maybe FilePath) -prefixRelativeInstallDirs pkg_descr lbi = - InstallDirs.prefixRelativeInstallDirs - (packageId pkg_descr) - (localUnitId lbi) - (compilerInfo (compiler lbi)) - (hostPlatform lbi) - (installDirTemplates lbi) - -substPathTemplate :: PackageId -> LocalBuildInfo - -> PathTemplate -> FilePath -substPathTemplate pkgid lbi = fromPathTemplate - . ( InstallDirs.substPathTemplate env ) - where env = initialPathTemplateEnv - pkgid - (localUnitId lbi) - (compilerInfo (compiler lbi)) - (hostPlatform lbi) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PackageIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PackageIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PackageIndex.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,623 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PackageIndex --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007, --- Duncan Coutts 2008-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- An index of packages. 
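-- A small, self-contained illustration of the technique used by
-- checkComponentsCyclic in Distribution.Simple.LocalBuildInfo above, and by
-- dependencyCycles later in this module: adjacency information is supplied
-- as (node, key, [key]) triples, Data.Graph.stronglyConnComp (from the
-- containers package) computes the strongly connected components, and every
-- CyclicSCC it returns is a dependency cycle.  The component names in the
-- example are made up.

import Data.Graph (SCC (..), stronglyConnComp)

-- | Groups of nodes that depend on each other, directly or indirectly;
-- an empty result means the dependency graph is acyclic.
cyclesIn :: Ord key => [(node, key, [key])] -> [[node]]
cyclesIn triples = [ ns | CyclicSCC ns <- stronglyConnComp triples ]

-- For instance, a library, an executable that uses it, and two test suites
-- that (incorrectly) depend on each other:
--
--   cyclesIn [ ("lib",   "lib",   [])
--            , ("exe",   "exe",   ["lib"])
--            , ("testA", "testA", ["lib", "testB"])
--            , ("testB", "testB", ["testA"]) ]
--
-- reports a single cycle containing "testA" and "testB".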
--- -module Distribution.Simple.PackageIndex ( - -- * Package index data type - InstalledPackageIndex, - PackageIndex, - - -- * Creating an index - fromList, - - -- * Updates - merge, - - insert, - - deleteUnitId, - deleteSourcePackageId, - deletePackageName, --- deleteDependency, - - -- * Queries - - -- ** Precise lookups - lookupUnitId, - lookupSourcePackageId, - lookupPackageId, - lookupPackageName, - lookupDependency, - - -- ** Case-insensitive searches - searchByName, - SearchResult(..), - searchByNameSubstring, - - -- ** Bulk queries - allPackages, - allPackagesByName, - allPackagesBySourcePackageId, - - -- ** Special queries - brokenPackages, - dependencyClosure, - reverseDependencyClosure, - topologicalOrder, - reverseTopologicalOrder, - dependencyInconsistencies, - dependencyCycles, - dependencyGraph, - moduleNameIndex, - - -- * Backwards compatibility - deleteInstalledPackageId, - lookupInstalledPackageId, - ) where - -import Distribution.Compat.Binary -import Distribution.Compat.Semigroup as Semi -import Distribution.Package -import Distribution.ModuleName -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.Version -import Distribution.Simple.Utils - -import Control.Exception (assert) -import Data.Array ((!)) -import qualified Data.Array as Array -import qualified Data.Graph as Graph -import Data.List as List - ( null, foldl', sort - , groupBy, sortBy, find, nubBy, deleteBy, deleteFirstsBy ) -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Maybe (isNothing, fromMaybe) -import qualified Data.Tree as Tree -import GHC.Generics (Generic) -import Prelude hiding (lookup) - --- | The collection of information about packages from one or more 'PackageDB's. --- These packages generally should have an instance of 'PackageInstalled' --- --- Packages are uniquely identified in by their 'UnitId', they can --- also be efficiently looked up by package name or by name and version. --- -data PackageIndex a = PackageIndex - -- The primary index. Each InstalledPackageInfo record is uniquely identified - -- by its UnitId. - -- - !(Map UnitId a) - - -- This auxiliary index maps package names (case-sensitively) to all the - -- versions and instances of that package. This allows us to find all - -- versions satisfying a dependency. - -- - -- It is a three-level index. The first level is the package name, - -- the second is the package version and the final level is instances - -- of the same package version. These are unique by UnitId - -- and are kept in preference order. - -- - -- FIXME: Clarify what "preference order" means. Check that this invariant is - -- preserved. See #1463 for discussion. - !(Map PackageName (Map Version [a])) - - deriving (Eq, Generic, Show, Read) - -instance Binary a => Binary (PackageIndex a) - --- | The default package index which contains 'InstalledPackageInfo'. Normally --- use this. 
-type InstalledPackageIndex = PackageIndex IPI.InstalledPackageInfo - -instance HasUnitId a => Monoid (PackageIndex a) where - mempty = PackageIndex Map.empty Map.empty - mappend = (Semi.<>) - --save one mappend with empty in the common case: - mconcat [] = mempty - mconcat xs = foldr1 mappend xs - -instance HasUnitId a => Semigroup (PackageIndex a) where - (<>) = merge - -invariant :: HasUnitId a => PackageIndex a -> Bool -invariant (PackageIndex pids pnames) = - map installedUnitId (Map.elems pids) - == sort - [ assert pinstOk (installedUnitId pinst) - | (pname, pvers) <- Map.toList pnames - , let pversOk = not (Map.null pvers) - , (pver, pinsts) <- assert pversOk $ Map.toList pvers - , let pinsts' = sortBy (comparing installedUnitId) pinsts - pinstsOk = all (\g -> length g == 1) - (groupBy (equating installedUnitId) pinsts') - , pinst <- assert pinstsOk $ pinsts' - , let pinstOk = packageName pinst == pname - && packageVersion pinst == pver - ] - - --- --- * Internal helpers --- - -mkPackageIndex :: HasUnitId a - => Map UnitId a - -> Map PackageName (Map Version [a]) - -> PackageIndex a -mkPackageIndex pids pnames = assert (invariant index) index - where index = PackageIndex pids pnames - - --- --- * Construction --- - --- | Build an index out of a bunch of packages. --- --- If there are duplicates by 'UnitId' then later ones mask earlier --- ones. --- -fromList :: HasUnitId a => [a] -> PackageIndex a -fromList pkgs = mkPackageIndex pids pnames - where - pids = Map.fromList [ (installedUnitId pkg, pkg) | pkg <- pkgs ] - pnames = - Map.fromList - [ (packageName (head pkgsN), pvers) - | pkgsN <- groupBy (equating packageName) - . sortBy (comparing packageId) - $ pkgs - , let pvers = - Map.fromList - [ (packageVersion (head pkgsNV), - nubBy (equating installedUnitId) (reverse pkgsNV)) - | pkgsNV <- groupBy (equating packageVersion) pkgsN - ] - ] - --- --- * Updates --- - --- | Merge two indexes. --- --- Packages from the second mask packages from the first if they have the exact --- same 'UnitId'. --- --- For packages with the same source 'PackageId', packages from the second are --- \"preferred\" over those from the first. Being preferred means they are top --- result when we do a lookup by source 'PackageId'. This is the mechanism we --- use to prefer user packages over global packages. --- -merge :: HasUnitId a => PackageIndex a -> PackageIndex a - -> PackageIndex a -merge (PackageIndex pids1 pnames1) (PackageIndex pids2 pnames2) = - mkPackageIndex (Map.unionWith (\_ y -> y) pids1 pids2) - (Map.unionWith (Map.unionWith mergeBuckets) pnames1 pnames2) - where - -- Packages in the second list mask those in the first, however preferred - -- packages go first in the list. - mergeBuckets xs ys = ys ++ (xs \\ ys) - (\\) = deleteFirstsBy (equating installedUnitId) - - --- | Inserts a single package into the index. --- --- This is equivalent to (but slightly quicker than) using 'mappend' or --- 'merge' with a singleton index. 
--- -insert :: HasUnitId a => a -> PackageIndex a -> PackageIndex a -insert pkg (PackageIndex pids pnames) = - mkPackageIndex pids' pnames' - - where - pids' = Map.insert (installedUnitId pkg) pkg pids - pnames' = insertPackageName pnames - insertPackageName = - Map.insertWith' (\_ -> insertPackageVersion) - (packageName pkg) - (Map.singleton (packageVersion pkg) [pkg]) - - insertPackageVersion = - Map.insertWith' (\_ -> insertPackageInstance) - (packageVersion pkg) [pkg] - - insertPackageInstance pkgs = - pkg : deleteBy (equating installedUnitId) pkg pkgs - - --- | Removes a single installed package from the index. --- -deleteUnitId :: HasUnitId a - => UnitId -> PackageIndex a - -> PackageIndex a -deleteUnitId ipkgid original@(PackageIndex pids pnames) = - case Map.updateLookupWithKey (\_ _ -> Nothing) ipkgid pids of - (Nothing, _) -> original - (Just spkgid, pids') -> mkPackageIndex pids' - (deletePkgName spkgid pnames) - - where - deletePkgName spkgid = - Map.update (deletePkgVersion spkgid) (packageName spkgid) - - deletePkgVersion spkgid = - (\m -> if Map.null m then Nothing else Just m) - . Map.update deletePkgInstance (packageVersion spkgid) - - deletePkgInstance = - (\xs -> if List.null xs then Nothing else Just xs) - . List.deleteBy (\_ pkg -> installedUnitId pkg == ipkgid) undefined - --- | Backwards compatibility wrapper for Cabal pre-1.24. -{-# DEPRECATED deleteInstalledPackageId "Use deleteUnitId instead" #-} -deleteInstalledPackageId :: HasUnitId a - => UnitId -> PackageIndex a - -> PackageIndex a -deleteInstalledPackageId = deleteUnitId - --- | Removes all packages with this source 'PackageId' from the index. --- -deleteSourcePackageId :: HasUnitId a => PackageId -> PackageIndex a - -> PackageIndex a -deleteSourcePackageId pkgid original@(PackageIndex pids pnames) = - case Map.lookup (packageName pkgid) pnames of - Nothing -> original - Just pvers -> case Map.lookup (packageVersion pkgid) pvers of - Nothing -> original - Just pkgs -> mkPackageIndex - (foldl' (flip (Map.delete . installedUnitId)) pids pkgs) - (deletePkgName pnames) - where - deletePkgName = - Map.update deletePkgVersion (packageName pkgid) - - deletePkgVersion = - (\m -> if Map.null m then Nothing else Just m) - . Map.delete (packageVersion pkgid) - - --- | Removes all packages with this (case-sensitive) name from the index. --- -deletePackageName :: HasUnitId a => PackageName -> PackageIndex a - -> PackageIndex a -deletePackageName name original@(PackageIndex pids pnames) = - case Map.lookup name pnames of - Nothing -> original - Just pvers -> mkPackageIndex - (foldl' (flip (Map.delete . installedUnitId)) pids - (concat (Map.elems pvers))) - (Map.delete name pnames) - -{- --- | Removes all packages satisfying this dependency from the index. --- -deleteDependency :: Dependency -> PackageIndex -> PackageIndex -deleteDependency (Dependency name verstionRange) = - delete' name (\pkg -> packageVersion pkg `withinRange` verstionRange) --} - --- --- * Bulk queries --- - --- | Get all the packages from the index. --- -allPackages :: PackageIndex a -> [a] -allPackages (PackageIndex pids _) = Map.elems pids - --- | Get all the packages from the index. --- --- They are grouped by package name (case-sensitively). --- -allPackagesByName :: PackageIndex a -> [(PackageName, [a])] -allPackagesByName (PackageIndex _ pnames) = - [ (pkgname, concat (Map.elems pvers)) - | (pkgname, pvers) <- Map.toList pnames ] - --- | Get all the packages from the index. --- --- They are grouped by source package id (package name and version). 
--- -allPackagesBySourcePackageId :: HasUnitId a => PackageIndex a - -> [(PackageId, [a])] -allPackagesBySourcePackageId (PackageIndex _ pnames) = - [ (packageId ipkg, ipkgs) - | pvers <- Map.elems pnames - , ipkgs@(ipkg:_) <- Map.elems pvers ] - --- --- * Lookups --- - --- | Does a lookup by source package id (name & version). --- --- Since multiple package DBs mask each other by 'UnitId', --- then we get back at most one package. --- -lookupUnitId :: PackageIndex a -> UnitId - -> Maybe a -lookupUnitId (PackageIndex pids _) pid = Map.lookup pid pids - --- | Backwards compatibility for Cabal pre-1.24. -{-# DEPRECATED lookupInstalledPackageId "Use lookupUnitId instead" #-} -lookupInstalledPackageId :: PackageIndex a -> UnitId - -> Maybe a -lookupInstalledPackageId = lookupUnitId - - --- | Does a lookup by source package id (name & version). --- --- There can be multiple installed packages with the same source 'PackageId' --- but different 'UnitId'. They are returned in order of --- preference, with the most preferred first. --- -lookupSourcePackageId :: PackageIndex a -> PackageId -> [a] -lookupSourcePackageId (PackageIndex _ pnames) pkgid = - case Map.lookup (packageName pkgid) pnames of - Nothing -> [] - Just pvers -> case Map.lookup (packageVersion pkgid) pvers of - Nothing -> [] - Just pkgs -> pkgs -- in preference order - --- | Convenient alias of 'lookupSourcePackageId', but assuming only --- one package per package ID. -lookupPackageId :: PackageIndex a -> PackageId -> Maybe a -lookupPackageId index pkgid = case lookupSourcePackageId index pkgid of - [] -> Nothing - [pkg] -> Just pkg - _ -> error "Distribution.Simple.PackageIndex: multiple matches found" - --- | Does a lookup by source package name. --- -lookupPackageName :: PackageIndex a -> PackageName - -> [(Version, [a])] -lookupPackageName (PackageIndex _ pnames) name = - case Map.lookup name pnames of - Nothing -> [] - Just pvers -> Map.toList pvers - - --- | Does a lookup by source package name and a range of versions. --- --- We get back any number of versions of the specified package name, all --- satisfying the version range constraint. --- -lookupDependency :: PackageIndex a -> Dependency - -> [(Version, [a])] -lookupDependency (PackageIndex _ pnames) (Dependency name versionRange) = - case Map.lookup name pnames of - Nothing -> [] - Just pvers -> [ entry - | entry@(ver, _) <- Map.toList pvers - , ver `withinRange` versionRange ] - --- --- * Case insensitive name lookups --- - --- | Does a case-insensitive search by package name. --- --- If there is only one package that compares case-insensitively to this name --- then the search is unambiguous and we get back all versions of that package. --- If several match case-insensitively but one matches exactly then it is also --- unambiguous. --- --- If however several match case-insensitively and none match exactly then we --- have an ambiguous result, and we get back all the versions of all the --- packages. The list of ambiguous results is split by exact package name. So --- it is a non-empty list of non-empty lists. --- -searchByName :: PackageIndex a -> String -> SearchResult [a] -searchByName (PackageIndex _ pnames) name = - case [ pkgs | pkgs@(PackageName name',_) <- Map.toList pnames - , lowercase name' == lname ] of - [] -> None - [(_,pvers)] -> Unambiguous (concat (Map.elems pvers)) - pkgss -> case find ((PackageName name==) . fst) pkgss of - Just (_,pvers) -> Unambiguous (concat (Map.elems pvers)) - Nothing -> Ambiguous (map (concat . Map.elems . 
snd) pkgss) - where lname = lowercase name - -data SearchResult a = None | Unambiguous a | Ambiguous [a] - --- | Does a case-insensitive substring search by package name. --- --- That is, all packages that contain the given string in their name. --- -searchByNameSubstring :: PackageIndex a -> String -> [a] -searchByNameSubstring (PackageIndex _ pnames) searchterm = - [ pkg - | (PackageName name, pvers) <- Map.toList pnames - , lsearchterm `isInfixOf` lowercase name - , pkgs <- Map.elems pvers - , pkg <- pkgs ] - where lsearchterm = lowercase searchterm - - --- --- * Special queries --- - --- None of the stuff below depends on the internal representation of the index. --- - --- | Find if there are any cycles in the dependency graph. If there are no --- cycles the result is @[]@. --- --- This actually computes the strongly connected components. So it gives us a --- list of groups of packages where within each group they all depend on each --- other, directly or indirectly. --- -dependencyCycles :: PackageInstalled a => PackageIndex a -> [[a]] -dependencyCycles index = - [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] - where - adjacencyList = [ (pkg, installedUnitId pkg, installedDepends pkg) - | pkg <- allPackages index ] - - --- | All packages that have immediate dependencies that are not in the index. --- --- Returns such packages along with the dependencies that they're missing. --- -brokenPackages :: PackageInstalled a => PackageIndex a - -> [(a, [UnitId])] -brokenPackages index = - [ (pkg, missing) - | pkg <- allPackages index - , let missing = [ pkg' | pkg' <- installedDepends pkg - , isNothing (lookupUnitId index pkg') ] - , not (null missing) ] - --- | Tries to take the transitive closure of the package dependencies. --- --- If the transitive closure is complete then it returns that subset of the --- index. Otherwise it returns the broken packages as in 'brokenPackages'. --- --- * Note that if the result is @Right []@ it is because at least one of --- the original given 'PackageId's do not occur in the index. --- -dependencyClosure :: PackageInstalled a => PackageIndex a - -> [UnitId] - -> Either (PackageIndex a) - [(a, [UnitId])] -dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of - (completed, []) -> Left completed - (completed, _) -> Right (brokenPackages completed) - where - closure completed failed [] = (completed, failed) - closure completed failed (pkgid:pkgids) = case lookupUnitId index pkgid of - Nothing -> closure completed (pkgid:failed) pkgids - Just pkg -> case lookupUnitId completed (installedUnitId pkg) of - Just _ -> closure completed failed pkgids - Nothing -> closure completed' failed pkgids' - where completed' = insert pkg completed - pkgids' = installedDepends pkg ++ pkgids - --- | Takes the transitive closure of the packages reverse dependencies. --- --- * The given 'PackageId's must be in the index. --- -reverseDependencyClosure :: PackageInstalled a => PackageIndex a - -> [UnitId] - -> [a] -reverseDependencyClosure index = - map vertexToPkg - . concatMap Tree.flatten - . Graph.dfs reverseDepGraph - . map (fromMaybe noSuchPkgId . pkgIdToVertex) - - where - (depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index - reverseDepGraph = Graph.transposeG depGraph - noSuchPkgId = error "reverseDependencyClosure: package is not in the graph" - -topologicalOrder :: PackageInstalled a => PackageIndex a -> [a] -topologicalOrder index = map toPkgId - . 
Graph.topSort - $ graph - where (graph, toPkgId, _) = dependencyGraph index - -reverseTopologicalOrder :: PackageInstalled a => PackageIndex a -> [a] -reverseTopologicalOrder index = map toPkgId - . Graph.topSort - . Graph.transposeG - $ graph - where (graph, toPkgId, _) = dependencyGraph index - --- | Builds a graph of the package dependencies. --- --- Dependencies on other packages that are not in the index are discarded. --- You can check if there are any such dependencies with 'brokenPackages'. --- -dependencyGraph :: PackageInstalled a => PackageIndex a - -> (Graph.Graph, - Graph.Vertex -> a, - UnitId -> Maybe Graph.Vertex) -dependencyGraph index = (graph, vertex_to_pkg, id_to_vertex) - where - graph = Array.listArray bounds - [ [ v | Just v <- map id_to_vertex (installedDepends pkg) ] - | pkg <- pkgs ] - - pkgs = sortBy (comparing packageId) (allPackages index) - vertices = zip (map installedUnitId pkgs) [0..] - vertex_map = Map.fromList vertices - id_to_vertex pid = Map.lookup pid vertex_map - - vertex_to_pkg vertex = pkgTable ! vertex - - pkgTable = Array.listArray bounds pkgs - topBound = length pkgs - 1 - bounds = (0, topBound) - --- | Given a package index where we assume we want to use all the packages --- (use 'dependencyClosure' if you need to get such a index subset) find out --- if the dependencies within it use consistent versions of each package. --- Return all cases where multiple packages depend on different versions of --- some other package. --- --- Each element in the result is a package name along with the packages that --- depend on it and the versions they require. These are guaranteed to be --- distinct. --- -dependencyInconsistencies :: PackageInstalled a => PackageIndex a - -> [(PackageName, [(PackageId, Version)])] -dependencyInconsistencies index = - [ (name, [ (pid,packageVersion dep) | (dep,pids) <- uses, pid <- pids]) - | (name, ipid_map) <- Map.toList inverseIndex - , let uses = Map.elems ipid_map - , reallyIsInconsistent (map fst uses) ] - - where -- for each PackageName, - -- for each package with that name, - -- the InstalledPackageInfo and the package Ids of packages - -- that depend on it. - inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) - [ (packageName dep, - Map.fromList [(ipid,(dep,[packageId pkg]))]) - | pkg <- allPackages index - , ipid <- installedDepends pkg - , Just dep <- [lookupUnitId index ipid] - ] - - reallyIsInconsistent :: PackageInstalled a => [a] -> Bool - reallyIsInconsistent [] = False - reallyIsInconsistent [_p] = False - reallyIsInconsistent [p1, p2] = - let pid1 = installedUnitId p1 - pid2 = installedUnitId p2 - in pid1 `notElem` installedDepends p2 - && pid2 `notElem` installedDepends p1 - reallyIsInconsistent _ = True - --- | A rough approximation of GHC's module finder, takes a --- 'InstalledPackageIndex' and turns it into a map from module names to their --- source packages. It's used to initialize the @build-deps@ field in @cabal --- init@. -moduleNameIndex :: InstalledPackageIndex -> Map ModuleName [IPI.InstalledPackageInfo] -moduleNameIndex index = - Map.fromListWith (++) $ do - pkg <- allPackages index - IPI.ExposedModule m reexport <- IPI.exposedModules pkg - case reexport of - Nothing -> return (m, [pkg]) - Just (IPI.OriginalModule _ m') | m == m' -> [] - | otherwise -> return (m', [pkg]) - -- The heuristic is this: we want to prefer the original package - -- which originally exported a module. 
However, if a reexport - -- also *renamed* the module (m /= m'), then we have to use the - -- downstream package, since the upstream package has the wrong - -- module name! diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess/Unlit.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess/Unlit.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess/Unlit.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess/Unlit.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,165 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PreProcess.Unlit --- Copyright : ... --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Remove the \"literal\" markups from a Haskell source file, including --- \"@>@\", \"@\\begin{code}@\", \"@\\end{code}@\", and \"@#@\" - --- This version is interesting because instead of striping comment lines, it --- turns them into "-- " style comments. This allows using haddock markup --- in literate scripts without having to use "> --" prefix. - -module Distribution.Simple.PreProcess.Unlit (unlit,plain) where - -import Data.Char -import Data.List - -data Classified = BirdTrack String | Blank String | Ordinary String - | Line !Int String | CPP String - | BeginCode | EndCode - -- output only: - | Error String | Comment String - --- | No unliteration. -plain :: String -> String -> String -plain _ hs = hs - -classify :: String -> Classified -classify ('>':s) = BirdTrack s -classify ('#':s) = case tokens s of - (line:file:_) | all isDigit line - && length file >= 2 - && head file == '"' - && last file == '"' - -> Line (read line) (tail (init file)) - _ -> CPP s - where tokens = unfoldr $ \str -> case lex str of - (t@(_:_), str'):_ -> Just (t, str') - _ -> Nothing -classify ('\\':s) - | "begin{code}" `isPrefixOf` s = BeginCode - | "end{code}" `isPrefixOf` s = EndCode -classify s | all isSpace s = Blank s -classify s = Ordinary s - --- So the weird exception for comment indenting is to make things work with --- haddock, see classifyAndCheckForBirdTracks below. -unclassify :: Bool -> Classified -> String -unclassify _ (BirdTrack s) = ' ':s -unclassify _ (Blank s) = s -unclassify _ (Ordinary s) = s -unclassify _ (Line n file) = "# " ++ show n ++ " " ++ show file -unclassify _ (CPP s) = '#':s -unclassify True (Comment "") = " --" -unclassify True (Comment s) = " -- " ++ s -unclassify False (Comment "") = "--" -unclassify False (Comment s) = "-- " ++ s -unclassify _ _ = internalError - --- | 'unlit' takes a filename (for error reports), and transforms the --- given string, to eliminate the literate comments from the program text. -unlit :: FilePath -> String -> Either String String -unlit file input = - let (usesBirdTracks, classified) = classifyAndCheckForBirdTracks - . inlines - $ input - in either (Left . unlines . map (unclassify usesBirdTracks)) - Right - . checkErrors - . reclassify - $ classified - - where - -- So haddock requires comments and code to align, since it treats comments - -- as following the layout rule. This is a pain for us since bird track - -- style literate code typically gets indented by two since ">" is replaced - -- by " " and people usually use one additional space of indent ie - -- "> then the code". 
On the other hand we cannot just go and indent all - -- the comments by two since that does not work for latex style literate - -- code. So the hacky solution we use here is that if we see any bird track - -- style code then we'll indent all comments by two, otherwise by none. - -- Of course this will not work for mixed latex/bird track .lhs files but - -- nobody does that, it's silly and specifically recommended against in the - -- H98 unlit spec. - -- - classifyAndCheckForBirdTracks = - flip mapAccumL False $ \seenBirdTrack line -> - let classification = classify line - in (seenBirdTrack || isBirdTrack classification, classification) - - isBirdTrack (BirdTrack _) = True - isBirdTrack _ = False - - checkErrors ls = case [ e | Error e <- ls ] of - [] -> Left ls - (message:_) -> Right (f ++ ":" ++ show n ++ ": " ++ message) - where (f, n) = errorPos file 1 ls - errorPos f n [] = (f, n) - errorPos f n (Error _:_) = (f, n) - errorPos _ _ (Line n' f':ls) = errorPos f' n' ls - errorPos f n (_ :ls) = errorPos f (n+1) ls - --- Here we model a state machine, with each state represented by --- a local function. We only have four states (well, five, --- if you count the error state), but the rules --- to transition between then are not so simple. --- Would it be simpler to have more states? --- --- Each state represents the type of line that was last read --- i.e. are we in a comment section, or a latex-code section, --- or a bird-code section, etc? -reclassify :: [Classified] -> [Classified] -reclassify = blank -- begin in blank state - where - latex [] = [] - latex (EndCode :ls) = Blank "" : comment ls - latex (BeginCode :_ ) = [Error "\\begin{code} in code section"] - latex (BirdTrack l:ls) = Ordinary ('>':l) : latex ls - latex ( l:ls) = l : latex ls - - blank [] = [] - blank (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - blank (BeginCode :ls) = Blank "" : latex ls - blank (BirdTrack l:ls) = BirdTrack l : bird ls - blank (Ordinary l:ls) = Comment l : comment ls - blank ( l:ls) = l : blank ls - - bird [] = [] - bird (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - bird (BeginCode :ls) = Blank "" : latex ls - bird (Blank l :ls) = Blank l : blank ls - bird (Ordinary _:_ ) = [Error "program line before comment line"] - bird ( l:ls) = l : bird ls - - comment [] = [] - comment (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - comment (BeginCode :ls) = Blank "" : latex ls - comment (CPP l :ls) = CPP l : comment ls - comment (BirdTrack _:_ ) = [Error "comment line before program line"] - -- a blank line and another ordinary line following a comment - -- will be treated as continuing the comment. Otherwise it's - -- then end of the comment, with a blank line. - comment (Blank l:ls@(Ordinary _:_)) = Comment l : comment ls - comment (Blank l:ls) = Blank l : blank ls - comment (Line n f :ls) = Line n f : comment ls - comment (Ordinary l:ls) = Comment l : comment ls - comment (Comment _: _) = internalError - comment (Error _: _) = internalError - --- Re-implementation of 'lines', for better efficiency (but decreased laziness). --- Also, importantly, accepts non-standard DOS and Mac line ending characters. -inlines :: String -> [String] -inlines xs = lines' xs id - where - lines' [] acc = [acc []] - lines' ('\^M':'\n':s) acc = acc [] : lines' s id -- DOS - lines' ('\^M':s) acc = acc [] : lines' s id -- MacOS - lines' ('\n':s) acc = acc [] : lines' s id -- Unix - lines' (c:s) acc = lines' s (acc . 
(c:)) - -internalError :: a -internalError = error "unlit: internal error" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/PreProcess.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,652 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PreProcess --- Copyright : (c) 2003-2005, Isaac Jones, Malcolm Wallace --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines a 'PreProcessor' abstraction which represents a pre-processor --- that can transform one kind of file into another. There is also a --- 'PPSuffixHandler' which is a combination of a file extension and a function --- for configuring a 'PreProcessor'. It defines a bunch of known built-in --- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and --- lists them in 'knownSuffixHandlers'. On top of this it provides a function --- for actually preprocessing some sources given a bunch of known suffix --- handlers. This module is not as good as it could be, it could really do with --- a rewrite to address some of the problems we have with pre-processors. - -module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras, - knownSuffixHandlers, ppSuffixes, - PPSuffixHandler, PreProcessor(..), - mkSimplePreProcessor, runSimplePreProcessor, - ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs, - ppHappy, ppAlex, ppUnlit, platformDefines - ) - where - - -import Distribution.Simple.PreProcess.Unlit -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.PackageDescription as PD -import qualified Distribution.InstalledPackageInfo as Installed -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.CCompiler -import Distribution.Simple.Compiler -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Simple.Program -import Distribution.Simple.Test.LibV09 -import Distribution.System -import Distribution.Text -import Distribution.Version -import Distribution.Verbosity - -import Control.Monad -import Data.Maybe (fromMaybe) -import Data.List (nub, isSuffixOf) -import System.Directory (doesFileExist) -import System.Info (os, arch) -import System.FilePath (splitExtension, dropExtensions, (), (<.>), - takeDirectory, normalise, replaceExtension, - takeExtensions) - --- |The interface to a preprocessor, which may be implemented using an --- external program, but need not be. The arguments are the name of --- the input file, the name of the output file and a verbosity level. 
--- Here is a simple example that merely prepends a comment to the given --- source file: --- --- > ppTestHandler :: PreProcessor --- > ppTestHandler = --- > PreProcessor { --- > platformIndependent = True, --- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> --- > do info verbosity (inFile++" has been preprocessed to "++outFile) --- > stuff <- readFile inFile --- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff) --- > return ExitSuccess --- --- We split the input and output file names into a base directory and the --- rest of the file name. The input base dir is the path in the list of search --- dirs that this file was found in. The output base dir is the build dir where --- all the generated source files are put. --- --- The reason for splitting it up this way is that some pre-processors don't --- simply generate one output .hs file from one input file but have --- dependencies on other generated files (notably c2hs, where building one --- .hs file may require reading other .chi files, and then compiling the .hs --- file may require reading a generated .h file). In these cases the generated --- files need to embed relative path names to each other (eg the generated .hs --- file mentions the .h file in the FFI imports). This path must be relative to --- the base directory where the generated files are located, it cannot be --- relative to the top level of the build tree because the compilers do not --- look for .h files relative to there, ie we do not use \"-I .\", instead we --- use \"-I dist\/build\" (or whatever dist dir has been set by the user) --- --- Most pre-processors do not care of course, so mkSimplePreProcessor and --- runSimplePreProcessor functions handle the simple case. --- -data PreProcessor = PreProcessor { - - -- Is the output of the pre-processor platform independent? eg happy output - -- is portable haskell but c2hs's output is platform dependent. - -- This matters since only platform independent generated code can be - -- inlcuded into a source tarball. - platformIndependent :: Bool, - - -- TODO: deal with pre-processors that have implementaion dependent output - -- eg alex and happy have --ghc flags. However we can't really inlcude - -- ghc-specific code into supposedly portable source tarballs. - - runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir - -> (FilePath, FilePath) -- Output file name, relative to an output base dir - -> Verbosity -- verbosity - -> IO () -- Should exit if the preprocessor fails - } - --- | Function to determine paths to possible extra C sources for a --- preprocessor: just takes the path to the build directory and uses --- this to search for C sources with names that match the --- preprocessor's output name format. -type PreProcessorExtras = FilePath -> IO [FilePath] - - -mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ()) - -> (FilePath, FilePath) - -> (FilePath, FilePath) -> Verbosity -> IO () -mkSimplePreProcessor simplePP - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity = simplePP inFile outFile verbosity - where inFile = normalise (inBaseDir inRelativeFile) - outFile = normalise (outBaseDir outRelativeFile) - -runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity - -> IO () -runSimplePreProcessor pp inFile outFile verbosity = - runPreProcessor pp (".", inFile) (".", outFile) verbosity - --- |A preprocessor for turning non-Haskell files with the given extension --- into plain Haskell source files. 
-type PPSuffixHandler - = (String, BuildInfo -> LocalBuildInfo -> PreProcessor) - --- | Apply preprocessors to the sources from 'hsSourceDirs' for a given --- component (lib, exe, or test suite). -preprocessComponent :: PackageDescription - -> Component - -> LocalBuildInfo - -> Bool - -> Verbosity - -> [PPSuffixHandler] - -> IO () -preprocessComponent pd comp lbi isSrcDist verbosity handlers = case comp of - (CLib lib@Library{ libBuildInfo = bi }) -> do - let dirs = hsSourceDirs bi ++ [autogenModulesDir lbi] - setupMessage verbosity "Preprocessing library" (packageId pd) - forM_ (map ModuleName.toFilePath $ libModules lib) $ - pre dirs (buildDir lbi) (localHandlers bi) - (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do - let exeDir = buildDir lbi nm nm ++ "-tmp" - dirs = hsSourceDirs bi ++ [autogenModulesDir lbi] - setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd) - forM_ (map ModuleName.toFilePath $ otherModules bi) $ - pre dirs exeDir (localHandlers bi) - pre (hsSourceDirs bi) exeDir (localHandlers bi) $ - dropExtensions (modulePath exe) - CTest test@TestSuite{ testName = nm } -> do - setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd) - case testInterface test of - TestSuiteExeV10 _ f -> - preProcessTest test f $ buildDir lbi testName test - testName test ++ "-tmp" - TestSuiteLibV09 _ _ -> do - let testDir = buildDir lbi stubName test - stubName test ++ "-tmp" - writeSimpleTestStub test testDir - preProcessTest test (stubFilePath test) testDir - TestSuiteUnsupported tt -> die $ "No support for preprocessing test " - ++ "suite type " ++ display tt - CBench bm@Benchmark{ benchmarkName = nm } -> do - setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd) - case benchmarkInterface bm of - BenchmarkExeV10 _ f -> - preProcessBench bm f $ buildDir lbi benchmarkName bm - benchmarkName bm ++ "-tmp" - BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark " - ++ "type " ++ display tt - where - builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"] - builtinCSuffixes = cSourceExtensions - builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes - localHandlers bi = [(ext, h bi lbi) | (ext, h) <- handlers] - pre dirs dir lhndlrs fp = - preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs - preProcessTest test = preProcessComponent (testBuildInfo test) - (testModules test) - preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm) - (benchmarkModules bm) - preProcessComponent bi modules exePath dir = do - let biHandlers = localHandlers bi - sourceDirs = hsSourceDirs bi ++ [ autogenModulesDir lbi ] - sequence_ [ preprocessFile sourceDirs dir isSrcDist - (ModuleName.toFilePath modu) verbosity builtinSuffixes - biHandlers - | modu <- modules ] - preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist - (dropExtensions $ exePath) verbosity - builtinSuffixes biHandlers - ---TODO: try to list all the modules that could not be found --- not just the first one. It's annoying and slow due to the need --- to reconfigure after editing the .cabal file each time. - --- |Find the first extension of the file that exists, and preprocess it --- if required. 
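For context on where these PPSuffixHandler pairs usually come from, here is a hedged sketch of a custom Setup.hs that registers an extra handler alongside knownSuffixHandlers. The hooks API (defaultMainWithHooks, simpleUserHooks, hookedPreProcessors) is not part of this hunk, so its module location is an assumption, and the ".up" suffix and ppUp handler are invented for illustration.

    import Distribution.Simple (defaultMainWithHooks, simpleUserHooks, UserHooks(..))
    import Distribution.Simple.PreProcess
    import Distribution.PackageDescription (BuildInfo)
    import Distribution.Simple.LocalBuildInfo (LocalBuildInfo)

    -- Handler for a made-up ".up" suffix: copies the file into a .hs file,
    -- prefixing it with a marker comment.
    ppUp :: BuildInfo -> LocalBuildInfo -> PreProcessor
    ppUp _bi _lbi = PreProcessor
      { platformIndependent = True
      , runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity ->
          readFile inFile >>= \s ->
            writeFile outFile ("-- generated from " ++ inFile ++ "\n" ++ s)
      }

    main :: IO ()
    main = defaultMainWithHooks
             (simpleUserHooks { hookedPreProcessors = ("up", ppUp) : knownSuffixHandlers })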
-preprocessFile - :: [FilePath] -- ^source directories - -> FilePath -- ^build directory - -> Bool -- ^preprocess for sdist - -> FilePath -- ^module file name - -> Verbosity -- ^verbosity - -> [String] -- ^builtin suffixes - -> [(String, PreProcessor)] -- ^possible preprocessors - -> IO () -preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do - -- look for files in the various source dirs with this module name - -- and a file extension of a known preprocessor - psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile - case psrcFiles of - -- no preprocessor file exists, look for an ordinary source file - -- just to make sure one actually exists at all for this module. - -- Note: by looking in the target/output build dir too, we allow - -- source files to appear magically in the target build dir without - -- any corresponding "real" source file. This lets custom Setup.hs - -- files generate source modules directly into the build dir without - -- the rest of the build system being aware of it (somewhat dodgy) - Nothing -> do - bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile - case bsrcFiles of - Nothing -> die $ "can't find source for " ++ baseFile - ++ " in " ++ intercalate ", " searchLoc - _ -> return () - -- found a pre-processable file in one of the source dirs - Just (psrcLoc, psrcRelFile) -> do - let (srcStem, ext) = splitExtension psrcRelFile - psrcFile = psrcLoc psrcRelFile - pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected") - (lookup (tailNotNull ext) handlers) - -- Preprocessing files for 'sdist' is different from preprocessing - -- for 'build'. When preprocessing for sdist we preprocess to - -- avoid that the user has to have the preprocessors available. - -- ATM, we don't have a way to specify which files are to be - -- preprocessed and which not, so for sdist we only process - -- platform independent files and put them into the 'buildLoc' - -- (which we assume is set to the temp. directory that will become - -- the tarball). - --TODO: eliminate sdist variant, just supply different handlers - when (not forSDist || forSDist && platformIndependent pp) $ do - -- look for existing pre-processed source file in the dest dir to - -- see if we really have to re-run the preprocessor. - ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile - recomp <- case ppsrcFiles of - Nothing -> return True - Just ppsrcFile -> - psrcFile `moreRecentFile` ppsrcFile - when recomp $ do - let destDir = buildLoc dirName srcStem - createDirectoryIfMissingVerbose verbosity True destDir - runPreProcessorWithHsBootHack pp - (psrcLoc, psrcRelFile) - (buildLoc, srcStem <.> "hs") - - where - dirName = takeDirectory - tailNotNull [] = [] - tailNotNull x = tail x - - -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files - -- be in the same place as the hs files, so if we put the hs file in dist/ - -- then we need to copy the hs-boot file there too. This should probably be - -- done another way. Possibly we should also be looking for .lhs-boot - -- files, but I think that preprocessors only produce .hs files. 
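The extension dispatch in preprocessFile above comes down to splitExtension plus a lookup keyed on the suffix without its leading dot, which is what tailNotNull strips. A tiny standalone sketch, using an illustrative handler list of plain strings in place of real PreProcessor values:

    import System.FilePath (splitExtension)

    -- Illustrative only: real handlers pair a suffix with a PreProcessor.
    lookupHandler :: FilePath -> [(String, String)] -> Maybe String
    lookupHandler file handlers =
      let (_stem, ext) = splitExtension file   -- e.g. ("A/B/Foo", ".hsc")
      in  lookup (drop 1 ext) handlers         -- handler keys carry no leading dot

    -- lookupHandler "A/B/Foo.hsc" [("hsc", "ppHsc2hs"), ("chs", "ppC2hs")]
    --   == Just "ppHsc2hs"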
- runPreProcessorWithHsBootHack pp - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) = do - runPreProcessor pp - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity - - exists <- doesFileExist inBoot - when exists $ copyFileVerbose verbosity inBoot outBoot - - where - inBoot = replaceExtension inFile "hs-boot" - outBoot = replaceExtension outFile "hs-boot" - - inFile = normalise (inBaseDir inRelativeFile) - outFile = normalise (outBaseDir outRelativeFile) - --- ------------------------------------------------------------ --- * known preprocessors --- ------------------------------------------------------------ - -ppGreenCard :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppGreenCard _ lbi - = PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> - rawSystemProgramConf verbosity greencardProgram (withPrograms lbi) - (["-tffi", "-o" ++ outFile, inFile]) - } - --- This one is useful for preprocessors that can't handle literate source. --- We also need a way to chain preprocessors. -ppUnlit :: PreProcessor -ppUnlit = - PreProcessor { - platformIndependent = True, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity -> - withUTF8FileContents inFile $ \contents -> - either (writeUTF8File outFile) die (unlit inFile contents) - } - -ppCpp :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppCpp = ppCpp' [] - -ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor -ppCpp' extraArgs bi lbi = - case compilerFlavor (compiler lbi) of - GHC -> ppGhcCpp ghcProgram (>= Version [6,6] []) args bi lbi - GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi - _ -> ppCpphs args bi lbi - where cppArgs = getCppOptions bi lbi - args = cppArgs ++ extraArgs - -ppGhcCpp :: Program -> (Version -> Bool) - -> [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor -ppGhcCpp program xHs extraArgs _bi lbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (prog, version, _) <- requireProgramVersion verbosity - program anyVersion (withPrograms lbi) - rawSystemProgram verbosity prog $ - ["-E", "-cpp"] - -- This is a bit of an ugly hack. We're going to - -- unlit the file ourselves later on if appropriate, - -- so we need GHC not to unlit it now or it'll get - -- double-unlitted. In the future we might switch to - -- using cpphs --unlit instead. 
- ++ (if xHs version then ["-x", "hs"] else []) - ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi cppHeaderName) ] - ++ ["-o", outFile, inFile] - ++ extraArgs - } - -ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor -ppCpphs extraArgs _bi lbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity - cpphsProgram anyVersion (withPrograms lbi) - rawSystemProgram verbosity cpphsProg $ - ("-O" ++ outFile) : inFile - : "--noline" : "--strip" - : (if cpphsVersion >= Version [1,6] [] - then ["--include="++ (autogenModulesDir lbi cppHeaderName)] - else []) - ++ extraArgs - } - -ppHsc2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppHsc2hs bi lbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) - rawSystemProgramConf verbosity hsc2hsProgram (withPrograms lbi) $ - [ "--cc=" ++ programPath gccProg - , "--ld=" ++ programPath gccProg ] - - -- Additional gcc options - ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg - ++ programOverrideArgs gccProg ] - ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg - ++ programOverrideArgs gccProg ] - - -- OSX frameworks: - ++ [ what ++ "=-F" ++ opt - | isOSX - , opt <- nub (concatMap Installed.frameworkDirs pkgs) - , what <- ["--cflag", "--lflag"] ] - ++ [ "--lflag=" ++ arg - | isOSX - , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs - , arg <- ["-framework", opt] ] - - -- Note that on ELF systems, wherever we use -L, we must also use -R - -- because presumably that -L dir is not on the normal path for the - -- system's dynamic linker. This is needed because hsc2hs works by - -- compiling a C program and then running it. - - ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ] - - -- Options from the current package: - ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ] - ++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi - ++ PD.cppOptions bi ] - ++ [ "--cflag=" ++ opt | opt <- - [ "-I" ++ autogenModulesDir lbi, - "-include", autogenModulesDir lbi cppHeaderName ] ] - ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ] - ++ [ "--lflag=-Wl,-R," ++ opt | isELF - , opt <- PD.extraLibDirs bi ] - ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ] - ++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ] - - -- Options from dependent packages - ++ [ "--cflag=" ++ opt - | pkg <- pkgs - , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] - ++ [ opt | opt <- Installed.ccOptions pkg ] ] - ++ [ "--lflag=" ++ opt - | pkg <- pkgs - , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ] - ++ [ "-Wl,-R," ++ opt | isELF - , opt <- Installed.libraryDirs pkg ] - ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ] - ++ [ opt | opt <- Installed.ldOptions pkg ] ] - ++ ["-o", outFile, inFile] - } - where - -- TODO: installedPkgs contains ALL dependencies associated with - -- the package, but we really only want to look at packages for the - -- *current* dependency. We should use PackageIndex.dependencyClosure - -- on the direct depends of the component. Can't easily do that, - -- because the signature of this function is wrong. Tracked with - -- #2971 (which has a test case.) 
- pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi)) - isOSX = case buildOS of OSX -> True; _ -> False - isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True; - packageHacks = case compilerFlavor (compiler lbi) of - GHC -> hackRtsPackage - GHCJS -> hackRtsPackage - _ -> id - -- We don't link in the actual Haskell libraries of our dependencies, so - -- the -u flags in the ldOptions of the rts package mean linking fails on - -- OS X (it's ld is a tad stricter than gnu ld). Thus we remove the - -- ldOptions for GHC's rts package: - hackRtsPackage index = - case PackageIndex.lookupPackageName index (PackageName "rts") of - [(_, [rts])] - -> PackageIndex.insert rts { Installed.ldOptions = [] } index - _ -> error "No (or multiple) ghc rts package is registered!!" - -ppHsc2hsExtras :: PreProcessorExtras -ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap` - getDirectoryContentsRecursive buildBaseDir - -ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppC2hs bi lbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = \(inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity -> do - (c2hsProg, _, _) <- requireProgramVersion verbosity - c2hsProgram (orLaterVersion (Version [0,15] [])) - (withPrograms lbi) - (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) - rawSystemProgram verbosity c2hsProg $ - - -- Options from the current package: - [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ] - ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ] - ++ [ "--cppopts=-include" ++ (autogenModulesDir lbi cppHeaderName) ] - ++ [ "--include=" ++ outBaseDir ] - - -- Options from dependent packages - ++ [ "--cppopts=" ++ opt - | pkg <- pkgs - , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] - ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg - , c `elem` "DIU" ] ] - --TODO: install .chi files for packages, so we can --include - -- those dirs here, for the dependencies - - -- input and output files - ++ [ "--output-dir=" ++ outBaseDir - , "--output=" ++ outRelativeFile - , inBaseDir inRelativeFile ] - } - where - pkgs = PackageIndex.topologicalOrder (installedPkgs lbi) - -ppC2hsExtras :: PreProcessorExtras -ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap` - getDirectoryContentsRecursive d - ---TODO: perhaps use this with hsc2hs too ---TODO: remove cc-options from cpphs for cabal-version: >= 1.10 -getCppOptions :: BuildInfo -> LocalBuildInfo -> [String] -getCppOptions bi lbi - = platformDefines lbi - ++ cppOptions bi - ++ ["-I" ++ dir | dir <- PD.includeDirs bi] - ++ [opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU"] - -platformDefines :: LocalBuildInfo -> [String] -platformDefines lbi = - case compilerFlavor comp of - GHC -> - ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++ - ["-D" ++ os ++ "_BUILD_OS=1"] ++ - ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - GHCJS -> - compatGlasgowHaskell ++ - ["-D__GHCJS__=" ++ versionInt version] ++ - ["-D" ++ os ++ "_BUILD_OS=1"] ++ - ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - JHC -> ["-D__JHC__=" ++ versionInt version] - HaskellSuite {} -> - ["-D__HASKELL_SUITE__"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - _ -> [] - where - comp = 
compiler lbi - Platform hostArch hostOS = hostPlatform lbi - version = compilerVersion comp - compatGlasgowHaskell = - maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v]) - (compilerCompatVersion GHC comp) - -- TODO: move this into the compiler abstraction - -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all - -- the other compilers. Check if that's really what they want. - versionInt :: Version -> String - versionInt (Version { versionBranch = [] }) = "1" - versionInt (Version { versionBranch = [n] }) = show n - versionInt (Version { versionBranch = n1:n2:_ }) - = -- 6.8.x -> 608 - -- 6.10.x -> 610 - let s1 = show n1 - s2 = show n2 - middle = case s2 of - _ : _ : _ -> "" - _ -> "0" - in s1 ++ middle ++ s2 - osStr = case hostOS of - Linux -> ["linux"] - Windows -> ["mingw32"] - OSX -> ["darwin"] - FreeBSD -> ["freebsd"] - OpenBSD -> ["openbsd"] - NetBSD -> ["netbsd"] - DragonFly -> ["dragonfly"] - Solaris -> ["solaris2"] - AIX -> ["aix"] - HPUX -> ["hpux"] - IRIX -> ["irix"] - HaLVM -> [] - IOS -> ["ios"] - Android -> ["android"] - Ghcjs -> ["ghcjs"] - Hurd -> ["hurd"] - OtherOS _ -> [] - archStr = case hostArch of - I386 -> ["i386"] - X86_64 -> ["x86_64"] - PPC -> ["powerpc"] - PPC64 -> ["powerpc64"] - Sparc -> ["sparc"] - Arm -> ["arm"] - Mips -> ["mips"] - SH -> [] - IA64 -> ["ia64"] - S390 -> ["s390"] - Alpha -> ["alpha"] - Hppa -> ["hppa"] - Rs6000 -> ["rs6000"] - M68k -> ["m68k"] - Vax -> ["vax"] - JavaScript -> ["javascript"] - OtherArch _ -> [] - -ppHappy :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppHappy _ lbi = pp { platformIndependent = True } - where pp = standardPP lbi happyProgram (hcFlags hc) - hc = compilerFlavor (compiler lbi) - hcFlags GHC = ["-agc"] - hcFlags GHCJS = ["-agc"] - hcFlags _ = [] - -ppAlex :: BuildInfo -> LocalBuildInfo -> PreProcessor -ppAlex _ lbi = pp { platformIndependent = True } - where pp = standardPP lbi alexProgram (hcFlags hc) - hc = compilerFlavor (compiler lbi) - hcFlags GHC = ["-g"] - hcFlags GHCJS = ["-g"] - hcFlags _ = [] - -standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor -standardPP lbi prog args = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> - rawSystemProgramConf verbosity prog (withPrograms lbi) - (args ++ ["-o", outFile, inFile]) - } - --- |Convenience function; get the suffixes of these preprocessors. -ppSuffixes :: [ PPSuffixHandler ] -> [String] -ppSuffixes = map fst - --- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs. -knownSuffixHandlers :: [ PPSuffixHandler ] -knownSuffixHandlers = - [ ("gc", ppGreenCard) - , ("chs", ppC2hs) - , ("hsc", ppHsc2hs) - , ("x", ppAlex) - , ("y", ppHappy) - , ("ly", ppHappy) - , ("cpphs", ppCpp) - ] - --- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs. -knownExtrasHandlers :: [ PreProcessorExtras ] -knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ] - --- | Find any extra C sources generated by preprocessing that need to --- be added to the component (addresses issue #238). 
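The versionInt helper above encodes GHC's numeric version convention (6.8.x becomes 608, 6.10.x becomes 610, 7.10.3 becomes 710). Here is a standalone sketch of the same arithmetic that can be loaded in GHCi; the top-level name mirrors the local helper, and Data.Version stands in for the Version type used in the module.

    import Data.Version (Version(versionBranch), makeVersion)

    versionInt :: Version -> String
    versionInt v = case versionBranch v of
      []        -> "1"
      [n]       -> show n
      (n1:n2:_) ->
        let s2  = show n2
            pad = if length s2 >= 2 then "" else "0"   -- 6.10 -> "610", 6.8 -> "608"
        in show n1 ++ pad ++ s2

    -- versionInt (makeVersion [7,10,3]) == "710"
    -- versionInt (makeVersion [6,8,2])  == "608"
    -- versionInt (makeVersion [8])      == "8"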
-preprocessExtras :: Component - -> LocalBuildInfo - -> IO [FilePath] -preprocessExtras comp lbi = case comp of - CLib _ -> pp $ buildDir lbi - (CExe Executable { exeName = nm }) -> - pp $ buildDir lbi nm nm ++ "-tmp" - CTest test -> do - case testInterface test of - TestSuiteExeV10 _ _ -> - pp $ buildDir lbi testName test testName test ++ "-tmp" - TestSuiteLibV09 _ _ -> - pp $ buildDir lbi stubName test stubName test ++ "-tmp" - TestSuiteUnsupported tt -> die $ "No support for preprocessing test " - ++ "suite type " ++ display tt - CBench bm -> do - case benchmarkInterface bm of - BenchmarkExeV10 _ _ -> - pp $ buildDir lbi benchmarkName bm benchmarkName bm ++ "-tmp" - BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark " - ++ "type " ++ display tt - where - pp dir = (map (dir ) . concat) `fmap` forM knownExtrasHandlers ($ dir) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ar.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,166 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Ar --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @ar@ program. - -module Distribution.Simple.Program.Ar ( - createArLibArchive, - multiStageProgramInvocation - ) where - -import Control.Monad (unless) -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 -import Data.Char (isSpace) -import Distribution.Compat.CopyFile (filesEqual) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) -import Distribution.Simple.Program - ( arProgram, requireProgram ) -import Distribution.Simple.Program.Run - ( programInvocation, multiStageProgramInvocation - , runProgramInvocation ) -import Distribution.Simple.Utils - ( dieWithLocation, withTempDirectory ) -import Distribution.System - ( Arch(..), OS(..), Platform(..) ) -import Distribution.Verbosity - ( Verbosity, deafening, verbose ) -import System.Directory (doesFileExist, renameFile) -import System.FilePath ((), splitFileName) -import System.IO - ( Handle, IOMode(ReadWriteMode), SeekMode(AbsoluteSeek) - , hFileSize, hSeek, withBinaryFile ) - --- | Call @ar@ to create a library archive from a bunch of object files. --- -createArLibArchive :: Verbosity -> LocalBuildInfo - -> FilePath -> [FilePath] -> IO () -createArLibArchive verbosity lbi targetPath files = do - (ar, _) <- requireProgram verbosity arProgram progConf - - let (targetDir, targetName) = splitFileName targetPath - withTempDirectory verbosity targetDir "objs" $ \ tmpDir -> do - let tmpPath = tmpDir targetName - - -- The args to use with "ar" are actually rather subtle and system-dependent. - -- In particular we have the following issues: - -- - -- -- On OS X, "ar q" does not make an archive index. Archives with no - -- index cannot be used. - -- - -- -- GNU "ar r" will not let us add duplicate objects, only "ar q" lets us - -- do that. 
We have duplicates because of modules like "A.M" and "B.M" - -- both make an object file "M.o" and ar does not consider the directory. - -- - -- Our solution is to use "ar r" in the simple case when one call is enough. - -- When we need to call ar multiple times we use "ar q" and for the last - -- call on OSX we use "ar qs" so that it'll make the index. - - let simpleArgs = case hostOS of - OSX -> ["-r", "-s"] - _ -> ["-r"] - - initialArgs = ["-q"] - finalArgs = case hostOS of - OSX -> ["-q", "-s"] - _ -> ["-q"] - - extraArgs = verbosityOpts verbosity ++ [tmpPath] - - simple = programInvocation ar (simpleArgs ++ extraArgs) - initial = programInvocation ar (initialArgs ++ extraArgs) - middle = initial - final = programInvocation ar (finalArgs ++ extraArgs) - - sequence_ - [ runProgramInvocation verbosity inv - | inv <- multiStageProgramInvocation - simple (initial, middle, final) files ] - - unless (hostArch == Arm -- See #1537 - || hostOS == AIX) $ -- AIX uses its own "ar" format variant - wipeMetadata tmpPath - equal <- filesEqual tmpPath targetPath - unless equal $ renameFile tmpPath targetPath - - where - progConf = withPrograms lbi - Platform hostArch hostOS = hostPlatform lbi - verbosityOpts v | v >= deafening = ["-v"] - | v >= verbose = [] - | otherwise = ["-c"] - --- | @ar@ by default includes various metadata for each object file in their --- respective headers, so the output can differ for the same inputs, making --- it difficult to avoid re-linking. GNU @ar@(1) has a deterministic mode --- (@-D@) flag that always writes zero for the mtime, UID and GID, and 0644 --- for the file mode. However detecting whether @-D@ is supported seems --- rather harder than just re-implementing this feature. -wipeMetadata :: FilePath -> IO () -wipeMetadata path = do - -- Check for existence first (ReadWriteMode would create one otherwise) - exists <- doesFileExist path - unless exists $ wipeError "Temporary file disappeared" - withBinaryFile path ReadWriteMode $ \ h -> hFileSize h >>= wipeArchive h - - where - wipeError msg = dieWithLocation path Nothing $ - "Distribution.Simple.Program.Ar.wipeMetadata: " ++ msg - archLF = "!\x0a" -- global magic, 8 bytes - x60LF = "\x60\x0a" -- header magic, 2 bytes - metadata = BS.concat - [ "0 " -- mtime, 12 bytes - , "0 " -- UID, 6 bytes - , "0 " -- GID, 6 bytes - , "0644 " -- mode, 8 bytes - ] - headerSize :: Int - headerSize = 60 - - -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details - wipeArchive :: Handle -> Integer -> IO () - wipeArchive h archiveSize = do - global <- BS.hGet h (BS.length archLF) - unless (global == archLF) $ wipeError "Bad global header" - wipeHeader (toInteger $ BS.length archLF) - - where - wipeHeader :: Integer -> IO () - wipeHeader offset = case compare offset archiveSize of - EQ -> return () - GT -> wipeError (atOffset "Archive truncated") - LT -> do - header <- BS.hGet h headerSize - unless (BS.length header == headerSize) $ - wipeError (atOffset "Short header") - let magic = BS.drop 58 header - unless (magic == x60LF) . wipeError . 
atOffset $ - "Bad magic " ++ show magic ++ " in header" - - let name = BS.take 16 header - let size = BS.take 10 $ BS.drop 48 header - objSize <- case reads (BS8.unpack size) of - [(n, s)] | all isSpace s -> return n - _ -> wipeError (atOffset "Bad file size in header") - - let replacement = BS.concat [ name, metadata, size, magic ] - unless (BS.length replacement == headerSize) $ - wipeError (atOffset "Something has gone terribly wrong") - hSeek h AbsoluteSeek offset - BS.hPut h replacement - - let nextHeader = offset + toInteger headerSize + - -- Odd objects are padded with an extra '\x0a' - if odd objSize then objSize + 1 else objSize - hSeek h AbsoluteSeek nextHeader - wipeHeader nextHeader - - where - atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Builtin.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Builtin.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Builtin.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Builtin.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,337 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Builtin --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- The module defines all the known built-in 'Program's. --- --- Where possible we try to find their version numbers. --- -module Distribution.Simple.Program.Builtin ( - - -- * The collection of unconfigured and configured programs - builtinPrograms, - - -- * Programs that Cabal knows about - ghcProgram, - ghcPkgProgram, - ghcjsProgram, - ghcjsPkgProgram, - lhcProgram, - lhcPkgProgram, - hmakeProgram, - jhcProgram, - haskellSuiteProgram, - haskellSuitePkgProgram, - uhcProgram, - gccProgram, - arProgram, - stripProgram, - happyProgram, - alexProgram, - hsc2hsProgram, - c2hsProgram, - cpphsProgram, - hscolourProgram, - haddockProgram, - greencardProgram, - ldProgram, - tarProgram, - cppProgram, - pkgConfigProgram, - hpcProgram, - ) where - -import Distribution.Simple.Program.Find -import Distribution.Simple.Program.Internal -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Types -import Distribution.Simple.Utils -import Distribution.Compat.Exception -import Distribution.Verbosity -import Distribution.Version - -import Data.Char - ( isDigit ) -import qualified Data.Map as Map - --- ------------------------------------------------------------ --- * Known programs --- ------------------------------------------------------------ - --- | The default list of programs. --- These programs are typically used internally to Cabal. 
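Each entry in this module follows the same recipe: a simpleProgram value, usually extended with a parser for the tool's version output. A hedged sketch for a hypothetical tool; the tool name, its output format, and the module locations of simpleProgram and findProgramVersion are assumptions based on the imports shown above.

    import Distribution.Simple.Program.Types (Program(..), simpleProgram)
    import Distribution.Simple.Utils (findProgramVersion)

    -- Hypothetical tool whose "--version" output looks like "my-tool 1.2.3".
    myToolProgram :: Program
    myToolProgram = (simpleProgram "my-tool")
      { programFindVersion = findProgramVersion "--version" $ \str ->
          case words str of
            (_:ver:_) -> ver
            _         -> ""
      }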
-builtinPrograms :: [Program] -builtinPrograms = - [ - -- compilers and related progs - ghcProgram - , ghcPkgProgram - , ghcjsProgram - , ghcjsPkgProgram - , haskellSuiteProgram - , haskellSuitePkgProgram - , hmakeProgram - , jhcProgram - , lhcProgram - , lhcPkgProgram - , uhcProgram - , hpcProgram - -- preprocessors - , hscolourProgram - , haddockProgram - , happyProgram - , alexProgram - , hsc2hsProgram - , c2hsProgram - , cpphsProgram - , greencardProgram - -- platform toolchain - , gccProgram - , arProgram - , stripProgram - , ldProgram - , tarProgram - -- configuration tools - , pkgConfigProgram - ] - -ghcProgram :: Program -ghcProgram = (simpleProgram "ghc") { - programFindVersion = findProgramVersion "--numeric-version" id, - - -- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825 - -- (spurious warning on non-english locales) - programPostConf = \_verbosity ghcProg -> - do let ghcProg' = ghcProg { - programOverrideEnv = ("LANGUAGE", Just "en") - : programOverrideEnv ghcProg - } - -- Only the 7.8 branch seems to be affected. Fixed in 7.8.4. - affectedVersionRange = intersectVersionRanges - (laterVersion $ Version [7,8,0] []) - (earlierVersion $ Version [7,8,4] []) - return $ maybe ghcProg - (\v -> if withinRange v affectedVersionRange - then ghcProg' else ghcProg) - (programVersion ghcProg) - } - -ghcPkgProgram :: Program -ghcPkgProgram = (simpleProgram "ghc-pkg") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "ghc-pkg --version" gives a string like - -- "GHC package manager version 6.4.1" - case words str of - (_:_:_:_:ver:_) -> ver - _ -> "" - } - -ghcjsProgram :: Program -ghcjsProgram = (simpleProgram "ghcjs") { - programFindVersion = findProgramVersion "--numeric-ghcjs-version" id - } - --- note: version is the version number of the GHC version that ghcjs-pkg was built with -ghcjsPkgProgram :: Program -ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "ghcjs-pkg --version" gives a string like - -- "GHCJS package manager version 6.4.1" - case words str of - (_:_:_:_:ver:_) -> ver - _ -> "" - } - -lhcProgram :: Program -lhcProgram = (simpleProgram "lhc") { - programFindVersion = findProgramVersion "--numeric-version" id - } - -lhcPkgProgram :: Program -lhcPkgProgram = (simpleProgram "lhc-pkg") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "lhc-pkg --version" gives a string like - -- "LHC package manager version 0.7" - case words str of - (_:_:_:_:ver:_) -> ver - _ -> "" - } - -hmakeProgram :: Program -hmakeProgram = (simpleProgram "hmake") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "hmake --version" gives a string line - -- "/usr/local/bin/hmake: 3.13 (2006-11-01)" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -jhcProgram :: Program -jhcProgram = (simpleProgram "jhc") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- invoking "jhc --version" gives a string like - -- "jhc 0.3.20080208 (wubgipkamcep-2) - -- compiled by ghc-6.8 on a x86_64 running linux" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -uhcProgram :: Program -uhcProgram = (simpleProgram "uhc") { - programFindVersion = findProgramVersion "--version-dotted" id - } - -hpcProgram :: Program -hpcProgram = (simpleProgram "hpc") - { - programFindVersion = findProgramVersion "version" $ \str -> - case words str of - (_ : _ : _ : ver : _) -> ver - _ -> "" - } - --- This represents a haskell-suite 
compiler. Of course, the compiler --- itself probably is not called "haskell-suite", so this is not a real --- program. (But we don't know statically the name of the actual compiler, --- so this is the best we can do.) --- --- Having this Program value serves two purposes: --- --- 1. We can accept options for the compiler in the form of --- --- --haskell-suite-option(s)=... --- --- 2. We can find a program later using this static id (with --- requireProgram). --- --- The path to the real compiler is found and recorded in the ProgramDb --- during the configure phase. -haskellSuiteProgram :: Program -haskellSuiteProgram = (simpleProgram "haskell-suite") { - -- pretend that the program exists, otherwise it won't be in the - -- "configured" state - programFindLocation = \_verbosity _searchPath -> - return $ Just ("haskell-suite-dummy-location", []) - } - --- This represent a haskell-suite package manager. See the comments for --- haskellSuiteProgram. -haskellSuitePkgProgram :: Program -haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") { - programFindLocation = \_verbosity _searchPath -> - return $ Just ("haskell-suite-pkg-dummy-location", []) - } - - -happyProgram :: Program -happyProgram = (simpleProgram "happy") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "happy --version" gives a string like - -- "Happy Version 1.16 Copyright (c) ...." - case words str of - (_:_:ver:_) -> ver - _ -> "" - } - -alexProgram :: Program -alexProgram = (simpleProgram "alex") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "alex --version" gives a string like - -- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow" - case words str of - (_:_:ver:_) -> takeWhile (\x -> isDigit x || x == '.') ver - _ -> "" - } - -gccProgram :: Program -gccProgram = (simpleProgram "gcc") { - programFindVersion = findProgramVersion "-dumpversion" id - } - -arProgram :: Program -arProgram = simpleProgram "ar" - -stripProgram :: Program -stripProgram = (simpleProgram "strip") { - programFindVersion = \verbosity -> - findProgramVersion "--version" stripExtractVersion (lessVerbose verbosity) - } - -hsc2hsProgram :: Program -hsc2hsProgram = (simpleProgram "hsc2hs") { - programFindVersion = - findProgramVersion "--version" $ \str -> - -- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66" - case words str of - (_:_:ver:_) -> ver - _ -> "" - } - -c2hsProgram :: Program -c2hsProgram = (simpleProgram "c2hs") { - programFindVersion = findProgramVersion "--numeric-version" id - } - -cpphsProgram :: Program -cpphsProgram = (simpleProgram "cpphs") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "cpphs --version" gives a string like "cpphs 1.3" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -hscolourProgram :: Program -hscolourProgram = (simpleProgram "hscolour") { - programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour", - programFindVersion = findProgramVersion "-version" $ \str -> - -- Invoking "HsColour -version" gives a string like "HsColour 1.7" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -haddockProgram :: Program -haddockProgram = (simpleProgram "haddock") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "haddock --version" gives a string like - -- "Haddock version 0.8, (c) Simon Marlow 2006" - case words str of - (_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver - _ -> "" - } - -greencardProgram :: Program -greencardProgram = 
simpleProgram "greencard" - -ldProgram :: Program -ldProgram = simpleProgram "ld" - -tarProgram :: Program -tarProgram = (simpleProgram "tar") { - -- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the - -- '--format' option. - programPostConf = \verbosity tarProg -> do - tarHelpOutput <- getProgramInvocationOutput - verbosity (programInvocation tarProg ["--help"]) - -- Some versions of tar don't support '--help'. - `catchIO` (\_ -> return "") - let k = "Supports --format" - v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO" - m = Map.insert k v (programProperties tarProg) - return $ tarProg { programProperties = m } - } - -cppProgram :: Program -cppProgram = simpleProgram "cpp" - -pkgConfigProgram :: Program -pkgConfigProgram = (simpleProgram "pkg-config") { - programFindVersion = findProgramVersion "--version" id - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Db.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Db.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Db.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Db.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,475 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Db --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides a 'ProgramDb' type which holds configured and not-yet --- configured programs. It is the parameter to lots of actions elsewhere in --- Cabal that need to look up and run programs. If we had a Cabal monad, --- the 'ProgramDb' would probably be a reader or state component of it. --- --- One nice thing about using it is that any program that is --- registered with Cabal will get some \"configure\" and \".cabal\" --- helpers like --with-foo-args --foo-path= and extra-foo-args. --- --- There's also a hook for adding programs in a Setup.lhs script. See --- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a --- hook user the ability to get the above flags and such so that they --- don't have to write all the PATH logic inside Setup.lhs. 
- -module Distribution.Simple.Program.Db ( - -- * The collection of configured programs we can run - ProgramDb, - emptyProgramDb, - defaultProgramDb, - restoreProgramDb, - - -- ** Query and manipulate the program db - addKnownProgram, - addKnownPrograms, - lookupKnownProgram, - knownPrograms, - getProgramSearchPath, - setProgramSearchPath, - modifyProgramSearchPath, - userSpecifyPath, - userSpecifyPaths, - userMaybeSpecifyPath, - userSpecifyArgs, - userSpecifyArgss, - userSpecifiedArgs, - lookupProgram, - updateProgram, - configuredPrograms, - - -- ** Query and manipulate the program db - configureProgram, - configureAllKnownPrograms, - lookupProgramVersion, - reconfigurePrograms, - requireProgram, - requireProgramVersion, - - ) where - -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Find -import Distribution.Simple.Program.Builtin -import Distribution.Simple.Utils -import Distribution.Version -import Distribution.Text -import Distribution.Verbosity -import Distribution.Compat.Binary - -import Data.List - ( foldl' ) -import Data.Maybe - ( catMaybes ) -import Data.Tuple (swap) -import qualified Data.Map as Map -import Control.Monad - ( join, foldM ) - --- ------------------------------------------------------------ --- * Programs database --- ------------------------------------------------------------ - --- | The configuration is a collection of information about programs. It --- contains information both about configured programs and also about programs --- that we are yet to configure. --- --- The idea is that we start from a collection of unconfigured programs and one --- by one we try to configure them at which point we move them into the --- configured collection. For unconfigured programs we record not just the --- 'Program' but also any user-provided arguments and location for the program. -data ProgramDb = ProgramDb { - unconfiguredProgs :: UnconfiguredProgs, - progSearchPath :: ProgramSearchPath, - configuredProgs :: ConfiguredProgs - } - -type UnconfiguredProgram = (Program, Maybe FilePath, [ProgArg]) -type UnconfiguredProgs = Map.Map String UnconfiguredProgram -type ConfiguredProgs = Map.Map String ConfiguredProgram - - -emptyProgramDb :: ProgramDb -emptyProgramDb = ProgramDb Map.empty defaultProgramSearchPath Map.empty - -defaultProgramDb :: ProgramDb -defaultProgramDb = restoreProgramDb builtinPrograms emptyProgramDb - - --- internal helpers: -updateUnconfiguredProgs :: (UnconfiguredProgs -> UnconfiguredProgs) - -> ProgramDb -> ProgramDb -updateUnconfiguredProgs update conf = - conf { unconfiguredProgs = update (unconfiguredProgs conf) } - -updateConfiguredProgs :: (ConfiguredProgs -> ConfiguredProgs) - -> ProgramDb -> ProgramDb -updateConfiguredProgs update conf = - conf { configuredProgs = update (configuredProgs conf) } - - --- Read & Show instances are based on listToFM - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. --- -instance Show ProgramDb where - show = show . Map.toAscList . configuredProgs - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. --- -instance Read ProgramDb where - readsPrec p s = - [ (emptyProgramDb { configuredProgs = Map.fromList s' }, r) - | (s', r) <- readsPrec p s ] - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. 
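The unconfigured-to-configured lifecycle described above can be exercised directly against the API this module exports. A minimal sketch, assuming simpleProgram and programPath live in Distribution.Simple.Program.Types and using a placeholder program name:

    import Distribution.Simple.Program.Db
    import Distribution.Simple.Program.Types (simpleProgram, programPath)
    import Distribution.Verbosity (normal)

    main :: IO ()
    main = do
      let myTool = simpleProgram "my-tool"
          db0    = addKnownProgram myTool defaultProgramDb
      -- Configuring may fail quietly; use requireProgram to insist on success.
      db1 <- configureProgram normal myTool db0
      case lookupProgram myTool db1 of
        Just p  -> putStrLn ("my-tool found at " ++ programPath p)
        Nothing -> putStrLn "my-tool not found"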
--- -instance Binary ProgramDb where - put db = do - put (progSearchPath db) - put (configuredProgs db) - - get = do - searchpath <- get - progs <- get - return $! emptyProgramDb { - progSearchPath = searchpath, - configuredProgs = progs - } - - --- | The 'Read'\/'Show' and 'Binary' instances do not preserve all the --- unconfigured 'Programs' because 'Program' is not in 'Read'\/'Show' because --- it contains functions. So to fully restore a deserialised 'ProgramDb' use --- this function to add back all the known 'Program's. --- --- * It does not add the default programs, but you probably want them, use --- 'builtinPrograms' in addition to any extra you might need. --- -restoreProgramDb :: [Program] -> ProgramDb -> ProgramDb -restoreProgramDb = addKnownPrograms - - --- ------------------------------- --- Managing unconfigured programs - --- | Add a known program that we may configure later --- -addKnownProgram :: Program -> ProgramDb -> ProgramDb -addKnownProgram prog = updateUnconfiguredProgs $ - Map.insertWith combine (programName prog) (prog, Nothing, []) - where combine _ (_, path, args) = (prog, path, args) - - -addKnownPrograms :: [Program] -> ProgramDb -> ProgramDb -addKnownPrograms progs conf = foldl' (flip addKnownProgram) conf progs - - -lookupKnownProgram :: String -> ProgramDb -> Maybe Program -lookupKnownProgram name = - fmap (\(p,_,_)->p) . Map.lookup name . unconfiguredProgs - - -knownPrograms :: ProgramDb -> [(Program, Maybe ConfiguredProgram)] -knownPrograms conf = - [ (p,p') | (p,_,_) <- Map.elems (unconfiguredProgs conf) - , let p' = Map.lookup (programName p) (configuredProgs conf) ] - --- | Get the current 'ProgramSearchPath' used by the 'ProgramDb'. --- This is the default list of locations where programs are looked for when --- configuring them. This can be overridden for specific programs (with --- 'userSpecifyPath'), and specific known programs can modify or ignore this --- search path in their own configuration code. --- -getProgramSearchPath :: ProgramDb -> ProgramSearchPath -getProgramSearchPath = progSearchPath - --- | Change the current 'ProgramSearchPath' used by the 'ProgramDb'. --- This will affect programs that are configured from here on, so you --- should usually set it before configuring any programs. --- -setProgramSearchPath :: ProgramSearchPath -> ProgramDb -> ProgramDb -setProgramSearchPath searchpath db = db { progSearchPath = searchpath } - --- | Modify the current 'ProgramSearchPath' used by the 'ProgramDb'. --- This will affect programs that are configured from here on, so you --- should usually modify it before configuring any programs. --- -modifyProgramSearchPath :: (ProgramSearchPath -> ProgramSearchPath) - -> ProgramDb - -> ProgramDb -modifyProgramSearchPath f db = - setProgramSearchPath (f $ getProgramSearchPath db) db - --- |User-specify this path. Basically override any path information --- for this program in the configuration. If it's not a known --- program ignore it. --- -userSpecifyPath :: String -- ^Program name - -> FilePath -- ^user-specified path to the program - -> ProgramDb -> ProgramDb -userSpecifyPath name path = updateUnconfiguredProgs $ - flip Map.update name $ \(prog, _, args) -> Just (prog, Just path, args) - - -userMaybeSpecifyPath :: String -> Maybe FilePath - -> ProgramDb -> ProgramDb -userMaybeSpecifyPath _ Nothing conf = conf -userMaybeSpecifyPath name (Just path) conf = userSpecifyPath name path conf - - --- |User-specify the arguments for this program. 
Basically override --- any args information for this program in the configuration. If it's --- not a known program, ignore it.. -userSpecifyArgs :: String -- ^Program name - -> [ProgArg] -- ^user-specified args - -> ProgramDb - -> ProgramDb -userSpecifyArgs name args' = - updateUnconfiguredProgs - (flip Map.update name $ - \(prog, path, args) -> Just (prog, path, args ++ args')) - . updateConfiguredProgs - (flip Map.update name $ - \prog -> Just prog { programOverrideArgs = programOverrideArgs prog - ++ args' }) - - --- | Like 'userSpecifyPath' but for a list of progs and their paths. --- -userSpecifyPaths :: [(String, FilePath)] - -> ProgramDb - -> ProgramDb -userSpecifyPaths paths conf = - foldl' (\conf' (prog, path) -> userSpecifyPath prog path conf') conf paths - - --- | Like 'userSpecifyPath' but for a list of progs and their args. --- -userSpecifyArgss :: [(String, [ProgArg])] - -> ProgramDb - -> ProgramDb -userSpecifyArgss argss conf = - foldl' (\conf' (prog, args) -> userSpecifyArgs prog args conf') conf argss - - --- | Get the path that has been previously specified for a program, if any. --- -userSpecifiedPath :: Program -> ProgramDb -> Maybe FilePath -userSpecifiedPath prog = - join . fmap (\(_,p,_)->p) . Map.lookup (programName prog) . unconfiguredProgs - - --- | Get any extra args that have been previously specified for a program. --- -userSpecifiedArgs :: Program -> ProgramDb -> [ProgArg] -userSpecifiedArgs prog = - maybe [] (\(_,_,as)->as) . Map.lookup (programName prog) . unconfiguredProgs - - --- ----------------------------- --- Managing configured programs - --- | Try to find a configured program -lookupProgram :: Program -> ProgramDb -> Maybe ConfiguredProgram -lookupProgram prog = Map.lookup (programName prog) . configuredProgs - - --- | Update a configured program in the database. -updateProgram :: ConfiguredProgram -> ProgramDb - -> ProgramDb -updateProgram prog = updateConfiguredProgs $ - Map.insert (programId prog) prog - - --- | List all configured programs. -configuredPrograms :: ProgramDb -> [ConfiguredProgram] -configuredPrograms = Map.elems . configuredProgs - --- --------------------------- --- Configuring known programs - --- | Try to configure a specific program. If the program is already included in --- the collection of unconfigured programs then we use any user-supplied --- location and arguments. If the program gets configured successfully it gets --- added to the configured collection. --- --- Note that it is not a failure if the program cannot be configured. It's only --- a failure if the user supplied a location and the program could not be found --- at that location. --- --- The reason for it not being a failure at this stage is that we don't know up --- front all the programs we will need, so we try to configure them all. --- To verify that a program was actually successfully configured use --- 'requireProgram'. --- -configureProgram :: Verbosity - -> Program - -> ProgramDb - -> IO ProgramDb -configureProgram verbosity prog conf = do - let name = programName prog - maybeLocation <- case userSpecifiedPath prog conf of - Nothing -> - programFindLocation prog verbosity (progSearchPath conf) - >>= return . fmap (swap . fmap FoundOnSystem . swap) - Just path -> do - absolute <- doesExecutableExist path - if absolute - then return (Just (UserSpecified path, [])) - else findProgramOnSearchPath verbosity (progSearchPath conf) path - >>= maybe (die notFound) - (return . Just . swap . fmap UserSpecified . 
swap) - where notFound = "Cannot find the program '" ++ name - ++ "'. User-specified path '" - ++ path ++ "' does not refer to an executable and " - ++ "the program is not on the system path." - case maybeLocation of - Nothing -> return conf - Just (location, triedLocations) -> do - version <- programFindVersion prog verbosity (locationPath location) - newPath <- programSearchPathAsPATHVar (progSearchPath conf) - let configuredProg = ConfiguredProgram { - programId = name, - programVersion = version, - programDefaultArgs = [], - programOverrideArgs = userSpecifiedArgs prog conf, - programOverrideEnv = [("PATH", Just newPath)], - programProperties = Map.empty, - programLocation = location, - programMonitorFiles = triedLocations - } - configuredProg' <- programPostConf prog verbosity configuredProg - return (updateConfiguredProgs (Map.insert name configuredProg') conf) - - --- | Configure a bunch of programs using 'configureProgram'. Just a 'foldM'. --- -configurePrograms :: Verbosity - -> [Program] - -> ProgramDb - -> IO ProgramDb -configurePrograms verbosity progs conf = - foldM (flip (configureProgram verbosity)) conf progs - - --- | Try to configure all the known programs that have not yet been configured. --- -configureAllKnownPrograms :: Verbosity - -> ProgramDb - -> IO ProgramDb -configureAllKnownPrograms verbosity conf = - configurePrograms verbosity - [ prog | (prog,_,_) <- Map.elems notYetConfigured ] conf - where - notYetConfigured = unconfiguredProgs conf - `Map.difference` configuredProgs conf - - --- | reconfigure a bunch of programs given new user-specified args. It takes --- the same inputs as 'userSpecifyPath' and 'userSpecifyArgs' and for all progs --- with a new path it calls 'configureProgram'. --- -reconfigurePrograms :: Verbosity - -> [(String, FilePath)] - -> [(String, [ProgArg])] - -> ProgramDb - -> IO ProgramDb -reconfigurePrograms verbosity paths argss conf = do - configurePrograms verbosity progs - . userSpecifyPaths paths - . userSpecifyArgss argss - $ conf - - where - progs = catMaybes [ lookupKnownProgram name conf | (name,_) <- paths ] - - --- | Check that a program is configured and available to be run. --- --- It raises an exception if the program could not be configured, otherwise --- it returns the configured program. --- -requireProgram :: Verbosity -> Program -> ProgramDb - -> IO (ConfiguredProgram, ProgramDb) -requireProgram verbosity prog conf = do - - -- If it's not already been configured, try to configure it now - conf' <- case lookupProgram prog conf of - Nothing -> configureProgram verbosity prog conf - Just _ -> return conf - - case lookupProgram prog conf' of - Nothing -> die notFound - Just configuredProg -> return (configuredProg, conf') - - where notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found." - - --- | Check that a program is configured and available to be run. --- --- Additionally check that the program version number is suitable and return --- it. For example you could require 'AnyVersion' or @'orLaterVersion' --- ('Version' [1,0] [])@ --- --- It returns the configured program, its version number and a possibly updated --- 'ProgramDb'. If the program could not be configured or the version is --- unsuitable, it returns an error value. 
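A typical call site for requireProgram and for the version-checking variant defined just below, sketched with hscolourProgram from the Builtin module earlier in this diff; the verbosity value and the 1.7 lower bound are illustrative.

    import Distribution.Simple.Program.Builtin (hscolourProgram)
    import Distribution.Simple.Program.Db
    import Distribution.Verbosity (normal)
    import Distribution.Version (orLaterVersion, Version(..))

    main :: IO ()
    main = do
      -- Dies with a "could not be found" error unless HsColour can be configured.
      (_hscolour, db) <- requireProgram normal hscolourProgram defaultProgramDb
      -- Additionally insists on a minimum version.
      (_, version, _) <- requireProgramVersion normal hscolourProgram
                           (orLaterVersion (Version [1,7] [])) db
      print version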
--- -lookupProgramVersion - :: Verbosity -> Program -> VersionRange -> ProgramDb - -> IO (Either String (ConfiguredProgram, Version, ProgramDb)) -lookupProgramVersion verbosity prog range programDb = do - - -- If it's not already been configured, try to configure it now - programDb' <- case lookupProgram prog programDb of - Nothing -> configureProgram verbosity prog programDb - Just _ -> return programDb - - case lookupProgram prog programDb' of - Nothing -> return $! Left notFound - Just configuredProg@ConfiguredProgram { programLocation = location } -> - case programVersion configuredProg of - Just version - | withinRange version range -> - return $! Right (configuredProg, version ,programDb') - | otherwise -> - return $! Left (badVersion version location) - Nothing -> - return $! Left (unknownVersion location) - - where notFound = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but it could not be found." - badVersion v l = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but the version found at " - ++ locationPath l ++ " is version " ++ display v - unknownVersion l = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but the version of " - ++ locationPath l ++ " could not be determined." - versionRequirement - | isAnyVersion range = "" - | otherwise = " version " ++ display range - --- | Like 'lookupProgramVersion', but raises an exception in case of error --- instead of returning 'Left errMsg'. --- -requireProgramVersion :: Verbosity -> Program -> VersionRange - -> ProgramDb - -> IO (ConfiguredProgram, Version, ProgramDb) -requireProgramVersion verbosity prog range programDb = - join $ either die return `fmap` - lookupProgramVersion verbosity prog range programDb diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Find.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Find.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Find.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Find.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,185 +0,0 @@ -{-# LANGUAGE CPP, DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Find --- Copyright : Duncan Coutts 2013 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A somewhat extended notion of the normal program search path concept. --- --- Usually when finding executables we just want to look in the usual places --- using the OS's usual method for doing so. In Haskell the normal OS-specific --- method is captured by 'findExecutable'. On all common OSs that makes use of --- a @PATH@ environment variable, (though on Windows it is not just the @PATH@). --- --- However it is sometimes useful to be able to look in additional locations --- without having to change the process-global @PATH@ environment variable. --- So we need an extension of the usual 'findExecutable' that can look in --- additional locations, either before, after or instead of the normal OS --- locations. 
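A usage sketch for the search-path API this module defines below; the extra directory and the program name are placeholders, and the verbosity value is an assumption.

    import Distribution.Simple.Program.Find
    import Distribution.Verbosity (normal)

    main :: IO ()
    main = do
      -- Look in ./tools first, then fall back to the normal OS search method.
      let path = [ProgramSearchPathDir "./tools", ProgramSearchPathDefault]
      found <- findProgramOnSearchPath normal path "my-tool"
      case found of
        Just (exe, _notFoundAt) -> putStrLn ("found: " ++ exe)
        Nothing                 -> putStrLn "my-tool is not on the search path"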
--- -module Distribution.Simple.Program.Find ( - -- * Program search path - ProgramSearchPath, - ProgramSearchPathEntry(..), - defaultProgramSearchPath, - findProgramOnSearchPath, - programSearchPathAsPATHVar, - getSystemSearchPath, - ) where - -import Distribution.Verbosity -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Compat.Environment -import Distribution.Compat.Binary - -import qualified System.Directory as Directory - ( findExecutable ) -import System.FilePath as FilePath - ( (), (<.>), splitSearchPath, searchPathSeparator, getSearchPath - , takeDirectory ) -import Data.List - ( nub ) -import GHC.Generics -#if defined(mingw32_HOST_OS) -import qualified System.Win32 as Win32 -#endif - --- | A search path to use when locating executables. This is analogous --- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use --- the system default method for finding executables ('findExecutable' which --- on unix is simply looking on the @$PATH@ but on win32 is a bit more --- complicated). --- --- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs --- either before, after or instead of the default, e.g. here we add an extra --- dir to search after the usual ones. --- --- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir] --- -type ProgramSearchPath = [ProgramSearchPathEntry] -data ProgramSearchPathEntry = - ProgramSearchPathDir FilePath -- ^ A specific dir - | ProgramSearchPathDefault -- ^ The system default - deriving (Eq, Generic) - -instance Binary ProgramSearchPathEntry - -defaultProgramSearchPath :: ProgramSearchPath -defaultProgramSearchPath = [ProgramSearchPathDefault] - -findProgramOnSearchPath :: Verbosity -> ProgramSearchPath - -> FilePath -> IO (Maybe (FilePath, [FilePath])) -findProgramOnSearchPath verbosity searchpath prog = do - debug verbosity $ "Searching for " ++ prog ++ " in path." - res <- tryPathElems [] searchpath - case res of - Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") - Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path) - return res - where - tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry] - -> IO (Maybe (FilePath, [FilePath])) - tryPathElems _ [] = return Nothing - tryPathElems tried (pe:pes) = do - res <- tryPathElem pe - case res of - (Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes - (Just foundat, notfoundat) -> return (Just (foundat, alltried)) - where - alltried = concat (reverse (notfoundat : tried)) - - tryPathElem :: ProgramSearchPathEntry -> IO (Maybe FilePath, [FilePath]) - tryPathElem (ProgramSearchPathDir dir) = - findFirstExe [ dir prog <.> ext | ext <- exeExtensions ] - - -- On windows, getSystemSearchPath is not guaranteed 100% correct so we - -- use findExecutable and then approximate the not-found-at locations. 
- tryPathElem ProgramSearchPathDefault | buildOS == Windows = do - mExe <- findExecutable prog - syspath <- getSystemSearchPath - case mExe of - Nothing -> - let notfoundat = [ dir prog | dir <- syspath ] in - return (Nothing, notfoundat) - - Just foundat -> do - let founddir = takeDirectory foundat - notfoundat = [ dir prog - | dir <- takeWhile (/= founddir) syspath ] - return (Just foundat, notfoundat) - - -- On other OSs we can just do the simple thing - tryPathElem ProgramSearchPathDefault = do - dirs <- getSystemSearchPath - findFirstExe [ dir prog <.> ext | dir <- dirs, ext <- exeExtensions ] - - findFirstExe :: [FilePath] -> IO (Maybe FilePath, [FilePath]) - findFirstExe = go [] - where - go fs' [] = return (Nothing, reverse fs') - go fs' (f:fs) = do - isExe <- doesExecutableExist f - if isExe - then return (Just f, reverse fs') - else go (f:fs') fs - --- | Interpret a 'ProgramSearchPath' to construct a new @$PATH@ env var. --- Note that this is close but not perfect because on Windows the search --- algorithm looks at more than just the @%PATH%@. -programSearchPathAsPATHVar :: ProgramSearchPath -> IO String -programSearchPathAsPATHVar searchpath = do - ess <- mapM getEntries searchpath - return (intercalate [searchPathSeparator] (concat ess)) - where - getEntries (ProgramSearchPathDir dir) = return [dir] - getEntries ProgramSearchPathDefault = do - env <- getEnvironment - return (maybe [] splitSearchPath (lookup "PATH" env)) - --- | Get the system search path. On Unix systems this is just the @$PATH@ env --- var, but on windows it's a bit more complicated. --- -getSystemSearchPath :: IO [FilePath] -getSystemSearchPath = fmap nub $ do -#if defined(mingw32_HOST_OS) - processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE - currentdir <- Win32.getCurrentDirectory - systemdir <- Win32.getSystemDirectory - windowsdir <- Win32.getWindowsDirectory - pathdirs <- FilePath.getSearchPath - let path = processdir : currentdir - : systemdir : windowsdir - : pathdirs - return path -#else - FilePath.getSearchPath -#endif - -#ifdef MIN_VERSION_directory -#if MIN_VERSION_directory(1,2,1) -#define HAVE_directory_121 -#endif -#endif - -findExecutable :: FilePath -> IO (Maybe FilePath) -#ifdef HAVE_directory_121 -findExecutable = Directory.findExecutable -#else -findExecutable prog = do - -- With directory < 1.2.1 'findExecutable' doesn't check that the path - -- really refers to an executable. 
- mExe <- Directory.findExecutable prog - case mExe of - Just exe -> do - exeExists <- doesExecutableExist exe - if exeExists - then return mExe - else return Nothing - _ -> return mExe -#endif - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/GHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/GHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/GHC.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/GHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,506 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Simple.Program.GHC ( - GhcOptions(..), - GhcMode(..), - GhcOptimisation(..), - GhcDynLinkMode(..), - GhcProfAuto(..), - - ghcInvocation, - renderGhcOptions, - - runGHC, - - ) where - -import Distribution.Compat.Semigroup as Semi -import Distribution.Simple.GHC.ImplInfo -import Distribution.Package -import Distribution.PackageDescription hiding (Flag) -import Distribution.ModuleName -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Setup -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.System -import Distribution.Text -import Distribution.Verbosity -import Distribution.Utils.NubList -import Language.Haskell.Extension - -import GHC.Generics (Generic) -import qualified Data.Map as M - --- | A structured set of GHC options/flags --- -data GhcOptions = GhcOptions { - - -- | The major mode for the ghc invocation. - ghcOptMode :: Flag GhcMode, - - -- | Any extra options to pass directly to ghc. These go at the end and hence - -- override other stuff. - ghcOptExtra :: NubListR String, - - -- | Extra default flags to pass directly to ghc. These go at the beginning - -- and so can be overridden by other stuff. - ghcOptExtraDefault :: NubListR String, - - ----------------------- - -- Inputs and outputs - - -- | The main input files; could be .hs, .hi, .c, .o, depending on mode. - ghcOptInputFiles :: NubListR FilePath, - - -- | The names of input Haskell modules, mainly for @--make@ mode. - ghcOptInputModules :: NubListR ModuleName, - - -- | Location for output file; the @ghc -o@ flag. - ghcOptOutputFile :: Flag FilePath, - - -- | Location for dynamic output file in 'GhcStaticAndDynamic' mode; - -- the @ghc -dyno@ flag. - ghcOptOutputDynFile :: Flag FilePath, - - -- | Start with an empty search path for Haskell source files; - -- the @ghc -i@ flag (@-i@ on it's own with no path argument). - ghcOptSourcePathClear :: Flag Bool, - - -- | Search path for Haskell source files; the @ghc -i@ flag. - ghcOptSourcePath :: NubListR FilePath, - - ------------- - -- Packages - - -- | The unit ID the modules will belong to; the @ghc -this-unit-id@ - -- flag (or @-this-package-key@ or @-package-name@ on older - -- versions of GHC). This is a 'String' because we assume you've - -- already figured out what the correct format for this string is - -- (we need to handle backwards compatibility.) - ghcOptThisUnitId :: Flag String, - - -- | GHC package databases to use, the @ghc -package-conf@ flag. - ghcOptPackageDBs :: PackageDBStack, - - -- | The GHC packages to use. For compatability with old and new ghc, this - -- requires both the short and long form of the package id; - -- the @ghc -package@ or @ghc -package-id@ flags. 
- ghcOptPackages :: - NubListR (UnitId, PackageId, ModuleRenaming), - - -- | Start with a clean package set; the @ghc -hide-all-packages@ flag - ghcOptHideAllPackages :: Flag Bool, - - -- | Don't automatically link in Haskell98 etc; the @ghc - -- -no-auto-link-packages@ flag. - ghcOptNoAutoLinkPackages :: Flag Bool, - - ----------------- - -- Linker stuff - - -- | Names of libraries to link in; the @ghc -l@ flag. - ghcOptLinkLibs :: NubListR FilePath, - - -- | Search path for libraries to link in; the @ghc -L@ flag. - ghcOptLinkLibPath :: NubListR FilePath, - - -- | Options to pass through to the linker; the @ghc -optl@ flag. - ghcOptLinkOptions :: NubListR String, - - -- | OSX only: frameworks to link in; the @ghc -framework@ flag. - ghcOptLinkFrameworks :: NubListR String, - - -- | OSX only: Search path for frameworks to link in; the - -- @ghc -framework-path@ flag. - ghcOptLinkFrameworkDirs :: NubListR String, - - -- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag. - ghcOptNoLink :: Flag Bool, - - -- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@ - -- flag. - ghcOptLinkNoHsMain :: Flag Bool, - - -------------------- - -- C and CPP stuff - - -- | Options to pass through to the C compiler; the @ghc -optc@ flag. - ghcOptCcOptions :: NubListR String, - - -- | Options to pass through to CPP; the @ghc -optP@ flag. - ghcOptCppOptions :: NubListR String, - - -- | Search path for CPP includes like header files; the @ghc -I@ flag. - ghcOptCppIncludePath :: NubListR FilePath, - - -- | Extra header files to include at CPP stage; the @ghc -optP-include@ flag. - ghcOptCppIncludes :: NubListR FilePath, - - -- | Extra header files to include for old-style FFI; the @ghc -#include@ flag. - ghcOptFfiIncludes :: NubListR FilePath, - - ---------------------------- - -- Language and extensions - - -- | The base language; the @ghc -XHaskell98@ or @-XHaskell2010@ flag. - ghcOptLanguage :: Flag Language, - - -- | The language extensions; the @ghc -X@ flag. - ghcOptExtensions :: NubListR Extension, - - -- | A GHC version-dependent mapping of extensions to flags. This must be - -- set to be able to make use of the 'ghcOptExtensions'. - ghcOptExtensionMap :: M.Map Extension String, - - ---------------- - -- Compilation - - -- | What optimisation level to use; the @ghc -O@ flag. - ghcOptOptimisation :: Flag GhcOptimisation, - - -- | Emit debug info; the @ghc -g@ flag. - ghcOptDebugInfo :: Flag Bool, - - -- | Compile in profiling mode; the @ghc -prof@ flag. - ghcOptProfilingMode :: Flag Bool, - - -- | Automatically add profiling cost centers; the @ghc -fprof-auto*@ flags. - ghcOptProfilingAuto :: Flag GhcProfAuto, - - -- | Use the \"split object files\" feature; the @ghc -split-objs@ flag. - ghcOptSplitObjs :: Flag Bool, - - -- | Run N jobs simultaneously (if possible). - ghcOptNumJobs :: Flag (Maybe Int), - - -- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags. 
- ghcOptHPCDir :: Flag FilePath, - - ---------------- - -- GHCi - - -- | Extra GHCi startup scripts; the @-ghci-script@ flag - ghcOptGHCiScripts :: NubListR FilePath, - - ------------------------ - -- Redirecting outputs - - ghcOptHiSuffix :: Flag String, - ghcOptObjSuffix :: Flag String, - ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode - ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode - ghcOptHiDir :: Flag FilePath, - ghcOptObjDir :: Flag FilePath, - ghcOptOutputDir :: Flag FilePath, - ghcOptStubDir :: Flag FilePath, - - -------------------- - -- Dynamic linking - - ghcOptDynLinkMode :: Flag GhcDynLinkMode, - ghcOptShared :: Flag Bool, - ghcOptFPic :: Flag Bool, - ghcOptDylibName :: Flag String, - ghcOptRPaths :: NubListR FilePath, - - --------------- - -- Misc flags - - -- | Get GHC to be quiet or verbose with what it's doing; the @ghc -v@ flag. - ghcOptVerbosity :: Flag Verbosity, - - -- | Let GHC know that it is Cabal that's calling it. - -- Modifies some of the GHC error messages. - ghcOptCabal :: Flag Bool - -} deriving (Show, Generic) - - -data GhcMode = GhcModeCompile -- ^ @ghc -c@ - | GhcModeLink -- ^ @ghc@ - | GhcModeMake -- ^ @ghc --make@ - | GhcModeInteractive -- ^ @ghci@ \/ @ghc --interactive@ - | GhcModeAbiHash -- ^ @ghc --abi-hash@ --- | GhcModeDepAnalysis -- ^ @ghc -M@ --- | GhcModeEvaluate -- ^ @ghc -e@ - deriving (Show, Eq) - -data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@ - | GhcNormalOptimisation -- ^ @-O@ - | GhcMaximumOptimisation -- ^ @-O2@ - | GhcSpecialOptimisation String -- ^ e.g. @-Odph@ - deriving (Show, Eq) - -data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@ - | GhcDynamicOnly -- ^ @-dynamic@ - | GhcStaticAndDynamic -- ^ @-static -dynamic-too@ - deriving (Show, Eq) - -data GhcProfAuto = GhcProfAutoAll -- ^ @-fprof-auto@ - | GhcProfAutoToplevel -- ^ @-fprof-auto-top@ - | GhcProfAutoExported -- ^ @-fprof-auto-exported@ - deriving (Show, Eq) - -runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> Platform -> GhcOptions - -> IO () -runGHC verbosity ghcProg comp platform opts = do - runProgramInvocation verbosity (ghcInvocation ghcProg comp platform opts) - - -ghcInvocation :: ConfiguredProgram -> Compiler -> Platform -> GhcOptions - -> ProgramInvocation -ghcInvocation prog comp platform opts = - programInvocation prog (renderGhcOptions comp platform opts) - -renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String] -renderGhcOptions comp _platform@(Platform _arch os) opts - | compilerFlavor comp `notElem` [GHC, GHCJS] = - error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " - ++ "compiler flavor must be 'GHC' or 'GHCJS'!" 
- | otherwise = - concat - [ case flagToMaybe (ghcOptMode opts) of - Nothing -> [] - Just GhcModeCompile -> ["-c"] - Just GhcModeLink -> [] - Just GhcModeMake -> ["--make"] - Just GhcModeInteractive -> ["--interactive"] - Just GhcModeAbiHash -> ["--abi-hash"] --- Just GhcModeDepAnalysis -> ["-M"] --- Just GhcModeEvaluate -> ["-e", expr] - - , flags ghcOptExtraDefault - - , [ "-no-link" | flagBool ghcOptNoLink ] - - --------------- - -- Misc flags - - , maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts)) - - , [ "-fbuilding-cabal-package" | flagBool ghcOptCabal - , flagBuildingCabalPkg implInfo ] - - ---------------- - -- Compilation - - , case flagToMaybe (ghcOptOptimisation opts) of - Nothing -> [] - Just GhcNoOptimisation -> ["-O0"] - Just GhcNormalOptimisation -> ["-O"] - Just GhcMaximumOptimisation -> ["-O2"] - Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph - - , [ "-g" | flagDebugInfo implInfo && flagBool ghcOptDebugInfo ] - - , [ "-prof" | flagBool ghcOptProfilingMode ] - - , case flagToMaybe (ghcOptProfilingAuto opts) of - _ | not (flagBool ghcOptProfilingMode) - -> [] - Nothing -> [] - Just GhcProfAutoAll - | flagProfAuto implInfo -> ["-fprof-auto"] - | otherwise -> ["-auto-all"] -- not the same, but close - Just GhcProfAutoToplevel - | flagProfAuto implInfo -> ["-fprof-auto-top"] - | otherwise -> ["-auto-all"] - Just GhcProfAutoExported - | flagProfAuto implInfo -> ["-fprof-auto-exported"] - | otherwise -> ["-auto"] - - , [ "-split-objs" | flagBool ghcOptSplitObjs ] - - , case flagToMaybe (ghcOptHPCDir opts) of - Nothing -> [] - Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir] - - , if parmakeSupported comp - then case ghcOptNumJobs opts of - NoFlag -> [] - Flag n -> ["-j" ++ maybe "" show n] - else [] - - -------------------- - -- Dynamic linking - - , [ "-shared" | flagBool ghcOptShared ] - , case flagToMaybe (ghcOptDynLinkMode opts) of - Nothing -> [] - Just GhcStaticOnly -> ["-static"] - Just GhcDynamicOnly -> ["-dynamic"] - Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"] - , [ "-fPIC" | flagBool ghcOptFPic ] - - , concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ] - - ------------------------ - -- Redirecting outputs - - , concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ] - , concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ] - , concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ] - , concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ] - , concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir - , flagOutputDir implInfo ] - , concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ] - , concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ] - , concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir - , flagStubdir implInfo ] - - ----------------------- - -- Source search path - - , [ "-i" | flagBool ghcOptSourcePathClear ] - , [ "-i" ++ dir | dir <- flags ghcOptSourcePath ] - - -------------------- - -- C and CPP stuff - - , [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ] - , [ "-optP" ++ opt | opt <- flags ghcOptCppOptions ] - , concat [ [ "-optP-include", "-optP" ++ inc] - | inc <- flags ghcOptCppIncludes ] - , [ "-#include \"" ++ inc ++ "\"" - | inc <- flags ghcOptFfiIncludes, flagFfiIncludes implInfo ] - , [ "-optc" ++ opt | opt <- flags ghcOptCcOptions ] - - ----------------- - -- Linker stuff - - , [ "-optl" ++ opt | opt <- flags ghcOptLinkOptions ] - , ["-l" ++ lib | lib <- flags ghcOptLinkLibs ] - , ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ] - , if isOSX - then concat [ 
["-framework", fmwk] - | fmwk <- flags ghcOptLinkFrameworks ] - else [] - , if isOSX - then concat [ ["-framework-path", path] - | path <- flags ghcOptLinkFrameworkDirs ] - else [] - , [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ] - , [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ] - , concat [ [ "-optl-Wl,-rpath," ++ dir] - | dir <- flags ghcOptRPaths ] - - ------------- - -- Packages - - , concat [ [ case () of - _ | unitIdSupported comp -> "-this-unit-id" - | packageKeySupported comp -> "-this-package-key" - | otherwise -> "-package-name" - , this_arg ] - | this_arg <- flag ghcOptThisUnitId ] - - , [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ] - , [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ] - - , packageDbArgs implInfo (ghcOptPackageDBs opts) - - , concat $ if flagPackageId implInfo - then let space "" = "" - space xs = ' ' : xs - in [ ["-package-id", display ipkgid ++ space (display rns)] - | (ipkgid,_,rns) <- flags ghcOptPackages ] - else [ ["-package", display pkgid] - | (_,pkgid,_) <- flags ghcOptPackages ] - - ---------------------------- - -- Language and extensions - - , if supportsHaskell2010 implInfo - then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ] - else [] - - , [ case M.lookup ext (ghcOptExtensionMap opts) of - Just arg -> arg - Nothing -> error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " - ++ display ext ++ " not present in ghcOptExtensionMap." - | ext <- flags ghcOptExtensions ] - - ---------------- - -- GHCi - - , concat [ [ "-ghci-script", script ] | script <- flags ghcOptGHCiScripts - , flagGhciScript implInfo ] - - --------------- - -- Inputs - - , [ display modu | modu <- flags ghcOptInputModules ] - , flags ghcOptInputFiles - - , concat [ [ "-o", out] | out <- flag ghcOptOutputFile ] - , concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ] - - --------------- - -- Extra - - , flags ghcOptExtra - - ] - - - where - implInfo = getImplInfo comp - isOSX = os == OSX - flag flg = flagToList (flg opts) - flags flg = fromNubListR . flg $ opts - flagBool flg = fromFlagOrDefault False (flg opts) - -verbosityOpts :: Verbosity -> [String] -verbosityOpts verbosity - | verbosity >= deafening = ["-v"] - | verbosity >= normal = [] - | otherwise = ["-w", "-v0"] - - --- | GHC <7.6 uses '-package-conf' instead of '-package-db'. -packageDbArgsConf :: PackageDBStack -> [String] -packageDbArgsConf dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs - (GlobalPackageDB:dbs) -> ("-no-user-package-conf") - : concatMap specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = [ "-package-conf", db ] - specific _ = ierror - ierror = error $ "internal error: unexpected package db stack: " - ++ show dbstack - --- | GHC >= 7.6 uses the '-package-db' flag. See --- https://ghc.haskell.org/trac/ghc/ticket/5977. 
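The two renderers here differ mainly in spelling: GHC older than 7.6 takes -package-conf, newer GHCs take -package-db. A simplified standalone sketch of that choice, assuming a cut-down PackageDB type and covering only the specific (file or directory) databases, with none of the -no-user/-clear handling; the function name and the example path are made up:

data PackageDB = GlobalPackageDB | UserPackageDB | SpecificPackageDB FilePath
  deriving (Eq, Show)

packageDbFlagsSketch :: Bool        -- True for GHC < 7.6, i.e. "-package-conf"
                     -> [PackageDB] -- the database stack
                     -> [String]
packageDbFlagsSketch oldStyle dbs =
    concat [ [flagName, db] | SpecificPackageDB db <- dbs ]
  where
    flagName | oldStyle  = "-package-conf"
             | otherwise = "-package-db"

main :: IO ()
main = print (packageDbFlagsSketch False
                [GlobalPackageDB, UserPackageDB, SpecificPackageDB "dist/package.conf.d"])
-- prints ["-package-db","dist/package.conf.d"]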
-packageDbArgsDb :: PackageDBStack -> [String] --- special cases to make arguments prettier in common scenarios -packageDbArgsDb dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) - | all isSpecific dbs -> concatMap single dbs - (GlobalPackageDB:dbs) - | all isSpecific dbs -> "-no-user-package-db" - : concatMap single dbs - dbs -> "-clear-package-db" - : concatMap single dbs - where - single (SpecificPackageDB db) = [ "-package-db", db ] - single GlobalPackageDB = [ "-global-package-db" ] - single UserPackageDB = [ "-user-package-db" ] - isSpecific (SpecificPackageDB _) = True - isSpecific _ = False - -packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String] -packageDbArgs implInfo - | flagPackageConf implInfo = packageDbArgsConf - | otherwise = packageDbArgsDb - --- ----------------------------------------------------------------------------- --- Boilerplate Monoid instance for GhcOptions - -instance Monoid GhcOptions where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup GhcOptions where - (<>) = gmappend diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/HcPkg.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/HcPkg.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/HcPkg.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/HcPkg.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,481 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.HcPkg --- Copyright : Duncan Coutts 2009, 2013 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hc-pkg@ program. --- Currently only GHC, GHCJS and LHC have hc-pkg programs. - -module Distribution.Simple.Program.HcPkg ( - HcPkgInfo(..), - - init, - invoke, - register, - reregister, - registerMultiInstance, - unregister, - recache, - expose, - hide, - dump, - describe, - list, - - -- * Program invocations - initInvocation, - registerInvocation, - reregisterInvocation, - registerMultiInstanceInvocation, - unregisterInvocation, - recacheInvocation, - exposeInvocation, - hideInvocation, - dumpInvocation, - describeInvocation, - listInvocation, - ) where - -import Distribution.Package hiding (installedUnitId) -import Distribution.InstalledPackageInfo -import Distribution.ParseUtils -import Distribution.Simple.Compiler -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Compat.Exception - -import Prelude hiding (init) -import Data.Char - ( isSpace ) -import Data.List - ( stripPrefix ) -import System.FilePath as FilePath - ( (), (<.>) - , splitPath, splitDirectories, joinPath, isPathSeparator ) -import qualified System.FilePath.Posix as FilePath.Posix - --- | Information about the features and capabilities of an @hc-pkg@ --- program. 
--- -data HcPkgInfo = HcPkgInfo - { hcPkgProgram :: ConfiguredProgram - , noPkgDbStack :: Bool -- ^ no package DB stack supported - , noVerboseFlag :: Bool -- ^ hc-pkg does not support verbosity flags - , flagPackageConf :: Bool -- ^ use package-conf option instead of package-db - , supportsDirDbs :: Bool -- ^ supports directory style package databases - , requiresDirDbs :: Bool -- ^ requires directory style package databases - , nativeMultiInstance :: Bool -- ^ supports --enable-multi-instance flag - , recacheMultiInstance :: Bool -- ^ supports multi-instance via recache - } - --- | Call @hc-pkg@ to initialise a package database at the location {path}. --- --- > hc-pkg init {path} --- -init :: HcPkgInfo -> Verbosity -> Bool -> FilePath -> IO () -init hpi verbosity preferCompat path - | not (supportsDirDbs hpi) - || (not (requiresDirDbs hpi) && preferCompat) - = writeFile path "[]" - - | otherwise - = runProgramInvocation verbosity (initInvocation hpi verbosity path) - --- | Run @hc-pkg@ using a given package DB stack, directly forwarding the --- provided command-line arguments to it. -invoke :: HcPkgInfo -> Verbosity -> PackageDBStack -> [String] -> IO () -invoke hpi verbosity dbStack extraArgs = - runProgramInvocation verbosity invocation - where - args = packageDbStackOpts hpi dbStack ++ extraArgs - invocation = programInvocation (hcPkgProgram hpi) args - --- | Call @hc-pkg@ to register a package. --- --- > hc-pkg register {filename | -} [--user | --global | --package-db] --- -register :: HcPkgInfo -> Verbosity -> PackageDBStack - -> Either FilePath - InstalledPackageInfo - -> IO () -register hpi verbosity packagedb pkgFile = - runProgramInvocation verbosity - (registerInvocation hpi verbosity packagedb pkgFile) - - --- | Call @hc-pkg@ to re-register a package. --- --- > hc-pkg register {filename | -} [--user | --global | --package-db] --- -reregister :: HcPkgInfo -> Verbosity -> PackageDBStack - -> Either FilePath - InstalledPackageInfo - -> IO () -reregister hpi verbosity packagedb pkgFile = - runProgramInvocation verbosity - (reregisterInvocation hpi verbosity packagedb pkgFile) - -registerMultiInstance :: HcPkgInfo -> Verbosity - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerMultiInstance hpi verbosity packagedbs pkgInfo - | nativeMultiInstance hpi - = runProgramInvocation verbosity - (registerMultiInstanceInvocation hpi verbosity packagedbs (Right pkgInfo)) - - -- This is a trick. Older versions of GHC do not support the - -- --enable-multi-instance flag for ghc-pkg register but it turns out that - -- the same ability is available by using ghc-pkg recache. The recache - -- command is there to support distro package managers that like to work - -- by just installing files and running update commands, rather than - -- special add/remove commands. So the way to register by this method is - -- to write the package registration file directly into the package db and - -- then call hc-pkg recache. - -- - | recacheMultiInstance hpi - = do let pkgdb = last packagedbs - writeRegistrationFileDirectly hpi pkgdb pkgInfo - recache hpi verbosity pkgdb - - | otherwise - = die $ "HcPkg.registerMultiInstance: the compiler does not support " - ++ "registering multiple instances of packages." 
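The recache fallback described above boils down to dropping a rendered registration file into a directory-style database and asking the hc-pkg tool to rebuild its cache. A standalone sketch of that sequence, assuming ghc-pkg as the hc-pkg tool and leaving the unit id and registration text to the caller; registerByRecache is a hypothetical name, not the Cabal API:

import System.FilePath ((</>), (<.>))
import System.Process (callProcess)

registerByRecache :: FilePath  -- directory-style package db
                  -> String    -- installed unit id, used as the file name
                  -> String    -- rendered InstalledPackageInfo text
                  -> IO ()
registerByRecache dbDir unitId regText = do
  writeFile (dbDir </> unitId <.> "conf") regText  -- the real code writes UTF-8 explicitly
  callProcess "ghc-pkg" ["recache", "--package-db=" ++ dbDir]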
- -writeRegistrationFileDirectly :: HcPkgInfo - -> PackageDB - -> InstalledPackageInfo - -> IO () -writeRegistrationFileDirectly hpi (SpecificPackageDB dir) pkgInfo - | supportsDirDbs hpi - = do let pkgfile = dir display (installedUnitId pkgInfo) <.> "conf" - writeUTF8File pkgfile (showInstalledPackageInfo pkgInfo) - - | otherwise - = die $ "HcPkg.writeRegistrationFileDirectly: compiler does not support dir style package dbs" - -writeRegistrationFileDirectly _ _ _ = - -- We don't know here what the dir for the global or user dbs are, - -- if that's needed it'll require a bit more plumbing to support. - die $ "HcPkg.writeRegistrationFileDirectly: only supports SpecificPackageDB for now" - - --- | Call @hc-pkg@ to unregister a package --- --- > hc-pkg unregister [pkgid] [--user | --global | --package-db] --- -unregister :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -unregister hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (unregisterInvocation hpi verbosity packagedb pkgid) - - --- | Call @hc-pkg@ to recache the registered packages. --- --- > hc-pkg recache [--user | --global | --package-db] --- -recache :: HcPkgInfo -> Verbosity -> PackageDB -> IO () -recache hpi verbosity packagedb = - runProgramInvocation verbosity - (recacheInvocation hpi verbosity packagedb) - - --- | Call @hc-pkg@ to expose a package. --- --- > hc-pkg expose [pkgid] [--user | --global | --package-db] --- -expose :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -expose hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (exposeInvocation hpi verbosity packagedb pkgid) - --- | Call @hc-pkg@ to retrieve a specific package --- --- > hc-pkg describe [pkgid] [--user | --global | --package-db] --- -describe :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId -> IO [InstalledPackageInfo] -describe hpi verbosity packagedb pid = do - - output <- getProgramInvocationOutput verbosity - (describeInvocation hpi verbosity packagedb pid) - `catchIO` \_ -> return "" - - case parsePackages output of - Left ok -> return ok - _ -> die $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " describe " ++ display pid ++ "'" - --- | Call @hc-pkg@ to hide a package. --- --- > hc-pkg hide [pkgid] [--user | --global | --package-db] --- -hide :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -hide hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (hideInvocation hpi verbosity packagedb pkgid) - - --- | Call @hc-pkg@ to get all the details of all the packages in the given --- package database. --- -dump :: HcPkgInfo -> Verbosity -> PackageDB -> IO [InstalledPackageInfo] -dump hpi verbosity packagedb = do - - output <- getProgramInvocationOutput verbosity - (dumpInvocation hpi verbosity packagedb) - `catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " dump failed" - - case parsePackages output of - Left ok -> return ok - _ -> die $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " dump'" - -parsePackages :: String -> Either [InstalledPackageInfo] [PError] -parsePackages str = - let parsed = map parseInstalledPackageInfo' (splitPkgs str) - in case [ msg | ParseFailed msg <- parsed ] of - [] -> Left [ setUnitId - . maybe id mungePackagePaths (pkgRoot pkg) - $ pkg - | ParseOk _ pkg <- parsed ] - msgs -> Right msgs - where - parseInstalledPackageInfo' = - parseFieldsFlat fieldsInstalledPackageInfo emptyInstalledPackageInfo - ---TODO: this could be a lot faster. 
We're doing normaliseLineEndings twice --- and converting back and forth with lines/unlines. -splitPkgs :: String -> [String] -splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines - where - -- Handle the case of there being no packages at all. - checkEmpty [s] | all isSpace s = [] - checkEmpty ss = ss - - splitWith :: (a -> Bool) -> [a] -> [[a]] - splitWith p xs = ys : case zs of - [] -> [] - _:ws -> splitWith p ws - where (ys,zs) = break p xs - -mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo --- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec --- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html) --- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}. --- The "pkgroot" is the directory containing the package database. -mungePackagePaths pkgroot pkginfo = - pkginfo { - importDirs = mungePaths (importDirs pkginfo), - includeDirs = mungePaths (includeDirs pkginfo), - libraryDirs = mungePaths (libraryDirs pkginfo), - frameworkDirs = mungePaths (frameworkDirs pkginfo), - haddockInterfaces = mungePaths (haddockInterfaces pkginfo), - haddockHTMLs = mungeUrls (haddockHTMLs pkginfo) - } - where - mungePaths = map mungePath - mungeUrls = map mungeUrl - - mungePath p = case stripVarPrefix "${pkgroot}" p of - Just p' -> pkgroot p' - Nothing -> p - - mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of - Just p' -> toUrlPath pkgroot p' - Nothing -> p - - toUrlPath r p = "file:///" - -- URLs always use posix style '/' separators: - ++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p) - - stripVarPrefix var p = - case splitPath p of - (root:path') -> case stripPrefix var root of - Just [sep] | isPathSeparator sep -> Just (joinPath path') - _ -> Nothing - _ -> Nothing - - --- Older installed package info files did not have the installedUnitId --- field, so if it is missing then we fill it as the source package ID. -setUnitId :: InstalledPackageInfo -> InstalledPackageInfo -setUnitId pkginfo@InstalledPackageInfo { - installedUnitId = SimpleUnitId (ComponentId ""), - sourcePackageId = pkgid - } - = pkginfo { - installedUnitId = mkLegacyUnitId pkgid - } -setUnitId pkginfo = pkginfo - - --- | Call @hc-pkg@ to get the source package Id of all the packages in the --- given package database. --- --- This is much less information than with 'dump', but also rather quicker. --- Note in particular that it does not include the 'UnitId', just --- the source 'PackageId' which is not necessarily unique in any package db. --- -list :: HcPkgInfo -> Verbosity -> PackageDB - -> IO [PackageId] -list hpi verbosity packagedb = do - - output <- getProgramInvocationOutput verbosity - (listInvocation hpi verbosity packagedb) - `catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " list failed" - - case parsePackageIds output of - Just ok -> return ok - _ -> die $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " list'" - - where - parsePackageIds = sequence . map simpleParse . 
words - --------------------------- --- The program invocations --- - -initInvocation :: HcPkgInfo -> Verbosity -> FilePath -> ProgramInvocation -initInvocation hpi verbosity path = - programInvocation (hcPkgProgram hpi) args - where - args = ["init", path] - ++ verbosityOpts hpi verbosity - -registerInvocation, reregisterInvocation, registerMultiInstanceInvocation - :: HcPkgInfo -> Verbosity -> PackageDBStack - -> Either FilePath InstalledPackageInfo - -> ProgramInvocation -registerInvocation = registerInvocation' "register" False -reregisterInvocation = registerInvocation' "update" False -registerMultiInstanceInvocation = registerInvocation' "update" True - -registerInvocation' :: String -> Bool - -> HcPkgInfo -> Verbosity -> PackageDBStack - -> Either FilePath InstalledPackageInfo - -> ProgramInvocation -registerInvocation' cmdname multiInstance hpi - verbosity packagedbs pkgFileOrInfo = - case pkgFileOrInfo of - Left pkgFile -> - programInvocation (hcPkgProgram hpi) (args pkgFile) - - Right pkgInfo -> - (programInvocation (hcPkgProgram hpi) (args "-")) { - progInvokeInput = Just (showInstalledPackageInfo pkgInfo), - progInvokeInputEncoding = IOEncodingUTF8 - } - where - args file = [cmdname, file] - ++ (if noPkgDbStack hpi - then [packageDbOpts hpi (last packagedbs)] - else packageDbStackOpts hpi packagedbs) - ++ [ "--enable-multi-instance" | multiInstance ] - ++ verbosityOpts hpi verbosity - -unregisterInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -unregisterInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["unregister", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - - -recacheInvocation :: HcPkgInfo -> Verbosity -> PackageDB - -> ProgramInvocation -recacheInvocation hpi verbosity packagedb = - programInvocation (hcPkgProgram hpi) $ - ["recache", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi verbosity - - -exposeInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -exposeInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["expose", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - -describeInvocation :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId - -> ProgramInvocation -describeInvocation hpi verbosity packagedbs pkgid = - programInvocation (hcPkgProgram hpi) $ - ["describe", display pkgid] - ++ (if noPkgDbStack hpi - then [packageDbOpts hpi (last packagedbs)] - else packageDbStackOpts hpi packagedbs) - ++ verbosityOpts hpi verbosity - -hideInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -hideInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["hide", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - - -dumpInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation -dumpInvocation hpi _verbosity packagedb = - (programInvocation (hcPkgProgram hpi) args) { - progInvokeOutputEncoding = IOEncodingUTF8 - } - where - args = ["dump", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi silent - -- We use verbosity level 'silent' because it is important that we - -- do not contaminate the output with info/debug messages. 
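The dump and describe parsers earlier in this module split hc-pkg output into per-package records on separator lines consisting of three dashes. That splitter, extracted as a self-contained function (splitRecords is a new name; the body mirrors the splitPkgs/splitWith code above):

import Data.Char (isSpace)

splitRecords :: String -> [String]
splitRecords = checkEmpty . map unlines . splitWith ("---" ==) . lines
  where
    -- Handle the case of there being no packages at all.
    checkEmpty [s] | all isSpace s = []
    checkEmpty ss                  = ss

    splitWith :: (a -> Bool) -> [a] -> [[a]]
    splitWith p xs = ys : case zs of
                            []     -> []
                            (_:ws) -> splitWith p ws
      where (ys, zs) = break p xs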
- -listInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation -listInvocation hpi _verbosity packagedb = - (programInvocation (hcPkgProgram hpi) args) { - progInvokeOutputEncoding = IOEncodingUTF8 - } - where - args = ["list", "--simple-output", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi silent - -- We use verbosity level 'silent' because it is important that we - -- do not contaminate the output with info/debug messages. - - -packageDbStackOpts :: HcPkgInfo -> PackageDBStack -> [String] -packageDbStackOpts hpi dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> "--global" - : "--user" - : map specific dbs - (GlobalPackageDB:dbs) -> "--global" - : ("--no-user-" ++ packageDbFlag hpi) - : map specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db - specific _ = ierror - ierror :: a - ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) - -packageDbFlag :: HcPkgInfo -> String -packageDbFlag hpi - | flagPackageConf hpi - = "package-conf" - | otherwise - = "package-db" - -packageDbOpts :: HcPkgInfo -> PackageDB -> String -packageDbOpts _ GlobalPackageDB = "--global" -packageDbOpts _ UserPackageDB = "--user" -packageDbOpts hpi (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db - -verbosityOpts :: HcPkgInfo -> Verbosity -> [String] -verbosityOpts hpi v - | noVerboseFlag hpi - = [] - | v >= deafening = ["-v2"] - | v == silent = ["-v0"] - | otherwise = [] - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Hpc.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Hpc.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Hpc.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,99 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Hpc --- Copyright : Thomas Tuegel 2011 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hpc@ program. - -module Distribution.Simple.Program.Hpc - ( markup - , union - ) where - -import Distribution.ModuleName -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Types -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Version - --- | Invoke hpc with the given parameters. --- --- Prior to HPC version 0.7 (packaged with GHC 7.8), hpc did not handle --- multiple .mix paths correctly, so we print a warning, and only pass it the --- first path in the list. This means that e.g. test suites that import their --- library as a dependency can still work, but those that include the library --- modules directly (in other-modules) don't. 
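As the comment above explains, hpc before 0.7 cannot cope with more than one .mix directory, so only the first search path is kept and the rest are reported as dropped. A standalone sketch of that selection using base's Data.Version (the deleted code uses Cabal's VersionRange machinery instead; selectHpcDirs is a hypothetical name):

import Data.Version (Version, makeVersion)

-- Returns the directories to pass to hpc and the ones that had to be dropped.
selectHpcDirs :: Version -> [FilePath] -> ([FilePath], [FilePath])
selectHpcDirs hpcVer dirs
  | hpcVer >= makeVersion [0,7] = (dirs, [])      -- modern hpc: keep every dir
  | otherwise                   = splitAt 1 dirs  -- old hpc: only the first dir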
-markup :: ConfiguredProgram - -> Version - -> Verbosity - -> FilePath -- ^ Path to .tix file - -> [FilePath] -- ^ Paths to .mix file directories - -> FilePath -- ^ Path where html output should be located - -> [ModuleName] -- ^ List of modules to exclude from report - -> IO () -markup hpc hpcVer verbosity tixFile hpcDirs destDir excluded = do - hpcDirs' <- if withinRange hpcVer (orLaterVersion version07) - then return hpcDirs - else do - warn verbosity $ "Your version of HPC (" ++ display hpcVer - ++ ") does not properly handle multiple search paths. " - ++ "Coverage report generation may fail unexpectedly. These " - ++ "issues are addressed in version 0.7 or later (GHC 7.8 or " - ++ "later)." - ++ if null droppedDirs - then "" - else " The following search paths have been abandoned: " - ++ show droppedDirs - return passedDirs - - runProgramInvocation verbosity - (markupInvocation hpc tixFile hpcDirs' destDir excluded) - where - version07 = Version [0, 7] [] - (passedDirs, droppedDirs) = splitAt 1 hpcDirs - -markupInvocation :: ConfiguredProgram - -> FilePath -- ^ Path to .tix file - -> [FilePath] -- ^ Paths to .mix file directories - -> FilePath -- ^ Path where html output should be - -- located - -> [ModuleName] -- ^ List of modules to exclude from - -- report - -> ProgramInvocation -markupInvocation hpc tixFile hpcDirs destDir excluded = - let args = [ "markup", tixFile - , "--destdir=" ++ destDir - ] - ++ map ("--hpcdir=" ++) hpcDirs - ++ ["--exclude=" ++ display moduleName - | moduleName <- excluded ] - in programInvocation hpc args - -union :: ConfiguredProgram - -> Verbosity - -> [FilePath] -- ^ Paths to .tix files - -> FilePath -- ^ Path to resultant .tix file - -> [ModuleName] -- ^ List of modules to exclude from union - -> IO () -union hpc verbosity tixFiles outFile excluded = - runProgramInvocation verbosity - (unionInvocation hpc tixFiles outFile excluded) - -unionInvocation :: ConfiguredProgram - -> [FilePath] -- ^ Paths to .tix files - -> FilePath -- ^ Path to resultant .tix file - -> [ModuleName] -- ^ List of modules to exclude from union - -> ProgramInvocation -unionInvocation hpc tixFiles outFile excluded = - programInvocation hpc $ concat - [ ["sum", "--union"] - , tixFiles - , ["--output=" ++ outFile] - , ["--exclude=" ++ display moduleName - | moduleName <- excluded ] - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Internal.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Internal --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Internal utilities used by Distribution.Simple.Program.*. - -module Distribution.Simple.Program.Internal ( - stripExtractVersion, - ) where - -import Data.Char (isDigit) -import Data.List (isPrefixOf, isSuffixOf) - --- | Extract the version number from the output of 'strip --version'. --- --- Invoking "strip --version" gives very inconsistent results. 
We ignore --- everything in parentheses (see #2497), look for the first word that starts --- with a number, and try parsing out the first two components of it. Non-GNU --- 'strip' doesn't appear to have a version flag. -stripExtractVersion :: String -> String -stripExtractVersion str = - let numeric "" = False - numeric (x:_) = isDigit x - - -- Filter out everything in parentheses. - filterPar' :: Int -> [String] -> [String] - filterPar' _ [] = [] - filterPar' n (x:xs) - | n >= 0 && "(" `isPrefixOf` x = filterPar' (n+1) ((tail x):xs) - | n > 0 && ")" `isSuffixOf` x = filterPar' (n-1) xs - | n > 0 = filterPar' n xs - | otherwise = x:filterPar' n xs - - filterPar = filterPar' 0 - - in case dropWhile (not . numeric) (filterPar . words $ str) of - (ver:_) -> - -- take the first two version components - let isDot = (== '.') - (major, rest) = break isDot ver - minor = takeWhile isDigit (dropWhile isDot rest) - in major ++ "." ++ minor - _ -> "" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ld.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ld.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ld.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Ld.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,62 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Ld --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @ld@ linker program. - -module Distribution.Simple.Program.Ld ( - combineObjectFiles, - ) where - -import Distribution.Simple.Program.Types - ( ConfiguredProgram(..) ) -import Distribution.Simple.Program.Run - ( programInvocation, multiStageProgramInvocation - , runProgramInvocation ) -import Distribution.Verbosity - ( Verbosity ) - -import System.Directory - ( renameFile ) -import System.FilePath - ( (<.>) ) - --- | Call @ld -r@ to link a bunch of object files together. --- -combineObjectFiles :: Verbosity -> ConfiguredProgram - -> FilePath -> [FilePath] -> IO () -combineObjectFiles verbosity ld target files = - - -- Unlike "ar", the "ld" tool is not designed to be used with xargs. That is, - -- if we have more object files than fit on a single command line then we - -- have a slight problem. What we have to do is link files in batches into - -- a temp object file and then include that one in the next batch. 
- - let simpleArgs = ["-r", "-o", target] - - initialArgs = ["-r", "-o", target] - middleArgs = ["-r", "-o", target, tmpfile] - finalArgs = middleArgs - - simple = programInvocation ld simpleArgs - initial = programInvocation ld initialArgs - middle = programInvocation ld middleArgs - final = programInvocation ld finalArgs - - invocations = multiStageProgramInvocation - simple (initial, middle, final) files - - in run invocations - - where - tmpfile = target <.> "tmp" -- perhaps should use a proper temp file - - run [] = return () - run [inv] = runProgramInvocation verbosity inv - run (inv:invs) = do runProgramInvocation verbosity inv - renameFile target tmpfile - run invs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Run.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Run.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Run.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Run.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,252 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Run --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides a data type for program invocations and functions to --- run them. - -module Distribution.Simple.Program.Run ( - ProgramInvocation(..), - IOEncoding(..), - emptyProgramInvocation, - simpleProgramInvocation, - programInvocation, - multiStageProgramInvocation, - - runProgramInvocation, - getProgramInvocationOutput, - - getEffectiveEnvironment, - ) where - -import Distribution.Simple.Program.Types -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Compat.Environment - -import Data.List - ( foldl', unfoldr ) -import qualified Data.Map as Map -import Control.Monad - ( when ) -import System.Exit - ( ExitCode(..), exitWith ) - --- | Represents a specific invocation of a specific program. --- --- This is used as an intermediate type between deciding how to call a program --- and actually doing it. This provides the opportunity to the caller to --- adjust how the program will be called. These invocations can either be run --- directly or turned into shell or batch scripts. 
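Keeping an invocation as plain data, as described above, means the same value can either be executed or rendered into a script. A toy stand-in for that idea (the Invocation type and both function names are made up, and no quoting is attempted; see the Script.hs module further down for the real escaping):

import System.Process (callProcess)

data Invocation = Invocation { invPath :: FilePath, invArgs :: [String] }

runInvocation :: Invocation -> IO ()
runInvocation inv = callProcess (invPath inv) (invArgs inv)

renderInvocation :: Invocation -> String
renderInvocation inv = unwords (invPath inv : invArgs inv)

main :: IO ()
main = putStrLn (renderInvocation (Invocation "ghc-pkg" ["list", "--simple-output"]))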
--- -data ProgramInvocation = ProgramInvocation { - progInvokePath :: FilePath, - progInvokeArgs :: [String], - progInvokeEnv :: [(String, Maybe String)], - progInvokeCwd :: Maybe FilePath, - progInvokeInput :: Maybe String, - progInvokeInputEncoding :: IOEncoding, - progInvokeOutputEncoding :: IOEncoding - } - -data IOEncoding = IOEncodingText -- locale mode text - | IOEncodingUTF8 -- always utf8 - -emptyProgramInvocation :: ProgramInvocation -emptyProgramInvocation = - ProgramInvocation { - progInvokePath = "", - progInvokeArgs = [], - progInvokeEnv = [], - progInvokeCwd = Nothing, - progInvokeInput = Nothing, - progInvokeInputEncoding = IOEncodingText, - progInvokeOutputEncoding = IOEncodingText - } - -simpleProgramInvocation :: FilePath -> [String] -> ProgramInvocation -simpleProgramInvocation path args = - emptyProgramInvocation { - progInvokePath = path, - progInvokeArgs = args - } - -programInvocation :: ConfiguredProgram -> [String] -> ProgramInvocation -programInvocation prog args = - emptyProgramInvocation { - progInvokePath = programPath prog, - progInvokeArgs = programDefaultArgs prog - ++ args - ++ programOverrideArgs prog, - progInvokeEnv = programOverrideEnv prog - } - - -runProgramInvocation :: Verbosity -> ProgramInvocation -> IO () -runProgramInvocation verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = [], - progInvokeCwd = Nothing, - progInvokeInput = Nothing - } = - rawSystemExit verbosity path args - -runProgramInvocation verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokeCwd = mcwd, - progInvokeInput = Nothing - } = do - menv <- getEffectiveEnvironment envOverrides - exitCode <- rawSystemIOWithEnv verbosity - path args - mcwd menv - Nothing Nothing Nothing - when (exitCode /= ExitSuccess) $ - exitWith exitCode - -runProgramInvocation verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokeCwd = mcwd, - progInvokeInput = Just inputStr, - progInvokeInputEncoding = encoding - } = do - menv <- getEffectiveEnvironment envOverrides - (_, errors, exitCode) <- rawSystemStdInOut verbosity - path args - mcwd menv - (Just input) True - when (exitCode /= ExitSuccess) $ - die $ "'" ++ path ++ "' exited with an error:\n" ++ errors - where - input = case encoding of - IOEncodingText -> (inputStr, False) - IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for - -- utf8 - - -getProgramInvocationOutput :: Verbosity -> ProgramInvocation -> IO String -getProgramInvocationOutput verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokeCwd = mcwd, - progInvokeInput = minputStr, - progInvokeOutputEncoding = encoding - } = do - let utf8 = case encoding of IOEncodingUTF8 -> True; _ -> False - decode | utf8 = fromUTF8 . normaliseLineEndings - | otherwise = id - menv <- getEffectiveEnvironment envOverrides - (output, errors, exitCode) <- rawSystemStdInOut verbosity - path args - mcwd menv - input utf8 - when (exitCode /= ExitSuccess) $ - die $ "'" ++ path ++ "' exited with an error:\n" ++ errors - return (decode output) - where - input = - case minputStr of - Nothing -> Nothing - Just inputStr -> Just $ - case encoding of - IOEncodingText -> (inputStr, False) - IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for utf8 - - --- | Return the current environment extended with the given overrides. 
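The definition that follows applies overrides of the form (variable, Maybe value), where Nothing deletes the variable and Just sets it. The core of that update, as a standalone sketch over Data.Map (applyEnvOverrides is a made-up name and the example variables are only illustrative):

import Data.List (foldl')
import qualified Data.Map as Map

applyEnvOverrides :: [(String, Maybe String)] -> [(String, String)] -> [(String, String)]
applyEnvOverrides overrides env =
    Map.toList (foldl' (flip apply) (Map.fromList env) overrides)
  where
    apply (var, Nothing)  = Map.delete var   -- unset the variable
    apply (var, Just val) = Map.insert var val

-- e.g. applyEnvOverrides [("GHC_PACKAGE_PATH", Nothing)]
--                        [("PATH","/usr/bin"), ("GHC_PACKAGE_PATH","x")]
--      == [("PATH","/usr/bin")]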
--- -getEffectiveEnvironment :: [(String, Maybe String)] - -> IO (Maybe [(String, String)]) -getEffectiveEnvironment [] = return Nothing -getEffectiveEnvironment overrides = - fmap (Just . Map.toList . apply overrides . Map.fromList) getEnvironment - where - apply os env = foldl' (flip update) env os - update (var, Nothing) = Map.delete var - update (var, Just val) = Map.insert var val - --- | Like the unix xargs program. Useful for when we've got very long command --- lines that might overflow an OS limit on command line length and so you --- need to invoke a command multiple times to get all the args in. --- --- It takes four template invocations corresponding to the simple, initial, --- middle and last invocations. If the number of args given is small enough --- that we can get away with just a single invocation then the simple one is --- used: --- --- > $ simple args --- --- If the number of args given means that we need to use multiple invocations --- then the templates for the initial, middle and last invocations are used: --- --- > $ initial args_0 --- > $ middle args_1 --- > $ middle args_2 --- > ... --- > $ final args_n --- -multiStageProgramInvocation - :: ProgramInvocation - -> (ProgramInvocation, ProgramInvocation, ProgramInvocation) - -> [String] - -> [ProgramInvocation] -multiStageProgramInvocation simple (initial, middle, final) args = - - let argSize inv = length (progInvokePath inv) - + foldl' (\s a -> length a + 1 + s) 1 (progInvokeArgs inv) - fixedArgSize = maximum (map argSize [simple, initial, middle, final]) - chunkSize = maxCommandLineSize - fixedArgSize - - in case splitChunks chunkSize args of - [] -> [ simple ] - - [c] -> [ simple `appendArgs` c ] - - [c,c'] -> [ initial `appendArgs` c ] - ++ [ final `appendArgs` c'] - - (c:cs) -> [ initial `appendArgs` c ] - ++ [ middle `appendArgs` c'| c' <- init cs ] - ++ [ final `appendArgs` c'| let c' = last cs ] - - where - inv `appendArgs` as = inv { progInvokeArgs = progInvokeArgs inv ++ as } - - splitChunks len = unfoldr $ \s -> - if null s then Nothing - else Just (chunk len s) - - chunk len (s:_) | length s >= len = error toolong - chunk len ss = chunk' [] len ss - - chunk' acc _ [] = (reverse acc,[]) - chunk' acc len (s:ss) - | len' < len = chunk' (s:acc) (len-len'-1) ss - | otherwise = (reverse acc, s:ss) - where len' = length s - - toolong = "multiStageProgramInvocation: a single program arg is larger " - ++ "than the maximum command line length!" - - ---FIXME: discover this at configure time or runtime on unix --- The value is 32k on Windows and posix specifies a minimum of 4k --- but all sensible unixes use more than 4k. 
--- we could use getSysVar ArgumentLimit but that's in the unix lib --- -maxCommandLineSize :: Int -maxCommandLineSize = 30 * 1024 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Script.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Script.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Script.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Script.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,108 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Script --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hc-pkg@ program. --- Currently only GHC and LHC have hc-pkg programs. - -module Distribution.Simple.Program.Script ( - - invocationAsSystemScript, - invocationAsShellScript, - invocationAsBatchFile, - ) where - -import Distribution.Simple.Program.Run -import Distribution.System - -import Data.Maybe - ( maybeToList ) - --- | Generate a system script, either POSIX shell script or Windows batch file --- as appropriate for the given system. --- -invocationAsSystemScript :: OS -> ProgramInvocation -> String -invocationAsSystemScript Windows = invocationAsBatchFile -invocationAsSystemScript _ = invocationAsShellScript - - --- | Generate a POSIX shell script that invokes a program. --- -invocationAsShellScript :: ProgramInvocation -> String -invocationAsShellScript - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envExtra, - progInvokeCwd = mcwd, - progInvokeInput = minput - } = unlines $ - [ "#!/bin/sh" ] - ++ concatMap setEnv envExtra - ++ [ "cd " ++ quote cwd | cwd <- maybeToList mcwd ] - ++ [ (case minput of - Nothing -> "" - Just input -> "echo " ++ quote input ++ " | ") - ++ unwords (map quote $ path : args) ++ " \"$@\""] - - where - setEnv (var, Nothing) = ["unset " ++ var, "export " ++ var] - setEnv (var, Just val) = ["export " ++ var ++ "=" ++ quote val] - - quote :: String -> String - quote s = "'" ++ escape s ++ "'" - - escape [] = [] - escape ('\'':cs) = "'\\''" ++ escape cs - escape (c :cs) = c : escape cs - - --- | Generate a Windows batch file that invokes a program. 
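The POSIX quoting just above wraps each word in single quotes and rewrites an embedded quote as '\''. The same rule as a small standalone function (shellQuote is a new name mirroring the quote/escape pair above), with an example of the output it produces:

shellQuote :: String -> String
shellQuote s = "'" ++ concatMap esc s ++ "'"
  where
    esc '\'' = "'\\''"   -- close the quote, emit an escaped quote, reopen
    esc c    = [c]

-- e.g. putStrLn (unwords (map shellQuote ["echo", "it's quoted"]))
-- prints: 'echo' 'it'\''s quoted'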
--- -invocationAsBatchFile :: ProgramInvocation -> String -invocationAsBatchFile - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envExtra, - progInvokeCwd = mcwd, - progInvokeInput = minput - } = unlines $ - [ "@echo off" ] - ++ map setEnv envExtra - ++ [ "cd \"" ++ cwd ++ "\"" | cwd <- maybeToList mcwd ] - ++ case minput of - Nothing -> - [ path ++ concatMap (' ':) args ] - - Just input -> - [ "(" ] - ++ [ "echo " ++ escape line | line <- lines input ] - ++ [ ") | " - ++ "\"" ++ path ++ "\"" - ++ concatMap (\arg -> ' ':quote arg) args ] - - where - setEnv (var, Nothing) = "set " ++ var ++ "=" - setEnv (var, Just val) = "set " ++ var ++ "=" ++ escape val - - quote :: String -> String - quote s = "\"" ++ escapeQ s ++ "\"" - - escapeQ [] = [] - escapeQ ('"':cs) = "\"\"\"" ++ escapeQ cs - escapeQ (c :cs) = c : escapeQ cs - - escape [] = [] - escape ('|':cs) = "^|" ++ escape cs - escape ('<':cs) = "^<" ++ escape cs - escape ('>':cs) = "^>" ++ escape cs - escape ('&':cs) = "^&" ++ escape cs - escape ('(':cs) = "^(" ++ escape cs - escape (')':cs) = "^)" ++ escape cs - escape ('^':cs) = "^^" ++ escape cs - escape (c :cs) = c : escape cs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Strip.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Strip.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Strip.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Strip.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Strip --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @strip@ program. - -module Distribution.Simple.Program.Strip (stripLib, stripExe) - where - -import Distribution.Simple.Program -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Verbosity -import Distribution.Version - -import Control.Monad (unless) -import System.FilePath (takeBaseName) - -runStrip :: Verbosity -> ProgramConfiguration -> FilePath -> [String] -> IO () -runStrip verbosity progConf path args = - case lookupProgram stripProgram progConf of - Just strip -> rawSystemProgram verbosity strip (path:args) - Nothing -> unless (buildOS == Windows) $ - -- Don't bother warning on windows, we don't expect them to - -- have the strip program anyway. - warn verbosity $ "Unable to strip executable or library '" - ++ (takeBaseName path) - ++ "' (missing the 'strip' program)" - -stripExe :: Verbosity -> Platform -> ProgramConfiguration -> FilePath -> IO () -stripExe verbosity (Platform _arch os) conf path = - runStrip verbosity conf path args - where - args = case os of - OSX -> ["-x"] -- By default, stripping the ghc binary on at least - -- some OS X installations causes: - -- HSbase-3.0.o: unknown symbol `_environ'" - -- The -x flag fixes that. - _ -> [] - -stripLib :: Verbosity -> Platform -> ProgramConfiguration -> FilePath -> IO () -stripLib verbosity (Platform arch os) conf path = do - case os of - OSX -> -- '--strip-unneeded' is not supported on OS X, iOS, AIX, or - -- Solaris. See #1630. 
- return () - IOS -> return () - AIX -> return () - Solaris -> return () - Windows -> -- Stripping triggers a bug in 'strip.exe' for - -- libraries with lots identically named modules. See - -- #1784. - return() - Linux | arch == I386 -> - -- Versions of 'strip' on 32-bit Linux older than 2.18 are - -- broken. See #2339. - let okVersion = orLaterVersion (Version [2,18] []) - in case programVersion =<< lookupProgram stripProgram conf of - Just v | withinRange v okVersion -> - runStrip verbosity conf path args - _ -> warn verbosity $ "Unable to strip library '" - ++ (takeBaseName path) - ++ "' (version of 'strip' too old; " - ++ "requires >= 2.18 on 32-bit Linux)" - _ -> runStrip verbosity conf path args - where - args = ["--strip-unneeded"] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program/Types.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Types --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides an abstraction which deals with configuring and running --- programs. A 'Program' is a static notion of a known program. A --- 'ConfiguredProgram' is a 'Program' that has been found on the current --- machine and is ready to be run (possibly with some user-supplied default --- args). Configuring a program involves finding its location and if necessary --- finding its version. There's reasonable default behavior for trying to find --- \"foo\" in PATH, being able to override its location, etc. --- -module Distribution.Simple.Program.Types ( - -- * Program and functions for constructing them - Program(..), - ProgramSearchPath, - ProgramSearchPathEntry(..), - simpleProgram, - - -- * Configured program and related functions - ConfiguredProgram(..), - programPath, - suppressOverrideArgs, - ProgArg, - ProgramLocation(..), - simpleConfiguredProgram, - ) where - -import Distribution.Simple.Program.Find -import Distribution.Version -import Distribution.Verbosity -import Distribution.Compat.Binary - -import qualified Data.Map as Map -import GHC.Generics (Generic) - --- | Represents a program which can be configured. --- --- Note: rather than constructing this directly, start with 'simpleProgram' and --- override any extra fields. --- -data Program = Program { - -- | The simple name of the program, eg. ghc - programName :: String, - - -- | A function to search for the program if its location was not - -- specified by the user. Usually this will just be a call to - -- 'findProgramOnSearchPath'. - -- - -- It is supplied with the prevailing search path which will typically - -- just be used as-is, but can be extended or ignored as needed. - -- - -- For the purpose of change monitoring, in addition to the location - -- where the program was found, it returns all the other places that - -- were tried. 
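-- A sketch of how a new known program can be described in terms of the
-- 'Program' record documented here: start from 'simpleProgram' and override
-- individual fields.  The "mytool" program is hypothetical, and the argument
-- order of 'findProgramVersion' (version flag first, then a selector over the
-- program's output) is assumed.

import Distribution.Simple.Program
         ( Program(..), simpleProgram, findProgramVersion )

-- A made-up tool that reports its version via "mytool --numeric-version".
myToolProgram :: Program
myToolProgram = (simpleProgram "mytool")
  { programFindVersion = findProgramVersion "--numeric-version" id
  }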
- -- - programFindLocation :: Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])), - - -- | Try to find the version of the program. For many programs this is - -- not possible or is not necessary so it's OK to return Nothing. - programFindVersion :: Verbosity -> FilePath -> IO (Maybe Version), - - -- | A function to do any additional configuration after we have - -- located the program (and perhaps identified its version). For example - -- it could add args, or environment vars. - programPostConf :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - } - -type ProgArg = String - --- | Represents a program which has been configured and is thus ready to be run. --- --- These are usually made by configuring a 'Program', but if you have to --- construct one directly then start with 'simpleConfiguredProgram' and --- override any extra fields. --- -data ConfiguredProgram = ConfiguredProgram { - -- | Just the name again - programId :: String, - - -- | The version of this program, if it is known. - programVersion :: Maybe Version, - - -- | Default command-line args for this program. - -- These flags will appear first on the command line, so they can be - -- overridden by subsequent flags. - programDefaultArgs :: [String], - - -- | Override command-line args for this program. - -- These flags will appear last on the command line, so they override - -- all earlier flags. - programOverrideArgs :: [String], - - -- | Override environment variables for this program. - -- These env vars will extend\/override the prevailing environment of - -- the current to form the environment for the new process. - programOverrideEnv :: [(String, Maybe String)], - - -- | A key-value map listing various properties of the program, useful - -- for feature detection. Populated during the configuration step, key - -- names depend on the specific program. - programProperties :: Map.Map String String, - - -- | Location of the program. eg. @\/usr\/bin\/ghc-6.4@ - programLocation :: ProgramLocation, - - -- | In addition to the 'programLocation' where the program was found, - -- these are additional locations that were looked at. The combination - -- of ths found location and these not-found locations can be used to - -- monitor to detect when the re-configuring the program might give a - -- different result (e.g. found in a different location). - -- - programMonitorFiles :: [FilePath] - } - deriving (Eq, Generic, Read, Show) - -instance Binary ConfiguredProgram - --- | Where a program was found. Also tells us whether it's specified by user or --- not. This includes not just the path, but the program as well. -data ProgramLocation - = UserSpecified { locationPath :: FilePath } - -- ^The user gave the path to this program, - -- eg. --ghc-path=\/usr\/bin\/ghc-6.6 - | FoundOnSystem { locationPath :: FilePath } - -- ^The program was found automatically. - deriving (Eq, Generic, Read, Show) - -instance Binary ProgramLocation - --- | The full path of a configured program. -programPath :: ConfiguredProgram -> FilePath -programPath = locationPath . programLocation - --- | Suppress any extra arguments added by the user. -suppressOverrideArgs :: ConfiguredProgram -> ConfiguredProgram -suppressOverrideArgs prog = prog { programOverrideArgs = [] } - --- | Make a simple named program. --- --- By default we'll just search for it in the path and not try to find the --- version name. You can override these behaviours if necessary, eg: --- --- > simpleProgram "foo" { programFindLocation = ... , programFindVersion ... 
} --- -simpleProgram :: String -> Program -simpleProgram name = Program { - programName = name, - programFindLocation = \v p -> findProgramOnSearchPath v p name, - programFindVersion = \_ _ -> return Nothing, - programPostConf = \_ p -> return p - } - --- | Make a simple 'ConfiguredProgram'. --- --- > simpleConfiguredProgram "foo" (FoundOnSystem path) --- -simpleConfiguredProgram :: String -> ProgramLocation -> ConfiguredProgram -simpleConfiguredProgram name loc = ConfiguredProgram { - programId = name, - programVersion = Nothing, - programDefaultArgs = [], - programOverrideArgs = [], - programOverrideEnv = [], - programProperties = Map.empty, - programLocation = loc, - programMonitorFiles = [] -- did not look in any other locations - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Program.hs 2016-11-07 10:02:24.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Program.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,224 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides an abstraction which deals with configuring and running --- programs. A 'Program' is a static notion of a known program. A --- 'ConfiguredProgram' is a 'Program' that has been found on the current --- machine and is ready to be run (possibly with some user-supplied default --- args). Configuring a program involves finding its location and if necessary --- finding its version. There is also a 'ProgramConfiguration' type which holds --- configured and not-yet configured programs. It is the parameter to lots of --- actions elsewhere in Cabal that need to look up and run programs. If we had --- a Cabal monad, the 'ProgramConfiguration' would probably be a reader or --- state component of it. --- --- The module also defines all the known built-in 'Program's and the --- 'defaultProgramConfiguration' which contains them all. --- --- One nice thing about using it is that any program that is --- registered with Cabal will get some \"configure\" and \".cabal\" --- helpers like --with-foo-args --foo-path= and extra-foo-args. --- --- There's also good default behavior for trying to find \"foo\" in --- PATH, being able to override its location, etc. --- --- There's also a hook for adding programs in a Setup.lhs script. See --- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a --- hook user the ability to get the above flags and such so that they --- don't have to write all the PATH logic inside Setup.lhs. - -module Distribution.Simple.Program ( - -- * Program and functions for constructing them - Program(..) - , ProgramSearchPath - , ProgramSearchPathEntry(..) - , simpleProgram - , findProgramOnSearchPath - , defaultProgramSearchPath - , findProgramVersion - - -- * Configured program and related functions - , ConfiguredProgram(..) - , programPath - , ProgArg - , ProgramLocation(..) - , runProgram - , getProgramOutput - , suppressOverrideArgs - - -- * Program invocations - , ProgramInvocation(..) 
- , emptyProgramInvocation - , simpleProgramInvocation - , programInvocation - , runProgramInvocation - , getProgramInvocationOutput - - -- * The collection of unconfigured and configured programs - , builtinPrograms - - -- * The collection of configured programs we can run - , ProgramConfiguration - , emptyProgramConfiguration - , defaultProgramConfiguration - , restoreProgramConfiguration - , addKnownProgram - , addKnownPrograms - , lookupKnownProgram - , knownPrograms - , getProgramSearchPath - , setProgramSearchPath - , userSpecifyPath - , userSpecifyPaths - , userMaybeSpecifyPath - , userSpecifyArgs - , userSpecifyArgss - , userSpecifiedArgs - , lookupProgram - , lookupProgramVersion - , updateProgram - , configureProgram - , configureAllKnownPrograms - , reconfigurePrograms - , requireProgram - , requireProgramVersion - , runDbProgram - , getDbProgramOutput - - -- * Programs that Cabal knows about - , ghcProgram - , ghcPkgProgram - , ghcjsProgram - , ghcjsPkgProgram - , lhcProgram - , lhcPkgProgram - , hmakeProgram - , jhcProgram - , uhcProgram - , gccProgram - , arProgram - , stripProgram - , happyProgram - , alexProgram - , hsc2hsProgram - , c2hsProgram - , cpphsProgram - , hscolourProgram - , haddockProgram - , greencardProgram - , ldProgram - , tarProgram - , cppProgram - , pkgConfigProgram - , hpcProgram - - -- * deprecated - , rawSystemProgram - , rawSystemProgramStdout - , rawSystemProgramConf - , rawSystemProgramStdoutConf - , findProgramOnPath - , findProgramLocation - - ) where - -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Db -import Distribution.Simple.Program.Builtin -import Distribution.Simple.Program.Find -import Distribution.Simple.Utils -import Distribution.Verbosity - - --- | Runs the given configured program. --- -runProgram :: Verbosity -- ^Verbosity - -> ConfiguredProgram -- ^The program to run - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO () -runProgram verbosity prog args = - runProgramInvocation verbosity (programInvocation prog args) - - --- | Runs the given configured program and gets the output. --- -getProgramOutput :: Verbosity -- ^Verbosity - -> ConfiguredProgram -- ^The program to run - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO String -getProgramOutput verbosity prog args = - getProgramInvocationOutput verbosity (programInvocation prog args) - - --- | Looks up the given program in the program database and runs it. --- -runDbProgram :: Verbosity -- ^verbosity - -> Program -- ^The program to run - -> ProgramDb -- ^look up the program here - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO () -runDbProgram verbosity prog programDb args = - case lookupProgram prog programDb of - Nothing -> die notFound - Just configuredProg -> runProgram verbosity configuredProg args - where - notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found" - --- | Looks up the given program in the program database and runs it. 
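-- A small usage sketch for the helpers above: locate and configure a known
-- program, then capture its output.  requireProgram is assumed to have its
-- usual shape of returning the configured program together with the updated
-- program database.

import Distribution.Simple.Program
         ( ghcProgram, requireProgram, getProgramOutput
         , defaultProgramConfiguration )
import Distribution.Verbosity (normal)

ghcNumericVersion :: IO String
ghcNumericVersion = do
  -- Fails with a sensible error message if ghc cannot be found on the
  -- program search path.
  (ghc, _progdb) <- requireProgram normal ghcProgram defaultProgramConfiguration
  getProgramOutput normal ghc ["--numeric-version"]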
--- -getDbProgramOutput :: Verbosity -- ^verbosity - -> Program -- ^The program to run - -> ProgramDb -- ^look up the program here - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO String -getDbProgramOutput verbosity prog programDb args = - case lookupProgram prog programDb of - Nothing -> die notFound - Just configuredProg -> getProgramOutput verbosity configuredProg args - where - notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found" - - ---------------------- --- Deprecated aliases --- - -rawSystemProgram :: Verbosity -> ConfiguredProgram - -> [ProgArg] -> IO () -rawSystemProgram = runProgram - -rawSystemProgramStdout :: Verbosity -> ConfiguredProgram - -> [ProgArg] -> IO String -rawSystemProgramStdout = getProgramOutput - -rawSystemProgramConf :: Verbosity -> Program -> ProgramConfiguration - -> [ProgArg] -> IO () -rawSystemProgramConf = runDbProgram - -rawSystemProgramStdoutConf :: Verbosity -> Program -> ProgramConfiguration - -> [ProgArg] -> IO String -rawSystemProgramStdoutConf = getDbProgramOutput - -type ProgramConfiguration = ProgramDb - -emptyProgramConfiguration, defaultProgramConfiguration :: ProgramConfiguration -emptyProgramConfiguration = emptyProgramDb -defaultProgramConfiguration = defaultProgramDb - -restoreProgramConfiguration :: [Program] -> ProgramConfiguration - -> ProgramConfiguration -restoreProgramConfiguration = restoreProgramDb - -{-# DEPRECATED findProgramOnPath "use findProgramOnSearchPath instead" #-} -findProgramOnPath :: String -> Verbosity -> IO (Maybe FilePath) -findProgramOnPath name verbosity = - fmap (fmap fst) $ - findProgramOnSearchPath verbosity defaultProgramSearchPath name diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Register.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Register.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Register.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Register.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,473 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Register --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module deals with registering and unregistering packages. There are a --- couple ways it can do this, one is to do it directly. Another is to generate --- a script that can be run later to do it. The idea here being that the user --- is shielded from the details of what command to use for package registration --- for a particular compiler. In practice this aspect was not especially --- popular so we also provide a way to simply generate the package registration --- file which then must be manually passed to @ghc-pkg@. It is possible to --- generate registration information for where the package is to be installed, --- or alternatively to register the package in place in the build tree. The --- latter is occasionally handy, and will become more important when we try to --- build multi-package systems. --- --- This module does not delegate anything to the per-compiler modules but just --- mixes it all in in this module, which is rather unsatisfactory. The script --- generation and the unregister feature are not well used or tested. 
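-- A sketch of one of the registration modes described above: generating the
-- package registration file for later, manual use with ghc-pkg.  It assumes
-- a Setup.hs hook context where the PackageDescription and LocalBuildInfo
-- are already in hand; the output file name is only an example.

import Distribution.Simple.Register (register)
import Distribution.Simple.Setup
         ( RegisterFlags(..), defaultRegisterFlags, defaultDistPref, toFlag )
import Distribution.PackageDescription (PackageDescription)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo)
import Distribution.Verbosity (normal)

registerViaConfFile :: PackageDescription -> LocalBuildInfo -> IO ()
registerViaConfFile pkg lbi =
  register pkg lbi defaultRegisterFlags
    { regGenPkgConf = toFlag (Just "mypkg.conf") -- write the file, don't register
    , regDistPref   = toFlag defaultDistPref
    , regVerbosity  = toFlag normal
    }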
- -module Distribution.Simple.Register ( - register, - unregister, - - initPackageDB, - doesPackageDBExist, - createPackageDB, - deletePackageDB, - - invokeHcPkg, - registerPackage, - generateRegistrationInfo, - inplaceInstalledPackageInfo, - absoluteInstalledPackageInfo, - generalInstalledPackageInfo, - ) where - -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.LHC as LHC -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite - -import Distribution.Simple.Compiler -import Distribution.Simple.Program -import Distribution.Simple.Program.Script -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import Distribution.Simple.Setup -import Distribution.PackageDescription -import Distribution.Package -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Text -import Distribution.Verbosity as Verbosity - -import System.FilePath ((), (<.>), isAbsolute) -import System.Directory - ( getCurrentDirectory, removeDirectoryRecursive, removeFile - , doesDirectoryExist, doesFileExist ) - -import Data.Version -import Control.Monad (when) -import Data.Maybe - ( isJust, fromMaybe, maybeToList ) -import Data.List - ( partition, nub ) -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - --- ----------------------------------------------------------------------------- --- Registration - -register :: PackageDescription -> LocalBuildInfo - -> RegisterFlags -- ^Install in the user's database?; verbose - -> IO () -register pkg@PackageDescription { library = Just lib } lbi regFlags - = do - let clbi = getComponentLocalBuildInfo lbi CLibName - - absPackageDBs <- absolutePackageDBPaths packageDbs - installedPkgInfo <- generateRegistrationInfo - verbosity pkg lib lbi clbi inplace reloc distPref - (registrationPackageDB absPackageDBs) - - when (fromFlag (regPrintId regFlags)) $ do - putStrLn (display (IPI.installedUnitId installedPkgInfo)) - - -- Three different modes: - case () of - _ | modeGenerateRegFile -> writeRegistrationFile installedPkgInfo - | modeGenerateRegScript -> writeRegisterScript installedPkgInfo - | otherwise -> do - setupMessage verbosity "Registering" (packageId pkg) - registerPackage verbosity (compiler lbi) (withPrograms lbi) False - packageDbs installedPkgInfo - - where - modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags)) - regFile = fromMaybe (display (packageId pkg) <.> "conf") - (fromFlag (regGenPkgConf regFlags)) - - modeGenerateRegScript = fromFlag (regGenScript regFlags) - - inplace = fromFlag (regInPlace regFlags) - reloc = relocatable lbi - -- FIXME: there's really no guarantee this will work. - -- registering into a totally different db stack can - -- fail if dependencies cannot be satisfied. 
- packageDbs = nub $ withPackageDB lbi - ++ maybeToList (flagToMaybe (regPackageDB regFlags)) - distPref = fromFlag (regDistPref regFlags) - verbosity = fromFlag (regVerbosity regFlags) - - writeRegistrationFile installedPkgInfo = do - notice verbosity ("Creating package registration file: " ++ regFile) - writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo) - - writeRegisterScript installedPkgInfo = - case compilerFlavor (compiler lbi) of - JHC -> notice verbosity "Registration scripts not needed for jhc" - UHC -> notice verbosity "Registration scripts not needed for uhc" - _ -> withHcPkg - "Registration scripts are not implemented for this compiler" - (compiler lbi) (withPrograms lbi) - (writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs) - -register _ _ regFlags = notice verbosity "No package to register" - where - verbosity = fromFlag (regVerbosity regFlags) - - -generateRegistrationInfo :: Verbosity - -> PackageDescription - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Bool - -> Bool - -> FilePath - -> PackageDB - -> IO InstalledPackageInfo -generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do - --TODO: eliminate pwd! - pwd <- getCurrentDirectory - - --TODO: the method of setting the UnitId is compiler specific - -- this aspect should be delegated to a per-compiler helper. - let comp = compiler lbi - abi_hash <- - case compilerFlavor comp of - GHC | compilerVersion comp >= Version [6,11] [] -> do - fmap AbiHash $ GHC.libAbiHash verbosity pkg lbi lib clbi - GHCJS -> do - fmap AbiHash $ GHCJS.libAbiHash verbosity pkg lbi lib clbi - _ -> return (AbiHash "") - - installedPkgInfo <- - if inplace - then return (inplaceInstalledPackageInfo pwd distPref - pkg abi_hash lib lbi clbi) - else if reloc - then relocRegistrationInfo verbosity - pkg lib lbi clbi abi_hash packageDb - else return (absoluteInstalledPackageInfo - pkg abi_hash lib lbi clbi) - - - return installedPkgInfo{ IPI.abiHash = abi_hash } - -relocRegistrationInfo :: Verbosity - -> PackageDescription - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> AbiHash - -> PackageDB - -> IO InstalledPackageInfo -relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb = - case (compilerFlavor (compiler lbi)) of - GHC -> do fs <- GHC.pkgRoot verbosity lbi packageDb - return (relocatableInstalledPackageInfo - pkg abi_hash lib lbi clbi fs) - _ -> die "Distribution.Simple.Register.relocRegistrationInfo: \ - \not implemented for this compiler" - -initPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> FilePath -> IO () -initPackageDB verbosity comp progdb dbPath = - createPackageDB verbosity comp progdb False dbPath - --- | Create an empty package DB at the specified location. 
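-- A short sketch combining the package database helpers in this module:
-- create a database only if nothing exists at the given path yet.  The
-- Compiler and ProgramConfiguration are assumed to come from an earlier
-- configure step.

import Distribution.Simple.Register (doesPackageDBExist, initPackageDB)
import Distribution.Simple.Compiler (Compiler)
import Distribution.Simple.Program (ProgramConfiguration)
import Distribution.Verbosity (normal)
import Control.Monad (unless)

ensurePackageDB :: Compiler -> ProgramConfiguration -> FilePath -> IO ()
ensurePackageDB comp progdb dbPath = do
  exists <- doesPackageDBExist dbPath
  unless exists $
    initPackageDB normal comp progdb dbPath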
-createPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> Bool - -> FilePath -> IO () -createPackageDB verbosity comp progdb preferCompat dbPath = - case compilerFlavor comp of - GHC -> HcPkg.init (GHC.hcPkgInfo progdb) verbosity preferCompat dbPath - GHCJS -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath - LHC -> HcPkg.init (LHC.hcPkgInfo progdb) verbosity False dbPath - UHC -> return () - HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath - _ -> die $ "Distribution.Simple.Register.createPackageDB: " - ++ "not implemented for this compiler" - -doesPackageDBExist :: FilePath -> IO Bool -doesPackageDBExist dbPath = do - -- currently one impl for all compiler flavours, but could change if needed - dir_exists <- doesDirectoryExist dbPath - if dir_exists - then return True - else doesFileExist dbPath - -deletePackageDB :: FilePath -> IO () -deletePackageDB dbPath = do - -- currently one impl for all compiler flavours, but could change if needed - dir_exists <- doesDirectoryExist dbPath - if dir_exists - then removeDirectoryRecursive dbPath - else do file_exists <- doesFileExist dbPath - when file_exists $ removeFile dbPath - --- | Run @hc-pkg@ using a given package DB stack, directly forwarding the --- provided command-line arguments to it. -invokeHcPkg :: Verbosity -> Compiler -> ProgramConfiguration -> PackageDBStack - -> [String] -> IO () -invokeHcPkg verbosity comp conf dbStack extraArgs = - withHcPkg "invokeHcPkg" comp conf - (\hpi -> HcPkg.invoke hpi verbosity dbStack extraArgs) - -withHcPkg :: String -> Compiler -> ProgramConfiguration - -> (HcPkg.HcPkgInfo -> IO a) -> IO a -withHcPkg name comp conf f = - case compilerFlavor comp of - GHC -> f (GHC.hcPkgInfo conf) - GHCJS -> f (GHCJS.hcPkgInfo conf) - LHC -> f (LHC.hcPkgInfo conf) - _ -> die ("Distribution.Simple.Register." 
++ name ++ ":\ - \not implemented for this compiler") - -registerPackage :: Verbosity - -> Compiler - -> ProgramConfiguration - -> Bool - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity comp progdb multiInstance packageDbs installedPkgInfo = - case compilerFlavor comp of - GHC -> GHC.registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo - GHCJS -> GHCJS.registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo - _ | multiInstance - -> die "Registering multiple package instances is not yet supported for this compiler" - LHC -> LHC.registerPackage verbosity progdb packageDbs installedPkgInfo - UHC -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo - JHC -> notice verbosity "Registering for jhc (nothing to do)" - HaskellSuite {} -> - HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo - _ -> die "Registering is not implemented for this compiler" - -writeHcPkgRegisterScript :: Verbosity - -> InstalledPackageInfo - -> PackageDBStack - -> HcPkg.HcPkgInfo - -> IO () -writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs hpi = do - let invocation = HcPkg.reregisterInvocation hpi Verbosity.normal - packageDbs (Right installedPkgInfo) - regScript = invocationAsSystemScript buildOS invocation - - notice verbosity ("Creating package registration script: " ++ regScriptFileName) - writeUTF8File regScriptFileName regScript - setFileExecutable regScriptFileName - -regScriptFileName :: FilePath -regScriptFileName = case buildOS of - Windows -> "register.bat" - _ -> "register.sh" - - --- ----------------------------------------------------------------------------- --- Making the InstalledPackageInfo - --- | Construct 'InstalledPackageInfo' for a library in a package, given a set --- of installation directories. --- -generalInstalledPackageInfo - :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to - -- absolute paths. - -> PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstallDirs FilePath - -> InstalledPackageInfo -generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs = - IPI.InstalledPackageInfo { - IPI.sourcePackageId = packageId pkg, - IPI.installedUnitId = componentUnitId clbi, - IPI.compatPackageKey = componentCompatPackageKey clbi, - IPI.license = license pkg, - IPI.copyright = copyright pkg, - IPI.maintainer = maintainer pkg, - IPI.author = author pkg, - IPI.stability = stability pkg, - IPI.homepage = homepage pkg, - IPI.pkgUrl = pkgUrl pkg, - IPI.synopsis = synopsis pkg, - IPI.description = description pkg, - IPI.category = category pkg, - IPI.abiHash = abi_hash, - IPI.exposed = libExposed lib, - IPI.exposedModules = componentExposedModules clbi, - IPI.hiddenModules = otherModules bi, - IPI.trusted = IPI.trusted IPI.emptyInstalledPackageInfo, - IPI.importDirs = [ libdir installDirs | hasModules ], - -- Note. the libsubdir and datasubdir templates have already been expanded - -- into libdir and datadir. - IPI.libraryDirs = libdirs, - IPI.libraryDynDirs = dynlibdirs, - IPI.dataDir = datadir installDirs, - IPI.hsLibraries = if hasLibrary - then [getHSLibraryName (componentUnitId clbi)] - else [], - IPI.extraLibraries = extraLibs bi, - IPI.extraGHCiLibraries = extraGHCiLibs bi, - IPI.includeDirs = absinc ++ adjustRelIncDirs relinc, - IPI.includes = includes bi, - IPI.depends = map fst (componentPackageDeps clbi), - IPI.ccOptions = [], -- Note. NOT ccOptions bi! 
- -- We don't want cc-options to be propagated - -- to C compilations in other packages. - IPI.ldOptions = ldOptions bi, - IPI.frameworks = frameworks bi, - IPI.frameworkDirs = extraFrameworkDirs bi, - IPI.haddockInterfaces = [haddockdir installDirs haddockName pkg], - IPI.haddockHTMLs = [htmldir installDirs], - IPI.pkgRoot = Nothing - } - where - bi = libBuildInfo lib - (absinc, relinc) = partition isAbsolute (includeDirs bi) - hasModules = not $ null (libModules lib) - comp = compiler lbi - hasLibrary = hasModules || not (null (cSources bi)) - || (not (null (jsSources bi)) && - compilerFlavor comp == GHCJS) - (libdirs, dynlibdirs) - | not hasLibrary - = (extraLibDirs bi, []) - -- the dynamic-library-dirs defaults to the library-dirs if not specified, - -- so this works whether the dynamic-library-dirs field is supported or not - - | libraryDynDirSupported comp - = (libdir installDirs : extraLibDirs bi, - dynlibdir installDirs : extraLibDirs bi) - - | otherwise - = (libdir installDirs : dynlibdir installDirs : extraLibDirs bi, []) - -- the compiler doesn't understand the dynamic-library-dirs field so we - -- add the dyn directory to the "normal" list in the library-dirs field - --- | Construct 'InstalledPackageInfo' for a library that is in place in the --- build tree. --- --- This function knows about the layout of in place packages. --- -inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree - -> FilePath -- ^ location of the dist tree - -> PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstalledPackageInfo -inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi = - generalInstalledPackageInfo adjustRelativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - adjustRelativeIncludeDirs = map (inplaceDir ) - libTargetDir - | componentUnitId clbi == localUnitId lbi = buildDir lbi - | otherwise = buildDir lbi display (componentUnitId clbi) - installDirs = - (absoluteInstallDirs pkg lbi NoCopyDest) { - libdir = inplaceDir libTargetDir, - dynlibdir = inplaceDir libTargetDir, - datadir = inplaceDir dataDir pkg, - docdir = inplaceDocdir, - htmldir = inplaceHtmldir, - haddockdir = inplaceHtmldir - } - inplaceDocdir = inplaceDir distPref "doc" - inplaceHtmldir = inplaceDocdir "html" display (packageName pkg) - - --- | Construct 'InstalledPackageInfo' for the final install location of a --- library package. --- --- This function knows about the layout of installed packages. --- -absoluteInstalledPackageInfo :: PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstalledPackageInfo -absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi = - generalInstalledPackageInfo adjustReativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - -- For installed packages we install all include files into one dir, - -- whereas in the build tree they may live in multiple local dirs. 
- adjustReativeIncludeDirs _ - | null (installIncludes bi) = [] - | otherwise = [includedir installDirs] - bi = libBuildInfo lib - installDirs = absoluteInstallDirs pkg lbi NoCopyDest - - -relocatableInstalledPackageInfo :: PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> FilePath - -> InstalledPackageInfo -relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot = - generalInstalledPackageInfo adjustReativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - -- For installed packages we install all include files into one dir, - -- whereas in the build tree they may live in multiple local dirs. - adjustReativeIncludeDirs _ - | null (installIncludes bi) = [] - | otherwise = [includedir installDirs] - bi = libBuildInfo lib - - installDirs = fmap (("${pkgroot}" ) . shortRelativePath pkgroot) - $ absoluteInstallDirs pkg lbi NoCopyDest - --- ----------------------------------------------------------------------------- --- Unregistration - -unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO () -unregister pkg lbi regFlags = do - let pkgid = packageId pkg - genScript = fromFlag (regGenScript regFlags) - verbosity = fromFlag (regVerbosity regFlags) - packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi)) - (regPackageDB regFlags) - unreg hpi = - let invocation = HcPkg.unregisterInvocation - hpi Verbosity.normal packageDb pkgid - in if genScript - then writeFileAtomic unregScriptFileName - (BS.Char8.pack $ invocationAsSystemScript buildOS invocation) - else runProgramInvocation verbosity invocation - setupMessage verbosity "Unregistering" pkgid - withHcPkg "unregistering is only implemented for GHC and GHCJS" - (compiler lbi) (withPrograms lbi) unreg - -unregScriptFileName :: FilePath -unregScriptFileName = case buildOS of - Windows -> "unregister.bat" - _ -> "unregister.sh" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Setup.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2124 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Setup --- Copyright : Isaac Jones 2003-2004 --- Duncan Coutts 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is a big module, but not very complicated. The code is very regular --- and repetitive. It defines the command line interface for all the Cabal --- commands. For each command (like @configure@, @build@ etc) it defines a type --- that holds all the flags, the default set of flags and a 'CommandUI' that --- maps command line flags to and from the corresponding flags type. --- --- All the flags types are instances of 'Monoid', see --- --- for an explanation. --- --- The types defined here get used in the front end and especially in --- @cabal-install@ which has to do quite a bit of manipulating sets of command --- line flags. --- --- This is actually relatively nice, it works quite well. 
The main change it --- needs is to unify it with the code for managing sets of fields that can be --- read and written from files. This would allow us to save configure flags in --- config files. - -module Distribution.Simple.Setup ( - - GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand, - ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand, - configPrograms, - AllowNewer(..), AllowNewerDep(..), isAllowNewer, - configAbsolutePaths, readPackageDbList, showPackageDbList, - CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand, - InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand, - HaddockTarget(..), - HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand, - HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand, - BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand, - buildVerbose, - ReplFlags(..), defaultReplFlags, replCommand, - CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand, - RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand, - unregisterCommand, - SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand, - TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand, - TestShowDetails(..), - BenchmarkFlags(..), emptyBenchmarkFlags, - defaultBenchmarkFlags, benchmarkCommand, - CopyDest(..), - configureArgs, configureOptions, configureCCompiler, configureLinker, - buildOptions, haddockOptions, installDirsOptions, - programConfigurationOptions, programConfigurationPaths', - splitArgs, - - defaultDistPref, optionDistPref, - - Flag(..), - toFlag, - fromFlag, - fromFlagOrDefault, - flagToMaybe, - flagToList, - boolOpt, boolOpt', trueArg, falseArg, - optionVerbosity, optionNumJobs, readPToMaybe ) where - -import Distribution.Compiler -import Distribution.ReadE -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import Distribution.Package -import Distribution.PackageDescription hiding (Flag) -import Distribution.Simple.Command hiding (boolOpt, boolOpt') -import qualified Distribution.Simple.Command as Command -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Utils -import Distribution.Simple.Program -import Distribution.Simple.InstallDirs -import Distribution.Verbosity -import Distribution.Utils.NubList -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup as Semi - -import Control.Applicative as A ( Applicative(..), (<*) ) -import Control.Monad ( liftM ) -import Data.List ( sort ) -import Data.Maybe ( listToMaybe ) -import Data.Char ( isSpace, isAlpha ) -import GHC.Generics ( Generic ) - --- FIXME Not sure where this should live -defaultDistPref :: FilePath -defaultDistPref = "dist" - --- ------------------------------------------------------------ --- * Flag type --- ------------------------------------------------------------ - --- | All flags are monoids, they come in two flavours: --- --- 1. list flags eg --- --- > --ghc-option=foo --ghc-option=bar --- --- gives us all the values ["foo", "bar"] --- --- 2. singular value flags, eg: --- --- > --enable-foo --disable-foo --- --- gives us Just False --- So this Flag type is for the latter singular kind of flag. --- Its monoid instance gives us the behaviour where it starts out as --- 'NoFlag' and later flags override earlier ones. 
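-- A tiny illustration of the override behaviour just described: 'NoFlag' is
-- the identity and the last flag that was actually set wins.

import Distribution.Simple.Setup (Flag(..), fromFlagOrDefault)
import Data.Monoid ((<>))

lastFlagWins :: Bool
lastFlagWins = fromFlagOrDefault True (NoFlag <> Flag True <> Flag False)
-- evaluates to False: the right-most Flag overrides the earlier one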
--- -data Flag a = Flag a | NoFlag deriving (Eq, Generic, Show, Read) - -instance Binary a => Binary (Flag a) - -instance Functor Flag where - fmap f (Flag x) = Flag (f x) - fmap _ NoFlag = NoFlag - -instance Monoid (Flag a) where - mempty = NoFlag - mappend = (Semi.<>) - -instance Semigroup (Flag a) where - _ <> f@(Flag _) = f - f <> NoFlag = f - -instance Bounded a => Bounded (Flag a) where - minBound = toFlag minBound - maxBound = toFlag maxBound - -instance Enum a => Enum (Flag a) where - fromEnum = fromEnum . fromFlag - toEnum = toFlag . toEnum - enumFrom (Flag a) = map toFlag . enumFrom $ a - enumFrom _ = [] - enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b - enumFromThen _ _ = [] - enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b - enumFromTo _ _ = [] - enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c - enumFromThenTo _ _ _ = [] - -toFlag :: a -> Flag a -toFlag = Flag - -fromFlag :: Flag a -> a -fromFlag (Flag x) = x -fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault" - -fromFlagOrDefault :: a -> Flag a -> a -fromFlagOrDefault _ (Flag x) = x -fromFlagOrDefault def NoFlag = def - -flagToMaybe :: Flag a -> Maybe a -flagToMaybe (Flag x) = Just x -flagToMaybe NoFlag = Nothing - -flagToList :: Flag a -> [a] -flagToList (Flag x) = [x] -flagToList NoFlag = [] - -allFlags :: [Flag Bool] -> Flag Bool -allFlags flags = if all (\f -> fromFlagOrDefault False f) flags - then Flag True - else NoFlag - --- ------------------------------------------------------------ --- * Global flags --- ------------------------------------------------------------ - --- In fact since individual flags types are monoids and these are just sets of --- flags then they are also monoids pointwise. This turns out to be really --- useful. The mempty is the set of empty flags and mappend allows us to --- override specific flags. For example we can start with default flags and --- override with the ones we get from a file or the command line, or both. - --- | Flags that apply at the top level, not to any sub-command. 
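-- The pointwise Monoid just described, in minimal form: take the default
-- top-level flags and override a single field, much as a command-line parser
-- would.

import Distribution.Simple.Setup (GlobalFlags(..), defaultGlobalFlags, toFlag)
import Data.Monoid ((<>))

withNumericVersion :: GlobalFlags
withNumericVersion =
  defaultGlobalFlags <> mempty { globalNumericVersion = toFlag True }
-- globalNumericVersion ends up as Flag True; globalVersion keeps its default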
-data GlobalFlags = GlobalFlags { - globalVersion :: Flag Bool, - globalNumericVersion :: Flag Bool - } deriving (Generic) - -defaultGlobalFlags :: GlobalFlags -defaultGlobalFlags = GlobalFlags { - globalVersion = Flag False, - globalNumericVersion = Flag False - } - -globalCommand :: [Command action] -> CommandUI GlobalFlags -globalCommand commands = CommandUI - { commandName = "" - , commandSynopsis = "" - , commandUsage = \pname -> - "This Setup program uses the Haskell Cabal Infrastructure.\n" - ++ "See http://www.haskell.org/cabal/ for more information.\n" - ++ "\n" - ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n" - , commandDescription = Just $ \pname -> - let - commands' = commands ++ [commandAddAction helpCommandUI undefined] - cmdDescs = getNormalCommandDescriptions commands' - maxlen = maximum $ [length name | (name, _) <- cmdDescs] - align str = str ++ replicate (maxlen - length str) ' ' - in - "Commands:\n" - ++ unlines [ " " ++ align name ++ " " ++ descr - | (name, descr) <- cmdDescs ] - ++ "\n" - ++ "For more information about a command use\n" - ++ " " ++ pname ++ " COMMAND --help\n\n" - ++ "Typical steps for installing Cabal packages:\n" - ++ concat [ " " ++ pname ++ " " ++ x ++ "\n" - | x <- ["configure", "build", "install"]] - , commandNotes = Nothing - , commandDefaultFlags = defaultGlobalFlags - , commandOptions = \_ -> - [option ['V'] ["version"] - "Print version information" - globalVersion (\v flags -> flags { globalVersion = v }) - trueArg - ,option [] ["numeric-version"] - "Print just the version number" - globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) - trueArg - ] - } - -emptyGlobalFlags :: GlobalFlags -emptyGlobalFlags = mempty - -instance Monoid GlobalFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup GlobalFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Config flags --- ------------------------------------------------------------ - --- | Policy for relaxing upper bounds in dependencies. For example, given --- 'build-depends: array >= 0.3 && < 0.5', are we allowed to relax the upper --- bound and choose a version of 'array' that is greater or equal to 0.5? By --- default the upper bounds are always strictly honored. -data AllowNewer = - - -- | Default: honor the upper bounds in all dependencies, never choose - -- versions newer than allowed. - AllowNewerNone - - -- | Ignore upper bounds in dependencies on the given packages. - | AllowNewerSome [AllowNewerDep] - - -- | Ignore upper bounds in dependencies on all packages. - | AllowNewerAll - deriving (Eq, Read, Show, Generic) - --- | Dependencies can be relaxed either for all packages in the install plan, or --- only for some packages. 
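-- A sketch of how relaxation policies combine under the Semigroup instance
-- below: two package-scoped policies merge their lists, while AllowNewerAll
-- would absorb everything.  The plain PackageName string wrapper is assumed
-- to be in scope from Distribution.Package, as in this Cabal version.

import Distribution.Simple.Setup (AllowNewer(..), AllowNewerDep(..))
import Distribution.Package (PackageName(..))
import Data.Monoid ((<>))

relaxBaseAndTH :: AllowNewer
relaxBaseAndTH =
     AllowNewerSome [AllowNewerDep (PackageName "base")]
  <> AllowNewerSome [AllowNewerDep (PackageName "template-haskell")]
-- yields AllowNewerSome containing both entries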
-data AllowNewerDep = AllowNewerDep PackageName - | AllowNewerDepScoped PackageName PackageName - deriving (Eq, Read, Show, Generic) - -instance Text AllowNewerDep where - disp (AllowNewerDep p0) = disp p0 - disp (AllowNewerDepScoped p0 p1) = disp p0 Disp.<> Disp.colon Disp.<> disp p1 - - parse = scopedP Parse.<++ normalP - where - scopedP = AllowNewerDepScoped `fmap` parse A.<* Parse.char ':' A.<*> parse - normalP = AllowNewerDep `fmap` parse - -instance Binary AllowNewer -instance Binary AllowNewerDep - -instance Semigroup AllowNewer where - AllowNewerNone <> r = r - l@AllowNewerAll <> _ = l - l@(AllowNewerSome _) <> AllowNewerNone = l - (AllowNewerSome _) <> r@AllowNewerAll = r - (AllowNewerSome a) <> (AllowNewerSome b) = AllowNewerSome (a ++ b) - -instance Monoid AllowNewer where - mempty = AllowNewerNone - mappend = (Semi.<>) - --- | Convert 'AllowNewer' to a boolean. -isAllowNewer :: AllowNewer -> Bool -isAllowNewer AllowNewerNone = False -isAllowNewer (AllowNewerSome _) = True -isAllowNewer AllowNewerAll = True - -allowNewerParser :: Parse.ReadP r (Maybe AllowNewer) -allowNewerParser = - (Just . AllowNewerSome) `fmap` Parse.sepBy1 parse (Parse.char ',') - -allowNewerPrinter :: (Maybe AllowNewer) -> [Maybe String] -allowNewerPrinter Nothing = [] -allowNewerPrinter (Just AllowNewerNone) = [] -allowNewerPrinter (Just AllowNewerAll) = [Nothing] -allowNewerPrinter (Just (AllowNewerSome pkgs)) = map (Just . display) $ pkgs - --- | Flags to @configure@ command. --- --- IMPORTANT: every time a new flag is added, 'D.C.Setup.filterConfigureFlags' --- should be updated. -data ConfigFlags = ConfigFlags { - --FIXME: the configPrograms is only here to pass info through to configure - -- because the type of configure is constrained by the UserHooks. - -- when we change UserHooks next we should pass the initial - -- ProgramConfiguration directly and not via ConfigFlags - configPrograms_ :: Last' ProgramConfiguration, -- ^All programs that - -- @cabal@ may run - - configProgramPaths :: [(String, FilePath)], -- ^user specified programs paths - configProgramArgs :: [(String, [String])], -- ^user specified programs args - configProgramPathExtra :: NubList FilePath, -- ^Extend the $PATH - configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the - -- compiler, such as GHC or - -- JHC. - configHcPath :: Flag FilePath, -- ^given compiler location - configHcPkg :: Flag FilePath, -- ^given hc-pkg location - configVanillaLib :: Flag Bool, -- ^Enable vanilla library - configProfLib :: Flag Bool, -- ^Enable profiling in the library - configSharedLib :: Flag Bool, -- ^Build shared library - configDynExe :: Flag Bool, -- ^Enable dynamic linking of the - -- executables. - configProfExe :: Flag Bool, -- ^Enable profiling in the - -- executables. - configProf :: Flag Bool, -- ^Enable profiling in the library - -- and executables. - configProfDetail :: Flag ProfDetailLevel, -- ^Profiling detail level - -- in the library and executables. - configProfLibDetail :: Flag ProfDetailLevel, -- ^Profiling detail level - -- in the library - configConfigureArgs :: [String], -- ^Extra arguments to @configure@ - configOptimization :: Flag OptimisationLevel, -- ^Enable optimization. - configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix. - configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix. 
- configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation - -- paths - configScratchDir :: Flag FilePath, - configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries - configExtraFrameworkDirs :: [FilePath], -- ^ path to search for extra - -- frameworks (OS X only) - configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files - configIPID :: Flag String, -- ^ explicit IPID to be used - - configDistPref :: Flag FilePath, -- ^"dist" prefix - configVerbosity :: Flag Verbosity, -- ^verbosity level - configUserInstall :: Flag Bool, -- ^The --user\/--global flag - configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use - configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi - configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC - configStripExes :: Flag Bool, -- ^Enable executable stripping - configStripLibs :: Flag Bool, -- ^Enable library stripping - configConstraints :: [Dependency], -- ^Additional constraints for - -- dependencies. - configDependencies :: [(PackageName, UnitId)], - -- ^The packages depended on. - configConfigurationsFlags :: FlagAssignment, - configTests :: Flag Bool, -- ^Enable test suite compilation - configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation - configCoverage :: Flag Bool, -- ^Enable program coverage - configLibCoverage :: Flag Bool, -- ^Enable program coverage (deprecated) - configExactConfiguration :: Flag Bool, - -- ^All direct dependencies and flags are provided on the command line by - -- the user via the '--dependency' and '--flags' options. - configFlagError :: Flag String, - -- ^Halt and show an error message indicating an error in flag assignment - configRelocatable :: Flag Bool, -- ^ Enable relocatable package built - configDebugInfo :: Flag DebugInfoLevel, -- ^ Emit debug info. - configAllowNewer :: Maybe AllowNewer - -- ^ Ignore upper bounds on all or some dependencies. Wrapped in 'Maybe' to - -- distinguish between "default" and "explicitly disabled". - } - deriving (Generic, Read, Show) - -instance Binary ConfigFlags - --- | More convenient version of 'configPrograms'. Results in an --- 'error' if internal invariant is violated. -configPrograms :: ConfigFlags -> ProgramConfiguration -configPrograms = maybe (error "FIXME: remove configPrograms") id . getLast' . configPrograms_ - -configAbsolutePaths :: ConfigFlags -> IO ConfigFlags -configAbsolutePaths f = - (\v -> f { configPackageDBs = v }) - `liftM` mapM (maybe (return Nothing) (liftM Just . absolutePackageDBPath)) - (configPackageDBs f) - -defaultConfigFlags :: ProgramConfiguration -> ConfigFlags -defaultConfigFlags progConf = emptyConfigFlags { - configPrograms_ = pure progConf, - configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor, - configVanillaLib = Flag True, - configProfLib = NoFlag, - configSharedLib = NoFlag, - configDynExe = Flag False, - configProfExe = NoFlag, - configProf = NoFlag, - configProfDetail = NoFlag, - configProfLibDetail= NoFlag, - configOptimization = Flag NormalOptimisation, - configProgPrefix = Flag (toPathTemplate ""), - configProgSuffix = Flag (toPathTemplate ""), - configDistPref = NoFlag, - configVerbosity = Flag normal, - configUserInstall = Flag False, --TODO: reverse this -#if defined(mingw32_HOST_OS) - -- See #1589. 
- configGHCiLib = Flag True, -#else - configGHCiLib = NoFlag, -#endif - configSplitObjs = Flag False, -- takes longer, so turn off by default - configStripExes = Flag True, - configStripLibs = Flag True, - configTests = Flag False, - configBenchmarks = Flag False, - configCoverage = Flag False, - configLibCoverage = NoFlag, - configExactConfiguration = Flag False, - configFlagError = NoFlag, - configRelocatable = Flag False, - configDebugInfo = Flag NoDebugInfo, - configAllowNewer = Nothing - } - -configureCommand :: ProgramConfiguration -> CommandUI ConfigFlags -configureCommand progConf = CommandUI - { commandName = "configure" - , commandSynopsis = "Prepare to build the package." - , commandDescription = Just $ \_ -> wrapText $ - "Configure how the package is built by setting " - ++ "package (and other) flags.\n" - ++ "\n" - ++ "The configuration affects several other commands, " - ++ "including build, test, bench, run, repl.\n" - , commandNotes = Just $ \_pname -> programFlagsDescription progConf - , commandUsage = \pname -> - "Usage: " ++ pname ++ " configure [FLAGS]\n" - , commandDefaultFlags = defaultConfigFlags progConf - , commandOptions = \showOrParseArgs -> - configureOptions showOrParseArgs - ++ programConfigurationPaths progConf showOrParseArgs - configProgramPaths (\v fs -> fs { configProgramPaths = v }) - ++ programConfigurationOption progConf showOrParseArgs - configProgramArgs (\v fs -> fs { configProgramArgs = v }) - ++ programConfigurationOptions progConf showOrParseArgs - configProgramArgs (\v fs -> fs { configProgramArgs = v }) - } - -configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] -configureOptions showOrParseArgs = - [optionVerbosity configVerbosity - (\v flags -> flags { configVerbosity = v }) - ,optionDistPref - configDistPref (\d flags -> flags { configDistPref = d }) - showOrParseArgs - - ,option [] ["compiler"] "compiler" - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - (choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC") - , (Flag GHCJS, ([] , ["ghcjs"]), "compile with GHCJS") - , (Flag JHC, ([] , ["jhc"]), "compile with JHC") - , (Flag LHC, ([] , ["lhc"]), "compile with LHC") - , (Flag UHC, ([] , ["uhc"]), "compile with UHC") - -- "haskell-suite" compiler id string will be replaced - -- by a more specific one during the configure stage - , (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]), - "compile with a haskell-suite compiler")]) - - ,option "w" ["with-compiler"] - "give the path to a particular compiler" - configHcPath (\v flags -> flags { configHcPath = v }) - (reqArgFlag "PATH") - - ,option "" ["with-hc-pkg"] - "give the path to the package tool" - configHcPkg (\v flags -> flags { configHcPkg = v }) - (reqArgFlag "PATH") - ] - ++ map liftInstallDirs installDirsOptions - ++ [option "" ["program-prefix"] - "prefix to be applied to installed executables" - configProgPrefix - (\v flags -> flags { configProgPrefix = v }) - (reqPathTemplateArgFlag "PREFIX") - - ,option "" ["program-suffix"] - "suffix to be applied to installed executables" - configProgSuffix (\v flags -> flags { configProgSuffix = v } ) - (reqPathTemplateArgFlag "SUFFIX") - - ,option "" ["library-vanilla"] - "Vanilla libraries" - configVanillaLib (\v flags -> flags { configVanillaLib = v }) - (boolOpt [] []) - - ,option "p" ["library-profiling"] - "Library profiling" - configProfLib (\v flags -> flags { configProfLib = v }) - (boolOpt "p" []) - - ,option "" ["shared"] - "Shared library" - configSharedLib (\v flags -> flags { configSharedLib = 
v }) - (boolOpt [] []) - - ,option "" ["executable-dynamic"] - "Executable dynamic linking" - configDynExe (\v flags -> flags { configDynExe = v }) - (boolOpt [] []) - - ,option "" ["profiling"] - "Executable and library profiling" - configProf (\v flags -> flags { configProf = v }) - (boolOpt [] []) - - ,option "" ["executable-profiling"] - "Executable profiling (DEPRECATED)" - configProfExe (\v flags -> flags { configProfExe = v }) - (boolOpt [] []) - - ,option "" ["profiling-detail"] - ("Profiling detail level for executable and library (default, " ++ - "none, exported-functions, toplevel-functions, all-functions).") - configProfDetail (\v flags -> flags { configProfDetail = v }) - (reqArg' "level" (Flag . flagToProfDetailLevel) - showProfDetailLevelFlag) - - ,option "" ["library-profiling-detail"] - "Profiling detail level for libraries only." - configProfLibDetail (\v flags -> flags { configProfLibDetail = v }) - (reqArg' "level" (Flag . flagToProfDetailLevel) - showProfDetailLevelFlag) - - ,multiOption "optimization" - configOptimization (\v flags -> flags { configOptimization = v }) - [optArg' "n" (Flag . flagToOptimisationLevel) - (\f -> case f of - Flag NoOptimisation -> [] - Flag NormalOptimisation -> [Nothing] - Flag MaximumOptimisation -> [Just "2"] - _ -> []) - "O" ["enable-optimization","enable-optimisation"] - "Build with optimization (n is 0--2, default is 1)", - noArg (Flag NoOptimisation) [] - ["disable-optimization","disable-optimisation"] - "Build without optimization" - ] - - ,multiOption "debug-info" - configDebugInfo (\v flags -> flags { configDebugInfo = v }) - [optArg' "n" (Flag . flagToDebugInfoLevel) - (\f -> case f of - Flag NoDebugInfo -> [] - Flag MinimalDebugInfo -> [Just "1"] - Flag NormalDebugInfo -> [Nothing] - Flag MaximalDebugInfo -> [Just "3"] - _ -> []) - "" ["enable-debug-info"] - "Emit debug info (n is 0--3, default is 0)", - noArg (Flag NoDebugInfo) [] - ["disable-debug-info"] - "Don't emit debug info" - ] - - ,option "" ["library-for-ghci"] - "compile library for use with GHCi" - configGHCiLib (\v flags -> flags { configGHCiLib = v }) - (boolOpt [] []) - - ,option "" ["split-objs"] - "split library into smaller objects to reduce binary sizes (GHC 6.6+)" - configSplitObjs (\v flags -> flags { configSplitObjs = v }) - (boolOpt [] []) - - ,option "" ["executable-stripping"] - "strip executables upon installation to reduce binary sizes" - configStripExes (\v flags -> flags { configStripExes = v }) - (boolOpt [] []) - - ,option "" ["library-stripping"] - "strip libraries upon installation to reduce binary sizes" - configStripLibs (\v flags -> flags { configStripLibs = v }) - (boolOpt [] []) - - ,option "" ["configure-option"] - "Extra option for configure" - configConfigureArgs (\v flags -> flags { configConfigureArgs = v }) - (reqArg' "OPT" (\x -> [x]) id) - - ,option "" ["user-install"] - "doing a per-user installation" - configUserInstall (\v flags -> flags { configUserInstall = v }) - (boolOpt' ([],["user"]) ([], ["global"])) - - ,option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." 
- ++ " See the user guide for details.") - configPackageDBs (\v flags -> flags { configPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ,option "f" ["flags"] - "Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false." - configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v }) - (reqArg' "FLAGS" readFlagList showFlagList) - - ,option "" ["extra-include-dirs"] - "A list of directories to search for header files" - configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["ipid"] - "Installed package ID to compile this package as" - configIPID (\v flags -> flags {configIPID = v}) - (reqArgFlag "IPID") - - ,option "" ["extra-lib-dirs"] - "A list of directories to search for external libraries" - configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["extra-framework-dirs"] - "A list of directories to search for external frameworks (OS X only)" - configExtraFrameworkDirs - (\v flags -> flags {configExtraFrameworkDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["extra-prog-path"] - "A list of directories to search for required programs (in addition to the normal search locations)" - configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v}) - (reqArg' "PATH" (\x -> toNubList [x]) fromNubList) - - ,option "" ["constraint"] - "A list of additional constraints on the dependencies." - configConstraints (\v flags -> flags { configConstraints = v}) - (reqArg "DEPENDENCY" - (readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parse)) - (map (\x -> display x))) - - ,option "" ["dependency"] - "A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\"" - configDependencies (\v flags -> flags { configDependencies = v}) - (reqArg "NAME=ID" - (readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parseDependency)) - (map (\x -> display (fst x) ++ "=" ++ display (snd x)))) - - ,option "" ["tests"] - "dependency checking and compilation for test suites listed in the package description file." - configTests (\v flags -> flags { configTests = v }) - (boolOpt [] []) - - ,option "" ["coverage"] - "build package with Haskell Program Coverage. (GHC only)" - configCoverage (\v flags -> flags { configCoverage = v }) - (boolOpt [] []) - - ,option "" ["library-coverage"] - "build package with Haskell Program Coverage. (GHC only) (DEPRECATED)" - configLibCoverage (\v flags -> flags { configLibCoverage = v }) - (boolOpt [] []) - - ,option [] ["allow-newer"] - ("Ignore upper bounds in all dependencies or DEPS") - configAllowNewer (\v flags -> flags { configAllowNewer = v}) - (optArg "DEPS" - (readP_to_E ("Cannot parse the list of packages: " ++) allowNewerParser) - (Just AllowNewerAll) allowNewerPrinter) - - ,option "" ["exact-configuration"] - "All direct dependencies and flags are provided on the command line." - configExactConfiguration - (\v flags -> flags { configExactConfiguration = v }) - trueArg - - ,option "" ["benchmarks"] - "dependency checking and compilation for benchmarks listed in the package description file." - configBenchmarks (\v flags -> flags { configBenchmarks = v }) - (boolOpt [] []) - - ,option "" ["relocatable"] - "building a package that is relocatable. 
(GHC only)" - configRelocatable (\v flags -> flags { configRelocatable = v}) - (boolOpt [] []) - ] - where - readFlagList :: String -> FlagAssignment - readFlagList = map tagWithValue . words - where tagWithValue ('-':fname) = (FlagName (lowercase fname), False) - tagWithValue fname = (FlagName (lowercase fname), True) - - showFlagList :: FlagAssignment -> [String] - showFlagList fs = [ if not set then '-':fname else fname - | (FlagName fname, set) <- fs] - - liftInstallDirs = - liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v }) - - reqPathTemplateArgFlag title _sf _lf d get set = - reqArgFlag title _sf _lf d - (fmap fromPathTemplate . get) (set . fmap toPathTemplate) - -readPackageDbList :: String -> [Maybe PackageDB] -readPackageDbList "clear" = [Nothing] -readPackageDbList "global" = [Just GlobalPackageDB] -readPackageDbList "user" = [Just UserPackageDB] -readPackageDbList other = [Just (SpecificPackageDB other)] - -showPackageDbList :: [Maybe PackageDB] -> [String] -showPackageDbList = map showPackageDb - where - showPackageDb Nothing = "clear" - showPackageDb (Just GlobalPackageDB) = "global" - showPackageDb (Just UserPackageDB) = "user" - showPackageDb (Just (SpecificPackageDB db)) = db - -showProfDetailLevelFlag :: Flag ProfDetailLevel -> [String] -showProfDetailLevelFlag NoFlag = [] -showProfDetailLevelFlag (Flag dl) = [showProfDetailLevel dl] - -parseDependency :: Parse.ReadP r (PackageName, UnitId) -parseDependency = do - x <- parse - _ <- Parse.char '=' - y <- parse - return (x, y) - -installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))] -installDirsOptions = - [ option "" ["prefix"] - "bake this prefix in preparation of installation" - prefix (\v flags -> flags { prefix = v }) - installDirArg - - , option "" ["bindir"] - "installation directory for executables" - bindir (\v flags -> flags { bindir = v }) - installDirArg - - , option "" ["libdir"] - "installation directory for libraries" - libdir (\v flags -> flags { libdir = v }) - installDirArg - - , option "" ["libsubdir"] - "subdirectory of libdir in which libs are installed" - libsubdir (\v flags -> flags { libsubdir = v }) - installDirArg - - , option "" ["dynlibdir"] - "installation directory for dynamic libraries" - dynlibdir (\v flags -> flags { dynlibdir = v }) - installDirArg - - , option "" ["libexecdir"] - "installation directory for program executables" - libexecdir (\v flags -> flags { libexecdir = v }) - installDirArg - - , option "" ["datadir"] - "installation directory for read-only data" - datadir (\v flags -> flags { datadir = v }) - installDirArg - - , option "" ["datasubdir"] - "subdirectory of datadir in which data files are installed" - datasubdir (\v flags -> flags { datasubdir = v }) - installDirArg - - , option "" ["docdir"] - "installation directory for documentation" - docdir (\v flags -> flags { docdir = v }) - installDirArg - - , option "" ["htmldir"] - "installation directory for HTML documentation" - htmldir (\v flags -> flags { htmldir = v }) - installDirArg - - , option "" ["haddockdir"] - "installation directory for haddock interfaces" - haddockdir (\v flags -> flags { haddockdir = v }) - installDirArg - - , option "" ["sysconfdir"] - "installation directory for configuration files" - sysconfdir (\v flags -> flags { sysconfdir = v }) - installDirArg - ] - where - installDirArg _sf _lf d get set = - reqArgFlag "DIR" _sf _lf d - (fmap fromPathTemplate . get) (set . 
fmap toPathTemplate) - -emptyConfigFlags :: ConfigFlags -emptyConfigFlags = mempty - -instance Monoid ConfigFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup ConfigFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Copy flags --- ------------------------------------------------------------ - --- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity) -data CopyFlags = CopyFlags { - copyDest :: Flag CopyDest, - copyDistPref :: Flag FilePath, - copyVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultCopyFlags :: CopyFlags -defaultCopyFlags = CopyFlags { - copyDest = Flag NoCopyDest, - copyDistPref = NoFlag, - copyVerbosity = Flag normal - } - -copyCommand :: CommandUI CopyFlags -copyCommand = CommandUI - { commandName = "copy" - , commandSynopsis = "Copy the files into the install locations." - , commandDescription = Just $ \_ -> wrapText $ - "Does not call register, and allows a prefix at install time. " - ++ "Without the --destdir flag, configure determines location.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " copy [FLAGS]\n" - , commandDefaultFlags = defaultCopyFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v }) - - ,optionDistPref - copyDistPref (\d flags -> flags { copyDistPref = d }) - showOrParseArgs - - ,option "" ["destdir"] - "directory to copy files to, prepended to installation directories" - copyDest (\v flags -> flags { copyDest = v }) - (reqArg "DIR" (succeedReadE (Flag . CopyTo)) - (\f -> case f of Flag (CopyTo p) -> [p]; _ -> [])) - ] - } - -emptyCopyFlags :: CopyFlags -emptyCopyFlags = mempty - -instance Monoid CopyFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup CopyFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Install flags --- ------------------------------------------------------------ - --- | Flags to @install@: (package db, verbosity) -data InstallFlags = InstallFlags { - installPackageDB :: Flag PackageDB, - installDistPref :: Flag FilePath, - installUseWrapper :: Flag Bool, - installInPlace :: Flag Bool, - installVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultInstallFlags :: InstallFlags -defaultInstallFlags = InstallFlags { - installPackageDB = NoFlag, - installDistPref = NoFlag, - installUseWrapper = Flag False, - installInPlace = Flag False, - installVerbosity = Flag normal - } - -installCommand :: CommandUI InstallFlags -installCommand = CommandUI - { commandName = "install" - , commandSynopsis = - "Copy the files into the install locations. Run register." - , commandDescription = Just $ \_ -> wrapText $ - "Unlike the copy command, install calls the register command." 
- ++ "If you want to install into a location that is not what was" - ++ "specified in the configure step, use the copy command.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " install [FLAGS]\n" - , commandDefaultFlags = defaultInstallFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v }) - ,optionDistPref - installDistPref (\d flags -> flags { installDistPref = d }) - showOrParseArgs - - ,option "" ["inplace"] - "install the package in the install subdirectory of the dist prefix, so it can be used without being installed" - installInPlace (\v flags -> flags { installInPlace = v }) - trueArg - - ,option "" ["shell-wrappers"] - "using shell script wrappers around executables" - installUseWrapper (\v flags -> flags { installUseWrapper = v }) - (boolOpt [] []) - - ,option "" ["package-db"] "" - installPackageDB (\v flags -> flags { installPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "upon configuration register this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default) upon configuration register this package in the system-wide package database")]) - ] - } - -emptyInstallFlags :: InstallFlags -emptyInstallFlags = mempty - -instance Monoid InstallFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup InstallFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * SDist flags --- ------------------------------------------------------------ - --- | Flags to @sdist@: (snapshot, verbosity) -data SDistFlags = SDistFlags { - sDistSnapshot :: Flag Bool, - sDistDirectory :: Flag FilePath, - sDistDistPref :: Flag FilePath, - sDistListSources :: Flag FilePath, - sDistVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultSDistFlags :: SDistFlags -defaultSDistFlags = SDistFlags { - sDistSnapshot = Flag False, - sDistDirectory = mempty, - sDistDistPref = NoFlag, - sDistListSources = mempty, - sDistVerbosity = Flag normal - } - -sdistCommand :: CommandUI SDistFlags -sdistCommand = CommandUI - { commandName = "sdist" - , commandSynopsis = - "Generate a source distribution file (.tar.gz)." 
- , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " sdist [FLAGS]\n" - , commandDefaultFlags = defaultSDistFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v }) - ,optionDistPref - sDistDistPref (\d flags -> flags { sDistDistPref = d }) - showOrParseArgs - - ,option "" ["list-sources"] - "Just write a list of the package's sources to a file" - sDistListSources (\v flags -> flags { sDistListSources = v }) - (reqArgFlag "FILE") - - ,option "" ["snapshot"] - "Produce a snapshot source distribution" - sDistSnapshot (\v flags -> flags { sDistSnapshot = v }) - trueArg - - ,option "" ["output-directory"] - ("Generate a source distribution in the given directory, " - ++ "without creating a tarball") - sDistDirectory (\v flags -> flags { sDistDirectory = v }) - (reqArgFlag "DIR") - ] - } - -emptySDistFlags :: SDistFlags -emptySDistFlags = mempty - -instance Monoid SDistFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup SDistFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Register flags --- ------------------------------------------------------------ - --- | Flags to @register@ and @unregister@: (user package, gen-script, --- in-place, verbosity) -data RegisterFlags = RegisterFlags { - regPackageDB :: Flag PackageDB, - regGenScript :: Flag Bool, - regGenPkgConf :: Flag (Maybe FilePath), - regInPlace :: Flag Bool, - regDistPref :: Flag FilePath, - regPrintId :: Flag Bool, - regVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultRegisterFlags :: RegisterFlags -defaultRegisterFlags = RegisterFlags { - regPackageDB = NoFlag, - regGenScript = Flag False, - regGenPkgConf = NoFlag, - regInPlace = Flag False, - regDistPref = NoFlag, - regPrintId = Flag False, - regVerbosity = Flag normal - } - -registerCommand :: CommandUI RegisterFlags -registerCommand = CommandUI - { commandName = "register" - , commandSynopsis = - "Register this package with the compiler." 
- , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " register [FLAGS]\n" - , commandDefaultFlags = defaultRegisterFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) - ,optionDistPref - regDistPref (\d flags -> flags { regDistPref = d }) - showOrParseArgs - - ,option "" ["packageDB"] "" - regPackageDB (\v flags -> flags { regPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "upon registration, register this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default)upon registration, register this package in the system-wide package database")]) - - ,option "" ["inplace"] - "register the package in the build location, so it can be used without being installed" - regInPlace (\v flags -> flags { regInPlace = v }) - trueArg - - ,option "" ["gen-script"] - "instead of registering, generate a script to register later" - regGenScript (\v flags -> flags { regGenScript = v }) - trueArg - - ,option "" ["gen-pkg-config"] - "instead of registering, generate a package registration file" - regGenPkgConf (\v flags -> flags { regGenPkgConf = v }) - (optArg' "PKG" Flag flagToList) - - ,option "" ["print-ipid"] - "print the installed package ID calculated for this package" - regPrintId (\v flags -> flags { regPrintId = v }) - trueArg - ] - } - -unregisterCommand :: CommandUI RegisterFlags -unregisterCommand = CommandUI - { commandName = "unregister" - , commandSynopsis = - "Unregister this package with the compiler." - , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " unregister [FLAGS]\n" - , commandDefaultFlags = defaultRegisterFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) - ,optionDistPref - regDistPref (\d flags -> flags { regDistPref = d }) - showOrParseArgs - - ,option "" ["user"] "" - regPackageDB (\v flags -> flags { regPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "unregister this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default) unregister this package in the system-wide package database")]) - - ,option "" ["gen-script"] - "Instead of performing the unregister command, generate a script to unregister later" - regGenScript (\v flags -> flags { regGenScript = v }) - trueArg - ] - } - -emptyRegisterFlags :: RegisterFlags -emptyRegisterFlags = mempty - -instance Monoid RegisterFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup RegisterFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * HsColour flags --- ------------------------------------------------------------ - -data HscolourFlags = HscolourFlags { - hscolourCSS :: Flag FilePath, - hscolourExecutables :: Flag Bool, - hscolourTestSuites :: Flag Bool, - hscolourBenchmarks :: Flag Bool, - hscolourDistPref :: Flag FilePath, - hscolourVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -emptyHscolourFlags :: HscolourFlags -emptyHscolourFlags = mempty - -defaultHscolourFlags :: HscolourFlags -defaultHscolourFlags = HscolourFlags { - hscolourCSS = NoFlag, - hscolourExecutables = Flag False, - hscolourTestSuites = Flag False, - hscolourBenchmarks = Flag False, - hscolourDistPref = NoFlag, - hscolourVerbosity = Flag normal - } - -instance Monoid HscolourFlags where - 
mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup HscolourFlags where - (<>) = gmappend - -hscolourCommand :: CommandUI HscolourFlags -hscolourCommand = CommandUI - { commandName = "hscolour" - , commandSynopsis = - "Generate HsColour colourised code, in HTML format." - , commandDescription = Just (\_ -> "Requires the hscolour program.\n") - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " hscolour [FLAGS]\n" - , commandDefaultFlags = defaultHscolourFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity hscolourVerbosity - (\v flags -> flags { hscolourVerbosity = v }) - ,optionDistPref - hscolourDistPref (\d flags -> flags { hscolourDistPref = d }) - showOrParseArgs - - ,option "" ["executables"] - "Run hscolour for Executables targets" - hscolourExecutables (\v flags -> flags { hscolourExecutables = v }) - trueArg - - ,option "" ["tests"] - "Run hscolour for Test Suite targets" - hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v }) - trueArg - - ,option "" ["benchmarks"] - "Run hscolour for Benchmark targets" - hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v }) - trueArg - - ,option "" ["all"] - "Run hscolour for all targets" - (\f -> allFlags [ hscolourExecutables f - , hscolourTestSuites f - , hscolourBenchmarks f]) - (\v flags -> flags { hscolourExecutables = v - , hscolourTestSuites = v - , hscolourBenchmarks = v }) - trueArg - - ,option "" ["css"] - "Use a cascading style sheet" - hscolourCSS (\v flags -> flags { hscolourCSS = v }) - (reqArgFlag "PATH") - ] - } - --- ------------------------------------------------------------ --- * Haddock flags --- ------------------------------------------------------------ - - --- | When we build haddock documentation, there are two cases: --- --- 1. We build haddocks only for the current development version, --- intended for local use and not for distribution. In this case, --- we store the generated documentation in @/doc/html/@. --- --- 2. We build haddocks for intended for uploading them to hackage. --- In this case, we need to follow the layout that hackage expects --- from documentation tarballs, and we might also want to use different --- flags than for development builds, so in this case we store the generated --- documentation in @/doc/html/-docs@. 
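-- Illustrative sketch, not part of the diff: the two layouts described in the
-- comment above, written out as a hypothetical helper.  ForDevelopment and
-- ForHackage are the constructors of HaddockTarget defined just below; the
-- literal "dist/doc/html" stands in for the real dist-prefix handling, and
-- (</>) / display are assumed to be in scope from System.FilePath and
-- Distribution.Text.
docsDirSketch :: HaddockTarget -> PackageIdentifier -> FilePath
docsDirSketch ForDevelopment pkgid = "dist/doc/html" </> display (packageName pkgid)
docsDirSketch ForHackage     pkgid = "dist/doc/html" </> (display pkgid ++ "-docs")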
-data HaddockTarget = ForHackage | ForDevelopment deriving (Eq, Show, Generic) - -data HaddockFlags = HaddockFlags { - haddockProgramPaths :: [(String, FilePath)], - haddockProgramArgs :: [(String, [String])], - haddockHoogle :: Flag Bool, - haddockHtml :: Flag Bool, - haddockHtmlLocation :: Flag String, - haddockForHackage :: Flag HaddockTarget, - haddockExecutables :: Flag Bool, - haddockTestSuites :: Flag Bool, - haddockBenchmarks :: Flag Bool, - haddockInternal :: Flag Bool, - haddockCss :: Flag FilePath, - haddockHscolour :: Flag Bool, - haddockHscolourCss :: Flag FilePath, - haddockContents :: Flag PathTemplate, - haddockDistPref :: Flag FilePath, - haddockKeepTempFiles:: Flag Bool, - haddockVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultHaddockFlags :: HaddockFlags -defaultHaddockFlags = HaddockFlags { - haddockProgramPaths = mempty, - haddockProgramArgs = [], - haddockHoogle = Flag False, - haddockHtml = Flag False, - haddockHtmlLocation = NoFlag, - haddockForHackage = Flag ForDevelopment, - haddockExecutables = Flag False, - haddockTestSuites = Flag False, - haddockBenchmarks = Flag False, - haddockInternal = Flag False, - haddockCss = NoFlag, - haddockHscolour = Flag False, - haddockHscolourCss = NoFlag, - haddockContents = NoFlag, - haddockDistPref = NoFlag, - haddockKeepTempFiles= Flag False, - haddockVerbosity = Flag normal - } - -haddockCommand :: CommandUI HaddockFlags -haddockCommand = CommandUI - { commandName = "haddock" - , commandSynopsis = "Generate Haddock HTML documentation." - , commandDescription = Just $ \_ -> - "Requires the program haddock, version 2.x.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " haddock [FLAGS]\n" - , commandDefaultFlags = defaultHaddockFlags - , commandOptions = \showOrParseArgs -> - haddockOptions showOrParseArgs - ++ programConfigurationPaths progConf ParseArgs - haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v}) - ++ programConfigurationOption progConf showOrParseArgs - haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v }) - ++ programConfigurationOptions progConf ParseArgs - haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v}) - } - where - progConf = addKnownProgram haddockProgram - $ addKnownProgram ghcProgram - $ emptyProgramConfiguration - -haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] -haddockOptions showOrParseArgs = - [optionVerbosity haddockVerbosity - (\v flags -> flags { haddockVerbosity = v }) - ,optionDistPref - haddockDistPref (\d flags -> flags { haddockDistPref = d }) - showOrParseArgs - - ,option "" ["keep-temp-files"] - "Keep temporary files" - haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b }) - trueArg - - ,option "" ["hoogle"] - "Generate a hoogle database" - haddockHoogle (\v flags -> flags { haddockHoogle = v }) - trueArg - - ,option "" ["html"] - "Generate HTML documentation (the default)" - haddockHtml (\v flags -> flags { haddockHtml = v }) - trueArg - - ,option "" ["html-location"] - "Location of HTML documentation for pre-requisite packages" - haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v }) - (reqArgFlag "URL") - - ,option "" ["for-hackage"] - "Collection of flags to generate documentation suitable for upload to hackage" - haddockForHackage (\v flags -> flags { haddockForHackage = v }) - (noArg (Flag ForHackage)) - - ,option "" ["executables"] - "Run haddock for Executables targets" - haddockExecutables (\v flags -> flags { haddockExecutables = v }) - trueArg - 
- ,option "" ["tests"] - "Run haddock for Test Suite targets" - haddockTestSuites (\v flags -> flags { haddockTestSuites = v }) - trueArg - - ,option "" ["benchmarks"] - "Run haddock for Benchmark targets" - haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v }) - trueArg - - ,option "" ["all"] - "Run haddock for all targets" - (\f -> allFlags [ haddockExecutables f - , haddockTestSuites f - , haddockBenchmarks f]) - (\v flags -> flags { haddockExecutables = v - , haddockTestSuites = v - , haddockBenchmarks = v }) - trueArg - - ,option "" ["internal"] - "Run haddock for internal modules and include all symbols" - haddockInternal (\v flags -> flags { haddockInternal = v }) - trueArg - - ,option "" ["css"] - "Use PATH as the haddock stylesheet" - haddockCss (\v flags -> flags { haddockCss = v }) - (reqArgFlag "PATH") - - ,option "" ["hyperlink-source","hyperlink-sources"] - "Hyperlink the documentation to the source code (using HsColour)" - haddockHscolour (\v flags -> flags { haddockHscolour = v }) - trueArg - - ,option "" ["hscolour-css"] - "Use PATH as the HsColour stylesheet" - haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v }) - (reqArgFlag "PATH") - - ,option "" ["contents-location"] - "Bake URL in as the location for the contents page" - haddockContents (\v flags -> flags { haddockContents = v }) - (reqArg' "URL" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - ] - -emptyHaddockFlags :: HaddockFlags -emptyHaddockFlags = mempty - -instance Monoid HaddockFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup HaddockFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Clean flags --- ------------------------------------------------------------ - -data CleanFlags = CleanFlags { - cleanSaveConf :: Flag Bool, - cleanDistPref :: Flag FilePath, - cleanVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultCleanFlags :: CleanFlags -defaultCleanFlags = CleanFlags { - cleanSaveConf = Flag False, - cleanDistPref = NoFlag, - cleanVerbosity = Flag normal - } - -cleanCommand :: CommandUI CleanFlags -cleanCommand = CommandUI - { commandName = "clean" - , commandSynopsis = "Clean up after a build." - , commandDescription = Just $ \_ -> - "Removes .hi, .o, preprocessed sources, etc.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " clean [FLAGS]\n" - , commandDefaultFlags = defaultCleanFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) - ,optionDistPref - cleanDistPref (\d flags -> flags { cleanDistPref = d }) - showOrParseArgs - - ,option "s" ["save-configure"] - "Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure." 
- cleanSaveConf (\v flags -> flags { cleanSaveConf = v }) - trueArg - ] - } - -emptyCleanFlags :: CleanFlags -emptyCleanFlags = mempty - -instance Monoid CleanFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup CleanFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Build flags --- ------------------------------------------------------------ - -data BuildFlags = BuildFlags { - buildProgramPaths :: [(String, FilePath)], - buildProgramArgs :: [(String, [String])], - buildDistPref :: Flag FilePath, - buildVerbosity :: Flag Verbosity, - buildNumJobs :: Flag (Maybe Int), - -- TODO: this one should not be here, it's just that the silly - -- UserHooks stop us from passing extra info in other ways - buildArgs :: [String] - } - deriving (Show, Generic) - -{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-} -buildVerbose :: BuildFlags -> Verbosity -buildVerbose = fromFlagOrDefault normal . buildVerbosity - -defaultBuildFlags :: BuildFlags -defaultBuildFlags = BuildFlags { - buildProgramPaths = mempty, - buildProgramArgs = [], - buildDistPref = mempty, - buildVerbosity = Flag normal, - buildNumJobs = mempty, - buildArgs = [] - } - -buildCommand :: ProgramConfiguration -> CommandUI BuildFlags -buildCommand progConf = CommandUI - { commandName = "build" - , commandSynopsis = "Compile all/specific components." - , commandDescription = Just $ \_ -> wrapText $ - "Components encompass executables, tests, and benchmarks.\n" - ++ "\n" - ++ "Affected by configuration options, see `configure`.\n" - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " build " - ++ " All the components in the package\n" - ++ " " ++ pname ++ " build foo " - ++ " A component (i.e. lib, exe, test suite)\n\n" - ++ programFlagsDescription progConf ---TODO: re-enable once we have support for module/file targets --- ++ " " ++ pname ++ " build Foo.Bar " --- ++ " A module\n" --- ++ " " ++ pname ++ " build Foo/Bar.hs" --- ++ " A file\n\n" --- ++ "If a target is ambiguous it can be qualified with the component " --- ++ "name, e.g.\n" --- ++ " " ++ pname ++ " build foo:Foo.Bar\n" --- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n" - , commandUsage = usageAlternatives "build" $ - [ "[FLAGS]" - , "COMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultBuildFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity - buildVerbosity (\v flags -> flags { buildVerbosity = v }) - - , optionDistPref - buildDistPref (\d flags -> flags { buildDistPref = d }) showOrParseArgs - ] - ++ buildOptions progConf showOrParseArgs - } - -buildOptions :: ProgramConfiguration -> ShowOrParseArgs - -> [OptionField BuildFlags] -buildOptions progConf showOrParseArgs = - [ optionNumJobs - buildNumJobs (\v flags -> flags { buildNumJobs = v }) - ] - - ++ programConfigurationPaths progConf showOrParseArgs - buildProgramPaths (\v flags -> flags { buildProgramPaths = v}) - - ++ programConfigurationOption progConf showOrParseArgs - buildProgramArgs (\v fs -> fs { buildProgramArgs = v }) - - ++ programConfigurationOptions progConf showOrParseArgs - buildProgramArgs (\v flags -> flags { buildProgramArgs = v}) - -emptyBuildFlags :: BuildFlags -emptyBuildFlags = mempty - -instance Monoid BuildFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup BuildFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * REPL Flags --- ------------------------------------------------------------ - -data 
ReplFlags = ReplFlags { - replProgramPaths :: [(String, FilePath)], - replProgramArgs :: [(String, [String])], - replDistPref :: Flag FilePath, - replVerbosity :: Flag Verbosity, - replReload :: Flag Bool - } - deriving (Show, Generic) - -defaultReplFlags :: ReplFlags -defaultReplFlags = ReplFlags { - replProgramPaths = mempty, - replProgramArgs = [], - replDistPref = NoFlag, - replVerbosity = Flag normal, - replReload = Flag False - } - -instance Monoid ReplFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup ReplFlags where - (<>) = gmappend - -replCommand :: ProgramConfiguration -> CommandUI ReplFlags -replCommand progConf = CommandUI - { commandName = "repl" - , commandSynopsis = - "Open an interpreter session for the given component." - , commandDescription = Just $ \pname -> wrapText $ - "If the current directory contains no package, ignores COMPONENT " - ++ "parameters and opens an interactive interpreter session; if a " - ++ "sandbox is present, its package database will be used.\n" - ++ "\n" - ++ "Otherwise, (re)configures with the given or default flags, and " - ++ "loads the interpreter with the relevant modules. For executables, " - ++ "tests and benchmarks, loads the main module (and its " - ++ "dependencies); for libraries all exposed/other modules.\n" - ++ "\n" - ++ "The default component is the library itself, or the executable " - ++ "if that is the only component.\n" - ++ "\n" - ++ "Support for loading specific modules is planned but not " - ++ "implemented yet. For certain scenarios, `" ++ pname - ++ " exec -- ghci :l Foo` may be used instead. Note that `exec` will " - ++ "not (re)configure and you will have to specify the location of " - ++ "other modules, if required.\n" - - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " repl " - ++ " The first component in the package\n" - ++ " " ++ pname ++ " repl foo " - ++ " A named component (i.e. lib, exe, test suite)\n" - ++ " " ++ pname ++ " repl --ghc-options=\"-lstdc++\"" - ++ " Specifying flags for interpreter\n" ---TODO: re-enable once we have support for module/file targets --- ++ " " ++ pname ++ " repl Foo.Bar " --- ++ " A module\n" --- ++ " " ++ pname ++ " repl Foo/Bar.hs" --- ++ " A file\n\n" --- ++ "If a target is ambiguous it can be qualified with the component " --- ++ "name, e.g.\n" --- ++ " " ++ pname ++ " repl foo:Foo.Bar\n" --- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n" - , commandUsage = \pname -> "Usage: " ++ pname ++ " repl [COMPONENT] [FLAGS]\n" - , commandDefaultFlags = defaultReplFlags - , commandOptions = \showOrParseArgs -> - optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v }) - : optionDistPref - replDistPref (\d flags -> flags { replDistPref = d }) - showOrParseArgs - - : programConfigurationPaths progConf showOrParseArgs - replProgramPaths (\v flags -> flags { replProgramPaths = v}) - - ++ programConfigurationOption progConf showOrParseArgs - replProgramArgs (\v flags -> flags { replProgramArgs = v}) - - ++ programConfigurationOptions progConf showOrParseArgs - replProgramArgs (\v flags -> flags { replProgramArgs = v}) - - ++ case showOrParseArgs of - ParseArgs -> - [ option "" ["reload"] - "Used from within an interpreter to update files." 
- replReload (\v flags -> flags { replReload = v }) - trueArg - ] - _ -> [] - } - --- ------------------------------------------------------------ --- * Test flags --- ------------------------------------------------------------ - -data TestShowDetails = Never | Failures | Always | Streaming | Direct - deriving (Eq, Ord, Enum, Bounded, Show) - -knownTestShowDetails :: [TestShowDetails] -knownTestShowDetails = [minBound..maxBound] - -instance Text TestShowDetails where - disp = Disp.text . lowercase . show - - parse = maybe Parse.pfail return . classify =<< ident - where - ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-') - classify str = lookup (lowercase str) enumMap - enumMap :: [(String, TestShowDetails)] - enumMap = [ (display x, x) - | x <- knownTestShowDetails ] - ---TODO: do we need this instance? -instance Monoid TestShowDetails where - mempty = Never - mappend = (Semi.<>) - -instance Semigroup TestShowDetails where - a <> b = if a < b then b else a - -data TestFlags = TestFlags { - testDistPref :: Flag FilePath, - testVerbosity :: Flag Verbosity, - testHumanLog :: Flag PathTemplate, - testMachineLog :: Flag PathTemplate, - testShowDetails :: Flag TestShowDetails, - testKeepTix :: Flag Bool, - -- TODO: think about if/how options are passed to test exes - testOptions :: [PathTemplate] - } deriving (Generic) - -defaultTestFlags :: TestFlags -defaultTestFlags = TestFlags { - testDistPref = NoFlag, - testVerbosity = Flag normal, - testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log", - testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log", - testShowDetails = toFlag Failures, - testKeepTix = toFlag False, - testOptions = [] - } - -testCommand :: CommandUI TestFlags -testCommand = CommandUI - { commandName = "test" - , commandSynopsis = - "Run all/specific tests in the test suite." - , commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-tests` flag and builds" - ++ " the test suite.\n" - ++ "\n" - ++ "Remember that the tests' dependencies must be installed if there" - ++ " are additional ones; e.g. with `" ++ pname - ++ " install --only-dependencies --enable-tests`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running tests.\n" - , commandNotes = Nothing - , commandUsage = usageAlternatives "test" - [ "[FLAGS]" - , "TESTCOMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultTestFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v }) - , optionDistPref - testDistPref (\d flags -> flags { testDistPref = d }) - showOrParseArgs - , option [] ["log"] - ("Log all test suite results to file (name template can use " - ++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)") - testHumanLog (\v flags -> flags { testHumanLog = v }) - (reqArg' "TEMPLATE" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - , option [] ["machine-log"] - ("Produce a machine-readable log file (name template can use " - ++ "$pkgid, $compiler, $os, $arch, $result)") - testMachineLog (\v flags -> flags { testMachineLog = v }) - (reqArg' "TEMPLATE" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - , option [] ["show-details"] - ("'always': always show results of individual test cases. " - ++ "'never': never show results of individual test cases. " - ++ "'failures': show results of failing test cases. 
" - ++ "'streaming': show results of test cases in real time." - ++ "'direct': send results of test cases in real time; no log file.") - testShowDetails (\v flags -> flags { testShowDetails = v }) - (reqArg "FILTER" - (readP_to_E (\_ -> "--show-details flag expects one of " - ++ intercalate ", " - (map display knownTestShowDetails)) - (fmap toFlag parse)) - (flagToList . fmap display)) - , option [] ["keep-tix-files"] - "keep .tix files for HPC between test runs" - testKeepTix (\v flags -> flags { testKeepTix = v}) - trueArg - , option [] ["test-options"] - ("give extra options to test executables " - ++ "(name templates can use $pkgid, $compiler, " - ++ "$os, $arch, $test-suite)") - testOptions (\v flags -> flags { testOptions = v }) - (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) - (const [])) - , option [] ["test-option"] - ("give extra option to test executables " - ++ "(no need to quote options containing spaces, " - ++ "name template can use $pkgid, $compiler, " - ++ "$os, $arch, $test-suite)") - testOptions (\v flags -> flags { testOptions = v }) - (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) - (map fromPathTemplate)) - ] - } - -emptyTestFlags :: TestFlags -emptyTestFlags = mempty - -instance Monoid TestFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup TestFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Benchmark flags --- ------------------------------------------------------------ - -data BenchmarkFlags = BenchmarkFlags { - benchmarkDistPref :: Flag FilePath, - benchmarkVerbosity :: Flag Verbosity, - benchmarkOptions :: [PathTemplate] - } deriving (Generic) - -defaultBenchmarkFlags :: BenchmarkFlags -defaultBenchmarkFlags = BenchmarkFlags { - benchmarkDistPref = NoFlag, - benchmarkVerbosity = Flag normal, - benchmarkOptions = [] - } - -benchmarkCommand :: CommandUI BenchmarkFlags -benchmarkCommand = CommandUI - { commandName = "bench" - , commandSynopsis = - "Run all/specific benchmarks." - , commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-benchmarks` flag and" - ++ " builds the benchmarks.\n" - ++ "\n" - ++ "Remember that the benchmarks' dependencies must be installed if" - ++ " there are additional ones; e.g. with `" ++ pname - ++ " install --only-dependencies --enable-benchmarks`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running" - ++ " benchmarks.\n" - , commandNotes = Nothing - , commandUsage = usageAlternatives "bench" - [ "[FLAGS]" - , "BENCHCOMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultBenchmarkFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity benchmarkVerbosity - (\v flags -> flags { benchmarkVerbosity = v }) - , optionDistPref - benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d }) - showOrParseArgs - , option [] ["benchmark-options"] - ("give extra options to benchmark executables " - ++ "(name templates can use $pkgid, $compiler, " - ++ "$os, $arch, $benchmark)") - benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) - (reqArg' "TEMPLATES" (map toPathTemplate . 
splitArgs) - (const [])) - , option [] ["benchmark-option"] - ("give extra option to benchmark executables " - ++ "(no need to quote options containing spaces, " - ++ "name template can use $pkgid, $compiler, " - ++ "$os, $arch, $benchmark)") - benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) - (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) - (map fromPathTemplate)) - ] - } - -emptyBenchmarkFlags :: BenchmarkFlags -emptyBenchmarkFlags = mempty - -instance Monoid BenchmarkFlags where - mempty = gmempty - mappend = (Semi.<>) - -instance Semigroup BenchmarkFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Shared options utils --- ------------------------------------------------------------ - -programFlagsDescription :: ProgramConfiguration -> String -programFlagsDescription progConf = - "The flags --with-PROG and --PROG-option(s) can be used with" - ++ " the following programs:" - ++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . sort) - [ programName prog | (prog, _) <- knownPrograms progConf ] - ++ "\n" - --- | For each known program @PROG@ in 'progConf', produce a @with-PROG@ --- 'OptionField'. -programConfigurationPaths - :: ProgramConfiguration - -> ShowOrParseArgs - -> (flags -> [(String, FilePath)]) - -> ([(String, FilePath)] -> (flags -> flags)) - -> [OptionField flags] -programConfigurationPaths progConf showOrParseArgs get set = - programConfigurationPaths' ("with-" ++) progConf showOrParseArgs get set - --- | Like 'programConfigurationPaths', but allows to customise the option name. -programConfigurationPaths' - :: (String -> String) - -> ProgramConfiguration - -> ShowOrParseArgs - -> (flags -> [(String, FilePath)]) - -> ([(String, FilePath)] -> (flags -> flags)) - -> [OptionField flags] -programConfigurationPaths' mkName progConf showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [withProgramPath "PROG"] - ParseArgs -> map (withProgramPath . programName . fst) - (knownPrograms progConf) - where - withProgramPath prog = - option "" [mkName prog] - ("give the path to " ++ prog) - get set - (reqArg' "PATH" (\path -> [(prog, path)]) - (\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ])) - --- | For each known program @PROG@ in 'progConf', produce a @PROG-option@ --- 'OptionField'. -programConfigurationOption - :: ProgramConfiguration - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] -programConfigurationOption progConf showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOption "PROG"] - ParseArgs -> map (programOption . programName . fst) - (knownPrograms progConf) - where - programOption prog = - option "" [prog ++ "-option"] - ("give an extra option to " ++ prog ++ - " (no need to quote options containing spaces)") - get set - (reqArg' "OPT" (\arg -> [(prog, [arg])]) - (\progArgs -> concat [ args - | (prog', args) <- progArgs, prog==prog' ])) - --- | For each known program @PROG@ in 'progConf', produce a @PROG-options@ --- 'OptionField'. 
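-- Illustrative note, not part of the diff: the generators above and the one
-- just below derive their flag names from the program name using a fixed
-- scheme.  The helper here is hypothetical and only mirrors that naming
-- convention; the real OptionFields also carry parsers, and the "-options"
-- variant splits its argument with splitArgs (defined later in this module).
programOptionNamesFor :: String -> [String]
programOptionNamesFor prog =
  [ "with-" ++ prog       -- --with-PROG=PATH
  , prog ++ "-option"     -- --PROG-option=OPT   (a single extra option)
  , prog ++ "-options"    -- --PROG-options=OPTS (whitespace-separated options)
  ]
-- programOptionNamesFor "ghc" == ["with-ghc","ghc-option","ghc-options"]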
-programConfigurationOptions - :: ProgramConfiguration - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] -programConfigurationOptions progConf showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOptions "PROG"] - ParseArgs -> map (programOptions . programName . fst) - (knownPrograms progConf) - where - programOptions prog = - option "" [prog ++ "-options"] - ("give extra options to " ++ prog) - get set - (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const [])) - --- ------------------------------------------------------------ --- * GetOpt Utils --- ------------------------------------------------------------ - -boolOpt :: SFlags -> SFlags - -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -boolOpt = Command.boolOpt flagToMaybe Flag - -boolOpt' :: OptFlags -> OptFlags - -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -boolOpt' = Command.boolOpt' flagToMaybe Flag - -trueArg, falseArg :: MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -trueArg sfT lfT = boolOpt' (sfT, lfT) ([], []) sfT lfT -falseArg sfF lfF = boolOpt' ([], []) (sfF, lfF) sfF lfF - -reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description -> - (b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b -reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList - -optionDistPref :: (flags -> Flag FilePath) - -> (Flag FilePath -> flags -> flags) - -> ShowOrParseArgs - -> OptionField flags -optionDistPref get set = \showOrParseArgs -> - option "" (distPrefFlagName showOrParseArgs) - ( "The directory where Cabal puts generated build files " - ++ "(default " ++ defaultDistPref ++ ")") - get set - (reqArgFlag "DIR") - where - distPrefFlagName ShowArgs = ["builddir"] - distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"] - -optionVerbosity :: (flags -> Flag Verbosity) - -> (Flag Verbosity -> flags -> flags) - -> OptionField flags -optionVerbosity get set = - option "v" ["verbose"] - "Control verbosity (n is 0--3, default verbosity level is 1)" - get set - (optArg "n" (fmap Flag flagToVerbosity) - (Flag verbose) -- default Value if no n is given - (fmap (Just . showForCabal) . flagToList)) - -optionNumJobs :: (flags -> Flag (Maybe Int)) - -> (Flag (Maybe Int) -> flags -> flags) - -> OptionField flags -optionNumJobs get set = - option "j" ["jobs"] - "Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)." - get set - (optArg "NUM" (fmap Flag numJobsParser) - (Flag Nothing) - (map (Just . maybe "$ncpus" show) . flagToList)) - where - numJobsParser :: ReadE (Maybe Int) - numJobsParser = ReadE $ \s -> - case s of - "$ncpus" -> Right Nothing - _ -> case reads s of - [(n, "")] - | n < 1 -> Left "The number of jobs should be 1 or more." - | otherwise -> Right (Just n) - _ -> Left "The jobs value should be a number or '$ncpus'" - --- ------------------------------------------------------------ --- * Other Utils --- ------------------------------------------------------------ - -readPToMaybe :: Parse.ReadP a a -> String -> Maybe a -readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str - , all isSpace s ] - --- | Arguments to pass to a @configure@ script, e.g. generated by --- @autoconf@. 
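-- Illustrative sketch, not part of the diff: what configureArgs (defined just
-- below) yields for a small ConfigFlags value.  GHC is assumed to be the
-- CompilerFlavor constructor from Distribution.Compiler, "--enable-shared" is
-- an arbitrary example argument, and the expected result in the final comment
-- follows directly from the definition below.
configureArgsExample :: [String]
configureArgsExample =
  configureArgs False emptyConfigFlags
    { configHcFlavor      = Flag GHC
    , configConfigureArgs = ["--enable-shared"]
    }
-- == ["--with-compiler=ghc", "--enable-shared"]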
-configureArgs :: Bool -> ConfigFlags -> [String] -configureArgs bcHack flags - = hc_flag - ++ optFlag "with-hc-pkg" configHcPkg - ++ optFlag' "prefix" prefix - ++ optFlag' "bindir" bindir - ++ optFlag' "libdir" libdir - ++ optFlag' "libexecdir" libexecdir - ++ optFlag' "datadir" datadir - ++ optFlag' "sysconfdir" sysconfdir - ++ configConfigureArgs flags - where - hc_flag = case (configHcFlavor flags, configHcPath flags) of - (_, Flag hc_path) -> [hc_flag_name ++ hc_path] - (Flag hc, NoFlag) -> [hc_flag_name ++ display hc] - (NoFlag,NoFlag) -> [] - hc_flag_name - --TODO kill off thic bc hack when defaultUserHooks is removed. - | bcHack = "--with-hc=" - | otherwise = "--with-compiler=" - optFlag name config_field = case config_field flags of - Flag p -> ["--" ++ name ++ "=" ++ p] - NoFlag -> [] - optFlag' name config_field = optFlag name (fmap fromPathTemplate - . config_field - . configInstallDirs) - -configureCCompiler :: Verbosity -> ProgramConfiguration - -> IO (FilePath, [String]) -configureCCompiler verbosity lbi = configureProg verbosity lbi gccProgram - -configureLinker :: Verbosity -> ProgramConfiguration -> IO (FilePath, [String]) -configureLinker verbosity lbi = configureProg verbosity lbi ldProgram - -configureProg :: Verbosity -> ProgramConfiguration -> Program - -> IO (FilePath, [String]) -configureProg verbosity programConfig prog = do - (p, _) <- requireProgram verbosity prog programConfig - let pInv = programInvocation p [] - return (progInvokePath pInv, progInvokeArgs pInv) - --- | Helper function to split a string into a list of arguments. --- It's supposed to handle quoted things sensibly, eg: --- --- > splitArgs "--foo=\"C:\Program Files\Bar\" --baz" --- > = ["--foo=C:\Program Files\Bar", "--baz"] --- -splitArgs :: String -> [String] -splitArgs = space [] - where - space :: String -> String -> [String] - space w [] = word w [] - space w ( c :s) - | isSpace c = word w (space [] s) - space w ('"':s) = string w s - space w s = nonstring w s - - string :: String -> String -> [String] - string w [] = word w [] - string w ('"':s) = space w s - string w ( c :s) = string (c:w) s - - nonstring :: String -> String -> [String] - nonstring w [] = word w [] - nonstring w ('"':s) = string w s - nonstring w ( c :s) = space (c:w) s - - word [] s = s - word w s = reverse w : s - --- The test cases kinda have to be rewritten from the ground up... :/ ---hunitTests :: [Test] ---hunitTests = --- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)] --- (flags, commands', unkFlags, ers) --- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"] --- in [TestLabel "very basic option parsing" $ TestList [ --- "getOpt flags" ~: "failed" ~: --- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag, --- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag] --- ~=? flags, --- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands', --- "getOpt unknown opts" ~: "failed" ~: --- ["--unknown1", "--unknown2"] ~=? unkFlags, --- "getOpt errors" ~: "failed" ~: [] ~=? ers], --- --- TestLabel "test location of various compilers" $ TestList --- ["configure parsing for prefix and compiler flag" ~: "failed" ~: --- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), [])) --- ~=? 
(parseArgs ["--prefix=/usr/local", "--"++name, "configure"]) --- | (name, comp) <- m], --- --- TestLabel "find the package tool" $ TestList --- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~: --- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), [])) --- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, --- "--with-compiler=/foo/comp", "configure"]) --- | (name, comp) <- m], --- --- TestLabel "simpler commands" $ TestList --- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag]) --- | (flag, flagCmd) <- [("build", BuildCmd), --- ("install", InstallCmd Nothing False), --- ("sdist", SDistCmd), --- ("register", RegisterCmd False)] --- ] --- ] - -{- Testing ideas: - * IO to look for hugs and hugs-pkg (which hugs, etc) - * quickCheck to test permutations of arguments - * what other options can we over-ride with a command-line flag? --} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/SrcDist.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/SrcDist.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/SrcDist.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,477 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.SrcDist --- Copyright : Simon Marlow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This handles the @sdist@ command. The module exports an 'sdist' action but --- also some of the phases that make it up so that other tools can use just the --- bits they need. In particular the preparation of the tree of files to go --- into the source tarball is separated from actually building the source --- tarball. --- --- The 'createArchive' action uses the external @tar@ program and assumes that --- it accepts the @-z@ flag. Neither of these assumptions are valid on Windows. --- The 'sdist' action now also does some distribution QA checks. - --- NOTE: FIX: we don't have a great way of testing this module, since --- we can't easily look inside a tarball once its created. 
- -module Distribution.Simple.SrcDist ( - -- * The top level action - sdist, - - -- ** Parts of 'sdist' - printPackageProblems, - prepareTree, - createArchive, - - -- ** Snapshots - prepareSnapshotTree, - snapshotPackage, - snapshotVersion, - dateToSnapshotNumber, - - -- * Extracting the source files - listPackageSources - - ) where - -import Distribution.PackageDescription hiding (Flag) -import Distribution.PackageDescription.Check hiding (doesFileExist) -import Distribution.Package -import Distribution.ModuleName -import qualified Distribution.ModuleName as ModuleName -import Distribution.Version -import Distribution.Simple.Utils -import Distribution.Simple.Setup -import Distribution.Simple.PreProcess -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Program -import Distribution.Text -import Distribution.Verbosity - -import Control.Monad(when, unless, forM) -import Data.Char (toLower) -import Data.List (partition, isPrefixOf) -import qualified Data.Map as Map -import Data.Maybe (isNothing, catMaybes) -import Data.Time (UTCTime, getCurrentTime, toGregorian, utctDay) -import System.Directory ( doesFileExist ) -import System.IO (IOMode(WriteMode), hPutStrLn, withFile) -import System.FilePath - ( (), (<.>), dropExtension, isAbsolute ) - --- |Create a source distribution. -sdist :: PackageDescription -- ^information from the tarball - -> Maybe LocalBuildInfo -- ^Information from configure - -> SDistFlags -- ^verbosity & snapshot - -> (FilePath -> FilePath) -- ^build prefix (temp dir) - -> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes) - -> IO () -sdist pkg mb_lbi flags mkTmpDir pps = - - -- When given --list-sources, just output the list of sources to a file. - case (sDistListSources flags) of - Flag path -> withFile path WriteMode $ \outHandle -> do - (ordinary, maybeExecutable) <- listPackageSources verbosity pkg pps - mapM_ (hPutStrLn outHandle) ordinary - mapM_ (hPutStrLn outHandle) maybeExecutable - notice verbosity $ "List of package sources written to file '" - ++ path ++ "'" - NoFlag -> do - -- do some QA - printPackageProblems verbosity pkg - - when (isNothing mb_lbi) $ - warn verbosity "Cannot run preprocessors. Run 'configure' command first." - - date <- getCurrentTime - let pkg' | snapshot = snapshotPackage date pkg - | otherwise = pkg - - case flagToMaybe (sDistDirectory flags) of - Just targetDir -> do - generateSourceDir targetDir pkg' - info verbosity $ "Source directory created: " ++ targetDir - - Nothing -> do - createDirectoryIfMissingVerbose verbosity True tmpTargetDir - withTempDirectory verbosity tmpTargetDir "sdist." $ \tmpDir -> do - let targetDir = tmpDir tarBallName pkg' - generateSourceDir targetDir pkg' - targzFile <- createArchive verbosity pkg' mb_lbi tmpDir targetPref - notice verbosity $ "Source tarball created: " ++ targzFile - - where - generateSourceDir targetDir pkg' = do - - setupMessage verbosity "Building source dist for" (packageId pkg') - prepareTree verbosity pkg' mb_lbi targetDir pps - when snapshot $ - overwriteSnapshotPackageDesc verbosity pkg' targetDir - - verbosity = fromFlag (sDistVerbosity flags) - snapshot = fromFlag (sDistSnapshot flags) - - distPref = fromFlag $ sDistDistPref flags - targetPref = distPref - tmpTargetDir = mkTmpDir distPref - --- | List all source files of a package. Returns a tuple of lists: first --- component is a list of ordinary files, second one is a list of those files --- that may be executable. 
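-- Illustrative sketch, not part of the diff: a small driver showing the shape
-- of the result of listPackageSources (defined just below); the wrapper
-- itself is hypothetical.
printPackageSources :: Verbosity -> PackageDescription -> IO ()
printPackageSources verbosity pkg = do
  (ordinary, maybeExec) <- listPackageSources verbosity pkg []
  putStrLn "ordinary source files:"
  mapM_ putStrLn ordinary
  putStrLn "possibly executable files (e.g. a configure script):"
  mapM_ putStrLn maybeExec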
-listPackageSources :: Verbosity -- ^ verbosity - -> PackageDescription -- ^ info from the cabal file - -> [PPSuffixHandler] -- ^ extra preprocessors (include - -- suffixes) - -> IO ([FilePath], [FilePath]) -listPackageSources verbosity pkg_descr0 pps = do - -- Call helpers that actually do all work. - ordinary <- listPackageSourcesOrdinary verbosity pkg_descr pps - maybeExecutable <- listPackageSourcesMaybeExecutable pkg_descr - return (ordinary, maybeExecutable) - where - pkg_descr = filterAutogenModule pkg_descr0 - --- | List those source files that may be executable (e.g. the configure script). -listPackageSourcesMaybeExecutable :: PackageDescription -> IO [FilePath] -listPackageSourcesMaybeExecutable pkg_descr = - -- Extra source files. - fmap concat . forM (extraSrcFiles pkg_descr) $ \fpath -> matchFileGlob fpath - --- | List those source files that should be copied with ordinary permissions. -listPackageSourcesOrdinary :: Verbosity - -> PackageDescription - -> [PPSuffixHandler] - -> IO [FilePath] -listPackageSourcesOrdinary verbosity pkg_descr pps = - fmap concat . sequence $ - [ - -- Library sources. - withAllLib $ \Library { exposedModules = modules, libBuildInfo = libBi } -> - allSourcesBuildInfo libBi pps modules - - -- Executables sources. - , fmap concat - . withAllExe $ \Executable { modulePath = mainPath, buildInfo = exeBi } -> do - biSrcs <- allSourcesBuildInfo exeBi pps [] - mainSrc <- findMainExeFile exeBi pps mainPath - return (mainSrc:biSrcs) - - -- Test suites sources. - , fmap concat - . withAllTest $ \t -> do - let bi = testBuildInfo t - case testInterface t of - TestSuiteExeV10 _ mainPath -> do - biSrcs <- allSourcesBuildInfo bi pps [] - srcMainFile <- do - ppFile <- findFileWithExtension (ppSuffixes pps) - (hsSourceDirs bi) (dropExtension mainPath) - case ppFile of - Nothing -> findFile (hsSourceDirs bi) mainPath - Just pp -> return pp - return (srcMainFile:biSrcs) - TestSuiteLibV09 _ m -> - allSourcesBuildInfo bi pps [m] - TestSuiteUnsupported tp -> die $ "Unsupported test suite type: " - ++ show tp - - -- Benchmarks sources. - , fmap concat - . withAllBenchmark $ \bm -> do - let bi = benchmarkBuildInfo bm - case benchmarkInterface bm of - BenchmarkExeV10 _ mainPath -> do - biSrcs <- allSourcesBuildInfo bi pps [] - srcMainFile <- do - ppFile <- findFileWithExtension (ppSuffixes pps) - (hsSourceDirs bi) (dropExtension mainPath) - case ppFile of - Nothing -> findFile (hsSourceDirs bi) mainPath - Just pp -> return pp - return (srcMainFile:biSrcs) - BenchmarkUnsupported tp -> die $ "Unsupported benchmark type: " - ++ show tp - - -- Data files. - , fmap concat - . forM (dataFiles pkg_descr) $ \filename -> - matchFileGlob (dataDir pkg_descr filename) - - -- Extra doc files. - , fmap concat - . forM (extraDocFiles pkg_descr) $ \ filename -> - matchFileGlob filename - - -- License file(s). - , return (licenseFiles pkg_descr) - - -- Install-include files. - , withAllLib $ \ l -> do - let lbi = libBuildInfo l - relincdirs = "." : filter (not.isAbsolute) (includeDirs lbi) - mapM (fmap snd . findIncludeFile relincdirs) (installIncludes lbi) - - -- Setup script, if it exists. - , fmap (maybe [] (\f -> [f])) $ findSetupFile "" - - -- The .cabal file itself. 
- , fmap (\d -> [d]) (defaultPackageDesc verbosity) - - ] - where - -- We have to deal with all libs and executables, so we have local - -- versions of these functions that ignore the 'buildable' attribute: - withAllLib action = maybe (return []) action (library pkg_descr) - withAllExe action = mapM action (executables pkg_descr) - withAllTest action = mapM action (testSuites pkg_descr) - withAllBenchmark action = mapM action (benchmarks pkg_descr) - - --- |Prepare a directory tree of source files. -prepareTree :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> Maybe LocalBuildInfo - -> FilePath -- ^source tree to populate - -> [PPSuffixHandler] -- ^extra preprocessors (includes suffixes) - -> IO () -prepareTree verbosity pkg_descr0 mb_lbi targetDir pps = do - -- If the package was configured then we can run platform-independent - -- pre-processors and include those generated files. - case mb_lbi of - Just lbi | not (null pps) -> do - let lbi' = lbi{ buildDir = targetDir buildDir lbi } - withAllComponentsInBuildOrder pkg_descr lbi' $ \c _ -> - preprocessComponent pkg_descr c lbi' True verbosity pps - _ -> return () - - (ordinary, mExecutable) <- listPackageSources verbosity pkg_descr0 pps - installOrdinaryFiles verbosity targetDir (zip (repeat []) ordinary) - installMaybeExecutableFiles verbosity targetDir (zip (repeat []) mExecutable) - maybeCreateDefaultSetupScript targetDir - - where - pkg_descr = filterAutogenModule pkg_descr0 - --- | Find the setup script file, if it exists. -findSetupFile :: FilePath -> IO (Maybe FilePath) -findSetupFile targetDir = do - hsExists <- doesFileExist setupHs - lhsExists <- doesFileExist setupLhs - if hsExists - then return (Just setupHs) - else if lhsExists - then return (Just setupLhs) - else return Nothing - where - setupHs = targetDir "Setup.hs" - setupLhs = targetDir "Setup.lhs" - --- | Create a default setup script in the target directory, if it doesn't exist. -maybeCreateDefaultSetupScript :: FilePath -> IO () -maybeCreateDefaultSetupScript targetDir = do - mSetupFile <- findSetupFile targetDir - case mSetupFile of - Just _setupFile -> return () - Nothing -> do - writeUTF8File (targetDir "Setup.hs") $ unlines [ - "import Distribution.Simple", - "main = defaultMain"] - --- | Find the main executable file. -findMainExeFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath -findMainExeFile exeBi pps mainPath = do - ppFile <- findFileWithExtension (ppSuffixes pps) (hsSourceDirs exeBi) - (dropExtension mainPath) - case ppFile of - Nothing -> findFile (hsSourceDirs exeBi) mainPath - Just pp -> return pp - --- | Given a list of include paths, try to find the include file named --- @f@. Return the name of the file and the full path, or exit with error if --- there's no such file. -findIncludeFile :: [FilePath] -> String -> IO (String, FilePath) -findIncludeFile [] f = die ("can't find include file " ++ f) -findIncludeFile (d:ds) f = do - let path = (d f) - b <- doesFileExist path - if b then return (f,path) else findIncludeFile ds f - --- | Remove the auto-generated module ('Paths_*') from 'exposed-modules' and --- 'other-modules'. 
-filterAutogenModule :: PackageDescription -> PackageDescription -filterAutogenModule pkg_descr0 = mapLib filterAutogenModuleLib $ - mapAllBuildInfo filterAutogenModuleBI pkg_descr0 - where - mapLib f pkg = pkg { library = fmap f (library pkg) } - filterAutogenModuleLib lib = lib { - exposedModules = filter (/=autogenModule) (exposedModules lib) - } - filterAutogenModuleBI bi = bi { - otherModules = filter (/=autogenModule) (otherModules bi) - } - autogenModule = autogenModuleName pkg_descr0 - --- | Prepare a directory tree of source files for a snapshot version. --- It is expected that the appropriate snapshot version has already been set --- in the package description, eg using 'snapshotPackage' or 'snapshotVersion'. --- -prepareSnapshotTree :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> Maybe LocalBuildInfo - -> FilePath -- ^source tree to populate - -> [PPSuffixHandler] -- ^extra preprocessors (includes - -- suffixes) - -> IO () -prepareSnapshotTree verbosity pkg mb_lbi targetDir pps = do - prepareTree verbosity pkg mb_lbi targetDir pps - overwriteSnapshotPackageDesc verbosity pkg targetDir - -overwriteSnapshotPackageDesc :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> FilePath -- ^source tree - -> IO () -overwriteSnapshotPackageDesc verbosity pkg targetDir = do - -- We could just writePackageDescription targetDescFile pkg_descr, - -- but that would lose comments and formatting. - descFile <- defaultPackageDesc verbosity - withUTF8FileContents descFile $ - writeUTF8File (targetDir descFile) - . unlines . map (replaceVersion (packageVersion pkg)) . lines - - where - replaceVersion :: Version -> String -> String - replaceVersion version line - | "version:" `isPrefixOf` map toLower line - = "version: " ++ display version - | otherwise = line - --- | Modifies a 'PackageDescription' by appending a snapshot number --- corresponding to the given date. --- -snapshotPackage :: UTCTime -> PackageDescription -> PackageDescription -snapshotPackage date pkg = - pkg { - package = pkgid { pkgVersion = snapshotVersion date (pkgVersion pkgid) } - } - where pkgid = packageId pkg - --- | Modifies a 'Version' by appending a snapshot number corresponding --- to the given date. --- -snapshotVersion :: UTCTime -> Version -> Version -snapshotVersion date version = version { - versionBranch = versionBranch version - ++ [dateToSnapshotNumber date] - } - --- | Given a date produce a corresponding integer representation. --- For example given a date @18/03/2008@ produce the number @20080318@. --- -dateToSnapshotNumber :: UTCTime -> Int -dateToSnapshotNumber date = case toGregorian (utctDay date) of - (year, month, day) -> - fromIntegral year * 10000 - + month * 100 - + day - --- | Callback type for use by sdistWith. -type CreateArchiveFun = Verbosity -- ^verbosity - -> PackageDescription -- ^info from cabal file - -> Maybe LocalBuildInfo -- ^info from configure - -> FilePath -- ^source tree to archive - -> FilePath -- ^name of archive to create - -> IO FilePath - --- | Create an archive from a tree of source files, and clean up the tree. 
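As a standalone aside (not part of the patch): the snapshot versioning above boils down to plain Gregorian date arithmetic. A minimal, self-contained sketch using only the time package; the names snapshotNumber/main are illustrative, not Cabal API:

import Data.Time (UTCTime (..), fromGregorian, toGregorian)

-- Same arithmetic as dateToSnapshotNumber above: 2008-03-18 becomes 20080318.
snapshotNumber :: UTCTime -> Int
snapshotNumber date = case toGregorian (utctDay date) of
  (year, month, day) -> fromIntegral year * 10000 + month * 100 + day

main :: IO ()
main = print (snapshotNumber (UTCTime (fromGregorian 2008 3 18) 0))  -- prints 20080318

Appending this number as an extra version component (as snapshotVersion does) keeps snapshot versions newer than the base release and ordered by date.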
-createArchive :: CreateArchiveFun -createArchive verbosity pkg_descr mb_lbi tmpDir targetPref = do - let tarBallFilePath = targetPref tarBallName pkg_descr <.> "tar.gz" - - (tarProg, _) <- requireProgram verbosity tarProgram - (maybe defaultProgramConfiguration withPrograms mb_lbi) - let formatOptSupported = maybe False (== "YES") $ - Map.lookup "Supports --format" - (programProperties tarProg) - runProgram verbosity tarProg $ - -- Hmm: I could well be skating on thinner ice here by using the -C option - -- (=> seems to be supported at least by GNU and *BSD tar) [The - -- prev. solution used pipes and sub-command sequences to set up the paths - -- correctly, which is problematic in a Windows setting.] - ["-czf", tarBallFilePath, "-C", tmpDir] - ++ (if formatOptSupported then ["--format", "ustar"] else []) - ++ [tarBallName pkg_descr] - return tarBallFilePath - --- | Given a buildinfo, return the names of all source files. -allSourcesBuildInfo :: BuildInfo - -> [PPSuffixHandler] -- ^ Extra preprocessors - -> [ModuleName] -- ^ Exposed modules - -> IO [FilePath] -allSourcesBuildInfo bi pps modules = do - let searchDirs = hsSourceDirs bi - sources <- fmap concat $ sequence $ - [ let file = ModuleName.toFilePath module_ - in findAllFilesWithExtension suffixes searchDirs file - >>= nonEmpty (notFound module_) return - | module_ <- modules ++ otherModules bi ] - bootFiles <- sequence - [ let file = ModuleName.toFilePath module_ - fileExts = ["hs-boot", "lhs-boot"] - in findFileWithExtension fileExts (hsSourceDirs bi) file - | module_ <- modules ++ otherModules bi ] - - return $ sources ++ catMaybes bootFiles ++ cSources bi ++ jsSources bi - - where - nonEmpty x _ [] = x - nonEmpty _ f xs = f xs - suffixes = ppSuffixes pps ++ ["hs", "lhs"] - notFound m = die $ "Error: Could not find module: " ++ display m - ++ " with any suffix: " ++ show suffixes - - -printPackageProblems :: Verbosity -> PackageDescription -> IO () -printPackageProblems verbosity pkg_descr = do - ioChecks <- checkPackageFiles pkg_descr "." - let pureChecks = checkConfiguredPackage pkg_descr - isDistError (PackageDistSuspicious _) = False - isDistError (PackageDistSuspiciousWarn _) = False - isDistError _ = True - (errors, warnings) = partition isDistError (pureChecks ++ ioChecks) - unless (null errors) $ - notice verbosity $ "Distribution quality errors:\n" - ++ unlines (map explanation errors) - unless (null warnings) $ - notice verbosity $ "Distribution quality warnings:\n" - ++ unlines (map explanation warnings) - unless (null errors) $ - notice verbosity - "Note: the public hackage server would reject this package." - ------------------------------------------------------------- - --- | The name of the tarball without extension --- -tarBallName :: PackageDescription -> String -tarBallName = display . 
packageId - -mapAllBuildInfo :: (BuildInfo -> BuildInfo) - -> (PackageDescription -> PackageDescription) -mapAllBuildInfo f pkg = pkg { - library = fmap mapLibBi (library pkg), - executables = fmap mapExeBi (executables pkg), - testSuites = fmap mapTestBi (testSuites pkg), - benchmarks = fmap mapBenchBi (benchmarks pkg) - } - where - mapLibBi lib = lib { libBuildInfo = f (libBuildInfo lib) } - mapExeBi exe = exe { buildInfo = f (buildInfo exe) } - mapTestBi t = t { testBuildInfo = f (testBuildInfo t) } - mapBenchBi bm = bm { benchmarkBuildInfo = f (benchmarkBuildInfo bm) } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/ExeV10.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/ExeV10.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/ExeV10.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/ExeV10.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,167 +0,0 @@ -module Distribution.Simple.Test.ExeV10 - ( runTest - ) where - -import Distribution.Compat.CreatePipe -import Distribution.Compat.Environment -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Build.PathsModule -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Text -import Distribution.Verbosity - -import Control.Concurrent (forkIO) -import Control.Monad ( unless, void, when ) -import System.Directory - ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist - , getCurrentDirectory, removeDirectoryRecursive ) -import System.Exit ( ExitCode(..) ) -import System.FilePath ( (), (<.>) ) -import System.IO ( hGetContents, hPutStr, stdout, stderr ) - -runTest :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> TestFlags - -> PD.TestSuite - -> IO TestSuiteLog -runTest pkg_descr lbi flags suite = do - let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi - way = guessWay lbi - tixDir_ = tixDir distPref way $ PD.testName suite - - pwd <- getCurrentDirectory - existingEnv <- getEnvironment - - let cmd = LBI.buildDir lbi PD.testName suite - PD.testName suite <.> exeExtension - -- Check that the test executable exists. - exists <- doesFileExist cmd - unless exists $ die $ "Error: Could not find test program \"" ++ cmd - ++ "\". Did you build the package first?" - - -- Remove old .tix files if appropriate. - unless (fromFlag $ testKeepTix flags) $ do - exists' <- doesDirectoryExist tixDir_ - when exists' $ removeDirectoryRecursive tixDir_ - - -- Create directory for HPC files. 
- createDirectoryIfMissing True tixDir_ - - -- Write summary notices indicating start of test suite - notice verbosity $ summarizeSuiteStart $ PD.testName suite - - (wOut, wErr, logText) <- case details of - Direct -> return (stdout, stderr, "") - _ -> do - (rOut, wOut) <- createPipe - - -- Read test executable's output lazily (returns immediately) - logText <- hGetContents rOut - -- Force the IO manager to drain the test output pipe - void $ forkIO $ length logText `seq` return () - - -- '--show-details=streaming': print the log output in another thread - when (details == Streaming) $ void $ forkIO $ hPutStr stdout logText - - return (wOut, wOut, logText) - - -- Run the test executable - let opts = map (testOption pkg_descr lbi suite) - (testOptions flags) - dataDirPath = pwd PD.dataDir pkg_descr - tixFile = pwd tixFilePath distPref way (PD.testName suite) - pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) - : existingEnv - shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv - - -- Add (DY)LD_LIBRARY_PATH if needed - shellEnv' <- if LBI.withDynExe lbi - then do let (Platform _ os) = LBI.hostPlatform lbi - clbi = LBI.getComponentLocalBuildInfo lbi - (LBI.CTestName (PD.testName suite)) - paths <- LBI.depLibraryPaths True False lbi clbi - return (addLibraryPath os paths shellEnv) - else return shellEnv - - exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv') - -- these handles are automatically closed - Nothing (Just wOut) (Just wErr) - - -- Generate TestSuiteLog from executable exit code and a machine- - -- readable test log. - let suiteLog = buildLog exit - - -- Write summary notice to log file indicating start of test suite - appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite - - -- Append contents of temporary log file to the final human- - -- readable log file - appendFile (logFile suiteLog) logText - - -- Write end-of-suite summary notice to log file - appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog - - -- Show the contents of the human-readable log file on the terminal - -- if there is a failure and/or detailed output is requested - let whenPrinting = when $ - ( details == Always || - details == Failures && not (suitePassed $ testLogs suiteLog)) - -- verbosity overrides show-details - && verbosity >= normal - whenPrinting $ putStr $ unlines $ lines logText - - -- Write summary notice to terminal indicating end of test suite - notice verbosity $ summarizeSuiteFinish suiteLog - - when isCoverageEnabled $ - markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite - - return suiteLog - where - distPref = fromFlag $ testDistPref flags - verbosity = fromFlag $ testVerbosity flags - details = fromFlag $ testShowDetails flags - testLogDir = distPref "test" - - buildLog exit = - let r = case exit of - ExitSuccess -> Pass - ExitFailure c -> Fail $ "exit code: " ++ show c - n = PD.testName suite - l = TestLog - { testName = n - , testOptionsReturned = [] - , testResult = r - } - in TestSuiteLog - { testSuiteName = n - , testLogs = l - , logFile = - testLogDir - testSuiteLogPath (fromFlag $ testHumanLog flags) - pkg_descr lbi n l - } - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
-testOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.TestSuite - -> PathTemplate - -> String -testOption pkg_descr lbi suite template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(TestSuiteNameVar, toPathTemplate $ PD.testName suite)] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/LibV09.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/LibV09.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/LibV09.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/LibV09.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,260 +0,0 @@ -module Distribution.Simple.Test.LibV09 - ( runTest - -- Test stub - , simpleTestStub - , stubFilePath, stubMain, stubName, stubWriteLog - , writeSimpleTestStub - ) where - -import Distribution.Compat.CreatePipe -import Distribution.Compat.Environment -import Distribution.Compat.Internal.TempFile -import Distribution.ModuleName -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Build.PathsModule -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Text -import Distribution.Verbosity - -import Control.Exception ( bracket ) -import Control.Monad ( when, unless ) -import Data.Maybe ( mapMaybe ) -import System.Directory - ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist - , getCurrentDirectory, removeDirectoryRecursive, removeFile - , setCurrentDirectory ) -import System.Exit ( ExitCode(..), exitWith ) -import System.FilePath ( (), (<.>) ) -import System.IO ( hClose, hGetContents, hPutStr ) -import System.Process (StdStream(..), waitForProcess) - -runTest :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> TestFlags - -> PD.TestSuite - -> IO TestSuiteLog -runTest pkg_descr lbi flags suite = do - let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi - way = guessWay lbi - - pwd <- getCurrentDirectory - existingEnv <- getEnvironment - - let cmd = LBI.buildDir lbi stubName suite - stubName suite <.> exeExtension - -- Check that the test executable exists. - exists <- doesFileExist cmd - unless exists $ die $ "Error: Could not find test program \"" ++ cmd - ++ "\". Did you build the package first?" - - -- Remove old .tix files if appropriate. - unless (fromFlag $ testKeepTix flags) $ do - let tDir = tixDir distPref way $ PD.testName suite - exists' <- doesDirectoryExist tDir - when exists' $ removeDirectoryRecursive tDir - - -- Create directory for HPC files. 
- createDirectoryIfMissing True $ tixDir distPref way $ PD.testName suite - - -- Write summary notices indicating start of test suite - notice verbosity $ summarizeSuiteStart $ PD.testName suite - - suiteLog <- bracket openCabalTemp deleteIfExists $ \tempLog -> do - - (rOut, wOut) <- createPipe - - -- Run test executable - (Just wIn, _, _, process) <- do - let opts = map (testOption pkg_descr lbi suite) $ testOptions flags - dataDirPath = pwd PD.dataDir pkg_descr - tixFile = pwd tixFilePath distPref way (PD.testName suite) - pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) - : existingEnv - shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] - ++ pkgPathEnv - -- Add (DY)LD_LIBRARY_PATH if needed - shellEnv' <- if LBI.withDynExe lbi - then do - let (Platform _ os) = LBI.hostPlatform lbi - clbi = LBI.getComponentLocalBuildInfo - lbi - (LBI.CTestName - (PD.testName suite)) - paths <- LBI.depLibraryPaths - True False lbi clbi - return (addLibraryPath os paths shellEnv) - else return shellEnv - createProcessWithEnv verbosity cmd opts Nothing (Just shellEnv') - -- these handles are closed automatically - CreatePipe (UseHandle wOut) (UseHandle wOut) - - hPutStr wIn $ show (tempLog, PD.testName suite) - hClose wIn - - -- Append contents of temporary log file to the final human- - -- readable log file - logText <- hGetContents rOut - -- Force the IO manager to drain the test output pipe - length logText `seq` return () - - exitcode <- waitForProcess process - unless (exitcode == ExitSuccess) $ do - debug verbosity $ cmd ++ " returned " ++ show exitcode - - -- Generate final log file name - let finalLogName l = testLogDir - testSuiteLogPath - (fromFlag $ testHumanLog flags) pkg_descr lbi - (testSuiteName l) (testLogs l) - -- Generate TestSuiteLog from executable exit code and a machine- - -- readable test log - suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . read) - $ readFile tempLog - - -- Write summary notice to log file indicating start of test suite - appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite - - appendFile (logFile suiteLog) logText - - -- Write end-of-suite summary notice to log file - appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog - - -- Show the contents of the human-readable log file on the terminal - -- if there is a failure and/or detailed output is requested - let details = fromFlag $ testShowDetails flags - whenPrinting = when $ (details > Never) - && (not (suitePassed $ testLogs suiteLog) || details == Always) - && verbosity >= normal - whenPrinting $ putStr $ unlines $ lines logText - - return suiteLog - - -- Write summary notice to terminal indicating end of test suite - notice verbosity $ summarizeSuiteFinish suiteLog - - when isCoverageEnabled $ - markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite - - return suiteLog - where - deleteIfExists file = do - exists <- doesFileExist file - when exists $ removeFile file - - testLogDir = distPref "test" - openCabalTemp = do - (f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log" - hClose h >> return f - - distPref = fromFlag $ testDistPref flags - verbosity = fromFlag $ testVerbosity flags - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
-testOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.TestSuite - -> PathTemplate - -> String -testOption pkg_descr lbi suite template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(TestSuiteNameVar, toPathTemplate $ PD.testName suite)] - --- Test stub ---------- - --- | The name of the stub executable associated with a library 'TestSuite'. -stubName :: PD.TestSuite -> FilePath -stubName t = PD.testName t ++ "Stub" - --- | The filename of the source file for the stub executable associated with a --- library 'TestSuite'. -stubFilePath :: PD.TestSuite -> FilePath -stubFilePath t = stubName t <.> "hs" - --- | Write the source file for a library 'TestSuite' stub executable. -writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub - -- is being created - -> FilePath -- ^ path to directory where stub source - -- should be located - -> IO () -writeSimpleTestStub t dir = do - createDirectoryIfMissing True dir - let filename = dir stubFilePath t - PD.TestSuiteLibV09 _ m = PD.testInterface t - writeFile filename $ simpleTestStub m - --- | Source code for library test suite stub executable -simpleTestStub :: ModuleName -> String -simpleTestStub m = unlines - [ "module Main ( main ) where" - , "import Distribution.Simple.Test.LibV09 ( stubMain )" - , "import " ++ show (disp m) ++ " ( tests )" - , "main :: IO ()" - , "main = stubMain tests" - ] - --- | Main function for test stubs. Once, it was written directly into the stub, --- but minimizing the amount of code actually in the stub maximizes the number --- of detectable errors when Cabal is compiled. -stubMain :: IO [Test] -> IO () -stubMain tests = do - (f, n) <- fmap read getContents - dir <- getCurrentDirectory - results <- tests >>= stubRunTests - setCurrentDirectory dir - stubWriteLog f n results - --- | The test runner used in library "TestSuite" stub executables. Runs a list --- of 'Test's. An executable calling this function is meant to be invoked as --- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog', --- provided by Cabal, is read from the standard input; it supplies the name of --- the test suite and the location of the machine-readable test suite log file. --- Human-readable log information is written to the standard output for capture --- by the calling Cabal process. -stubRunTests :: [Test] -> IO TestLogs -stubRunTests tests = do - logs <- mapM stubRunTests' tests - return $ GroupLogs "Default" logs - where - stubRunTests' (Test t) = do - l <- run t >>= finish - summarizeTest normal Always l - return l - where - finish (Finished result) = - return TestLog - { testName = name t - , testOptionsReturned = defaultOptions t - , testResult = result - } - finish (Progress _ next) = next >>= finish - stubRunTests' g@(Group {}) = do - logs <- mapM stubRunTests' $ groupTests g - return $ GroupLogs (groupName g) logs - stubRunTests' (ExtraOptions _ t) = stubRunTests' t - maybeDefaultOption opt = - maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt - defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst - --- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling --- Cabal process to read. 
-stubWriteLog :: FilePath -> String -> TestLogs -> IO () -stubWriteLog f n logs = do - let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f } - writeFile (logFile testLog) $ show testLog - when (suiteError logs) $ exitWith $ ExitFailure 2 - when (suiteFailed logs) $ exitWith $ ExitFailure 1 - exitWith ExitSuccess diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/Log.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/Log.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test/Log.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test/Log.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ -module Distribution.Simple.Test.Log - ( PackageLog(..) - , TestLogs(..) - , TestSuiteLog(..) - , countTestResults - , localPackageLog - , summarizePackage - , summarizeSuiteFinish, summarizeSuiteStart - , summarizeTest - , suiteError, suiteFailed, suitePassed - , testSuiteLogPath - ) where - -import Distribution.Package -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Compiler -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Verbosity - -import Control.Monad ( when ) -import Data.Char ( toUpper ) - --- | Logs all test results for a package, broken down first by test suite and --- then by test case. -data PackageLog = PackageLog - { package :: PackageId - , compiler :: CompilerId - , platform :: Platform - , testSuites :: [TestSuiteLog] - } - deriving (Read, Show, Eq) - --- | A 'PackageLog' with package and platform information specified. -localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog -localPackageLog pkg_descr lbi = PackageLog - { package = PD.package pkg_descr - , compiler = compilerId $ LBI.compiler lbi - , platform = LBI.hostPlatform lbi - , testSuites = [] - } - --- | Logs test suite results, itemized by test case. -data TestSuiteLog = TestSuiteLog - { testSuiteName :: String - , testLogs :: TestLogs - , logFile :: FilePath -- path to human-readable log file - } - deriving (Read, Show, Eq) - -data TestLogs - = TestLog - { testName :: String - , testOptionsReturned :: Options - , testResult :: Result - } - | GroupLogs String [TestLogs] - deriving (Read, Show, Eq) - --- | Count the number of pass, fail, and error test results in a 'TestLogs' --- tree. -countTestResults :: TestLogs - -> (Int, Int, Int) -- ^ Passes, fails, and errors, - -- respectively. -countTestResults = go (0, 0, 0) - where - go (p, f, e) (TestLog { testResult = r }) = - case r of - Pass -> (p + 1, f, e) - Fail _ -> (p, f + 1, e) - Error _ -> (p, f, e + 1) - go (p, f, e) (GroupLogs _ ts) = foldl go (p, f, e) ts - --- | From a 'TestSuiteLog', determine if the test suite passed. -suitePassed :: TestLogs -> Bool -suitePassed l = - case countTestResults l of - (_, 0, 0) -> True - _ -> False - --- | From a 'TestSuiteLog', determine if the test suite failed. -suiteFailed :: TestLogs -> Bool -suiteFailed l = - case countTestResults l of - (_, 0, _) -> False - _ -> True - --- | From a 'TestSuiteLog', determine if the test suite encountered errors. 
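For reference, a small usage sketch of the counting helpers defined in this module (illustrative only; it assumes the Cabal 1.24 library is available, and the example log tree is made up):

import Distribution.Simple.Test.Log (TestLogs (..), countTestResults, suitePassed)
import Distribution.TestSuite (Result (..))

exampleLogs :: TestLogs
exampleLogs = GroupLogs "example"
  [ TestLog { testName = "parses",  testOptionsReturned = [], testResult = Pass }
  , TestLog { testName = "rejects", testOptionsReturned = [], testResult = Fail "bad input" }
  ]

main :: IO ()
main = do
  print (countTestResults exampleLogs)  -- (1,1,0): one pass, one failure, no errors
  print (suitePassed exampleLogs)       -- False: any failure or error fails the suite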
-suiteError :: TestLogs -> Bool -suiteError l = - case countTestResults l of - (_, _, 0) -> False - _ -> True - -resultString :: TestLogs -> String -resultString l | suiteError l = "error" - | suiteFailed l = "fail" - | otherwise = "pass" - -testSuiteLogPath :: PathTemplate - -> PD.PackageDescription - -> LBI.LocalBuildInfo - -> String -- ^ test suite name - -> TestLogs -- ^ test suite results - -> FilePath -testSuiteLogPath template pkg_descr lbi test_name result = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) - ++ [ (TestSuiteNameVar, toPathTemplate test_name) - , (TestSuiteResultVar, toPathTemplate $ resultString result) - ] - --- | Print a summary to the console after all test suites have been run --- indicating the number of successful test suites and cases. Returns 'True' if --- all test suites passed and 'False' otherwise. -summarizePackage :: Verbosity -> PackageLog -> IO Bool -summarizePackage verbosity packageLog = do - let counts = map (countTestResults . testLogs) $ testSuites packageLog - (passed, failed, errors) = foldl1 addTriple counts - totalCases = passed + failed + errors - passedSuites = length - $ filter (suitePassed . testLogs) - $ testSuites packageLog - totalSuites = length $ testSuites packageLog - notice verbosity $ show passedSuites ++ " of " ++ show totalSuites - ++ " test suites (" ++ show passed ++ " of " - ++ show totalCases ++ " test cases) passed." - return $! passedSuites == totalSuites - where - addTriple (p1, f1, e1) (p2, f2, e2) = (p1 + p2, f1 + f2, e1 + e2) - --- | Print a summary of a single test case's result to the console, supressing --- output for certain verbosity or test filter levels. -summarizeTest :: Verbosity -> TestShowDetails -> TestLogs -> IO () -summarizeTest _ _ (GroupLogs {}) = return () -summarizeTest verbosity details t = - when shouldPrint $ notice verbosity $ "Test case " ++ testName t - ++ ": " ++ show (testResult t) - where shouldPrint = (details > Never) && (notPassed || details == Always) - notPassed = testResult t /= Pass - --- | Print a summary of the test suite's results on the console, suppressing --- output for certain verbosity or test filter levels. -summarizeSuiteFinish :: TestSuiteLog -> String -summarizeSuiteFinish testLog = unlines - [ "Test suite " ++ testSuiteName testLog ++ ": " ++ resStr - , "Test suite logged to: " ++ logFile testLog - ] - where resStr = map toUpper (resultString $ testLogs testLog) - -summarizeSuiteStart :: String -> String -summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Test.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Test.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,130 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Test --- Copyright : Thomas Tuegel 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into testing a built package. It performs the --- \"@.\/setup test@\" action. 
It runs test suites designated in the package --- description and reports on the results. - -module Distribution.Simple.Test - ( test - ) where - -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.UserHooks -import qualified Distribution.Simple.Test.ExeV10 as ExeV10 -import qualified Distribution.Simple.Test.LibV09 as LibV09 -import Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.TestSuite -import Distribution.Text - -import Control.Monad ( when, unless, filterM ) -import System.Directory - ( createDirectoryIfMissing, doesFileExist, getDirectoryContents - , removeFile ) -import System.Exit ( ExitCode(..), exitFailure, exitWith ) -import System.FilePath ( () ) - --- |Perform the \"@.\/setup test@\" action. -test :: Args -- ^positional command-line arguments - -> PD.PackageDescription -- ^information from the .cabal file - -> LBI.LocalBuildInfo -- ^information from the configure step - -> TestFlags -- ^flags sent to test - -> IO () -test args pkg_descr lbi flags = do - let verbosity = fromFlag $ testVerbosity flags - machineTemplate = fromFlag $ testMachineLog flags - distPref = fromFlag $ testDistPref flags - testLogDir = distPref "test" - testNames = args - pkgTests = PD.testSuites pkg_descr - enabledTests = [ t | t <- pkgTests - , PD.testEnabled t - , PD.buildable (PD.testBuildInfo t) ] - - doTest :: (PD.TestSuite, Maybe TestSuiteLog) -> IO TestSuiteLog - doTest (suite, _) = - case PD.testInterface suite of - PD.TestSuiteExeV10 _ _ -> - ExeV10.runTest pkg_descr lbi flags suite - - PD.TestSuiteLibV09 _ _ -> - LibV09.runTest pkg_descr lbi flags suite - - _ -> return TestSuiteLog - { testSuiteName = PD.testName suite - , testLogs = TestLog - { testName = PD.testName suite - , testOptionsReturned = [] - , testResult = - Error $ "No support for running test suite type: " - ++ show (disp $ PD.testType suite) - } - , logFile = "" - } - - when (not $ PD.hasTests pkg_descr) $ do - notice verbosity "Package has no test suites." - exitWith ExitSuccess - - when (PD.hasTests pkg_descr && null enabledTests) $ - die $ "No test suites enabled. Did you remember to configure with " - ++ "\'--enable-tests\'?" - - testsToRun <- case testNames of - [] -> return $ zip enabledTests $ repeat Nothing - names -> flip mapM names $ \tName -> - let testMap = zip enabledNames enabledTests - enabledNames = map PD.testName enabledTests - allNames = map PD.testName pkgTests - in case lookup tName testMap of - Just t -> return (t, Nothing) - _ | tName `elem` allNames -> - die $ "Package configured with test suite " - ++ tName ++ " disabled." - | otherwise -> die $ "no such test: " ++ tName - - createDirectoryIfMissing True testLogDir - - -- Delete ordinary files from test log directory. - getDirectoryContents testLogDir - >>= filterM doesFileExist . map (testLogDir ) - >>= mapM_ removeFile - - let totalSuites = length testsToRun - notice verbosity $ "Running " ++ show totalSuites ++ " test suites..." 
- suites <- mapM doTest testsToRun - let packageLog = (localPackageLog pkg_descr lbi) { testSuites = suites } - packageLogFile = () testLogDir - $ packageLogPath machineTemplate pkg_descr lbi - allOk <- summarizePackage verbosity packageLog - writeFile packageLogFile $ show packageLog - - let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi - when isCoverageEnabled $ - markupPackage verbosity lbi distPref (display $ PD.package pkg_descr) $ - map fst testsToRun - - unless allOk exitFailure - -packageLogPath :: PathTemplate - -> PD.PackageDescription - -> LBI.LocalBuildInfo - -> FilePath -packageLogPath template pkg_descr lbi = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/UHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/UHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/UHC.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/UHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,287 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.UHC --- Copyright : Andres Loeh 2009 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains most of the UHC-specific code for configuring, building --- and installing packages. --- --- Thanks to the authors of the other implementation-specific files, in --- particular to Isaac Jones, Duncan Coutts and Henning Thielemann, for --- inspiration on how to design this module. 
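The log-file names used by the test code above (testSuiteLogPath, packageLogPath, and the testOption helpers) are all produced by PathTemplate substitution from Distribution.Simple.InstallDirs. A minimal sketch of that mechanism, restricted to the test-suite variables so no package or compiler environment is needed (illustrative only, not part of the patch):

import Distribution.Simple.InstallDirs
         ( PathTemplateVariable (TestSuiteNameVar, TestSuiteResultVar)
         , fromPathTemplate, substPathTemplate, toPathTemplate )

main :: IO ()
main = do
  let template = toPathTemplate "$test-suite.$result.log"
      env      = [ (TestSuiteNameVar,   toPathTemplate "unit-tests")
                 , (TestSuiteResultVar, toPathTemplate "pass") ]
  -- Prints "unit-tests.pass.log"; variables missing from env are left unexpanded.
  putStrLn (fromPathTemplate (substPathTemplate env template))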
- -module Distribution.Simple.UHC ( - configure, getInstalledPackages, - buildLib, buildExe, installLib, registerPackage, inplacePackageDbPath - ) where - -import Distribution.Compat.ReadP -import Distribution.InstalledPackageInfo -import Distribution.Package hiding (installedUnitId) -import Distribution.PackageDescription -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler as C -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.PackageIndex -import Distribution.Simple.Program -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Verbosity -import Distribution.Version -import Distribution.System -import Language.Haskell.Extension - -import Control.Monad -import Data.List -import qualified Data.Map as M ( empty ) -import System.Directory -import System.FilePath - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) -configure verbosity hcPath _hcPkgPath conf = do - - (_uhcProg, uhcVersion, conf') <- - requireProgramVersion verbosity uhcProgram - (orLaterVersion (Version [1,0,2] [])) - (userMaybeSpecifyPath "uhc" hcPath conf) - - let comp = Compiler { - compilerId = CompilerId UHC uhcVersion, - compilerAbiTag = C.NoAbiTag, - compilerCompat = [], - compilerLanguages = uhcLanguages, - compilerExtensions = uhcLanguageExtensions, - compilerProperties = M.empty - } - compPlatform = Nothing - return (comp, compPlatform, conf') - -uhcLanguages :: [(Language, C.Flag)] -uhcLanguages = [(Haskell98, "")] - --- | The flags for the supported extensions. -uhcLanguageExtensions :: [(Extension, C.Flag)] -uhcLanguageExtensions = - let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), - (DisableExtension f, disable)] - alwaysOn = ("", ""{- wrong -}) - in concatMap doFlag - [(CPP, ("--cpp", ""{- wrong -})), - (PolymorphicComponents, alwaysOn), - (ExistentialQuantification, alwaysOn), - (ForeignFunctionInterface, alwaysOn), - (UndecidableInstances, alwaysOn), - (MultiParamTypeClasses, alwaysOn), - (Rank2Types, alwaysOn), - (PatternSignatures, alwaysOn), - (EmptyDataDecls, alwaysOn), - (ImplicitPrelude, ("", "--no-prelude"{- wrong -})), - (TypeOperators, alwaysOn), - (OverlappingInstances, alwaysOn), - (FlexibleInstances, alwaysOn)] - -getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packagedbs conf = do - let compilerid = compilerId comp - systemPkgDir <- getGlobalPackageDir verbosity conf - userPkgDir <- getUserPackageDir - let pkgDirs = nub (concatMap (packageDbPaths userPkgDir systemPkgDir) packagedbs) - -- putStrLn $ "pkgdirs: " ++ show pkgDirs - pkgs <- liftM (map addBuiltinVersions . 
concat) $ - mapM (\ d -> getDirectoryContents d >>= filterM (isPkgDir (display compilerid) d)) - pkgDirs - -- putStrLn $ "pkgs: " ++ show pkgs - let iPkgs = - map mkInstalledPackageInfo $ - concatMap parsePackage $ - pkgs - -- putStrLn $ "installed pkgs: " ++ show iPkgs - return (fromList iPkgs) - -getGlobalPackageDir :: Verbosity -> ProgramConfiguration -> IO FilePath -getGlobalPackageDir verbosity conf = do - output <- rawSystemProgramStdoutConf verbosity - uhcProgram conf ["--meta-pkgdir-system"] - -- call to "lines" necessary, because pkgdir contains an extra newline at the end - let [pkgdir] = lines output - return pkgdir - -getUserPackageDir :: IO FilePath -getUserPackageDir = do - homeDir <- getHomeDirectory - return $ homeDir ".cabal" "lib" -- TODO: determine in some other way - -packageDbPaths :: FilePath -> FilePath -> PackageDB -> [FilePath] -packageDbPaths user system db = - case db of - GlobalPackageDB -> [ system ] - UserPackageDB -> [ user ] - SpecificPackageDB path -> [ path ] - --- | Hack to add version numbers to UHC-built-in packages. This should sooner or --- later be fixed on the UHC side. -addBuiltinVersions :: String -> String -{- -addBuiltinVersions "uhcbase" = "uhcbase-1.0" -addBuiltinVersions "base" = "base-3.0" -addBuiltinVersions "array" = "array-0.2" --} -addBuiltinVersions xs = xs - --- | Name of the installed package config file. -installedPkgConfig :: String -installedPkgConfig = "installed-pkg-config" - --- | Check if a certain dir contains a valid package. Currently, we are --- looking only for the presence of an installed package configuration. --- TODO: Actually make use of the information provided in the file. -isPkgDir :: String -> String -> String -> IO Bool -isPkgDir _ _ ('.' : _) = return False -- ignore files starting with a . -isPkgDir c dir xs = do - let candidate = dir uhcPackageDir xs c - -- putStrLn $ "trying: " ++ candidate - doesFileExist (candidate installedPkgConfig) - -parsePackage :: String -> [PackageId] -parsePackage x = map fst (filter (\ (_,y) -> null y) (readP_to_S parse x)) - --- | Create a trivial package info from a directory name. -mkInstalledPackageInfo :: PackageId -> InstalledPackageInfo -mkInstalledPackageInfo p = emptyInstalledPackageInfo - { installedUnitId = mkLegacyUnitId p, - sourcePackageId = p } - - --- ----------------------------------------------------------------------------- --- Building - -buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - - systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) - userPkgDir <- getUserPackageDir - let runUhcProg = rawSystemProgramConf verbosity uhcProgram (withPrograms lbi) - let uhcArgs = -- set package name - ["--pkg-build=" ++ display (packageId pkg_descr)] - -- common flags lib/exe - ++ constructUHCCmdLine userPkgDir systemPkgDir - lbi (libBuildInfo lib) clbi - (buildDir lbi) verbosity - -- source files - -- suboptimal: UHC does not understand module names, so - -- we replace periods by path separators - ++ map (map (\ c -> if c == '.' 
                                  then pathSeparator else c))
-                 (map display (libModules lib))
-
-  runUhcProg uhcArgs
-
-  return ()
-
-buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo
-         -> Executable -> ComponentLocalBuildInfo -> IO ()
-buildExe verbosity _pkg_descr lbi exe clbi = do
-  systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi)
-  userPkgDir <- getUserPackageDir
-  let runUhcProg = rawSystemProgramConf verbosity uhcProgram (withPrograms lbi)
-  let uhcArgs =    -- common flags lib/exe
-                   constructUHCCmdLine userPkgDir systemPkgDir
-                                       lbi (buildInfo exe) clbi
-                                       (buildDir lbi) verbosity
-                   -- output file
-                ++ ["--output", buildDir lbi </> exeName exe]
-                   -- main source module
-                ++ [modulePath exe]
-  runUhcProg uhcArgs
-
-constructUHCCmdLine :: FilePath -> FilePath
-                    -> LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo
-                    -> FilePath -> Verbosity -> [String]
-constructUHCCmdLine user system lbi bi clbi odir verbosity =
-     -- verbosity
-     (if      verbosity >= deafening then ["-v4"]
-      else if verbosity >= normal    then []
-      else                                ["-v0"])
-  ++ hcOptions UHC bi
-     -- flags for language extensions
-  ++ languageToFlags (compiler lbi) (defaultLanguage bi)
-  ++ extensionsToFlags (compiler lbi) (usedExtensions bi)
-     -- packages
-  ++ ["--hide-all-packages"]
-  ++ uhcPackageDbOptions user system (withPackageDB lbi)
-  ++ ["--package=uhcbase"]
-  ++ ["--package=" ++ display (pkgName pkgid) | (_, pkgid) <- componentPackageDeps clbi ]
-     -- search paths
-  ++ ["-i" ++ odir]
-  ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)]
-  ++ ["-i" ++ autogenModulesDir lbi]
-     -- cpp options
-  ++ ["--optP=" ++ opt | opt <- cppOptions bi]
-     -- output path
-  ++ ["--odir=" ++ odir]
-     -- optimization
-  ++ (case withOptimization lbi of
-        NoOptimisation      -> ["-O0"]
-        NormalOptimisation  -> ["-O1"]
-        MaximumOptimisation -> ["-O2"])
-
-uhcPackageDbOptions :: FilePath -> FilePath -> PackageDBStack -> [String]
-uhcPackageDbOptions user system db = map (\ x -> "--pkg-searchpath=" ++ x)
-                                         (concatMap (packageDbPaths user system) db)
-
--- -----------------------------------------------------------------------------
--- Installation
-
-installLib :: Verbosity -> LocalBuildInfo
-           -> FilePath -> FilePath -> FilePath
-           -> PackageDescription -> Library -> ComponentLocalBuildInfo -> IO ()
-installLib verbosity _lbi targetDir _dynlibTargetDir builtDir pkg _library _clbi = do
-  -- putStrLn $ "dest: " ++ targetDir
-  -- putStrLn $ "built: " ++ builtDir
-  installDirectoryContents verbosity (builtDir </> display (packageId pkg)) targetDir
-
--- currently hard-coded UHC code generator and variant to use
-uhcTarget, uhcTargetVariant :: String
-uhcTarget        = "bc"
-uhcTargetVariant = "plain"
-
--- root directory for a package in UHC
-uhcPackageDir    :: String -> String -> FilePath
-uhcPackageSubDir ::           String -> FilePath
-uhcPackageDir    pkgid compilerid = pkgid </> uhcPackageSubDir compilerid
-uhcPackageSubDir       compilerid = compilerid </> uhcTarget </> uhcTargetVariant
-
--- -----------------------------------------------------------------------------
--- Registering
-
-registerPackage
-  :: Verbosity
-  -> Compiler
-  -> ProgramConfiguration
-  -> PackageDBStack
-  -> InstalledPackageInfo
-  -> IO ()
-registerPackage verbosity comp progdb packageDbs installedPkgInfo = do
-  dbdir <- case last packageDbs of
-    GlobalPackageDB       -> getGlobalPackageDir verbosity progdb
-    UserPackageDB         -> getUserPackageDir
-    SpecificPackageDB dir -> return dir
-  let pkgdir = dbdir </> uhcPackageDir (display pkgid) (display compilerid)
-  createDirectoryIfMissingVerbose verbosity True pkgdir
-  writeUTF8File (pkgdir </> installedPkgConfig)
-
(showInstalledPackageInfo installedPkgInfo) - where - pkgid = sourcePackageId installedPkgInfo - compilerid = compilerId comp - -inplacePackageDbPath :: LocalBuildInfo -> FilePath -inplacePackageDbPath lbi = buildDir lbi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/UserHooks.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/UserHooks.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/UserHooks.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/UserHooks.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,206 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.UserHooks --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines the API that @Setup.hs@ scripts can use to customise the way --- the build works. This module just defines the 'UserHooks' type. The --- predefined sets of hooks that implement the @Simple@, @Make@ and @Configure@ --- build systems are defined in "Distribution.Simple". The 'UserHooks' is a big --- record of functions. There are 3 for each action, a pre, post and the action --- itself. There are few other miscellaneous hooks, ones to extend the set of --- programs and preprocessors and one to override the function used to read the --- @.cabal@ file. --- --- This hooks type is widely agreed to not be the right solution. Partly this --- is because changes to it usually break custom @Setup.hs@ files and yet many --- internal code changes do require changes to the hooks. For example we cannot --- pass any extra parameters to most of the functions that implement the --- various phases because it would involve changing the types of the --- corresponding hook. At some point it will have to be replaced. - -module Distribution.Simple.UserHooks ( - UserHooks(..), Args, - emptyUserHooks, - ) where - -import Distribution.PackageDescription -import Distribution.Simple.Program -import Distribution.Simple.Command -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.LocalBuildInfo - -type Args = [String] - --- | Hooks allow authors to add specific functionality before and after a --- command is run, and also to specify additional preprocessors. --- --- * WARNING: The hooks interface is under rather constant flux as we try to --- understand users needs. Setup files that depend on this interface may --- break in future releases. -data UserHooks = UserHooks { - - -- | Used for @.\/setup test@ - runTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO (), - -- | Read the description file - readDesc :: IO (Maybe GenericPackageDescription), - -- | Custom preprocessors in addition to and overriding 'knownSuffixHandlers'. - hookedPreProcessors :: [ PPSuffixHandler ], - -- | These programs are detected at configure time. Arguments for them are - -- added to the configure command. - hookedPrograms :: [Program], - - -- |Hook to run before configure command - preConf :: Args -> ConfigFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during configure. 
- confHook :: (GenericPackageDescription, HookedBuildInfo) - -> ConfigFlags -> IO LocalBuildInfo, - -- |Hook to run after configure command - postConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before build command. Second arg indicates verbosity level. - preBuild :: Args -> BuildFlags -> IO HookedBuildInfo, - - -- |Over-ride this hook to get different behavior during build. - buildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO (), - -- |Hook to run after build command. Second arg indicates verbosity level. - postBuild :: Args -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before repl command. Second arg indicates verbosity level. - preRepl :: Args -> ReplFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during interpretation. - replHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> ReplFlags -> [String] -> IO (), - -- |Hook to run after repl command. Second arg indicates verbosity level. - postRepl :: Args -> ReplFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before clean command. Second arg indicates verbosity level. - preClean :: Args -> CleanFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during clean. - cleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO (), - -- |Hook to run after clean command. Second arg indicates verbosity level. - postClean :: Args -> CleanFlags -> PackageDescription -> () -> IO (), - - -- |Hook to run before copy command - preCopy :: Args -> CopyFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during copy. - copyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO (), - -- |Hook to run after copy command - postCopy :: Args -> CopyFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before install command - preInst :: Args -> InstallFlags -> IO HookedBuildInfo, - - -- |Over-ride this hook to get different behavior during install. - instHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO (), - -- |Hook to run after install command. postInst should be run - -- on the target, not on the build machine. - postInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before sdist command. Second arg indicates verbosity level. - preSDist :: Args -> SDistFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during sdist. - sDistHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> SDistFlags -> IO (), - -- |Hook to run after sdist command. Second arg indicates verbosity level. - postSDist :: Args -> SDistFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (), - - -- |Hook to run before register command - preReg :: Args -> RegisterFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during registration. - regHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), - -- |Hook to run after register command - postReg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before unregister command - preUnreg :: Args -> RegisterFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during unregistration. 
- unregHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), - -- |Hook to run after unregister command - postUnreg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before hscolour command. Second arg indicates verbosity level. - preHscolour :: Args -> HscolourFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during hscolour. - hscolourHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HscolourFlags -> IO (), - -- |Hook to run after hscolour command. Second arg indicates verbosity level. - postHscolour :: Args -> HscolourFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before haddock command. Second arg indicates verbosity level. - preHaddock :: Args -> HaddockFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during haddock. - haddockHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HaddockFlags -> IO (), - -- |Hook to run after haddock command. Second arg indicates verbosity level. - postHaddock :: Args -> HaddockFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before test command. - preTest :: Args -> TestFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during test. - testHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> TestFlags -> IO (), - -- |Hook to run after test command. - postTest :: Args -> TestFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before bench command. - preBench :: Args -> BenchmarkFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during bench. - benchHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> BenchmarkFlags -> IO (), - -- |Hook to run after bench command. - postBench :: Args -> BenchmarkFlags -> PackageDescription -> LocalBuildInfo -> IO () - } - -{-# DEPRECATED runTests "Please use the new testing interface instead!" #-} - --- |Empty 'UserHooks' which do nothing. -emptyUserHooks :: UserHooks -emptyUserHooks - = UserHooks { - runTests = ru, - readDesc = return Nothing, - hookedPreProcessors = [], - hookedPrograms = [], - preConf = rn, - confHook = (\_ _ -> return (error "No local build info generated during configure. 
Over-ride empty configure hook.")), - postConf = ru, - preBuild = rn', - buildHook = ru, - postBuild = ru, - preRepl = \_ _ -> return emptyHookedBuildInfo, - replHook = \_ _ _ _ _ -> return (), - postRepl = ru, - preClean = rn, - cleanHook = ru, - postClean = ru, - preCopy = rn, - copyHook = ru, - postCopy = ru, - preInst = rn, - instHook = ru, - postInst = ru, - preSDist = rn, - sDistHook = ru, - postSDist = ru, - preReg = rn, - regHook = ru, - postReg = ru, - preUnreg = rn, - unregHook = ru, - postUnreg = ru, - preHscolour = rn, - hscolourHook = ru, - postHscolour = ru, - preHaddock = rn, - haddockHook = ru, - postHaddock = ru, - preTest = rn', - testHook = \_ -> ru, - postTest = ru, - preBench = rn', - benchHook = \_ -> ru, - postBench = ru - } - where rn args _ = noExtraFlags args >> return emptyHookedBuildInfo - rn' _ _ = return emptyHookedBuildInfo - ru _ _ _ _ = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple/Utils.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1486 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface, ScopedTypeVariables #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Utils --- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- License : BSD3 --- portions Copyright (c) 2007, Galois Inc. --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A large and somewhat miscellaneous collection of utility functions used --- throughout the rest of the Cabal lib and in other tools that use the Cabal --- lib like @cabal-install@. It has a very simple set of logging actions. It --- has low level functions for running programs, a bunch of wrappers for --- various directory and file functions that do extra logging. 
- -module Distribution.Simple.Utils ( - cabalVersion, - - -- * logging and errors - die, - dieWithLocation, - topHandler, topHandlerWith, - warn, notice, setupMessage, info, debug, - debugNoWrap, chattyTry, - printRawCommandAndArgs, printRawCommandAndArgsAndEnv, - - -- * exceptions - handleDoesNotExist, - - -- * running programs - rawSystemExit, - rawSystemExitCode, - rawSystemExitWithEnv, - rawSystemStdout, - rawSystemStdInOut, - rawSystemIOWithEnv, - createProcessWithEnv, - maybeExit, - xargs, - findProgramLocation, - findProgramVersion, - - -- * copying files - smartCopySources, - createDirectoryIfMissingVerbose, - copyFileVerbose, - copyDirectoryRecursiveVerbose, - copyFiles, - copyFileTo, - - -- * installing files - installOrdinaryFile, - installExecutableFile, - installMaybeExecutableFile, - installOrdinaryFiles, - installExecutableFiles, - installMaybeExecutableFiles, - installDirectoryContents, - copyDirectoryRecursive, - - -- * File permissions - doesExecutableExist, - setFileOrdinary, - setFileExecutable, - - -- * file names - currentDir, - shortRelativePath, - dropExeExtension, - exeExtensions, - - -- * finding files - findFile, - findFirstFile, - findFileWithExtension, - findFileWithExtension', - findAllFilesWithExtension, - findModuleFile, - findModuleFiles, - getDirectoryContentsRecursive, - - -- * environment variables - isInSearchPath, - addLibraryPath, - - -- * simple file globbing - matchFileGlob, - matchDirFileGlob, - parseFileGlob, - FileGlob(..), - - -- * modification time - moreRecentFile, - existsAndIsMoreRecentThan, - - -- * temp files and dirs - TempFileOptions(..), defaultTempFileOptions, - withTempFile, withTempFileEx, - withTempDirectory, withTempDirectoryEx, - - -- * .cabal and .buildinfo files - defaultPackageDesc, - findPackageDesc, - tryFindPackageDesc, - defaultHookedPackageDesc, - findHookedPackageDesc, - - -- * reading and writing files safely - withFileContents, - writeFileAtomic, - rewriteFile, - - -- * Unicode - fromUTF8, - toUTF8, - readUTF8File, - withUTF8FileContents, - writeUTF8File, - normaliseLineEndings, - - -- * BOM - startsWithBOM, - fileHasBOM, - ignoreBOM, - - -- * generic utils - dropWhileEndLE, - takeWhileEndLE, - equating, - comparing, - isInfixOf, - intercalate, - lowercase, - listUnion, - listUnionRight, - ordNub, - ordNubRight, - safeTail, - wrapText, - wrapLine, - ) where - -import Distribution.Text -import Distribution.Package -import Distribution.ModuleName as ModuleName -import Distribution.System -import Distribution.Version -import Distribution.Compat.CopyFile -import Distribution.Compat.Internal.TempFile -import Distribution.Compat.Exception -import Distribution.Verbosity - -#if __GLASGOW_HASKELL__ < 711 -#ifdef VERSION_base -#define BOOTSTRAPPED_CABAL 1 -#endif -#else -#ifdef CURRENT_PACKAGE_KEY -#define BOOTSTRAPPED_CABAL 1 -#endif -#endif - -#ifdef BOOTSTRAPPED_CABAL -import qualified Paths_Cabal (version) -#endif - -import Control.Monad - ( when, unless, filterM ) -import Control.Concurrent.MVar - ( newEmptyMVar, putMVar, takeMVar ) -import Data.Bits - ( Bits((.|.), (.&.), shiftL, shiftR) ) -import Data.Char as Char - ( isDigit, toLower, chr, ord ) -import Data.Foldable - ( traverse_ ) -import Data.List - ( nub, unfoldr, intercalate, isInfixOf ) -import Data.Typeable - ( cast ) -import Data.Ord - ( comparing ) -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import qualified Data.Set as Set - -import System.Directory - ( Permissions(executable), getDirectoryContents, 
getPermissions - , doesDirectoryExist, doesFileExist, removeFile, findExecutable - , getModificationTime ) -import System.Environment - ( getProgName ) -import System.Exit - ( exitWith, ExitCode(..) ) -import System.FilePath - ( normalise, (), (<.>) - , getSearchPath, joinPath, takeDirectory, splitFileName - , splitExtension, splitExtensions, splitDirectories - , searchPathSeparator ) -import System.Directory - ( createDirectory, renameFile, removeDirectoryRecursive ) -import System.IO - ( Handle, openFile, openBinaryFile, openBinaryTempFileWithDefaultPermissions - , IOMode(ReadMode), hSetBinaryMode - , hGetContents, stderr, stdout, hPutStr, hFlush, hClose ) -import System.IO.Error as IO.Error - ( isDoesNotExistError, isAlreadyExistsError, isUserError - , ioeSetFileName, ioeGetFileName, ioeGetErrorString ) -import System.IO.Error - ( ioeSetLocation, ioeGetLocation ) -import System.IO.Unsafe - ( unsafeInterleaveIO ) -import qualified Control.Exception as Exception - -import Control.Exception (IOException, evaluate, throwIO) -import Control.Concurrent (forkIO) -import qualified System.Process as Process - ( CreateProcess(..), StdStream(..), proc) -import System.Process - ( ProcessHandle, createProcess, rawSystem, runInteractiveProcess - , showCommandForUser, waitForProcess) - --- We only get our own version number when we're building with ourselves -cabalVersion :: Version -#if defined(BOOTSTRAPPED_CABAL) -cabalVersion = Paths_Cabal.version -#elif defined(CABAL_VERSION) -cabalVersion = Version [CABAL_VERSION] [] -#else -cabalVersion = Version [1,9999] [] --used when bootstrapping -#endif - --- ---------------------------------------------------------------------------- --- Exception and logging utils - -dieWithLocation :: FilePath -> Maybe Int -> String -> IO a -dieWithLocation filename lineno msg = - ioError . setLocation lineno - . flip ioeSetFileName (normalise filename) - $ userError msg - where - setLocation Nothing err = err - setLocation (Just n) err = ioeSetLocation err (show n) - -die :: String -> IO a -die msg = ioError (userError msg) - -topHandlerWith :: forall a. 
(Exception.SomeException -> IO a) -> IO a -> IO a -topHandlerWith cont prog = - Exception.catches prog [ - Exception.Handler rethrowAsyncExceptions - , Exception.Handler rethrowExitStatus - , Exception.Handler handle - ] - where - -- Let async exceptions rise to the top for the default top-handler - rethrowAsyncExceptions :: Exception.AsyncException -> IO a - rethrowAsyncExceptions = throwIO - - -- ExitCode gets thrown asynchronously too, and we don't want to print it - rethrowExitStatus :: ExitCode -> IO a - rethrowExitStatus = throwIO - - -- Print all other exceptions - handle :: Exception.SomeException -> IO a - handle se = do - hFlush stdout - pname <- getProgName - hPutStr stderr (wrapText (message pname se)) - cont se - - message :: String -> Exception.SomeException -> String - message pname (Exception.SomeException se) = - case cast se :: Maybe Exception.IOException of - Just ioe | isUserError ioe -> - let file = case ioeGetFileName ioe of - Nothing -> "" - Just path -> path ++ location ++ ": " - location = case ioeGetLocation ioe of - l@(n:_) | Char.isDigit n -> ':' : l - _ -> "" - detail = ioeGetErrorString ioe - in pname ++ ": " ++ file ++ detail - _ -> -#if __GLASGOW_HASKELL__ < 710 - show se -#else - Exception.displayException se -#endif - -topHandler :: IO a -> IO a -topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog - --- | Non fatal conditions that may be indicative of an error or problem. --- --- We display these at the 'normal' verbosity level. --- -warn :: Verbosity -> String -> IO () -warn verbosity msg = - when (verbosity >= normal) $ do - hFlush stdout - hPutStr stderr (wrapText ("Warning: " ++ msg)) - --- | Useful status messages. --- --- We display these at the 'normal' verbosity level. --- --- This is for the ordinary helpful status messages that users see. Just --- enough information to know that things are working but not floods of detail. --- -notice :: Verbosity -> String -> IO () -notice verbosity msg = - when (verbosity >= normal) $ - putStr (wrapText msg) - -setupMessage :: Verbosity -> String -> PackageIdentifier -> IO () -setupMessage verbosity msg pkgid = - notice verbosity (msg ++ ' ': display pkgid ++ "...") - --- | More detail on the operation of some action. --- --- We display these messages when the verbosity level is 'verbose' --- -info :: Verbosity -> String -> IO () -info verbosity msg = - when (verbosity >= verbose) $ - putStr (wrapText msg) - --- | Detailed internal debugging information --- --- We display these messages when the verbosity level is 'deafening' --- -debug :: Verbosity -> String -> IO () -debug verbosity msg = - when (verbosity >= deafening) $ do - putStr (wrapText msg) - hFlush stdout - --- | A variant of 'debug' that doesn't perform the automatic line --- wrapping. Produces better output in some cases. -debugNoWrap :: Verbosity -> String -> IO () -debugNoWrap verbosity msg = - when (verbosity >= deafening) $ do - putStrLn msg - hFlush stdout - --- | Perform an IO action, catching any IO exceptions and printing an error --- if one occurs. -chattyTry :: String -- ^ a description of the action we were attempting - -> IO () -- ^ the action itself - -> IO () -chattyTry desc action = - catchIO action $ \exception -> - putStrLn $ "Error while " ++ desc ++ ": " ++ show exception - --- | Run an IO computation, returning @e@ if it raises a "file --- does not exist" error. 
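-- Illustrative sketch (added by the editor, not part of the removed
-- file): 'die' and 'topHandler' are intended to be used together in a
-- Setup.hs: 'die' throws a user error, and 'topHandler' catches it at
-- the top level, prints "<progname>: <message>" (wrapped to 79
-- columns) on stderr and exits with status 1.  The file name checked
-- here is hypothetical.

import Control.Monad (unless)
import System.Directory (doesFileExist)
import Distribution.Simple.Utils (topHandler, die)

main :: IO ()
main = topHandler $ do
  ok <- doesFileExist "configure.ac"
  unless ok $ die "configure.ac not found; run autoreconf first."
  putStrLn "ready to configure"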
-handleDoesNotExist :: a -> IO a -> IO a -handleDoesNotExist e = - Exception.handleJust - (\ioe -> if isDoesNotExistError ioe then Just ioe else Nothing) - (\_ -> return e) - --- ----------------------------------------------------------------------------- --- Helper functions - --- | Wraps text to the default line width. Existing newlines are preserved. -wrapText :: String -> String -wrapText = unlines - . map (intercalate "\n" - . map unwords - . wrapLine 79 - . words) - . lines - --- | Wraps a list of words to a list of lines of words of a particular width. -wrapLine :: Int -> [String] -> [[String]] -wrapLine width = wrap 0 [] - where wrap :: Int -> [String] -> [String] -> [[String]] - wrap 0 [] (w:ws) - | length w + 1 > width - = wrap (length w) [w] ws - wrap col line (w:ws) - | col + length w + 1 > width - = reverse line : wrap 0 [] (w:ws) - wrap col line (w:ws) - = let col' = col + length w + 1 - in wrap col' (w:line) ws - wrap _ [] [] = [] - wrap _ line [] = [reverse line] - --- ----------------------------------------------------------------------------- --- rawSystem variants -maybeExit :: IO ExitCode -> IO () -maybeExit cmd = do - res <- cmd - unless (res == ExitSuccess) $ exitWith res - -printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO () -printRawCommandAndArgs verbosity path args = - printRawCommandAndArgsAndEnv verbosity path args Nothing - -printRawCommandAndArgsAndEnv :: Verbosity - -> FilePath - -> [String] - -> Maybe [(String, String)] - -> IO () -printRawCommandAndArgsAndEnv verbosity path args menv - | verbosity >= deafening = do - traverse_ (putStrLn . ("Environment: " ++) . show) menv - print (path, args) - | verbosity >= verbose = putStrLn $ showCommandForUser path args - | otherwise = return () - - --- Exit with the same exit code if the subcommand fails -rawSystemExit :: Verbosity -> FilePath -> [String] -> IO () -rawSystemExit verbosity path args = do - printRawCommandAndArgs verbosity path args - hFlush stdout - exitcode <- rawSystem path args - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - exitWith exitcode - -rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode -rawSystemExitCode verbosity path args = do - printRawCommandAndArgs verbosity path args - hFlush stdout - exitcode <- rawSystem path args - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - return exitcode - -rawSystemExitWithEnv :: Verbosity - -> FilePath - -> [String] - -> [(String, String)] - -> IO () -rawSystemExitWithEnv verbosity path args env = do - printRawCommandAndArgsAndEnv verbosity path args (Just env) - hFlush stdout - (_,_,_,ph) <- createProcess $ - (Process.proc path args) { Process.env = (Just env) -#ifdef MIN_VERSION_process -#if MIN_VERSION_process(1,2,0) --- delegate_ctlc has been added in process 1.2, and we still want to be able to --- bootstrap GHC on systems not having that version - , Process.delegate_ctlc = True -#endif -#endif - } - exitcode <- waitForProcess ph - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - exitWith exitcode - --- Closes the passed in handles before returning. 
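-- Illustrative sketch (added by the editor, not part of the removed
-- file): the pure helpers above can be checked directly, e.g.
--
--   wrapLine 11 ["foo","bar","baz","quux"] == [["foo","bar"],["baz","quux"]]
--
-- and a typical consumer combines the logging and process helpers as
-- below; the program name and its arguments are hypothetical.

import Distribution.Simple.Utils (notice, rawSystemExit)
import Distribution.Verbosity (Verbosity)

regenerateTags :: Verbosity -> IO ()
regenerateTags verbosity = do
  notice verbosity "Regenerating TAGS file..."   -- wrapped via wrapText
  rawSystemExit verbosity "hasktags" ["--ctags", "."]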
-rawSystemIOWithEnv :: Verbosity - -> FilePath - -> [String] - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Maybe Handle -- ^ stdin - -> Maybe Handle -- ^ stdout - -> Maybe Handle -- ^ stderr - -> IO ExitCode -rawSystemIOWithEnv verbosity path args mcwd menv inp out err = do - (_,_,_,ph) <- createProcessWithEnv verbosity path args mcwd menv - (mbToStd inp) (mbToStd out) (mbToStd err) - exitcode <- waitForProcess ph - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - return exitcode - where - mbToStd :: Maybe Handle -> Process.StdStream - mbToStd = maybe Process.Inherit Process.UseHandle - -createProcessWithEnv :: - Verbosity - -> FilePath - -> [String] - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Process.StdStream -- ^ stdin - -> Process.StdStream -- ^ stdout - -> Process.StdStream -- ^ stderr - -> IO (Maybe Handle, Maybe Handle, Maybe Handle,ProcessHandle) - -- ^ Any handles created for stdin, stdout, or stderr - -- with 'CreateProcess', and a handle to the process. -createProcessWithEnv verbosity path args mcwd menv inp out err = do - printRawCommandAndArgsAndEnv verbosity path args menv - hFlush stdout - (inp', out', err', ph) <- createProcess $ - (Process.proc path args) { - Process.cwd = mcwd - , Process.env = menv - , Process.std_in = inp - , Process.std_out = out - , Process.std_err = err -#ifdef MIN_VERSION_process -#if MIN_VERSION_process(1,2,0) --- delegate_ctlc has been added in process 1.2, and we still want to be able to --- bootstrap GHC on systems not having that version - , Process.delegate_ctlc = True -#endif -#endif - } - return (inp', out', err', ph) - --- | Run a command and return its output. --- --- The output is assumed to be text in the locale encoding. --- -rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String -rawSystemStdout verbosity path args = do - (output, errors, exitCode) <- rawSystemStdInOut verbosity path args - Nothing Nothing - Nothing False - when (exitCode /= ExitSuccess) $ - die errors - return output - --- | Run a command and return its output, errors and exit status. Optionally --- also supply some input. Also provides control over whether the binary/text --- mode of the input and output. --- -rawSystemStdInOut :: Verbosity - -> FilePath -- ^ Program location - -> [String] -- ^ Arguments - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Maybe (String, Bool) -- ^ input text and binary mode - -> Bool -- ^ output in binary mode - -> IO (String, String, ExitCode) -- ^ output, errors, exit -rawSystemStdInOut verbosity path args mcwd menv input outputBinary = do - printRawCommandAndArgs verbosity path args - - Exception.bracket - (runInteractiveProcess path args mcwd menv) - (\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh) - $ \(inh,outh,errh,pid) -> do - - -- output mode depends on what the caller wants - hSetBinaryMode outh outputBinary - -- but the errors are always assumed to be text (in the current locale) - hSetBinaryMode errh False - - -- fork off a couple threads to pull on the stderr and stdout - -- so if the process writes to stderr we do not block. 
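-- Editor's addition (explanatory comment, not part of the removed
-- file): the reason both streams must be drained concurrently is that
-- the child writes into fixed-size OS pipe buffers.  If only stdout
-- were consumed, a child that is chatty on stderr could fill the
-- stderr pipe, block on write, and never produce the stdout being
-- waited for (and vice versa).  Reading both handles lazily and
-- forcing each one to the end in its own thread keeps both pipes
-- drained no matter which result the caller inspects first.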
- - err <- hGetContents errh - out <- hGetContents outh - - mv <- newEmptyMVar - let force str = (evaluate (length str) >> return ()) - `Exception.finally` putMVar mv () - --TODO: handle exceptions like text decoding. - _ <- forkIO $ force out - _ <- forkIO $ force err - - -- push all the input, if any - case input of - Nothing -> return () - Just (inputStr, inputBinary) -> do - -- input mode depends on what the caller wants - hSetBinaryMode inh inputBinary - hPutStr inh inputStr - hClose inh - --TODO: this probably fails if the process refuses to consume - -- or if it closes stdin (eg if it exits) - - -- wait for both to finish, in either order - takeMVar mv - takeMVar mv - - -- wait for the program to terminate - exitcode <- waitForProcess pid - unless (exitcode == ExitSuccess) $ - debug verbosity $ path ++ " returned " ++ show exitcode - ++ if null err then "" else - " with error message:\n" ++ err - ++ case input of - Nothing -> "" - Just ("", _) -> "" - Just (inp, _) -> "\nstdin input:\n" ++ inp - - return (out, err, exitcode) - - -{-# DEPRECATED findProgramLocation - "No longer used within Cabal, try findProgramOnSearchPath" #-} --- | Look for a program on the path. -findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath) -findProgramLocation verbosity prog = do - debug verbosity $ "searching for " ++ prog ++ " in path." - res <- findExecutable prog - case res of - Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") - Just path -> debug verbosity ("found " ++ prog ++ " at "++ path) - return res - - --- | Look for a program and try to find it's version number. It can accept --- either an absolute path or the name of a program binary, in which case we --- will look for the program on the path. --- -findProgramVersion :: String -- ^ version args - -> (String -> String) -- ^ function to select version - -- number from program output - -> Verbosity - -> FilePath -- ^ location - -> IO (Maybe Version) -findProgramVersion versionArg selectVersion verbosity path = do - str <- rawSystemStdout verbosity path [versionArg] - `catchIO` (\_ -> return "") - `catchExit` (\_ -> return "") - let version :: Maybe Version - version = simpleParse (selectVersion str) - case version of - Nothing -> warn verbosity $ "cannot determine version of " ++ path - ++ " :\n" ++ show str - Just v -> debug verbosity $ path ++ " is version " ++ display v - return version - - --- | Like the Unix xargs program. Useful for when we've got very long command --- lines that might overflow an OS limit on command line length and so you --- need to invoke a command multiple times to get all the args in. --- --- Use it with either of the rawSystem variants above. For example: --- --- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs --- -xargs :: Int -> ([String] -> IO ()) - -> [String] -> [String] -> IO () -xargs maxSize rawSystemFun fixedArgs bigArgs = - let fixedArgSize = sum (map length fixedArgs) + length fixedArgs - chunkSize = maxSize - fixedArgSize - in mapM_ (rawSystemFun . 
(fixedArgs ++)) (chunks chunkSize bigArgs) - - where chunks len = unfoldr $ \s -> - if null s then Nothing - else Just (chunk [] len s) - - chunk acc _ [] = (reverse acc,[]) - chunk acc len (s:ss) - | len' < len = chunk (s:acc) (len-len'-1) ss - | otherwise = (reverse acc, s:ss) - where len' = length s - --- ------------------------------------------------------------ --- * File Utilities --- ------------------------------------------------------------ - ----------------- --- Finding files - --- | Find a file by looking in a search path. The file path must match exactly. --- -findFile :: [FilePath] -- ^search locations - -> FilePath -- ^File Name - -> IO FilePath -findFile searchPath fileName = - findFirstFile id - [ path fileName - | path <- nub searchPath] - >>= maybe (die $ fileName ++ " doesn't exist") return - --- | Find a file by looking in a search path with one of a list of possible --- file extensions. The file base name should be given and it will be tried --- with each of the extensions in each element of the search path. --- -findFileWithExtension :: [String] - -> [FilePath] - -> FilePath - -> IO (Maybe FilePath) -findFileWithExtension extensions searchPath baseName = - findFirstFile id - [ path baseName <.> ext - | path <- nub searchPath - , ext <- nub extensions ] - -findAllFilesWithExtension :: [String] - -> [FilePath] - -> FilePath - -> IO [FilePath] -findAllFilesWithExtension extensions searchPath basename = - findAllFiles id - [ path basename <.> ext - | path <- nub searchPath - , ext <- nub extensions ] - --- | Like 'findFileWithExtension' but returns which element of the search path --- the file was found in, and the file path relative to that base directory. --- -findFileWithExtension' :: [String] - -> [FilePath] - -> FilePath - -> IO (Maybe (FilePath, FilePath)) -findFileWithExtension' extensions searchPath baseName = - findFirstFile (uncurry ()) - [ (path, baseName <.> ext) - | path <- nub searchPath - , ext <- nub extensions ] - -findFirstFile :: (a -> FilePath) -> [a] -> IO (Maybe a) -findFirstFile file = findFirst - where findFirst [] = return Nothing - findFirst (x:xs) = do exists <- doesFileExist (file x) - if exists - then return (Just x) - else findFirst xs - -findAllFiles :: (a -> FilePath) -> [a] -> IO [a] -findAllFiles file = filterM (doesFileExist . file) - --- | Finds the files corresponding to a list of Haskell module names. --- --- As 'findModuleFile' but for a list of module names. --- -findModuleFiles :: [FilePath] -- ^ build prefix (location of objects) - -> [String] -- ^ search suffixes - -> [ModuleName] -- ^ modules - -> IO [(FilePath, FilePath)] -findModuleFiles searchPath extensions moduleNames = - mapM (findModuleFile searchPath extensions) moduleNames - --- | Find the file corresponding to a Haskell module name. --- --- This is similar to 'findFileWithExtension'' but specialised to a module --- name. The function fails if the file corresponding to the module is missing. --- -findModuleFile :: [FilePath] -- ^ build prefix (location of objects) - -> [String] -- ^ search suffixes - -> ModuleName -- ^ module - -> IO (FilePath, FilePath) -findModuleFile searchPath extensions moduleName = - maybe notFound return - =<< findFileWithExtension' extensions searchPath - (ModuleName.toFilePath moduleName) - where - notFound = die $ "Error: Could not find module: " ++ display moduleName - ++ " with any suffix: " ++ show extensions - ++ " in the search path: " ++ show searchPath - --- | List all the files in a directory and all subdirectories. 
--- --- The order places files in sub-directories after all the files in their --- parent directories. The list is generated lazily so is not well defined if --- the source directory structure changes before the list is used. --- -getDirectoryContentsRecursive :: FilePath -> IO [FilePath] -getDirectoryContentsRecursive topdir = recurseDirectories [""] - where - recurseDirectories :: [FilePath] -> IO [FilePath] - recurseDirectories [] = return [] - recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do - (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir dir) - files' <- recurseDirectories (dirs' ++ dirs) - return (files ++ files') - - where - collect files dirs' [] = return (reverse files - ,reverse dirs') - collect files dirs' (entry:entries) | ignore entry - = collect files dirs' entries - collect files dirs' (entry:entries) = do - let dirEntry = dir entry - isDirectory <- doesDirectoryExist (topdir dirEntry) - if isDirectory - then collect files (dirEntry:dirs') entries - else collect (dirEntry:files) dirs' entries - - ignore ['.'] = True - ignore ['.', '.'] = True - ignore _ = False - ------------------------- --- Environment variables - --- | Is this directory in the system search path? -isInSearchPath :: FilePath -> IO Bool -isInSearchPath path = fmap (elem path) getSearchPath - -addLibraryPath :: OS - -> [FilePath] - -> [(String,String)] - -> [(String,String)] -addLibraryPath os paths = addEnv - where - pathsString = intercalate [searchPathSeparator] paths - ldPath = case os of - OSX -> "DYLD_LIBRARY_PATH" - _ -> "LD_LIBRARY_PATH" - - addEnv [] = [(ldPath,pathsString)] - addEnv ((key,value):xs) - | key == ldPath = - if null value - then (key,pathsString):xs - else (key,value ++ (searchPathSeparator:pathsString)):xs - | otherwise = (key,value):addEnv xs - ----------------- --- File globbing - -data FileGlob - -- | No glob at all, just an ordinary file - = NoGlob FilePath - - -- | dir prefix and extension, like @\"foo\/bar\/\*.baz\"@ corresponds to - -- @FileGlob \"foo\/bar\" \".baz\"@ - | FileGlob FilePath String - -parseFileGlob :: FilePath -> Maybe FileGlob -parseFileGlob filepath = case splitExtensions filepath of - (filepath', ext) -> case splitFileName filepath' of - (dir, "*") | '*' `elem` dir - || '*' `elem` ext - || null ext -> Nothing - | null dir -> Just (FileGlob "." ext) - | otherwise -> Just (FileGlob dir ext) - _ | '*' `elem` filepath -> Nothing - | otherwise -> Just (NoGlob filepath) - -matchFileGlob :: FilePath -> IO [FilePath] -matchFileGlob = matchDirFileGlob "." - -matchDirFileGlob :: FilePath -> FilePath -> IO [FilePath] -matchDirFileGlob dir filepath = case parseFileGlob filepath of - Nothing -> die $ "invalid file glob '" ++ filepath - ++ "'. Wildcards '*' are only allowed in place of the file" - ++ " name, not in the directory name or file extension." - ++ " If a wildcard is used it must be with an file extension." - Just (NoGlob filepath') -> return [filepath'] - Just (FileGlob dir' ext) -> do - files <- getDirectoryContents (dir dir') - case [ dir' file - | file <- files - , let (name, ext') = splitExtensions file - , not (null name) && ext' == ext ] of - [] -> die $ "filepath wildcard '" ++ filepath - ++ "' does not match any files." - matches -> return matches - --------------------- --- Modification time - --- | Compare the modification times of two files to see if the first is newer --- than the second. The first file must exist but the second need not. --- The expected use case is when the second file is generated using the first. 
--- In this use case, if the result is True then the second file is out of date. --- -moreRecentFile :: FilePath -> FilePath -> IO Bool -moreRecentFile a b = do - exists <- doesFileExist b - if not exists - then return True - else do tb <- getModificationTime b - ta <- getModificationTime a - return (ta > tb) - --- | Like 'moreRecentFile', but also checks that the first file exists. -existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool -existsAndIsMoreRecentThan a b = do - exists <- doesFileExist a - if not exists - then return False - else a `moreRecentFile` b - ----------------------------------------- --- Copying and installing files and dirs - --- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels. --- -createDirectoryIfMissingVerbose :: Verbosity - -> Bool -- ^ Create its parents too? - -> FilePath - -> IO () -createDirectoryIfMissingVerbose verbosity create_parents path0 - | create_parents = createDirs (parents path0) - | otherwise = createDirs (take 1 (parents path0)) - where - parents = reverse . scanl1 () . splitDirectories . normalise - - createDirs [] = return () - createDirs (dir:[]) = createDir dir throwIO - createDirs (dir:dirs) = - createDir dir $ \_ -> do - createDirs dirs - createDir dir throwIO - - createDir :: FilePath -> (IOException -> IO ()) -> IO () - createDir dir notExistHandler = do - r <- tryIO $ createDirectoryVerbose verbosity dir - case (r :: Either IOException ()) of - Right () -> return () - Left e - | isDoesNotExistError e -> notExistHandler e - -- createDirectory (and indeed POSIX mkdir) does not distinguish - -- between a dir already existing and a file already existing. So we - -- check for it here. Unfortunately there is a slight race condition - -- here, but we think it is benign. It could report an exception in - -- the case that the dir did exist but another process deletes the - -- directory and creates a file in its place before we can check - -- that the directory did indeed exist. - | isAlreadyExistsError e -> (do - isDir <- doesDirectoryExist dir - if isDir then return () - else throwIO e - ) `catchIO` ((\_ -> return ()) :: IOException -> IO ()) - | otherwise -> throwIO e - -createDirectoryVerbose :: Verbosity -> FilePath -> IO () -createDirectoryVerbose verbosity dir = do - info verbosity $ "creating " ++ dir - createDirectory dir - setDirOrdinary dir - --- | Copies a file without copying file permissions. The target file is created --- with default permissions. Any existing target file is replaced. --- --- At higher verbosity levels it logs an info message. --- -copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO () -copyFileVerbose verbosity src dest = do - info verbosity ("copy " ++ src ++ " to " ++ dest) - copyFile src dest - --- | Install an ordinary file. This is like a file copy but the permissions --- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\" --- while on Windows it uses the default permissions for the target directory. --- -installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO () -installOrdinaryFile verbosity src dest = do - info verbosity ("Installing " ++ src ++ " to " ++ dest) - copyOrdinaryFile src dest - --- | Install an executable file. This is like a file copy but the permissions --- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\" --- while on Windows it uses the default permissions for the target directory. 
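-- Illustrative sketch (added by the editor, not part of the removed
-- file): 'moreRecentFile' is the building block for simple
-- "is the generated file stale?" checks.  The template and output
-- paths below are hypothetical; the template (the first argument) is
-- assumed to exist.

import Control.Monad (when)
import Distribution.Simple.Utils (moreRecentFile, notice)
import Distribution.Verbosity (Verbosity)

regenerateIfStale :: Verbosity -> IO ()
regenerateIfStale verbosity = do
  stale <- "src/Config.hs.in" `moreRecentFile` "src/Config.hs"
  when stale $ do
    notice verbosity "src/Config.hs is out of date; regenerating..."
    readFile "src/Config.hs.in" >>= writeFile "src/Config.hs"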
--- -installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () -installExecutableFile verbosity src dest = do - info verbosity ("Installing executable " ++ src ++ " to " ++ dest) - copyExecutableFile src dest - --- | Install a file that may or not be executable, preserving permissions. -installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () -installMaybeExecutableFile verbosity src dest = do - perms <- getPermissions src - if (executable perms) --only checks user x bit - then installExecutableFile verbosity src dest - else installOrdinaryFile verbosity src dest - --- | Given a relative path to a file, copy it to the given directory, preserving --- the relative path and creating the parent directories if needed. -copyFileTo :: Verbosity -> FilePath -> FilePath -> IO () -copyFileTo verbosity dir file = do - let targetFile = dir file - createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile) - installOrdinaryFile verbosity file targetFile - --- | Common implementation of 'copyFiles', 'installOrdinaryFiles', --- 'installExecutableFiles' and 'installMaybeExecutableFiles'. -copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ()) - -> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -copyFilesWith doCopy verbosity targetDir srcFiles = do - - -- Create parent directories for everything - let dirs = map (targetDir ) . nub . map (takeDirectory . snd) $ srcFiles - mapM_ (createDirectoryIfMissingVerbose verbosity True) dirs - - -- Copy all the files - sequence_ [ let src = srcBase srcFile - dest = targetDir srcFile - in doCopy verbosity src dest - | (srcBase, srcFile) <- srcFiles ] - --- | Copies a bunch of files to a target directory, preserving the directory --- structure in the target location. The target directories are created if they --- do not exist. --- --- The files are identified by a pair of base directory and a path relative to --- that base. It is only the relative part that is preserved in the --- destination. --- --- For example: --- --- > copyFiles normal "dist/src" --- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")] --- --- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and --- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\". --- --- This operation is not atomic. Any IO failure during the copy (including any --- missing source files) leaves the target in an unknown state so it is best to --- use it with a freshly created directory so that it can be simply deleted if --- anything goes wrong. --- -copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -copyFiles = copyFilesWith copyFileVerbose - --- | This is like 'copyFiles' but uses 'installOrdinaryFile'. --- -installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -installOrdinaryFiles = copyFilesWith installOrdinaryFile - --- | This is like 'copyFiles' but uses 'installExecutableFile'. --- -installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] - -> IO () -installExecutableFiles = copyFilesWith installExecutableFile - --- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'. --- -installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] - -> IO () -installMaybeExecutableFiles = copyFilesWith installMaybeExecutableFile - --- | This installs all the files in a directory to a target location, --- preserving the directory layout. All the files are assumed to be ordinary --- rather than executable files. 
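-- Illustrative sketch (added by the editor, not part of the removed
-- file): a self-contained version of the example in the 'copyFiles'
-- Haddock above.  Only the part of each path relative to its base
-- directory is preserved under the target.

import Distribution.Simple.Utils (copyFiles)
import Distribution.Verbosity (normal)

copyForSdist :: IO ()
copyForSdist =
  copyFiles normal "dist/src"
    [ (""           , "src/Foo.hs")   -- ends up at dist/src/src/Foo.hs
    , ("dist/build/", "src/Bar.hs")   -- ends up at dist/src/src/Bar.hs
    ]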
--- -installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO () -installDirectoryContents verbosity srcDir destDir = do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] - --- | Recursively copy the contents of one directory to another path. -copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO () -copyDirectoryRecursive verbosity srcDir destDir = do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f) - | f <- srcFiles ] - -------------------- --- File permissions - --- | Like 'doesFileExist', but also checks that the file is executable. -doesExecutableExist :: FilePath -> IO Bool -doesExecutableExist f = do - exists <- doesFileExist f - if exists - then do perms <- getPermissions f - return (executable perms) - else return False - ---------------------------------- --- Deprecated file copy functions - -{-# DEPRECATED smartCopySources - "Use findModuleFiles and copyFiles or installOrdinaryFiles" #-} -smartCopySources :: Verbosity -> [FilePath] -> FilePath - -> [ModuleName] -> [String] -> IO () -smartCopySources verbosity searchPath targetDir moduleNames extensions = - findModuleFiles searchPath extensions moduleNames - >>= copyFiles verbosity targetDir - -{-# DEPRECATED copyDirectoryRecursiveVerbose - "You probably want installDirectoryContents instead" #-} -copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO () -copyDirectoryRecursiveVerbose verbosity srcDir destDir = do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - copyFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] - ---------------------------- --- Temporary files and dirs - --- | Advanced options for 'withTempFile' and 'withTempDirectory'. -data TempFileOptions = TempFileOptions { - optKeepTempFiles :: Bool -- ^ Keep temporary files? - } - -defaultTempFileOptions :: TempFileOptions -defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False } - --- | Use a temporary filename that doesn't already exist. --- -withTempFile :: FilePath -- ^ Temp dir to create the file in - -> String -- ^ File name template. See 'openTempFile'. - -> (FilePath -> Handle -> IO a) -> IO a -withTempFile tmpDir template action = - withTempFileEx defaultTempFileOptions tmpDir template action - --- | A version of 'withTempFile' that additionally takes a 'TempFileOptions' --- argument. -withTempFileEx :: TempFileOptions - -> FilePath -- ^ Temp dir to create the file in - -> String -- ^ File name template. See 'openTempFile'. - -> (FilePath -> Handle -> IO a) -> IO a -withTempFileEx opts tmpDir template action = - Exception.bracket - (openTempFile tmpDir template) - (\(name, handle) -> do hClose handle - unless (optKeepTempFiles opts) $ - handleDoesNotExist () . removeFile $ name) - (uncurry action) - --- | Create and use a temporary directory. --- --- Creates a new temporary directory inside the given directory, making use --- of the template. The temp directory is deleted after use. For example: --- --- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ... --- --- The @tmpDir@ will be a new subdirectory of the given directory, e.g. --- @src/sdist.342@. 
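-- Illustrative sketch (added by the editor, not part of the removed
-- file): 'withTempFile' hands the action a fresh file name and an open
-- handle, and deletes the file afterwards (unless 'optKeepTempFiles'
-- is set via the 'Ex' variant).  The directory "." and the script
-- contents are hypothetical.

import System.IO (hPutStr, hClose)
import Distribution.Simple.Utils (withTempFile)

stageScript :: IO ()
stageScript =
  withTempFile "." "script.sh" $ \tmpPath hnd -> do
    hPutStr hnd "#!/bin/sh\necho hello\n"
    hClose hnd
    putStrLn ("wrote temporary script to " ++ tmpPath)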
--- -withTempDirectory :: Verbosity - -> FilePath -> String -> (FilePath -> IO a) -> IO a -withTempDirectory verbosity targetDir template = - withTempDirectoryEx verbosity defaultTempFileOptions targetDir template - --- | A version of 'withTempDirectory' that additionally takes a --- 'TempFileOptions' argument. -withTempDirectoryEx :: Verbosity - -> TempFileOptions - -> FilePath -> String -> (FilePath -> IO a) -> IO a -withTempDirectoryEx _verbosity opts targetDir template = - Exception.bracket - (createTempDirectory targetDir template) - (unless (optKeepTempFiles opts) - . handleDoesNotExist () . removeDirectoryRecursive) - ------------------------------------ --- Safely reading and writing files - --- | Gets the contents of a file, but guarantee that it gets closed. --- --- The file is read lazily but if it is not fully consumed by the action then --- the remaining input is truncated and the file is closed. --- -withFileContents :: FilePath -> (String -> IO a) -> IO a -withFileContents name action = - Exception.bracket (openFile name ReadMode) hClose - (\hnd -> hGetContents hnd >>= action) - --- | Writes a file atomically. --- --- The file is either written successfully or an IO exception is raised and --- the original file is left unchanged. --- --- On windows it is not possible to delete a file that is open by a process. --- This case will give an IO exception but the atomic property is not affected. --- -writeFileAtomic :: FilePath -> BS.ByteString -> IO () -writeFileAtomic targetPath content = do - let (targetDir, targetFile) = splitFileName targetPath - Exception.bracketOnError - (openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp") - (\(tmpPath, handle) -> hClose handle >> removeFile tmpPath) - (\(tmpPath, handle) -> do - BS.hPut handle content - hClose handle - renameFile tmpPath targetPath) - --- | Write a file but only if it would have new content. If we would be writing --- the same as the existing content then leave the file as is so that we do not --- update the file's modification time. --- --- NB: the file is assumed to be ASCII-encoded. -rewriteFile :: FilePath -> String -> IO () -rewriteFile path newContent = - flip catchIO mightNotExist $ do - existingContent <- readFile path - _ <- evaluate (length existingContent) - unless (existingContent == newContent) $ - writeFileAtomic path (BS.Char8.pack newContent) - where - mightNotExist e | isDoesNotExistError e = writeFileAtomic path - (BS.Char8.pack newContent) - | otherwise = ioError e - --- | The path name that represents the current directory. --- In Unix, it's @\".\"@, but this is system-specific. --- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.) -currentDir :: FilePath -currentDir = "." - -shortRelativePath :: FilePath -> FilePath -> FilePath -shortRelativePath from to = - case dropCommonPrefix (splitDirectories from) (splitDirectories to) of - (stuff, path) -> joinPath (map (const "..") stuff ++ path) - where - dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a]) - dropCommonPrefix (x:xs) (y:ys) - | x == y = dropCommonPrefix xs ys - dropCommonPrefix xs ys = (xs,ys) - --- | Drop the extension if it's one of 'exeExtensions', or return the path --- unchanged. -dropExeExtension :: FilePath -> FilePath -dropExeExtension filepath = - case splitExtension filepath of - (filepath', extension) | extension `elem` exeExtensions -> filepath' - | otherwise -> filepath - --- | List of possible executable file extensions on the current platform. 
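-- Illustrative sketch (added by the editor, not part of the removed
-- file): 'rewriteFile' is the usual way for a Setup.hs to emit
-- generated sources.  The write goes through 'writeFileAtomic', and an
-- unchanged file keeps its modification time, so later timestamp
-- checks see no spurious change.  The output path and module name are
-- hypothetical and the directory is assumed to exist.

import Distribution.Simple.Utils (rewriteFile)

writeVersionModule :: String -> IO ()
writeVersionModule version =
  rewriteFile "dist/build/autogen/MyVersion.hs" $
    unlines [ "module MyVersion where"
            , "version :: String"
            , "version = " ++ show version
            ]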
-exeExtensions :: [String] -exeExtensions = case buildOS of - -- Possible improvement: on Windows, read the list of extensions from the - -- PATHEXT environment variable. By default PATHEXT is ".com; .exe; .bat; - -- .cmd". - Windows -> ["", "exe"] - Ghcjs -> ["", "exe"] - _ -> [""] - --- ------------------------------------------------------------ --- * Finding the description file --- ------------------------------------------------------------ - --- |Package description file (/pkgname/@.cabal@) -defaultPackageDesc :: Verbosity -> IO FilePath -defaultPackageDesc _verbosity = tryFindPackageDesc currentDir - --- |Find a package description file in the given directory. Looks for --- @.cabal@ files. -findPackageDesc :: FilePath -- ^Where to look - -> IO (Either String FilePath) -- ^.cabal -findPackageDesc dir - = do files <- getDirectoryContents dir - -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal - -- file we filter to exclude dirs and null base file names: - cabalFiles <- filterM doesFileExist - [ dir file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == ".cabal" ] - case cabalFiles of - [] -> return (Left noDesc) - [cabalFile] -> return (Right cabalFile) - multiple -> return (Left $ multiDesc multiple) - - where - noDesc :: String - noDesc = "No cabal file found.\n" - ++ "Please create a package description file .cabal" - - multiDesc :: [String] -> String - multiDesc l = "Multiple cabal files found.\n" - ++ "Please use only one of: " - ++ intercalate ", " l - --- |Like 'findPackageDesc', but calls 'die' in case of error. -tryFindPackageDesc :: FilePath -> IO FilePath -tryFindPackageDesc dir = either die return =<< findPackageDesc dir - --- |Optional auxiliary package information file (/pkgname/@.buildinfo@) -defaultHookedPackageDesc :: IO (Maybe FilePath) -defaultHookedPackageDesc = findHookedPackageDesc currentDir - --- |Find auxiliary package information in the given directory. --- Looks for @.buildinfo@ files. -findHookedPackageDesc - :: FilePath -- ^Directory to search - -> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present -findHookedPackageDesc dir = do - files <- getDirectoryContents dir - buildInfoFiles <- filterM doesFileExist - [ dir file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == buildInfoExt ] - case buildInfoFiles of - [] -> return Nothing - [f] -> return (Just f) - _ -> die ("Multiple files with extension " ++ buildInfoExt) - -buildInfoExt :: String -buildInfoExt = ".buildinfo" - --- ------------------------------------------------------------ --- * Unicode stuff --- ------------------------------------------------------------ - --- This is a modification of the UTF8 code from gtk2hs and the --- utf8-string package. - -fromUTF8 :: String -> String -fromUTF8 [] = [] -fromUTF8 (c:cs) - | c <= '\x7F' = c : fromUTF8 cs - | c <= '\xBF' = replacementChar : fromUTF8 cs - | c <= '\xDF' = twoBytes c cs - | c <= '\xEF' = moreBytes 3 0x800 cs (ord c .&. 0xF) - | c <= '\xF7' = moreBytes 4 0x10000 cs (ord c .&. 0x7) - | c <= '\xFB' = moreBytes 5 0x200000 cs (ord c .&. 0x3) - | c <= '\xFD' = moreBytes 6 0x4000000 cs (ord c .&. 0x1) - | otherwise = replacementChar : fromUTF8 cs - where - twoBytes c0 (c1:cs') - | ord c1 .&. 0xC0 == 0x80 - = let d = ((ord c0 .&. 0x1F) `shiftL` 6) - .|. (ord c1 .&. 
0x3F) - in if d >= 0x80 - then chr d : fromUTF8 cs' - else replacementChar : fromUTF8 cs' - twoBytes _ cs' = replacementChar : fromUTF8 cs' - - moreBytes :: Int -> Int -> [Char] -> Int -> [Char] - moreBytes 1 overlong cs' acc - | overlong <= acc && acc <= 0x10FFFF - && (acc < 0xD800 || 0xDFFF < acc) - && (acc < 0xFFFE || 0xFFFF < acc) - = chr acc : fromUTF8 cs' - - | otherwise - = replacementChar : fromUTF8 cs' - - moreBytes byteCount overlong (cn:cs') acc - | ord cn .&. 0xC0 == 0x80 - = moreBytes (byteCount-1) overlong cs' - ((acc `shiftL` 6) .|. ord cn .&. 0x3F) - - moreBytes _ _ cs' _ - = replacementChar : fromUTF8 cs' - - replacementChar = '\xfffd' - -toUTF8 :: String -> String -toUTF8 [] = [] -toUTF8 (c:cs) - | c <= '\x07F' = c - : toUTF8 cs - | c <= '\x7FF' = chr (0xC0 .|. (w `shiftR` 6)) - : chr (0x80 .|. (w .&. 0x3F)) - : toUTF8 cs - | c <= '\xFFFF'= chr (0xE0 .|. (w `shiftR` 12)) - : chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F)) - : chr (0x80 .|. (w .&. 0x3F)) - : toUTF8 cs - | otherwise = chr (0xf0 .|. (w `shiftR` 18)) - : chr (0x80 .|. ((w `shiftR` 12) .&. 0x3F)) - : chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F)) - : chr (0x80 .|. (w .&. 0x3F)) - : toUTF8 cs - where w = ord c - --- | Whether BOM is at the beginning of the input -startsWithBOM :: String -> Bool -startsWithBOM ('\xFEFF':_) = True -startsWithBOM _ = False - --- | Check whether a file has Unicode byte order mark (BOM). -fileHasBOM :: FilePath -> IO Bool -fileHasBOM f = fmap (startsWithBOM . fromUTF8) - . hGetContents =<< openBinaryFile f ReadMode - --- | Ignore a Unicode byte order mark (BOM) at the beginning of the input --- -ignoreBOM :: String -> String -ignoreBOM ('\xFEFF':string) = string -ignoreBOM string = string - --- | Reads a UTF8 encoded text file as a Unicode String --- --- Reads lazily using ordinary 'readFile'. --- -readUTF8File :: FilePath -> IO String -readUTF8File f = fmap (ignoreBOM . fromUTF8) - . hGetContents =<< openBinaryFile f ReadMode - --- | Reads a UTF8 encoded text file as a Unicode String --- --- Same behaviour as 'withFileContents'. --- -withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a -withUTF8FileContents name action = - Exception.bracket - (openBinaryFile name ReadMode) - hClose - (\hnd -> hGetContents hnd >>= action . ignoreBOM . fromUTF8) - --- | Writes a Unicode String as a UTF8 encoded text file. --- --- Uses 'writeFileAtomic', so provides the same guarantees. --- -writeUTF8File :: FilePath -> String -> IO () -writeUTF8File path = writeFileAtomic path . BS.Char8.pack . toUTF8 - --- | Fix different systems silly line ending conventions -normaliseLineEndings :: String -> String -normaliseLineEndings [] = [] -normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows -normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X -normaliseLineEndings ( c :s) = c : normaliseLineEndings s - --- ------------------------------------------------------------ --- * Common utils --- ------------------------------------------------------------ - --- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but --- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this --- version is that the one in "Data.List" is strict in elements, but spine-lazy, --- while this one is spine-strict but lazy in elements. That's what @LE@ stands --- for - "lazy in elements". 
--- --- Example: --- --- @ --- > tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1] --- *** Exception: Prelude.undefined --- > tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1] --- [5,4,3] --- > take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined] --- [5,4,3] --- > take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined] --- *** Exception: Prelude.undefined --- @ -dropWhileEndLE :: (a -> Bool) -> [a] -> [a] -dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) [] - --- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but --- is usually faster (as well as being easier to read). -takeWhileEndLE :: (a -> Bool) -> [a] -> [a] -takeWhileEndLE p = fst . foldr go ([], False) - where - go x (rest, done) - | not done && p x = (x:rest, False) - | otherwise = (rest, True) - --- | Like "Data.List.nub", but has @O(n log n)@ complexity instead of --- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's --- package. -ordNub :: (Ord a) => [a] -> [a] -ordNub l = go Set.empty l - where - go _ [] = [] - go s (x:xs) = if x `Set.member` s then go s xs - else x : go (Set.insert x s) xs - --- | Like "Data.List.union", but has @O(n log n)@ complexity instead of --- @O(n^2)@. -listUnion :: (Ord a) => [a] -> [a] -> [a] -listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b) - where - aSet = Set.fromList a - --- | A right-biased version of 'ordNub'. --- --- Example: --- --- @ --- > ordNub [1,2,1] --- [1,2] --- > ordNubRight [1,2,1] --- [2,1] --- @ -ordNubRight :: (Ord a) => [a] -> [a] -ordNubRight = fst . foldr go ([], Set.empty) - where - go x p@(l, s) = if x `Set.member` s then p - else (x:l, Set.insert x s) - --- | A right-biased version of 'listUnion'. --- --- Example: --- --- @ --- > listUnion [1,2,3,4,3] [2,1,1] --- [1,2,3,4,3] --- > listUnionRight [1,2,3,4,3] [2,1,1] --- [4,3,2,1,1] --- @ -listUnionRight :: (Ord a) => [a] -> [a] -> [a] -listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b - where - bSet = Set.fromList b - --- | A total variant of 'tail'. -safeTail :: [a] -> [a] -safeTail [] = [] -safeTail (_:xs) = xs - -equating :: Eq a => (b -> a) -> b -> b -> Bool -equating p x y = p x == p y - -lowercase :: String -> String -lowercase = map Char.toLower diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Simple.hs 2016-11-07 10:02:23.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Simple.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,695 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the command line front end to the Simple build system. When given --- the parsed command-line args and package information, is able to perform --- basic commands like configure, build, install, register, etc. --- --- This module exports the main functions that Setup.hs scripts use. It --- re-exports the 'UserHooks' type, the standard entry points like --- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of --- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own --- behaviour. 
--- --- This module isn't called \"Simple\" because it's simple. Far from --- it. It's called \"Simple\" because it does complicated things to --- simple software. --- --- The original idea was that there could be different build systems that all --- presented the same compatible command line interfaces. There is still a --- "Distribution.Make" system but in practice no packages use it. - -{- -Work around this warning: -libraries/Cabal/Distribution/Simple.hs:78:0: - Warning: In the use of `runTests' - (imported from Distribution.Simple.UserHooks): - Deprecated: "Please use the new testing interface instead!" --} -{-# OPTIONS_GHC -fno-warn-deprecations #-} - -module Distribution.Simple ( - module Distribution.Package, - module Distribution.Version, - module Distribution.License, - module Distribution.Simple.Compiler, - module Language.Haskell.Extension, - -- * Simple interface - defaultMain, defaultMainNoRead, defaultMainArgs, - -- * Customization - UserHooks(..), Args, - defaultMainWithHooks, defaultMainWithHooksArgs, - -- ** Standard sets of hooks - simpleUserHooks, - autoconfUserHooks, - defaultUserHooks, emptyUserHooks, - -- ** Utils - defaultHookedPackageDesc - ) where - --- local -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.UserHooks -import Distribution.Package -import Distribution.PackageDescription hiding (Flag) -import Distribution.PackageDescription.Parse -import Distribution.PackageDescription.Configuration -import Distribution.Simple.Program -import Distribution.Simple.Program.Db -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.Command - -import Distribution.Simple.Build -import Distribution.Simple.SrcDist -import Distribution.Simple.Register - -import Distribution.Simple.Configure - -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Bench -import Distribution.Simple.BuildPaths -import Distribution.Simple.Test -import Distribution.Simple.Install -import Distribution.Simple.Haddock -import Distribution.Simple.Utils -import Distribution.Utils.NubList -import Distribution.Verbosity -import Language.Haskell.Extension -import Distribution.Version -import Distribution.License -import Distribution.Text - --- Base -import System.Environment (getArgs, getProgName) -import System.Directory (removeFile, doesFileExist - ,doesDirectoryExist, removeDirectoryRecursive) -import System.Exit (exitWith,ExitCode(..)) -import System.FilePath (searchPathSeparator) -import Distribution.Compat.Environment (getEnvironment) -import Distribution.Compat.GetShortPathName (getShortPathName) - -import Control.Monad (when) -import Data.Foldable (traverse_) -import Data.List (unionBy, nub, (\\)) - --- | A simple implementation of @main@ for a Cabal setup script. --- It reads the package description file using IO, and performs the --- action specified on the command line. -defaultMain :: IO () -defaultMain = getArgs >>= defaultMainHelper simpleUserHooks - --- | A version of 'defaultMain' that is passed the command line --- arguments, rather than getting them from the environment. -defaultMainArgs :: [String] -> IO () -defaultMainArgs = defaultMainHelper simpleUserHooks - --- | A customizable version of 'defaultMain'. -defaultMainWithHooks :: UserHooks -> IO () -defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks - --- | A customizable version of 'defaultMain' that also takes the command --- line arguments. 
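-- Illustrative sketch (added by the editor, not part of the removed
-- file): for a build-type "Simple" package none of the hooks machinery
-- is needed; Setup.hs is just 'defaultMain'.  'defaultMainArgs' is the
-- same entry point with the arguments supplied explicitly, which is
-- convenient when driving Setup from another program or a test; the
-- argument list below is only an example.

import Distribution.Simple (defaultMain, defaultMainArgs)

main :: IO ()
main = defaultMain

-- Equivalent of running "runghc Setup.hs configure --user":
configureUser :: IO ()
configureUser = defaultMainArgs ["configure", "--user"]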
-defaultMainWithHooksArgs :: UserHooks -> [String] -> IO () -defaultMainWithHooksArgs = defaultMainHelper - --- | Like 'defaultMain', but accepts the package description as input --- rather than using IO to read it. -defaultMainNoRead :: GenericPackageDescription -> IO () -defaultMainNoRead pkg_descr = - getArgs >>= - defaultMainHelper simpleUserHooks { readDesc = return (Just pkg_descr) } - -defaultMainHelper :: UserHooks -> Args -> IO () -defaultMainHelper hooks args = topHandler $ - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (flags, commandParse) -> - case commandParse of - _ | fromFlag (globalVersion flags) -> printVersion - | fromFlag (globalNumericVersion flags) -> printNumericVersion - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo action -> action - - where - printHelp help = getProgName >>= putStr . help - printOptionsList = putStr . unlines - printErrors errs = do - putStr (intercalate "\n" errs) - exitWith (ExitFailure 1) - printNumericVersion = putStrLn $ display cabalVersion - printVersion = putStrLn $ "Cabal library version " - ++ display cabalVersion - - progs = addKnownPrograms (hookedPrograms hooks) defaultProgramConfiguration - commands = - [configureCommand progs `commandAddAction` \fs as -> - configureAction hooks fs as >> return () - ,buildCommand progs `commandAddAction` buildAction hooks - ,replCommand progs `commandAddAction` replAction hooks - ,installCommand `commandAddAction` installAction hooks - ,copyCommand `commandAddAction` copyAction hooks - ,haddockCommand `commandAddAction` haddockAction hooks - ,cleanCommand `commandAddAction` cleanAction hooks - ,sdistCommand `commandAddAction` sdistAction hooks - ,hscolourCommand `commandAddAction` hscolourAction hooks - ,registerCommand `commandAddAction` registerAction hooks - ,unregisterCommand `commandAddAction` unregisterAction hooks - ,testCommand `commandAddAction` testAction hooks - ,benchmarkCommand `commandAddAction` benchAction hooks - ] - --- | Combine the preprocessors in the given hooks with the --- preprocessors built into cabal. 
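-- Editor's addition (explanatory comment, not part of the removed
-- file): the combination below is left-biased.  'unionBy' keeps every
-- handler from 'hookedPreProcessors hooks' and only adds those
-- built-in 'knownSuffixHandlers' whose suffix (the 'fst' of the pair)
-- is not already taken, so a Setup.hs that registers its own handler
-- for a suffix such as "y" replaces the stock handler for that suffix
-- rather than being shadowed by it.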
-allSuffixHandlers :: UserHooks - -> [PPSuffixHandler] -allSuffixHandlers hooks - = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers - where - overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler] - overridesPP = unionBy (\x y -> fst x == fst y) - -configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo -configureAction hooks flags args = do - distPref <- findDistPrefOrDefault (configDistPref flags) - let flags' = flags { configDistPref = toFlag distPref } - pbi <- preConf hooks args flags' - - (mb_pd_file, pkg_descr0) <- confPkgDescr - - --get_pkg_descr (configVerbosity flags') - --let pkg_descr = updatePackageDescription pbi pkg_descr0 - let epkg_descr = (pkg_descr0, pbi) - - --(warns, ers) <- sanityCheckPackage pkg_descr - --errorOut (configVerbosity flags') warns ers - - localbuildinfo0 <- confHook hooks epkg_descr flags' - - -- remember the .cabal filename if we know it - -- and all the extra command line args - let localbuildinfo = localbuildinfo0 { - pkgDescrFile = mb_pd_file, - extraConfigArgs = args - } - writePersistBuildConfig distPref localbuildinfo - - let pkg_descr = localPkgDescr localbuildinfo - postConf hooks args flags' pkg_descr localbuildinfo - return localbuildinfo - where - verbosity = fromFlag (configVerbosity flags) - confPkgDescr :: IO (Maybe FilePath, GenericPackageDescription) - confPkgDescr = do - mdescr <- readDesc hooks - case mdescr of - Just descr -> return (Nothing, descr) - Nothing -> do - pdfile <- defaultPackageDesc verbosity - descr <- readPackageDescription verbosity pdfile - return (Just pdfile, descr) - -buildAction :: UserHooks -> BuildFlags -> Args -> IO () -buildAction hooks flags args = do - distPref <- findDistPrefOrDefault (buildDistPref flags) - let verbosity = fromFlag $ buildVerbosity flags - flags' = flags { buildDistPref = toFlag distPref } - - lbi <- getBuildConfig hooks verbosity distPref - progs <- reconfigurePrograms verbosity - (buildProgramPaths flags') - (buildProgramArgs flags') - (withPrograms lbi) - - hookedAction preBuild buildHook postBuild - (return lbi { withPrograms = progs }) - hooks flags' { buildArgs = args } args - -replAction :: UserHooks -> ReplFlags -> Args -> IO () -replAction hooks flags args = do - distPref <- findDistPrefOrDefault (replDistPref flags) - let verbosity = fromFlag $ replVerbosity flags - flags' = flags { replDistPref = toFlag distPref } - - lbi <- getBuildConfig hooks verbosity distPref - progs <- reconfigurePrograms verbosity - (replProgramPaths flags') - (replProgramArgs flags') - (withPrograms lbi) - - pbi <- preRepl hooks args flags' - let lbi' = lbi { withPrograms = progs } - pkg_descr0 = localPkgDescr lbi' - pkg_descr = updatePackageDescription pbi pkg_descr0 - replHook hooks pkg_descr lbi' hooks flags' args - postRepl hooks args flags' pkg_descr lbi' - -hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO () -hscolourAction hooks flags args = do - distPref <- findDistPrefOrDefault (hscolourDistPref flags) - let verbosity = fromFlag $ hscolourVerbosity flags - flags' = flags { hscolourDistPref = toFlag distPref } - hookedAction preHscolour hscolourHook postHscolour - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -haddockAction :: UserHooks -> HaddockFlags -> Args -> IO () -haddockAction hooks flags args = do - distPref <- findDistPrefOrDefault (haddockDistPref flags) - let verbosity = fromFlag $ haddockVerbosity flags - flags' = flags { haddockDistPref = toFlag distPref } - - lbi <- getBuildConfig hooks verbosity distPref 
- progs <- reconfigurePrograms verbosity - (haddockProgramPaths flags') - (haddockProgramArgs flags') - (withPrograms lbi) - - hookedAction preHaddock haddockHook postHaddock - (return lbi { withPrograms = progs }) - hooks flags' args - -cleanAction :: UserHooks -> CleanFlags -> Args -> IO () -cleanAction hooks flags args = do - distPref <- findDistPrefOrDefault (cleanDistPref flags) - let flags' = flags { cleanDistPref = toFlag distPref } - - pbi <- preClean hooks args flags' - - pdfile <- defaultPackageDesc verbosity - ppd <- readPackageDescription verbosity pdfile - let pkg_descr0 = flattenPackageDescription ppd - -- We don't sanity check for clean as an error - -- here would prevent cleaning: - --sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - - cleanHook hooks pkg_descr () hooks flags' - postClean hooks args flags' pkg_descr () - where - verbosity = fromFlag (cleanVerbosity flags) - -copyAction :: UserHooks -> CopyFlags -> Args -> IO () -copyAction hooks flags args = do - distPref <- findDistPrefOrDefault (copyDistPref flags) - let verbosity = fromFlag $ copyVerbosity flags - flags' = flags { copyDistPref = toFlag distPref } - hookedAction preCopy copyHook postCopy - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -installAction :: UserHooks -> InstallFlags -> Args -> IO () -installAction hooks flags args = do - distPref <- findDistPrefOrDefault (installDistPref flags) - let verbosity = fromFlag $ installVerbosity flags - flags' = flags { installDistPref = toFlag distPref } - hookedAction preInst instHook postInst - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -sdistAction :: UserHooks -> SDistFlags -> Args -> IO () -sdistAction hooks flags args = do - distPref <- findDistPrefOrDefault (sDistDistPref flags) - let flags' = flags { sDistDistPref = toFlag distPref } - pbi <- preSDist hooks args flags' - - mlbi <- maybeGetPersistBuildConfig distPref - pdfile <- defaultPackageDesc verbosity - ppd <- readPackageDescription verbosity pdfile - let pkg_descr0 = flattenPackageDescription ppd - sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - - sDistHook hooks pkg_descr mlbi hooks flags' - postSDist hooks args flags' pkg_descr mlbi - where - verbosity = fromFlag (sDistVerbosity flags) - -testAction :: UserHooks -> TestFlags -> Args -> IO () -testAction hooks flags args = do - distPref <- findDistPrefOrDefault (testDistPref flags) - let verbosity = fromFlag $ testVerbosity flags - flags' = flags { testDistPref = toFlag distPref } - - localBuildInfo <- getBuildConfig hooks verbosity distPref - let pkg_descr = localPkgDescr localBuildInfo - -- It is safe to do 'runTests' before the new test handler because the - -- default action is a no-op and if the package uses the old test interface - -- the new handler will find no tests. 
- runTests hooks args False pkg_descr localBuildInfo - hookedActionWithArgs preTest testHook postTest - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO () -benchAction hooks flags args = do - distPref <- findDistPrefOrDefault (benchmarkDistPref flags) - let verbosity = fromFlag $ benchmarkVerbosity flags - flags' = flags { benchmarkDistPref = toFlag distPref } - hookedActionWithArgs preBench benchHook postBench - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -registerAction :: UserHooks -> RegisterFlags -> Args -> IO () -registerAction hooks flags args = do - distPref <- findDistPrefOrDefault (regDistPref flags) - let verbosity = fromFlag $ regVerbosity flags - flags' = flags { regDistPref = toFlag distPref } - hookedAction preReg regHook postReg - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO () -unregisterAction hooks flags args = do - distPref <- findDistPrefOrDefault (regDistPref flags) - let verbosity = fromFlag $ regVerbosity flags - flags' = flags { regDistPref = toFlag distPref } - hookedAction preUnreg unregHook postUnreg - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) - -> (UserHooks -> PackageDescription -> LocalBuildInfo - -> UserHooks -> flags -> IO ()) - -> (UserHooks -> Args -> flags -> PackageDescription - -> LocalBuildInfo -> IO ()) - -> IO LocalBuildInfo - -> UserHooks -> flags -> Args -> IO () -hookedAction pre_hook cmd_hook = - hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> cmd_hook h pd lbi uh flags) - -hookedActionWithArgs :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) - -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> flags -> IO ()) - -> (UserHooks -> Args -> flags -> PackageDescription - -> LocalBuildInfo -> IO ()) - -> IO LocalBuildInfo - -> UserHooks -> flags -> Args -> IO () -hookedActionWithArgs pre_hook cmd_hook post_hook get_build_config hooks flags args = do - pbi <- pre_hook hooks args flags - localbuildinfo <- get_build_config - let pkg_descr0 = localPkgDescr localbuildinfo - --pkg_descr0 <- get_pkg_descr (get_verbose flags) - sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - -- TODO: should we write the modified package descr back to the - -- localbuildinfo? - cmd_hook hooks args pkg_descr localbuildinfo hooks flags - post_hook hooks args flags pkg_descr localbuildinfo - -sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO () -sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_) - = die $ "The buildinfo contains info for a library, " - ++ "but the package does not have a library." - -sanityCheckHookedBuildInfo pkg_descr (_, hookExes) - | not (null nonExistant) - = die $ "The buildinfo contains info for an executable called '" - ++ head nonExistant ++ "' but the package does not have a " - ++ "executable with that name." 
- where - pkgExeNames = nub (map exeName (executables pkg_descr)) - hookExeNames = nub (map fst hookExes) - nonExistant = hookExeNames \\ pkgExeNames - -sanityCheckHookedBuildInfo _ _ = return () - - -getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo -getBuildConfig hooks verbosity distPref = do - lbi_wo_programs <- getPersistBuildConfig distPref - -- Restore info about unconfigured programs, since it is not serialized - let lbi = lbi_wo_programs { - withPrograms = restoreProgramConfiguration - (builtinPrograms ++ hookedPrograms hooks) - (withPrograms lbi_wo_programs) - } - - case pkgDescrFile lbi of - Nothing -> return lbi - Just pkg_descr_file -> do - outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file - if outdated - then reconfigure pkg_descr_file lbi - else return lbi - - where - reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo - reconfigure pkg_descr_file lbi = do - notice verbosity $ pkg_descr_file ++ " has been changed. " - ++ "Re-configuring with most recently used options. " - ++ "If this fails, please run configure manually.\n" - let cFlags = configFlags lbi - let cFlags' = cFlags { - -- Since the list of unconfigured programs is not serialized, - -- restore it to the same value as normally used at the beginning - -- of a configure run: - configPrograms_ = restoreProgramConfiguration - (builtinPrograms ++ hookedPrograms hooks) - `fmap` configPrograms_ cFlags, - - -- Use the current, not saved verbosity level: - configVerbosity = Flag verbosity - } - configureAction hooks cFlags' (extraConfigArgs lbi) - - --- -------------------------------------------------------------------------- --- Cleaning - -clean :: PackageDescription -> CleanFlags -> IO () -clean pkg_descr flags = do - let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags - notice verbosity "cleaning..." - - maybeConfig <- if fromFlag (cleanSaveConf flags) - then maybeGetPersistBuildConfig distPref - else return Nothing - - -- remove the whole dist/ directory rather than tracking exactly what files - -- we created in there. 
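The executable check in sanityCheckHookedBuildInfo above boils down to a set difference over names: anything the .buildinfo mentions that the package does not declare triggers the die call. A self-contained sketch of that comparison, with hypothetical package and hook executable names:

import Data.List (nub, (\\))

-- Executables declared by the package vs. executables mentioned in a
-- hooked .buildinfo file (both lists are made-up examples).
pkgExeNames, hookExeNames :: [String]
pkgExeNames  = ["server", "client"]
hookExeNames = ["client", "tool"]

-- Names the buildinfo refers to that the package does not define;
-- a non-empty result corresponds to the error case above.
nonExistent :: [String]
nonExistent = nub hookExeNames \\ nub pkgExeNames

main :: IO ()
main = print nonExistent   -- ["tool"]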
- chattyTry "removing dist/" $ do - exists <- doesDirectoryExist distPref - when exists (removeDirectoryRecursive distPref) - - -- Any extra files the user wants to remove - mapM_ removeFileOrDirectory (extraTmpFiles pkg_descr) - - -- If the user wanted to save the config, write it back - traverse_ (writePersistBuildConfig distPref) maybeConfig - - where - removeFileOrDirectory :: FilePath -> IO () - removeFileOrDirectory fname = do - isDir <- doesDirectoryExist fname - isFile <- doesFileExist fname - if isDir then removeDirectoryRecursive fname - else when isFile $ removeFile fname - verbosity = fromFlag (cleanVerbosity flags) - --- -------------------------------------------------------------------------- --- Default hooks - --- | Hooks that correspond to a plain instantiation of the --- \"simple\" build system -simpleUserHooks :: UserHooks -simpleUserHooks = - emptyUserHooks { - confHook = configure, - postConf = finalChecks, - buildHook = defaultBuildHook, - replHook = defaultReplHook, - copyHook = \desc lbi _ f -> install desc lbi f, -- has correct 'copy' behavior with params - testHook = defaultTestHook, - benchHook = defaultBenchHook, - instHook = defaultInstallHook, - sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h), - cleanHook = \p _ _ f -> clean p f, - hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f, - haddockHook = \p l h f -> haddock p l (allSuffixHandlers h) f, - regHook = defaultRegHook, - unregHook = \p l _ f -> unregister p l f - } - where - finalChecks _args flags pkg_descr lbi = - checkForeignDeps pkg_descr lbi (lessVerbose verbosity) - where - verbosity = fromFlag (configVerbosity flags) - --- | Basic autoconf 'UserHooks': --- --- * 'postConf' runs @.\/configure@, if present. --- --- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst', --- 'preReg' and 'preUnreg' read additional build information from --- /package/@.buildinfo@, if present. --- --- Thus @configure@ can use local system information to generate --- /package/@.buildinfo@ and possibly other files. - -{-# DEPRECATED defaultUserHooks - "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-} -defaultUserHooks :: UserHooks -defaultUserHooks = autoconfUserHooks { - confHook = \pkg flags -> do - let verbosity = fromFlag (configVerbosity flags) - warn verbosity - "defaultUserHooks in Setup script is deprecated." - confHook autoconfUserHooks pkg flags, - postConf = oldCompatPostConf - } - -- This is the annoying old version that only runs configure if it exists. - -- It's here for compatibility with existing Setup.hs scripts. 
See: - -- https://github.com/haskell/cabal/issues/158 - where oldCompatPostConf args flags pkg_descr lbi - = do let verbosity = fromFlag (configVerbosity flags) - noExtraFlags args - confExists <- doesFileExist "configure" - when confExists $ - runConfigureScript verbosity - backwardsCompatHack flags lbi - - pbi <- getHookedBuildInfo verbosity - sanityCheckHookedBuildInfo pkg_descr pbi - let pkg_descr' = updatePackageDescription pbi pkg_descr - postConf simpleUserHooks args flags pkg_descr' lbi - - backwardsCompatHack = True - -autoconfUserHooks :: UserHooks -autoconfUserHooks - = simpleUserHooks - { - postConf = defaultPostConf, - preBuild = \_ flags -> - -- not using 'readHook' here because 'build' takes - -- extra args - getHookedBuildInfo $ fromFlag $ buildVerbosity flags, - preClean = readHook cleanVerbosity, - preCopy = readHook copyVerbosity, - preInst = readHook installVerbosity, - preHscolour = readHook hscolourVerbosity, - preHaddock = readHook haddockVerbosity, - preReg = readHook regVerbosity, - preUnreg = readHook regVerbosity - } - where defaultPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO () - defaultPostConf args flags pkg_descr lbi - = do let verbosity = fromFlag (configVerbosity flags) - noExtraFlags args - confExists <- doesFileExist "configure" - if confExists - then runConfigureScript verbosity - backwardsCompatHack flags lbi - else die "configure script not found." - - pbi <- getHookedBuildInfo verbosity - sanityCheckHookedBuildInfo pkg_descr pbi - let pkg_descr' = updatePackageDescription pbi pkg_descr - postConf simpleUserHooks args flags pkg_descr' lbi - - backwardsCompatHack = False - - readHook :: (a -> Flag Verbosity) -> Args -> a -> IO HookedBuildInfo - readHook get_verbosity a flags = do - noExtraFlags a - getHookedBuildInfo verbosity - where - verbosity = fromFlag (get_verbosity flags) - -runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo - -> IO () -runConfigureScript verbosity backwardsCompatHack flags lbi = do - env <- getEnvironment - let programConfig = withPrograms lbi - (ccProg, ccFlags) <- configureCCompiler verbosity programConfig - ccProgShort <- getShortPathName ccProg - -- The C compiler's compilation and linker flags (e.g. - -- "C compiler flags" and "Gcc Linker flags" from GHC) have already - -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS - -- to ccFlags - -- We don't try and tell configure which ld to use, as we don't have - -- a way to pass its flags too - let extraPath = fromNubList $ configProgramPathExtra flags - let cflagsEnv = maybe (unwords ccFlags) (++ (" " ++ unwords ccFlags)) $ lookup "CFLAGS" env - spSep = [searchPathSeparator] - pathEnv = maybe (intercalate spSep extraPath) ((intercalate spSep extraPath ++ spSep)++) $ lookup "PATH" env - overEnv = ("CFLAGS", Just cflagsEnv) : [("PATH", Just pathEnv) | not (null extraPath)] - args' = args ++ ["CC=" ++ ccProgShort] - shProg = simpleProgram "sh" - progDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb - shConfiguredProg <- lookupProgram shProg `fmap` configureProgram verbosity shProg progDb - case shConfiguredProg of - Just sh -> runProgramInvocation verbosity (programInvocation (sh {programOverrideEnv = overEnv}) args') - Nothing -> die notFoundMsg - - where - args = "./configure" : configureArgs backwardsCompatHack flags - - notFoundMsg = "The package has a './configure' script. 
If you are on Windows, This requires a " - ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin. " - ++ "If you are not on Windows, ensure that an 'sh' command is discoverable in your path." - -getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo -getHookedBuildInfo verbosity = do - maybe_infoFile <- defaultHookedPackageDesc - case maybe_infoFile of - Nothing -> return emptyHookedBuildInfo - Just infoFile -> do - info verbosity $ "Reading parameters from " ++ infoFile - readHookedBuildInfo verbosity infoFile - -defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> TestFlags -> IO () -defaultTestHook args pkg_descr localbuildinfo _ flags = - test args pkg_descr localbuildinfo flags - -defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> BenchmarkFlags -> IO () -defaultBenchHook args pkg_descr localbuildinfo _ flags = - bench args pkg_descr localbuildinfo flags - -defaultInstallHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> InstallFlags -> IO () -defaultInstallHook pkg_descr localbuildinfo _ flags = do - let copyFlags = defaultCopyFlags { - copyDistPref = installDistPref flags, - copyDest = toFlag NoCopyDest, - copyVerbosity = installVerbosity flags - } - install pkg_descr localbuildinfo copyFlags - let registerFlags = defaultRegisterFlags { - regDistPref = installDistPref flags, - regInPlace = installInPlace flags, - regPackageDB = installPackageDB flags, - regVerbosity = installVerbosity flags - } - when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags - -defaultBuildHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> BuildFlags -> IO () -defaultBuildHook pkg_descr localbuildinfo hooks flags = - build pkg_descr localbuildinfo flags (allSuffixHandlers hooks) - -defaultReplHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> ReplFlags -> [String] -> IO () -defaultReplHook pkg_descr localbuildinfo hooks flags args = - repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args - -defaultRegHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> RegisterFlags -> IO () -defaultRegHook pkg_descr localbuildinfo _ flags = - if hasLibs pkg_descr - then register pkg_descr localbuildinfo flags - else setupMessage (fromFlag (regVerbosity flags)) - "Package contains no library to register:" (packageId pkg_descr) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/System.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/System.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/System.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,235 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.System --- Copyright : Duncan Coutts 2007-2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Cabal often needs to do slightly different things on specific platforms. You --- probably know about the 'System.Info.os' however using that is very --- inconvenient because it is a string and different Haskell implementations --- do not agree on using the same strings for the same platforms! (In --- particular see the controversy over \"windows\" vs \"mingw32\"). 
So to make it --- more consistent and easy to use we have an 'OS' enumeration. --- -module Distribution.System ( - -- * Operating System - OS(..), - buildOS, - - -- * Machine Architecture - Arch(..), - buildArch, - - -- * Platform is a pair of arch and OS - Platform(..), - buildPlatform, - platformFromTriple, - - -- * Internal - knownOSs, - knownArches - ) where - -import qualified System.Info (os, arch) -import qualified Data.Char as Char (toLower, isAlphaNum, isAlpha) - -import Distribution.Compat.Binary -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse - -import Control.Monad (liftM2) -import Data.Data (Data) -import Data.Typeable (Typeable) -import Data.Maybe (fromMaybe, listToMaybe) -import GHC.Generics (Generic) -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ((<>)) - --- | How strict to be when classifying strings into the 'OS' and 'Arch' enums. --- --- The reason we have multiple ways to do the classification is because there --- are two situations where we need to do it. --- --- For parsing OS and arch names in .cabal files we really want everyone to be --- referring to the same or or arch by the same name. Variety is not a virtue --- in this case. We don't mind about case though. --- --- For the System.Info.os\/arch different Haskell implementations use different --- names for the same or\/arch. Also they tend to distinguish versions of an --- OS\/arch which we just don't care about. --- --- The 'Compat' classification allows us to recognise aliases that are already --- in common use but it allows us to distinguish them from the canonical name --- which enables us to warn about such deprecated aliases. --- -data ClassificationStrictness = Permissive | Compat | Strict - --- ------------------------------------------------------------ --- * Operating System --- ------------------------------------------------------------ - -data OS = Linux | Windows | OSX -- tier 1 desktop OSs - | FreeBSD | OpenBSD | NetBSD -- other free Unix OSs - | DragonFly - | Solaris | AIX | HPUX | IRIX -- ageing Unix OSs - | HaLVM -- bare metal / VMs / hypervisors - | Hurd -- GNU's microkernel - | IOS | Android -- mobile OSs - | Ghcjs - | OtherOS String - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary OS - -knownOSs :: [OS] -knownOSs = [Linux, Windows, OSX - ,FreeBSD, OpenBSD, NetBSD, DragonFly - ,Solaris, AIX, HPUX, IRIX - ,HaLVM - ,Hurd - ,IOS, Android - ,Ghcjs] - -osAliases :: ClassificationStrictness -> OS -> [String] -osAliases Permissive Windows = ["mingw32", "win32", "cygwin32"] -osAliases Compat Windows = ["mingw32", "win32"] -osAliases _ OSX = ["darwin"] -osAliases _ Hurd = ["gnu"] -osAliases Permissive FreeBSD = ["kfreebsdgnu"] -osAliases Compat FreeBSD = ["kfreebsdgnu"] -osAliases Permissive Solaris = ["solaris2"] -osAliases Compat Solaris = ["solaris2"] -osAliases _ _ = [] - -instance Text OS where - disp (OtherOS name) = Disp.text name - disp other = Disp.text (lowercase (show other)) - - parse = fmap (classifyOS Compat) ident - -classifyOS :: ClassificationStrictness -> String -> OS -classifyOS strictness s = - fromMaybe (OtherOS s) $ lookup (lowercase s) osMap - where - osMap = [ (name, os) - | os <- knownOSs - , name <- display os : osAliases strictness os ] - -buildOS :: OS -buildOS = classifyOS Permissive System.Info.os - --- ------------------------------------------------------------ --- * Machine Architecture --- ------------------------------------------------------------ - -data Arch = I386 | X86_64 | PPC | 
PPC64 | Sparc - | Arm | Mips | SH - | IA64 | S390 - | Alpha | Hppa | Rs6000 - | M68k | Vax - | JavaScript - | OtherArch String - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary Arch - -knownArches :: [Arch] -knownArches = [I386, X86_64, PPC, PPC64, Sparc - ,Arm, Mips, SH - ,IA64, S390 - ,Alpha, Hppa, Rs6000 - ,M68k, Vax - ,JavaScript] - -archAliases :: ClassificationStrictness -> Arch -> [String] -archAliases Strict _ = [] -archAliases Compat _ = [] -archAliases _ PPC = ["powerpc"] -archAliases _ PPC64 = ["powerpc64"] -archAliases _ Sparc = ["sparc64", "sun4"] -archAliases _ Mips = ["mipsel", "mipseb"] -archAliases _ Arm = ["armeb", "armel"] -archAliases _ _ = [] - -instance Text Arch where - disp (OtherArch name) = Disp.text name - disp other = Disp.text (lowercase (show other)) - - parse = fmap (classifyArch Strict) ident - -classifyArch :: ClassificationStrictness -> String -> Arch -classifyArch strictness s = - fromMaybe (OtherArch s) $ lookup (lowercase s) archMap - where - archMap = [ (name, arch) - | arch <- knownArches - , name <- display arch : archAliases strictness arch ] - -buildArch :: Arch -buildArch = classifyArch Permissive System.Info.arch - --- ------------------------------------------------------------ --- * Platform --- ------------------------------------------------------------ - -data Platform = Platform Arch OS - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary Platform - -instance Text Platform where - disp (Platform arch os) = disp arch <> Disp.char '-' <> disp os - -- TODO: there are ambigious platforms like: `arch-word-os` - -- which could be parsed as - -- * Platform "arch-word" "os" - -- * Platform "arch" "word-os" - -- We could support that preferring variants 'OtherOS' or 'OtherArch' - -- - -- For now we split into arch and os parts on the first dash. - parse = do - arch <- parseDashlessArch - _ <- Parse.char '-' - os <- parse - return (Platform arch os) - where - parseDashlessArch :: Parse.ReadP r Arch - parseDashlessArch = fmap (classifyArch Strict) dashlessIdent - --- | The platform Cabal was compiled on. In most cases, --- @LocalBuildInfo.hostPlatform@ should be used instead (the platform we're --- targeting). 
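The classification machinery above (classifyOS, classifyArch) is essentially a case-insensitive lookup through a table of canonical names plus strictness-dependent aliases, falling back to an "other" constructor for unknown strings. A cut-down, dependency-free sketch of the same idea; the MiniOS type and its alias table are invented for illustration:

import Data.Char (toLower)
import Data.Maybe (fromMaybe)

data MiniOS = MiniLinux | MiniWindows | MiniOSX | MiniOther String
  deriving (Eq, Show)

-- Canonical names plus a few aliases, in the spirit of osAliases Compat.
osTable :: [(String, MiniOS)]
osTable =
  [ ("linux",   MiniLinux)
  , ("windows", MiniWindows), ("mingw32", MiniWindows), ("win32", MiniWindows)
  , ("osx",     MiniOSX),     ("darwin",  MiniOSX)
  ]

-- Unknown names are kept verbatim rather than rejected.
classifyMiniOS :: String -> MiniOS
classifyMiniOS s = fromMaybe (MiniOther s) (lookup (map toLower s) osTable)

main :: IO ()
main = mapM_ (print . classifyMiniOS) ["Linux", "mingw32", "hurd"]
-- MiniLinux, MiniWindows, MiniOther "hurd"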
-buildPlatform :: Platform -buildPlatform = Platform buildArch buildOS - --- Utils: - -ident :: Parse.ReadP r String -ident = liftM2 (:) first rest - where first = Parse.satisfy Char.isAlpha - rest = Parse.munch (\c -> Char.isAlphaNum c || c == '_' || c == '-') - -dashlessIdent :: Parse.ReadP r String -dashlessIdent = liftM2 (:) first rest - where first = Parse.satisfy Char.isAlpha - rest = Parse.munch (\c -> Char.isAlphaNum c || c == '_') - -lowercase :: String -> String -lowercase = map Char.toLower - -platformFromTriple :: String -> Maybe Platform -platformFromTriple triple = - fmap fst (listToMaybe $ Parse.readP_to_S parseTriple triple) - where parseWord = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_') - parseTriple = do - arch <- fmap (classifyArch Permissive) parseWord - _ <- Parse.char '-' - _ <- parseWord -- Skip vendor - _ <- Parse.char '-' - os <- fmap (classifyOS Permissive) ident -- OS may have hyphens, like - -- 'nto-qnx' - return $ Platform arch os diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/TestSuite.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/TestSuite.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/TestSuite.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,96 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.TestSuite --- Copyright : Thomas Tuegel 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module defines the detailed test suite interface which makes it --- possible to expose individual tests to Cabal or other test agents. - -module Distribution.TestSuite - ( TestInstance(..) - , OptionDescr(..) - , OptionType(..) - , Test(..) - , Options - , Progress(..) - , Result(..) - , testGroup - ) where - -data TestInstance = TestInstance - { run :: IO Progress -- ^ Perform the test. - , name :: String -- ^ A name for the test, unique within a - -- test suite. - , tags :: [String] -- ^ Users can select groups of tests by - -- their tags. - , options :: [OptionDescr] -- ^ Descriptions of the options recognized - -- by this test. - , setOption :: String -> String -> Either String TestInstance - -- ^ Try to set the named option to the given value. Returns an error - -- message if the option is not supported or the value could not be - -- correctly parsed; otherwise, a 'TestInstance' with the option set to - -- the given value is returned. - } - -data OptionDescr = OptionDescr - { optionName :: String - , optionDescription :: String -- ^ A human-readable description of the - -- option to guide the user setting it. - , optionType :: OptionType - , optionDefault :: Maybe String - } - deriving (Eq, Read, Show) - -data OptionType - = OptionFile - { optionFileMustExist :: Bool - , optionFileIsDir :: Bool - , optionFileExtensions :: [String] - } - | OptionString - { optionStringMultiline :: Bool - } - | OptionNumber - { optionNumberIsInt :: Bool - , optionNumberBounds :: (Maybe String, Maybe String) - } - | OptionBool - | OptionEnum [String] - | OptionSet [String] - | OptionRngSeed - deriving (Eq, Read, Show) - -data Test - = Test TestInstance - | Group - { groupName :: String - , concurrently :: Bool - -- ^ If true, then children of this group may be run in parallel. 
- -- Note that this setting is not inherited by children. In - -- particular, consider a group F with "concurrently = False" that - -- has some children, including a group T with "concurrently = - -- True". The children of group T may be run concurrently with each - -- other, as long as none are run at the same time as any of the - -- direct children of group F. - , groupTests :: [Test] - } - | ExtraOptions [OptionDescr] Test - -type Options = [(String, String)] - -data Progress = Finished Result - | Progress String (IO Progress) - -data Result = Pass - | Fail String - | Error String - deriving (Eq, Read, Show) - --- | Create a named group of tests, which are assumed to be safe to run in --- parallel. -testGroup :: String -> [Test] -> Test -testGroup n ts = Group { groupName = n, concurrently = True, groupTests = ts } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Text.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Text.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Text.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Text.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,73 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Text --- Copyright : Duncan Coutts 2007 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines a 'Text' class which is a bit like the 'Read' and 'Show' --- classes. The difference is that is uses a modern pretty printer and parser --- system and the format is not expected to be Haskell concrete syntax but --- rather the external human readable representation used by Cabal. --- -module Distribution.Text ( - Text(..), - defaultStyle, - display, - simpleParse, - ) where - -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -import Data.Version (Version(Version)) -import qualified Data.Char as Char (isDigit, isAlphaNum, isSpace) - -class Text a where - disp :: a -> Disp.Doc - parse :: Parse.ReadP r a - --- | The default rendering style used in Cabal for console output. -defaultStyle :: Disp.Style -defaultStyle = Disp.Style { Disp.mode = Disp.PageMode - , Disp.lineLength = 79 - , Disp.ribbonsPerLine = 1.0 - } - -display :: Text a => a -> String -display = Disp.renderStyle defaultStyle . disp - -simpleParse :: Text a => String -> Maybe a -simpleParse str = case [ p | (p, s) <- Parse.readP_to_S parse str - , all Char.isSpace s ] of - [] -> Nothing - (p:_) -> Just p - --- ----------------------------------------------------------------------------- --- Instances for types from the base package - -instance Text Bool where - disp = Disp.text . show - parse = Parse.choice [ (Parse.string "True" Parse.+++ - Parse.string "true") >> return True - , (Parse.string "False" Parse.+++ - Parse.string "false") >> return False ] - -instance Text Int where - disp = Disp.text . show - parse = (fmap negate $ Parse.char '-' >> parseNat) Parse.+++ parseNat - --- | Parser for non-negative integers. -parseNat :: Parse.ReadP r Int -parseNat = read `fmap` Parse.munch1 Char.isDigit - -instance Text Version where - disp (Version branch _tags) -- Death to version tags!! 
- = Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int branch)) - - parse = do - branch <- Parse.sepBy1 parseNat (Parse.char '.') - -- allow but ignore tags: - _tags <- Parse.many (Parse.char '-' >> Parse.munch1 Char.isAlphaNum) - return (Version branch []) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Utils/NubList.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Utils/NubList.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Utils/NubList.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -module Distribution.Utils.NubList - ( NubList -- opaque - , toNubList -- smart construtor - , fromNubList - , overNubList - - , NubListR - , toNubListR - , fromNubListR - , overNubListR - ) where - -import Distribution.Compat.Semigroup as Semi -import Distribution.Compat.Binary -import Distribution.Simple.Utils - -import qualified Text.Read as R - --- | NubList : A de-duplicated list that maintains the original order. -newtype NubList a = - NubList { fromNubList :: [a] } - deriving Eq - --- NubList assumes that nub retains the list order while removing duplicate --- elements (keeping the first occurence). Documentation for "Data.List.nub" --- does not specifically state that ordering is maintained so we will add a test --- for that to the test suite. - --- | Smart constructor for the NubList type. -toNubList :: Ord a => [a] -> NubList a -toNubList list = NubList $ ordNub list - --- | Lift a function over lists to a function over NubLists. -overNubList :: Ord a => ([a] -> [a]) -> NubList a -> NubList a -overNubList f (NubList list) = toNubList . f $ list - --- | Monoid operations on NubLists. --- For a valid Monoid instance we need to satistfy the required monoid laws; --- identity, associativity and closure. --- --- Identity : by inspection: --- mempty `mappend` NubList xs == NubList xs `mappend` mempty --- --- Associativity : by inspection: --- (NubList xs `mappend` NubList ys) `mappend` NubList zs --- == NubList xs `mappend` (NubList ys `mappend` NubList zs) --- --- Closure : appending two lists of type a and removing duplicates obviously --- does not change the type. - -instance Ord a => Monoid (NubList a) where - mempty = NubList [] - mappend = (Semi.<>) - -instance Ord a => Semigroup (NubList a) where - (NubList xs) <> (NubList ys) = NubList $ xs `listUnion` ys - -instance Show a => Show (NubList a) where - show (NubList list) = show list - -instance (Ord a, Read a) => Read (NubList a) where - readPrec = readNubList toNubList - --- | Helper used by NubList/NubListR's Read instances. -readNubList :: (Read a) => ([a] -> l a) -> R.ReadPrec (l a) -readNubList toList = R.parens . R.prec 10 $ fmap toList R.readPrec - --- | Binary instance for 'NubList a' is the same as for '[a]'. For 'put', we --- just pull off constructor and put the list. For 'get', we get the list and --- make a 'NubList' out of it using 'toNubList'. -instance (Ord a, Binary a) => Binary (NubList a) where - put (NubList l) = put l - get = fmap toNubList get - --- | NubListR : A right-biased version of 'NubList'. That is @toNubListR --- ["-XNoFoo", "-XFoo", "-XNoFoo"]@ will result in @["-XFoo", "-XNoFoo"]@, --- unlike the normal 'NubList', which is left-biased. Built on top of --- 'ordNubRight' and 'listUnionRight'. 
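To make the left- vs right-biased distinction concrete, the example in the NubListR comment above can be reproduced with two small nub variants. Cabal's own ordNub and ordNubRight live in Distribution.Simple.Utils; the versions below are a standalone sketch of the same behaviour:

import qualified Data.Set as Set

-- Keep the first occurrence of each element (left-biased, as NubList does).
ordNub :: Ord a => [a] -> [a]
ordNub = go Set.empty
  where
    go _ [] = []
    go seen (x:xs)
      | x `Set.member` seen = go seen xs
      | otherwise           = x : go (Set.insert x seen) xs

-- Keep the last occurrence (right-biased, as NubListR does).
ordNubRight :: Ord a => [a] -> [a]
ordNubRight = reverse . ordNub . reverse

main :: IO ()
main = do
  print (ordNub      ["-XNoFoo", "-XFoo", "-XNoFoo"])  -- ["-XNoFoo","-XFoo"]
  print (ordNubRight ["-XNoFoo", "-XFoo", "-XNoFoo"])  -- ["-XFoo","-XNoFoo"]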
-newtype NubListR a = - NubListR { fromNubListR :: [a] } - deriving Eq - --- | Smart constructor for the NubListR type. -toNubListR :: Ord a => [a] -> NubListR a -toNubListR list = NubListR $ ordNubRight list - --- | Lift a function over lists to a function over NubListRs. -overNubListR :: Ord a => ([a] -> [a]) -> NubListR a -> NubListR a -overNubListR f (NubListR list) = toNubListR . f $ list - -instance Ord a => Monoid (NubListR a) where - mempty = NubListR [] - mappend = (Semi.<>) - -instance Ord a => Semigroup (NubListR a) where - (NubListR xs) <> (NubListR ys) = NubListR $ xs `listUnionRight` ys - -instance Show a => Show (NubListR a) where - show (NubListR list) = show list - -instance (Ord a, Read a) => Read (NubListR a) where - readPrec = readNubList toNubListR diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Verbosity.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Verbosity.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Verbosity.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Verbosity.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,90 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Verbosity --- Copyright : Ian Lynagh 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A simple 'Verbosity' type with associated utilities. There are 4 standard --- verbosity levels from 'silent', 'normal', 'verbose' up to 'deafening'. This --- is used for deciding what logging messages to print. - --- Verbosity for Cabal functions. - -module Distribution.Verbosity ( - -- * Verbosity - Verbosity, - silent, normal, verbose, deafening, - moreVerbose, lessVerbose, - intToVerbosity, flagToVerbosity, - showForCabal, showForGHC - ) where - -import Distribution.Compat.Binary -import Distribution.ReadE - -import Data.List (elemIndex) -import GHC.Generics - -data Verbosity = Silent | Normal | Verbose | Deafening - deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) - -instance Binary Verbosity - --- We shouldn't print /anything/ unless an error occurs in silent mode -silent :: Verbosity -silent = Silent - --- Print stuff we want to see by default -normal :: Verbosity -normal = Normal - --- Be more verbose about what's going on -verbose :: Verbosity -verbose = Verbose - --- Not only are we verbose ourselves (perhaps even noisier than when --- being "verbose"), but we tell everything we run to be verbose too -deafening :: Verbosity -deafening = Deafening - -moreVerbose :: Verbosity -> Verbosity -moreVerbose Silent = Silent --silent should stay silent -moreVerbose Normal = Verbose -moreVerbose Verbose = Deafening -moreVerbose Deafening = Deafening - -lessVerbose :: Verbosity -> Verbosity -lessVerbose Deafening = Deafening -lessVerbose Verbose = Normal -lessVerbose Normal = Silent -lessVerbose Silent = Silent - -intToVerbosity :: Int -> Maybe Verbosity -intToVerbosity 0 = Just Silent -intToVerbosity 1 = Just Normal -intToVerbosity 2 = Just Verbose -intToVerbosity 3 = Just Deafening -intToVerbosity _ = Nothing - -flagToVerbosity :: ReadE Verbosity -flagToVerbosity = ReadE $ \s -> - case reads s of - [(i, "")] -> - case intToVerbosity i of - Just v -> Right v - Nothing -> Left ("Bad verbosity: " ++ show i ++ - ". 
Valid values are 0..3") - _ -> Left ("Can't parse verbosity " ++ s) - -showForCabal, showForGHC :: Verbosity -> String - -showForCabal v = maybe (error "unknown verbosity") show $ - elemIndex v [silent,normal,verbose,deafening] -showForGHC v = maybe (error "unknown verbosity") show $ - elemIndex v [silent,normal,__,verbose,deafening] - where __ = silent -- this will be always ignored by elemIndex diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Distribution/Version.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,882 +0,0 @@ -{-# LANGUAGE CPP, DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -#if __GLASGOW_HASKELL__ < 707 -{-# LANGUAGE StandaloneDeriving #-} -#endif - --- Hack approach to support bootstrapping. --- When MIN_VERSION_binary macro is available, use it. But it's not available --- during bootstrapping (or anyone else building Setup.hs directly). If the --- builder specifies -DMIN_VERSION_binary_0_8_0=1 or =0 then we respect that. --- Otherwise we pick a default based on GHC version: assume binary <0.8 when --- GHC < 8.0, and binary >=0.8 when GHC >= 8.0. -#ifdef MIN_VERSION_binary -#define MIN_VERSION_binary_0_8_0 MIN_VERSION_binary(0,8,0) -#else -#ifndef MIN_VERSION_binary_0_8_0 -#if __GLASGOW_HASKELL__ >= 800 -#define MIN_VERSION_binary_0_8_0 1 -#else -#define MIN_VERSION_binary_0_8_0 0 -#endif -#endif -#endif - -#if !MIN_VERSION_binary_0_8_0 -{-# OPTIONS_GHC -fno-warn-orphans #-} -#endif - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Version --- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Exports the 'Version' type along with a parser and pretty printer. A version --- is something like @\"1.3.3\"@. It also defines the 'VersionRange' data --- types. Version ranges are like @\">= 1.2 && < 2\"@. - -module Distribution.Version ( - -- * Package versions - Version(..), - - -- * Version ranges - VersionRange(..), - - -- ** Constructing - anyVersion, noVersion, - thisVersion, notThisVersion, - laterVersion, earlierVersion, - orLaterVersion, orEarlierVersion, - unionVersionRanges, intersectVersionRanges, - differenceVersionRanges, - invertVersionRange, - withinVersion, - betweenVersionsInclusive, - - -- ** Inspection - withinRange, - isAnyVersion, - isNoVersion, - isSpecificVersion, - simplifyVersionRange, - foldVersionRange, - foldVersionRange', - hasUpperBound, - hasLowerBound, - - -- ** Modification - removeUpperBound, - - -- * Version intervals view - asVersionIntervals, - VersionInterval, - LowerBound(..), - UpperBound(..), - Bound(..), - - -- ** 'VersionIntervals' abstract type - -- | The 'VersionIntervals' type and the accompanying functions are exposed - -- primarily for completeness and testing purposes. In practice - -- 'asVersionIntervals' is the main function to use to - -- view a 'VersionRange' as a bunch of 'VersionInterval's. 
- -- - VersionIntervals, - toVersionIntervals, - fromVersionIntervals, - withinIntervals, - versionIntervals, - mkVersionIntervals, - unionVersionIntervals, - intersectVersionIntervals, - invertVersionIntervals - - ) where - -import Distribution.Compat.Binary ( Binary(..) ) -import Data.Data ( Data ) -import Data.Typeable ( Typeable ) -import Data.Version ( Version(..) ) -import GHC.Generics ( Generic ) - -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP hiding (get) - -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ((<>), (<+>)) -import qualified Data.Char as Char (isDigit) -import Control.Exception (assert) - --- ----------------------------------------------------------------------------- --- Version ranges - --- Todo: maybe move this to Distribution.Package.Version? --- (package-specific versioning scheme). - -data VersionRange - = AnyVersion - | ThisVersion Version -- = version - | LaterVersion Version -- > version (NB. not >=) - | EarlierVersion Version -- < version - | WildcardVersion Version -- == ver.* (same as >= ver && < ver+1) - | UnionVersionRanges VersionRange VersionRange - | IntersectVersionRanges VersionRange VersionRange - | VersionRangeParens VersionRange -- just '(exp)' parentheses syntax - deriving (Data, Eq, Generic, Read, Show, Typeable) - -instance Binary VersionRange - -#if __GLASGOW_HASKELL__ < 707 --- starting with ghc-7.7/base-4.7 this instance is provided in "Data.Data" -deriving instance Data Version -#endif - -#if !(MIN_VERSION_binary_0_8_0) --- Deriving this instance from Generic gives trouble on GHC 7.2 because the --- Generic instance has to be standalone-derived. So, we hand-roll our own. --- We can't use a generic Binary instance on later versions because we must --- maintain compatibility between compiler versions. -instance Binary Version where - get = do - br <- get - tags <- get - return $ Version br tags - put (Version br tags) = put br >> put tags -#endif - -{-# DeprecateD AnyVersion - "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED ThisVersion - "Use 'thisVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED LaterVersion - "Use 'laterVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED EarlierVersion - "Use 'earlierVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED WildcardVersion - "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED UnionVersionRanges - "Use 'unionVersionRanges', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED IntersectVersionRanges - "Use 'intersectVersionRanges', 'foldVersionRange' or 'asVersionIntervals'"#-} - --- | The version range @-any@. That is, a version range containing all --- versions. --- --- > withinRange v anyVersion = True --- -anyVersion :: VersionRange -anyVersion = AnyVersion - --- | The empty version range, that is a version range containing no versions. --- --- This can be constructed using any unsatisfiable version range expression, --- for example @> 1 && < 1@. 
--- --- > withinRange v noVersion = False --- -noVersion :: VersionRange -noVersion = IntersectVersionRanges (LaterVersion v) (EarlierVersion v) - where v = Version [1] [] - --- | The version range @== v@ --- --- > withinRange v' (thisVersion v) = v' == v --- -thisVersion :: Version -> VersionRange -thisVersion = ThisVersion - --- | The version range @< v || > v@ --- --- > withinRange v' (notThisVersion v) = v' /= v --- -notThisVersion :: Version -> VersionRange -notThisVersion v = UnionVersionRanges (EarlierVersion v) (LaterVersion v) - --- | The version range @> v@ --- --- > withinRange v' (laterVersion v) = v' > v --- -laterVersion :: Version -> VersionRange -laterVersion = LaterVersion - --- | The version range @>= v@ --- --- > withinRange v' (orLaterVersion v) = v' >= v --- -orLaterVersion :: Version -> VersionRange -orLaterVersion v = UnionVersionRanges (ThisVersion v) (LaterVersion v) - --- | The version range @< v@ --- --- > withinRange v' (earlierVersion v) = v' < v --- -earlierVersion :: Version -> VersionRange -earlierVersion = EarlierVersion - --- | The version range @<= v@ --- --- > withinRange v' (orEarlierVersion v) = v' <= v --- -orEarlierVersion :: Version -> VersionRange -orEarlierVersion v = UnionVersionRanges (ThisVersion v) (EarlierVersion v) - --- | The version range @vr1 || vr2@ --- --- > withinRange v' (unionVersionRanges vr1 vr2) --- > = withinRange v' vr1 || withinRange v' vr2 --- -unionVersionRanges :: VersionRange -> VersionRange -> VersionRange -unionVersionRanges = UnionVersionRanges - --- | The version range @vr1 && vr2@ --- --- > withinRange v' (intersectVersionRanges vr1 vr2) --- > = withinRange v' vr1 && withinRange v' vr2 --- -intersectVersionRanges :: VersionRange -> VersionRange -> VersionRange -intersectVersionRanges = IntersectVersionRanges - --- | The difference of two version ranges --- --- > withinRange v' (differenceVersionRanges vr1 vr2) --- > = withinRange v' vr1 && not (withinRange v' vr2) --- --- @since 1.24.1.0 -differenceVersionRanges :: VersionRange -> VersionRange -> VersionRange -differenceVersionRanges vr1 vr2 = - intersectVersionRanges vr1 (invertVersionRange vr2) - --- | The inverse of a version range --- --- > withinRange v' (invertVersionRange vr) --- > = not (withinRange v' vr) --- -invertVersionRange :: VersionRange -> VersionRange -invertVersionRange = - fromVersionIntervals . invertVersionIntervals - . VersionIntervals . asVersionIntervals - --- | The version range @== v.*@. --- --- For example, for version @1.2@, the version range @== 1.2.*@ is the same as --- @>= 1.2 && < 1.3@ --- --- > withinRange v' (laterVersion v) = v' >= v && v' < upper v --- > where --- > upper (Version lower t) = Version (init lower ++ [last lower + 1]) t --- -withinVersion :: Version -> VersionRange -withinVersion = WildcardVersion - --- | The version range @>= v1 && <= v2@. --- --- In practice this is not very useful because we normally use inclusive lower --- bounds and exclusive upper bounds. --- --- > withinRange v' (laterVersion v) = v' > v --- -betweenVersionsInclusive :: Version -> Version -> VersionRange -betweenVersionsInclusive v1 v2 = - IntersectVersionRanges (orLaterVersion v1) (orEarlierVersion v2) - -{-# DEPRECATED betweenVersionsInclusive - "In practice this is not very useful because we normally use inclusive lower bounds and exclusive upper bounds" #-} - --- | Given a version range, remove the highest upper bound. Example: @(>= 1 && < --- 3) || (>= 4 && < 5)@ is converted to @(>= 1 && < 3) || (>= 4)@. 
-removeUpperBound :: VersionRange -> VersionRange -removeUpperBound = fromVersionIntervals . relaxLastInterval . toVersionIntervals - where - relaxLastInterval (VersionIntervals intervals) = - VersionIntervals (relaxLastInterval' intervals) - - relaxLastInterval' [] = [] - relaxLastInterval' [(l,_)] = [(l, NoUpperBound)] - relaxLastInterval' (i:is) = i : relaxLastInterval' is - --- | Fold over the basic syntactic structure of a 'VersionRange'. --- --- This provides a syntactic view of the expression defining the version range. --- The syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== v.*\"@ is presented --- in terms of the other basic syntax. --- --- For a semantic view use 'asVersionIntervals'. --- -foldVersionRange :: a -- ^ @\"-any\"@ version - -> (Version -> a) -- ^ @\"== v\"@ - -> (Version -> a) -- ^ @\"> v\"@ - -> (Version -> a) -- ^ @\"< v\"@ - -> (a -> a -> a) -- ^ @\"_ || _\"@ union - -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection - -> VersionRange -> a -foldVersionRange anyv this later earlier union intersect = fold - where - fold AnyVersion = anyv - fold (ThisVersion v) = this v - fold (LaterVersion v) = later v - fold (EarlierVersion v) = earlier v - fold (WildcardVersion v) = fold (wildcard v) - fold (UnionVersionRanges v1 v2) = union (fold v1) (fold v2) - fold (IntersectVersionRanges v1 v2) = intersect (fold v1) (fold v2) - fold (VersionRangeParens v) = fold v - - wildcard v = intersectVersionRanges - (orLaterVersion v) - (earlierVersion (wildcardUpperBound v)) - --- | An extended variant of 'foldVersionRange' that also provides a view of the --- expression in which the syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== --- v.*\"@ is presented explicitly rather than in terms of the other basic --- syntax. --- -foldVersionRange' :: a -- ^ @\"-any\"@ version - -> (Version -> a) -- ^ @\"== v\"@ - -> (Version -> a) -- ^ @\"> v\"@ - -> (Version -> a) -- ^ @\"< v\"@ - -> (Version -> a) -- ^ @\">= v\"@ - -> (Version -> a) -- ^ @\"<= v\"@ - -> (Version -> Version -> a) -- ^ @\"== v.*\"@ wildcard. The - -- function is passed the - -- inclusive lower bound and the - -- exclusive upper bounds of the - -- range defined by the wildcard. - -> (a -> a -> a) -- ^ @\"_ || _\"@ union - -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection - -> (a -> a) -- ^ @\"(_)\"@ parentheses - -> VersionRange -> a -foldVersionRange' anyv this later earlier orLater orEarlier - wildcard union intersect parens = fold - where - fold AnyVersion = anyv - fold (ThisVersion v) = this v - fold (LaterVersion v) = later v - fold (EarlierVersion v) = earlier v - - fold (UnionVersionRanges (ThisVersion v) - (LaterVersion v')) | v==v' = orLater v - fold (UnionVersionRanges (LaterVersion v) - (ThisVersion v')) | v==v' = orLater v - fold (UnionVersionRanges (ThisVersion v) - (EarlierVersion v')) | v==v' = orEarlier v - fold (UnionVersionRanges (EarlierVersion v) - (ThisVersion v')) | v==v' = orEarlier v - - fold (WildcardVersion v) = wildcard v (wildcardUpperBound v) - fold (UnionVersionRanges v1 v2) = union (fold v1) (fold v2) - fold (IntersectVersionRanges v1 v2) = intersect (fold v1) (fold v2) - fold (VersionRangeParens v) = parens (fold v) - - --- | Does this version fall within the given range? --- --- This is the evaluation function for the 'VersionRange' type. 
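foldVersionRange above evaluates a range by structural recursion, and withinRange (defined next) is that fold instantiated at Bool. A toy, self-contained version of the same idea; the VR type below is a made-up reduction of VersionRange, not Cabal's own:

import Data.Version (Version, makeVersion)

-- A cut-down range syntax in the spirit of VersionRange (illustrative only).
data VR
  = Any
  | This Version
  | Later Version       -- strictly greater, like LaterVersion
  | Earlier Version     -- strictly smaller, like EarlierVersion
  | Union VR VR
  | Intersect VR VR

-- Evaluation by a fold over the syntax, mirroring withinRange.
within :: Version -> VR -> Bool
within _ Any             = True
within v (This v')       = v == v'
within v (Later v')      = v > v'
within v (Earlier v')    = v < v'
within v (Union a b)     = within v a || within v b
within v (Intersect a b) = within v a && within v b

main :: IO ()
main = do
  let r = Later (makeVersion [1,2]) `Intersect` Earlier (makeVersion [1,3])
  print (within (makeVersion [1,2,1]) r)  -- True
  print (within (makeVersion [1,3])   r)  -- False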
--- -withinRange :: Version -> VersionRange -> Bool -withinRange v = foldVersionRange - True - (\v' -> versionBranch v == versionBranch v') - (\v' -> versionBranch v > versionBranch v') - (\v' -> versionBranch v < versionBranch v') - (||) - (&&) - --- | View a 'VersionRange' as a union of intervals. --- --- This provides a canonical view of the semantics of a 'VersionRange' as --- opposed to the syntax of the expression used to define it. For the syntactic --- view use 'foldVersionRange'. --- --- Each interval is non-empty. The sequence is in increasing order and no --- intervals overlap or touch. Therefore only the first and last can be --- unbounded. The sequence can be empty if the range is empty --- (e.g. a range expression like @< 1 && > 2@). --- --- Other checks are trivial to implement using this view. For example: --- --- > isNoVersion vr | [] <- asVersionIntervals vr = True --- > | otherwise = False --- --- > isSpecificVersion vr --- > | [(LowerBound v InclusiveBound --- > ,UpperBound v' InclusiveBound)] <- asVersionIntervals vr --- > , v == v' = Just v --- > | otherwise = Nothing --- -asVersionIntervals :: VersionRange -> [VersionInterval] -asVersionIntervals = versionIntervals . toVersionIntervals - --- | Does this 'VersionRange' place any restriction on the 'Version' or is it --- in fact equivalent to 'AnyVersion'. --- --- Note this is a semantic check, not simply a syntactic check. So for example --- the following is @True@ (for all @v@). --- --- > isAnyVersion (EarlierVersion v `UnionVersionRanges` orLaterVersion v) --- -isAnyVersion :: VersionRange -> Bool -isAnyVersion vr = case asVersionIntervals vr of - [(LowerBound v InclusiveBound, NoUpperBound)] | isVersion0 v -> True - _ -> False - --- | This is the converse of 'isAnyVersion'. It check if the version range is --- empty, if there is no possible version that satisfies the version range. --- --- For example this is @True@ (for all @v@): --- --- > isNoVersion (EarlierVersion v `IntersectVersionRanges` LaterVersion v) --- -isNoVersion :: VersionRange -> Bool -isNoVersion vr = case asVersionIntervals vr of - [] -> True - _ -> False - --- | Is this version range in fact just a specific version? --- --- For example the version range @\">= 3 && <= 3\"@ contains only the version --- @3@. --- -isSpecificVersion :: VersionRange -> Maybe Version -isSpecificVersion vr = case asVersionIntervals vr of - [(LowerBound v InclusiveBound - ,UpperBound v' InclusiveBound)] - | v == v' -> Just v - _ -> Nothing - --- | Simplify a 'VersionRange' expression. For non-empty version ranges --- this produces a canonical form. Empty or inconsistent version ranges --- are left as-is because that provides more information. --- --- If you need a canonical form use --- @fromVersionIntervals . toVersionIntervals@ --- --- It satisfies the following properties: --- --- > withinRange v (simplifyVersionRange r) = withinRange v r --- --- > withinRange v r = withinRange v r' --- > ==> simplifyVersionRange r = simplifyVersionRange r' --- > || isNoVersion r --- > || isNoVersion r' --- -simplifyVersionRange :: VersionRange -> VersionRange -simplifyVersionRange vr - -- If the version range is inconsistent then we just return the - -- original since that has more information than ">1 && < 1", which - -- is the canonical inconsistent version range. 
- | null (versionIntervals vi) = vr - | otherwise = fromVersionIntervals vi - where - vi = toVersionIntervals vr - ----------------------------- --- Wildcard range utilities --- - -wildcardUpperBound :: Version -> Version -wildcardUpperBound (Version lowerBound ts) = Version upperBound ts - where - upperBound = init lowerBound ++ [last lowerBound + 1] - -isWildcardRange :: Version -> Version -> Bool -isWildcardRange (Version branch1 _) (Version branch2 _) = check branch1 branch2 - where check (n:[]) (m:[]) | n+1 == m = True - check (n:ns) (m:ms) | n == m = check ns ms - check _ _ = False - ------------------- --- Intervals view --- - --- | A complementary representation of a 'VersionRange'. Instead of a boolean --- version predicate it uses an increasing sequence of non-overlapping, --- non-empty intervals. --- --- The key point is that this representation gives a canonical representation --- for the semantics of 'VersionRange's. This makes it easier to check things --- like whether a version range is empty, covers all versions, or requires a --- certain minimum or maximum version. It also makes it easy to check equality --- or containment. It also makes it easier to identify \'simple\' version --- predicates for translation into foreign packaging systems that do not --- support complex version range expressions. --- -newtype VersionIntervals = VersionIntervals [VersionInterval] - deriving (Eq, Show) - --- | Inspect the list of version intervals. --- -versionIntervals :: VersionIntervals -> [VersionInterval] -versionIntervals (VersionIntervals is) = is - -type VersionInterval = (LowerBound, UpperBound) -data LowerBound = LowerBound Version !Bound deriving (Eq, Show) -data UpperBound = NoUpperBound | UpperBound Version !Bound deriving (Eq, Show) -data Bound = ExclusiveBound | InclusiveBound deriving (Eq, Show) - -minLowerBound :: LowerBound -minLowerBound = LowerBound (Version [0] []) InclusiveBound - -isVersion0 :: Version -> Bool -isVersion0 (Version [0] _) = True -isVersion0 _ = False - -instance Ord LowerBound where - LowerBound ver bound <= LowerBound ver' bound' = case compare ver ver' of - LT -> True - EQ -> not (bound == ExclusiveBound && bound' == InclusiveBound) - GT -> False - -instance Ord UpperBound where - _ <= NoUpperBound = True - NoUpperBound <= UpperBound _ _ = False - UpperBound ver bound <= UpperBound ver' bound' = case compare ver ver' of - LT -> True - EQ -> not (bound == InclusiveBound && bound' == ExclusiveBound) - GT -> False - -invariant :: VersionIntervals -> Bool -invariant (VersionIntervals intervals) = all validInterval intervals - && all doesNotTouch' adjacentIntervals - where - doesNotTouch' :: (VersionInterval, VersionInterval) -> Bool - doesNotTouch' ((_,u), (l',_)) = doesNotTouch u l' - - adjacentIntervals :: [(VersionInterval, VersionInterval)] - adjacentIntervals - | null intervals = [] - | otherwise = zip intervals (tail intervals) - -checkInvariant :: VersionIntervals -> VersionIntervals -checkInvariant is = assert (invariant is) is - --- | Directly construct a 'VersionIntervals' from a list of intervals. --- --- Each interval must be non-empty. The sequence must be in increasing order --- and no intervals may overlap or touch. If any of these conditions are not --- satisfied the function returns @Nothing@. 
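One detail worth spelling out from the wildcard handling above: the exclusive upper bound of a range like "== 1.2.*" is obtained by bumping the last component of the branch, so it means ">= 1.2 && < 1.3". A tiny sketch over raw branches, using plain [Int] here rather than Cabal's Version:

-- The upper bound is the branch with its last component incremented,
-- as in wildcardUpperBound above (assumes a non-empty branch).
wildcardUpper :: [Int] -> [Int]
wildcardUpper branch = init branch ++ [last branch + 1]

main :: IO ()
main = do
  print (wildcardUpper [1,2])    -- [1,3]
  print (wildcardUpper [2,4,1])  -- [2,4,2]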
--- -mkVersionIntervals :: [VersionInterval] -> Maybe VersionIntervals -mkVersionIntervals intervals - | invariant (VersionIntervals intervals) = Just (VersionIntervals intervals) - | otherwise = Nothing - -validVersion :: Version -> Bool -validVersion (Version [] _) = False -validVersion (Version vs _) = all (>=0) vs - -validInterval :: (LowerBound, UpperBound) -> Bool -validInterval i@(l, u) = validLower l && validUpper u && nonEmpty i - where - validLower (LowerBound v _) = validVersion v - validUpper NoUpperBound = True - validUpper (UpperBound v _) = validVersion v - --- Check an interval is non-empty --- -nonEmpty :: VersionInterval -> Bool -nonEmpty (_, NoUpperBound ) = True -nonEmpty (LowerBound l lb, UpperBound u ub) = - (l < u) || (l == u && lb == InclusiveBound && ub == InclusiveBound) - --- Check an upper bound does not intersect, or even touch a lower bound: --- --- ---| or ---) but not ---] or ---) or ---] --- |--- (--- (--- [--- [--- --- -doesNotTouch :: UpperBound -> LowerBound -> Bool -doesNotTouch NoUpperBound _ = False -doesNotTouch (UpperBound u ub) (LowerBound l lb) = - u < l - || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) - --- | Check an upper bound does not intersect a lower bound: --- --- ---| or ---) or ---] or ---) but not ---] --- |--- (--- (--- [--- [--- --- -doesNotIntersect :: UpperBound -> LowerBound -> Bool -doesNotIntersect NoUpperBound _ = False -doesNotIntersect (UpperBound u ub) (LowerBound l lb) = - u < l - || (u == l && not (ub == InclusiveBound && lb == InclusiveBound)) - --- | Test if a version falls within the version intervals. --- --- It exists mostly for completeness and testing. It satisfies the following --- properties: --- --- > withinIntervals v (toVersionIntervals vr) = withinRange v vr --- > withinIntervals v ivs = withinRange v (fromVersionIntervals ivs) --- -withinIntervals :: Version -> VersionIntervals -> Bool -withinIntervals v (VersionIntervals intervals) = any withinInterval intervals - where - withinInterval (lowerBound, upperBound) = withinLower lowerBound - && withinUpper upperBound - withinLower (LowerBound v' ExclusiveBound) = v' < v - withinLower (LowerBound v' InclusiveBound) = v' <= v - - withinUpper NoUpperBound = True - withinUpper (UpperBound v' ExclusiveBound) = v' > v - withinUpper (UpperBound v' InclusiveBound) = v' >= v - --- | Convert a 'VersionRange' to a sequence of version intervals. --- -toVersionIntervals :: VersionRange -> VersionIntervals -toVersionIntervals = foldVersionRange - ( chkIvl (minLowerBound, NoUpperBound)) - (\v -> chkIvl (LowerBound v InclusiveBound, UpperBound v InclusiveBound)) - (\v -> chkIvl (LowerBound v ExclusiveBound, NoUpperBound)) - (\v -> if isVersion0 v then VersionIntervals [] else - chkIvl (minLowerBound, UpperBound v ExclusiveBound)) - unionVersionIntervals - intersectVersionIntervals - where - chkIvl interval = checkInvariant (VersionIntervals [interval]) - --- | Convert a 'VersionIntervals' value back into a 'VersionRange' expression --- representing the version intervals. 
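The intervals view described above reduces membership testing to scanning an ordered list of non-overlapping intervals. A dependency-free sketch of that check, simplified to inclusive lower and exclusive upper bounds and using plain version branches instead of Cabal's Version and Bound types:

-- A version is represented by its branch only (illustrative simplification).
type V = [Int]

-- Interval [lo, hi); Nothing means no upper bound.
type Interval = (V, Maybe V)

-- Membership against a canonical interval list, in the spirit of withinIntervals.
withinIvs :: V -> [Interval] -> Bool
withinIvs v = any inside
  where
    inside (lo, Nothing) = v >= lo
    inside (lo, Just hi) = v >= lo && v < hi

main :: IO ()
main = do
  let ivs = [([1], Just [3]), ([4], Nothing)]   -- (>= 1 && < 3) || (>= 4)
  print (withinIvs [2,5] ivs)  -- True
  print (withinIvs [3]   ivs)  -- False
  print (withinIvs [5]   ivs)  -- True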
--- -fromVersionIntervals :: VersionIntervals -> VersionRange -fromVersionIntervals (VersionIntervals []) = noVersion -fromVersionIntervals (VersionIntervals intervals) = - foldr1 UnionVersionRanges [ interval l u | (l, u) <- intervals ] - - where - interval (LowerBound v InclusiveBound) - (UpperBound v' InclusiveBound) | v == v' - = ThisVersion v - interval (LowerBound v InclusiveBound) - (UpperBound v' ExclusiveBound) | isWildcardRange v v' - = WildcardVersion v - interval l u = lowerBound l `intersectVersionRanges'` upperBound u - - lowerBound (LowerBound v InclusiveBound) - | isVersion0 v = AnyVersion - | otherwise = orLaterVersion v - lowerBound (LowerBound v ExclusiveBound) = LaterVersion v - - upperBound NoUpperBound = AnyVersion - upperBound (UpperBound v InclusiveBound) = orEarlierVersion v - upperBound (UpperBound v ExclusiveBound) = EarlierVersion v - - intersectVersionRanges' vr AnyVersion = vr - intersectVersionRanges' AnyVersion vr = vr - intersectVersionRanges' vr vr' = IntersectVersionRanges vr vr' - -unionVersionIntervals :: VersionIntervals -> VersionIntervals - -> VersionIntervals -unionVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = - checkInvariant (VersionIntervals (union is0 is'0)) - where - union is [] = is - union [] is' = is' - union (i:is) (i':is') = case unionInterval i i' of - Left Nothing -> i : union is (i' :is') - Left (Just i'') -> union is (i'':is') - Right Nothing -> i' : union (i :is) is' - Right (Just i'') -> union (i'':is) is' - -unionInterval :: VersionInterval -> VersionInterval - -> Either (Maybe VersionInterval) (Maybe VersionInterval) -unionInterval (lower , upper ) (lower', upper') - - -- Non-intersecting intervals with the left interval ending first - | upper `doesNotTouch` lower' = Left Nothing - - -- Non-intersecting intervals with the right interval first - | upper' `doesNotTouch` lower = Right Nothing - - -- Complete or partial overlap, with the left interval ending first - | upper <= upper' = lowerBound `seq` - Left (Just (lowerBound, upper')) - - -- Complete or partial overlap, with the left interval ending first - | otherwise = lowerBound `seq` - Right (Just (lowerBound, upper)) - where - lowerBound = min lower lower' - -intersectVersionIntervals :: VersionIntervals -> VersionIntervals - -> VersionIntervals -intersectVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = - checkInvariant (VersionIntervals (intersect is0 is'0)) - where - intersect _ [] = [] - intersect [] _ = [] - intersect (i:is) (i':is') = case intersectInterval i i' of - Left Nothing -> intersect is (i':is') - Left (Just i'') -> i'' : intersect is (i':is') - Right Nothing -> intersect (i:is) is' - Right (Just i'') -> i'' : intersect (i:is) is' - -intersectInterval :: VersionInterval -> VersionInterval - -> Either (Maybe VersionInterval) (Maybe VersionInterval) -intersectInterval (lower , upper ) (lower', upper') - - -- Non-intersecting intervals with the left interval ending first - | upper `doesNotIntersect` lower' = Left Nothing - - -- Non-intersecting intervals with the right interval first - | upper' `doesNotIntersect` lower = Right Nothing - - -- Complete or partial overlap, with the left interval ending first - | upper <= upper' = lowerBound `seq` - Left (Just (lowerBound, upper)) - - -- Complete or partial overlap, with the right interval ending first - | otherwise = lowerBound `seq` - Right (Just (lowerBound, upper')) - where - lowerBound = max lower lower' - -invertVersionIntervals :: VersionIntervals - -> VersionIntervals 
-invertVersionIntervals (VersionIntervals xs) = - case xs of - -- Empty interval set - [] -> VersionIntervals [(noLowerBound, NoUpperBound)] - -- Interval with no lower bound - ((lb, ub) : more) | lb == noLowerBound -> - VersionIntervals $ invertVersionIntervals' ub more - -- Interval with a lower bound - ((lb, ub) : more) -> - VersionIntervals $ (noLowerBound, invertLowerBound lb) - : invertVersionIntervals' ub more - where - -- Invert subsequent version intervals given the upper bound of - -- the intervals already inverted. - invertVersionIntervals' :: UpperBound - -> [(LowerBound, UpperBound)] - -> [(LowerBound, UpperBound)] - invertVersionIntervals' NoUpperBound [] = [] - invertVersionIntervals' ub0 [] = [(invertUpperBound ub0, NoUpperBound)] - invertVersionIntervals' ub0 [(lb, NoUpperBound)] = - [(invertUpperBound ub0, invertLowerBound lb)] - invertVersionIntervals' ub0 ((lb, ub1) : more) = - (invertUpperBound ub0, invertLowerBound lb) - : invertVersionIntervals' ub1 more - - invertLowerBound :: LowerBound -> UpperBound - invertLowerBound (LowerBound v b) = UpperBound v (invertBound b) - - invertUpperBound :: UpperBound -> LowerBound - invertUpperBound (UpperBound v b) = LowerBound v (invertBound b) - invertUpperBound NoUpperBound = error "NoUpperBound: unexpected" - - invertBound :: Bound -> Bound - invertBound ExclusiveBound = InclusiveBound - invertBound InclusiveBound = ExclusiveBound - - noLowerBound :: LowerBound - noLowerBound = LowerBound (Version [0] []) InclusiveBound - -------------------------------- --- Parsing and pretty printing --- - -instance Text VersionRange where - disp = fst - . foldVersionRange' -- precedence: - ( Disp.text "-any" , 0 :: Int) - (\v -> (Disp.text "==" <> disp v , 0)) - (\v -> (Disp.char '>' <> disp v , 0)) - (\v -> (Disp.char '<' <> disp v , 0)) - (\v -> (Disp.text ">=" <> disp v , 0)) - (\v -> (Disp.text "<=" <> disp v , 0)) - (\v _ -> (Disp.text "==" <> dispWild v , 0)) - (\(r1, p1) (r2, p2) -> - (punct 2 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2)) - (\(r1, p1) (r2, p2) -> - (punct 1 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1)) - (\(r, _) -> (Disp.parens r, 0)) - - where dispWild (Version b _) = - Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) - <> Disp.text ".*" - punct p p' | p < p' = Disp.parens - | otherwise = id - - parse = expr - where - expr = do Parse.skipSpaces - t <- term - Parse.skipSpaces - (do _ <- Parse.string "||" - Parse.skipSpaces - e <- expr - return (UnionVersionRanges t e) - +++ - return t) - term = do f <- factor - Parse.skipSpaces - (do _ <- Parse.string "&&" - Parse.skipSpaces - t <- term - return (IntersectVersionRanges f t) - +++ - return f) - factor = Parse.choice $ parens expr - : parseAnyVersion - : parseNoVersion - : parseWildcardRange - : map parseRangeOp rangeOps - parseAnyVersion = Parse.string "-any" >> return AnyVersion - parseNoVersion = Parse.string "-none" >> return noVersion - - parseWildcardRange = do - _ <- Parse.string "==" - Parse.skipSpaces - branch <- Parse.sepBy1 digits (Parse.char '.') - _ <- Parse.char '.' 
- _ <- Parse.char '*' - return (WildcardVersion (Version branch [])) - - parens p = Parse.between (Parse.char '(' >> Parse.skipSpaces) - (Parse.char ')' >> Parse.skipSpaces) - (do a <- p - Parse.skipSpaces - return (VersionRangeParens a)) - - digits = do - first <- Parse.satisfy Char.isDigit - if first == '0' - then return 0 - else do rest <- Parse.munch Char.isDigit - return (read (first : rest)) - - parseRangeOp (s,f) = Parse.string s >> Parse.skipSpaces >> fmap f parse - rangeOps = [ ("<", EarlierVersion), - ("<=", orEarlierVersion), - (">", LaterVersion), - (">=", orLaterVersion), - ("==", ThisVersion) ] - --- | Does the version range have an upper bound? --- --- @since 1.24.0.0 -hasUpperBound :: VersionRange -> Bool -hasUpperBound = foldVersionRange - False - (const True) - (const False) - (const True) - (&&) (||) - --- | Does the version range have an explicit lower bound? --- --- Note: this function only considers the user-specified lower bounds, but not --- the implicit >=0 lower bound. --- --- @since 1.24.0.0 -hasLowerBound :: VersionRange -> Bool -hasLowerBound = foldVersionRange - False - (const True) - (const True) - (const False) - (&&) (||) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/Cabal.css cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/Cabal.css --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/Cabal.css 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/Cabal.css 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -body { - max-width: 18cm; -} - -div { - font-family: sans-serif; - color: black; - background: white -} - -h1, h2, h3, h4, h5, h6, p.title { color: #005A9C } - -h1 { font: 170% sans-serif } -h2 { font: 140% sans-serif } -h3 { font: 120% sans-serif } -h4 { font: bold 100% sans-serif } -h5 { font: italic 100% sans-serif } -h6 { font: small-caps 100% sans-serif } - -pre { - font-family: monospace; - border-width: 1px; - border-style: solid; - padding: 0.3em -} - -pre.screen { color: #006400 } -pre.programlisting { color: maroon } - -div.example { - margin: 1ex 0em; - border: solid #412e25 1px; - padding: 0ex 0.4em -} - -div.example, div.example-contents { - background-color: #fffcf5 -} - -a:link { color: #0000C8 } -a:hover { background: #FFFFA8 } -a:active { color: #D00000 } -a:visited { color: #680098 } - -h1 a:link, h2 a:link, h3 a:link, h4 a:link, h5 a:link, h6 a:link, -h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited { - color: #005A9C; - text-decoration: none -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/developing-packages.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/developing-packages.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/developing-packages.markdown 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/developing-packages.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,2238 +0,0 @@ -% Cabal User Guide: Developing Cabal packages - - -# Quickstart # - - -Lets assume we have created a project directory and already have a -Haskell module or two. - -Every project needs a name, we'll call this example "proglet". 
- -~~~~~~~~~~~ -$ cd proglet/ -$ ls -Proglet.hs -~~~~~~~~~~~ - -It is assumed that (apart from external dependencies) all the files that -make up a package live under a common project root directory. This -simple example has all the project files in one directory, but most -packages will use one or more subdirectories. - -To turn this into a Cabal package we need two extra files in the -project's root directory: - - * `proglet.cabal`: containing package metadata and build information. - - * `Setup.hs`: usually containing a few standardized lines of code, but - can be customized if necessary. - -We can create both files manually or we can use `cabal init` to create -them for us. - -### Using "cabal init" ### - -The `cabal init` command is interactive. It asks us a number of -questions starting with the package name and version. - -~~~~~~~~~~ -$ cabal init -Package name [default "proglet"]? -Package version [default "0.1"]? -... -~~~~~~~~~~ - -It also asks questions about various other bits of package metadata. For -a package that you never intend to distribute to others, these fields can -be left blank. - -One of the important questions is whether the package contains a library -or an executable. Libraries are collections of Haskell modules that can -be re-used by other Haskell libraries and programs, while executables -are standalone programs. - -~~~~~~~~~~ -What does the package build: - 1) Library - 2) Executable -Your choice? -~~~~~~~~~~ - -For the moment these are the only choices. For more complex packages -(e.g. a library and multiple executables or test suites) the `.cabal` -file can be edited afterwards. - -Finally, `cabal init` creates the initial `proglet.cabal` and `Setup.hs` -files, and depending on your choice of license, a `LICENSE` file as well. - -~~~~~~~~~~ -Generating LICENSE... -Generating Setup.hs... -Generating proglet.cabal... - -You may want to edit the .cabal file and add a Description field. -~~~~~~~~~~ - -As this stage the `proglet.cabal` is not quite complete and before you -are able to build the package you will need to edit the file and add -some build information about the library or executable. - -### Editing the .cabal file ### - -Load up the `.cabal` file in a text editor. The first part of the -`.cabal` file has the package metadata and towards the end of the file -you will find the `executable` or `library` section. - -You will see that the fields that have yet to be filled in are commented -out. Cabal files use "`--`" Haskell-style comment syntax. (Note that -comments are only allowed on lines on their own. Trailing comments on -other lines are not allowed because they could be confused with program -options.) - -If you selected earlier to create a library package then your `.cabal` -file will have a section that looks like this: - -~~~~~~~~~~~~~~~~~ -library - exposed-modules: Proglet - -- other-modules: - -- build-depends: -~~~~~~~~~~~~~~~~~ - -Alternatively, if you selected an executable then there will be a -section like: - -~~~~~~~~~~~~~~~~~ -executable proglet - -- main-is: - -- other-modules: - -- build-depends: -~~~~~~~~~~~~~~~~~ - -The build information fields listed (but commented out) are just the few -most important and common fields. There are many others that are covered -later in this chapter. - -Most of the build information fields are the same between libraries and -executables. 
The difference is that libraries have a number of "exposed" -modules that make up the public interface of the library, while -executables have a file containing a `Main` module. - -The name of a library always matches the name of the package, so it is -not specified in the library section. Executables often follow the name -of the package too, but this is not required and the name is given -explicitly. - -### Modules included in the package ### - -For a library, `cabal init` looks in the project directory for files -that look like Haskell modules and adds all the modules to the -`exposed-modules` field. For modules that do not form part of your -package's public interface, you can move those modules to the -`other-modules` field. Either way, all modules in the library need to be -listed. - -For an executable, `cabal init` does not try to guess which file -contains your program's `Main` module. You will need to fill in the -`main-is` field with the file name of your program's `Main` module -(including `.hs` or `.lhs` extension). Other modules included in the -executable should be listed in the `other-modules` field. - -### Modules imported from other packages ### - -While your library or executable may include a number of modules, it -almost certainly also imports a number of external modules from the -standard libraries or other pre-packaged libraries. (These other -libraries are of course just Cabal packages that contain a library.) - -You have to list all of the library packages that your library or -executable imports modules from. Or to put it another way: you have to -list all the other packages that your package depends on. - -For example, suppose the example `Proglet` module imports the module -`Data.Map`. The `Data.Map` module comes from the `containers` package, -so we must list it: - -~~~~~~~~~~~~~~~~~ -library - exposed-modules: Proglet - other-modules: - build-depends: containers, base == 4.* -~~~~~~~~~~~~~~~~~ - -In addition, almost every package also depends on the `base` library -package because it exports the standard `Prelude` module plus other -basic modules like `Data.List`. - -You will notice that we have listed `base == 4.*`. This gives a -constraint on the version of the base package that our package will work -with. The most common kinds of constraints are: - - * `pkgname >= n` - * `pkgname >= n && < m` - * `pkgname == n.*` - -The last is just shorthand, for example `base == 4.*` means exactly the -same thing as `base >= 4 && < 5`. - -### Building the package ### - -For simple packages that's it! We can now try configuring and building -the package: - -~~~~~~~~~~~~~~~~ -cabal configure -cabal build -~~~~~~~~~~~~~~~~ - -Assuming those two steps worked then you can also install the package: - -~~~~~~~~~~~~~~~~ -cabal install -~~~~~~~~~~~~~~~~ - -For libraries this makes them available for use in GHCi or to be used by -other packages. For executables it installs the program so that you can -run it (though you may first need to adjust your system's `$PATH`). - -### Next steps ### - -What we have covered so far should be enough for very simple packages -that you use on your own system. - -The next few sections cover more details needed for more complex -packages and details needed for distributing packages to other people. - -The previous chapter covers building and installing packages -- your own -packages or ones developed by other people. 
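Before moving on, here is a minimal sketch of what the `Proglet` module from this quickstart might contain, assuming it imports `Data.Map` from the `containers` package as in the `build-depends` example above; the `countWords` function is illustrative only.

~~~~~~~~~~~~~~~~
module Proglet (countWords) where

-- Uses Data.Map from the containers package, which therefore has to
-- appear in the library's build-depends field.
import qualified Data.Map as Map

-- Count how often each word occurs in the input.
countWords :: String -> Map.Map String Int
countWords = foldr bump Map.empty . words
  where
    bump w m = Map.insertWith (+) w 1 m
~~~~~~~~~~~~~~~~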
- - -# Package concepts # - -Before diving into the details of writing packages it helps to -understand a bit about packages in the Haskell world and the particular -approach that Cabal takes. - -### The point of packages ### - -Packages are a mechanism for organising and distributing code. Packages -are particularly suited for "programming in the large", that is building -big systems by using and re-using code written by different people at -different times. - -People organise code into packages based on functionality and -dependencies. Social factors are also important: most packages have a -single author, or a relatively small team of authors. - -Packages are also used for distribution: the idea is that a package can -be created in one place and be moved to a different computer and be -usable in that different environment. There are a surprising number of -details that have to be got right for this to work, and a good package -system helps to simply this process and make it reliable. - -Packages come in two main flavours: libraries of reusable code, and -complete programs. Libraries present a code interface, an API, while -programs can be run directly. In the Haskell world, library packages -expose a set of Haskell modules as their public interface. Cabal -packages can contain a library or executables or both. - -Some programming languages have packages as a builtin language concept. -For example in Java, a package provides a local namespace for types and -other definitions. In the Haskell world, packages are not a part of the -language itself. Haskell programs consist of a number of modules, and -packages just provide a way to partition the modules into sets of -related functionality. Thus the choice of module names in Haskell is -still important, even when using packages. - -### Package names and versions ### - -All packages have a name, e.g. "HUnit". Package names are assumed to be -unique. Cabal package names can use letters, numbers and hyphens, but -not spaces. The namespace for Cabal packages is flat, not hierarchical. - -Packages also have a version, e.g "1.1". This matches the typical way in -which packages are developed. Strictly speaking, each version of a -package is independent, but usually they are very similar. Cabal package -versions follow the conventional numeric style, consisting of a sequence -of digits such as "1.0.1" or "2.0". There are a range of common -conventions for "versioning" packages, that is giving some meaning to -the version number in terms of changes in the package. Section [TODO] -has some tips on package versioning. - -The combination of package name and version is called the _package ID_ -and is written with a hyphen to separate the name and version, e.g. -"HUnit-1.1". - -For Cabal packages, the combination of the package name and version -_uniquely_ identifies each package. Or to put it another way: two -packages with the same name and version are considered to _be_ the same. - -Strictly speaking, the package ID only identifies each Cabal _source_ -package; the same Cabal source package can be configured and built in -different ways. There is a separate installed package ID that uniquely -identifies each installed package instance. Most of the time however, -users need not be aware of this detail. - -### Kinds of package: Cabal vs GHC vs system ### - -It can be slightly confusing at first because there are various -different notions of package floating around. Fortunately the details -are not very complicated. 
- -Cabal packages -: Cabal packages are really source packages. That is they contain - Haskell (and sometimes C) source code. - - Cabal packages can be compiled to produce GHC packages. They can - also be translated into operating system packages. - -GHC packages -: This is GHC's view on packages. GHC only cares about library - packages, not executables. Library packages have to be registered - with GHC for them to be available in GHCi or to be used when - compiling other programs or packages. - - The low-level tool `ghc-pkg` is used to register GHC packages and to - get information on what packages are currently registered. - - You never need to make GHC packages manually. When you build and - install a Cabal package containing a library then it gets registered - with GHC automatically. - - Haskell implementations other than GHC have essentially the same - concept of registered packages. For the most part, Cabal hides the - slight differences. - -Operating system packages -: On operating systems like Linux and Mac OS X, the system has a - specific notion of a package and there are tools for installing and - managing packages. - - The Cabal package format is designed to allow Cabal packages to be - translated, mostly-automatically, into operating system packages. - They are usually translated 1:1, that is a single Cabal package - becomes a single system package. - - It is also possible to make Windows installers from Cabal packages, - though this is typically done for a program together with all of its - library dependencies, rather than packaging each library separately. - - -### Unit of distribution ### - -The Cabal package is the unit of distribution. What this means is that -each Cabal package can be distributed on its own in source or binary -form. Of course there may dependencies between packages, but there is -usually a degree of flexibility in which versions of packages can work -together so distributing them independently makes sense. - -It is perhaps easiest to see what being ``the unit of distribution'' -means by contrast to an alternative approach. Many projects are made up -of several interdependent packages and during development these might -all be kept under one common directory tree and be built and tested -together. When it comes to distribution however, rather than -distributing them all together in a single tarball, it is required that -they each be distributed independently in their own tarballs. - -Cabal's approach is to say that if you can specify a dependency on a -package then that package should be able to be distributed -independently. Or to put it the other way round, if you want to -distribute it as a single unit, then it should be a single package. - - -### Explicit dependencies and automatic package management ### - -Cabal takes the approach that all packages dependencies are specified -explicitly and specified in a declarative way. The point is to enable -automatic package management. This means tools like `cabal` can resolve -dependencies and install a package plus all of its dependencies -automatically. Alternatively, it is possible to mechanically (or mostly -mechanically) translate Cabal packages into system packages and let the -system package manager install dependencies automatically. - -It is important to track dependencies accurately so that packages can -reliably be moved from one system to another system and still be able to -build it there. Cabal is therefore relatively strict about specifying -dependencies. 
For example Cabal's default build system will not even let -code build if it tries to import a module from a package that isn't -listed in the `.cabal` file, even if that package is actually installed. -This helps to ensure that there are no "untracked dependencies" that -could cause the code to fail to build on some other system. - -The explicit dependency approach is in contrast to the traditional -"./configure" approach where instead of specifying dependencies -declaratively, the `./configure` script checks if the dependencies are -present on the system. Some manual work is required to transform a -`./configure` based package into a Linux distribution package (or -similar). This conversion work is usually done by people other than the -package author(s). The practical effect of this is that only the most -popular packages will benefit from automatic package management. Instead, -Cabal forces the original author to specify the dependencies but the -advantage is that every package can benefit from automatic package -management. - -The "./configure" approach tends to encourage packages that adapt -themselves to the environment in which they are built, for example by -disabling optional features so that they can continue to work when a -particular dependency is not available. This approach makes sense in a -world where installing additional dependencies is a tiresome manual -process and so minimising dependencies is important. The automatic -package management view is that packages should just declare what they -need and the package manager will take responsibility for ensuring that -all the dependencies are installed. - -Sometimes of course optional features and optional dependencies do make -sense. Cabal packages can have optional features and varying -dependencies. These conditional dependencies are still specified in a -declarative way however and remain compatible with automatic package -management. The need to remain compatible with automatic package -management means that Cabal's conditional dependencies system is a bit -less flexible than with the "./configure" approach. - -### Portability ### - -One of the purposes of Cabal is to make it easier to build packages on -different platforms (operating systems and CPU architectures), with -different compiler versions and indeed even with different Haskell -implementations. (Yes, there are Haskell implementations other than -GHC!) - -Cabal provides abstractions of features present in different Haskell -implementations and wherever possible it is best to take advantage of -these to increase portability. Where necessary however it is possible to -use specific features of specific implementations. - -For example a package author can list in the package's `.cabal` what -language extensions the code uses. This allows Cabal to figure out if -the language extension is supported by the Haskell implementation that -the user picks. Additionally, certain language extensions such as -Template Haskell require special handling from the build system and by -listing the extension it provides the build system with enough -information to do the right thing. - -Another similar example is linking with foreign libraries. Rather than -specifying GHC flags directly, the package author can list the libraries -that are needed and the build system will take care of using the right -flags for the compiler. 
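To make the point about untracked dependencies concrete, here is a hypothetical module sketch; the module name and the `bytestring` dependency are assumptions chosen purely for illustration. Because it imports `Data.ByteString.Char8`, the component containing it would have to list `bytestring` in `build-depends`, or the default build system will refuse the import even when `bytestring` is installed.

~~~~~~~~~~~~~~~~
-- Hypothetical example: importing from bytestring means the enclosing
-- component must declare bytestring in build-depends, even if that
-- package happens to be installed on the build machine already.
module Untracked (firstLine) where

import qualified Data.ByteString.Char8 as BS

-- Take the input up to (but not including) the first newline.
firstLine :: BS.ByteString -> BS.ByteString
firstLine = BS.takeWhile (/= '\n')
~~~~~~~~~~~~~~~~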
Additionally this makes it easier for tools to -discover what system C libraries a package needs, which is useful for -tracking dependencies on system libraries (e.g. when translating into -Linux distribution packages). - -In fact both of these examples fall into the category of explicitly -specifying dependencies. Not all dependencies are other Cabal packages. -Foreign libraries are clearly another kind of dependency. It's also -possible to think of language extensions as dependencies: the package -depends on a Haskell implementation that supports all those extensions. - -Where compiler-specific options are needed however, there is an "escape -hatch" available. The developer can specify implementation-specific -options and more generally there is a configuration mechanism to -customise many aspects of how a package is built depending on the -Haskell implementation, the operating system, computer architecture and -user-specified configuration flags. - - -# Developing packages # - -The Cabal package is the unit of distribution. When installed, its -purpose is to make available: - - * One or more Haskell programs. - - * At most one library, exposing a number of Haskell modules. - -However having both a library and executables in a package does not work -very well; if the executables depend on the library, they must -explicitly list all the modules they directly or indirectly import from -that library. Fortunately, starting with Cabal 1.8.0.4, executables can -also declare the package that they are in as a dependency, and Cabal -will treat them as if they were in another package that depended on -the library. - -Internally, the package may consist of much more than a bunch of Haskell -modules: it may also have C source code and header files, source code -meant for preprocessing, documentation, test cases, auxiliary tools etc. - -A package is identified by a globally-unique _package name_, which -consists of one or more alphanumeric words separated by hyphens. To -avoid ambiguity, each of these words should contain at least one letter. -Chaos will result if two distinct packages with the same name are -installed on the same system. A particular version of the package is -distinguished by a _version number_, consisting of a sequence of one or -more integers separated by dots. These can be combined to form a single -text string called the _package ID_, using a hyphen to separate the name -from the version, e.g. "`HUnit-1.1`". - -Note: Packages are not part of the Haskell language; they simply -populate the hierarchical space of module names. In GHC 6.6 and later a -program may contain multiple modules with the same name if they come -from separate packages; in all other current Haskell systems packages -may not overlap in the modules they provide, including hidden modules. - - -## Creating a package ## - -Suppose you have a directory hierarchy containing the source files that -make up your package. You will need to add two more files to the root -directory of the package: - -_package_`.cabal` - -: a Unicode UTF-8 text file containing a package description. - For details of the syntax of this file, see the [section on package - descriptions](#package-descriptions). - -`Setup.hs` - -: a single-module Haskell program to perform various setup tasks (with - the interface described in the section on [building and installing - packages](installing-packages.html). This module should - import only modules that will be present in all Haskell - implementations, including modules of the Cabal library. 
The - content of this file is determined by the `build-type` setting in - the `.cabal` file. In most cases it will be trivial, calling on - the Cabal library to do most of the work. - -Once you have these, you can create a source bundle of this directory -for distribution. Building of the package is discussed in the section on -[building and installing packages](installing-packages.html). - -One of the purposes of Cabal is to make it easier to build a package -with different Haskell implementations. So it provides abstractions of -features present in different Haskell implementations and wherever -possible it is best to take advantage of these to increase portability. -Where necessary however it is possible to use specific features of -specific implementations. For example one of the pieces of information a -package author can put in the package's `.cabal` file is what language -extensions the code uses. This is far preferable to specifying flags for -a specific compiler as it allows Cabal to pick the right flags for the -Haskell implementation that the user picks. It also allows Cabal to -figure out if the language extension is even supported by the Haskell -implementation that the user picks. Where compiler-specific options are -needed however, there is an "escape hatch" available. The developer can -specify implementation-specific options and more generally there is a -configuration mechanism to customise many aspects of how a package is -built depending on the Haskell implementation, the Operating system, -computer architecture and user-specified configuration flags. - -~~~~~~~~~~~~~~~~ -name: Foo -version: 1.0 - -library - build-depends: base - exposed-modules: Foo - extensions: ForeignFunctionInterface - ghc-options: -Wall - if os(windows) - build-depends: Win32 -~~~~~~~~~~~~~~~~ - -#### Example: A package containing a simple library #### - -The HUnit package contains a file `HUnit.cabal` containing: - -~~~~~~~~~~~~~~~~ -name: HUnit -version: 1.1.1 -synopsis: A unit testing framework for Haskell -homepage: http://hunit.sourceforge.net/ -category: Testing -author: Dean Herington -license: BSD3 -license-file: LICENSE -cabal-version: >= 1.10 -build-type: Simple - -library - build-depends: base >= 2 && < 4 - exposed-modules: Test.HUnit.Base, Test.HUnit.Lang, - Test.HUnit.Terminal, Test.HUnit.Text, Test.HUnit - default-extensions: CPP -~~~~~~~~~~~~~~~~ - -and the following `Setup.hs`: - -~~~~~~~~~~~~~~~~ -import Distribution.Simple -main = defaultMain -~~~~~~~~~~~~~~~~ - -#### Example: A package containing executable programs #### - -~~~~~~~~~~~~~~~~ -name: TestPackage -version: 0.0 -synopsis: Small package with two programs -author: Angela Author -license: BSD3 -build-type: Simple -cabal-version: >= 1.2 - -executable program1 - build-depends: HUnit - main-is: Main.hs - hs-source-dirs: prog1 - -executable program2 - main-is: Main.hs - build-depends: HUnit - hs-source-dirs: prog2 - other-modules: Utils -~~~~~~~~~~~~~~~~ - -with `Setup.hs` the same as above. 
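As a companion to the TestPackage example above, here is a minimal sketch of what `prog1/Main.hs` could contain, assuming `program1` exercises its declared HUnit dependency; the test cases themselves are illustrative only.

~~~~~~~~~~~~~~~~
module Main (main) where

import Test.HUnit

-- Two trivial test cases, just to exercise the HUnit dependency
-- declared in program1's build-depends.
tests :: Test
tests = TestList
  [ TestCase (assertEqual "addition" 4 (2 + 2))
  , TestCase (assertBool  "ordering" (1 < (2 :: Int)))
  ]

main :: IO ()
main = do
  counts <- runTestTT tests
  print counts
~~~~~~~~~~~~~~~~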
- -#### Example: A package containing a library and executable programs #### - -~~~~~~~~~~~~~~~~ -name: TestPackage -version: 0.0 -synopsis: Package with library and two programs -license: BSD3 -author: Angela Author -build-type: Simple -cabal-version: >= 1.2 - -library - build-depends: HUnit - exposed-modules: A, B, C - -executable program1 - main-is: Main.hs - hs-source-dirs: prog1 - other-modules: A, B - -executable program2 - main-is: Main.hs - hs-source-dirs: prog2 - other-modules: A, C, Utils -~~~~~~~~~~~~~~~~ - -with `Setup.hs` the same as above. Note that any library modules -required (directly or indirectly) by an executable must be listed again. - -The trivial setup script used in these examples uses the _simple build -infrastructure_ provided by the Cabal library (see -[Distribution.Simple][dist-simple]). The simplicity lies in its -interface rather that its implementation. It automatically handles -preprocessing with standard preprocessors, and builds packages for all -the Haskell implementations. - -The simple build infrastructure can also handle packages where building -is governed by system-dependent parameters, if you specify a little more -(see the section on [system-dependent -parameters](#system-dependent-parameters)). A few packages require [more -elaborate solutions](#more-complex-packages). - -## Package descriptions ## - -The package description file must have a name ending in "`.cabal`". It -must be a Unicode text file encoded using valid UTF-8. There must be -exactly one such file in the directory. The first part of the name is -usually the package name, and some of the tools that operate on Cabal -packages require this. - -In the package description file, lines whose first non-whitespace characters -are "`--`" are treated as comments and ignored. - -This file should contain of a number global property descriptions and -several sections. - -* The [global properties](#package-properties) describe the package as a - whole, such as name, license, author, etc. - -* Optionally, a number of _configuration flags_ can be declared. These - can be used to enable or disable certain features of a package. (see - the section on [configurations](#configurations)). - -* The (optional) library section specifies the [library - properties](#library) and relevant [build - information](#build-information). - -* Following is an arbitrary number of executable sections - which describe an executable program and relevant [build - information](#build-information). - -Each section consists of a number of property descriptions -in the form of field/value pairs, with a syntax roughly like mail -message headers. - -* Case is not significant in field names, but is significant in field - values. - -* To continue a field value, indent the next line relative to the field - name. - -* Field names may be indented, but all field values in the same section - must use the same indentation. - -* Tabs are *not* allowed as indentation characters due to a missing - standard interpretation of tab width. - -* To get a blank line in a field value, use an indented "`.`" - -The syntax of the value depends on the field. Field types include: - -_token_, _filename_, _directory_ -: Either a sequence of one or more non-space non-comma characters, or a quoted - string in Haskell 98 lexical syntax. The latter can be used for escaping - whitespace, for example: `ghc-options: -Wall "-with-rtsopts=-T -I1"`. - Unless otherwise stated, relative filenames and directories are interpreted - from the package root directory. 
- -_freeform_, _URL_, _address_ -: An arbitrary, uninterpreted string. - -_identifier_ -: A letter followed by zero or more alphanumerics or underscores. - -_compiler_ -: A compiler flavor (one of: `GHC`, `JHC`, `UHC` or `LHC`) followed by a - version range. For example, `GHC ==6.10.3`, or `LHC >=0.6 && <0.8`. - -### Modules and preprocessors ### - -Haskell module names listed in the `exposed-modules` and `other-modules` -fields may correspond to Haskell source files, i.e. with names ending in -"`.hs`" or "`.lhs`", or to inputs for various Haskell preprocessors. The -simple build infrastructure understands the extensions: - -* `.gc` ([greencard][]) -* `.chs` ([c2hs][]) -* `.hsc` (`hsc2hs`) -* `.y` and `.ly` ([happy][]) -* `.x` ([alex][]) -* `.cpphs` ([cpphs][]) - -When building, Cabal will automatically run the appropriate -preprocessor and compile the Haskell module it produces. For the -`c2hs` and `hsc2hs` preprocessors, Cabal will also automatically add, -compile and link any C sources generated by the preprocessor (produced -by `hsc2hs`'s `#def` feature or `c2hs`'s auto-generated wrapper -functions). - -Some fields take lists of values, which are optionally separated by commas, -except for the `build-depends` field, where the commas are mandatory. - -Some fields are marked as required. All others are optional, and unless -otherwise specified have empty default values. - -### Package properties ### - -These fields may occur in the first top-level properties section and -describe the package as a whole: - -`name:` _package-name_ (required) -: The unique name of the package, without the version number. - -`version:` _numbers_ (required) -: The package version number, usually consisting of a sequence of - natural numbers separated by dots. - -`cabal-version:` _>= x.y_ -: The version of the Cabal specification that this package description uses. - The Cabal specification does slowly evolve, introducing new features and - occasionally changing the meaning of existing features. By specifying - which version of the spec you are using it enables programs which process - the package description to know what syntax to expect and what each part - means. - - For historical reasons this is always expressed using _>=_ version range - syntax. No other kinds of version range make sense, in particular upper - bounds do not make sense. In future this field will specify just a version - number, rather than a version range. - - The version number you specify will affect both compatibility and - behaviour. Most tools (including the Cabal library and cabal program) - understand a range of versions of the Cabal specification. Older tools - will of course only work with older versions of the Cabal specification. - Most of the time, tools that are too old will recognise this fact and - produce a suitable error message. - - As for behaviour, new versions of the Cabal spec can change the meaning - of existing syntax. This means if you want to take advantage of the new - meaning or behaviour then you must specify the newer Cabal version. - Tools are expected to use the meaning and behaviour appropriate to the - version given in the package description. - - In particular, the syntax of package descriptions changed significantly - with Cabal version 1.2 and the `cabal-version` field is now required. - Files written in the old syntax are still recognized, so if you require - compatibility with very old Cabal versions then you may write your package - description file using the old syntax. 
Please consult the user's guide of - an older Cabal version for a description of that syntax. - -`build-type:` _identifier_ -: The type of build used by this package. Build types are the - constructors of the [BuildType][] type, defaulting to `Custom`. - - If the build type is anything other than `Custom`, then the - `Setup.hs` file *must* be exactly the standardized content - discussed below. This is because in these cases, `cabal` will - ignore the `Setup.hs` file completely, whereas other methods of - package management, such as `runhaskell Setup.hs [CMD]`, still - rely on the `Setup.hs` file. - - For build type `Simple`, the contents of `Setup.hs` must be: - - ~~~~~~~~~~~~~~~~ - import Distribution.Simple - main = defaultMain - ~~~~~~~~~~~~~~~~ - - For build type `Configure` (see the section on [system-dependent - parameters](#system-dependent-parameters) below), the contents of - `Setup.hs` must be: - - ~~~~~~~~~~~~~~~~ - import Distribution.Simple - main = defaultMainWithHooks autoconfUserHooks - ~~~~~~~~~~~~~~~~ - - For build type `Make` (see the section on [more complex - packages](installing-packages.html#more-complex-packages) below), - the contents of `Setup.hs` must be: - - ~~~~~~~~~~~~~~~~ - import Distribution.Make - main = defaultMain - ~~~~~~~~~~~~~~~~ - - For build type `Custom`, the file `Setup.hs` can be customized, - and will be used both by `cabal` and other tools. - - For most packages, the build type `Simple` is sufficient. - -`license:` _identifier_ (default: `AllRightsReserved`) -: The type of license under which this package is distributed. - License names are the constants of the [License][dist-license] type. - -`license-file:` _filename_ or `license-files:` _filename list_ -: The name of a file(s) containing the precise copyright license for - this package. The license file(s) will be installed with the package. - - If you have multiple license files then use the `license-files` - field instead of (or in addition to) the `license-file` field. - -`copyright:` _freeform_ -: The content of a copyright notice, typically the name of the holder - of the copyright on the package and the year(s) from which copyright - is claimed. For example: `Copyright: (c) 2006-2007 Joe Bloggs` - -`author:` _freeform_ -: The original author of the package. - - Remember that `.cabal` files are Unicode, using the UTF-8 encoding. - -`maintainer:` _address_ -: The current maintainer or maintainers of the package. This is an e-mail address to which users should send bug - reports, feature requests and patches. - -`stability:` _freeform_ -: The stability level of the package, e.g. `alpha`, `experimental`, `provisional`, - `stable`. - -`homepage:` _URL_ -: The package homepage. - -`bug-reports:` _URL_ -: The URL where users should direct bug reports. This would normally be either: - - * A `mailto:` URL, e.g. for a person or a mailing list. - - * An `http:` (or `https:`) URL for an online bug tracking system. - - For example Cabal itself uses a web-based bug tracking system - - ~~~~~~~~~~~~~~~~ - bug-reports: http://hackage.haskell.org/trac/hackage/ - ~~~~~~~~~~~~~~~~ - -`package-url:` _URL_ -: The location of a source bundle for the package. The distribution - should be a Cabal package. - -`synopsis:` _freeform_ -: A very short description of the package, for use in a table of - packages. This is your headline, so keep it short (one line) but as - informative as possible. Save space by not including the package - name or saying it's written in Haskell. 
- -`description:` _freeform_ -: Description of the package. This may be several paragraphs, and - should be aimed at a Haskell programmer who has never heard of your - package before. - - For library packages, this field is used as prologue text by [`setup - haddock`](installing-packages.html#setup-haddock), and thus may - contain the same markup as [haddock][] documentation comments. - -`category:` _freeform_ -: A classification category for future use by the package catalogue [Hackage]. These - categories have not yet been specified, but the upper levels of the - module hierarchy make a good start. - -`tested-with:` _compiler list_ -: A list of compilers and versions against which the package has been - tested (or at least built). - -`data-files:` _filename list_ -: A list of files to be installed for run-time use by the package. - This is useful for packages that use a large amount of static data, - such as tables of values or code templates. Cabal provides a way to - [find these files at - run-time](#accessing-data-files-from-package-code). - - A limited form of `*` wildcards in file names, for example - `data-files: images/*.png` matches all the `.png` files in the - `images` directory. - - The limitation is that `*` wildcards are only allowed in place of - the file name, not in the directory name or file extension. In - particular, wildcards do not include directories contents - recursively. Furthermore, if a wildcard is used it must be used with - an extension, so `data-files: data/*` is not allowed. When matching - a wildcard plus extension, a file's full extension must match - exactly, so `*.gz` matches `foo.gz` but not `foo.tar.gz`. A wildcard - that does not match any files is an error. - - The reason for providing only a very limited form of wildcard is to - concisely express the common case of a large number of related files - of the same file type without making it too easy to accidentally - include unwanted files. - -`data-dir:` _directory_ -: The directory where Cabal looks for data files to install, relative - to the source directory. By default, Cabal will look in the source - directory itself. - -`extra-source-files:` _filename list_ -: A list of additional files to be included in source distributions - built with [`setup sdist`](installing-packages.html#setup-sdist). As - with `data-files` it can use a limited form of `*` wildcards in file - names. - -`extra-doc-files:` _filename list_ -: A list of additional files to be included in source distributions, - and also copied to the html directory when Haddock documentation is - generated. As with `data-files` it can use a limited form of `*` - wildcards in file names. - -`extra-tmp-files:` _filename list_ -: A list of additional files or directories to be removed by [`setup - clean`](installing-packages.html#setup-clean). These would typically - be additional files created by additional hooks, such as the scheme - described in the section on [system-dependent - parameters](#system-dependent-parameters). - -### Library ### - -The library section should contain the following fields: - -`exposed-modules:` _identifier list_ (required if this package contains a library) -: A list of modules added by this package. - -`exposed:` _boolean_ (default: `True`) -: Some Haskell compilers (notably GHC) support the notion of packages - being "exposed" or "hidden" which means the modules they provide can - be easily imported without always having to specify which package - they come from. 
However this only works effectively if the modules - provided by all exposed packages do not overlap (otherwise a module - import would be ambiguous). - - Almost all new libraries use hierarchical module names that do not - clash, so it is very uncommon to have to use this field. However it - may be necessary to set `exposed: False` for some old libraries that - use a flat module namespace or where it is known that the exposed - modules would clash with other common modules. - -`reexported-modules:` _exportlist _ -: Supported only in GHC 7.10 and later. A list of modules to _reexport_ from - this package. The syntax of this field is `orig-pkg:Name as NewName` to - reexport module `Name` from `orig-pkg` with the new name `NewName`. We also - support abbreviated versions of the syntax: if you omit `as NewName`, - we'll reexport without renaming; if you omit `orig-pkg`, then we will - automatically figure out which package to reexport from, if it's - unambiguous. - - Reexported modules are useful for compatibility shims when a package has - been split into multiple packages, and they have the useful property that - if a package provides a module, and another package reexports it under - the same name, these are not considered a conflict (as would be the case - with a stub module.) They can also be used to resolve name conflicts. - -The library section may also contain build information fields (see the -section on [build information](#build-information)). - -#### Opening an interpreter session #### - -While developing a package, it is often useful to make its code available inside -an interpreter session. This can be done with the `repl` command: - -~~~~~~~~~~~~~~~~ -cabal repl -~~~~~~~~~~~~~~~~ - -The name comes from the acronym [REPL], which stands for -"read-eval-print-loop". By default `cabal repl` loads the first component in a -package. If the package contains several named components, the name can be given -as an argument to `repl`. The name can be also optionally prefixed with the -component's type for disambiguation purposes. Example: - -~~~~~~~~~~~~~~~~ -cabal repl foo -cabal repl exe:foo -cabal repl test:bar -cabal repl bench:baz -~~~~~~~~~~~~~~~~ - -#### Freezing dependency versions #### - -If a package is built in several different environments, such as a development -environment, a staging environment and a production environment, it may be -necessary or desirable to ensure that the same dependency versions are -selected in each environment. This can be done with the `freeze` command: - -~~~~~~~~~~~~~~~~ -cabal freeze -~~~~~~~~~~~~~~~~ - -The command writes the selected version for all dependencies to the -`cabal.config` file. All environments which share this file will use the -dependency versions specified in it. - -#### Generating dependency version bounds #### - -Cabal also has the ability to suggest dependency version bounds that conform to -[Package Versioning Policy][PVP], which is a recommended versioning system for -publicly released Cabal packages. 
This is done by running the `gen-bounds` -command: - -~~~~~~~~~~~~~~~~ -cabal gen-bounds -~~~~~~~~~~~~~~~~ - -For example, given the following dependencies specified in `build-depends`: - -~~~~~~~~~~~~~~~~ -foo == 0.5.2 -bar == 1.1 -~~~~~~~~~~~~~~~~ - -`gen-bounds` will suggest changing them to the following: - -~~~~~~~~~~~~~~~~ -foo >= 0.5.2 && < 0.6 -bar >= 1.1 && < 1.2 -~~~~~~~~~~~~~~~~ - - -### Executables ### - -Executable sections (if present) describe executable programs contained -in the package and must have an argument after the section label, which -defines the name of the executable. This is a freeform argument but may -not contain spaces. - -The executable may be described using the following fields, as well as -build information fields (see the section on [build -information](#build-information)). - -`main-is:` _filename_ (required) -: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that it is the - `.hs` filename that must be listed, even if that file is generated - using a preprocessor. The source file must be relative to one of the - directories listed in `hs-source-dirs`. - -#### Running executables #### - -You can have Cabal build and run your executables by using the `run` command: - -~~~~~~~~~~~~~~~~ -$ cabal run EXECUTABLE [-- EXECUTABLE_FLAGS] -~~~~~~~~~~~~~~~~ - -This command will configure, build and run the executable `EXECUTABLE`. The -double dash separator is required to distinguish executable flags from `run`'s -own flags. If there is only one executable defined in the whole package, the -executable's name can be omitted. See the output of `cabal help run` for a list -of options you can pass to `cabal run`. - - -### Test suites ### - -Test suite sections (if present) describe package test suites and must have an -argument after the section label, which defines the name of the test suite. -This is a freeform argument, but may not contain spaces. It should be unique -among the names of the package's other test suites, the package's executables, -and the package itself. Using test suite sections requires at least Cabal -version 1.9.2. - -The test suite may be described using the following fields, as well as build -information fields (see the section on [build -information](#build-information)). - -`type:` _interface_ (required) -: The interface type and version of the test suite. Cabal supports two test - suite interfaces, called `exitcode-stdio-1.0` and `detailed-0.9`. Each of - these types may require or disallow other fields as described below. - -Test suites using the `exitcode-stdio-1.0` interface are executables -that indicate test failure with a non-zero exit code when run; they may provide -human-readable log information through the standard output and error channels. -This interface is provided primarily for compatibility with existing test -suites; it is preferred that new test suites be written for the `detailed-0.9` -interface. The `exitcode-stdio-1.0` type requires the `main-is` field. - -`main-is:` _filename_ (required: `exitcode-stdio-1.0`, disallowed: `detailed-0.9`) -: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that it is the - `.hs` filename that must be listed, even if that file is generated - using a preprocessor. The source file must be relative to one of the - directories listed in `hs-source-dirs`. This field is analogous to the - `main-is` field of an executable section. - -Test suites using the `detailed-0.9` interface are modules exporting the symbol -`tests :: IO [Test]`. 
The `Test` type is exported by the module -`Distribution.TestSuite` provided by Cabal. For more details, see the example below. - -The `detailed-0.9` interface allows Cabal and other test agents to inspect a -test suite's results case by case, producing detailed human- and -machine-readable log files. The `detailed-0.9` interface requires the -`test-module` field. - -`test-module:` _identifier_ (required: `detailed-0.9`, disallowed: `exitcode-stdio-1.0`) -: The module exporting the `tests` symbol. - -#### Example: Package using `exitcode-stdio-1.0` interface #### - -The example package description and executable source file below demonstrate -the use of the `exitcode-stdio-1.0` interface. - -foo.cabal: - -~~~~~~~~~~~~~~~~ -Name: foo -Version: 1.0 -License: BSD3 -Cabal-Version: >= 1.9.2 -Build-Type: Simple - -Test-Suite test-foo - type: exitcode-stdio-1.0 - main-is: test-foo.hs - build-depends: base -~~~~~~~~~~~~~~~~ - -test-foo.hs: - -~~~~~~~~~~~~~~~~ -module Main where - -import System.Exit (exitFailure) - -main = do - putStrLn "This test always fails!" - exitFailure -~~~~~~~~~~~~~~~~ - -#### Example: Package using `detailed-0.9` interface #### - -The example package description and test module source file below demonstrate -the use of the `detailed-0.9` interface. The test module also develops a simple -implementation of the interface set by `Distribution.TestSuite`, but in actual -usage the implementation would be provided by the library that provides the -testing facility. - -bar.cabal: - -~~~~~~~~~~~~~~~~ -Name: bar -Version: 1.0 -License: BSD3 -Cabal-Version: >= 1.9.2 -Build-Type: Simple - -Test-Suite test-bar - type: detailed-0.9 - test-module: Bar - build-depends: base, Cabal >= 1.9.2 -~~~~~~~~~~~~~~~~ - -Bar.hs: - -~~~~~~~~~~~~~~~~ -module Bar ( tests ) where - -import Distribution.TestSuite - -tests :: IO [Test] -tests = return [ Test succeeds, Test fails ] - where - succeeds = TestInstance - { run = return $ Finished Pass - , name = "succeeds" - , tags = [] - , options = [] - , setOption = \_ _ -> Right succeeds - } - fails = TestInstance - { run = return $ Finished $ Fail "Always fails!" - , name = "fails" - , tags = [] - , options = [] - , setOption = \_ _ -> Right fails - } -~~~~~~~~~~~~~~~~ - -#### Running test suites #### - -You can have Cabal run your test suites using its built-in test -runner: - -~~~~~~~~~~~~~~~~ -$ cabal configure --enable-tests -$ cabal build -$ cabal test -~~~~~~~~~~~~~~~~ - -See the output of `cabal help test` for a list of options you can pass -to `cabal test`. - -### Benchmarks ### - -Benchmark sections (if present) describe benchmarks contained in the package and -must have an argument after the section label, which defines the name of the -benchmark. This is a freeform argument, but may not contain spaces. It should -be unique among the names of the package's other benchmarks, the package's test -suites, the package's executables, and the package itself. Using benchmark -sections requires at least Cabal version 1.9.2. - -The benchmark may be described using the following fields, as well as build -information fields (see the section on [build information](#build-information)). - -`type:` _interface_ (required) -: The interface type and version of the benchmark. At the moment Cabal only - support one benchmark interface, called `exitcode-stdio-1.0`. 
- -Benchmarks using the `exitcode-stdio-1.0` interface are executables that -indicate failure to run the benchmark with a non-zero exit code when run; they -may provide human-readable information through the standard output and error -channels. - -`main-is:` _filename_ (required: `exitcode-stdio-1.0`) -: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that - it is the `.hs` filename that must be listed, even if that file is generated - using a preprocessor. The source file must be relative to one of the - directories listed in `hs-source-dirs`. This field is analogous to the - `main-is` field of an executable section. - -#### Example: Package using `exitcode-stdio-1.0` interface #### - -The example package description and executable source file below demonstrate -the use of the `exitcode-stdio-1.0` interface. - -foo.cabal: - -~~~~~~~~~~~~~~~~ -Name: foo -Version: 1.0 -License: BSD3 -Cabal-Version: >= 1.9.2 -Build-Type: Simple - -Benchmark bench-foo - type: exitcode-stdio-1.0 - main-is: bench-foo.hs - build-depends: base, time -~~~~~~~~~~~~~~~~ - -bench-foo.hs: - -~~~~~~~~~~~~~~~~ -{-# LANGUAGE BangPatterns #-} -module Main where - -import Data.Time.Clock - -fib 0 = 1 -fib 1 = 1 -fib n = fib (n-1) + fib (n-2) - -main = do - start <- getCurrentTime - let !r = fib 20 - end <- getCurrentTime - putStrLn $ "fib 20 took " ++ show (diffUTCTime end start) -~~~~~~~~~~~~~~~~ - -#### Running benchmarks #### - -You can have Cabal run your benchmark using its built-in benchmark runner: - -~~~~~~~~~~~~~~~~ -$ cabal configure --enable-benchmarks -$ cabal build -$ cabal bench -~~~~~~~~~~~~~~~~ - -See the output of `cabal help bench` for a list of options you can -pass to `cabal bench`. - -### Build information ### - -The following fields may be optionally present in a library, executable, test -suite or benchmark section, and give information for the building of the -corresponding library or executable. See also the sections on -[system-dependent parameters](#system-dependent-parameters) and -[configurations](#configurations) for a way to supply system-dependent values -for these fields. - -`build-depends:` _package list_ -: A list of packages needed to build this one. Each package can be - annotated with a version constraint. - - Version constraints use the operators `==, >=, >, <, <=` and a - version number. Multiple constraints can be combined using `&&` or - `||`. If no version constraint is specified, any version is assumed - to be acceptable. For example: - - ~~~~~~~~~~~~~~~~ - library - build-depends: - base >= 2, - foo >= 1.2 && < 1.3, - bar - ~~~~~~~~~~~~~~~~ - - Dependencies like `foo >= 1.2 && < 1.3` turn out to be very common - because it is recommended practise for package versions to - correspond to API versions. As of Cabal 1.6, there is a special - syntax to support this use: - - ~~~~~~~~~~~~~~~~ - build-depends: foo ==1.2.* - ~~~~~~~~~~~~~~~~ - - It is only syntactic sugar. It is exactly equivalent to `foo >= 1.2 && < 1.3`. - - Note: Prior to Cabal 1.8, `build-depends` specified in each section - were global to all sections. This was unintentional, but some packages - were written to depend on it, so if you need your `build-depends` to - be local to each section, you must specify at least - `Cabal-Version: >= 1.8` in your `.cabal` file. - - Note: Cabal 1.20 experimentally supported module thinning and - renaming in `build-depends`; however, this support has since been - removed and should not be used. 
- -`other-modules:` _identifier list_ -: A list of modules used by the component but not exposed to users. - For a library component, these would be hidden modules of the - library. For an executable, these would be auxiliary modules to be - linked with the file named in the `main-is` field. - - Note: Every module in the package *must* be listed in one of - `other-modules`, `exposed-modules` or `main-is` fields. - -`hs-source-dirs:` _directory list_ (default: "`.`") -: Root directories for the module hierarchy. - - For backwards compatibility, the old variant `hs-source-dir` is also - recognized. - -`default-extensions:` _identifier list_ -: A list of Haskell extensions used by every module. These determine - corresponding compiler options enabled for all files. Extension names are - the constructors of the [Extension][extension] type. For example, `CPP` - specifies that Haskell source files are to be preprocessed with a C - preprocessor. - -`other-extensions:` _identifier list_ -: A list of Haskell extensions used by some (but not necessarily all) modules. - From GHC version 6.6 onward, these may be specified by placing a `LANGUAGE` - pragma in the source files affected e.g. - - ~~~~~~~~~~~~~~~~ - {-# LANGUAGE CPP, MultiParamTypeClasses #-} - ~~~~~~~~~~~~~~~~ - - In Cabal-1.24 the dependency solver will use this and `default-extensions` information. - Cabal prior to 1.24 will abort compilation if the current compiler doesn't provide - the extensions. - - If you use some extensions conditionally, using CPP or conditional module lists, - it is good to replicate the condition in `other-extensions` declarations: - - ~~~~~~~~~~~~~~~~ - other-extensions: CPP - if impl(ghc >= 7.5) - other-extensions: PolyKinds - ~~~~~~~~~~~~~~~~ - - You could also omit the conditionally used extensions, as they are for information only, - but it is recommended to replicate them in `other-extensions` declarations. - -`build-tools:` _program list_ -: A list of programs, possibly annotated with versions, needed to - build this package, e.g. `c2hs >= 0.15, cpphs`.If no version - constraint is specified, any version is assumed to be acceptable. - -`buildable:` _boolean_ (default: `True`) -: Is the component buildable? Like some of the other fields below, - this field is more useful with the slightly more elaborate form of - the simple build infrastructure described in the section on - [system-dependent parameters](#system-dependent-parameters). - -`ghc-options:` _token list_ -: Additional options for GHC. You can often achieve the same effect - using the `extensions` field, which is preferred. - - Options required only by one module may be specified by placing an - `OPTIONS_GHC` pragma in the source file affected. - - As with many other fields, whitespace can be escaped by using Haskell string - syntax. Example: `ghc-options: -Wcompat "-with-rtsopts=-T -I1" -Wall`. - -`ghc-prof-options:` _token list_ -: Additional options for GHC when the package is built with profiling - enabled. - - Note that as of Cabal-1.24, the default profiling detail level defaults to - `exported-functions` for libraries and `toplevel-functions` for - executables. For GHC these correspond to the flags `-fprof-auto-exported` - and `-fprof-auto-top`. Prior to Cabal-1.24 the level defaulted to `none`. - These levels can be adjusted by the person building the package with the - `--profiling-detail` and `--library-profiling-detail` flags. 
- - It is typically better for the person building the package to pick the - profiling detail level rather than for the package author. So unless you - have special needs it is probably better not to specify any of the GHC - `-fprof-auto*` flags here. However if you wish to override the profiling - detail level, you can do so using the `ghc-prof-options` field: use - `-fno-prof-auto` or one of the other `-fprof-auto*` flags. - - -`ghc-shared-options:` _token list_ -: Additional options for GHC when the package is built as shared library. - The options specified via this field are combined with the ones specified - via `ghc-options`, and are passed to GHC during both the compile and - link phases. - -`includes:` _filename list_ -: A list of header files to be included in any compilations via C. - This field applies to both header files that are already installed - on the system and to those coming with the package to be installed. - These files typically contain function prototypes for foreign - imports used by the package. - -`install-includes:` _filename list_ -: A list of header files from this package to be installed into - `$libdir/includes` when the package is installed. Files listed in - `install-includes:` should be found in relative to the top of the - source tree or relative to one of the directories listed in - `include-dirs`. - - `install-includes` is typically used to name header files that - contain prototypes for foreign imports used in Haskell code in this - package, for which the C implementations are also provided with the - package. Note that to include them when compiling the package - itself, they need to be listed in the `includes:` field as well. - -`include-dirs:` _directory list_ -: A list of directories to search for header files, when preprocessing - with `c2hs`, `hsc2hs`, `cpphs` or the C preprocessor, and - also when compiling via C. - -`c-sources:` _filename list_ -: A list of C source files to be compiled and linked with the Haskell files. - -`js-sources:` _filename list_ -: A list of JavaScript source files to be linked with the Haskell files - (only for JavaScript targets). - -`extra-libraries:` _token list_ -: A list of extra libraries to link with. - -`extra-ghci-libraries:` _token list_ -: A list of extra libraries to be used instead of 'extra-libraries' when - the package is loaded with GHCi. - -`extra-lib-dirs:` _directory list_ -: A list of directories to search for libraries. - -`cc-options:` _token list_ -: Command-line arguments to be passed to the C compiler. Since the - arguments are compiler-dependent, this field is more useful with the - setup described in the section on [system-dependent - parameters](#system-dependent-parameters). - -`cpp-options:` _token list_ -: Command-line arguments for pre-processing Haskell code. Applies to - haskell source and other pre-processed Haskell source like .hsc .chs. - Does not apply to C code, that's what cc-options is for. - -`ld-options:` _token list_ -: Command-line arguments to be passed to the linker. Since the - arguments are compiler-dependent, this field is more useful with the - setup described in the section on [system-dependent - parameters](#system-dependent-parameters)>. - -`pkgconfig-depends:` _package list_ -: A list of [pkg-config] packages, needed to build this package. - They can be annotated with versions, e.g. `gtk+-2.0 >= 2.10, cairo - >= 1.0`. If no version constraint is specified, any version is - assumed to be acceptable. 
Cabal uses `pkg-config` to find if the - packages are available on the system and to find the extra - compilation and linker options needed to use the packages. - - If you need to bind to a C library that supports `pkg-config` (use - `pkg-config --list-all` to find out if it is supported) then it is - much preferable to use this field rather than hard code options into - the other fields. - -`frameworks:` _token list_ -: On Darwin/MacOS X, a list of frameworks to link to. See Apple's - developer documentation for more details on frameworks. This entry - is ignored on all other platforms. - -`extra-frameworks-dirs:` _directory list_ -: On Darwin/MacOS X, a list of directories to search for frameworks. - This entry is ignored on all other platforms. - -### Configurations ### - -Library and executable sections may include conditional -blocks, which test for various system parameters and -configuration flags. The flags mechanism is rather generic, -but most of the time a flag represents certain feature, that -can be switched on or off by the package user. -Here is an example package description file using -configurations: - -#### Example: A package containing a library and executable programs #### - -~~~~~~~~~~~~~~~~ -Name: Test1 -Version: 0.0.1 -Cabal-Version: >= 1.2 -License: BSD3 -Author: Jane Doe -Synopsis: Test package to test configurations -Category: Example - -Flag Debug - Description: Enable debug support - Default: False - -Flag WebFrontend - Description: Include API for web frontend. - -- Cabal checks if the configuration is possible, first - -- with this flag set to True and if not it tries with False - -Library - Build-Depends: base - Exposed-Modules: Testing.Test1 - Extensions: CPP - - if flag(debug) - GHC-Options: -DDEBUG - if !os(windows) - CC-Options: "-DDEBUG" - else - CC-Options: "-DNDEBUG" - - if flag(webfrontend) - Build-Depends: cgi > 0.42 - Other-Modules: Testing.WebStuff - -Executable test1 - Main-is: T1.hs - Other-Modules: Testing.Test1 - Build-Depends: base - - if flag(debug) - CC-Options: "-DDEBUG" - GHC-Options: -DDEBUG -~~~~~~~~~~~~~~~~ - -#### Layout #### - -Flags, conditionals, library and executable sections use layout to -indicate structure. This is very similar to the Haskell layout rule. -Entries in a section have to all be indented to the same level which -must be more than the section header. Tabs are not allowed to be used -for indentation. - -As an alternative to using layout you can also use explicit braces `{}`. -In this case the indentation of entries in a section does not matter, -though different fields within a block must be on different lines. Here -is a bit of the above example again, using braces: - -#### Example: Using explicit braces rather than indentation for layout #### - -~~~~~~~~~~~~~~~~ -Name: Test1 -Version: 0.0.1 -Cabal-Version: >= 1.2 -License: BSD3 -Author: Jane Doe -Synopsis: Test package to test configurations -Category: Example - -Flag Debug { - Description: Enable debug support - Default: False -} - -Library { - Build-Depends: base - Exposed-Modules: Testing.Test1 - Extensions: CPP - if flag(debug) { - GHC-Options: -DDEBUG - if !os(windows) { - CC-Options: "-DDEBUG" - } else { - CC-Options: "-DNDEBUG" - } - } -} -~~~~~~~~~~~~~~~~ - -#### Configuration Flags #### - -A flag section takes the flag name as an argument and may contain the -following fields. - -`description:` _freeform_ -: The description of this flag. - -`default:` _boolean_ (default: `True`) -: The default value of this flag. 
- - Note that this value may be [overridden in several - ways](installing-packages.html#controlling-flag-assignments"). The - rationale for having flags default to True is that users usually - want new features as soon as they are available. Flags representing - features that are not (yet) recommended for most users (such as - experimental features or debugging support) should therefore - explicitly override the default to False. - -`manual:` _boolean_ (default: `False`) -: By default, Cabal will first try to satisfy dependencies with the - default flag value and then, if that is not possible, with the - negated value. However, if the flag is manual, then the default - value (which can be overridden by commandline flags) will be used. - -#### Conditional Blocks #### - -Conditional blocks may appear anywhere inside a library or executable -section. They have to follow rather strict formatting rules. -Conditional blocks must always be of the shape - -~~~~~~~~~~~~~~~~ - `if `_condition_ - _property-descriptions-or-conditionals*_ -~~~~~~~~~~~~~~~~ - -or - -~~~~~~~~~~~~~~~~ - `if `_condition_ - _property-descriptions-or-conditionals*_ - `else` - _property-descriptions-or-conditionals*_ -~~~~~~~~~~~~~~~~ - -Note that the `if` and the condition have to be all on the same line. - -#### Conditions #### - -Conditions can be formed using boolean tests and the boolean operators -`||` (disjunction / logical "or"), `&&` (conjunction / logical "and"), -or `!` (negation / logical "not"). The unary `!` takes highest -precedence, `||` takes lowest. Precedence levels may be overridden -through the use of parentheses. For example, `os(darwin) && !arch(i386) -|| os(freebsd)` is equivalent to `(os(darwin) && !(arch(i386))) || -os(freebsd)`. - -The following tests are currently supported. - -`os(`_name_`)` -: Tests if the current operating system is _name_. The argument is - tested against `System.Info.os` on the target system. There is - unfortunately some disagreement between Haskell implementations - about the standard values of `System.Info.os`. Cabal canonicalises - it so that in particular `os(windows)` works on all implementations. - If the canonicalised os names match, this test evaluates to true, - otherwise false. The match is case-insensitive. - -`arch(`_name_`)` -: Tests if the current architecture is _name_. The argument is - matched against `System.Info.arch` on the target system. If the arch - names match, this test evaluates to true, otherwise false. The match - is case-insensitive. - -`impl(`_compiler_`)` -: Tests for the configured Haskell implementation. An optional version - constraint may be specified (for example `impl(ghc >= 6.6.1)`). If - the configured implementation is of the right type and matches the - version constraint, then this evaluates to true, otherwise false. - The match is case-insensitive. - -`flag(`_name_`)` -: Evaluates to the current assignment of the flag of the given name. - Flag names are case insensitive. Testing for flags that have not - been introduced with a flag section is an error. - -`true` -: Constant value true. - -`false` -: Constant value false. - -#### Resolution of Conditions and Flags #### - -If a package descriptions specifies configuration flags the package user can -[control these in several ways](installing-packages.html#controlling-flag-assignments). -If the user does not fix the value of a flag, Cabal will try to find a flag -assignment in the following way. 
- - * For each flag specified, it will assign its default value, evaluate - all conditions with this flag assignment, and check if all - dependencies can be satisfied. If this check succeeded, the package - will be configured with those flag assignments. - - * If dependencies were missing, the last flag (as by the order in - which the flags were introduced in the package description) is tried - with its alternative value and so on. This continues until either - an assignment is found where all dependencies can be satisfied, or - all possible flag assignments have been tried. - -To put it another way, Cabal does a complete backtracking search to find -a satisfiable package configuration. It is only the dependencies -specified in the `build-depends` field in conditional blocks that -determine if a particular flag assignment is satisfiable (`build-tools` -are not considered). The order of the declaration and the default value -of the flags determines the search order. Flags overridden on the -command line fix the assignment of that flag, so no backtracking will be -tried for that flag. - -If no suitable flag assignment could be found, the configuration phase -will fail and a list of missing dependencies will be printed. Note that -this resolution process is exponential in the worst case (i.e., in the -case where dependencies cannot be satisfied). There are some -optimizations applied internally, but the overall complexity remains -unchanged. - -### Meaning of field values when using conditionals ### - -During the configuration phase, a flag assignment is chosen, all -conditionals are evaluated, and the package description is combined into -a flat package descriptions. If the same field both inside a conditional -and outside then they are combined using the following rules. - - - * Boolean fields are combined using conjunction (logical "and"). - - * List fields are combined by appending the inner items to the outer - items, for example - - ~~~~~~~~~~~~~~~~ - other-extensions: CPP - if impl(ghc) - other-extensions: MultiParamTypeClasses - ~~~~~~~~~~~~~~~~ - - when compiled using GHC will be combined to - - ~~~~~~~~~~~~~~~~ - other-extensions: CPP, MultiParamTypeClasses - ~~~~~~~~~~~~~~~~ - - Similarly, if two conditional sections appear at the same nesting - level, properties specified in the latter will come after properties - specified in the former. - - * All other fields must not be specified in ambiguous ways. For - example - - ~~~~~~~~~~~~~~~~ - Main-is: Main.hs - if flag(useothermain) - Main-is: OtherMain.hs - ~~~~~~~~~~~~~~~~ - - will lead to an error. Instead use - - ~~~~~~~~~~~~~~~~ - if flag(useothermain) - Main-is: OtherMain.hs - else - Main-is: Main.hs - ~~~~~~~~~~~~~~~~ - -### Source Repositories ### - -It is often useful to be able to specify a source revision control -repository for a package. Cabal lets you specifying this information in -a relatively structured form which enables other tools to interpret and -make effective use of the information. For example the information -should be sufficient for an automatic tool to checkout the sources. - -Cabal supports specifying different information for various common -source control systems. Obviously not all automated tools will support -all source control systems. - -Cabal supports specifying repositories for different use cases. By -declaring which case we mean automated tools can be more useful. There -are currently two kinds defined: - - * The `head` kind refers to the latest development branch of the - package. 
This may be used for example to track activity of a project - or as an indication to outside developers what sources to get for - making new contributions. - - * The `this` kind refers to the branch and tag of a repository that - contains the sources for this version or release of a package. For most - source control systems this involves specifying a tag, id or hash of - some form and perhaps a branch. The purpose is to be able to - reconstruct the sources corresponding to a particular package - version. This might be used to indicate what sources to get if - someone needs to fix a bug in an older branch that is no longer an - active head branch. - -You can specify one kind or the other or both. As an example here are -the repositories for the Cabal library. Note that the `this` kind of -repository specifies a tag. - -~~~~~~~~~~~~~~~~ -source-repository head - type: darcs - location: http://darcs.haskell.org/cabal/ - -source-repository this - type: darcs - location: http://darcs.haskell.org/cabal-branches/cabal-1.6/ - tag: 1.6.1 -~~~~~~~~~~~~~~~~ - -The exact fields are as follows: - -`type:` _token_ -: The name of the source control system used for this repository. The - currently recognised types are: - - * `darcs` - * `git` - * `svn` - * `cvs` - * `mercurial` (or alias `hg`) - * `bazaar` (or alias `bzr`) - * `arch` - * `monotone` - - This field is required. - -`location:` _URL_ -: The location of the repository. The exact form of this field depends - on the repository type. For example: - - * for darcs: `http://code.haskell.org/foo/` - * for git: `git://github.com/foo/bar.git` - * for CVS: `anoncvs@cvs.foo.org:/cvs` - - This field is required. - -`module:` _token_ -: CVS requires a named module, as each CVS server can host multiple - named repositories. - - This field is required for the CVS repository type and should not - be used otherwise. - -`branch:` _token_ -: Many source control systems support the notion of a branch, as a - distinct concept from having repositories in separate locations. For - example CVS, SVN and git use branches, while darcs uses different - locations for different branches. If you need to specify a branch to - identify your repository, then specify it in this field. - - This field is optional. - -`tag:` _token_ -: A tag identifies a particular state of a source repository. The tag - can be used with a `this` repository kind to identify the state of - a repository corresponding to a particular package version or - release. The exact form of the tag depends on the repository type. - - This field is required for the `this` repository kind. - -`subdir:` _directory_ -: Some projects put the sources for multiple packages under a single - source repository. This field lets you specify the relative path - from the root of the repository to the top directory for the - package, i.e. the directory containing the package's `.cabal` file. - - This field is optional. It defaults to empty, which corresponds to the - root directory of the repository. - -### Downloading a package's source ### - -The `cabal get` command allows you to access a package's source code - either by -unpacking a tarball downloaded from Hackage (the default) or by checking out a -working copy from the package's source repository. - -~~~~~~~~~~~~~~~~ -$ cabal get [FLAGS] PACKAGES -~~~~~~~~~~~~~~~~ - -The `get` command supports the following options: - -`-d --destdir` _PATH_ -: Where to place the package source, defaults to (a subdirectory of) the - current directory. 
- -`-s --source-repository` _[head|this|...]_ -: Fork the package's source repository using the appropriate version control - system. The optional argument allows to choose a specific repository kind. - -## Custom setup scripts - -The optional `custom-setup` stanza contains information needed for the -compilation of custom `Setup.hs` scripts, - -~~~~~~~~~~~~~~~~ -custom-setup - setup-depends: - base >= 4.5 && < 4.11, - Cabal < 1.25 -~~~~~~~~~~~~~~~~ - -`setup-depends:` _package list_ -: The dependencies needed to compile `Setup.hs`. See the - [`build-depends`](#build-information) section for a description of the - syntax expected by this field. - -## Accessing data files from package code ## - -The placement on the target system of files listed in the `data-files` -field varies between systems, and in some cases one can even move -packages around after installation (see [prefix -independence](installing-packages.html#prefix-independence)). To enable -packages to find these files in a portable way, Cabal generates a module -called `Paths_`_pkgname_ (with any hyphens in _pkgname_ replaced by -underscores) during building, so that it may be imported by modules of -the package. This module defines a function - -~~~~~~~~~~~~~~~ -getDataFileName :: FilePath -> IO FilePath -~~~~~~~~~~~~~~~ - -If the argument is a filename listed in the `data-files` field, the -result is the name of the corresponding file on the system on which the -program is running. - -Note: If you decide to import the `Paths_`_pkgname_ module then it -*must* be listed in the `other-modules` field just like any other module -in your package. - -The `Paths_`_pkgname_ module is not platform independent so it does not -get included in the source tarballs generated by `sdist`. - -The `Paths_`_pkgname_ module also includes some other useful functions -and values, which record the version of the package and some other -directories which the package has been configured to be installed -into (e.g. data files live in `getDataDir`): - -~~~~~~~~~~~~~~~ -version :: Version - -getBinDir :: IO FilePath -getLibDir :: IO FilePath -getDynLibDir :: IO FilePath -getDataDir :: IO FilePath -getLibexecDir :: IO FilePath -getSysconfDir :: IO FilePath -~~~~~~~~~~~~~~~ - -### Accessing the package version ### - -The aforementioned auto generated `Paths_`_pkgname_ module also -exports the constant `version ::` [Version][data-version] which is -defined as the version of your package as specified in the `version` -field. - -## System-dependent parameters ## - -For some packages, especially those interfacing with C libraries, -implementation details and the build procedure depend on the build -environment. The `build-type` `Configure` can be used to handle many -such situations. In this case, `Setup.hs` should be: - -~~~~~~~~~~~~~~~~ -import Distribution.Simple -main = defaultMainWithHooks autoconfUserHooks -~~~~~~~~~~~~~~~~ - -Most packages, however, would probably do better using the `Simple` -build type and [configurations](#configurations). - -The `build-type` `Configure` differs from `Simple` in two ways: - -* The package root directory must contain a shell script called - `configure`. The configure step will run the script. This `configure` - script may be produced by [autoconf][] or may be hand-written. The - `configure` script typically discovers information about the system - and records it for later steps, e.g. by generating system-dependent - header files for inclusion in C source files and preprocessed Haskell - source files. 
(Clearly this won't work for Windows without MSYS or - Cygwin: other ideas are needed.) - -* If the package root directory contains a file called - _package_`.buildinfo` after the configuration step, subsequent steps - will read it to obtain additional settings for [build - information](#build-information) fields,to be merged with the ones - given in the `.cabal` file. In particular, this file may be generated - by the `configure` script mentioned above, allowing these settings to - vary depending on the build environment. - - The build information file should have the following structure: - - > _buildinfo_ - > - > `executable:` _name_ - > _buildinfo_ - > - > `executable:` _name_ - > _buildinfo_ - > ... - - where each _buildinfo_ consists of settings of fields listed in the - section on [build information](#build-information). The first one (if - present) relates to the library, while each of the others relate to - the named executable. (The names must match the package description, - but you don't have to have entries for all of them.) - -Neither of these files is required. If they are absent, this setup -script is equivalent to `defaultMain`. - -#### Example: Using autoconf #### - -This example is for people familiar with the [autoconf][] tools. - -In the X11 package, the file `configure.ac` contains: - -~~~~~~~~~~~~~~~~ -AC_INIT([Haskell X11 package], [1.1], [libraries@haskell.org], [X11]) - -# Safety check: Ensure that we are in the correct source directory. -AC_CONFIG_SRCDIR([X11.cabal]) - -# Header file to place defines in -AC_CONFIG_HEADERS([include/HsX11Config.h]) - -# Check for X11 include paths and libraries -AC_PATH_XTRA -AC_TRY_CPP([#include ],,[no_x=yes]) - -# Build the package if we found X11 stuff -if test "$no_x" = yes -then BUILD_PACKAGE_BOOL=False -else BUILD_PACKAGE_BOOL=True -fi -AC_SUBST([BUILD_PACKAGE_BOOL]) - -AC_CONFIG_FILES([X11.buildinfo]) -AC_OUTPUT -~~~~~~~~~~~~~~~~ - -Then the setup script will run the `configure` script, which checks for -the presence of the X11 libraries and substitutes for variables in the -file `X11.buildinfo.in`: - -~~~~~~~~~~~~~~~~ -buildable: @BUILD_PACKAGE_BOOL@ -cc-options: @X_CFLAGS@ -ld-options: @X_LIBS@ -~~~~~~~~~~~~~~~~ - -This generates a file `X11.buildinfo` supplying the parameters needed by -later stages: - -~~~~~~~~~~~~~~~~ -buildable: True -cc-options: -I/usr/X11R6/include -ld-options: -L/usr/X11R6/lib -~~~~~~~~~~~~~~~~ - -The `configure` script also generates a header file `include/HsX11Config.h` -containing C preprocessor defines recording the results of various tests. This -file may be included by C source files and preprocessed Haskell source files in -the package. - -Note: Packages using these features will also need to list additional files such -as `configure`, templates for `.buildinfo` files, files named only in -`.buildinfo` files, header files and so on in the `extra-source-files` field to -ensure that they are included in source distributions. They should also list -files and directories generated by `configure` in the `extra-tmp-files` field to -ensure that they are removed by `setup clean`. - -Quite often the files generated by `configure` need to be listed somewhere in -the package description (for example, in the `install-includes` field). However, -we usually don't want generated files to be included in the source tarball. The -solution is again provided by the `.buildinfo` file. 
In the above example, the -following line should be added to `X11.buildinfo`: - -~~~~~~~~~~~~~~~~ -install-includes: HsX11Config.h -~~~~~~~~~~~~~~~~ - -In this way, the generated `HsX11Config.h` file won't be included in the source -tarball in addition to `HsX11Config.h.in`, but it will be copied to the right -location during the install process. Packages that use custom `Setup.hs` scripts -can update the necessary fields programmatically instead of using the -`.buildinfo` file. - - -## Conditional compilation ## - -Sometimes you want to write code that works with more than one version -of a dependency. You can specify a range of versions for the dependency -in the `build-depends`, but how do you then write the code that can use -different versions of the API? - -Haskell lets you preprocess your code using the C preprocessor (either -the real C preprocessor, or `cpphs`). To enable this, add `extensions: -CPP` to your package description. When using CPP, Cabal provides some -pre-defined macros to let you test the version of dependent packages; -for example, suppose your package works with either version 3 or version -4 of the `base` package, you could select the available version in your -Haskell modules like this: - -~~~~~~~~~~~~~~~~ -#if MIN_VERSION_base(4,0,0) -... code that works with base-4 ... -#else -... code that works with base-3 ... -#endif -~~~~~~~~~~~~~~~~ - -In general, Cabal supplies a macro `MIN_VERSION_`_`package`_`_(A,B,C)` -for each package depended on via `build-depends`. This macro is true if -the actual version of the package in use is greater than or equal to -`A.B.C` (using the conventional ordering on version numbers, which is -lexicographic on the sequence, but numeric on each component, so for -example 1.2.0 is greater than 1.0.3). - -Since version 1.20, there is also the `MIN_TOOL_VERSION_`_`tool`_ family of -macros for conditioning on the version of build tools used to build the program -(e.g. `hsc2hs`). - -Cabal places the definitions of these macros into an -automatically-generated header file, which is included when -preprocessing Haskell source code by passing options to the C -preprocessor. - -Cabal also allows to detect when the source code is being used for generating -documentation. The `__HADDOCK_VERSION__` macro is defined only when compiling -via [haddock][] instead of a normal Haskell compiler. The value of the -`__HADDOCK_VERSION__` macro is defined as `A*1000 + B*10 + C`, where `A.B.C` is -the Haddock version. This can be useful for working around bugs in Haddock or -generating prettier documentation in some special cases. - -## More complex packages ## - -For packages that don't fit the simple schemes described above, you have -a few options: - - * By using the `build-type` `Custom`, you can supply your own - `Setup.hs` file, and customize the simple build infrastructure - using _hooks_. These allow you to perform additional actions - before and after each command is run, and also to specify - additional preprocessors. A typical `Setup.hs` may look like this: - - ~~~~~~~~~~~~~~~~ - import Distribution.Simple - main = defaultMainWithHooks simpleUserHooks { postHaddock = posthaddock } - - posthaddock args flags desc info = .... - ~~~~~~~~~~~~~~~~ - - See `UserHooks` in [Distribution.Simple][dist-simple] for the - details, but note that this interface is experimental, and likely - to change in future releases. 
- - If you use a custom `Setup.hs` file you should strongly consider adding a - `custom-setup` stanza with a `setup-depends` field to ensure that your - setup script does not break with future dependency versions. - - * You could delegate all the work to `make`, though this is unlikely - to be very portable. Cabal supports this with the `build-type` - `Make` and a trivial setup library [Distribution.Make][dist-make], - which simply parses the command line arguments and invokes `make`. - Here `Setup.hs` should look like this: - - ~~~~~~~~~~~~~~~~ - import Distribution.Make - main = defaultMain - ~~~~~~~~~~~~~~~~ - - The root directory of the package should contain a `configure` - script, and, after that has run, a `Makefile` with a default target - that builds the package, plus targets `install`, `register`, - `unregister`, `clean`, `dist` and `docs`. Some options to commands - are passed through as follows: - - * The `--with-hc-pkg`, `--prefix`, `--bindir`, `--libdir`, `--dynlibdir`, `--datadir`, - `--libexecdir` and `--sysconfdir` options to the `configure` command are - passed on to the `configure` script. In addition the value of the - `--with-compiler` option is passed in a `--with-hc` option and all - options specified with `--configure-option=` are passed on. - - * The `--destdir` option to the `copy` command becomes a setting - of a `destdir` variable on the invocation of `make copy`. The - supplied `Makefile` should provide a `copy` target, which will - probably look like this: - - ~~~~~~~~~~~~~~~~ - copy : - $(MAKE) install prefix=$(destdir)/$(prefix) \ - bindir=$(destdir)/$(bindir) \ - libdir=$(destdir)/$(libdir) \ - dynlibdir=$(destdir)/$(dynlibdir) \ - datadir=$(destdir)/$(datadir) \ - libexecdir=$(destdir)/$(libexecdir) \ - sysconfdir=$(destdir)/$(sysconfdir) \ - ~~~~~~~~~~~~~~~~ - - * Finally, with the `build-type` `Custom`, you can also write your - own setup script from scratch. It must conform to the interface - described in the section on [building and installing - packages](installing-packages.html), and you may use the Cabal - library for all or part of the work. One option is to copy the - source of `Distribution.Simple`, and alter it for your needs. Good - luck. 
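
As a concrete illustration of the hooks approach described above, here is a minimal sketch of a custom `Setup.hs` that attaches a `postBuild` hook to `simpleUserHooks`; the printed message is purely illustrative, and a real script would normally inspect the `PackageDescription` and `LocalBuildInfo` arguments instead of ignoring them:

~~~~~~~~~~~~~~~~
-- Minimal illustrative sketch: run an extra action after `setup build`.
-- The message below is a placeholder for whatever post-processing the
-- package actually needs.
import Distribution.Simple

main :: IO ()
main = defaultMainWithHooks simpleUserHooks
  { postBuild = \_args _buildFlags _pkgDescr _localBuildInfo ->
      putStrLn "postBuild hook: build finished"
  }
~~~~~~~~~~~~~~~~

As noted above, pairing such a script with a `custom-setup` stanza helps keep the version of the `Cabal` library it compiles against under control.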
- - - -[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html -[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html -[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License -[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension -[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType -[data-version]: http://hackage.haskell.org/packages/archive/base/latest/doc/html/Data-Version.html -[alex]: http://www.haskell.org/alex/ -[autoconf]: http://www.gnu.org/software/autoconf/ -[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ -[cpphs]: http://projects.haskell.org/cpphs/ -[greencard]: http://hackage.haskell.org/package/greencard -[haddock]: http://www.haskell.org/haddock/ -[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ -[happy]: http://www.haskell.org/happy/ -[Hackage]: http://hackage.haskell.org/ -[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ -[REPL]: http://en.wikipedia.org/wiki/Read%E2%80%93eval%E2%80%93print_loop -[PVP]: https://wiki.haskell.org/Package_versioning_policy diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/index.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/index.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/index.markdown 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/index.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,201 +0,0 @@ -% Cabal User Guide -**Version: 1.24.1.0** - -Cabal is the standard package system for [Haskell] software. It helps -people to configure, build and install Haskell software and to -distribute it easily to other users and developers. - -There is a command line tool called `cabal` for working with Cabal -packages. It helps with installing existing packages and also helps -people developing their own packages. It can be used to work with local -packages or to install packages from online package archives, including -automatically installing dependencies. By default it is configured to -use [Hackage] which is Haskell's central package archive that contains -thousands of libraries and applications in the Cabal package format. - -# Contents # - - * [Introduction](#introduction) - - [What's in a package](#whats-in-a-package) - - [A tool for working with packages](#a-tool-for-working-with-packages) - * [Building, installing and managing packages](installing-packages.html) - * [Creating packages](developing-packages.html) - * [Reporting bugs and deficiencies](misc.html#reporting-bugs-and-deficiencies) - * [Stability of Cabal interfaces](misc.html#stability-of-cabal-interfaces) - -# Introduction # - -Cabal is a package system for Haskell software. The point of a package -system is to enable software developers and users to easily distribute, -use and reuse software. A package system makes it easier for developers -to get their software into the hands of users. Equally importantly, it -makes it easier for software developers to be able to reuse software -components written by other developers. - -Packaging systems deal with packages and with Cabal we call them _Cabal -packages_. The Cabal package is the unit of distribution. Every Cabal -package has a name and a version number which are used to identify the -package, e.g. `filepath-1.0`. - -Cabal packages can depend on other Cabal packages. 
There are tools -to enable automated package management. This means it is possible for -developers and users to install a package plus all of the other Cabal -packages that it depends on. It also means that it is practical to make -very modular systems using lots of packages that reuse code written by -many developers. - -Cabal packages are source based and are typically (but not necessarily) -portable to many platforms and Haskell implementations. The Cabal -package format is designed to make it possible to translate into other -formats, including binary packages for various systems. - -When distributed, Cabal packages use the standard compressed tarball -format, with the file extension `.tar.gz`, e.g. `filepath-1.0.tar.gz`. - -Note that packages are not part of the Haskell language, rather they -are a feature provided by the combination of Cabal and GHC (and several -other Haskell implementations). - - -## A tool for working with packages ## - -There is a command line tool, called "`cabal`", that users and developers -can use to build and install Cabal packages. It can be used for both -local packages and for packages available remotely over the network. It -can automatically install Cabal packages plus any other Cabal packages -they depend on. - -Developers can use the tool with packages in local directories, e.g. - -~~~~~~~~~~~~~~~~ -cd foo/ -cabal install -~~~~~~~~~~~~~~~~ - -While working on a package in a local directory, developers can run the -individual steps to configure and build, and also generate documentation -and run test suites and benchmarks. - -It is also possible to install several local packages at once, e.g. - -~~~~~~~~~~~~~~~~ -cabal install foo/ bar/ -~~~~~~~~~~~~~~~~ - -Developers and users can use the tool to install packages from remote -Cabal package archives. By default, the `cabal` tool is configured to -use the central Haskell package archive called [Hackage] but it -is possible to use it with any other suitable archive. - -~~~~~~~~~~~~~~~~ -cabal install xmonad -~~~~~~~~~~~~~~~~ - -This will install the `xmonad` package plus all of its dependencies. - -In addition to packages that have been published in an archive, -developers can install packages from local or remote tarball files, -for example - -~~~~~~~~~~~~~~~~ -cabal install foo-1.0.tar.gz -cabal install http://example.com/foo-1.0.tar.gz -~~~~~~~~~~~~~~~~ - -Cabal provides a number of ways for a user to customise how and where a -package is installed. They can decide where a package will be installed, -which Haskell implementation to use and whether to build optimised code -or build with the ability to profile code. It is not expected that users -will have to modify any of the information in the `.cabal` file. - -For full details, see the section on [building and installing -packages](installing-packages.html). - -Note that `cabal` is not the only tool for working with Cabal packages. -Due to the standardised format and a library for reading `.cabal` files, -there are several other special-purpose tools. - -## What's in a package ## - -A Cabal package consists of: - - * Haskell software, including libraries, executables and tests - * metadata about the package in a standard human and machine - readable format (the "`.cabal`" file) - * a standard interface to build the package (the "`Setup.hs`" file) - -The `.cabal` file contains information about the package, supplied by -the package author. In particular it lists the other Cabal packages -that the package depends on. 
- -For full details on what goes in the `.cabal` and `Setup.hs` files, and -for all the other features provided by the build system, see the section -on [developing packages](developing-packages.html). - - -## Cabal featureset ## - -Cabal and its associated tools and websites covers: - - * a software build system - * software configuration - * packaging for distribution - * automated package management - * natively using the `cabal` command line tool; or - * by translation into native package formats such as RPM or deb - * web and local Cabal package archives - * central Hackage website with 1000's of Cabal packages - -Some parts of the system can be used without others. In particular the -built-in build system for simple packages is optional: it is possible -to use custom build systems. - -## Similar systems ## - -The Cabal system is roughly comparable with the system of Python Eggs, -Ruby Gems or Perl distributions. Each system has a notion of -distributable packages, and has tools to manage the process of -distributing and installing packages. - -Hackage is an online archive of Cabal packages. It is roughly comparable -to CPAN but with rather fewer packages (around 5,000 vs 28,000). - -Cabal is often compared with autoconf and automake and there is some -overlap in functionality. The most obvious similarity is that the -command line interface for actually configuring and building packages -follows the same steps and has many of the same configuration -parameters. - -~~~~~~~~~~ -./configure --prefix=... -make -make install -~~~~~~~~~~ - -compared to - -~~~~~~~~~~ -cabal configure --prefix=... -cabal build -cabal install -~~~~~~~~~~ - -Cabal's build system for simple packages is considerably less flexible -than make/automake, but has builtin knowledge of how to build Haskell -code and requires very little manual configuration. Cabal's simple build -system is also portable to Windows, without needing a Unix-like -environment such as cygwin/mingwin. - -Compared to autoconf, Cabal takes a somewhat different approach to -package configuration. Cabal's approach is designed for automated -package management. Instead of having a configure script that tests for -whether dependencies are available, Cabal packages specify their -dependencies. There is some scope for optional and conditional -dependencies. By having package authors specify dependencies it makes it -possible for tools to install a package and all of its dependencies -automatically. It also makes it possible to translate (in a -mostly-automatically way) into another package format like RPM or deb -which also have automatic dependency resolution. - -[Haskell]: http://www.haskell.org/ -[Hackage]: http://hackage.haskell.org/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/installing-packages.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/installing-packages.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/installing-packages.markdown 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/installing-packages.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,1303 +0,0 @@ -% Cabal User Guide - -# Configuration # - -## Overview ## - -The global configuration file for `cabal-install` is `~/.cabal/config`. If you -do not have this file, `cabal` will create it for you on the first call to -`cabal update`. 
Alternatively, you can explicitly ask `cabal` to create it for -you using - -> `cabal user-config update` - -Most of the options in this configuration file are also available as command -line arguments, and the corresponding documentation can be used to look up their -meaning. The created configuration file only specifies values for a handful of -options. Most options are left at their default value, which the file documents; -for instance, - -~~~~~~~~~~~~~~~~ --- executable-stripping: True -~~~~~~~~~~~~~~~~ - -means that the configuration file currently does not specify a value for the -`executable-stripping` option (the line is commented out), and that the default -is `True`; if you wanted to disable stripping of executables by default, you -would change this line to - -~~~~~~~~~~~~~~~~ -executable-stripping: False -~~~~~~~~~~~~~~~~ - -You can also use `cabal user-config update` to migrate configuration files -created by older versions of `cabal`. - -## Repository specification ## - -An important part of the configuration is the specification of the repository. -When `cabal` creates a default config file, it configures the repository to -be the central Hackage server: - -~~~~~~~~~~~~~~~~ -repository hackage.haskell.org - url: http://hackage.haskell.org/ -~~~~~~~~~~~~~~~~ - -The name of the repository is given on the first line, and can be anything; -packages downloaded from this repository will be cached under -`~/.cabal/packages/hackage.haskell.org` (or whatever name you specify; you can -change the prefix by changing the value of `remote-repo-cache`). If you want, -you can configure multiple repositories, and `cabal` will combine them and be -able to download packages from any of them. - -### Using secure repositories ### - -For repositories that support the TUF security infrastructure (this includes -Hackage), you can enable secure access to the repository by specifying: - -~~~~~~~~~~~~~~~~ -repository hackage.haskell.org - url: http://hackage.haskell.org/ - secure: True - root-keys: <root-key-IDs> - key-threshold: <key-threshold> -~~~~~~~~~~~~~~~~ - -The `<root-key-IDs>` and `<key-threshold>` values are used for bootstrapping. As -part of the TUF infrastructure the repository will contain a file `root.json` -(for instance, -[http://hackage.haskell.org/root.json](http://hackage.haskell.org/root.json)) -which the client needs in order to do verification. However, how can `cabal` verify the -`root.json` file _itself_? This is known as bootstrapping: if you specify a list -of root key IDs and a corresponding threshold, `cabal` will verify that the -downloaded `root.json` file has been signed with at least `<key-threshold>` -keys from your set of `<root-key-IDs>`. - -You can omit these two fields, but this is not recommended. In that case `cabal` -will download the `root.json` file and use it without verification. Although -this bootstrapping step is then unsafe, all subsequent access is secure -(provided that the downloaded `root.json` was not tampered with). Of course, -adding `root-keys` and `key-threshold` to your repository specification only -shifts the problem, because now you somehow need to make sure that the key IDs -you received were the right ones. How that is done is however outside the scope -of `cabal` proper. - -More information about the security infrastructure can be found at -[https://github.com/well-typed/hackage-security](https://github.com/well-typed/hackage-security). - -### Legacy repositories ### - -Currently `cabal` supports two kinds of “legacy” repositories. The -first is specified using - -~~~~~~~~~~~~~~~~ -remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive -~~~~~~~~~~~~~~~~ - -This is just syntactic sugar for - -~~~~~~~~~~~~~~~~ -repository hackage.haskell.org - url: hackage.haskell.org:http://hackage.haskell.org/packages/archive -~~~~~~~~~~~~~~~~ - -although, in (and only in) the specific case of Hackage, the URL -`http://hackage.haskell.org/packages/archive` will be silently translated to -`http://hackage.haskell.org/`. - -The second kind of legacy repository is the so-called “local” -repository: - -~~~~~~~~~~~~~~~~ -local-repo: my-local-repo:/path/to/local/repo -~~~~~~~~~~~~~~~~ - -This can be used to access repositories on the local file system. However, the -layout of these local repositories is different from the layout of remote -repositories, and usage of these local repositories is deprecated. - -### Secure local repositories ### - -If you want to use repositories on your local file system, it is recommended -instead to use a _secure_ local repository: - -~~~~~~~~~~~~~~~~ -repository my-local-repo - url: file:/path/to/local/repo - secure: True - root-keys: <root-key-IDs> - key-threshold: <key-threshold> -~~~~~~~~~~~~~~~~ - -The layout of these secure local repos matches the layout of remote repositories -exactly; the -[hackage-repo-tool](http://hackage.haskell.org/package/hackage-repo-tool) can be -used to create and manage such repositories. - -# Building and installing packages # - -After you've unpacked a Cabal package, you can build it by moving into -the root directory of the package and running the `cabal` tool there: - -> `cabal [command] [option...]` - -The _command_ argument selects a particular step in the build/install process. - -You can also get a summary of the command syntax with - -> `cabal help` - -Alternatively, you can also use the `Setup.hs` or `Setup.lhs` script: - -> `runhaskell Setup.hs [command] [option...]` - -For the summary of the command syntax, run: - -> `cabal help` - -or - -> `runhaskell Setup.hs --help` - -## Building and installing a system package ## - -~~~~~~~~~~~~~~~~ -runhaskell Setup.hs configure --ghc -runhaskell Setup.hs build -runhaskell Setup.hs install -~~~~~~~~~~~~~~~~ - -The first line readies the system to build the tool using GHC; for -example, it checks that GHC exists on the system. The second line -performs the actual building, while the last both copies the build -results to some permanent place and registers the package with GHC. - -## Building and installing a user package ## - -~~~~~~~~~~~~~~~~ -runhaskell Setup.hs configure --user -runhaskell Setup.hs build -runhaskell Setup.hs install -~~~~~~~~~~~~~~~~ - -The package is installed under the user's home directory and is -registered in the user's package database (`--user`). - -## Installing packages from Hackage ## - -The `cabal` tool can also download, configure, build and install a [Hackage] -package and all of its dependencies in a single step. To do this, run: - -~~~~~~~~~~~~~~~~ -cabal install [PACKAGE...] -~~~~~~~~~~~~~~~~ - -To browse the list of available packages, visit the [Hackage] web site. - -## Developing with sandboxes ## - -By default, any dependencies of the package are installed into the global or -user package databases (e.g. using `cabal install --only-dependencies`). If -you're building several different packages that have incompatible dependencies, -this can cause the build to fail. 
One way to avoid this problem is to build each -package in an isolated environment ("sandbox"), with a sandbox-local package -database. Because sandboxes are per-project, inconsistent dependencies can be -simply disallowed. - -For more on sandboxes, see also -[this article](http://coldwa.st/e/blog/2013-08-20-Cabal-sandbox.html). - -### Sandboxes: basic usage ### - -To initialise a fresh sandbox in the current directory, run `cabal sandbox -init`. All subsequent commands (such as `build` and `install`) from this point -will use the sandbox. - -~~~~~~~~~~~~~~~ -$ cd /path/to/my/haskell/library -$ cabal sandbox init # Initialise the sandbox -$ cabal install --only-dependencies # Install dependencies into the sandbox -$ cabal build # Build your package inside the sandbox -~~~~~~~~~~~~~~~ - -It can be useful to make a source package available for installation in the -sandbox - for example, if your package depends on a patched or an unreleased -version of a library. This can be done with the `cabal sandbox add-source` -command - think of it as "local [Hackage]". If an add-source dependency is later -modified, it is reinstalled automatically. - -~~~~~~~~~~~~~~~ -$ cabal sandbox add-source /my/patched/library # Add a new add-source dependency -$ cabal install --dependencies-only # Install it into the sandbox -$ cabal build # Build the local package -$ $EDITOR /my/patched/library/Source.hs # Modify the add-source dependency -$ cabal build # Modified dependency is automatically reinstalled -~~~~~~~~~~~~~~~ - -Normally, the sandbox settings (such as optimisation level) are inherited from -the main Cabal config file (`$HOME/cabal/config`). Sometimes, though, you need -to change some settings specifically for a single sandbox. You can do this by -creating a `cabal.config` file in the same directory with your -`cabal.sandbox.config` (which was created by `sandbox init`). This file has the -same syntax as the main Cabal config file. - -~~~~~~~~~~~~~~~ -$ cat cabal.config -documentation: True -constraints: foo == 1.0, bar >= 2.0, baz -$ cabal build # Uses settings from the cabal.config file -~~~~~~~~~~~~~~~ - -When you have decided that you no longer want to build your package inside a -sandbox, just delete it: - -~~~~~~~~~~~~~~~ -$ cabal sandbox delete # Built-in command -$ rm -rf .cabal-sandbox cabal.sandbox.config # Alternative manual method -~~~~~~~~~~~~~~~ - -### Sandboxes: advanced usage ### - -The default behaviour of the `add-source` command is to track modifications done -to the added dependency and reinstall the sandbox copy of the package when -needed. Sometimes this is not desirable: in these cases you can use `add-source ---snapshot`, which disables the change tracking. In addition to `add-source`, -there are also `list-sources` and `delete-source` commands. - -Sometimes one wants to share a single sandbox between multiple packages. This -can be easily done with the `--sandbox` option: - -~~~~~~~~~~~~~~~ -$ mkdir -p /path/to/shared-sandbox -$ cd /path/to/shared-sandbox -$ cabal sandbox init --sandbox . -$ cd /path/to/package-a -$ cabal sandbox init --sandbox /path/to/shared-sandbox -$ cd /path/to/package-b -$ cabal sandbox init --sandbox /path/to/shared-sandbox -~~~~~~~~~~~~~~~ - -Note that `cabal sandbox init --sandbox .` puts all sandbox files into the -current directory. By default, `cabal sandbox init` initialises a new sandbox in -a newly-created subdirectory of the current working directory -(`./.cabal-sandbox`). 
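
As a brief illustration of the `list-sources` and `delete-source` commands mentioned earlier in this section (the path is only an example):

~~~~~~~~~~~~~~~
$ cabal sandbox add-source /my/patched/library    # Register a local source dependency
$ cabal sandbox list-sources                      # Show the registered add-source dependencies
$ cabal sandbox delete-source /my/patched/library # Stop tracking it again
~~~~~~~~~~~~~~~

The exact behaviour and output may differ between `cabal-install` versions; see `cabal help sandbox` for the authoritative list of subcommands.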
- -Using multiple different compiler versions simultaneously is also supported, via -the `-w` option: - -~~~~~~~~~~~~~~~ -$ cabal sandbox init -$ cabal install --only-dependencies -w /path/to/ghc-1 # Install dependencies for both compilers -$ cabal install --only-dependencies -w /path/to/ghc-2 -$ cabal configure -w /path/to/ghc-1 # Build with the first compiler -$ cabal build -$ cabal configure -w /path/to/ghc-2 # Build with the second compiler -$ cabal build -~~~~~~~~~~~~~~~ - -It can be occasionally useful to run the compiler-specific package manager tool -(e.g. `ghc-pkg`) tool on the sandbox package DB directly (for example, you may -need to unregister some packages). The `cabal sandbox hc-pkg` command is a -convenient wrapper that runs the compiler-specific package manager tool with the -arguments: - -~~~~~~~~~~~~~~~ -$ cabal -v sandbox hc-pkg list -Using a sandbox located at /path/to/.cabal-sandbox -'ghc-pkg' '--global' '--no-user-package-conf' - '--package-conf=/path/to/.cabal-sandbox/i386-linux-ghc-7.4.2-packages.conf.d' - 'list' -[...] -~~~~~~~~~~~~~~~ - -The `--require-sandbox` option makes all sandbox-aware commands -(`install`/`build`/etc.) exit with error if there is no sandbox present. This -makes it harder to accidentally modify the user package database. The option can -be also turned on via the per-user configuration file (`~/.cabal/config`) or the -per-project one (`$PROJECT_DIR/cabal.config`). The error can be squelched with -`--no-require-sandbox`. - -The option `--sandbox-config-file` allows to specify the location of the -`cabal.sandbox.config` file (by default, `cabal` searches for it in the current -directory). This provides the same functionality as shared sandboxes, but -sometimes can be more convenient. Example: - -~~~~~~~~~~~~~~~ -$ mkdir my/sandbox -$ cd my/sandbox -$ cabal sandbox init -$ cd /path/to/my/project -$ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install -# Uses the sandbox located at /path/to/my/sandbox/.cabal-sandbox -$ cd ~ -$ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install -# Still uses the same sandbox -~~~~~~~~~~~~~~~ - -The sandbox config file can be also specified via the `CABAL_SANDBOX_CONFIG` -environment variable. - -Finally, the flag `--ignore-sandbox` lets you temporarily ignore an existing -sandbox: - -~~~~~~~~~~~~~~~ -$ mkdir my/sandbox -$ cd my/sandbox -$ cabal sandbox init -$ cabal --ignore-sandbox install text -# Installs 'text' in the user package database ('~/.cabal'). -~~~~~~~~~~~~~~~ - -## Creating a binary package ## - -When creating binary packages (e.g. for Red Hat or Debian) one needs to -create a tarball that can be sent to another system for unpacking in the -root directory: - -~~~~~~~~~~~~~~~~ -runhaskell Setup.hs configure --prefix=/usr -runhaskell Setup.hs build -runhaskell Setup.hs copy --destdir=/tmp/mypkg -tar -czf mypkg.tar.gz /tmp/mypkg/ -~~~~~~~~~~~~~~~~ - -If the package contains a library, you need two additional steps: - -~~~~~~~~~~~~~~~~ -runhaskell Setup.hs register --gen-script -runhaskell Setup.hs unregister --gen-script -~~~~~~~~~~~~~~~~ - -This creates shell scripts `register.sh` and `unregister.sh`, which must -also be sent to the target system. After unpacking there, the package -must be registered by running the `register.sh` script. The -`unregister.sh` script would be used in the uninstall procedure of the -package. Similar steps may be used for creating binary packages for -Windows. 
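-
-For illustration only, the corresponding steps on the target system might
-look roughly as follows, assuming the tarball was created relative to the
-staging directory so that it unpacks directly into the root directory
-(the package name is the hypothetical `mypkg` from above):
-
-~~~~~~~~~~~~~~~~
-tar -xzf mypkg.tar.gz -C /    # Unpack the staged files into the root directory
-sh register.sh                # Register the library (if the package contains one)
-~~~~~~~~~~~~~~~~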
-
-
-The following options are understood by all commands:
-
-`--help`, `-h` or `-?`
-: List the available options for the command.
-
-`--verbose=`_n_ or `-v`_n_
-: Set the verbosity level (0-3). The normal level is 1; a missing _n_
-  defaults to 2.
-
-The various commands and the additional options they support are
-described below. In the simple build infrastructure, any other options
-will be reported as errors.
-
-## setup configure ##
-
-Prepare to build the package. Typically, this step checks that the
-target platform is capable of building the package, and discovers
-platform-specific features that are needed during the build.
-
-The user may also adjust the behaviour of later stages using the options
-listed in the following subsections. In the simple build
-infrastructure, the values supplied via these options are recorded in a
-private file read by later stages.
-
-If a user-supplied `configure` script is run (see the section on
-[system-dependent
-parameters](developing-packages.html#system-dependent-parameters) or on
-[complex packages](developing-packages.html#more-complex-packages)), it
-is passed the `--with-hc-pkg`, `--prefix`, `--bindir`, `--libdir`, `--dynlibdir`,
-`--datadir`, `--libexecdir` and `--sysconfdir` options. In addition, the
-value of the `--with-compiler` option is passed in a `--with-hc` option
-and all options specified with `--configure-option=` are passed on.
-
-### Programs used for building ###
-
-The following options govern the programs used to process the source
-files of a package:
-
-`--ghc` or `-g`, `--jhc`, `--lhc`, `--uhc`
-: Specify which Haskell implementation to use to build the package.
-  At most one of these flags may be given. If none is given, the
-  implementation under which the setup script was compiled or
-  interpreted is used.
-
-`--with-compiler=`_path_ or `-w`_path_
-: Specify the path to a particular compiler. If given, this must match
-  the implementation selected above. The default is to search for the
-  usual name of the selected implementation.
-
-  This flag also sets the default value of the `--with-hc-pkg` option
-  to the package tool for this compiler. Check the output of `setup
-  configure -v` to ensure that it finds the right package tool (or use
-  `--with-hc-pkg` explicitly).
-
-
-`--with-hc-pkg=`_path_
-: Specify the path to the package tool, e.g. `ghc-pkg`. The package
-  tool must be compatible with the compiler specified by
-  `--with-compiler`. If this option is omitted, the default value is
-  determined from the compiler selected.
-
-`--with-`_`prog`_`=`_path_
-: Specify the path to the program _prog_. Any program known to Cabal
-  can be used in place of _prog_. It can either be a full path or the
-  name of a program that can be found on the program search path. For
-  example: `--with-ghc=ghc-6.6.1` or
-  `--with-cpphs=/usr/local/bin/cpphs`.
-  The full list of accepted programs is not enumerated in this user guide.
-  Rather, run `cabal install --help` to view the list.
-
-`--`_`prog`_`-options=`_options_
-: Specify additional options to the program _prog_. Any program known
-  to Cabal can be used in place of _prog_. For example:
-  `--alex-options="--template=mytemplatedir/"`. The _options_ argument is split
-  into program options based on spaces. Any options containing embedded
-  spaces need to be quoted, for example
-  `--foo-options='--bar="C:\Program File\Bar"'`. As an alternative
-  that takes only one option at a time but avoids the need to quote,
-  use `--`_`prog`_`-option` instead.
- -`--`_`prog`_`-option=`_option_ -: Specify a single additional option to the program _prog_. For - passing an option that contain embedded spaces, such as a file name - with embedded spaces, using this rather than `--`_`prog`_`-options` - means you do not need an additional level of quoting. Of course if - you are using a command shell you may still need to quote, for - example `--foo-options="--bar=C:\Program File\Bar"`. - -All of the options passed with either `--`_`prog`_`-options` or -`--`_`prog`_`-option` are passed in the order they were specified on the -configure command line. - -### Installation paths ### - -The following options govern the location of installed files from a -package: - -`--prefix=`_dir_ -: The root of the installation. For example for a global install you - might use `/usr/local` on a Unix system, or `C:\Program Files` on a - Windows system. The other installation paths are usually - subdirectories of _prefix_, but they don't have to be. - - In the simple build system, _dir_ may contain the following path - variables: `$pkgid`, `$pkg`, `$version`, `$compiler`, `$os`, - `$arch`, `$abi`, `$abitag` - -`--bindir=`_dir_ -: Executables that the user might invoke are installed here. - - In the simple build system, _dir_ may contain the following path - variables: `$prefix`, `$pkgid`, `$pkg`, `$version`, `$compiler`, - `$os`, `$arch`, `$abi`, `$abitag - -`--libdir=`_dir_ -: Object-code libraries are installed here. - - In the simple build system, _dir_ may contain the following path - variables: `$prefix`, `$bindir`, `$pkgid`, `$pkg`, `$version`, - `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--dynlibdir=`_dir_ -: Dynamic libraries are installed here. - - By default, this is set to `$libdir/$abi`, which is usually not equal to - `$libdir/$libsubdir`. - - In the simple build system, _dir_ may contain the following path - variables: `$prefix`, `$bindir`, `$libdir`, `$pkgid`, `$pkg`, `$version`, - `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - - -`--libexecdir=`_dir_ -: Executables that are not expected to be invoked directly by the user - are installed here. - - In the simple build system, _dir_ may contain the following path - variables: `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, - `$pkg`, `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--datadir`=_dir_ -: Architecture-independent data files are installed here. - - In the simple build system, _dir_ may contain the following path - variables: `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--sysconfdir=`_dir_ -: Installation directory for the configuration files. - - In the simple build system, _dir_ may contain the following path variables: - `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, `$pkg`, `$version`, - `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -In addition the simple build system supports the following installation path options: - -`--libsubdir=`_dir_ -: A subdirectory of _libdir_ in which libraries are actually - installed. For example, in the simple build system on Unix, the - default _libdir_ is `/usr/local/lib`, and _libsubdir_ contains the - package identifier and compiler, e.g. `mypkg-0.2/ghc-6.4`, so - libraries would be installed in `/usr/local/lib/mypkg-0.2/ghc-6.4`. 
- - _dir_ may contain the following path variables: `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--datasubdir=`_dir_ -: A subdirectory of _datadir_ in which data files are actually - installed. - - _dir_ may contain the following path variables: `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--docdir=`_dir_ -: Documentation files are installed relative to this directory. - - _dir_ may contain the following path variables: `$prefix`, `$bindir`, - `$libdir`, `$libsubdir`, `$datadir`, `$datasubdir`, `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--htmldir=`_dir_ -: HTML documentation files are installed relative to this directory. - - _dir_ may contain the following path variables: `$prefix`, `$bindir`, - `$libdir`, `$libsubdir`, `$datadir`, `$datasubdir`, `$docdir`, `$pkgid`, - `$pkg`, `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--program-prefix=`_prefix_ -: Prepend _prefix_ to installed program names. - - _prefix_ may contain the following path variables: `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -`--program-suffix=`_suffix_ -: Append _suffix_ to installed program names. The most obvious use for - this is to append the program's version number to make it possible - to install several versions of a program at once: - `--program-suffix='$version'`. - - _suffix_ may contain the following path variables: `$pkgid`, `$pkg`, - `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` - -#### Path variables in the simple build system #### - -For the simple build system, there are a number of variables that can be -used when specifying installation paths. The defaults are also specified -in terms of these variables. A number of the variables are actually for -other paths, like `$prefix`. This allows paths to be specified relative -to each other rather than as absolute paths, which is important for -building relocatable packages (see [prefix -independence](#prefix-independence)). - -`$prefix` -: The path variable that stands for the root of the installation. For - an installation to be relocatable, all other installation paths must - be relative to the `$prefix` variable. - -`$bindir` -: The path variable that expands to the path given by the `--bindir` - configure option (or the default). - -`$libdir` -: As above but for `--libdir` - -`$dynlibdir` -: As above but for `--dynlibdir` - -`$libsubdir` -: As above but for `--libsubdir` - -`$datadir` -: As above but for `--datadir` - -`$datasubdir` -: As above but for `--datasubdir` - -`$docdir` -: As above but for `--docdir` - -`$pkgid` -: The name and version of the package, e.g. `mypkg-0.2` - -`$pkg` -: The name of the package, e.g. `mypkg` - -`$version` -: The version of the package, e.g. `0.2` - -`$compiler` -: The compiler being used to build the package, e.g. `ghc-6.6.1` - -`$os` -: The operating system of the computer being used to build the - package, e.g. `linux`, `windows`, `osx`, `freebsd` or `solaris` - -`$arch` -: The architecture of the computer being used to build the package, e.g. - `i386`, `x86_64`, `ppc` or `sparc` - -`$abitag` -: An optional tag that a compiler can use for telling incompatible ABI's - on the same architecture apart. GHCJS encodes the underlying GHC version - in the ABI tag. - -`$abi` -: A shortcut for getting a path that completely identifies the platform in terms - of binary compatibility. 
Expands to the same value as `$arch-$os-compiler-$abitag` - if the compiler uses an abi tag, `$arch-$os-$compiler` if it doesn't. - -#### Paths in the simple build system #### - -For the simple build system, the following defaults apply: - -Option Windows Default Unix Default -------- ---------------- ------------- -`--prefix` (global) `C:\Program Files\Haskell` `/usr/local` -`--prefix` (per-user) `C:\Documents And Settings\user\Application Data\cabal` `$HOME/.cabal` -`--bindir` `$prefix\bin` `$prefix/bin` -`--libdir` `$prefix` `$prefix/lib` -`--libsubdir` (others) `$pkgid\$compiler` `$pkgid/$compiler` -`--dynlibdir` `$libdir\$abi` `$libdir/$abi` -`--libexecdir` `$prefix\$pkgid` `$prefix/libexec` -`--datadir` (executable) `$prefix` `$prefix/share` -`--datadir` (library) `C:\Program Files\Haskell` `$prefix/share` -`--datasubdir` `$pkgid` `$pkgid` -`--docdir` `$prefix\doc\$pkgid` `$datadir/doc/$pkgid` -`--sysconfdir` `$prefix\etc` `$prefix/etc` -`--htmldir` `$docdir\html` `$docdir/html` -`--program-prefix` (empty) (empty) -`--program-suffix` (empty) (empty) - - -#### Prefix-independence #### - -On Windows it is possible to obtain the pathname of the running program. This -means that we can construct an installable executable package that is -independent of its absolute install location. The executable can find its -auxiliary files by finding its own path and knowing the location of the other -files relative to `$bindir`. Prefix-independence is particularly useful: it -means the user can choose the install location (i.e. the value of `$prefix`) at -install-time, rather than having to bake the path into the binary when it is -built. - -In order to achieve this, we require that for an executable on Windows, -all of `$bindir`, `$libdir`, `$dynlibdir`, `$datadir` and `$libexecdir` begin with -`$prefix`. If this is not the case then the compiled executable will -have baked-in all absolute paths. - -The application need do nothing special to achieve prefix-independence. -If it finds any files using `getDataFileName` and the [other functions -provided for the -purpose](developing-packages.html#accessing-data-files-from-package-code), -the files will be accessed relative to the location of the current -executable. - -A library cannot (currently) be prefix-independent, because it will be -linked into an executable whose file system location bears no relation -to the library package. - -### Controlling Flag Assignments ### - -Flag assignments (see the [resolution of conditions and -flags](developing-packages.html#resolution-of-conditions-and-flags)) can -be controlled with the following command line options. - -`-f` _flagname_ or `-f` `-`_flagname_ -: Force the specified flag to `true` or `false` (if preceded with a `-`). Later - specifications for the same flags will override earlier, i.e., - specifying `-fdebug -f-debug` is equivalent to `-f-debug` - -`--flags=`_flagspecs_ -: Same as `-f`, but allows specifying multiple flag assignments at - once. The parameter is a space-separated list of flag names (to - force a flag to `true`), optionally preceded by a `-` (to force a - flag to `false`). For example, `--flags="debug -feature1 feature2"` is - equivalent to `-fdebug -f-feature1 -ffeature2`. - -### Building Test Suites ### - -`--enable-tests` -: Build the test suites defined in the package description file during the - `build` stage. Check for dependencies required by the test suites. 
If the
-  package is configured with this option, it will be possible to run the test
-  suites with the `test` command after the package is built.
-
-`--disable-tests`
-: (default) Do not build any test suites during the `build` stage.
-  Do not check for dependencies required only by the test suites. It will not
-  be possible to invoke the `test` command without reconfiguring the package.
-
-`--enable-coverage`
-: Build libraries and executables (including test suites) with Haskell
-  Program Coverage enabled. Running the test suites will automatically
-  generate coverage reports with HPC.
-
-`--disable-coverage`
-: (default) Do not enable Haskell Program Coverage.
-
-### Miscellaneous options ###
-
-`--user`
-: Does a per-user installation. This changes the [default installation
-  prefix](#paths-in-the-simple-build-system). It also allows
-  dependencies to be satisfied by the user's package database, in
-  addition to the global database. This also implies a default of
-  `--user` for any subsequent `install` command, as packages
-  registered in the global database should not depend on packages
-  registered in a user's database.
-
-`--global`
-: (default) Does a global installation. In this case package
-  dependencies must be satisfied by the global package database. All
-  packages in the user's package database will be ignored. Typically
-  the final installation step will require administrative privileges.
-
-`--package-db=`_db_
-: Allows package dependencies to be satisfied from this additional
-  package database _db_ in addition to the global package database.
-  All packages in the user's package database will be ignored. The
-  interpretation of _db_ is implementation-specific. Typically it will
-  be a file or directory. Not all implementations support arbitrary
-  package databases.
-
-`--default-user-config=` _file_
-: Allows a "default" `cabal.config` freeze file to be passed in
-  manually. This file will only be used if one does not exist in the
-  project directory already. Typically, this can be set from the global
-  cabal `config` file so as to provide a default set of partial
-  constraints to be used by projects, providing a way for users to peg
-  themselves to stable package collections.
-
-`--enable-optimization`[=_n_] or `-O`[_n_]
-: (default) Build with optimization flags (if available). This is
-  appropriate for production use, taking more time to build faster
-  libraries and programs.
-
-  The optional _n_ value is the optimisation level. Some compilers
-  support multiple optimisation levels. The range is 0 to 2. Level 0
-  is equivalent to `--disable-optimization`; level 1 is the default if
-  no _n_ parameter is given. Level 2 is higher optimisation if the
-  compiler supports it. Level 2 is likely to lead to longer compile
-  times and bigger generated code.
-
-`--disable-optimization`
-: Build without optimization. This is suited for development: building
-  will be quicker, but the resulting library or programs will be slower.
-
-`--enable-profiling`
-: Build libraries and executables with profiling enabled (for compilers
-  that support profiling as a separate mode). For this to work, all
-  libraries used by this package must also have been built with profiling
-  support. For libraries this involves building an additional instance of
-  the library in addition to the normal non-profiling instance. For
-  executables it changes the single executable to be built in profiling mode.
- - This flag covers both libraries and executables, but can be overridden - by the `--enable-library-profiling` flag. - - See also the `--profiling-detail` flag below. - -`--disable-profiling` -: (default) Do not enable profiling in generated libraries and executables. - -`--enable-library-profiling` or `-p` -: As with `--enable-profiling` above, but it applies only for libraries. So - this generates an additional profiling instance of the library in addition - to the normal non-profiling instance. - - The `--enable-profiling` flag controls the profiling mode for both - libraries and executables, but if different modes are desired for - libraries versus executables then use `--enable-library-profiling` as well. - -`--disable-library-profiling` -: (default) Do not generate an additional profiling version of the - library. - -`--profiling-detail`[=_level_] -: Some compilers that support profiling, notably GHC, can allocate costs to - different parts of the program and there are different levels of - granularity or detail with which this can be done. In particular for GHC - this concept is called "cost centers", and GHC can automatically add cost - centers, and can do so in different ways. - - This flag covers both libraries and executables, but can be overridden - by the `--library-profiling-detail` flag. - - Currently this setting is ignored for compilers other than GHC. The levels - that cabal currently supports are: - - `default` - : For GHC this uses `exported-functions` for libraries and - `toplevel-functions` for executables. - - `none` - : No costs will be assigned to any code within this component. - - `exported-functions` - : Costs will be assigned at the granularity of all top level functions - exported from each module. In GHC specifically, this is for non-inline - functions. - - `toplevel-functions` - : Costs will be assigned at the granularity of all top level functions - in each module, whether they are exported from the module or not. - In GHC specifically, this is for non-inline functions. - - `all-functions` - : Costs will be assigned at the granularity of all functions in each - module, whether top level or local. In GHC specifically, this is for - non-inline toplevel or where-bound functions or values. - - This flag is new in Cabal-1.24. Prior versions used the equivalent of - `none` above. - -`--library-profiling-detail`[=_level_] -: As with `--profiling-detail` above, but it applies only for libraries. - - The level for both libraries and executables is set by the - `--profiling-detail` flag, but if different levels are desired for - libraries versus executables then use `--library-profiling-detail` as well. - - -`--enable-library-vanilla` -: (default) Build ordinary libraries (as opposed to profiling - libraries). This is independent of the `--enable-library-profiling` - option. If you enable both, you get both. - -`--disable-library-vanilla` -: Do not build ordinary libraries. This is useful in conjunction with - `--enable-library-profiling` to build only profiling libraries, - rather than profiling and ordinary libraries. - -`--enable-library-for-ghci` -: (default) Build libraries suitable for use with GHCi. - -`--disable-library-for-ghci` -: Not all platforms support GHCi and indeed on some platforms, trying - to build GHCi libs fails. In such cases this flag can be used as a - workaround. - -`--enable-split-objs` -: Use the GHC `-split-objs` feature when building the library. 
This
-  reduces the final size of the executables that use the library by
-  allowing them to link with only the bits that they use rather than
-  the entire library. The downside is that building the library takes
-  longer and uses considerably more memory.
-
-`--disable-split-objs`
-: (default) Do not use the GHC `-split-objs` feature. This makes
-  building the library quicker but the final executables that use the
-  library will be larger.
-
-`--enable-executable-stripping`
-: (default) When installing binary executable programs, run the
-  `strip` program on the binary. This can considerably reduce the size
-  of the executable binary file. It does this by removing debugging
-  information and symbols. While such extra information is useful for
-  debugging C programs with traditional debuggers, it is rarely helpful
-  for debugging binaries produced by Haskell compilers.
-
-  Not all Haskell implementations generate native binaries. For such
-  implementations this option has no effect.
-
-`--disable-executable-stripping`
-: Do not strip binary executables during installation. You might want
-  to use this option if you need to debug a program using gdb, for
-  example if you want to debug the C parts of a program containing
-  both Haskell and C code. Another reason is if you are building a
-  package for a system which has a policy of managing the stripping
-  itself (such as some Linux distributions).
-
-`--enable-shared`
-: Build shared library. This implies a separate compiler run to
-  generate position independent code as required on most platforms.
-
-`--disable-shared`
-: (default) Do not build shared library.
-
-`--enable-executable-dynamic`
-: Link executables dynamically. The executable's library dependencies should
-  be built as shared objects. This implies `--enable-shared` unless
-  `--disable-shared` is explicitly specified.
-
-`--disable-executable-dynamic`
-: (default) Link executables statically.
-
-`--configure-option=`_str_
-: An extra option to an external `configure` script, if one is used
-  (see the section on [system-dependent
-  parameters](developing-packages.html#system-dependent-parameters)).
-  There can be several of these options.
-
-`--extra-include-dirs`[=_dir_]
-: An extra directory to search for C header files. You can use this
-  flag multiple times to get a list of directories.
-
-  You might need to use this flag if you have standard system header
-  files in a non-standard location that is not mentioned in the
-  package's `.cabal` file. Using this option has the same effect as
-  appending the directory _dir_ to the `include-dirs` field in each
-  library and executable in the package's `.cabal` file. The advantage
-  of course is that you do not have to modify the package at all.
-  These extra directories will be used while building the package and
-  for libraries it is also saved in the package registration
-  information and used when compiling modules that use the library.
-
-`--extra-lib-dirs`[=_dir_]
-: An extra directory to search for system library files. You can use
-  this flag multiple times to get a list of directories.
-
-`--extra-framework-dirs`[=_dir_]
-: An extra directory to search for frameworks (OS X only). You can use this
-  flag multiple times to get a list of directories.
-
-  You might need to use this flag if you have standard system
-  libraries in a non-standard location that is not mentioned in the
-  package's `.cabal` file.
Using this option has the same affect as - appending the directory _dir_ to the `extra-lib-dirs` field in each - library and executable in the package's `.cabal` file. The advantage - of course is that you do not have to modify the package at all. - These extra directories will be used while building the package and - for libraries it is also saved in the package registration - information and used when compiling modules that use the library. - -`--allow-newer`[=_pkgs_] -: Selectively relax upper bounds in dependencies without editing the - package description. - - If you want to install a package A that depends on B >= 1.0 && < 2.0, but - you have the version 2.0 of B installed, you can compile A against B 2.0 by - using `cabal install --allow-newer=B A`. This works for the whole package - index: if A also depends on C that in turn depends on B < 2.0, C's - dependency on B will be also relaxed. - - Example: - - ~~~~~~~~~~~~~~~~ - $ cd foo - $ cabal configure - Resolving dependencies... - cabal: Could not resolve dependencies: - [...] - $ cabal configure --allow-newer - Resolving dependencies... - Configuring foo... - ~~~~~~~~~~~~~~~~ - - Additional examples: - - ~~~~~~~~~~~~~~~~ - # Relax upper bounds in all dependencies. - $ cabal install --allow-newer foo - - # Relax upper bounds only in dependencies on bar, baz and quux. - $ cabal install --allow-newer=bar,baz,quux foo - - # Relax the upper bound on bar and force bar==2.1. - $ cabal install --allow-newer=bar --constraint="bar==2.1" foo - ~~~~~~~~~~~~~~~~ - - It's also possible to limit the scope of `--allow-newer` to single - packages with the `--allow-newer=scope:dep` syntax. This means that the - dependency on `dep` will be relaxed only for the package `scope`. - - Example: - - ~~~~~~~~~~~~~~~~ - # Relax upper bound in foo's dependency on base; also relax upper bound in - # every package's dependency on lens. - $ cabal install --allow-newer=foo:base,lens - - # Relax upper bounds in foo's dependency on base and bar's dependency - # on time; also relax the upper bound in the dependency on lens specified by - # any package. - $ cabal install --allow-newer=foo:base,lens --allow-newer=bar:time - ~~~~~~~~~~~~~~~~ - - Finally, one can enable `--allow-newer` permanently by setting `allow-newer: - True` in the `~/.cabal/config` file. Enabling 'allow-newer' selectively is - also supported in the config file (`allow-newer: foo, bar, baz:base`). - -`--constraint=`_constraint_ -: Restrict solutions involving a package to a given version range. - For example, `cabal install --constraint="bar==2.1"` will only consider - install plans that do not use `bar` at all, or `bar` of version 2.1. - - As a special case, `cabal install --constraint="bar -none"` prevents `bar` - from being used at all (`-none` abbreviates `> 1 && < 1`); `cabal install - --constraint="bar installed"` prevents reinstallation of the `bar` package; - `cabal install --constraint="bar +foo -baz"` specifies that the flag `foo` - should be turned on and the `baz` flag should be turned off. - -## setup build ## - -Perform any preprocessing or compilation needed to make this package ready for installation. - -This command takes the following options: - ---_prog_-options=_options_, --_prog_-option=_option_ -: These are mostly the same as the [options configure - step](#setup-configure). Unlike the options specified at the - configure step, any program options specified at the build step are - not persistent but are used for that invocation only. 
The options
-  specified at the build step are in addition to, not in replacement of,
-  any options specified at the configure step.
-
-## setup haddock ##
-
-Build the documentation for the package using [haddock][]. By default,
-only the documentation for the exposed modules is generated (but see the
-`--executables` and `--internal` flags below).
-
-This command takes the following options:
-
-`--hoogle`
-: Generate a file `dist/doc/html/`_pkgid_`.txt`, which can be
-  converted by [Hoogle](http://www.haskell.org/hoogle/) into a
-  database for searching. This is equivalent to running [haddock][]
-  with the `--hoogle` flag.
-
-`--html-location=`_url_
-: Specify a template for the location of HTML documentation for
-  prerequisite packages. The substitutions ([see
-  listing](#paths-in-the-simple-build-system)) are applied to the
-  template to obtain a location for each package, which will be used
-  by hyperlinks in the generated documentation. For example, the
-  following command generates links pointing at [Hackage] pages:
-
-  > setup haddock --html-location='http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html'
-
-  Here the argument is quoted to prevent substitution by the shell. If
-  this option is omitted, the location for each package is obtained
-  using the package tool (e.g. `ghc-pkg`).
-
-`--executables`
-: Also run [haddock][] for the modules of all the executable programs.
-  By default [haddock][] is run only on the exported modules.
-
-`--internal`
-: Run [haddock][] for all modules, including unexposed ones, and
-  make [haddock][] generate documentation for unexported symbols as
-  well.
-
-`--css=`_path_
-: The argument _path_ denotes a CSS file, which is passed to
-  [haddock][] and used to set the style of the generated
-  documentation. This is only needed to override the default style
-  that [haddock][] uses.
-
-`--hyperlink-source`
-: Generate [haddock][] documentation integrated with [HsColour][].
-  First, [HsColour][] is run to generate colourised code. Then
-  [haddock][] is run to generate HTML documentation. Each entity
-  shown in the documentation is linked to its definition in the
-  colourised code.
-
-`--hscolour-css=`_path_
-: The argument _path_ denotes a CSS file, which is passed to [HsColour][] as in
-
-  > runhaskell Setup.hs hscolour --css=_path_
-
-## setup hscolour ##
-
-Produce colourised code in HTML format using [HsColour][]. Colourised
-code for exported modules is put in `dist/doc/html/`_pkgid_`/src`.
-
-This command takes the following options:
-
-`--executables`
-: Also run [HsColour][] on the sources of all executable programs.
-  Colourised code is put in `dist/doc/html/`_pkgid_/_executable_`/src`.
-
-`--css=`_path_
-: Use the given CSS file for the generated HTML files. The CSS file
-  defines the colours used to colourise code. Note that this copies
-  the given CSS file to the directory with the generated HTML files
-  (renamed to `hscolour.css`) rather than linking to it.
-
-## setup install ##
-
-Copy the files into the install locations and (for library packages)
-register the package with the compiler, i.e. make the modules it
-contains available to programs.
-
-The [install locations](#installation-paths) are determined by options
-to `setup configure`.
-
-This command takes the following options:
-
-`--global`
-: Register this package in the system-wide database. (This is the
-  default, unless the `--user` option was supplied to the `configure`
-  command.)
-
-`--user`
-: Register this package in the user's local package database.
(This is - the default if the `--user` option was supplied to the `configure` - command.) - -## setup copy ## - -Copy the files without registering them. This command is mainly of use -to those creating binary packages. - -This command takes the following option: - -`--destdir=`_path_ - -Specify the directory under which to place installed files. If this is -not given, then the root directory is assumed. - -## setup register ## - -Register this package with the compiler, i.e. make the modules it -contains available to programs. This only makes sense for library -packages. Note that the `install` command incorporates this action. The -main use of this separate command is in the post-installation step for a -binary package. - -This command takes the following options: - -`--global` -: Register this package in the system-wide database. (This is the default.) - - -`--user` -: Register this package in the user's local package database. - - -`--gen-script` -: Instead of registering the package, generate a script containing - commands to perform the registration. On Unix, this file is called - `register.sh`, on Windows, `register.bat`. This script might be - included in a binary bundle, to be run after the bundle is unpacked - on the target system. - -`--gen-pkg-config`[=_path_] -: Instead of registering the package, generate a package registration - file. This only applies to compilers that support package - registration files which at the moment is only GHC. The file should - be used with the compiler's mechanism for registering packages. This - option is mainly intended for packaging systems. If possible use the - `--gen-script` option instead since it is more portable across - Haskell implementations. The _path_ is - optional and can be used to specify a particular output file to - generate. Otherwise, by default the file is the package name and - version with a `.conf` extension. - -`--inplace` -: Registers the package for use directly from the build tree, without - needing to install it. This can be useful for testing: there's no - need to install the package after modifying it, just recompile and - test. - - This flag does not create a build-tree-local package database. It - still registers the package in one of the user or global databases. - - However, there are some caveats. It only works with GHC - (currently). It only works if your package doesn't depend on having - any supplemental files installed --- plain Haskell libraries should - be fine. - -## setup unregister ## - -Deregister this package with the compiler. - -This command takes the following options: - -`--global` -: Deregister this package in the system-wide database. (This is the default.) - -`--user` -: Deregister this package in the user's local package database. - -`--gen-script` -: Instead of deregistering the package, generate a script containing - commands to perform the deregistration. On Unix, this file is - called `unregister.sh`, on Windows, `unregister.bat`. This script - might be included in a binary bundle, to be run on the target - system. - -## setup clean ## - -Remove any local files created during the `configure`, `build`, -`haddock`, `register` or `unregister` steps, and also any files and -directories listed in the `extra-tmp-files` field. - -This command takes the following options: - -`--save-configure` or `-s` -: Keeps the configuration information so it is not necessary to run - the configure step again before building. 
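-
-For example, a typical clean-and-rebuild cycle that keeps the existing
-configuration might look like this:
-
-~~~~~~~~~~~~~~~~
-runhaskell Setup.hs clean --save-configure   # Remove build products but keep the configuration
-runhaskell Setup.hs build                    # Rebuild without re-running the configure step
-~~~~~~~~~~~~~~~~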
- -## setup test ## - -Run the test suites specified in the package description file. Aside from -the following flags, Cabal accepts the name of one or more test suites on the -command line after `test`. When supplied, Cabal will run only the named test -suites, otherwise, Cabal will run all test suites in the package. - -`--builddir=`_dir_ -: The directory where Cabal puts generated build files (default: `dist`). - Test logs will be located in the `test` subdirectory. - -`--human-log=`_path_ -: The template used to name human-readable test logs; the path is relative - to `dist/test`. By default, logs are named according to the template - `$pkgid-$test-suite.log`, so that each test suite will be logged to its own - human-readable log file. Template variables allowed are: `$pkgid`, - `$compiler`, `$os`, `$arch`, `$abi`, `$abitag`, `$test-suite`, and `$result`. - -`--machine-log=`_path_ -: The path to the machine-readable log, relative to `dist/test`. The default - template is `$pkgid.log`. Template variables allowed are: `$pkgid`, - `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` and `$result`. - -`--show-details=`_filter_ -: Determines if the results of individual test cases are shown on the - terminal. May be `always` (always show), `never` (never show), `failures` - (show only failed results), or `streaming` (show all results in real time). - -`--test-options=`_options_ -: Give extra options to the test executables. - -`--test-option=`_option_ -: give an extra option to the test executables. There is no need to quote - options containing spaces because a single option is assumed, so options - will not be split on spaces. - -## setup sdist ## - -Create a system- and compiler-independent source distribution in a file -_package_-_version_`.tar.gz` in the `dist` subdirectory, for -distribution to package builders. When unpacked, the commands listed in -this section will be available. - -The files placed in this distribution are the package description file, -the setup script, the sources of the modules named in the package -description file, and files named in the `license-file`, `main-is`, -`c-sources`, `js-sources`, `data-files`, `extra-source-files` and -`extra-doc-files` fields. - -This command takes the following option: - -`--snapshot` -: Append today's date (in "YYYYMMDD" format) to the version number for - the generated source package. The original package is unaffected. 
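-
-For example (the file name shown is hypothetical; it depends on the package
-version and the current date):
-
-~~~~~~~~~~~~~~~~
-runhaskell Setup.hs sdist --snapshot
-# Writes something like dist/mypkg-0.2.20161223.tar.gz
-~~~~~~~~~~~~~~~~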
- - -[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html -[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html -[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License -[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension -[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType -[alex]: http://www.haskell.org/alex/ -[autoconf]: http://www.gnu.org/software/autoconf/ -[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ -[cpphs]: http://projects.haskell.org/cpphs/ -[greencard]: http://hackage.haskell.org/package/greencard -[haddock]: http://www.haskell.org/haddock/ -[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ -[happy]: http://www.haskell.org/happy/ -[Hackage]: http://hackage.haskell.org/ -[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/misc.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/misc.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/doc/misc.markdown 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/doc/misc.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,109 +0,0 @@ -% Cabal User Guide - -# Reporting bugs and deficiencies # - -Please report any flaws or feature requests in the [bug tracker][]. - -For general discussion or queries email the libraries mailing list -. There is also a development mailing list -. - -[bug tracker]: https://github.com/haskell/cabal/issues - -# Stability of Cabal interfaces # - -The Cabal library and related infrastructure is still under active -development. New features are being added and limitations and bugs are -being fixed. This requires internal changes and often user visible -changes as well. We therefore cannot promise complete future-proof -stability, at least not without halting all development work. - -This section documents the aspects of the Cabal interface that we can -promise to keep stable and which bits are subject to change. - -## Cabal file format ## - -This is backwards compatible and mostly forwards compatible. New fields -can be added without breaking older versions of Cabal. Fields can be -deprecated without breaking older packages. - -## Command-line interface ## - -### Very Stable Command-line interfaces ### - -* `./setup configure` - * `--prefix` - * `--user` - * `--ghc`, `--uhc` - * `--verbose` - * `--prefix` - -* `./setup build` -* `./setup install` -* `./setup register` -* `./setup copy` - -### Stable Command-line interfaces ### - -### Unstable command-line ### - -## Functions and Types ## - -The Cabal library follows the [Package Versioning Policy][PVP]. This -means that within a stable major release, for example 1.2.x, there will -be no incompatible API changes. But minor versions increments, for -example 1.2.3, indicate compatible API additions. - -The Package Versioning Policy does not require any API guarantees -between major releases, for example between 1.2.x and 1.4.x. In practise -of course not everything changes between major releases. Some parts of -the API are more prone to change than others. The rest of this section -gives some informal advice on what level of API stability you can expect -between major releases. 
- -[PVP]: http://www.haskell.org/haskellwiki/Package_versioning_policy - -### Very Stable API ### - -* `defaultMain` - -* `defaultMainWithHooks defaultUserHooks` - - But regular `defaultMainWithHooks` isn't stable since `UserHooks` - changes. - -### Semi-stable API ### - -* `UserHooks` The hooks API will change in the future - -* `Distribution.*` is mostly declarative information about packages and - is somewhat stable. - -### Unstable API ### - -Everything under `Distribution.Simple.*` has no stability guarantee. - -## Hackage ## - -The index format is a partly stable interface. It consists of a tar.gz -file that contains directories with `.cabal` files in. In future it may -contain more kinds of files so do not assume every file is a `.cabal` -file. Incompatible revisions to the format would involve bumping the -name of the index file, i.e., `00-index.tar.gz`, `01-index.tar.gz` etc. - - -[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html -[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html -[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License -[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension -[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType -[alex]: http://www.haskell.org/alex/ -[autoconf]: http://www.gnu.org/software/autoconf/ -[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ -[cpphs]: http://projects.haskell.org/cpphs/ -[greencard]: http://hackage.haskell.org/package/greencard -[haddock]: http://www.haskell.org/haddock/ -[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ -[happy]: http://www.haskell.org/happy/ -[Hackage]: http://hackage.haskell.org/ -[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Language/Haskell/Extension.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Language/Haskell/Extension.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Language/Haskell/Extension.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Language/Haskell/Extension.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,847 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Language.Haskell.Extension --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- Haskell language dialects and extensions - -module Language.Haskell.Extension ( - Language(..), - knownLanguages, - - Extension(..), - KnownExtension(..), - knownExtensions, - deprecatedExtensions - ) where - -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.Binary - -import qualified Text.PrettyPrint as Disp -import qualified Data.Char as Char (isAlphaNum) -import Data.Array (Array, accumArray, bounds, Ix(inRange), (!)) -import Data.Data (Data) -import Data.Typeable (Typeable) -import GHC.Generics (Generic) - --- ------------------------------------------------------------ --- * Language --- ------------------------------------------------------------ - --- | This represents a Haskell language dialect. --- --- Language 'Extension's are interpreted relative to one of these base --- languages. 
--- -data Language = - - -- | The Haskell 98 language as defined by the Haskell 98 report. - -- - Haskell98 - - -- | The Haskell 2010 language as defined by the Haskell 2010 report. - -- - | Haskell2010 - - -- | An unknown language, identified by its name. - | UnknownLanguage String - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary Language - -knownLanguages :: [Language] -knownLanguages = [Haskell98, Haskell2010] - -instance Text Language where - disp (UnknownLanguage other) = Disp.text other - disp other = Disp.text (show other) - - parse = do - lang <- Parse.munch1 Char.isAlphaNum - return (classifyLanguage lang) - -classifyLanguage :: String -> Language -classifyLanguage = \str -> case lookup str langTable of - Just lang -> lang - Nothing -> UnknownLanguage str - where - langTable = [ (show lang, lang) - | lang <- knownLanguages ] - --- ------------------------------------------------------------ --- * Extension --- ------------------------------------------------------------ - --- Note: if you add a new 'KnownExtension': --- --- * also add it to the Distribution.Simple.X.languageExtensions lists --- (where X is each compiler: GHC, JHC, LHC, UHC, HaskellSuite) --- --- | This represents language extensions beyond a base 'Language' definition --- (such as 'Haskell98') that are supported by some implementations, usually --- in some special mode. --- --- Where applicable, references are given to an implementation's --- official documentation. - -data Extension = - -- | Enable a known extension - EnableExtension KnownExtension - - -- | Disable a known extension - | DisableExtension KnownExtension - - -- | An unknown extension, identified by the name of its @LANGUAGE@ - -- pragma. - | UnknownExtension String - - deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) - -instance Binary Extension - -data KnownExtension = - - -- | Allow overlapping class instances, provided there is a unique - -- most specific instance for each use. - -- - -- * - OverlappingInstances - - -- | Ignore structural rules guaranteeing the termination of class - -- instance resolution. Termination is guaranteed by a fixed-depth - -- recursion stack, and compilation may fail if this depth is - -- exceeded. - -- - -- * - | UndecidableInstances - - -- | Implies 'OverlappingInstances'. Allow the implementation to - -- choose an instance even when it is possible that further - -- instantiation of types will lead to a more specific instance - -- being applicable. - -- - -- * - | IncoherentInstances - - -- | /(deprecated)/ Allow recursive bindings in @do@ blocks, using the @rec@ - -- keyword. See also 'RecursiveDo'. - | DoRec - - -- | Allow recursive bindings using @mdo@, a variant of @do@. - -- @DoRec@ provides a different, preferred syntax. - -- - -- * - | RecursiveDo - - -- | Provide syntax for writing list comprehensions which iterate - -- over several lists together, like the 'zipWith' family of - -- functions. - -- - -- * - | ParallelListComp - - -- | Allow multiple parameters in a type class. - -- - -- * - | MultiParamTypeClasses - - -- | Enable the dreaded monomorphism restriction. - -- - -- * - | MonomorphismRestriction - - -- | Allow a specification attached to a multi-parameter type class - -- which indicates that some parameters are entirely determined by - -- others. The implementation will check that this property holds - -- for the declared instances, and will use this property to reduce - -- ambiguity in instance resolution. 
- -- - -- * - | FunctionalDependencies - - -- | Like 'RankNTypes' but does not allow a higher-rank type to - -- itself appear on the left of a function arrow. - -- - -- * - | Rank2Types - - -- | Allow a universally-quantified type to occur on the left of a - -- function arrow. - -- - -- * - | RankNTypes - - -- | Allow data constructors to have polymorphic arguments. Unlike - -- 'RankNTypes', does not allow this for ordinary functions. - -- - -- * - | PolymorphicComponents - - -- | Allow existentially-quantified data constructors. - -- - -- * - | ExistentialQuantification - - -- | Cause a type variable in a signature, which has an explicit - -- @forall@ quantifier, to scope over the definition of the - -- accompanying value declaration. - -- - -- * - | ScopedTypeVariables - - -- | Deprecated, use 'ScopedTypeVariables' instead. - | PatternSignatures - - -- | Enable implicit function parameters with dynamic scope. - -- - -- * - | ImplicitParams - - -- | Relax some restrictions on the form of the context of a type - -- signature. - -- - -- * - | FlexibleContexts - - -- | Relax some restrictions on the form of the context of an - -- instance declaration. - -- - -- * - | FlexibleInstances - - -- | Allow data type declarations with no constructors. - -- - -- * - | EmptyDataDecls - - -- | Run the C preprocessor on Haskell source code. - -- - -- * - | CPP - - -- | Allow an explicit kind signature giving the kind of types over - -- which a type variable ranges. - -- - -- * - | KindSignatures - - -- | Enable a form of pattern which forces evaluation before an - -- attempted match, and a form of strict @let@/@where@ binding. - -- - -- * - | BangPatterns - - -- | Allow type synonyms in instance heads. - -- - -- * - | TypeSynonymInstances - - -- | Enable Template Haskell, a system for compile-time - -- metaprogramming. - -- - -- * - | TemplateHaskell - - -- | Enable the Foreign Function Interface. In GHC, implements the - -- standard Haskell 98 Foreign Function Interface Addendum, plus - -- some GHC-specific extensions. - -- - -- * - | ForeignFunctionInterface - - -- | Enable arrow notation. - -- - -- * - | Arrows - - -- | /(deprecated)/ Enable generic type classes, with default instances defined in - -- terms of the algebraic structure of a type. - -- - -- * - | Generics - - -- | Enable the implicit importing of the module "Prelude". When - -- disabled, when desugaring certain built-in syntax into ordinary - -- identifiers, use whatever is in scope rather than the "Prelude" - -- -- version. - -- - -- * - | ImplicitPrelude - - -- | Enable syntax for implicitly binding local names corresponding - -- to the field names of a record. Puns bind specific names, unlike - -- 'RecordWildCards'. - -- - -- * - | NamedFieldPuns - - -- | Enable a form of guard which matches a pattern and binds - -- variables. - -- - -- * - | PatternGuards - - -- | Allow a type declared with @newtype@ to use @deriving@ for any - -- class with an instance for the underlying type. - -- - -- * - | GeneralizedNewtypeDeriving - - -- | Enable the \"Trex\" extensible records system. - -- - -- * - | ExtensibleRecords - - -- | Enable type synonyms which are transparent in some definitions - -- and opaque elsewhere, as a way of implementing abstract - -- datatypes. - -- - -- * - | RestrictedTypeSynonyms - - -- | Enable an alternate syntax for string literals, - -- with string templating. - -- - -- * - | HereDocuments - - -- | Allow the character @#@ as a postfix modifier on identifiers. - -- Also enables literal syntax for unboxed values. 
- -- - -- * - | MagicHash - - -- | Allow data types and type synonyms which are indexed by types, - -- i.e. ad-hoc polymorphism for types. - -- - -- * - | TypeFamilies - - -- | Allow a standalone declaration which invokes the type class - -- @deriving@ mechanism. - -- - -- * - | StandaloneDeriving - - -- | Allow certain Unicode characters to stand for certain ASCII - -- character sequences, e.g. keywords and punctuation. - -- - -- * - | UnicodeSyntax - - -- | Allow the use of unboxed types as foreign types, e.g. in - -- @foreign import@ and @foreign export@. - -- - -- * - | UnliftedFFITypes - - -- | Enable interruptible FFI. - -- - -- * - | InterruptibleFFI - - -- | Allow use of CAPI FFI calling convention (@foreign import capi@). - -- - -- * - | CApiFFI - - -- | Defer validity checking of types until after expanding type - -- synonyms, relaxing the constraints on how synonyms may be used. - -- - -- * - | LiberalTypeSynonyms - - -- | Allow the name of a type constructor, type class, or type - -- variable to be an infix operator. - | TypeOperators - - -- | Enable syntax for implicitly binding local names corresponding - -- to the field names of a record. A wildcard binds all unmentioned - -- names, unlike 'NamedFieldPuns'. - -- - -- * - | RecordWildCards - - -- | Deprecated, use 'NamedFieldPuns' instead. - | RecordPuns - - -- | Allow a record field name to be disambiguated by the type of - -- the record it's in. - -- - -- * - | DisambiguateRecordFields - - -- | Enable traditional record syntax (as supported by Haskell 98) - -- - -- * - | TraditionalRecordSyntax - - -- | Enable overloading of string literals using a type class, much - -- like integer literals. - -- - -- * - | OverloadedStrings - - -- | Enable generalized algebraic data types, in which type - -- variables may be instantiated on a per-constructor basis. Implies - -- 'GADTSyntax'. - -- - -- * - | GADTs - - -- | Enable GADT syntax for declaring ordinary algebraic datatypes. - -- - -- * - | GADTSyntax - - -- | Make pattern bindings monomorphic. - -- - -- * - | MonoPatBinds - - -- | Relax the requirements on mutually-recursive polymorphic - -- functions. - -- - -- * - | RelaxedPolyRec - - -- | Allow default instantiation of polymorphic types in more - -- situations. - -- - -- * - | ExtendedDefaultRules - - -- | Enable unboxed tuples. - -- - -- * - | UnboxedTuples - - -- | Enable @deriving@ for classes 'Data.Typeable.Typeable' and - -- 'Data.Generics.Data'. - -- - -- * - | DeriveDataTypeable - - -- | Enable @deriving@ for 'GHC.Generics.Generic' and 'GHC.Generics.Generic1'. - -- - -- * - | DeriveGeneric - - -- | Enable support for default signatures. - -- - -- * - | DefaultSignatures - - -- | Allow type signatures to be specified in instance declarations. - -- - -- * - | InstanceSigs - - -- | Allow a class method's type to place additional constraints on - -- a class type variable. - -- - -- * - | ConstrainedClassMethods - - -- | Allow imports to be qualified by the package name the module is - -- intended to be imported from, e.g. - -- - -- > import "network" Network.Socket - -- - -- * - | PackageImports - - -- | /(deprecated)/ Allow a type variable to be instantiated at a - -- polymorphic type. - -- - -- * - | ImpredicativeTypes - - -- | /(deprecated)/ Change the syntax for qualified infix operators. - -- - -- * - | NewQualifiedOperators - - -- | Relax the interpretation of left operator sections to allow - -- unary postfix operators. 
- -- - -- * - | PostfixOperators - - -- | Enable quasi-quotation, a mechanism for defining new concrete - -- syntax for expressions and patterns. - -- - -- * - | QuasiQuotes - - -- | Enable generalized list comprehensions, supporting operations - -- such as sorting and grouping. - -- - -- * - | TransformListComp - - -- | Enable monad comprehensions, which generalise the list - -- comprehension syntax to work for any monad. - -- - -- * - | MonadComprehensions - - -- | Enable view patterns, which match a value by applying a - -- function and matching on the result. - -- - -- * - | ViewPatterns - - -- | Allow concrete XML syntax to be used in expressions and patterns, - -- as per the Haskell Server Pages extension language: - -- . The ideas behind it are - -- discussed in the paper \"Haskell Server Pages through Dynamic Loading\" - -- by Niklas Broberg, from Haskell Workshop '05. - | XmlSyntax - - -- | Allow regular pattern matching over lists, as discussed in the - -- paper \"Regular Expression Patterns\" by Niklas Broberg, Andreas Farre - -- and Josef Svenningsson, from ICFP '04. - | RegularPatterns - - -- | Enable the use of tuple sections, e.g. @(, True)@ desugars into - -- @\x -> (x, True)@. - -- - -- * - | TupleSections - - -- | Allow GHC primops, written in C--, to be imported into a Haskell - -- file. - | GHCForeignImportPrim - - -- | Support for patterns of the form @n + k@, where @k@ is an - -- integer literal. - -- - -- * - | NPlusKPatterns - - -- | Improve the layout rule when @if@ expressions are used in a @do@ - -- block. - | DoAndIfThenElse - - -- | Enable support for multi-way @if@-expressions. - -- - -- * - | MultiWayIf - - -- | Enable support lambda-@case@ expressions. - -- - -- * - | LambdaCase - - -- | Makes much of the Haskell sugar be desugared into calls to the - -- function with a particular name that is in scope. - -- - -- * - | RebindableSyntax - - -- | Make @forall@ a keyword in types, which can be used to give the - -- generalisation explicitly. - -- - -- * - | ExplicitForAll - - -- | Allow contexts to be put on datatypes, e.g. the @Eq a@ in - -- @data Eq a => Set a = NilSet | ConsSet a (Set a)@. - -- - -- * - | DatatypeContexts - - -- | Local (@let@ and @where@) bindings are monomorphic. - -- - -- * - | MonoLocalBinds - - -- | Enable @deriving@ for the 'Data.Functor.Functor' class. - -- - -- * - | DeriveFunctor - - -- | Enable @deriving@ for the 'Data.Traversable.Traversable' class. - -- - -- * - | DeriveTraversable - - -- | Enable @deriving@ for the 'Data.Foldable.Foldable' class. - -- - -- * - | DeriveFoldable - - -- | Enable non-decreasing indentation for @do@ blocks. - -- - -- * - | NondecreasingIndentation - - -- | Allow imports to be qualified with a safe keyword that requires - -- the imported module be trusted as according to the Safe Haskell - -- definition of trust. - -- - -- > import safe Network.Socket - -- - -- * - | SafeImports - - -- | Compile a module in the Safe, Safe Haskell mode -- a restricted - -- form of the Haskell language to ensure type safety. - -- - -- * - | Safe - - -- | Compile a module in the Trustworthy, Safe Haskell mode -- no - -- restrictions apply but the module is marked as trusted as long as - -- the package the module resides in is trusted. - -- - -- * - | Trustworthy - - -- | Compile a module in the Unsafe, Safe Haskell mode so that - -- modules compiled using Safe, Safe Haskell mode can't import it. 
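
As a brief sketch of three of the purely syntactic extensions described above ('LambdaCase', 'MultiWayIf' and 'TupleSections'); the module and function names are invented for illustration and are not part of the removed sources.

    {-# LANGUAGE LambdaCase #-}
    {-# LANGUAGE MultiWayIf #-}
    {-# LANGUAGE TupleSections #-}
    module SyntaxDemo where   -- invented module name, illustration only

    -- LambdaCase: \case abbreviates \x -> case x of ...
    -- MultiWayIf: an 'if' with several guarded branches.
    describeSign :: Int -> String
    describeSign = \case
      0 -> "zero"
      n -> if | n > 0     -> "positive"
              | otherwise -> "negative"

    -- TupleSections: (, True) desugars to \x -> (x, True).
    tagAll :: [a] -> [(a, Bool)]
    tagAll = map (, True)
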
- -- - -- * - | Unsafe - - -- | Allow type class/implicit parameter/equality constraints to be - -- used as types with the special kind constraint. Also generalise - -- the @(ctxt => ty)@ syntax so that any type of kind constraint can - -- occur before the arrow. - -- - -- * - | ConstraintKinds - - -- | Enable kind polymorphism. - -- - -- * - | PolyKinds - - -- | Enable datatype promotion. - -- - -- * - | DataKinds - - -- | Enable parallel arrays syntax (@[:@, @:]@) for /Data Parallel Haskell/. - -- - -- * - | ParallelArrays - - -- | Enable explicit role annotations, like in (@type role Foo representational representational@). - -- - -- * - | RoleAnnotations - - -- | Enable overloading of list literals, arithmetic sequences and - -- list patterns using the 'IsList' type class. - -- - -- * - | OverloadedLists - - -- | Enable case expressions that have no alternatives. Also applies to lambda-case expressions if they are enabled. - -- - -- * - | EmptyCase - - -- | Triggers the generation of derived 'Typeable' instances for every - -- datatype and type class declaration. - -- - -- * - | AutoDeriveTypeable - - -- | Desugars negative literals directly (without using negate). - -- - -- * - | NegativeLiterals - - -- | Allow the use of binary integer literal syntax (e.g. @0b11001001@ to denote @201@). - -- - -- * - | BinaryLiterals - - -- | Allow the use of floating literal syntax for all instances of 'Num', including 'Int' and 'Integer'. - -- - -- * - | NumDecimals - - -- | Enable support for type classes with no type parameter. - -- - -- * - | NullaryTypeClasses - - -- | Enable explicit namespaces in module import/export lists. - -- - -- * - | ExplicitNamespaces - - -- | Allow the user to write ambiguous types, and the type inference engine to infer them. - -- - -- * - | AllowAmbiguousTypes - - -- | Enable @foreign import javascript@. - | JavaScriptFFI - - -- | Allow giving names to and abstracting over patterns. - -- - -- * - | PatternSynonyms - - -- | Allow anonymous placeholders (underscore) inside type signatures. The - -- type inference engine will generate a message describing the type inferred - -- at the hole's location. - -- - -- * - | PartialTypeSignatures - - -- | Allow named placeholders written with a leading underscore inside type - -- signatures. Wildcards with the same name unify to the same type. - -- - -- * - | NamedWildCards - - -- | Enable @deriving@ for any class. - -- - -- * - | DeriveAnyClass - - -- | Enable @deriving@ for the 'Language.Haskell.TH.Syntax.Lift' class. - -- - -- * - | DeriveLift - - -- | Enable support for 'static pointers' (and the @static@ - -- keyword) to refer to globally stable names, even across - -- different programs. - -- - -- * - | StaticPointers - - -- | Switches data type declarations to be strict by default (as if - -- they had a bang using @BangPatterns@), and allow opt-in field - -- laziness using @~@. - | StrictData - - -- | Switches all pattern bindings to be strict by default (as if - -- they had a bang using @BangPatterns@), ordinary patterns are - -- recovered using @~@. Implies @StrictData@. - | Strict - - -- | Allows @do@-notation for types that are @'Applicative'@ as well - -- as @'Monad'@. When enabled, desugaring @do@ notation tries to use - -- @(<*>)@ and @'fmap'@ and @'join'@ as far as possible. - | ApplicativeDo - - -- | Allow records to use duplicated field labels for accessors. - | DuplicateRecordFields - - -- | Enable explicit type applications with the syntax @id \@Int@. 
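
Two of the extensions described above admit a very compact illustration, again as an invented sketch rather than anything taken from the removed sources: 'BinaryLiterals' for the 0b... integer syntax, and 'StrictData', under which constructor fields are strict unless explicitly marked lazy with ~.

    {-# LANGUAGE BinaryLiterals #-}
    {-# LANGUAGE StrictData #-}
    module LiteralsAndStrictness where   -- invented module name, illustration only

    -- BinaryLiterals: 0b11001001 denotes 201, as stated in the description above.
    mask :: Int
    mask = 0b11001001

    -- StrictData: every constructor field below is strict by default.
    data Point = Point Double Double

    -- The '~' opts the second field back into ordinary laziness.
    data Cached = Cached [Int] ~Int
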
- | TypeApplications - - -- | Dissolve the distinction between types and kinds, allowing the compiler - -- to reason about kind equality and therefore enabling GADTs to be promoted - -- to the type-level. - | TypeInType - - -- | Allow recursive (and therefore undecideable) super-class relationships. - | UndecidableSuperClasses - - -- | A temporary extension to help library authors check if their - -- code will compile with the new planned desugaring of fail. - | MonadFailDesugaring - - -- | A subset of @TemplateHaskell@ including only quasi-quoting. - | TemplateHaskellQuotes - - -- | Allows use of the @#label@ syntax. - | OverloadedLabels - - deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded, Typeable, Data) - -instance Binary KnownExtension - -{-# DEPRECATED knownExtensions - "KnownExtension is an instance of Enum and Bounded, use those instead." #-} -knownExtensions :: [KnownExtension] -knownExtensions = [minBound..maxBound] - --- | Extensions that have been deprecated, possibly paired with another --- extension that replaces it. --- -deprecatedExtensions :: [(Extension, Maybe Extension)] -deprecatedExtensions = - [ (EnableExtension RecordPuns, Just (EnableExtension NamedFieldPuns)) - , (EnableExtension PatternSignatures, Just (EnableExtension ScopedTypeVariables)) - ] --- NOTE: when adding deprecated extensions that have new alternatives --- we must be careful to make sure that the deprecation messages are --- valid. We must not recommend aliases that cannot be used with older --- compilers, perhaps by adding support in Cabal to translate the new --- name to the old one for older compilers. Otherwise we are in danger --- of the scenario in ticket #689. - -instance Text Extension where - disp (UnknownExtension other) = Disp.text other - disp (EnableExtension ke) = Disp.text (show ke) - disp (DisableExtension ke) = Disp.text ("No" ++ show ke) - - parse = do - extension <- Parse.munch1 Char.isAlphaNum - return (classifyExtension extension) - -instance Text KnownExtension where - disp ke = Disp.text (show ke) - - parse = do - extension <- Parse.munch1 Char.isAlphaNum - case classifyKnownExtension extension of - Just ke -> - return ke - Nothing -> - fail ("Can't parse " ++ show extension ++ " as KnownExtension") - -classifyExtension :: String -> Extension -classifyExtension string - = case classifyKnownExtension string of - Just ext -> EnableExtension ext - Nothing -> - case string of - 'N':'o':string' -> - case classifyKnownExtension string' of - Just ext -> DisableExtension ext - Nothing -> UnknownExtension string - _ -> UnknownExtension string - --- | 'read' for 'KnownExtension's is really really slow so for the Text --- instance --- what we do is make a simple table indexed off the first letter in the --- extension name. The extension names actually cover the range @'A'-'Z'@ --- pretty densely and the biggest bucket is 7 so it's not too bad. We just do --- a linear search within each bucket. --- --- This gives an order of magnitude improvement in parsing speed, and it'll --- also allow us to do case insensitive matches in future if we prefer. --- -classifyKnownExtension :: String -> Maybe KnownExtension -classifyKnownExtension "" = Nothing -classifyKnownExtension string@(c : _) - | inRange (bounds knownExtensionTable) c - = lookup string (knownExtensionTable ! c) - | otherwise = Nothing - -knownExtensionTable :: Array Char [(String, KnownExtension)] -knownExtensionTable = - accumArray (flip (:)) [] ('A', 'Z') - [ (head str, (str, extension)) - | extension <- [toEnum 0 ..] 
- , let str = show extension ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/LICENSE 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -Copyright (c) 2003-2014, Isaac Jones, Simon Marlow, Martin Sjögren, - Bjorn Bringert, Krasimir Angelov, - Malcolm Wallace, Ross Patterson, Ian Lynagh, - Duncan Coutts, Thomas Schilling, - Johan Tibell, Mikhail Glushenkov -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/README.md 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,182 +0,0 @@ -The Cabal library package -========================= - -See the [Cabal web site] for more information. - -If you also want the `cabal` command-line program, you need the -[cabal-install] package in addition to this library. - -[cabal-install]: ../cabal-install/README.md - -Installing the Cabal library -============================ - -If you already have the `cabal` program ---------------------------------------- - -In this case run: - - $ cabal install - -However, if you do not have an existing version of the `cabal` program, -you first must install the Cabal library. To avoid this bootstrapping -problem, you can install the Cabal library directly as described below. - - -Installing as a user (no root or administrator access) ------------------------------------------------------- - - ghc -threaded --make Setup - ./Setup configure --user - ./Setup build - ./Setup install - -Note the use of the `--user` flag at the configure step. 
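
The comment above classifyKnownExtension in the removed Cabal sources earlier in this diff explains why parsing goes through a table indexed by the first letter of the extension name, with a linear search inside each small bucket, rather than through 'read' on the large KnownExtension type. The following is a self-contained sketch of that lookup scheme; the module and the demo* names are invented for illustration, and the name list is cut down to a handful of entries.

    module ClassifyDemo where   -- invented module name, illustration only

    import Data.Array (Array, accumArray, bounds, inRange, (!))

    -- A deliberately tiny stand-in for the real list of extension names.
    demoNames :: [String]
    demoNames = ["BangPatterns", "CPP", "GADTs", "LambdaCase", "MultiWayIf"]

    -- One bucket per initial letter, as in 'knownExtensionTable' above;
    -- a lookup is then a short linear search within a single bucket.
    demoTable :: Array Char [String]
    demoTable = accumArray (flip (:)) [] ('A', 'Z')
                  [ (head n, n) | n <- demoNames ]

    classifyDemo :: String -> Maybe String
    classifyDemo ""         = Nothing
    classifyDemo s@(c : _)
      | inRange (bounds demoTable) c, s `elem` (demoTable ! c) = Just s
      | otherwise                                              = Nothing

With the full list the buckets stay small (the removed comment notes the largest holds seven names), so the per-bucket linear search stays cheap while avoiding the cost of 'read'.
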
- -Compiling 'Setup' rather than using `runghc Setup` is much faster and -works on Windows. For all packages other than Cabal itself, it is fine -to use `runghc`. - -This will install into `$HOME/.cabal/` on Unix and into -`Documents and Settings\$User\Application Data\cabal\` on Windows. -If you want to install elsewhere, use the `--prefix=` flag at the -configure step. - - -Installing as root or Administrator ------------------------------------ - - ghc -threaded --make Setup - ./Setup configure - ./Setup build - sudo ./Setup install - -Compiling Setup rather than using `runghc Setup` is much faster and -works on Windows. For all packages other than Cabal itself, it is fine -to use `runghc`. - -This will install into `/usr/local` on Unix, and on Windows it will -install into `$ProgramFiles/Haskell`. If you want to install elsewhere, -use the `--prefix=` flag at the configure step. - - -Using older versions of GHC and Cabal -====================================== - -It is recommended that you leave any pre-existing version of Cabal -installed. In particular, it is *essential* you keep the version that -came with GHC itself, since other installed packages require it (for -instance, the "ghc" API package). - -Prior to GHC 6.4.2, however, GHC did not deal particularly well with -having multiple versions of packages installed at once. So if you are -using GHC 6.4.1 or older and you have an older version of Cabal -installed, you should probably remove it by running: - - $ ghc-pkg unregister Cabal - -or, if you had Cabal installed only for your user account, run: - - $ ghc-pkg unregister Cabal --user - -The `filepath` dependency -========================= - -Cabal uses the [filepath] package, so it must be installed first. -GHC version 6.6.1 and later come with `filepath`, however, earlier -versions do not by default. If you do not already have `filepath`, -you need to install it. You can use any existing version of Cabal to do -that. If you have neither Cabal nor `filepath`, it is slightly -harder but still possible. - -Unpack Cabal and `filepath` into separate directories. For example: - - tar -xzf filepath-1.1.0.0.tar.gz - tar -xzf Cabal-1.6.0.0.tar.gz - - # rename to make the following instructions simpler: - mv filepath-1.1.0.0/ filepath/ - mv Cabal-1.6.0.0/ Cabal/ - - cd Cabal - ghc -i../filepath -cpp --make Setup.hs -o ../filepath/setup - cd ../filepath/ - ./setup configure --user - ./setup build - ./setup install - -This installs `filepath` so that you can install Cabal with the normal -method. - -[filepath]: http://hackage.haskell.org/package/filepath - -More information -================ - -Please see the [Cabal web site] for the [user guide] and [API -documentation]. There is additional information available on the -[development wiki]. - -[user guide]: http://www.haskell.org/cabal/users-guide -[API documentation]: http://www.haskell.org/cabal/release/cabal-latest/doc/API/Cabal/Distribution-Simple.html -[development wiki]: https://github.com/haskell/cabal/wiki - - -Bugs -==== - -Please report bugs and feature requests to Cabal's [bug tracker]. - - -Your help ---------- - -To help Cabal's development, it is enormously helpful to know from -Cabal's users what their most pressing problems are with Cabal and -[Hackage]. You may have a favourite Cabal bug or limitation. Look at -Cabal's [bug tracker]. Ensure that the problem is reported there and -adequately described. Comment on the issue to report how much of a -problem the bug is for you. 
Subscribe to the issues's notifications to -discussed requirements and keep informed on progress. For feature -requests, it is helpful if there is a description of how you would -expect to interact with the new feature. - -[Hackage]: http://hackage.haskell.org - - -Source code -=========== - -You can get the master development branch using: - - $ git clone https://github.com/haskell/cabal.git - - -Credits -======= - -Cabal developers (in alphabetical order): - -- Krasimir Angelov -- Bjorn Bringert -- Duncan Coutts -- Isaac Jones -- David Himmelstrup ("Lemmih") -- Simon Marlow -- Ross Patterson -- Thomas Schilling -- Martin Sjögren -- Malcolm Wallace -- and nearly 30 other people have contributed occasional patches - -Cabal specification authors: - -- Isaac Jones -- Simon Marlow -- Ross Patterson -- Simon Peyton Jones -- Malcolm Wallace - - -[bug tracker]: https://github.com/haskell/cabal/issues -[Cabal web site]: http://www.haskell.org/cabal/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/Setup.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -import Distribution.Simple -main :: IO () -main = defaultMain - --- Although this looks like the Simple build type, it is in fact vital that --- we use this Setup.hs because it'll get compiled against the local copy --- of the Cabal lib, thus enabling Cabal to bootstrap itself without relying --- on any previous installation. This also means we can use any new features --- immediately because we never have to worry about building Cabal with an --- older version of itself. --- --- NOTE 25/01/2015: Bootstrapping is disabled for now, see --- https://github.com/haskell/cabal/issues/3003. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/check.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/check.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/check.sh 2016-11-07 10:02:27.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/check.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -#!/bin/sh - -base_version=1.4.0.2 -test_version=1.5.6 - -for setup in archive/*/*/Setup.hs archive/*/*/Setup.lhs; do - - pkgname=$(basename ${setup}) - - if test $(wc -w < ${setup}) -gt 21; then - if ghc -package Cabal-${base_version} -S ${setup} -o /dev/null 2> /dev/null; then - - if ghc -package Cabal-${test_version} -S ${setup} -o /dev/null 2> /dev/null; then - echo "OK ${setup}" - else - echo "FAIL ${setup} does not compile with Cabal-${test_version}" - fi - else - echo "OK ${setup} (does not compile with Cabal-${base_version})" - fi - else - echo "trivial ${setup}" - fi - -done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/download.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/download.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/download.sh 2016-11-07 10:02:27.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/download.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -#!/bin/sh - -if test ! 
-f archive/archive.tar; then - - wget http://hackage.haskell.org/cgi-bin/hackage-scripts/archive.tar - mkdir -p archive - mv archive.tar archive/ - tar -C archive -xf archive/archive.tar - -fi - -if test ! -f archive/00-index.tar.gz; then - - wget http://hackage.haskell.org/packages/archive/00-index.tar.gz - mkdir -p archive - mv 00-index.tar.gz archive/ - tar -C archive -xzf archive/00-index.tar.gz - -fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/unpack.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/unpack.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/hackage/unpack.sh 2016-11-07 10:02:27.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/hackage/unpack.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -#!/bin/sh - -for tarball in archive/*/*/*.tar.gz; do - - pkgdir=$(dirname ${tarball}) - pkgname=$(basename ${tarball} .tar.gz) - - if tar -tzf ${tarball} ${pkgname}/Setup.hs 2> /dev/null; then - tar -xzf ${tarball} ${pkgname}/Setup.hs -O > ${pkgdir}/Setup.hs - elif tar -tzf ${tarball} ${pkgname}/Setup.lhs 2> /dev/null; then - tar -xzf ${tarball} ${pkgname}/Setup.lhs -O > ${pkgdir}/Setup.lhs - else - echo "${pkgname} has no Setup.hs or .lhs at all!!?!" - fi - -done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/misc/ghc-supported-languages.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/misc/ghc-supported-languages.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/misc/ghc-supported-languages.hs 2016-11-07 10:02:28.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/misc/ghc-supported-languages.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ --- | A test program to check that ghc has got all of its extensions registered --- -module Main where - -import Language.Haskell.Extension -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity - -import Data.List ((\\)) -import Data.Maybe -import Control.Applicative -import Control.Monad -import System.Environment -import System.Exit - --- | A list of GHC extensions that are deliberately not registered, --- e.g. due to being experimental and not ready for public consumption --- -exceptions = map readExtension [] - -checkProblems :: [Extension] -> [String] -checkProblems implemented = - - let unregistered = - [ ext | ext <- implemented -- extensions that ghc knows about - , not (registered ext) -- but that are not registered - , ext `notElem` exceptions ] -- except for the exceptions - - -- check if someone has forgotten to update the exceptions list... - - -- exceptions that are not implemented - badExceptions = exceptions \\ implemented - - -- exceptions that are now registered - badExceptions' = filter registered exceptions - - in catMaybes - [ check unregistered $ unlines - [ "The following extensions are known to GHC but are not in the " - , "extension registry in Language.Haskell.Extension." - , " " ++ intercalate "\n " (map display unregistered) - , "If these extensions are ready for public consumption then they " - , "should be registered. If they are still experimental and you " - , "think they are not ready to be registered then please add them " - , "to the exceptions list in this test program along with an " - , "explanation." 
- ] - , check badExceptions $ unlines - [ "Error in the extension exception list. The following extensions" - , "are listed as exceptions but are not even implemented by GHC:" - , " " ++ intercalate "\n " (map display badExceptions) - , "Please fix this test program by correcting the list of" - , "exceptions." - ] - , check badExceptions' $ unlines - [ "Error in the extension exception list. The following extensions" - , "are listed as exceptions to registration but they are in fact" - , "now registered in Language.Haskell.Extension:" - , " " ++ intercalate "\n " (map display badExceptions') - , "Please fix this test program by correcting the list of" - , "exceptions." - ] - ] - where - registered (UnknownExtension _) = False - registered _ = True - - check [] _ = Nothing - check _ i = Just i - - -main = topHandler $ do - [ghcPath] <- getArgs - exts <- getExtensions ghcPath - let problems = checkProblems exts - putStrLn (intercalate "\n" problems) - if null problems - then exitSuccess - else exitFailure - -getExtensions :: FilePath -> IO [Extension] -getExtensions ghcPath = - map readExtension . lines - <$> rawSystemStdout normal ghcPath ["--supported-languages"] - -readExtension :: String -> Extension -readExtension str = handleNoParse $ do - -- GHC defines extensions in a positive way, Cabal defines them - -- relative to H98 so we try parsing ("No" ++ extName) first - ext <- simpleParse ("No" ++ str) - case ext of - UnknownExtension _ -> simpleParse str - _ -> return ext - where - handleNoParse :: Maybe Extension -> Extension - handleNoParse = fromMaybe (error $ "unparsable extension " ++ show str) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/AllowNewer.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/AllowNewer.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/AllowNewer.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/AllowNewer.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -name: AllowNewer -version: 0.1.0.0 -license: BSD3 -author: Foo Bar -maintainer: cabal-dev@haskell.org -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Foo - hs-source-dirs: src - build-depends: base < 1 - default-language: Haskell2010 - -test-suite foo-test - type: exitcode-stdio-1.0 - main-is: Test.hs - hs-source-dirs: tests - build-depends: base < 1 - -benchmark foo-bench - type: exitcode-stdio-1.0 - main-is: Bench.hs - hs-source-dirs: benchmarks - build-depends: base < 1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/src/Foo.hs 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/src/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/src/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/src/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/tests/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/tests/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/tests/Test.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/AllowNewer/tests/Test.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -!module Main where - -main :: IO () -main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module Main where - -import Foo -import System.Exit - -main :: IO () -main | fooTest [] = exitSuccess - | otherwise = exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Foo where - -fooTest :: [String] -> Bool -fooTest _ = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkExeV10/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: my -version: 0.1 -license: BSD3 -cabal-version: >= 1.9.2 -build-type: Simple - -library - exposed-modules: Foo - build-depends: base - -benchmark bench-Foo - type: exitcode-stdio-1.0 - hs-source-dirs: benchmarks - main-is: bench-Foo.hs - build-depends: base, my diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: BenchmarkOptions -version: 0.1 -license: BSD3 -author: Johan Tibell -stability: stable -category: PackageTests -build-type: Simple -cabal-version: >= 1.9.2 - -description: - Check that Cabal passes the correct test options to test suites. - -executable dummy - main-is: test-BenchmarkOptions.hs - build-depends: base - -benchmark test-BenchmarkOptions - main-is: test-BenchmarkOptions.hs - type: exitcode-stdio-1.0 - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -module Main where - -import System.Environment ( getArgs ) -import System.Exit ( exitFailure, exitSuccess ) - -main :: IO () -main = do - args <- getArgs - if args == ["1", "2", "3"] - then exitSuccess - else putStrLn ("Got: " ++ show args) >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/Check.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -module PackageTests.BenchmarkStanza.Check where - -import PackageTests.PackageTester - -import Distribution.Version -import Distribution.Simple.LocalBuildInfo -import Distribution.Package -import Distribution.PackageDescription - -suite :: TestM () -suite = do - assertOutputDoesNotContain "unknown section type" - =<< cabal' "configure" [] - dist_dir <- distDir - lbi <- liftIO $ getPersistBuildConfig dist_dir - let anticipatedBenchmark = emptyBenchmark - { benchmarkName = "dummy" - , benchmarkInterface = BenchmarkExeV10 (Version [1,0] []) - "dummy.hs" - , benchmarkBuildInfo = emptyBuildInfo - { targetBuildDepends = - [ Dependency (PackageName "base") anyVersion ] - , hsSourceDirs = ["."] - } - , benchmarkEnabled = False - } - gotBenchmark = head $ benchmarks (localPkgDescr lbi) - assertEqual "parsed benchmark stanza does not match anticipated" - anticipatedBenchmark gotBenchmark - return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BenchmarkStanza/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -name: BenchmarkStanza -version: 0.1 -license: BSD3 -author: Johan Tibell -stability: stable -category: PackageTests -build-type: Simple - -description: - Check that Cabal recognizes the benchmark stanza defined below. - -Library - exposed-modules: MyLibrary - build-depends: base - -benchmark dummy - main-is: dummy.hs - type: exitcode-stdio-1.0 - build-depends: base \ No newline at end of file diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/BuildableField.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/BuildableField.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/BuildableField.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/BuildableField.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -name: BuildableField -version: 0.1.0.0 -cabal-version: >=1.2 -build-type: Simple -license: BSD3 - -flag build-exe - default: True - -library - -executable my-executable - build-depends: base, unavailable-package - main-is: Main.hs - if !flag(build-exe) - buildable: False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/Main.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildableField/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -import UnavailableModule - -main :: IO () -main = putStrLn "Hello" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: GlobalBuildDepsNotAdditive1 -version: 0.1 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - If you specify 'base' in the global build dependencies, then define - a library without base, it fails to find 'base' for the library. 
- ---------------------------------------- - -build-depends: base - -Library - exposed-modules: MyLibrary - build-depends: bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: GlobalBuildDepsNotAdditive1 -version: 0.1 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - If you specify 'base' in the global build dependencies, then define - an executable without base, it fails to find 'base' for the executable - ---------------------------------------- - -build-depends: base - -Executable lemon - main-is: lemon.hs - build-depends: bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -main = do - putStrLn (render (text "foo")) - let text = "lemon" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -name: InternalLibrary0 -version: 0.1 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - Check that with 'cabal-version:' containing versions less than 1.7, we do *not* - have the new behaviour to allow executables to refer to the library defined - in the same module. - ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - hs-source-dirs: programs - build-depends: base, bytestring, pretty, InternalLibrary0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -import Text.PrettyPrint -import MyLibrary - -main = do - putStrLn (render (text "foo")) - myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: InternalLibrary1 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - Check for the new (in >= 1.7.1) ability to allow executables to refer to - the library defined in the same module. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - hs-source-dirs: programs - build-depends: base, bytestring, pretty, InternalLibrary1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -import Text.PrettyPrint -import MyLibrary - -main = do - putStrLn (render (text "foo")) - myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: InternalLibrary2 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that the internal library is preferred by ghc to - an installed one of the same name and version. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - hs-source-dirs: programs - build-depends: base, bytestring, pretty, InternalLibrary2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc internal" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -import Text.PrettyPrint -import MyLibrary - -main = do - putStrLn (render (text "foo")) - myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -name: InternalLibrary2 -version: 0.1 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that the internal library is preferred by ghc to - an installed one of the same name and version. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc installed" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: InternalLibrary3 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that the internal library is preferred by ghc to - an installed one of the same name, but a *newer* version. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - hs-source-dirs: programs - build-depends: base, bytestring, pretty, InternalLibrary3 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc internal" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -import Text.PrettyPrint -import MyLibrary - -main = do - putStrLn (render (text "foo")) - myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -name: InternalLibrary3 -version: 0.2 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that the internal library is preferred by ghc to - an installed one of the same name but a *newer* version. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc installed" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: InternalLibrary4 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that we can explicitly say we want InternalLibrary4-0.2 - and it will give us the *installed* version 0.2 instead of the internal 0.1. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - hs-source-dirs: programs - build-depends: base, bytestring, pretty, InternalLibrary4 >= 0.2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc internal" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -import Text.PrettyPrint -import MyLibrary - -main = do - putStrLn (render (text "foo")) - myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -name: InternalLibrary4 -version: 0.2 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - This test is to make sure that the internal library is preferred by ghc to - an installed one of the same name but a *newer* version. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc installed" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -main = do - putStrLn (render (text "foo")) - let text = "lemon" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -main = do - putStrLn (render (text "foo")) - let text = "pineapple" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -name: SameDepsAllRound -version: 0.1 -license: BSD3 -cabal-version: >= 1.6 -author: Stephen Blackheath -stability: stable -synopsis: Same dependencies all round -category: PackageTests -build-type: Simple - -description: - Check for the "old build-dep behaviour" namely that we get the same - package dependencies on all build targets, even if different ones - were specified for different targets - . - Here all .hs files use the three packages mentioned, so this shows - that build-depends is not target-specific. This is the behaviour - we want when cabal-version contains versions less than 1.7. - ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring - -Executable lemon - main-is: lemon.hs - build-depends: pretty - -Executable pineapple - main-is: pineapple.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -main = do - putStrLn (render (text "foo")) - let text = "lemon" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -name: TargetSpecificDeps1 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - Check for the new build-dep behaviour, where build-depends are - handled specifically for each target - ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring - -Executable lemon - main-is: lemon.hs - build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -import qualified Data.ByteString.Char8 as C - -main = do - let text = "lemon" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -name: TargetSpecificDeps1 -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - Check for the new build-dep behaviour, where build-depends are - handled specifically for each target - This one is a control against TargetSpecificDeps1 - it is correct and should - succeed. 
- ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - build-depends: base, bytestring diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -main = do - putStrLn (render (text "foo")) - let text = "lemon" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -name: test -version: 0.1 -license: BSD3 -cabal-version: >= 1.7.1 -author: Stephen Blackheath -stability: stable -category: PackageTests -build-type: Simple - -description: - Check for the new build-dep behaviour, where build-depends are - handled specifically for each target - ---------------------------------------- - -Library - exposed-modules: MyLibrary - build-depends: base, bytestring, pretty - -Executable lemon - main-is: lemon.hs - build-depends: base, bytestring diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ 
-module MyLibrary where - -import qualified Data.ByteString.Char8 as C -import Text.PrettyPrint - -myLibFunc :: IO () -myLibFunc = do - putStrLn (render (text "foo")) - let text = "myLibFunc" - C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -module Dummy2 where - -import Distribution.TestSuite (Test) - -tests :: IO [Test] -tests = return [] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/Bar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/Bar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/Bar.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/Bar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface #-} -module Bar where - -bar :: IO () -bar = return () - -foreign export ccall bar :: IO () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/foo.c cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/foo.c --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/foo.c 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/foo.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -#include -#include "HsFFI.h" - -#ifdef __GLASGOW_HASKELL__ -#include "Bar_stub.h" -#endif - -int main(int argc, char **argv) { - hs_init(&argc, &argv); - bar(); - printf("Hello world!"); - return 0; -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/CMain/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/CMain/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: my -version: 0.1 -license: BSD3 -cabal-version: >= 1.9.2 -build-type: Simple - -executable foo - main-is: foo.c - other-modules: Bar - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Check.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,85 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE 
OverloadedStrings #-} - -module PackageTests.DeterministicAr.Check where - -import Control.Monad -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 -import Data.Char (isSpace) -import PackageTests.PackageTester -import System.IO - -import Distribution.Compiler (CompilerFlavor(..), CompilerId(..)) -import Distribution.Package (getHSLibraryName) -import Distribution.Version (Version(..)) -import Distribution.Simple.Compiler (compilerId) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, compiler, localUnitId) - -suite :: TestM () -suite = do - cabal_build [] - dist_dir <- distDir - lbi <- liftIO $ getPersistBuildConfig dist_dir - liftIO $ checkMetadata lbi (dist_dir "build") - --- Almost a copypasta of Distribution.Simple.Program.Ar.wipeMetadata -checkMetadata :: LocalBuildInfo -> FilePath -> IO () -checkMetadata lbi dir = withBinaryFile path ReadMode $ \ h -> do - hFileSize h >>= checkArchive h - where - path = dir "lib" ++ getHSLibraryName (localUnitId lbi) ++ ".a" - - _ghc_7_10 = case compilerId (compiler lbi) of - CompilerId GHC version | version >= Version [7, 10] [] -> True - _ -> False - - checkError msg = assertFailure ( - "PackageTests.DeterministicAr.checkMetadata: " ++ msg ++ - " in " ++ path) >> undefined - archLF = "!\x0a" -- global magic, 8 bytes - x60LF = "\x60\x0a" -- header magic, 2 bytes - metadata = BS.concat - [ "0 " -- mtime, 12 bytes - , "0 " -- UID, 6 bytes - , "0 " -- GID, 6 bytes - , "0644 " -- mode, 8 bytes - ] - headerSize = 60 - - -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details - checkArchive :: Handle -> Integer -> IO () - checkArchive h archiveSize = do - global <- BS.hGet h (BS.length archLF) - unless (global == archLF) $ checkError "Bad global header" - checkHeader (toInteger $ BS.length archLF) - - where - checkHeader :: Integer -> IO () - checkHeader offset = case compare offset archiveSize of - EQ -> return () - GT -> checkError (atOffset "Archive truncated") - LT -> do - header <- BS.hGet h headerSize - unless (BS.length header == headerSize) $ - checkError (atOffset "Short header") - let magic = BS.drop 58 header - unless (magic == x60LF) . checkError . atOffset $ - "Bad magic " ++ show magic ++ " in header" - - unless (metadata == BS.take 32 (BS.drop 16 header)) - . checkError . 
atOffset $ "Metadata has changed" - - let size = BS.take 10 $ BS.drop 48 header - objSize <- case reads (BS8.unpack size) of - [(n, s)] | all isSpace s -> return n - _ -> checkError (atOffset "Bad file size in header") - - let nextHeader = offset + toInteger headerSize + - -- Odd objects are padded with an extra '\x0a' - if odd objSize then objSize + 1 else objSize - hSeek h AbsoluteSeek nextHeader - checkHeader nextHeader - - where - atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Lib.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/Lib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -module Lib where - -dummy :: IO () -dummy = return () - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DeterministicAr/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -name: DeterministicAr -version: 0 -license: BSD3 -cabal-version: >= 1.9.1 -author: Liyang HU -stability: stable -category: PackageTests -build-type: Simple - -description: - Ensure our GNU ar -D emulation (#1537) works as advertised: check that - all metadata in the resulting .a archive match the default. - -Library - exposed-modules: Lib - build-depends: base - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -name: DuplicateModuleName -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Foo - hs-source-dirs: src - build-depends: base, Cabal - default-language: Haskell2010 - -test-suite foo - type: detailed-0.9 - test-module: Foo - hs-source-dirs: tests - build-depends: base, Cabal, DuplicateModuleName - -test-suite foo2 - type: detailed-0.9 - test-module: Foo - hs-source-dirs: tests2 - build-depends: base, Cabal, DuplicateModuleName diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -module Foo where - -import Distribution.TestSuite - -tests :: IO [Test] -tests = return [Test $ TestInstance - { run = return (Finished (Fail "A")) - , name = "test A" - , tags = [] - , options = [] - , setOption = \_ _-> Left "No Options" - }] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -{-# LANGUAGE PackageImports #-} -module Foo where - -import Distribution.TestSuite -import qualified "DuplicateModuleName" Foo as T - -tests :: IO [Test] -tests = do - r <- T.tests - return $ [Test $ TestInstance - { run = return (Finished (Fail "B")) - , name = "test B" - , tags = [] - , options = [] - , setOption = \_ _-> Left "No Options" - }] ++ r - -this_is_test = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -{-# LANGUAGE PackageImports #-} -module Foo where - -import Distribution.TestSuite -import qualified "DuplicateModuleName" Foo as T - -tests :: IO [Test] -tests = do - r <- T.tests - return $ [Test $ TestInstance - { run = return (Finished (Fail "C")) - , name = "test C" - , tags = [] - , options = [] - , setOption = \_ _-> Left "No Options" - }] ++ r - -this_is_test2 = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/EmptyLib/empty/empty.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/EmptyLib/empty/empty.cabal --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/EmptyLib/empty/empty.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/EmptyLib/empty/empty.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -name: emptyLib -Cabal-version: >= 1.2 -version: 1.0 -build-type: Simple - -Library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "$WITH_GHC" ]; then - echo "Need to set WITH_GHC" - exit 1 -fi -exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -#!/bin/sh -echo "GHC package manager version 9999999" -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: SameDirectory -version: 0.1.0.0 -license: BSD3 -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/CPP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/CPP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/CPP.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/CPP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -{-# LANGUAGE CPP #-} - -module CPP where - -#define HIDING hiding -#define NEEDLES needles - --- | For HIDING NEEDLES. 
-data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Literate.lhs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Literate.lhs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Literate.lhs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Literate.lhs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -> module Literate where - -> -- | For hiding needles. -> data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -name: Haddock -version: 0.1 -license: BSD3 -author: Iain Nicol -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that Cabal successfully invokes Haddock. - -Library - exposed-modules: CPP, Literate, NoCPP, Simple - other-extensions: CPP - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/NoCPP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/NoCPP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/NoCPP.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/NoCPP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module NoCPP (Haystack) where - --- | For hiding needles. -data Haystack = Haystack - --- | Causes a build failure if the CPP language extension is enabled. -stringGap = "Foo\ -\Bar" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Simple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Simple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Simple.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Haddock/Simple.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Simple where - --- | For hiding needles. 
-data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/A.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/A.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/A.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/A.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module A where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: HaddockNewline -version: 0.1.0.0 -synopsis: This has a - newline yo. --- description: -license: BSD3 -license-file: LICENSE -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -extra-source-files: ChangeLog.md -cabal-version: >=1.10 - -library - exposed-modules: A - -- other-modules: - -- other-extensions: - build-depends: base - -- hs-source-dirs: - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/Setup.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/HaddockNewline/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/multInst/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/multInst/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/multInst/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/multInst/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -name: Haddock -version: 0.1 -license: BSD3 -author: Iain Nicol -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that Cabal successfully invokes Haddock. 
- -Library - exposed-modules: CPP, Literate, NoCPP, Simple - other-extensions: CPP - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Options.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Options.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Options.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Options.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} - -module PackageTests.Options - ( OptionEnableAllTests(..) - ) where - -import Data.Typeable (Typeable) - -import Test.Tasty.Options (IsOption(..), flagCLParser, safeRead) - -newtype OptionEnableAllTests = OptionEnableAllTests Bool - deriving Typeable - -instance IsOption OptionEnableAllTests where - defaultValue = OptionEnableAllTests False - parseValue = fmap OptionEnableAllTests . safeRead - optionName = return "enable-all-tests" - optionHelp = return "Enable all tests" - optionCLParser = flagCLParser Nothing (OptionEnableAllTests True) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module Foo where - -x :: IO Int -x = return 5 - -f :: IO Int -f = do x - return 3 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/OrderFlags/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: OrderFlags -version: 0.1 -license: BSD3 -author: Oleksandr Manzyuk -stability: stable -category: PackageTests -build-type: Simple -cabal-version: >=1.9.2 - -description: - Check that Cabal correctly orders flags that are passed to GHC. - -library - exposed-modules: Foo - build-depends: base - - ghc-options: -Wall -Werror - - if impl(ghc >= 6.12.1) - ghc-options: -fno-warn-unused-do-bind diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PackageTester.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PackageTester.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PackageTester.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PackageTester.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,693 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE NondecreasingIndentation #-} -{-# LANGUAGE CPP #-} - -module PackageTests.PackageTester - ( PackageSpec - , SuiteConfig(..) - , TestConfig(..) - , Result(..) 
- , TestM - , runTestM - - -- * Paths - , packageDir - , distDir - , relativeDistDir - , sharedDBPath - , getWithGhcPath - - -- * Running cabal commands - , cabal - , cabal' - , cabal_build - , cabal_install - , ghcPkg - , ghcPkg' - , compileSetup - , run - , runExe - , runExe' - , rawRun - , rawCompileSetup - , withPackage - , withEnv - , withPackageDb - - -- * Polymorphic versions of HUnit functions - , assertFailure - , assertEqual - , assertBool - , shouldExist - , shouldNotExist - - -- * Test helpers - , shouldFail - , whenGhcVersion - , assertOutputContains - , assertOutputDoesNotContain - , assertFindInFile - , concatOutput - , withSymlink - - -- * Test trees - , TestTreeM - , runTestTree - , testTree - , testTree' - , groupTests - , mapTestTrees - , testWhen - , testUnless - , unlessWindows - , hasSharedLibraries - - , getPersistBuildConfig - - -- Common utilities - , module System.FilePath - , module Data.List - , module Control.Monad.IO.Class - , module Text.Regex.Posix - ) where - -import PackageTests.Options - -import Distribution.Compat.CreatePipe (createPipe) -import Distribution.Simple.Compiler (PackageDBStack, PackageDB(..)) -import Distribution.Simple.Program.Run (getEffectiveEnvironment) -import Distribution.System (OS(Windows), buildOS) -import Distribution.Simple.Utils - ( printRawCommandAndArgsAndEnv, withFileContents ) -import Distribution.Simple.Configure - ( getPersistBuildConfig ) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.BuildPaths (exeExtension) - -#ifndef CURRENT_COMPONENT_ID -import Distribution.Simple.Utils (cabalVersion) -import Distribution.Text (display) -#endif - -import qualified Test.Tasty.HUnit as HUnit -import Text.Regex.Posix - -import qualified Control.Exception as E -import Control.Monad -import Control.Monad.Trans.Class (lift) -import Control.Monad.Trans.Reader -import Control.Monad.Trans.Writer -import Control.Monad.IO.Class -import qualified Data.ByteString.Char8 as C -import Data.List -import Data.Version -import System.Directory - ( doesFileExist, canonicalizePath, createDirectoryIfMissing - , removeDirectoryRecursive, getPermissions, setPermissions - , setOwnerExecutable ) -import System.Exit -import System.FilePath -import System.IO -import System.IO.Error (isDoesNotExistError) -import System.Process (runProcess, waitForProcess, showCommandForUser) -import Test.Tasty (TestTree, askOption, testGroup) - -#ifndef mingw32_HOST_OS -import Control.Monad.Catch ( bracket_ ) -import System.Directory ( removeFile ) -import System.Posix.Files ( createSymbolicLink ) -#endif - --- | Our test monad maintains an environment recording the global test --- suite configuration 'SuiteConfig', and the local per-test --- configuration 'TestConfig'. -type TestM = ReaderT (SuiteConfig, TestConfig) IO - --- | Run a test in the test monad. -runTestM :: SuiteConfig -> FilePath -> Maybe String -> TestM a -> IO () -runTestM suite name subname m = do - let test = TestConfig { - testMainName = name, - testSubName = subname, - testShouldFail = False, - testCurrentPackage = ".", - testPackageDb = False, - testEnvironment = [] - } - void (runReaderT (cleanup >> m) (suite, test)) - where - -- TODO: option not to clean up dist dirs; this should be - -- harmless! - cleanup = do - onlyIfExists . removeDirectoryRecursive =<< topDir - --- | Run an IO action, and suppress a "does not exist" error. 
-onlyIfExists :: MonadIO m => IO () -> m () -onlyIfExists m = liftIO $ - E.catch m $ \(e :: IOError) -> - if isDoesNotExistError e - then return () - else E.throwIO e - --- cleaning up: --- cabal clean will clean up dist directory, but we also need to zap --- Setup etc. --- --- Suggestion: just copy the files somewhere else! - --- | Global configuration for the entire test suite. -data SuiteConfig = SuiteConfig - -- | Path to GHC that was used to compile Cabal library under test. - { ghcPath :: FilePath - -- | Version of GHC that compiled Cabal. - , ghcVersion :: Version - -- | Path to ghc-pkg corresponding to 'ghcPath'. - , ghcPkgPath :: FilePath - -- | Path to GHC that we should use to "./Setup --with-ghc" - , withGhcPath :: FilePath - -- | Version of GHC at 'withGhcPath'. - , withGhcVersion :: Version - -- | The build directory that was used to build Cabal (used - -- to compile Setup scripts.) - , cabalDistPref :: FilePath - -- | Configuration options you can use to make the Cabal - -- being tested visible (e.g. if you're using the test runner). - -- We don't add these by default because then you have to - -- link against Cabal which makes the build go longer. - , packageDBStack :: PackageDBStack - -- | How verbose should we be - , suiteVerbosity :: Verbosity - -- | The absolute current working directory - , absoluteCWD :: FilePath - } - -data TestConfig = TestConfig - -- | Test name, MUST be the directory the test packages live in - -- relative to tests/PackageTests - { testMainName :: FilePath - -- | Test sub-name, used to qualify dist/database directory to avoid - -- conflicts. - , testSubName :: Maybe String - -- | This gets modified sometimes - , testShouldFail :: Bool - -- | The "current" package, ala current directory - , testCurrentPackage :: PackageSpec - -- | Says if we've initialized the per-test package DB - , testPackageDb :: Bool - -- | Environment override - , testEnvironment :: [(String, Maybe String)] - } - --- | A package that can be built. -type PackageSpec = FilePath - ------------------------------------------------------------------------- --- * Directories - -simpleSetupPath :: TestM FilePath -simpleSetupPath = do - (suite, _) <- ask - return (absoluteCWD suite "tests/Setup") - --- | The absolute path to the directory containing the files for --- this tests; usually @Check.hs@ and any test packages. -testDir :: TestM FilePath -testDir = do - (suite, test) <- ask - return $ absoluteCWD suite "tests/PackageTests" testMainName test - --- | The absolute path to the root of the package directory; it's --- where the Cabal file lives. This is what you want the CWD of cabal --- calls to be. -packageDir :: TestM FilePath -packageDir = do - (_, test) <- ask - test_dir <- testDir - return $ test_dir testCurrentPackage test - --- | The absolute path to the directory containing all the --- files for ALL tests associated with a test (respecting --- subtests.) To clean, you ONLY need to delete this directory. -topDir :: TestM FilePath -topDir = do - test_dir <- testDir - (_, test) <- ask - return $ test_dir - case testSubName test of - Nothing -> "dist-test" - Just n -> "dist-test." ++ n - -prefixDir :: TestM FilePath -prefixDir = do - top_dir <- topDir - return $ top_dir "usr" - --- | The absolute path to the build directory that should be used --- for the current package in a test. 
-distDir :: TestM FilePath -distDir = do - top_dir <- topDir - (_, test) <- ask - return $ top_dir testCurrentPackage test "dist" - -definitelyMakeRelative :: FilePath -> FilePath -> FilePath -definitelyMakeRelative base0 path0 = - let go [] path = joinPath path - go base [] = joinPath (replicate (length base) "..") - go (".":xs) ys = go xs ys - go xs (".":ys) = go xs ys - go (x:xs) (y:ys) - | x == y = go xs ys - | otherwise = go (x:xs) [] go [] (y:ys) - in go (splitPath base0) (splitPath path0) - --- hpc is stupid and doesn't understand absolute paths. -relativeDistDir :: TestM FilePath -relativeDistDir = do - dist_dir0 <- distDir - pkg_dir <- packageDir - return $ definitelyMakeRelative pkg_dir dist_dir0 - --- | The absolute path to the shared package database that should --- be used by all packages in this test. -sharedDBPath :: TestM FilePath -sharedDBPath = do - top_dir <- topDir - return $ top_dir "packagedb" - -getWithGhcPath :: TestM FilePath -getWithGhcPath = do - (suite, _) <- ask - return $ withGhcPath suite - ------------------------------------------------------------------------- --- * Running cabal - -cabal :: String -> [String] -> TestM () -cabal cmd extraArgs0 = void (cabal' cmd extraArgs0) - -cabal' :: String -> [String] -> TestM Result -cabal' cmd extraArgs0 = do - (suite, test) <- ask - prefix_dir <- prefixDir - when ((cmd == "register" || cmd == "copy") && not (testPackageDb test)) $ - error "Cannot register/copy without using 'withPackageDb'" - let extraArgs1 = case cmd of - "configure" -> - -- If the package database is empty, setting --global - -- here will make us error loudly if we try to install - -- into a bad place. - [ "--global" - , "--with-ghc", withGhcPath suite - -- This improves precision but it increases the number - -- of flags one has to specify and I don't like that; - -- Cabal is going to configure it and usually figure - -- out the right location in any case. - -- , "--with-ghc-pkg", withGhcPkgPath suite - -- Would really like to do this, but we're not always - -- going to be building against sufficiently recent - -- Cabal which provides this macro. - -- , "--dependency=Cabal=" ++ THIS_PACKAGE_KEY - -- These flags make the test suite run faster - -- Can't do this unless we LD_LIBRARY_PATH correctly - -- , "--enable-executable-dynamic" - , "--disable-optimization" - -- Specify where we want our installed packages to go - , "--prefix=" ++ prefix_dir - ] -- Only add the LBI package stack if the GHC version - -- matches. - ++ (if withGhcPath suite == ghcPath suite - then packageDBParams (packageDBStack suite) - else []) - ++ extraArgs0 - -- This gives us MUCH better error messages - "build" -> "-v" : extraArgs0 - _ -> extraArgs0 - -- This is a horrible hack to make hpc work correctly - dist_dir <- relativeDistDir - let extraArgs = ["--distdir", dist_dir] ++ extraArgs1 - doCabal (cmd:extraArgs) - --- | This abstracts the common pattern of configuring and then building. -cabal_build :: [String] -> TestM () -cabal_build args = do - cabal "configure" args - cabal "build" [] - return () - --- | This abstracts the common pattern of "installing" a package. 
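
The cabal_install helper defined next chains the four Setup steps that a manual install would run: configure, build, copy, register. A rough standalone sketch of that sequence, driving an already-built ./Setup directly; the Setup path, prefix, and dist directory here are placeholders, not values taken from the harness:

import System.Process (callProcess)

-- Run one ./Setup command, failing loudly on a non-zero exit code.
setup :: [String] -> IO ()
setup = callProcess "./Setup"

installSequence :: IO ()
installSequence = do
  setup ["configure", "--prefix=/tmp/test-prefix", "--distdir", "dist-test"]
  setup ["build",     "--distdir", "dist-test"]
  setup ["copy",      "--distdir", "dist-test"]
  setup ["register",  "--distdir", "dist-test"]

main :: IO ()
main = installSequence
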
-cabal_install :: [String] -> TestM () -cabal_install args = do - cabal "configure" args - cabal "build" [] - cabal "copy" [] - cabal "register" [] - return () - --- | Determines what Setup executable to run and runs it -doCabal :: [String] -- ^ extra arguments - -> TestM Result -doCabal cabalArgs = do - pkg_dir <- packageDir - customSetup <- liftIO $ doesFileExist (pkg_dir "Setup.hs") - if customSetup - then do - compileSetup - -- TODO make this less racey - let path = pkg_dir "Setup" - run (Just pkg_dir) path cabalArgs - else do - -- Use shared Setup executable (only for Simple build types). - path <- simpleSetupPath - run (Just pkg_dir) path cabalArgs - -packageDBParams :: PackageDBStack -> [String] -packageDBParams dbs = "--package-db=clear" - : map (("--package-db=" ++) . convert) dbs - where - convert :: PackageDB -> String - convert GlobalPackageDB = "global" - convert UserPackageDB = "user" - convert (SpecificPackageDB path) = path - ------------------------------------------------------------------------- --- * Compiling setup scripts - -compileSetup :: TestM () -compileSetup = do - (suite, test) <- ask - pkg_path <- packageDir - liftIO $ rawCompileSetup (suiteVerbosity suite) suite (testEnvironment test) pkg_path - -rawCompileSetup :: Verbosity -> SuiteConfig -> [(String, Maybe String)] -> FilePath -> IO () -rawCompileSetup verbosity suite e path = do - -- NB: Use 'ghcPath', not 'withGhcPath', since we need to be able to - -- link against the Cabal library which was built with 'ghcPath'. - r <- rawRun verbosity (Just path) (ghcPath suite) e $ - [ "--make"] ++ - ghcPackageDBParams (ghcVersion suite) (packageDBStack suite) ++ - [ "-hide-all-packages" - , "-package base" -#ifdef CURRENT_COMPONENT_ID - -- This is best, but we don't necessarily have it - -- if we're bootstrapping with old Cabal. - , "-package-id " ++ CURRENT_COMPONENT_ID -#else - -- This mostly works, UNLESS you've installed a - -- version of Cabal with the SAME version number. - -- Then old GHCs will incorrectly select the installed - -- version (because it prefers the FIRST package it finds.) - -- It also semi-works to not specify "-hide-all-packages" - -- at all, except if there's a later version of Cabal - -- installed GHC will prefer that. 
- , "-package Cabal-" ++ display cabalVersion -#endif - , "-O0" - , "Setup.hs" ] - unless (resultExitCode r == ExitSuccess) $ - error $ - "could not build shared Setup executable\n" ++ - " ran: " ++ resultCommand r ++ "\n" ++ - " output:\n" ++ resultOutput r ++ "\n\n" - -ghcPackageDBParams :: Version -> PackageDBStack -> [String] -ghcPackageDBParams ghc_version dbs - | ghc_version >= Version [7,6] [] - = "-clear-package-db" : map convert dbs - | otherwise - = concatMap convertLegacy dbs - where - convert :: PackageDB -> String - convert GlobalPackageDB = "-global-package-db" - convert UserPackageDB = "-user-package-db" - convert (SpecificPackageDB path) = "-package-db=" ++ path - - convertLegacy :: PackageDB -> [String] - convertLegacy (SpecificPackageDB path) = ["-package-conf=" ++ path] - convertLegacy _ = [] - ------------------------------------------------------------------------- --- * Running ghc-pkg - -ghcPkg :: String -> [String] -> TestM () -ghcPkg cmd args = void (ghcPkg' cmd args) - -ghcPkg' :: String -> [String] -> TestM Result -ghcPkg' cmd args = do - db_path <- sharedDBPath - (config, test) <- ask - unless (testPackageDb test) $ - error "Must initialize package database using withPackageDb" - let db_stack = packageDBStack config ++ [SpecificPackageDB db_path] - extraArgs = ghcPkgPackageDBParams (ghcVersion config) db_stack - run Nothing (ghcPkgPath config) (cmd : extraArgs ++ args) - -ghcPkgPackageDBParams :: Version -> PackageDBStack -> [String] -ghcPkgPackageDBParams version dbs = concatMap convert dbs where - convert :: PackageDB -> [String] - -- Ignoring global/user is dodgy but there's no way good - -- way to give ghc-pkg the correct flags in this case. - convert GlobalPackageDB = [] - convert UserPackageDB = [] - convert (SpecificPackageDB path) - | version >= Version [7,6] [] - = ["--package-db=" ++ path] - | otherwise - = ["--package-conf=" ++ path] - ------------------------------------------------------------------------- --- * Running other things - --- | Run an executable that was produced by cabal. The @exe_name@ --- is precisely the name of the executable section in the file. -runExe :: String -> [String] -> TestM () -runExe exe_name args = void (runExe' exe_name args) - -runExe' :: String -> [String] -> TestM Result -runExe' exe_name args = do - dist_dir <- distDir - let exe = dist_dir "build" exe_name exe_name - run Nothing exe args - -run :: Maybe FilePath -> String -> [String] -> TestM Result -run mb_cwd path args = do - verbosity <- getVerbosity - (_, test) <- ask - r <- liftIO $ rawRun verbosity mb_cwd path (testEnvironment test) args - record r - requireSuccess r - -rawRun :: Verbosity -> Maybe FilePath -> String -> [(String, Maybe String)] -> [String] -> IO Result -rawRun verbosity mb_cwd path envOverrides args = do - -- path is relative to the current directory; canonicalizePath makes it - -- absolute, so that runProcess will find it even when changing directory. 
- path' <- do pathExists <- doesFileExist path - canonicalizePath (if pathExists then path - else path <.> exeExtension) - menv <- getEffectiveEnvironment envOverrides - - printRawCommandAndArgsAndEnv verbosity path' args menv - (readh, writeh) <- createPipe - pid <- runProcess path' args mb_cwd menv Nothing (Just writeh) (Just writeh) - - out <- hGetContents readh - void $ E.evaluate (length out) -- force the output - hClose readh - - -- wait for the program to terminate - exitcode <- waitForProcess pid - return Result { - resultExitCode = exitcode, - resultDirectory = mb_cwd, - resultCommand = showCommandForUser path' args, - resultOutput = out - } - ------------------------------------------------------------------------- --- * Subprocess run results - -data Result = Result - { resultExitCode :: ExitCode - , resultDirectory :: Maybe FilePath - , resultCommand :: String - , resultOutput :: String - } deriving Show - -requireSuccess :: Result -> TestM Result -requireSuccess r@Result { resultCommand = cmd - , resultExitCode = exitCode - , resultOutput = output } = do - (_, test) <- ask - when (exitCode /= ExitSuccess && not (testShouldFail test)) $ - assertFailure $ "Command " ++ cmd ++ " failed.\n" ++ - "Output:\n" ++ output ++ "\n" - when (exitCode == ExitSuccess && testShouldFail test) $ - assertFailure $ "Command " ++ cmd ++ " succeeded.\n" ++ - "Output:\n" ++ output ++ "\n" - return r - -record :: Result -> TestM () -record res = do - build_dir <- distDir - (suite, _) <- ask - liftIO $ createDirectoryIfMissing True build_dir - liftIO $ C.appendFile (build_dir "test.log") - (C.pack $ "+ " ++ resultCommand res ++ "\n" - ++ resultOutput res ++ "\n\n") - let test_sh = build_dir "test.sh" - b <- liftIO $ doesFileExist test_sh - when (not b) . liftIO $ do - -- This is hella racey but this is not that security important - C.appendFile test_sh - (C.pack $ "#/bin/sh\nset -ev\n" ++ - "cd "++ show (absoluteCWD suite) ++"\n") - perms <- getPermissions test_sh - setPermissions test_sh (setOwnerExecutable True perms) - - liftIO $ C.appendFile test_sh - (C.pack - (case resultDirectory res of - Nothing -> resultCommand res - Just d -> "(cd " ++ show d ++ " && " ++ resultCommand res ++ ")\n")) - ------------------------------------------------------------------------- --- * Test helpers - -assertFailure :: MonadIO m => String -> m () -assertFailure = liftIO . HUnit.assertFailure - -assertEqual :: (Eq a, Show a, MonadIO m) => String -> a -> a -> m () -assertEqual s x y = liftIO $ HUnit.assertEqual s x y - -assertBool :: MonadIO m => String -> Bool -> m () -assertBool s x = liftIO $ HUnit.assertBool s x - -shouldExist :: MonadIO m => FilePath -> m () -shouldExist path = liftIO $ doesFileExist path >>= assertBool (path ++ " should exist") - -shouldNotExist :: MonadIO m => FilePath -> m () -shouldNotExist path = - liftIO $ doesFileExist path >>= assertBool (path ++ " should exist") . not - -shouldFail :: TestM a -> TestM a -shouldFail = withReaderT (\(suite, test) -> (suite, test { testShouldFail = not (testShouldFail test) })) - -whenGhcVersion :: (Version -> Bool) -> TestM () -> TestM () -whenGhcVersion p m = do - (suite, _) <- ask - when (p (ghcVersion suite)) m - -withPackage :: FilePath -> TestM a -> TestM a -withPackage f = withReaderT (\(suite, test) -> (suite, test { testCurrentPackage = f })) - --- TODO: Really should accumulate... 
but I think to do this --- properly we can't just append -withEnv :: [(String, Maybe String)] -> TestM a -> TestM a -withEnv e m = do - (_, test0) <- ask - when (not (null (testEnvironment test0))) - $ error "nested withEnv (not yet) supported" - withReaderT (\(suite, test) -> (suite, test { testEnvironment = e })) m - -withPackageDb :: TestM a -> TestM a -withPackageDb m = do - (_, test0) <- ask - db_path <- sharedDBPath - if testPackageDb test0 - then m - else withReaderT (\(suite, test) -> - (suite { packageDBStack - = packageDBStack suite - ++ [SpecificPackageDB db_path] }, - test { testPackageDb = True })) - $ do ghcPkg "init" [db_path] - m - -assertOutputContains :: MonadIO m => String -> Result -> m () -assertOutputContains needle result = - unless (needle `isInfixOf` (concatOutput output)) $ - assertFailure $ - " expected: " ++ needle ++ "\n" ++ - " in output: " ++ output ++ "" - where output = resultOutput result - -assertOutputDoesNotContain :: MonadIO m => String -> Result -> m () -assertOutputDoesNotContain needle result = - when (needle `isInfixOf` (concatOutput output)) $ - assertFailure $ - "unexpected: " ++ needle ++ - " in output: " ++ output - where output = resultOutput result - -assertFindInFile :: MonadIO m => String -> FilePath -> m () -assertFindInFile needle path = - liftIO $ withFileContents path - (\contents -> - unless (needle `isInfixOf` contents) - (assertFailure ("expected: " ++ needle ++ "\n" ++ - " in file: " ++ path))) - --- | Replace line breaks with spaces, correctly handling "\r\n". -concatOutput :: String -> String -concatOutput = unwords . lines . filter ((/=) '\r') - --- | Create a symlink for the duration of the provided action. If the symlink --- already exists, it is deleted. Does not work on Windows. -withSymlink :: FilePath -> FilePath -> TestM a -> TestM a -#ifdef mingw32_HOST_OS -withSymlink _oldpath _newpath _act = - error "PackageTests.PackageTester.withSymlink: does not work on Windows!" -#else -withSymlink oldpath newpath act = do - symlinkExists <- liftIO $ doesFileExist newpath - when symlinkExists $ liftIO $ removeFile newpath - bracket_ (liftIO $ createSymbolicLink oldpath newpath) - (liftIO $ removeFile newpath) act -#endif - ------------------------------------------------------------------------- --- * Test trees - --- | Monad for creating test trees. The option --enable-all-tests determines --- whether to filter tests with 'testWhen' and 'testUnless'. -type TestTreeM = WriterT [TestTree] (Reader OptionEnableAllTests) - -runTestTree :: String -> TestTreeM () -> TestTree -runTestTree name ts = askOption $ - testGroup name . runReader (execWriterT ts) - -testTree :: SuiteConfig -> String -> Maybe String -> TestM a -> TestTreeM () -testTree config name subname m = - testTree' $ HUnit.testCase name $ runTestM config name subname m - -testTree' :: TestTree -> TestTreeM () -testTree' tc = tell [tc] - --- | Create a test group from the output of the given action. -groupTests :: String -> TestTreeM () -> TestTreeM () -groupTests name = censor (\ts -> [testGroup name ts]) - --- | Apply a function to each top-level test tree. -mapTestTrees :: (TestTree -> TestTree) -> TestTreeM a -> TestTreeM a -mapTestTrees = censor . map - -testWhen :: Bool -> TestTreeM () -> TestTreeM () -testWhen c test = do - OptionEnableAllTests enableAll <- lift ask - when (enableAll || c) test - -testUnless :: Bool -> TestTreeM () -> TestTreeM () -testUnless = testWhen . 
not - -unlessWindows :: TestTreeM () -> TestTreeM () -unlessWindows = testUnless (buildOS == Windows) - -hasSharedLibraries :: SuiteConfig -> Bool -hasSharedLibraries config = - buildOS /= Windows || withGhcVersion config < Version [7,8] [] - ------------------------------------------------------------------------- --- Verbosity - -getVerbosity :: TestM Verbosity -getVerbosity = fmap (suiteVerbosity . fst) ask diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/Main.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module Main where - -import Paths_PathsModule (getBinDir) - -main :: IO () -main = do - _ <- getBinDir - return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Executable/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -name: PathsModule -version: 0.1 -license: BSD3 -author: Johan Tibell -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that the generated paths module compiles. - -Executable TestPathsModule - main-is: Main.hs - other-modules: Paths_PathsModule - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Library/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Library/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Library/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PathsModule/Library/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: PathsModule -version: 0.1 -license: BSD3 -author: Johan Tibell -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that the generated paths module compiles. 
- -Library - exposed-modules: Paths_PathsModule - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Foo.hsc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Foo.hsc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Foo.hsc 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Foo.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module Foo where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Main.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -module Main where - -import Foo - -main :: IO () -main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcess/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -name: PreProcess -version: 0.1 -license: BSD3 -author: Johan Tibell -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that preprocessors are run. 
- -Library - exposed-modules: Foo - build-depends: base - -Executable my-executable - main-is: Main.hs - other-modules: Foo - build-depends: base - -Test-Suite my-test-suite - main-is: Main.hs - type: exitcode-stdio-1.0 - other-modules: Foo - build-depends: base - -Benchmark my-benchmark - main-is: Main.hs - type: exitcode-stdio-1.0 - other-modules: Foo - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface #-} -module Foo where - -import Foreign.C.Types - -#def int incr(int x) { return x + 1; } - -foreign import ccall unsafe "Foo_hsc.h incr" - incr :: CInt -> CInt diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Main.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module Main where - -import Foo - -main :: IO () -main = do - let x = incr 4 - return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/PreProcessExtraSources/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -name: PreProcessExtraSources -version: 0.1 -license: BSD3 -author: Ian Ross -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that preprocessors that generate extra C sources are handled. 
- -Library - exposed-modules: Foo - build-depends: base - -Executable my-executable - main-is: Main.hs - other-modules: Foo - build-depends: base - -Test-Suite my-test-suite - main-is: Main.hs - type: exitcode-stdio-1.0 - other-modules: Foo - build-depends: base - -Benchmark my-benchmark - main-is: Main.hs - type: exitcode-stdio-1.0 - other-modules: Foo - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: ReexportedModules -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.21 - -library - build-depends: base, containers - reexported-modules: containers:Data.Map as DataMap diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Main where - -import TH - -main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Lib where - -import TH - -val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -Name: templateHaskell -Version: 0.1 -Build-Type: Simple -Cabal-Version: >= 1.2 - -Library - Exposed-Modules: Lib - Other-Modules: TH - Build-Depends: base, template-haskell - Extensions: TemplateHaskell - -Executable main - Main-is: Exe.hs - Build-Depends: base, template-haskell - Extensions: TemplateHaskell diff -Nru 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module TH where - -splice = [| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Main where - -import TH - -main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Lib where - -import TH - -val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -Name: templateHaskell -Version: 0.1 -Build-Type: Simple -Cabal-Version: >= 1.2 - -Library - Exposed-Modules: Lib - Other-Modules: TH - Build-Depends: base, template-haskell - Extensions: TemplateHaskell - -Executable main - Main-is: Exe.hs - Build-Depends: base, template-haskell - Extensions: TemplateHaskell diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module TH where - -splice = 
[| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Main where - -import TH - -main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Lib where - -import TH - -val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -Name: templateHaskell -Version: 0.1 -Build-Type: Simple -Cabal-Version: >= 1.2 - -Library - Exposed-Modules: Lib - Other-Modules: TH - Build-Depends: base, template-haskell - Extensions: TemplateHaskell - -Executable main - Main-is: Exe.hs - Build-Depends: base, template-haskell - Extensions: TemplateHaskell diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module TH where - -splice = [| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/child.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/child.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/child.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/child.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -name: child -version: 0.1 -description: This 
defines the colliding detailed-0.9 test suite -license: BSD3 -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Child - build-depends: base, parent - default-language: Haskell2010 - -test-suite parent - type: detailed-0.9 - test-module: Test - hs-source-dirs: tests - build-depends: base, Cabal, child diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/Child.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/Child.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/Child.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/Child.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -module Child where -import Parent diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -module Test where - -import Distribution.TestSuite -import Child - -tests :: IO [Test] -tests = return $ [Test $ TestInstance - { run = return (Finished Pass) - , name = "test" - , tags = [] - , options = [] - , setOption = \_ _-> Left "No Options" - }] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/parent.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/parent.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/parent.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/parent.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -name: parent -version: 0.1 -description: This package is what the test suite is going to collide with -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Parent - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/Parent.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/Parent.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/Parent.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestNameCollision/parent/Parent.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module Parent where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/TestOptions.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/TestOptions.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/TestOptions.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/TestOptions.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: TestOptions -version: 0.1 -license: BSD3 -author: Thomas Tuegel -stability: stable -category: PackageTests -build-type: Simple -cabal-version: >= 1.9.2 - -description: - Check that Cabal passes the correct test options to test suites. - -executable dummy - main-is: test-TestOptions.hs - build-depends: base - -test-suite test-TestOptions - main-is: test-TestOptions.hs - type: exitcode-stdio-1.0 - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/test-TestOptions.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/test-TestOptions.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/test-TestOptions.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestOptions/test-TestOptions.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -module Main where - -import System.Environment ( getArgs ) -import System.Exit ( exitFailure, exitSuccess ) - -main :: IO () -main = do - args <- getArgs - if args == ["1", "2", "3"] - then exitSuccess - else putStrLn ("Got: " ++ show args) >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Tests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Tests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/Tests.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/Tests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,307 +0,0 @@ -module PackageTests.Tests(tests) where - -import PackageTests.PackageTester - -import qualified PackageTests.BenchmarkStanza.Check -import qualified PackageTests.TestStanza.Check -import qualified PackageTests.DeterministicAr.Check -import qualified PackageTests.TestSuiteTests.ExeV10.Check - -import Control.Monad - -import Data.Version -import Test.Tasty (mkTimeout, localOption) -import Test.Tasty.HUnit 
(testCase) - -tests :: SuiteConfig -> TestTreeM () -tests config = do - - --------------------------------------------------------------------- - -- * External tests - - -- Test that Cabal parses 'benchmark' sections correctly - tc "BenchmarkStanza" PackageTests.BenchmarkStanza.Check.suite - - -- Test that Cabal parses 'test' sections correctly - tc "TestStanza" PackageTests.TestStanza.Check.suite - - -- Test that Cabal determinstically generates object archives - tc "DeterministicAr" PackageTests.DeterministicAr.Check.suite - - --------------------------------------------------------------------- - -- * Test suite tests - - groupTests "TestSuiteTests" $ do - - -- Test exitcode-stdio-1.0 test suites (and HPC) - groupTests "ExeV10" - (PackageTests.TestSuiteTests.ExeV10.Check.tests config) - - -- Test detailed-0.9 test suites - groupTests "LibV09" $ - let - tcs :: FilePath -> TestM a -> TestTreeM () - tcs name m - = testTree' $ testCase name (runTestM config - "TestSuiteTests/LibV09" (Just name) m) - in do - -- Test if detailed-0.9 builds correctly - tcs "Build" $ cabal_build ["--enable-tests"] - - -- Tests for #2489, stdio deadlock - mapTestTrees (localOption (mkTimeout $ 10 ^ (8 :: Int))) - . tcs "Deadlock" $ do - cabal_build ["--enable-tests"] - shouldFail $ cabal "test" [] - - --------------------------------------------------------------------- - -- * Inline tests - - -- Test if exitcode-stdio-1.0 benchmark builds correctly - tc "BenchmarkExeV10" $ cabal_build ["--enable-benchmarks"] - - -- Test --benchmark-option(s) flags on ./Setup bench - tc "BenchmarkOptions" $ do - cabal_build ["--enable-benchmarks"] - cabal "bench" [ "--benchmark-options=1 2 3" ] - cabal "bench" [ "--benchmark-option=1" - , "--benchmark-option=2" - , "--benchmark-option=3" - ] - - -- Test --test-option(s) flags on ./Setup test - tc "TestOptions" $ do - cabal_build ["--enable-tests"] - cabal "test" ["--test-options=1 2 3"] - cabal "test" [ "--test-option=1" - , "--test-option=2" - , "--test-option=3" - ] - - -- Test attempt to have executable depend on internal - -- library, but cabal-version is too old. - tc "BuildDeps/InternalLibrary0" $ do - r <- shouldFail $ cabal' "configure" [] - -- Should tell you how to enable the desired behavior - let sb = "library which is defined within the same package." - assertOutputContains sb r - - -- Test executable depends on internal library. - tc "BuildDeps/InternalLibrary1" $ cabal_build [] - - -- Test that internal library is preferred to an installed on - -- with the same name and version - tc "BuildDeps/InternalLibrary2" $ internal_lib_test "internal" - - -- Test that internal library is preferred to an installed on - -- with the same name and LATER version - tc "BuildDeps/InternalLibrary3" $ internal_lib_test "internal" - - -- Test that an explicit dependency constraint which doesn't - -- match the internal library causes us to use external library - tc "BuildDeps/InternalLibrary4" $ internal_lib_test "installed" - - -- Test "old build-dep behavior", where we should get the - -- same package dependencies on all targets if cabal-version - -- is sufficiently old. - tc "BuildDeps/SameDepsAllRound" $ cabal_build [] - - -- Test "new build-dep behavior", where each target gets - -- separate dependencies. This tests that an executable - -- dep does not leak into the library. 
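
The next test drives this check by configuring, letting the build fail, and then matching the compiler output with assertRegex, a thin wrapper over (=~) from Text.Regex.Posix (re-exported by the PackageTester module above). A tiny standalone sketch of that kind of match, against a fabricated error line:

import Text.Regex.Posix ((=~))

-- A made-up fragment of GHC output, for illustration only.
sampleOutput :: String
sampleOutput = "MyLibrary.hs:3:8: Could not find module Text.PrettyPrint"

main :: IO ()
main = do
  print (sampleOutput =~ "^MyLibrary.hs:" :: Bool)   -- True: error is in MyLibrary.hs
  print (sampleOutput =~ "(Could not find module|Failed to load interface for)" :: Bool)   -- True
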
- tc "BuildDeps/TargetSpecificDeps1" $ do - cabal "configure" [] - r <- shouldFail $ cabal' "build" [] - assertRegex "error should be in MyLibrary.hs" "^MyLibrary.hs:" r - assertRegex - "error should be \"Could not find module `Text\\.PrettyPrint\"" - "(Could not find module|Failed to load interface for).*Text\\.PrettyPrint" r - - -- This is a control on TargetSpecificDeps1; it should - -- succeed. - tc "BuildDeps/TargetSpecificDeps2" $ cabal_build [] - - -- Test "new build-dep behavior", where each target gets - -- separate dependencies. This tests that an library - -- dep does not leak into the executable. - tc "BuildDeps/TargetSpecificDeps3" $ do - cabal "configure" [] - r <- shouldFail $ cabal' "build" [] - assertRegex "error should be in lemon.hs" "^lemon.hs:" r - assertRegex - "error should be \"Could not find module `Text\\.PrettyPrint\"" - "(Could not find module|Failed to load interface for).*Text\\.PrettyPrint" r - - -- Test that Paths module is generated and available for executables. - tc "PathsModule/Executable" $ cabal_build [] - - -- Test that Paths module is generated and available for libraries. - tc "PathsModule/Library" $ cabal_build [] - - -- Check that preprocessors (hsc2hs) are run - tc "PreProcess" $ cabal_build ["--enable-tests", "--enable-benchmarks"] - - -- Check that preprocessors that generate extra C sources are handled - tc "PreProcessExtraSources" $ cabal_build ["--enable-tests", - "--enable-benchmarks"] - - -- Test building a vanilla library/executable which uses Template Haskell - tc "TemplateHaskell/vanilla" $ cabal_build [] - - -- Test building a profiled library/executable which uses Template Haskell - -- (Cabal has to build the non-profiled version first) - tc "TemplateHaskell/profiling" $ cabal_build ["--enable-library-profiling", - "--enable-profiling"] - - -- Test building a dynamic library/executable which uses Template - -- Haskell - testWhen (hasSharedLibraries config) $ - tc "TemplateHaskell/dynamic" $ cabal_build ["--enable-shared", - "--enable-executable-dynamic"] - - -- Test building an executable whose main() function is defined in a C - -- file - tc "CMain" $ cabal_build [] - - -- Test build when the library is empty, for #1241 - tc "EmptyLib" $ - withPackage "empty" $ cabal_build [] - - -- Test that "./Setup haddock" works correctly - tc "Haddock" $ do - dist_dir <- distDir - let haddocksDir = dist_dir "doc" "html" "Haddock" - cabal "configure" [] - cabal "haddock" [] - let docFiles - = map (haddocksDir ) - ["CPP.html", "Literate.html", "NoCPP.html", "Simple.html"] - mapM_ (assertFindInFile "For hiding needles.") docFiles - - -- Test that Haddock with a newline in synopsis works correctly, #3004 - tc "HaddockNewline" $ do - cabal "configure" [] - cabal "haddock" [] - - -- Test that Cabal properly orders GHC flags passed to GHC (when - -- there are multiple ghc-options fields.) - tc "OrderFlags" $ cabal_build [] - - -- Test that reexported modules build correctly - -- TODO: should also test that they import OK! - tc "ReexportedModules" $ do - whenGhcVersion (>= Version [7,9] []) $ cabal_build [] - - -- Test that Cabal computes different IPIDs when the source changes. - tc "UniqueIPID" . 
withPackageDb $ do - withPackage "P1" $ cabal "configure" [] - withPackage "P2" $ cabal "configure" [] - withPackage "P1" $ cabal "build" [] - withPackage "P1" $ cabal "build" [] -- rebuild should work - r1 <- withPackage "P1" $ cabal' "register" ["--print-ipid", "--inplace"] - withPackage "P2" $ cabal "build" [] - r2 <- withPackage "P2" $ cabal' "register" ["--print-ipid", "--inplace"] - let exIPID s = takeWhile (/= '\n') $ - head . filter (isPrefixOf $ "UniqueIPID-0.1-") $ (tails s) - when ((exIPID $ resultOutput r1) == (exIPID $ resultOutput r2)) $ - assertFailure $ "cabal has not calculated different Installed " ++ - "package ID when source is changed." - - tc "DuplicateModuleName" $ do - cabal_build ["--enable-tests"] - r1 <- shouldFail $ cabal' "test" ["foo"] - assertOutputContains "test B" r1 - assertOutputContains "test A" r1 - r2 <- shouldFail $ cabal' "test" ["foo2"] - assertOutputContains "test C" r2 - assertOutputContains "test A" r2 - - tc "TestNameCollision" $ do - withPackageDb $ do - withPackage "parent" $ cabal_install [] - withPackage "child" $ do - cabal_build ["--enable-tests"] - cabal "test" [] - - -- Test that '--allow-newer' works via the 'Setup.hs configure' interface. - tc "AllowNewer" $ do - shouldFail $ cabal "configure" [] - cabal "configure" ["--allow-newer"] - shouldFail $ cabal "configure" ["--allow-newer=baz,quux"] - cabal "configure" ["--allow-newer=base", "--allow-newer=baz,quux"] - cabal "configure" ["--allow-newer=bar", "--allow-newer=base,baz" - ,"--allow-newer=quux"] - shouldFail $ cabal "configure" ["--enable-tests"] - cabal "configure" ["--enable-tests", "--allow-newer"] - shouldFail $ cabal "configure" ["--enable-benchmarks"] - cabal "configure" ["--enable-benchmarks", "--allow-newer"] - shouldFail $ cabal "configure" ["--enable-benchmarks", "--enable-tests"] - cabal "configure" ["--enable-benchmarks", "--enable-tests" - ,"--allow-newer"] - shouldFail $ cabal "configure" ["--allow-newer=Foo:base"] - shouldFail $ cabal "configure" ["--allow-newer=Foo:base" - ,"--enable-tests", "--enable-benchmarks"] - cabal "configure" ["--allow-newer=AllowNewer:base"] - cabal "configure" ["--allow-newer=AllowNewer:base" - ,"--allow-newer=Foo:base"] - cabal "configure" ["--allow-newer=AllowNewer:base" - ,"--allow-newer=Foo:base" - ,"--enable-tests", "--enable-benchmarks"] - - -- Test that Cabal can choose flags to disable building a component when that - -- component's dependencies are unavailable. The build should succeed without - -- requiring the component's dependencies or imports. - tc "BuildableField" $ do - r <- cabal' "configure" ["-v"] - assertOutputContains "Flags chosen: build-exe=False" r - cabal "build" [] - - -- TODO: Enable these tests on Windows - unlessWindows $ do - tc "GhcPkgGuess/SameDirectory" $ ghc_pkg_guess "ghc" - tc "GhcPkgGuess/SameDirectoryVersion" $ ghc_pkg_guess "ghc-7.10" - tc "GhcPkgGuess/SameDirectoryGhcVersion" $ ghc_pkg_guess "ghc-7.10" - - unlessWindows $ do - tc "GhcPkgGuess/Symlink" $ do - -- We don't want to distribute a tarball with symlinks. See #3190. 
- withSymlink "bin/ghc" - "tests/PackageTests/GhcPkgGuess/Symlink/ghc" $ - ghc_pkg_guess "ghc" - - tc "GhcPkgGuess/SymlinkVersion" $ do - withSymlink "bin/ghc-7.10" - "tests/PackageTests/GhcPkgGuess/SymlinkVersion/ghc" $ - ghc_pkg_guess "ghc" - - tc "GhcPkgGuess/SymlinkGhcVersion" $ do - withSymlink "bin/ghc-7.10" - "tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/ghc" $ - ghc_pkg_guess "ghc" - - where - ghc_pkg_guess bin_name = do - cwd <- packageDir - with_ghc <- getWithGhcPath - r <- withEnv [("WITH_GHC", Just with_ghc)] - . shouldFail $ cabal' "configure" ["-w", cwd bin_name] - assertOutputContains "is version 9999999" r - return () - - -- Shared test function for BuildDeps/InternalLibrary* tests. - internal_lib_test expect = withPackageDb $ do - withPackage "to-install" $ cabal_install [] - cabal_build [] - r <- runExe' "lemon" [] - assertEqual - ("executable should have linked with the " ++ expect ++ " library") - ("foo foo myLibFunc " ++ expect) - (concatOutput (resultOutput r)) - - assertRegex :: String -> String -> Result -> TestM () - assertRegex msg regex r = let out = resultOutput r - in assertBool (msg ++ ",\nactual output:\n" ++ out) - (out =~ regex) - - tc :: FilePath -> TestM a -> TestTreeM () - tc name = testTree config name Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/Check.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -module PackageTests.TestStanza.Check where - -import PackageTests.PackageTester - -import Distribution.Version -import Distribution.Simple.LocalBuildInfo -import Distribution.Package -import Distribution.PackageDescription - -suite :: TestM () -suite = do - assertOutputDoesNotContain "unknown section type" - =<< cabal' "configure" [] - dist_dir <- distDir - lbi <- liftIO $ getPersistBuildConfig dist_dir - let anticipatedTestSuite = emptyTestSuite - { testName = "dummy" - , testInterface = TestSuiteExeV10 (Version [1,0] []) "dummy.hs" - , testBuildInfo = emptyBuildInfo - { targetBuildDepends = - [ Dependency (PackageName "base") anyVersion ] - , hsSourceDirs = ["."] - } - , testEnabled = False - } - gotTestSuite = head $ testSuites (localPkgDescr lbi) - assertEqual "parsed test-suite stanza does not match anticipated" - anticipatedTestSuite gotTestSuite - return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestStanza/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -name: TestStanza -version: 0.1 -license: BSD3 -author: Thomas Tuegel -stability: stable -category: PackageTests -build-type: Simple - -description: - Check that Cabal recognizes the Test stanza defined below. 
- -Library - exposed-modules: MyLibrary - build-depends: base - -test-suite dummy - main-is: dummy.hs - type: exitcode-stdio-1.0 - build-depends: base \ No newline at end of file diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,129 +0,0 @@ -module PackageTests.TestSuiteTests.ExeV10.Check (tests) where - -import qualified Control.Exception as E (IOException, catch) -import Control.Monad (forM_, liftM4, when) -import Data.Maybe (catMaybes) -import System.FilePath -import Test.Tasty.HUnit (testCase) - -import Distribution.Compiler (CompilerFlavor(..), CompilerId(..)) -import Distribution.PackageDescription (package) -import Distribution.Simple.Compiler (compilerId) -import Distribution.Simple.LocalBuildInfo (compiler, localPkgDescr, localCompatPackageKey) -import Distribution.Simple.Hpc -import Distribution.Simple.Program.Builtin (hpcProgram) -import Distribution.Simple.Program.Db - ( emptyProgramDb, configureProgram, requireProgramVersion ) -import Distribution.Text (display) -import qualified Distribution.Verbosity as Verbosity -import Distribution.Version (Version(..), orLaterVersion) - -import PackageTests.PackageTester - -tests :: SuiteConfig -> TestTreeM () -tests config = do - -- TODO: hierarchy and subnaming is a little unfortunate - tc "Test" "Default" $ do - cabal_build ["--enable-tests"] - -- This one runs both tests, including the very LONG Foo - -- test which prints a lot of output - cabal "test" ["--show-details=direct"] - groupTests "WithHpc" $ hpcTestMatrix config - groupTests "WithoutHpc" $ do - -- Ensures that even if -fhpc is manually provided no .tix file is output. - tc "NoTix" "NoHpcNoTix" $ do - dist_dir <- distDir - cabal_build - [ "--enable-tests" - , "--ghc-option=-fhpc" - , "--ghc-option=-hpcdir" - , "--ghc-option=" ++ dist_dir ++ "/hpc/vanilla" ] - cabal "test" ["test-Short", "--show-details=direct"] - lbi <- liftIO $ getPersistBuildConfig dist_dir - let way = guessWay lbi - shouldNotExist $ tixFilePath dist_dir way "test-Short" - -- Ensures that even if a .tix file happens to be left around - -- markup isn't generated. 
- tc "NoMarkup" "NoHpcNoMarkup" $ do - dist_dir <- distDir - let tixFile = tixFilePath dist_dir Vanilla "test-Short" - withEnv [("HPCTIXFILE", Just tixFile)] $ do - cabal_build - [ "--enable-tests" - , "--ghc-option=-fhpc" - , "--ghc-option=-hpcdir" - , "--ghc-option=" ++ dist_dir ++ "/hpc/vanilla" ] - cabal "test" ["test-Short", "--show-details=direct"] - shouldNotExist $ htmlDir dist_dir Vanilla "test-Short" "hpc_index.html" - where - tc :: String -> String -> TestM a -> TestTreeM () - tc name subname m - = testTree' $ testCase name - (runTestM config "TestSuiteTests/ExeV10" (Just subname) m) - -hpcTestMatrix :: SuiteConfig -> TestTreeM () -hpcTestMatrix config = forM_ (choose4 [True, False]) $ - \(libProf, exeProf, exeDyn, shared) -> do - let name | null suffixes = "Vanilla" - | otherwise = intercalate "-" suffixes - where - suffixes = catMaybes - [ if libProf then Just "LibProf" else Nothing - , if exeProf then Just "ExeProf" else Nothing - , if exeDyn then Just "ExeDyn" else Nothing - , if shared then Just "Shared" else Nothing - ] - opts = catMaybes - [ enable libProf "library-profiling" - , enable exeProf "profiling" - , enable exeDyn "executable-dynamic" - , enable shared "shared" - ] - where - enable cond flag - | cond = Just $ "--enable-" ++ flag - | otherwise = Nothing - -- Ensure that both .tix file and markup are generated if coverage - -- is enabled. - testUnless ((exeDyn || shared) && not (hasSharedLibraries config)) $ - tc name ("WithHpc-" ++ name) $ do - isCorrectVersion <- liftIO $ correctHpcVersion - when isCorrectVersion $ do - dist_dir <- distDir - cabal_build ("--enable-tests" : "--enable-coverage" : opts) - cabal "test" ["test-Short", "--show-details=direct"] - lbi <- liftIO $ getPersistBuildConfig dist_dir - let way = guessWay lbi - CompilerId comp version = compilerId (compiler lbi) - subdir - | comp == GHC && version >= Version [7, 10] [] = - localCompatPackageKey lbi - | otherwise = display (package $ localPkgDescr lbi) - mapM_ shouldExist - [ mixDir dist_dir way "my-0.1" subdir "Foo.mix" - , mixDir dist_dir way "test-Short" "Main.mix" - , tixFilePath dist_dir way "test-Short" - , htmlDir dist_dir way "test-Short" "hpc_index.html" - ] - where - tc :: String -> String -> TestM a -> TestTreeM () - tc name subname m - = testTree' $ testCase name - (runTestM config "TestSuiteTests/ExeV10" (Just subname) m) - - choose4 :: [a] -> [(a, a, a, a)] - choose4 xs = liftM4 (,,,) xs xs xs xs - --- | Checks for a suitable HPC version for testing. -correctHpcVersion :: IO Bool -correctHpcVersion = do - let programDb' = emptyProgramDb - let verbosity = Verbosity.normal - let verRange = orLaterVersion (Version [0,7] []) - programDb <- configureProgram verbosity hpcProgram programDb' - (requireProgramVersion verbosity hpcProgram verRange programDb - >> return True) `catchIO` (\_ -> return False) - where - -- Distribution.Compat.Exception is hidden. 
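
The local catchIO defined next exists only because Distribution.Compat.Exception is hidden; the surrounding check simply treats any IOException raised while probing hpc as "hpc is unavailable". A standalone sketch of that probe-and-fall-back idea using plain process and exception calls rather than the Cabal program database (the "hpc version" invocation is illustrative):

import qualified Control.Exception as E
import System.Process (readProcess)

-- True if running "hpc version" succeeds, False if it cannot be run at all.
hpcAvailable :: IO Bool
hpcAvailable = probe `E.catch` absent
  where
    probe = readProcess "hpc" ["version"] "" >> return True
    absent :: E.IOException -> IO Bool
    absent _ = return False

main :: IO ()
main = hpcAvailable >>= print
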
- catchIO :: IO a -> (E.IOException -> IO a) -> IO a - catchIO = E.catch diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Foo where - -fooTest :: [String] -> Bool -fooTest _ = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: my -version: 0.1 -license: BSD3 -cabal-version: >= 1.9.2 -build-type: Simple - -library - exposed-modules: Foo - build-depends: base - -test-suite test-Foo - type: exitcode-stdio-1.0 - hs-source-dirs: tests - main-is: test-Foo.hs - build-depends: base, my - -test-suite test-Short - type: exitcode-stdio-1.0 - hs-source-dirs: tests - main-is: test-Short.hs - build-depends: base, my diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -module Main where - -import Foo -import System.Exit -import Control.Monad - -main :: IO () -main | fooTest [] = do - -- Make sure that the output buffer is drained - replicateM 10000 $ putStrLn "The quick brown fox jumps over the lazy dog" - exitSuccess - | otherwise = exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -module Main where - -import Foo -import System.Exit -import Control.Monad - -main :: IO () -main | fooTest [] = do - replicateM 5 $ putStrLn "The quick brown fox jumps over the lazy dog" - exitSuccess - | otherwise = exitFailure diff -Nru 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -module Lib where - -import Distribution.TestSuite - -nullt x = Test $ TestInstance - { run = return $ Finished (Fail "no reason") - , name = "test " ++ show x - , tags = [] - , options = [] - , setOption = \_ _-> Left "No Options" - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: LibV09 -version: 0.1 -cabal-version: >= 1.2 -license: BSD3 -author: Thomas Tuegel -stability: stable -category: PackageTests -build-type: Simple -cabal-version: >= 1.9.2 - -description: Check type detailed-0.9 test suites. - -library - exposed-modules: Lib - build-depends: base, Cabal - -test-suite LibV09-Deadlock - type: detailed-0.9 - hs-source-dirs: tests - test-module: Deadlock - build-depends: base, Cabal, LibV09 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -module Deadlock where - -import Distribution.TestSuite - -import Lib - -tests :: IO [Test] -tests = return [nullt x | x <- [1 .. 
1000]] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/M.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/M.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/M.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/M.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -module M(m) where - -m = print "1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P1/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: UniqueIPID -version: 0.1 -license: BSD3 -author: Vishal Agrawal -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that Cabal generates unique IPID based on source. - -Library - exposed-modules: M - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/M.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/M.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/M.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/M.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -module M(m) where - -m = print "2" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/my.cabal 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests/UniqueIPID/P2/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: UniqueIPID -version: 0.1 -license: BSD3 -author: Vishal Agrawal -stability: stable -category: PackageTests -build-type: Simple -Cabal-version: >= 1.2 - -description: - Check that Cabal generates unique IPID based on source. - -Library - exposed-modules: M - build-depends: base, containers diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/PackageTests.hs 2016-11-07 10:02:25.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/PackageTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,283 +0,0 @@ --- The intention is that this will be the new unit test framework. --- Please add any working tests here. This file should do nothing --- but import tests from other modules. 
--- --- Stephen Blackheath, 2009 - -module Main where - -import PackageTests.Options -import PackageTests.PackageTester -import PackageTests.Tests - -import Distribution.Simple.Configure - ( ConfigStateFileError(..), findDistPrefOrDefault, getConfigStateFile - , interpretPackageDbFlags ) -import Distribution.Simple.Compiler (PackageDB(..), PackageDBStack) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) -import Distribution.Simple.Program.Types (Program(..), programPath, programVersion) -import Distribution.Simple.Program.Builtin - ( ghcProgram, ghcPkgProgram, haddockProgram ) -import Distribution.Simple.Program.Db (requireProgram) -import Distribution.Simple.Setup (Flag(..), readPackageDbList, showPackageDbList) -import Distribution.Simple.Utils (cabalVersion) -import Distribution.Text (display, simpleParse) -import Distribution.Verbosity (normal, flagToVerbosity) -import Distribution.ReadE (readEOrFail) - -import Control.Exception -import Data.Proxy ( Proxy(..) ) -import Distribution.Compat.Environment ( lookupEnv ) -import System.Directory -import Test.Tasty -import Test.Tasty.Options -import Test.Tasty.Ingredients -import Data.Maybe - -#if MIN_VERSION_base(4,6,0) -import System.Environment ( getExecutablePath ) -#endif - -main :: IO () -main = do - -- In abstract, the Cabal test suite makes calls to the "Setup" - -- executable and tests the output of Cabal. However, we have to - -- responsible for building this executable in the first place, - -- since (1) Cabal doesn't support a test-suite depending on an - -- executable, so we can't put a "Setup" executable in the Cabal - -- file and then depend on it, (2) we don't want to call the Cabal - -- functions *directly* because we need to capture and save the - -- stdout and stderr, and (3) even if we could do all that, we will - -- want to test some Custom setup scripts, which will be specific to - -- the test at hand and need to be compiled against Cabal. - -- - -- To be able to build the executable, there is some information - -- we need: - -- - -- 1. We need to know what ghc to use, - -- - -- 2. We need to know what package databases (plural!) contain - -- all of the necessary dependencies to make our Cabal package - -- well-formed. - -- - -- We could have the user pass these all in as arguments (TODO: this - -- should be an option), but there's a more convenient way to get - -- this information: the *build configuration* that was used to - -- build the Cabal library (and this test suite) in the first place. - -- To do this, we need to find the 'dist' directory that was set as - -- the build directory for Cabal. - - dist_dir <- guessDistDir - -- Might be bottom, if we can't figure it out. If you override - -- all the relevant parameters you might still succeed. - lbi <- getPersistBuildConfig_ (dist_dir "setup-config") - - -- You need to run the test suite in the right directory, sorry. - test_dir <- getCurrentDirectory - - -- Pull out the information we need from the LBI - -- TODO: The paths to GHC are configurable by command line, but you - -- have to be careful: some tests might depend on the Cabal library, - -- in which case you REALLY need to have built and installed Cabal - -- for the version that the test suite is being built against. The - -- easiest thing to do is make sure you built Cabal the same way as - -- you will run the tests. 
- let getExePathFromEnvOrLBI env_name prog = do - r <- lookupEnv env_name - case r of - Nothing -> do - (conf, _) <- requireProgram normal prog (withPrograms lbi) - return (programPath conf) - Just x -> return x - -- It is too much damn work to actually properly configure it - -- (Cabal will go off and probe GHC and we really aren't keen - -- on doing this every time we run the test suite.) - ghc_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_GHC" ghcProgram - ghc_pkg_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_GHC_PKG" - ghcPkgProgram - haddock_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_HADDOCK" - haddockProgram - - with_ghc_path <- fromMaybe ghc_path - `fmap` lookupEnv "CABAL_PACKAGETESTS_WITH_GHC" - - ghc_version_env <- lookupEnv "CABAL_PACKAGETESTS_GHC_VERSION" - ghc_version <- case ghc_version_env of - Nothing -> do - (ghcConf, _) <- requireProgram normal ghcProgram (withPrograms lbi) - return (fromJust (programVersion ghcConf)) - Just str -> - return (fromJust (simpleParse str)) - - with_ghc_version <- do - version <- programFindVersion ghcProgram normal with_ghc_path - case version of - Nothing -> error "Cannot determine version of GHC used for --with-ghc" - Just v -> return v - - -- Package DBs are not guaranteed to be absolute, so make them so in - -- case a subprocess using the package DB needs a different CWD. - db_stack_env <- lookupEnv "CABAL_PACKAGETESTS_DB_STACK" - let packageDBStack0 = case db_stack_env of - Nothing -> withPackageDB lbi - Just str -> interpretPackageDbFlags True -- user install? why not. - (concatMap readPackageDbList - (splitSearchPath str)) - packageDBStack1 <- mapM canonicalizePackageDB packageDBStack0 - - -- The packageDBStack is worth some commentary. The database - -- stack we extract from the LBI will contain enough package - -- databases to make the Cabal package well-formed. However, - -- it does not *contain* the inplace installed Cabal package. - -- So we need to add that to the stack. - let packageDBStack2 - = packageDBStack1 ++ - [SpecificPackageDB - (dist_dir "package.conf.inplace")] - - -- THIS ISN'T EVEN MY FINAL FORM. The package database stack - -- controls where we install a package; specifically, the package is - -- installed to the top-most package on the stack (this makes the - -- most sense, since it could depend on any of the packages below - -- it.) If the test wants to register anything (as opposed to just - -- working in place), then we need to have another temporary - -- database we can install into (and not accidentally clobber any of - -- the other stacks.) This is done on a per-test basis. - -- - -- ONE MORE THING. On the subject of installing the package (with - -- copy/register) it is EXTREMELY important that we also overload - -- the install directories, so we don't clobber anything in the - -- default install paths. VERY IMPORTANT. - - -- TODO: make this controllable by a flag - verbosity <- maybe normal (readEOrFail flagToVerbosity) - `fmap` lookupEnv "VERBOSE" - -- The inplaceDB is where the Cabal library was registered - -- in place (and is usable.) inplaceConfig is a convenient - -- set of flags to make sure we make it visible. 
- let suite = SuiteConfig - { cabalDistPref = dist_dir - , ghcPath = ghc_path - , ghcVersion = ghc_version - , ghcPkgPath = ghc_pkg_path - , withGhcPath = with_ghc_path - , withGhcVersion = with_ghc_version - , packageDBStack = packageDBStack2 - , suiteVerbosity = verbosity - , absoluteCWD = test_dir - } - - putStrLn $ "Cabal test suite - testing cabal version " - ++ display cabalVersion - putStrLn $ "Cabal build directory: " ++ dist_dir - putStrLn $ "Test directory: " ++ test_dir - -- TODO: it might be useful to factor this out so that ./Setup - -- configure dumps this file, so we can read it without in a version - -- stable way. - putStrLn $ "Environment:" - putStrLn $ "CABAL_PACKAGETESTS_GHC=" ++ show ghc_path ++ " \\" - putStrLn $ "CABAL_PACKAGETESTS_GHC_VERSION=" - ++ show (display ghc_version) ++ " \\" - putStrLn $ "CABAL_PACKAGETESTS_GHC_PKG=" ++ show ghc_pkg_path ++ " \\" - putStrLn $ "CABAL_PACKAGETESTS_WITH_GHC=" ++ show with_ghc_path ++ " \\" - putStrLn $ "CABAL_PACKAGETESTS_HADDOCK=" ++ show haddock_path ++ " \\" - -- For brevity, do pre-canonicalization - putStrLn $ "CABAL_PACKAGETESTS_DB_STACK=" ++ - show (intercalate [searchPathSeparator] - (showPackageDbList (uninterpretPackageDBFlags - packageDBStack0))) - - -- Create a shared Setup executable to speed up Simple tests - putStrLn $ "Building shared ./Setup executable" - rawCompileSetup verbosity suite [] "tests" - - defaultMainWithIngredients options $ - runTestTree "Package Tests" (tests suite) - --- Reverse of 'interpretPackageDbFlags'. --- prop_idem stk b --- = interpretPackageDbFlags b (uninterpretPackageDBFlags stk) == stk -uninterpretPackageDBFlags :: PackageDBStack -> [Maybe PackageDB] -uninterpretPackageDBFlags stk = Nothing : map (\x -> Just x) stk - --- | Guess what the 'dist' directory Cabal was installed in is. There's --- no 100% reliable way to find this, but there are a few good shots: --- --- 1. Test programs are ~always built in-place, in a directory --- that looks like dist/build/package-tests/package-tests; --- thus the directory can be determined by looking at $0. --- This method is robust against sandboxes, Nix local --- builds, and Stack, but doesn't work if you're running --- in an interpreter. --- --- 2. We can use the normal input methods (as per Cabal), --- checking for the CABAL_BUILDDIR environment variable as --- well as the default location in the current working directory. --- --- NB: If you update this, also update its copy in cabal-install's --- IntegrationTests -guessDistDir :: IO FilePath -guessDistDir = do -#if MIN_VERSION_base(4,6,0) - -- Method (1) - -- TODO: this needs to be BC'ified, probably. - exe_path <- canonicalizePath =<< getExecutablePath - -- exe_path is something like /path/to/dist/build/package-tests/package-tests - let dist0 = dropFileName exe_path ".." ".." - b <- doesFileExist (dist0 "setup-config") -#else - let dist0 = error "no path" - b = False -#endif - -- Method (2) - if b then canonicalizePath dist0 - else findDistPrefOrDefault NoFlag >>= canonicalizePath - -canonicalizePackageDB :: PackageDB -> IO PackageDB -canonicalizePackageDB (SpecificPackageDB path) - = SpecificPackageDB `fmap` canonicalizePath path -canonicalizePackageDB x = return x - --- | Like Distribution.Simple.Configure.getPersistBuildConfig but --- doesn't check that the Cabal version matches, which it doesn't when --- we run Cabal's own test suite, due to bootstrapping issues. --- Here's the situation: --- --- 1. There's some system Cabal-1.0 installed. We use this --- to build Setup.hs --- 2. 
We run ./Setup configure, which uses Cabal-1.0 to --- write out the LocalBuildInfo --- 3. We build the Cabal library, whose version is Cabal-2.0 --- 4. We build the package-tests executable, which LINKS AGAINST --- Cabal-2.0 --- 5. We try to read the LocalBuildInfo that ./Setup configure --- wrote out, but it's Cabal-1.0 format! --- --- It's a bit skeevy that we're trying to read Cabal-1.0 LocalBuildInfo --- using Cabal-2.0's parser, but this seems to work OK in practice --- because LocalBuildInfo is a slow-moving data structure. If --- we ever make a major change, this won't work, and we'll have to --- take a different approach (either setting "build-type: Custom" --- so we bootstrap with the most recent Cabal, or by writing the --- information we need in another format.) -getPersistBuildConfig_ :: FilePath -> IO LocalBuildInfo -getPersistBuildConfig_ filename = do - eLBI <- try $ getConfigStateFile filename - case eLBI of - Left (ConfigStateFileBadVersion _ _ (Right lbi)) -> return lbi - -- These errors are lazy! We might not need these parameters. - Left (ConfigStateFileBadVersion _ _ (Left err)) - -> return . error $ - "We couldn't understand the build configuration. Try " ++ - "editing Cabal.cabal to have 'build-type: Custom' " ++ - "and then rebuilding, or manually specifying CABAL_PACKAGETESTS_* " ++ - "environment variables (see README.md for more details)." ++ - "\n\nOriginal error: " ++ - show err - Left err -> return (throw err) - Right lbi -> return lbi - -options :: [Ingredient] -options = includingOptions - [Option (Proxy :: Proxy OptionEnableAllTests)] : - defaultIngredients diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/README.md 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -Writing package tests -===================== - -The tests under the [PackageTests] directory define and build packages -that exercise various components of Cabal. Each test case is an [HUnit] -test. The entry point for the test suite, where all the test cases are -listed, is [PackageTests.hs]. There are utilities for calling the stages -of Cabal's build process in [PackageTests/PackageTester.hs]; have a look -at an existing test case to see how they are used. - -In order to run the tests, `PackageTests` needs to know where the inplace -copy of Cabal being tested is, as well as some information which was -used to configure it. By default, `PackageTests` tries to look at the -`LocalBuildInfo`, but if the format of `LocalBuildInfo` has changed -between the version of Cabal which ran the configure step, and the -version of Cabal we are testing against, this may fail. In that -case, you can manually specify the information we need using -the following environment variables: - -* `CABAL_PACKAGETESTS_GHC` is the path to the GHC you compiled Cabal with -* `CABAL_PACKAGETESTS_GHC_PKG` is the path to the ghc-pkg associated with this GHC -* `CABAL_PACKAGETESTS_HADDOCK` is the path to the haddock associated with this GHC -* `CABAL_PACKAGETESTS_GHC_VERSION` is the version of your GHC -* `CABAL_PACKAGETESTS_DB_STACK` is a PATH-style list of package database paths, - `clear`, `global` and `user`. 
Each component of the list is - interpreted the same way as Cabal's `-package-db` flag. This list - must contain the copy of Cabal you are planning to test against - (as well as its transitive dependencies). - -If you can successfully run the test suite, we'll print out examples -of all of these values for you under "Environment". - -[PackageTests]: PackageTests -[HUnit]: http://hackage.haskell.org/package/HUnit -[PackageTests.hs]: PackageTests.hs -[PackageTests/PackageTester.hs]: PackageTests/PackageTester.hs -[detailed]: ../Distribution/TestSuite.hs -[PackageTests/BuildTestSuiteDetailedV09/Check.hs]: PackageTests/BuildTestSuiteDetailedV09/Check.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Setup.hs 2016-11-07 10:02:27.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -import Distribution.Simple -main = defaultMain - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Test/Laws.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Test/Laws.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Test/Laws.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Test/Laws.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,79 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-missing-signatures #-} -module Test.Laws where - -import Prelude hiding (Num((+), (*))) -import Data.Monoid (Monoid(..), Endo(..)) -import qualified Data.Foldable as Foldable - -idempotent_unary f x = f fx == fx where fx = f x - --- Basic laws on binary operators - -idempotent_binary (+) x = x + x == x - -commutative (+) x y = x + y == y + x - -associative (+) x y z = (x + y) + z == x + (y + z) - -distributive_left (*) (+) x y z = x * (y + z) == (x * y) + (x * z) - -distributive_right (*) (+) x y z = (y + z) * x == (y * x) + (z * x) - - --- | The first 'fmap' law --- --- > fmap id == id --- -fmap_1 :: (Eq (f a), Functor f) => f a -> Bool -fmap_1 x = fmap id x == x - --- | The second 'fmap' law --- --- > fmap (f . g) == fmap f . fmap g --- -fmap_2 :: (Eq (f c), Functor f) => (b -> c) -> (a -> b) -> f a -> Bool -fmap_2 f g x = fmap (f . g) x == (fmap f . fmap g) x - - --- | The monoid identity law, 'mempty' is a left and right identity of --- 'mappend': --- --- > mempty `mappend` x = x --- > x `mappend` mempty = x --- -monoid_1 :: (Eq a, Data.Monoid.Monoid a) => a -> Bool -monoid_1 x = mempty `mappend` x == x - && x `mappend` mempty == x - --- | The monoid associativity law, 'mappend' must be associative. --- --- > (x `mappend` y) `mappend` z = x `mappend` (y `mappend` z) --- -monoid_2 :: (Eq a, Data.Monoid.Monoid a) => a -> a -> a -> Bool -monoid_2 x y z = (x `mappend` y) `mappend` z - == x `mappend` (y `mappend` z) - --- | The 'mconcat' definition. 
It can be overidden for the sake of effeciency --- but it must still satisfy the property given by the default definition: --- --- > mconcat = foldr mappend mempty --- -monoid_3 :: (Eq a, Data.Monoid.Monoid a) => [a] -> Bool -monoid_3 xs = mconcat xs == foldr mappend mempty xs - - --- | First 'Foldable' law --- --- > Foldable.fold = Foldable.foldr mappend mempty --- -foldable_1 :: (Foldable.Foldable t, Monoid m, Eq m) => t m -> Bool -foldable_1 x = Foldable.fold x == Foldable.foldr mappend mempty x - --- | Second 'Foldable' law --- --- > foldr f z t = appEndo (foldMap (Endo . f) t) z --- -foldable_2 :: (Foldable.Foldable t, Eq b) - => (a -> b -> b) -> b -> t a -> Bool -foldable_2 f z t = Foldable.foldr f z t - == appEndo (Foldable.foldMap (Endo . f) t) z diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Test/QuickCheck/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Test/QuickCheck/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/Test/QuickCheck/Utils.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/Test/QuickCheck/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -module Test.QuickCheck.Utils where - -import Test.QuickCheck.Gen - - --- | Adjust the size of the generated value. --- --- In general the size gets bigger and bigger linearly. For some types --- it is not appropriate to generate ever bigger values but instead --- to generate lots of intermediate sized values. You could do that using: --- --- > adjustSize (\n -> min n 5) --- --- Similarly, for some types the linear size growth may mean getting too big --- too quickly relative to other values. So you may want to adjust how --- quickly the size grows. For example dividing by a constant, or even --- something like the integer square root or log. --- --- > adjustSize (\n -> n `div` 2) --- --- Putting this together we can make for example a relatively short list: --- --- > adjustSize (\n -> min 5 (n `div` 3)) (listOf1 arbitrary) --- --- Not only do we put a limit on the length but we also scale the growth to --- prevent it from hitting the maximum size quite so early. --- -adjustSize :: (Int -> Int) -> Gen a -> Gen a -adjustSize adjust gen = sized (\n -> resize (adjust n) gen) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -module UnitTests.Distribution.Compat.CreatePipe (tests) where - -import Distribution.Compat.CreatePipe -import System.IO (hClose, hGetContents, hPutStr, hSetEncoding, localeEncoding) -import Test.Tasty -import Test.Tasty.HUnit - -tests :: [TestTree] -tests = [testCase "Locale Encoding" case_Locale_Encoding] - -case_Locale_Encoding :: Assertion -case_Locale_Encoding = assert $ do - let str = "\0252" - (r, w) <- createPipe - hSetEncoding w localeEncoding - out <- hGetContents r - hPutStr w str - hClose w - return $! 
out == str diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,153 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.ReadP --- Copyright : (c) The University of Glasgow 2002 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This code was originally in Distribution.Compat.ReadP. Please see that file --- for provenance. The tests have been integrated into the test framework. --- Some properties cannot be tested, as they hold over arbitrary ReadP values, --- and we don't have a good Arbitrary instance (nor Show instance) for ReadP. --- -module UnitTests.Distribution.Compat.ReadP - ( tests - -- * Properties - -- $properties - ) where - -import Data.List -import Distribution.Compat.ReadP -import Test.Tasty -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ testProperty "Get Nil" prop_Get_Nil - , testProperty "Get Cons" prop_Get_Cons - , testProperty "Look" prop_Look - , testProperty "Fail" prop_Fail - , testProperty "Return" prop_Return - --, testProperty "Bind" prop_Bind - --, testProperty "Plus" prop_Plus - --, testProperty "LeftPlus" prop_LeftPlus - --, testProperty "Gather" prop_Gather - , testProperty "String Yes" prop_String_Yes - , testProperty "String Maybe" prop_String_Maybe - , testProperty "Munch" (prop_Munch evenChar) - , testProperty "Munch1" (prop_Munch1 evenChar) - --, testProperty "Choice" prop_Choice - --, testProperty "ReadS" prop_ReadS - ] - --- --------------------------------------------------------------------------- --- QuickCheck properties that hold for the combinators - -{- $properties -The following are QuickCheck specifications of what the combinators do. -These can be seen as formal specifications of the behavior of the -combinators. - -We use bags to give semantics to the combinators. --} - -type Bag a = [a] - --- Equality on bags does not care about the order of elements. - -(=~) :: Ord a => Bag a -> Bag a -> Bool -xs =~ ys = sort xs == sort ys - --- A special equality operator to avoid unresolved overloading --- when testing the properties. - -(=~.) :: Bag (Int,String) -> Bag (Int,String) -> Bool -(=~.) = (=~) - --- Here follow the properties: - -prop_Get_Nil :: Bool -prop_Get_Nil = - readP_to_S get [] =~ [] - -prop_Get_Cons :: Char -> [Char] -> Bool -prop_Get_Cons c s = - readP_to_S get (c:s) =~ [(c,s)] - -prop_Look :: String -> Bool -prop_Look s = - readP_to_S look s =~ [(s,s)] - -prop_Fail :: String -> Bool -prop_Fail s = - readP_to_S pfail s =~. [] - -prop_Return :: Int -> String -> Bool -prop_Return x s = - readP_to_S (return x) s =~. [(x,s)] - -{- -prop_Bind p k s = - readP_to_S (p >>= k) s =~. - [ ys'' - | (x,s') <- readP_to_S p s - , ys'' <- readP_to_S (k (x::Int)) s' - ] - -prop_Plus :: ReadP Int Int -> ReadP Int Int -> String -> Bool -prop_Plus p q s = - readP_to_S (p +++ q) s =~. 
- (readP_to_S p s ++ readP_to_S q s) - -prop_LeftPlus :: ReadP Int Int -> ReadP Int Int -> String -> Bool -prop_LeftPlus p q s = - readP_to_S (p <++ q) s =~. - (readP_to_S p s +<+ readP_to_S q s) - where - [] +<+ ys = ys - xs +<+ _ = xs - -prop_Gather s = - forAll readPWithoutReadS $ \p -> - readP_to_S (gather p) s =~ - [ ((pre,x::Int),s') - | (x,s') <- readP_to_S p s - , let pre = take (length s - length s') s - ] --} - -prop_String_Yes :: String -> [Char] -> Bool -prop_String_Yes this s = - readP_to_S (string this) (this ++ s) =~ - [(this,s)] - -prop_String_Maybe :: String -> String -> Bool -prop_String_Maybe this s = - readP_to_S (string this) s =~ - [(this, drop (length this) s) | this `isPrefixOf` s] - -prop_Munch :: (Char -> Bool) -> String -> Bool -prop_Munch p s = - readP_to_S (munch p) s =~ - [(takeWhile p s, dropWhile p s)] - -prop_Munch1 :: (Char -> Bool) -> String -> Bool -prop_Munch1 p s = - readP_to_S (munch1 p) s =~ - [(res,s') | let (res,s') = (takeWhile p s, dropWhile p s), not (null res)] - -{- -prop_Choice :: [ReadP Int Int] -> String -> Bool -prop_Choice ps s = - readP_to_S (choice ps) s =~. - readP_to_S (foldr (+++) pfail ps) s - -prop_ReadS :: ReadS Int -> String -> Bool -prop_ReadS r s = - readP_to_S (readS_to_P r) s =~. r s --} - -evenChar :: Char -> Bool -evenChar = even . fromEnum diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -module UnitTests.Distribution.Simple.Program.Internal - ( tests - ) where - -import Distribution.Simple.Program.Internal ( stripExtractVersion ) - -import Test.Tasty -import Test.Tasty.HUnit - -v :: String -v = "GNU strip (GNU Binutils; openSUSE 13.2) 2.24.0.20140403-6.1\nCopyright 2013\ - \ Free Software Foundation, Inc.\nThis program is free software; you may\ - \ redistribute it under the terms of\nthe GNU General Public License version 3\ - \ or (at your option) any later version.\nThis program has absolutely no\ - \ warranty.\n" - -v' :: String -v' = "GNU strip 2.17.50.0.6-26.el5 20061020" - -v'' :: String -v'' = "GNU strip (openSUSE-13.2) 2.23.50.0.6-26.el5 20061020" - -v''' :: String -v''' = "GNU strip (GNU (Binutils for) Ubuntu 12.04 ) 2.22" - -tests :: [TestTree] -tests = - [ testCase "Handles parentheses" $ - (stripExtractVersion v) @=? "2.24" - , testCase "Handles dashes and alphabetic characters" $ - (stripExtractVersion v') @=? "2.17" - , testCase "Handles single-word parenthetical expressions" $ - (stripExtractVersion v'') @=? "2.23" - , testCase "Handles nested parentheses" $ - (stripExtractVersion v''') @=? 
"2.22" - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Utils.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -module UnitTests.Distribution.Simple.Utils - ( tests - ) where - -import Distribution.Simple.Utils -import Distribution.Verbosity - -import Data.IORef -import System.Directory ( doesDirectoryExist, doesFileExist - , getTemporaryDirectory - , removeDirectoryRecursive, removeFile ) -import System.IO (hClose) - -import Test.Tasty -import Test.Tasty.HUnit - -withTempFileTest :: Assertion -withTempFileTest = do - fileName <- newIORef "" - tempDir <- getTemporaryDirectory - withTempFile tempDir ".foo" $ \fileName' _handle -> do - writeIORef fileName fileName' - fileExists <- readIORef fileName >>= doesFileExist - assertBool "Temporary file not deleted by 'withTempFile'!" (not fileExists) - -withTempFileRemovedTest :: Assertion -withTempFileRemovedTest = do - tempDir <- getTemporaryDirectory - withTempFile tempDir ".foo" $ \fileName handle -> do - hClose handle - removeFile fileName - -withTempDirTest :: Assertion -withTempDirTest = do - dirName <- newIORef "" - tempDir <- getTemporaryDirectory - withTempDirectory normal tempDir "foo" $ \dirName' -> do - writeIORef dirName dirName' - dirExists <- readIORef dirName >>= doesDirectoryExist - assertBool "Temporary directory not deleted by 'withTempDirectory'!" 
- (not dirExists) - -withTempDirRemovedTest :: Assertion -withTempDirRemovedTest = do - tempDir <- getTemporaryDirectory - withTempDirectory normal tempDir "foo" $ \dirPath -> do - removeDirectoryRecursive dirPath - -tests :: [TestTree] -tests = - [ testCase "withTempFile works as expected" $ - withTempFileTest - , testCase "withTempFile can handle removed files" $ - withTempFileRemovedTest - , testCase "withTempDirectory works as expected" $ - withTempDirTest - , testCase "withTempDirectory can handle removed directories" $ - withTempDirRemovedTest - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/System.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/System.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/System.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} -module UnitTests.Distribution.System - ( tests - ) where - -import Control.Monad (liftM2) -import Distribution.Text (Text(..), display, simpleParse) -import Distribution.System -import Test.Tasty -import Test.Tasty.QuickCheck - -textRoundtrip :: (Show a, Eq a, Text a) => a -> Property -textRoundtrip x = simpleParse (display x) === Just x - -tests :: [TestTree] -tests = - [ testProperty "Text OS round trip" (textRoundtrip :: OS -> Property) - , testProperty "Text Arch round trip" (textRoundtrip :: Arch -> Property) - , testProperty "Text Platform round trip" (textRoundtrip :: Platform -> Property) - ] - -instance Arbitrary OS where - arbitrary = elements knownOSs - -instance Arbitrary Arch where - arbitrary = elements knownArches - -instance Arbitrary Platform where - arbitrary = liftM2 Platform arbitrary arbitrary diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Utils/NubList.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Utils/NubList.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Utils/NubList.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -{-# LANGUAGE CPP #-} -module UnitTests.Distribution.Utils.NubList - ( tests - ) where - -#if __GLASGOW_HASKELL__ < 710 -import Data.Monoid -#endif -import Distribution.Utils.NubList -import Test.Tasty -import Test.Tasty.HUnit -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ testCase "Numlist retains ordering" testOrdering - , testCase "Numlist removes duplicates" testDeDupe - , testProperty "Monoid Numlist Identity" prop_Identity - , testProperty "Monoid Numlist Associativity" prop_Associativity - ] - -someIntList :: [Int] --- This list must not have duplicate entries. 
-someIntList = [ 1, 3, 4, 2, 0, 7, 6, 5, 9, -1 ] - -testOrdering :: Assertion -testOrdering = - assertBool "Maintains element ordering:" $ - fromNubList (toNubList someIntList) == someIntList - -testDeDupe :: Assertion -testDeDupe = - assertBool "De-duplicates a list:" $ - fromNubList (toNubList (someIntList ++ someIntList)) == someIntList - --- --------------------------------------------------------------------------- --- QuickCheck properties for NubList - -prop_Identity :: [Int] -> Bool -prop_Identity xs = - mempty `mappend` toNubList xs == toNubList xs `mappend` mempty - -prop_Associativity :: [Int] -> [Int] -> [Int] -> Bool -prop_Associativity xs ys zs = - (toNubList xs `mappend` toNubList ys) `mappend` toNubList zs - == toNubList xs `mappend` (toNubList ys `mappend` toNubList zs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Version.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,723 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-orphans - -fno-warn-incomplete-patterns - -fno-warn-deprecations - -fno-warn-unused-binds #-} --FIXME -module UnitTests.Distribution.Version (versionTests) where - -import Distribution.Version -import Distribution.Text - -import Text.PrettyPrint as Disp (text, render, parens, hcat - ,punctuate, int, char, (<>), (<+>)) - -import Test.Tasty -import Test.Tasty.QuickCheck -import qualified Test.Laws as Laws - -#if !MIN_VERSION_QuickCheck(2,9,0) -import Test.QuickCheck.Utils -#endif - -import Control.Monad (liftM, liftM2) -import Data.Maybe (isJust, fromJust) -import Data.List (sort, sortBy, nub) -import Data.Ord (comparing) - -versionTests :: [TestTree] -versionTests = - zipWith (\n p -> testProperty ("Range Property " ++ show n) p) [1::Int ..] 
- -- properties to validate the test framework - [ property prop_nonNull - , property prop_gen_intervals1 - , property prop_gen_intervals2 ---, property prop_equivalentVersionRange --FIXME: runs out of test cases - , property prop_intermediateVersion - - -- the basic syntactic version range functions - , property prop_anyVersion - , property prop_noVersion - , property prop_thisVersion - , property prop_notThisVersion - , property prop_laterVersion - , property prop_orLaterVersion - , property prop_earlierVersion - , property prop_orEarlierVersion - , property prop_unionVersionRanges - , property prop_intersectVersionRanges - , property prop_differenceVersionRanges - , property prop_invertVersionRange - , property prop_withinVersion - , property prop_foldVersionRange - , property prop_foldVersionRange' - - -- the semantic query functions ---, property prop_isAnyVersion1 --FIXME: runs out of test cases ---, property prop_isAnyVersion2 --FIXME: runs out of test cases ---, property prop_isNoVersion --FIXME: runs out of test cases ---, property prop_isSpecificVersion1 --FIXME: runs out of test cases ---, property prop_isSpecificVersion2 --FIXME: runs out of test cases - , property prop_simplifyVersionRange1 - , property prop_simplifyVersionRange1' ---, property prop_simplifyVersionRange2 --FIXME: runs out of test cases ---, property prop_simplifyVersionRange2' --FIXME: runs out of test cases ---, property prop_simplifyVersionRange2'' --FIXME: actually wrong - - -- converting between version ranges and version intervals - , property prop_to_intervals ---, property prop_to_intervals_canonical --FIXME: runs out of test cases ---, property prop_to_intervals_canonical' --FIXME: runs out of test cases - , property prop_from_intervals - , property prop_to_from_intervals - , property prop_from_to_intervals - , property prop_from_to_intervals' - - -- union and intersection of version intervals - , property prop_unionVersionIntervals - , property prop_unionVersionIntervals_idempotent - , property prop_unionVersionIntervals_commutative - , property prop_unionVersionIntervals_associative - , property prop_intersectVersionIntervals - , property prop_intersectVersionIntervals_idempotent - , property prop_intersectVersionIntervals_commutative - , property prop_intersectVersionIntervals_associative - , property prop_union_intersect_distributive - , property prop_intersect_union_distributive - - -- inversion of version intervals - , property prop_invertVersionIntervals - , property prop_invertVersionIntervalsTwice - ] - --- parseTests :: [TestTree] --- parseTests = --- zipWith (\n p -> testProperty ("Parse Property " ++ show n) p) [1::Int ..] --- -- parsing and pretty printing --- [ -- property prop_parse_disp1 --FIXME: actually wrong - --- -- These are also wrong, see --- -- https://github.com/haskell/cabal/issues/3037#issuecomment-177671011 - --- -- property prop_parse_disp2 --- -- , property prop_parse_disp3 --- -- , property prop_parse_disp4 --- -- , property prop_parse_disp5 --- ] - -#if !MIN_VERSION_QuickCheck(2,9,0) -instance Arbitrary Version where - arbitrary = do - branch <- smallListOf1 $ - frequency [(3, return 0) - ,(3, return 1) - ,(2, return 2) - ,(1, return 3)] - return (Version branch []) -- deliberate [] - where - smallListOf1 = adjustSize (\n -> min 5 (n `div` 3)) . 
listOf1 - - shrink (Version branch []) = - [ Version branch' [] | branch' <- shrink branch, not (null branch') ] - shrink (Version branch _tags) = - [ Version branch [] ] -#endif - -instance Arbitrary VersionRange where - arbitrary = sized verRangeExp - where - verRangeExp n = frequency $ - [ (2, return anyVersion) - , (1, liftM thisVersion arbitrary) - , (1, liftM laterVersion arbitrary) - , (1, liftM orLaterVersion arbitrary) - , (1, liftM orLaterVersion' arbitrary) - , (1, liftM earlierVersion arbitrary) - , (1, liftM orEarlierVersion arbitrary) - , (1, liftM orEarlierVersion' arbitrary) - , (1, liftM withinVersion arbitrary) - , (2, liftM VersionRangeParens arbitrary) - ] ++ if n == 0 then [] else - [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) - , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) - ] - where - verRangeExp2 = verRangeExp (n `div` 2) - - orLaterVersion' v = - unionVersionRanges (LaterVersion v) (ThisVersion v) - orEarlierVersion' v = - unionVersionRanges (EarlierVersion v) (ThisVersion v) - ---------------------------- --- VersionRange properties --- - -prop_nonNull :: Version -> Bool -prop_nonNull = not . null . versionBranch - -prop_anyVersion :: Version -> Bool -prop_anyVersion v' = - withinRange v' anyVersion == True - -prop_noVersion :: Version -> Bool -prop_noVersion v' = - withinRange v' noVersion == False - -prop_thisVersion :: Version -> Version -> Bool -prop_thisVersion v v' = - withinRange v' (thisVersion v) - == (v' == v) - -prop_notThisVersion :: Version -> Version -> Bool -prop_notThisVersion v v' = - withinRange v' (notThisVersion v) - == (v' /= v) - -prop_laterVersion :: Version -> Version -> Bool -prop_laterVersion v v' = - withinRange v' (laterVersion v) - == (v' > v) - -prop_orLaterVersion :: Version -> Version -> Bool -prop_orLaterVersion v v' = - withinRange v' (orLaterVersion v) - == (v' >= v) - -prop_earlierVersion :: Version -> Version -> Bool -prop_earlierVersion v v' = - withinRange v' (earlierVersion v) - == (v' < v) - -prop_orEarlierVersion :: Version -> Version -> Bool -prop_orEarlierVersion v v' = - withinRange v' (orEarlierVersion v) - == (v' <= v) - -prop_unionVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_unionVersionRanges vr1 vr2 v' = - withinRange v' (unionVersionRanges vr1 vr2) - == (withinRange v' vr1 || withinRange v' vr2) - -prop_intersectVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_intersectVersionRanges vr1 vr2 v' = - withinRange v' (intersectVersionRanges vr1 vr2) - == (withinRange v' vr1 && withinRange v' vr2) - -prop_differenceVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_differenceVersionRanges vr1 vr2 v' = - withinRange v' (differenceVersionRanges vr1 vr2) - == (withinRange v' vr1 && not (withinRange v' vr2)) - -prop_invertVersionRange :: VersionRange -> Version -> Bool -prop_invertVersionRange vr v' = - withinRange v' (invertVersionRange vr) - == not (withinRange v' vr) - -prop_withinVersion :: Version -> Version -> Bool -prop_withinVersion v v' = - withinRange v' (withinVersion v) - == (v' >= v && v' < upper v) - where - upper (Version lower t) = Version (init lower ++ [last lower + 1]) t - -prop_foldVersionRange :: VersionRange -> Bool -prop_foldVersionRange range = - expandWildcard range - == foldVersionRange anyVersion thisVersion - laterVersion earlierVersion - unionVersionRanges intersectVersionRanges - range - where - expandWildcard (WildcardVersion v) = - intersectVersionRanges (orLaterVersion v) (earlierVersion 
(upper v)) - where - upper (Version lower t) = Version (init lower ++ [last lower + 1]) t - - expandWildcard (UnionVersionRanges v1 v2) = - UnionVersionRanges (expandWildcard v1) (expandWildcard v2) - expandWildcard (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (expandWildcard v1) (expandWildcard v2) - expandWildcard (VersionRangeParens v) = expandWildcard v - expandWildcard v = v - - -prop_foldVersionRange' :: VersionRange -> Bool -prop_foldVersionRange' range = - canonicalise range - == foldVersionRange' anyVersion thisVersion - laterVersion earlierVersion - orLaterVersion orEarlierVersion - (\v _ -> withinVersion v) - unionVersionRanges intersectVersionRanges id - range - where - canonicalise (UnionVersionRanges (LaterVersion v) - (ThisVersion v')) | v == v' - = UnionVersionRanges (ThisVersion v') - (LaterVersion v) - canonicalise (UnionVersionRanges (EarlierVersion v) - (ThisVersion v')) | v == v' - = UnionVersionRanges (ThisVersion v') - (EarlierVersion v) - canonicalise (UnionVersionRanges v1 v2) = - UnionVersionRanges (canonicalise v1) (canonicalise v2) - canonicalise (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (canonicalise v1) (canonicalise v2) - canonicalise (VersionRangeParens v) = canonicalise v - canonicalise v = v - - -prop_isAnyVersion1 :: VersionRange -> Version -> Property -prop_isAnyVersion1 range version = - isAnyVersion range ==> withinRange version range - -prop_isAnyVersion2 :: VersionRange -> Property -prop_isAnyVersion2 range = - isAnyVersion range ==> - foldVersionRange True (\_ -> False) (\_ -> False) (\_ -> False) - (\_ _ -> False) (\_ _ -> False) - (simplifyVersionRange range) - -prop_isNoVersion :: VersionRange -> Version -> Property -prop_isNoVersion range version = - isNoVersion range ==> not (withinRange version range) - -prop_isSpecificVersion1 :: VersionRange -> NonEmptyList Version -> Property -prop_isSpecificVersion1 range (NonEmpty versions) = - isJust version && not (null versions') ==> - allEqual (fromJust version : versions') - where - version = isSpecificVersion range - versions' = filter (`withinRange` range) versions - allEqual xs = and (zipWith (==) xs (tail xs)) - -prop_isSpecificVersion2 :: VersionRange -> Property -prop_isSpecificVersion2 range = - isJust version ==> - foldVersionRange Nothing Just (\_ -> Nothing) (\_ -> Nothing) - (\_ _ -> Nothing) (\_ _ -> Nothing) - (simplifyVersionRange range) - == version - - where - version = isSpecificVersion range - --- | 'simplifyVersionRange' is a semantic identity on 'VersionRange'. --- -prop_simplifyVersionRange1 :: VersionRange -> Version -> Bool -prop_simplifyVersionRange1 range version = - withinRange version range == withinRange version (simplifyVersionRange range) - -prop_simplifyVersionRange1' :: VersionRange -> Bool -prop_simplifyVersionRange1' range = - range `equivalentVersionRange` (simplifyVersionRange range) - --- | 'simplifyVersionRange' produces a canonical form for ranges with --- equivalent semantics. 
--- -prop_simplifyVersionRange2 :: VersionRange -> VersionRange -> Version -> Property -prop_simplifyVersionRange2 r r' v = - r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> - withinRange v r == withinRange v r' - -prop_simplifyVersionRange2' :: VersionRange -> VersionRange -> Property -prop_simplifyVersionRange2' r r' = - r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> - r `equivalentVersionRange` r' - ---FIXME: see equivalentVersionRange for details -prop_simplifyVersionRange2'' :: VersionRange -> VersionRange -> Property -prop_simplifyVersionRange2'' r r' = - r /= r' && r `equivalentVersionRange` r' ==> - simplifyVersionRange r == simplifyVersionRange r' - || isNoVersion r - || isNoVersion r' - --------------------- --- VersionIntervals --- - --- | Generating VersionIntervals --- --- This is a tad tricky as VersionIntervals is an abstract type, so we first --- make a local type for generating the internal representation. Then we check --- that this lets us construct valid 'VersionIntervals'. --- -newtype VersionIntervals' = VersionIntervals' [VersionInterval] - deriving (Eq, Show) - -instance Arbitrary VersionIntervals' where - arbitrary = do - ubound <- arbitrary - bounds <- arbitrary - let intervals = mergeTouching - . map fixEmpty - . replaceUpper ubound - . pairs - . sortBy (comparing fst) - $ bounds - return (VersionIntervals' intervals) - - where - pairs ((l, lb):(u, ub):bs) = (LowerBound l lb, UpperBound u ub) - : pairs bs - pairs _ = [] - - replaceUpper NoUpperBound [(l,_)] = [(l, NoUpperBound)] - replaceUpper NoUpperBound (i:is) = i : replaceUpper NoUpperBound is - replaceUpper _ is = is - - -- merge adjacent intervals that touch - mergeTouching (i1@(l,u):i2@(l',u'):is) - | doesNotTouch u l' = i1 : mergeTouching (i2:is) - | otherwise = mergeTouching ((l,u'):is) - mergeTouching is = is - - doesNotTouch :: UpperBound -> LowerBound -> Bool - doesNotTouch NoUpperBound _ = False - doesNotTouch (UpperBound u ub) (LowerBound l lb) = - u < l - || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) - - fixEmpty (LowerBound l _, UpperBound u _) - | l == u = (LowerBound l InclusiveBound, UpperBound u InclusiveBound) - fixEmpty i = i - - shrink (VersionIntervals' intervals) = - [ VersionIntervals' intervals' | intervals' <- shrink intervals ] - -instance Arbitrary Bound where - arbitrary = elements [ExclusiveBound, InclusiveBound] - -instance Arbitrary LowerBound where - arbitrary = liftM2 LowerBound arbitrary arbitrary - -instance Arbitrary UpperBound where - arbitrary = oneof [return NoUpperBound - ,liftM2 UpperBound arbitrary arbitrary] - --- | Check that our VersionIntervals' arbitrary instance generates intervals --- that satisfies the invariant. --- -prop_gen_intervals1 :: VersionIntervals' -> Bool -prop_gen_intervals1 (VersionIntervals' intervals) = - isJust (mkVersionIntervals intervals) - -instance Arbitrary VersionIntervals where - arbitrary = do - VersionIntervals' intervals <- arbitrary - case mkVersionIntervals intervals of - Just xs -> return xs - --- | Check that constructing our intervals type and converting it to a --- 'VersionRange' and then into the true intervals type gives us back --- the exact same sequence of intervals. This tells us that our arbitrary --- instance for 'VersionIntervals'' is ok. 
--- -prop_gen_intervals2 :: VersionIntervals' -> Bool -prop_gen_intervals2 (VersionIntervals' intervals') = - asVersionIntervals (fromVersionIntervals intervals) == intervals' - where - Just intervals = mkVersionIntervals intervals' - --- | Check that 'VersionIntervals' models 'VersionRange' via --- 'toVersionIntervals'. --- -prop_to_intervals :: VersionRange -> Version -> Bool -prop_to_intervals range version = - withinRange version range == withinIntervals version intervals - where - intervals = toVersionIntervals range - --- | Check that semantic equality on 'VersionRange's is the same as converting --- to 'VersionIntervals' and doing syntactic equality. --- -prop_to_intervals_canonical :: VersionRange -> VersionRange -> Property -prop_to_intervals_canonical r r' = - r /= r' && r `equivalentVersionRange` r' ==> - toVersionIntervals r == toVersionIntervals r' - -prop_to_intervals_canonical' :: VersionRange -> VersionRange -> Property -prop_to_intervals_canonical' r r' = - r /= r' && toVersionIntervals r == toVersionIntervals r' ==> - r `equivalentVersionRange` r' - --- | Check that 'VersionIntervals' models 'VersionRange' via --- 'fromVersionIntervals'. --- -prop_from_intervals :: VersionIntervals -> Version -> Bool -prop_from_intervals intervals version = - withinRange version range == withinIntervals version intervals - where - range = fromVersionIntervals intervals - --- | @'toVersionIntervals' . 'fromVersionIntervals'@ is an exact identity on --- 'VersionIntervals'. --- -prop_to_from_intervals :: VersionIntervals -> Bool -prop_to_from_intervals intervals = - toVersionIntervals (fromVersionIntervals intervals) == intervals - --- | @'fromVersionIntervals' . 'toVersionIntervals'@ is a semantic identity on --- 'VersionRange', though not necessarily a syntactic identity. --- -prop_from_to_intervals :: VersionRange -> Bool -prop_from_to_intervals range = - range' `equivalentVersionRange` range - where - range' = fromVersionIntervals (toVersionIntervals range) - --- | Equivalent of 'prop_from_to_intervals' --- -prop_from_to_intervals' :: VersionRange -> Version -> Bool -prop_from_to_intervals' range version = - withinRange version range' == withinRange version range - where - range' = fromVersionIntervals (toVersionIntervals range) - --- | The semantics of 'unionVersionIntervals' is (||). --- -prop_unionVersionIntervals :: VersionIntervals -> VersionIntervals - -> Version -> Bool -prop_unionVersionIntervals is1 is2 v = - withinIntervals v (unionVersionIntervals is1 is2) - == (withinIntervals v is1 || withinIntervals v is2) - --- | 'unionVersionIntervals' is idempotent --- -prop_unionVersionIntervals_idempotent :: VersionIntervals -> Bool -prop_unionVersionIntervals_idempotent = - Laws.idempotent_binary unionVersionIntervals - --- | 'unionVersionIntervals' is commutative --- -prop_unionVersionIntervals_commutative :: VersionIntervals - -> VersionIntervals -> Bool -prop_unionVersionIntervals_commutative = - Laws.commutative unionVersionIntervals - --- | 'unionVersionIntervals' is associative --- -prop_unionVersionIntervals_associative :: VersionIntervals - -> VersionIntervals - -> VersionIntervals -> Bool -prop_unionVersionIntervals_associative = - Laws.associative unionVersionIntervals - --- | The semantics of 'intersectVersionIntervals' is (&&). 
--- -prop_intersectVersionIntervals :: VersionIntervals -> VersionIntervals - -> Version -> Bool -prop_intersectVersionIntervals is1 is2 v = - withinIntervals v (intersectVersionIntervals is1 is2) - == (withinIntervals v is1 && withinIntervals v is2) - --- | 'intersectVersionIntervals' is idempotent --- -prop_intersectVersionIntervals_idempotent :: VersionIntervals -> Bool -prop_intersectVersionIntervals_idempotent = - Laws.idempotent_binary intersectVersionIntervals - --- | 'intersectVersionIntervals' is commutative --- -prop_intersectVersionIntervals_commutative :: VersionIntervals - -> VersionIntervals -> Bool -prop_intersectVersionIntervals_commutative = - Laws.commutative intersectVersionIntervals - --- | 'intersectVersionIntervals' is associative --- -prop_intersectVersionIntervals_associative :: VersionIntervals - -> VersionIntervals - -> VersionIntervals -> Bool -prop_intersectVersionIntervals_associative = - Laws.associative intersectVersionIntervals - --- | 'unionVersionIntervals' distributes over 'intersectVersionIntervals' --- -prop_union_intersect_distributive :: Property -prop_union_intersect_distributive = - Laws.distributive_left unionVersionIntervals intersectVersionIntervals - .&. Laws.distributive_right unionVersionIntervals intersectVersionIntervals - --- | 'intersectVersionIntervals' distributes over 'unionVersionIntervals' --- -prop_intersect_union_distributive :: Property -prop_intersect_union_distributive = - Laws.distributive_left intersectVersionIntervals unionVersionIntervals - .&. Laws.distributive_right intersectVersionIntervals unionVersionIntervals - --- | The semantics of 'invertVersionIntervals' is 'not'. --- -prop_invertVersionIntervals :: VersionIntervals - -> Version -> Bool -prop_invertVersionIntervals vi v = - withinIntervals v (invertVersionIntervals vi) - == not (withinIntervals v vi) - --- | Double application of 'invertVersionIntervals' is the identity function -prop_invertVersionIntervalsTwice :: VersionIntervals -> Bool -prop_invertVersionIntervalsTwice vi = - invertVersionIntervals (invertVersionIntervals vi) == vi - - - --------------------------------- --- equivalentVersionRange helper - -prop_equivalentVersionRange :: VersionRange -> VersionRange - -> Version -> Property -prop_equivalentVersionRange range range' version = - equivalentVersionRange range range' && range /= range' ==> - withinRange version range == withinRange version range' - ---FIXME: this is wrong. consider version ranges "<=1" and "<1.0" --- this algorithm cannot distinguish them because there is no version --- that is included by one that is excluded by the other. --- Alternatively we must reconsider the semantics of '<' and '<=' --- in version ranges / version intervals. Perhaps the canonical --- representation should use just < v and interpret "<= v" as "< v.0". 
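To make the FIXME above concrete, here is a small standalone sketch. It uses only the Distribution.Version API already exercised by these properties (Version, orEarlierVersion, earlierVersion, withinRange, toVersionIntervals); the bindings le1, lt10 and probes are illustrative and not part of the test suite. The point is that "<= 1" and "< 1.0" admit the same set of versions, so probe-based equivalence accepts the pair, yet their canonical interval forms differ syntactically, which is exactly the kind of counterexample that threatens prop_to_intervals_canonical above.

    import Distribution.Version

    main :: IO ()
    main = do
      let le1    = orEarlierVersion (Version [1] [])    -- "<= 1"
          lt10   = earlierVersion   (Version [1,0] [])  -- "< 1.0"
          probes = [ Version b [] | b <- [[0], [1], [1,0], [1,0,1], [2]] ]
      -- Extensionally equal: no probe version separates the two ranges.
      print (all (\v -> withinRange v le1 == withinRange v lt10) probes)  -- True
      -- But their canonical interval representations are not syntactically equal.
      print (toVersionIntervals le1 == toVersionIntervals lt10)           -- False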
-equivalentVersionRange :: VersionRange -> VersionRange -> Bool -equivalentVersionRange vr1 vr2 = - let allVersionsUsed = nub (sort (versionsUsed vr1 ++ versionsUsed vr2)) - minPoint = Version [0] [] - maxPoint | null allVersionsUsed = minPoint - | otherwise = case maximum allVersionsUsed of - Version vs _ -> Version (vs ++ [1]) [] - probeVersions = minPoint : maxPoint - : intermediateVersions allVersionsUsed - - in all (\v -> withinRange v vr1 == withinRange v vr2) probeVersions - - where - versionsUsed = foldVersionRange [] (\x->[x]) (\x->[x]) (\x->[x]) (++) (++) - intermediateVersions (v1:v2:vs) = v1 : intermediateVersion v1 v2 - : intermediateVersions (v2:vs) - intermediateVersions vs = vs - -intermediateVersion :: Version -> Version -> Version -intermediateVersion v1 v2 | v1 >= v2 = error "intermediateVersion: v1 >= v2" -intermediateVersion (Version v1 _) (Version v2 _) = - Version (intermediateList v1 v2) [] - where - intermediateList :: [Int] -> [Int] -> [Int] - intermediateList [] (_:_) = [0] - intermediateList (x:xs) (y:ys) - | x < y = x : xs ++ [0] - | otherwise = x : intermediateList xs ys - -prop_intermediateVersion :: Version -> Version -> Property -prop_intermediateVersion v1 v2 = - (v1 /= v2) && not (adjacentVersions v1 v2) ==> - if v1 < v2 - then let v = intermediateVersion v1 v2 - in (v1 < v && v < v2) - else let v = intermediateVersion v2 v1 - in v1 > v && v > v2 - -adjacentVersions :: Version -> Version -> Bool -adjacentVersions (Version v1 _) (Version v2 _) = v1 ++ [0] == v2 - || v2 ++ [0] == v1 - --------------------------------- --- Parsing and pretty printing --- - -prop_parse_disp1 :: VersionRange -> Bool -prop_parse_disp1 vr = - fmap stripParens (simpleParse (display vr)) == Just (canonicalise vr) - - where - canonicalise = swizzle . 
swap - - swizzle (UnionVersionRanges (UnionVersionRanges v1 v2) v3) - | not (isOrLaterVersion v1 v2) && not (isOrEarlierVersion v1 v2) - = swizzle (UnionVersionRanges v1 (UnionVersionRanges v2 v3)) - - swizzle (IntersectVersionRanges (IntersectVersionRanges v1 v2) v3) - = swizzle (IntersectVersionRanges v1 (IntersectVersionRanges v2 v3)) - - swizzle (UnionVersionRanges v1 v2) = - UnionVersionRanges (swizzle v1) (swizzle v2) - swizzle (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (swizzle v1) (swizzle v2) - swizzle (VersionRangeParens v) = swizzle v - swizzle v = v - - isOrLaterVersion (ThisVersion v) (LaterVersion v') = v == v' - isOrLaterVersion _ _ = False - - isOrEarlierVersion (ThisVersion v) (EarlierVersion v') = v == v' - isOrEarlierVersion _ _ = False - - swap = - foldVersionRange' anyVersion thisVersion - laterVersion earlierVersion - orLaterVersion orEarlierVersion - (\v _ -> withinVersion v) - unionVersionRanges intersectVersionRanges id - - stripParens :: VersionRange -> VersionRange - stripParens (VersionRangeParens v) = stripParens v - stripParens (UnionVersionRanges v1 v2) = - UnionVersionRanges (stripParens v1) (stripParens v2) - stripParens (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (stripParens v1) (stripParens v2) - stripParens v = v - -prop_parse_disp2 :: VersionRange -> Property -prop_parse_disp2 vr = - let b = fmap (display :: VersionRange -> String) (simpleParse (display vr)) - a = Just (display vr) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp3 :: VersionRange -> Property -prop_parse_disp3 vr = - let a = Just (display vr) - b = fmap displayRaw (simpleParse (display vr)) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp4 :: VersionRange -> Property -prop_parse_disp4 vr = - let a = Just vr - b = (simpleParse (display vr)) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp5 :: VersionRange -> Property -prop_parse_disp5 vr = - let a = Just vr - b = simpleParse (displayRaw vr) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -displayRaw :: VersionRange -> String -displayRaw = - Disp.render - . 
foldVersionRange' -- precedence: - -- All the same as the usual pretty printer, except for the parens - ( Disp.text "-any") - (\v -> Disp.text "==" <> disp v) - (\v -> Disp.char '>' <> disp v) - (\v -> Disp.char '<' <> disp v) - (\v -> Disp.text ">=" <> disp v) - (\v -> Disp.text "<=" <> disp v) - (\v _ -> Disp.text "==" <> dispWild v) - (\r1 r2 -> r1 <+> Disp.text "||" <+> r2) - (\r1 r2 -> r1 <+> Disp.text "&&" <+> r2) - (\r -> Disp.parens r) -- parens - - where - dispWild (Version b _) = - Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) - <> Disp.text ".*" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.1.0/tests/UnitTests.hs 2016-11-07 10:02:26.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.1.0/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -module Main - ( main - ) where - -import Test.Tasty - -import qualified UnitTests.Distribution.Compat.CreatePipe -import qualified UnitTests.Distribution.Compat.ReadP -import qualified UnitTests.Distribution.Simple.Program.Internal -import qualified UnitTests.Distribution.Simple.Utils -import qualified UnitTests.Distribution.System -import qualified UnitTests.Distribution.Utils.NubList -import qualified UnitTests.Distribution.Version (versionTests) - -tests :: TestTree -tests = testGroup "Unit Tests" $ - [ testGroup "Distribution.Compat.CreatePipe" - UnitTests.Distribution.Compat.CreatePipe.tests - , testGroup "Distribution.Compat.ReadP" - UnitTests.Distribution.Compat.ReadP.tests - , testGroup "Distribution.Simple.Program.Internal" - UnitTests.Distribution.Simple.Program.Internal.tests - , testGroup "Distribution.Simple.Utils" - UnitTests.Distribution.Simple.Utils.tests - , testGroup "Distribution.Utils.NubList" - UnitTests.Distribution.Utils.NubList.tests - , testGroup "Distribution.System" - UnitTests.Distribution.System.tests - , testGroup "Distribution.Version" - UnitTests.Distribution.Version.versionTests - ] - -main :: IO () -main = defaultMain tests diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Cabal.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,373 @@ +name: Cabal +version: 1.24.2.0 +copyright: 2003-2006, Isaac Jones + 2005-2011, Duncan Coutts +license: BSD3 +license-file: LICENSE +author: Isaac Jones + Duncan Coutts +maintainer: cabal-devel@haskell.org +homepage: http://www.haskell.org/cabal/ +bug-reports: https://github.com/haskell/cabal/issues +synopsis: A framework for packaging Haskell software +description: + The Haskell Common Architecture for Building Applications and + Libraries: a framework defining a common interface for authors to more + easily build their Haskell applications in a portable way. + . + The Haskell Cabal is part of a larger infrastructure for distributing, + organizing, and cataloging Haskell libraries and tools. +category: Distribution +cabal-version: >=1.10 +build-type: Simple +-- If we use a new Cabal feature, this needs to be changed to Custom so +-- we can bootstrap. 
+ +extra-source-files: + README.md tests/README.md changelog + doc/Cabal.css doc/developing-packages.markdown doc/index.markdown + doc/installing-packages.markdown + doc/misc.markdown + + -- Generated with 'misc/gen-extra-source-files.sh' + -- Do NOT edit this section manually; instead, run the script. + -- BEGIN gen-extra-source-files + tests/PackageTests/AllowNewer/AllowNewer.cabal + tests/PackageTests/AllowNewer/benchmarks/Bench.hs + tests/PackageTests/AllowNewer/src/Foo.hs + tests/PackageTests/AllowNewer/tests/Test.hs + tests/PackageTests/BenchmarkExeV10/Foo.hs + tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs + tests/PackageTests/BenchmarkExeV10/my.cabal + tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal + tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs + tests/PackageTests/BenchmarkStanza/my.cabal + tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal + tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs + tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal + tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal + tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs + tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs + tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal + tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs + tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal + tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs + tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal + tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal + tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs + tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal + tests/PackageTests/BuildTargetErrors/BuildTargetErrors.cabal + tests/PackageTests/BuildTargetErrors/Main.hs + tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs + tests/PackageTests/BuildableField/BuildableField.cabal + 
tests/PackageTests/BuildableField/Main.hs + tests/PackageTests/CMain/Bar.hs + tests/PackageTests/CMain/foo.c + tests/PackageTests/CMain/my.cabal + tests/PackageTests/DeterministicAr/Lib.hs + tests/PackageTests/DeterministicAr/my.cabal + tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal + tests/PackageTests/DuplicateModuleName/src/Foo.hs + tests/PackageTests/DuplicateModuleName/tests/Foo.hs + tests/PackageTests/DuplicateModuleName/tests2/Foo.hs + tests/PackageTests/EmptyLib/empty/empty.cabal + tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/SameDirectory/ghc + tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg + tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 + tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 + tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 + tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 + tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc + tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg + tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 + tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 + tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal + tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 + tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 + tests/PackageTests/Haddock/CPP.hs + tests/PackageTests/Haddock/Literate.lhs + tests/PackageTests/Haddock/NoCPP.hs + tests/PackageTests/Haddock/Simple.hs + tests/PackageTests/Haddock/my.cabal + tests/PackageTests/HaddockNewline/A.hs + tests/PackageTests/HaddockNewline/HaddockNewline.cabal + tests/PackageTests/HaddockNewline/Setup.hs + tests/PackageTests/Options.hs + tests/PackageTests/OrderFlags/Foo.hs + tests/PackageTests/OrderFlags/my.cabal + tests/PackageTests/PathsModule/Executable/Main.hs + tests/PackageTests/PathsModule/Executable/my.cabal + tests/PackageTests/PathsModule/Library/my.cabal + tests/PackageTests/PreProcess/Foo.hsc + tests/PackageTests/PreProcess/Main.hs + tests/PackageTests/PreProcess/my.cabal + tests/PackageTests/PreProcessExtraSources/Foo.hsc + tests/PackageTests/PreProcessExtraSources/Main.hs + tests/PackageTests/PreProcessExtraSources/my.cabal + tests/PackageTests/ReexportedModules/ReexportedModules.cabal + tests/PackageTests/TemplateHaskell/dynamic/Exe.hs + tests/PackageTests/TemplateHaskell/dynamic/Lib.hs + tests/PackageTests/TemplateHaskell/dynamic/TH.hs + tests/PackageTests/TemplateHaskell/dynamic/my.cabal + tests/PackageTests/TemplateHaskell/profiling/Exe.hs + tests/PackageTests/TemplateHaskell/profiling/Lib.hs + tests/PackageTests/TemplateHaskell/profiling/TH.hs + tests/PackageTests/TemplateHaskell/profiling/my.cabal + tests/PackageTests/TemplateHaskell/vanilla/Exe.hs + tests/PackageTests/TemplateHaskell/vanilla/Lib.hs + tests/PackageTests/TemplateHaskell/vanilla/TH.hs + tests/PackageTests/TemplateHaskell/vanilla/my.cabal + tests/PackageTests/TestNameCollision/child/Child.hs + tests/PackageTests/TestNameCollision/child/child.cabal + tests/PackageTests/TestNameCollision/child/tests/Test.hs + tests/PackageTests/TestNameCollision/parent/Parent.hs + tests/PackageTests/TestNameCollision/parent/parent.cabal + 
tests/PackageTests/TestOptions/TestOptions.cabal + tests/PackageTests/TestOptions/test-TestOptions.hs + tests/PackageTests/TestStanza/my.cabal + tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs + tests/PackageTests/TestSuiteTests/ExeV10/my.cabal + tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs + tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs + tests/PackageTests/TestSuiteTests/LibV09/Lib.hs + tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal + tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs + tests/PackageTests/Tests.hs + tests/PackageTests/UniqueIPID/P1/M.hs + tests/PackageTests/UniqueIPID/P1/my.cabal + tests/PackageTests/UniqueIPID/P2/M.hs + tests/PackageTests/UniqueIPID/P2/my.cabal + tests/PackageTests/multInst/my.cabal + tests/Setup.hs + tests/hackage/check.sh + tests/hackage/download.sh + tests/hackage/unpack.sh + tests/misc/ghc-supported-languages.hs + -- END gen-extra-source-files + +source-repository head + type: git + location: https://github.com/haskell/cabal/ + subdir: Cabal + +flag bundled-binary-generic + default: False + +library + build-depends: + array >= 0.1 && < 0.6, + base >= 4.5 && < 5, + bytestring >= 0.9 && < 1, + containers >= 0.4 && < 0.6, + deepseq >= 1.3 && < 1.5, + directory >= 1.1 && < 1.4, + filepath >= 1.3 && < 1.5, + pretty >= 1.1 && < 1.2, + process >= 1.1.0.1 && < 1.5, + time >= 1.4 && < 1.8 + + if flag(bundled-binary-generic) + build-depends: binary >= 0.5 && < 0.7 + else + build-depends: binary >= 0.7 && < 0.9 + + -- Needed for GHC.Generics before GHC 7.6 + if impl(ghc < 7.6) + build-depends: ghc-prim >= 0.2 && < 0.3 + + if !os(windows) + build-depends: + unix >= 2.5 && < 2.8 + + if os(windows) + build-depends: + Win32 >= 2.2 && < 2.4 + + ghc-options: -Wall -fno-ignore-asserts -fwarn-tabs + if impl(ghc >= 8.0) + ghc-options: -Wcompat -Wnoncanonical-monad-instances + -Wnoncanonical-monadfail-instances + + exposed-modules: + Distribution.Compat.CreatePipe + Distribution.Compat.Environment + Distribution.Compat.Exception + Distribution.Compat.Internal.TempFile + Distribution.Compat.ReadP + Distribution.Compat.Semigroup + Distribution.Compiler + Distribution.InstalledPackageInfo + Distribution.License + Distribution.Make + Distribution.ModuleName + Distribution.Package + Distribution.PackageDescription + Distribution.PackageDescription.Check + Distribution.PackageDescription.Configuration + Distribution.PackageDescription.Parse + Distribution.PackageDescription.PrettyPrint + Distribution.PackageDescription.Utils + Distribution.ParseUtils + Distribution.ReadE + Distribution.Simple + Distribution.Simple.Bench + Distribution.Simple.Build + Distribution.Simple.Build.Macros + Distribution.Simple.Build.PathsModule + Distribution.Simple.BuildPaths + Distribution.Simple.BuildTarget + Distribution.Simple.CCompiler + Distribution.Simple.Command + Distribution.Simple.Compiler + Distribution.Simple.Configure + Distribution.Simple.GHC + Distribution.Simple.GHCJS + Distribution.Simple.Haddock + Distribution.Simple.HaskellSuite + Distribution.Simple.Hpc + Distribution.Simple.Install + Distribution.Simple.InstallDirs + Distribution.Simple.JHC + Distribution.Simple.LHC + Distribution.Simple.LocalBuildInfo + Distribution.Simple.PackageIndex + Distribution.Simple.PreProcess + Distribution.Simple.PreProcess.Unlit + Distribution.Simple.Program + Distribution.Simple.Program.Ar + Distribution.Simple.Program.Builtin + Distribution.Simple.Program.Db + Distribution.Simple.Program.Find + Distribution.Simple.Program.GHC + 
Distribution.Simple.Program.HcPkg + Distribution.Simple.Program.Hpc + Distribution.Simple.Program.Internal + Distribution.Simple.Program.Ld + Distribution.Simple.Program.Run + Distribution.Simple.Program.Script + Distribution.Simple.Program.Strip + Distribution.Simple.Program.Types + Distribution.Simple.Register + Distribution.Simple.Setup + Distribution.Simple.SrcDist + Distribution.Simple.Test + Distribution.Simple.Test.ExeV10 + Distribution.Simple.Test.LibV09 + Distribution.Simple.Test.Log + Distribution.Simple.UHC + Distribution.Simple.UserHooks + Distribution.Simple.Utils + Distribution.System + Distribution.TestSuite + Distribution.Text + Distribution.Utils.NubList + Distribution.Verbosity + Distribution.Version + Language.Haskell.Extension + Distribution.Compat.Binary + + other-modules: + Distribution.Compat.CopyFile + Distribution.Compat.GetShortPathName + Distribution.Compat.MonadFail + Distribution.GetOpt + Distribution.Lex + Distribution.Simple.GHC.Internal + Distribution.Simple.GHC.IPI642 + Distribution.Simple.GHC.IPIConvert + Distribution.Simple.GHC.ImplInfo + Paths_Cabal + + if flag(bundled-binary-generic) + other-modules: + Distribution.Compat.Binary.Class + Distribution.Compat.Binary.Generic + + default-language: Haskell98 + +-- Small, fast running tests. +test-suite unit-tests + type: exitcode-stdio-1.0 + hs-source-dirs: tests + other-modules: + Test.Laws + Test.QuickCheck.Utils + UnitTests.Distribution.Compat.CreatePipe + UnitTests.Distribution.Compat.ReadP + UnitTests.Distribution.Simple.Program.Internal + UnitTests.Distribution.Simple.Utils + UnitTests.Distribution.System + UnitTests.Distribution.Utils.NubList + UnitTests.Distribution.Version + main-is: UnitTests.hs + build-depends: + base, + directory, + tasty, + tasty-hunit, + tasty-quickcheck, + pretty, + QuickCheck >= 2.7 && < 2.10, + Cabal + ghc-options: -Wall + default-language: Haskell98 + +-- Large, system tests that build packages. +test-suite package-tests + type: exitcode-stdio-1.0 + main-is: PackageTests.hs + other-modules: + PackageTests.BenchmarkStanza.Check + PackageTests.TestStanza.Check + PackageTests.DeterministicAr.Check + PackageTests.TestSuiteTests.ExeV10.Check + PackageTests.PackageTester + hs-source-dirs: tests + build-depends: + base, + containers, + tagged, + tasty, + tasty-hunit, + transformers, + Cabal, + process, + directory, + filepath, + bytestring, + regex-posix, + old-time + if !os(windows) + build-depends: unix, exceptions + ghc-options: -Wall -rtsopts + default-extensions: CPP + default-language: Haskell98 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/changelog cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/changelog --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/changelog 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/changelog 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,484 @@ +-*-change-log-*- +1.24.2.0 Mikhail Glushenkov December 2016 + * Fixed a bug in the handling of non-buildable components (#4094). + * Reverted a PVP-noncompliant API change in 1.24.1.0 (#4123). + * Bumped the directory upper bound to < 1.4 (#4158). + +1.24.1.0 Ryan Thomas October 2016 + * API addition: 'differenceVersionRanges' (#3519). + * Fixed reexported-modules display mangling (#3928). + * Check that the correct cabal-version is specified when the + extra-doc-files field is present (#3825). 
+ * Fixed an incorrect invocation of GetShortPathName that was + causing build failures on Windows (#3649). + * Linker flags are now set correctly on GHC >= 7.8 (#3443). + +1.24.0.0 Ryan Thomas May 2016 + * Support GHC 8. + * Deal with extra C sources from preprocessors (#238). + * Include cabal_macros.h when running c2hs (#2600). + * Don't recompile C sources unless needed (#2601). + * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable + * Add '--profiling-detail=$level' flag with a default for libraries + and executables of 'exported-functions' and 'toplevel-functions' + respectively (GHC's '-fprof-auto-{exported,top}' flags) (#193). + * New 'custom-setup' stanza to specify setup deps. Setup is also built + with the cabal_macros.h style macros, for conditional compilation. + * Support Haddock response files (#2746). + * Fixed a bug in the Text instance for Platform (#2862). + * New 'setup haddock' option: '--for-hackage' (#2852). + * New --show-detail=direct; like streaming, but allows the test + program to detect that it is connected to a terminal, and works + reliably with a non-threaded runtime (#2911, and serves as a + work-around for #2398) + * Library support for multi-instance package DBs (#2948). + * Improved the './Setup configure' solver (#3082, #3076). + * The '--allow-newer' option can now be used with './Setup + configure' (#3163). + * Added a way to specify extra locations to find OS X frameworks + in ('extra-framework-dirs'). Can be used both in .cabal files and + as an argument to './Setup configure' (#3158). + * Macros 'VERSION_$pkgname' and 'MIN_VERSION_$pkgname' are now + also generated for the current package (#3235). + +1.22.0.0 Johan Tibell January 2015 + * Support GHC 7.10. + * Experimental support for emitting DWARF debug info. + * Preliminary support for relocatable packages. + * Allow cabal to be used inside cabal exec environments. + * hpc: support multiple "ways" (e.g. profiling and vanilla). + * Support GHCJS. + * Improved command line documentation. + * Add '-none' constraint syntax for version ranges (#2093). + * Make the default doc index file path compiler/arch/os-dependent + (#2136). + * Warn instead of dying when generating documentation and hscolour + isn't installed (455f51622fa38347db62197a04bb0fa5b928ff17). + * Support the new BinaryLiterals extension + (1f25ab3c5eff311ada73c6c987061b80e9bbebd9). + * Warn about 'ghc-prof-options: -auto-all' in 'cabal check' (#2162). + * Add preliminary support for multiple instances of the same package + version installed side-by-side (#2002). + * New binary build config format - faster build times (#2076). + * Support module thinning and renaming (#2038). + * Add a new license type: UnspecifiedLicense (#2141). + * Remove support for Hugs and nhc98 (#2168). + * Invoke 'tar' with '--format ustar' if possible in 'sdist' (#1903). + * Replace --enable-library-coverage with --enable-coverage, which + enables program coverage for all components (#1945). + * Suggest that `ExitFailure 9` is probably due to memory + exhaustion (#1522). + * Drop support for Haddock < 2.0 (#1808, #1718). + * Make 'cabal test'/'cabal bench' build only what's needed for + running tests/benchmarks (#1821). + * Build shared libraries by default when linking executables dynamically. + * Build profiled libraries by default when profiling executables. + +1.20.0.1 Johan Tibell May 2014 + * Fix streaming test output.
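As a sketch of the 'custom-setup' stanza mentioned in the 1.24.0.0 entry above, a package with build-type: Custom can now pin the dependencies of its Setup.hs roughly as follows (the bounds shown are illustrative, not prescribed):

    build-type:       Custom

    custom-setup
      setup-depends:  base  >= 4.5 && < 5,
                      Cabal >= 1.24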
+ +1.20.0.0 Johan Tibell April 2014 + * Rewrite user guide + * Fix repl Ctrl+C handling + * Add haskell-suite compiler support + * Add __HADDOCK_VERSION__ define + * Allow specifying exact dependency version using hash + * Rename extra-html-files to extra-doc-files + * Add parallel build support for GHC 7.8 and later + * Don't call ranlib on OS X + * Avoid re-linking executables, test suites, and benchmarks + unnecessarily, shortening build times + * Add --allow-newer which allows upper version bounds to be + ignored + * Add --enable-library-stripping + * Add command for freezing dependencies + * Allow repl to be used outside Cabal packages + * Add --require-sandbox + * Don't use --strip-unneeded on OS X or iOS + * Add new license-files field got additional licenses + * Fix if(solaris) on some Solaris versions + * Don't use -dylib-install-name on OS X with GHC > 7.8 + * Add DragonFly as a known OS + * Improve pretty-printing of Cabal files + * Add test flag --show-details=streaming for real-time test output + * Add exec command + +1.10.2.0 Duncan Coutts June 2011 + * Include test suites in cabal sdist + * Fix for conditionals in test suite stanzas in .cabal files + * Fix permissions of directories created during install + * Fix for global builds when $HOME env var is not set + +1.10.1.0 Duncan Coutts February 2011 + * Improved error messages when test suites are not enabled + * Template parameters allowed in test --test-option(s) flag + * Improved documentation of the test feature + * Relaxed QA check on cabal-version when using test-suite sections + * haddock command now allows both --hoogle and --html at the same time + * Find ghc-version-specific instances of the hsc2hs program + * Preserve file executable permissions in sdist tarballs + * Pass gcc location and flags to ./configure scripts + * Get default gcc flags from ghc + +1.10.0.0 Duncan Coutts November 2010 + * New cabal test feature + * Initial support for UHC + * New default-language and other-languages fields (e.g. 
Haskell98/2010) + * New default-extensions and other-extensions fields + * Deprecated extensions field (for packages using cabal-version >=1.10) + * Cabal-version field must now only be of the form ">= x.y" + * Removed deprecated --copy-prefix= feature + * Auto-reconfigure when .cabal file changes + * Workaround for haddock overwriting .hi and .o files when using TH + * Extra cpp flags used with hsc2hs and c2hs (-D${os}_BUILD_OS etc) + * New cpp define VERSION_ gives string version of dependencies + * User guide source now in markdown format for easier editing + * Improved checks and error messages for C libraries and headers + * Removed BSD4 from the list of suggested licenses + * Updated list of known language extensions + * Fix for include paths to allow C code to import FFI stub.h files + * Fix for intra-package dependencies on OSX + * Stricter checks on various bits of .cabal file syntax + * Minor fixes for c2hs + +1.8.0.6 Duncan Coutts June 2010 + * Fix 'register --global/--user' + +1.8.0.4 Duncan Coutts March 2010 + * Set dylib-install-name for dynalic libs on OSX + * Stricter configure check that compiler supports a package's extensions + * More configure-time warnings + * Hugs can compile Cabal lib again + * Default datadir now follows prefix on Windows + * Support for finding installed packages for hugs + * Cabal version macros now have proper parenthesis + * Reverted change to filter out deps of non-buildable components + * Fix for registering implace when using a specific package db + * Fix mismatch between $os and $arch path template variables + * Fix for finding ar.exe on Windows, always pick ghc's version + * Fix for intra-package dependencies with ghc-6.12 + +1.8.0.2 Duncan Coutts December 2009 + * Support for GHC-6.12 + * New unique installed package IDs which use a package hash + * Allow executables to depend on the lib within the same package + * Dependencies for each component apply only to that component + (previously applied to all the other components too) + * Added new known license MIT and versioned GPL and LGPL + * More liberal package version range syntax + * Package registration files are now UTF8 + * Support for LHC and JHC-0.7.2 + * Deprecated RecordPuns extension in favour of NamedFieldPuns + * Deprecated PatternSignatures extension in favor of ScopedTypeVariables + * New VersionRange semantic view as a sequence of intervals + * Improved package quality checks + * Minor simplification in a couple Setup.hs hooks + * Beginnings of a unit level testsuite using QuickCheck + * Various bug fixes + * Various internal cleanups + +1.6.0.2 Duncan Coutts February 2009 + * New configure-time check for C headers and libraries + * Added language extensions present in ghc-6.10 + * Added support for NamedFieldPuns extension in ghc-6.8 + * Fix in configure step for ghc-6.6 on Windows + * Fix warnings in Path_pkgname.hs module on Windows + * Fix for exotic flags in ld-options field + * Fix for using pkg-config in a package with a lib and an executable + * Fix for building haddock docs for exes that use the Paths module + * Fix for installing header files in subdirectories + * Fix for the case of building profiling libs but not ordinary libs + * Fix read-only attribute of installed files on Windows + * Ignore ghc -threaded flag when profiling in ghc-6.8 and older + +1.6.0.1 Duncan Coutts October 2008 + * Export a compat function to help alex and happy + +1.6.0.0 Duncan Coutts October 2008 + * Support for ghc-6.10 + * Source control repositories can now be specified in .cabal 
files + * Bug report URLs can be now specified in .cabal files + * Wildcards now allowed in data-files and extra-source-files fields + * New syntactic sugar for dependencies "build-depends: foo ==1.2.*" + * New cabal_macros.h provides macros to test versions of dependencies + * Relocatable bindists now possible on unix via env vars + * New 'exposed' field allows packages to be not exposed by default + * Install dir flags can now use $os and $arch variables + * New --builddir flag allows multiple builds from a single sources dir + * cc-options now only apply to .c files, not for -fvia-C + * cc-options are not longer propagated to dependent packages + * The cpp/cc/ld-options fields no longer use ',' as a separator + * hsc2hs is now called using gcc instead of using ghc as gcc + * New api for manipulating sets and graphs of packages + * Internal api improvements and code cleanups + * Minor improvements to the user guide + * Miscellaneous minor bug fixes + +1.4.0.2 Duncan Coutts August 2008 + * Fix executable stripping default + * Fix striping exes on OSX that export dynamic symbols (like ghc) + * Correct the order of arguments given by --prog-options= + * Fix corner case with overlapping user and global packages + * Fix for modules that use pre-processing and .hs-boot files + * Clarify some points in the user guide and readme text + * Fix verbosity flags passed to sub-command like haddock + * Fix sdist --snapshot + * Allow meta-packages that contain no modules or C code + * Make the generated Paths module -Wall clean on Windows + +1.4.0.1 Duncan Coutts June 2008 + * Fix a bug which caused '.' to always be in the sources search path + * Haddock-2.2 and later do now support the --hoogle flag + +1.4.0.0 Duncan Coutts June 2008 + * Rewritten command line handling support + * Command line completion with bash + * Better support for Haddock 2 + * Improved support for nhc98 + * Removed support for ghc-6.2 + * Haddock markup in .lhs files now supported + * Default colour scheme for highlighted source code + * Default prefix for --user installs is now $HOME/.cabal + * All .cabal files are treaded as UTF-8 and must be valid + * Many checks added for common mistakes + * New --package-db= option for specific package databases + * Many internal changes to support cabal-install + * Stricter parsing for version strings, eg dissalows "1.05" + * Improved user guide introduction + * Programatica support removed + * New options --program-prefix/suffix allows eg versioned programs + * Support packages that use .hs-boot files + * Fix sdist for Main modules that require preprocessing + * New configure -O flag with optimisation level 0--2 + * Provide access to "x-" extension fields through the Cabal api + * Added check for broken installed packages + * Added warning about using inconsistent versions of dependencies + * Strip binary executable files by default with an option to disable + * New options to add site-specific include and library search paths + * Lift the restriction that libraries must have exposed-modules + * Many bugs fixed. 
+ * Many internal structural improvements and code cleanups + +1.2.4.0 Duncan Coutts June 2008 + * Released with GHC 6.8.3 + * Backported several fixes and minor improvements from Cabal-1.4 + * Use a default colour scheme for sources with hscolour >=1.9 + * Support --hyperlink-source for Haddock >= 2.0 + * Fix for running in a non-writable directory + * Add OSX -framework arguments when linking executables + * Updates to the user guide + * Allow build-tools names to include + and _ + * Export autoconfUserHooks and simpleUserHooks + * Export ccLdOptionsBuildInfo for Setup.hs scripts + * Export unionBuildInfo and make BuildInfo an instance of Monoid + * Fix to allow the 'main-is' module to use a pre-processor + +1.2.3.0 Duncan Coutts Nov 2007 + * Released with GHC 6.8.2 + * Includes full list of GHC language extensions + * Fix infamous "dist/conftest.c" bug + * Fix configure --interfacedir= + * Find ld.exe on Windows correctly + * Export PreProcessor constructor and mkSimplePreProcessor + * Fix minor bug in unlit code + * Fix some markup in the haddock docs + +1.2.2.0 Duncan Coutts Nov 2007 + * Released with GHC 6.8.1 + * Support haddock-2.0 + * Support building DSOs with GHC + * Require reconfiguring if the .cabal file has changed + * Fix os(windows) configuration test + * Fix building documentation + * Fix building packages on Solaris + * Other minor bug fixes + +1.2.1 Duncan Coutts Oct 2007 + * To be included in GHC 6.8.1 + * New field "cpp-options" used when preprocessing Haskell modules + * Fixes for hsc2hs when using ghc + * C source code gets compiled with -O2 by default + * OS aliases, to allow os(windows) rather than requiring os(mingw32) + * Fix cleaning of 'stub' files + * Fix cabal-setup, command line ui that replaces "runhaskell Setup.hs" + * Build docs even when dependent packages docs are missing + * Allow the --html-dir to be specified at configure time + * Fix building with ghc-6.2 + * Other minor bug fixes and build fixes + +1.2.0 Duncan Coutts Sept 2007 + * To be included in GHC 6.8.x + * New configurations feature + * Can make haddock docs link to hilighted sources (with hscolour) + * New flag to allow linking to haddock docs on the web + * Supports pkg-config + * New field "build-tools" for tool dependencies + * Improved c2hs support + * Preprocessor output no longer clutters source dirs + * Separate "includes" and "install-includes" fields + * Makefile command to generate makefiles for building libs with GHC + * New --docdir configure flag + * Generic --with-prog --prog-args configure flags + * Better default installation paths on Windows + * Install paths can be specified relative to each other + * License files now installed + * Initial support for NHC (incomplete) + * Consistent treatment of verbosity + * Reduced verbosity of configure step by default + * Improved helpfulness of output messages + * Help output now clearer and fits in 80 columns + * New setup register --gen-pkg-config flag for distros + * Major internal refactoring, hooks api has changed + * Dozens of bug fixes + +1.1.6.2 Duncan Coutts May 2007 + * Released with GHC 6.6.1 + * Handle windows text file encoding for .cabal files + * Fix compiling a executable for profiling that uses Template Haskell + * Other minor bug fixes and user guide clarifications + +1.1.6.1 Duncan Coutts Oct 2006 + * fix unlit code + * fix escaping in register.sh + +1.1.6 Duncan Coutts Oct 2006 + * Released with GHC 6.6 + * Added support for hoogle + * Allow profiling and normal builds of libs to be chosen indepentantly + * 
Default installation directories on Win32 changed + * Register haddock docs with ghc-pkg + * Get haddock to make hyperlinks to dependent package docs + * Added BangPatterns language extension + * Various bug fixes + +1.1.4 Duncan Coutts May 2006 + * Released with GHC 6.4.2 + * Better support for packages that need to install header files + * cabal-setup added, but not installed by default yet + * Implemented "setup register --inplace" + * Have packages exposed by default with ghc-6.2 + * It is no longer necessary to run 'configure' before 'clean' or 'sdist' + * Added support for ghc's -split-objs + * Initial support for JHC + * Ignore extension fields in .cabal files (fields begining with "x-") + * Some changes to command hooks API to improve consistency + * Hugs support improvements + * Added GeneralisedNewtypeDeriving language extension + * Added cabal-version field + * Support hidden modules with haddock + * Internal code refactoring + * More bug fixes + +1.1.3 Isaac Jones Sept 2005 + * WARNING: Interfaces not documented in the user's guide may + change in future releases. + * Move building of GHCi .o libs to the build phase rather than + register phase. (from Duncan Coutts) + * Use .tar.gz for source package extension + * Uses GHC instead of cpphs if the latter is not available + * Added experimental "command hooks" which completely override the + default behavior of a command. + * Some bugfixes + +1.1.1 Isaac Jones July 2005 + * WARNING: Interfaces not documented in the user's guide may + change in future releases. + * Handles recursive modules for GHC 6.2 and GHC 6.4. + * Added "setup test" command (Used with UserHook) + * implemented handling of _stub.{c,h,o} files + * Added support for profiling + * Changed install prefix of libraries (pref/pkgname-version + to prefix/pkgname-version/compname-version) + * Added pattern guards as a language extension + * Moved some functionality to Language.Haskell.Extension + * Register / unregister .bat files for windows + * Exposed more of the API + * Added support for the hide-all-packages flag in GHC > 6.4 + * Several bug fixes + +1.0 Isaac Jones March 11 2005 + * Released with GHC 6.4, Hugs March 2005, and nhc98 1.18 + * Some sanity checking + +0.5 Isaac Jones Wed Feb 19 2005 + * WARNING: this is a pre-release and the interfaces are still + likely to change until we reach a 1.0 release. + * Hooks interfaces changed + * Added preprocessors to user hooks + * No more executable-modules or hidden-modules. Use + "other-modules" instead. + * Certain fields moved into BuildInfo, much refactoring + * extra-libs -> extra-libraries + * Added --gen-script to configure and unconfigure. + * modules-ghc (etc) now ghc-modules (etc) + * added new fields including "synopsis" + * Lots of bug fixes + * spaces can sometimes be used instead of commas + * A user manual has appeared (Thanks, ross!) + * for ghc 6.4, configures versionsed depends properly + * more features to ./setup haddock + +0.4 Isaac Jones Sun Jan 16 2005 + + * Much thanks to all the awesome fptools hackers who have been + working hard to build the Haskell Cabal! + + * Interface Changes: + + ** WARNING: this is a pre-release and the interfaces are still + likely to change until we reach a 1.0 release. + + ** Instead of Package.description, you should name your + description files .cabal. In particular, we suggest + that you name it .cabal, but this is not enforced + (yet). Multiple .cabal files in the same directory is an error, + at least for now. + + ** ./setup install --install-prefix is gone. 
Use ./setup copy + --copy-prefix instead. + + ** The "Modules" field is gone. Use "hidden-modules", + "exposed-modules", and "executable-modules". + + ** Build-depends is now a package-only field, and can't go into + executable stanzas. Build-depends is a package-to-package + relationship. + + ** Some new fields. Use the Source. + + * New Features + + ** Cabal is now included as a package in the CVS version of + fptools. That means it'll be released as "-package Cabal" in + future versions of the compilers, and if you are a bleeding-edge + user, you can grab it from the CVS repository with the compilers. + + ** Hugs compatibility and NHC98 compatibility should both be + improved. + + ** Hooks Interface / Autoconf compatibility: Most of the hooks + interface is hidden for now, because it's not finalized. I have + exposed only "defaultMainWithHooks" and "defaultUserHooks". This + allows you to use a ./configure script to preprocess + "foo.buildinfo", which gets merged with "foo.cabal". In future + releases, we'll expose UserHooks, but we're definitely going to + change the interface to those. The interface to the two functions + I've exposed should stay the same, though. + + ** ./setup haddock is a baby feature which pre-processes the + source code with hscpp and runs haddock on it. This is brand new + and hardly tested, so you get to knock it around and see what you + think. + + ** Some commands now actually implement verbosity. + + ** The preprocessors have been tested a bit more, and seem to work + OK. Please give feedback if you use these. + +0.3 Isaac Jones Sun Jan 16 2005 + * Unstable snapshot release + * From now on, stable releases are even. + +0.2 Isaac Jones + + * Adds more HUGS support and preprocessor support. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Class.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Class.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Class.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Class.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,518 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE Trustworthy #-} +{-# LANGUAGE DefaultSignatures #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Binary.Class +-- Copyright : Lennart Kolmodin +-- License : BSD3-style (see LICENSE) +-- +-- Maintainer : Lennart Kolmodin +-- Stability : unstable +-- Portability : portable to Hugs and GHC. Requires the FFI and some flexible instances +-- +-- Typeclass and instances for binary serialization. +-- +----------------------------------------------------------------------------- + +module Distribution.Compat.Binary.Class ( + + -- * The Binary class + Binary(..) + + -- * Support for generics + , GBinary(..) 
+ + ) where + +import Data.Word + +import Data.Binary.Put +import Data.Binary.Get + +import Control.Monad +import Foreign + +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as L + +import Data.Char (chr,ord) +import Data.List (unfoldr) + +-- And needed for the instances: +import qualified Data.ByteString as B +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.IntMap as IntMap +import qualified Data.IntSet as IntSet +import qualified Data.Ratio as R + +import qualified Data.Tree as T + +import Data.Array.Unboxed + +import GHC.Generics + +import qualified Data.Sequence as Seq +import qualified Data.Foldable as Fold + +------------------------------------------------------------------------ + +class GBinary f where + gput :: f t -> Put + gget :: Get (f t) + +-- | The 'Binary' class provides 'put' and 'get', methods to encode and +-- decode a Haskell value to a lazy 'ByteString'. It mirrors the 'Read' and +-- 'Show' classes for textual representation of Haskell types, and is +-- suitable for serialising Haskell values to disk, over the network. +-- +-- For decoding and generating simple external binary formats (e.g. C +-- structures), Binary may be used, but in general is not suitable +-- for complex protocols. Instead use the 'Put' and 'Get' primitives +-- directly. +-- +-- Instances of Binary should satisfy the following property: +-- +-- > decode . encode == id +-- +-- That is, the 'get' and 'put' methods should be the inverse of each +-- other. A range of instances are provided for basic Haskell types. +-- +class Binary t where + -- | Encode a value in the Put monad. + put :: t -> Put + -- | Decode a value in the Get monad + get :: Get t + + default put :: (Generic t, GBinary (Rep t)) => t -> Put + put = gput . from + + default get :: (Generic t, GBinary (Rep t)) => Get t + get = to `fmap` gget + +------------------------------------------------------------------------ +-- Simple instances + +-- The () type need never be written to disk: values of singleton type +-- can be reconstructed from the type alone +instance Binary () where + put () = return () + get = return () + +-- Bools are encoded as a byte in the range 0 .. 1 +instance Binary Bool where + put = putWord8 . fromIntegral . fromEnum + get = liftM (toEnum . fromIntegral) getWord8 + +-- Values of type 'Ordering' are encoded as a byte in the range 0 .. 2 +instance Binary Ordering where + put = putWord8 . fromIntegral . fromEnum + get = liftM (toEnum . fromIntegral) getWord8 + +------------------------------------------------------------------------ +-- Words and Ints + +-- Words8s are written as bytes +instance Binary Word8 where + put = putWord8 + get = getWord8 + +-- Words16s are written as 2 bytes in big-endian (network) order +instance Binary Word16 where + put = putWord16be + get = getWord16be + +-- Words32s are written as 4 bytes in big-endian (network) order +instance Binary Word32 where + put = putWord32be + get = getWord32be + +-- Words64s are written as 8 bytes in big-endian (network) order +instance Binary Word64 where + put = putWord64be + get = getWord64be + +-- Int8s are written as a single byte. 
+instance Binary Int8 where + put i = put (fromIntegral i :: Word8) + get = liftM fromIntegral (get :: Get Word8) + +-- Int16s are written as a 2 bytes in big endian format +instance Binary Int16 where + put i = put (fromIntegral i :: Word16) + get = liftM fromIntegral (get :: Get Word16) + +-- Int32s are written as a 4 bytes in big endian format +instance Binary Int32 where + put i = put (fromIntegral i :: Word32) + get = liftM fromIntegral (get :: Get Word32) + +-- Int64s are written as a 4 bytes in big endian format +instance Binary Int64 where + put i = put (fromIntegral i :: Word64) + get = liftM fromIntegral (get :: Get Word64) + +------------------------------------------------------------------------ + +-- Words are are written as Word64s, that is, 8 bytes in big endian format +instance Binary Word where + put i = put (fromIntegral i :: Word64) + get = liftM fromIntegral (get :: Get Word64) + +-- Ints are are written as Int64s, that is, 8 bytes in big endian format +instance Binary Int where + put i = put (fromIntegral i :: Int64) + get = liftM fromIntegral (get :: Get Int64) + +------------------------------------------------------------------------ +-- +-- Portable, and pretty efficient, serialisation of Integer +-- + +-- Fixed-size type for a subset of Integer +type SmallInt = Int32 + +-- Integers are encoded in two ways: if they fit inside a SmallInt, +-- they're written as a byte tag, and that value. If the Integer value +-- is too large to fit in a SmallInt, it is written as a byte array, +-- along with a sign and length field. + +instance Binary Integer where + + {-# INLINE put #-} + put n | n >= lo && n <= hi = do + putWord8 0 + put (fromIntegral n :: SmallInt) -- fast path + where + lo = fromIntegral (minBound :: SmallInt) :: Integer + hi = fromIntegral (maxBound :: SmallInt) :: Integer + + put n = do + putWord8 1 + put sign + put (unroll (abs n)) -- unroll the bytes + where + sign = fromIntegral (signum n) :: Word8 + + {-# INLINE get #-} + get = do + tag <- get :: Get Word8 + case tag of + 0 -> liftM fromIntegral (get :: Get SmallInt) + _ -> do sign <- get + bytes <- get + let v = roll bytes + return $! if sign == (1 :: Word8) then v else - v + +-- +-- Fold and unfold an Integer to and from a list of its bytes +-- +unroll :: Integer -> [Word8] +unroll = unfoldr step + where + step 0 = Nothing + step i = Just (fromIntegral i, i `shiftR` 8) + +roll :: [Word8] -> Integer +roll = foldr unstep 0 + where + unstep b a = a `shiftL` 8 .|. fromIntegral b + +{- + +-- +-- An efficient, raw serialisation for Integer (GHC only) +-- + +-- TODO This instance is not architecture portable. GMP stores numbers as +-- arrays of machine sized words, so the byte format is not portable across +-- architectures with different endianness and word size. + +import Data.ByteString.Base (toForeignPtr,unsafePackAddress, memcpy) +import GHC.Base hiding (ord, chr) +import GHC.Prim +import GHC.Ptr (Ptr(..)) +import GHC.IOBase (IO(..)) + +instance Binary Integer where + put (S# i) = putWord8 0 >> put (I# i) + put (J# s ba) = do + putWord8 1 + put (I# s) + put (BA ba) + + get = do + b <- getWord8 + case b of + 0 -> do (I# i#) <- get + return (S# i#) + _ -> do (I# s#) <- get + (BA a#) <- get + return (J# s# a#) + +instance Binary ByteArray where + + -- Pretty safe. + put (BA ba) = + let sz = sizeofByteArray# ba -- (primitive) in *bytes* + addr = byteArrayContents# ba + bs = unsafePackAddress (I# sz) addr + in put bs -- write as a ByteString. easy, yay! + + -- Pretty scary. 
Should be quick though + get = do + (fp, off, n@(I# sz)) <- liftM toForeignPtr get -- so decode a ByteString + assert (off == 0) $ return $ unsafePerformIO $ do + (MBA arr) <- newByteArray sz -- and copy it into a ByteArray# + let to = byteArrayContents# (unsafeCoerce# arr) -- urk, is this safe? + withForeignPtr fp $ \from -> memcpy (Ptr to) from (fromIntegral n) + freezeByteArray arr + +-- wrapper for ByteArray# +data ByteArray = BA {-# UNPACK #-} !ByteArray# +data MBA = MBA {-# UNPACK #-} !(MutableByteArray# RealWorld) + +newByteArray :: Int# -> IO MBA +newByteArray sz = IO $ \s -> + case newPinnedByteArray# sz s of { (# s', arr #) -> + (# s', MBA arr #) } + +freezeByteArray :: MutableByteArray# RealWorld -> IO ByteArray +freezeByteArray arr = IO $ \s -> + case unsafeFreezeByteArray# arr s of { (# s', arr' #) -> + (# s', BA arr' #) } + +-} + +instance (Binary a,Integral a) => Binary (R.Ratio a) where + put r = put (R.numerator r) >> put (R.denominator r) + get = liftM2 (R.%) get get + +------------------------------------------------------------------------ + +-- Char is serialised as UTF-8 +instance Binary Char where + put a | c <= 0x7f = put (fromIntegral c :: Word8) + | c <= 0x7ff = do put (0xc0 .|. y) + put (0x80 .|. z) + | c <= 0xffff = do put (0xe0 .|. x) + put (0x80 .|. y) + put (0x80 .|. z) + | c <= 0x10ffff = do put (0xf0 .|. w) + put (0x80 .|. x) + put (0x80 .|. y) + put (0x80 .|. z) + | otherwise = error "Not a valid Unicode code point" + where + c = ord a + z, y, x, w :: Word8 + z = fromIntegral (c .&. 0x3f) + y = fromIntegral (shiftR c 6 .&. 0x3f) + x = fromIntegral (shiftR c 12 .&. 0x3f) + w = fromIntegral (shiftR c 18 .&. 0x7) + + get = do + let getByte = liftM (fromIntegral :: Word8 -> Int) get + shiftL6 = flip shiftL 6 :: Int -> Int + w <- getByte + r <- case () of + _ | w < 0x80 -> return w + | w < 0xe0 -> do + x <- liftM (xor 0x80) getByte + return (x .|. shiftL6 (xor 0xc0 w)) + | w < 0xf0 -> do + x <- liftM (xor 0x80) getByte + y <- liftM (xor 0x80) getByte + return (y .|. shiftL6 (x .|. shiftL6 + (xor 0xe0 w))) + | otherwise -> do + x <- liftM (xor 0x80) getByte + y <- liftM (xor 0x80) getByte + z <- liftM (xor 0x80) getByte + return (z .|. shiftL6 (y .|. shiftL6 + (x .|. shiftL6 (xor 0xf0 w)))) + return $! 
chr r + +------------------------------------------------------------------------ +-- Instances for the first few tuples + +instance (Binary a, Binary b) => Binary (a,b) where + put (a,b) = put a >> put b + get = liftM2 (,) get get + +instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where + put (a,b,c) = put a >> put b >> put c + get = liftM3 (,,) get get get + +instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where + put (a,b,c,d) = put a >> put b >> put c >> put d + get = liftM4 (,,,) get get get get + +instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d,e) where + put (a,b,c,d,e) = put a >> put b >> put c >> put d >> put e + get = liftM5 (,,,,) get get get get get + +-- +-- and now just recurse: +-- + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f) + => Binary (a,b,c,d,e,f) where + put (a,b,c,d,e,f) = put (a,(b,c,d,e,f)) + get = do (a,(b,c,d,e,f)) <- get ; return (a,b,c,d,e,f) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f, Binary g) + => Binary (a,b,c,d,e,f,g) where + put (a,b,c,d,e,f,g) = put (a,(b,c,d,e,f,g)) + get = do (a,(b,c,d,e,f,g)) <- get ; return (a,b,c,d,e,f,g) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h) + => Binary (a,b,c,d,e,f,g,h) where + put (a,b,c,d,e,f,g,h) = put (a,(b,c,d,e,f,g,h)) + get = do (a,(b,c,d,e,f,g,h)) <- get ; return (a,b,c,d,e,f,g,h) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h, Binary i) + => Binary (a,b,c,d,e,f,g,h,i) where + put (a,b,c,d,e,f,g,h,i) = put (a,(b,c,d,e,f,g,h,i)) + get = do (a,(b,c,d,e,f,g,h,i)) <- get ; return (a,b,c,d,e,f,g,h,i) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h, Binary i, Binary j) + => Binary (a,b,c,d,e,f,g,h,i,j) where + put (a,b,c,d,e,f,g,h,i,j) = put (a,(b,c,d,e,f,g,h,i,j)) + get = do (a,(b,c,d,e,f,g,h,i,j)) <- get ; return (a,b,c,d,e,f,g,h,i,j) + +------------------------------------------------------------------------ +-- Container types + +instance Binary a => Binary [a] where + put l = put (length l) >> mapM_ put l + get = do n <- get :: Get Int + getMany n + +-- | 'getMany n' get 'n' elements in order, without blowing the stack. +getMany :: Binary a => Int -> Get [a] +getMany n = go [] n + where + go xs 0 = return $! 
reverse xs + go xs i = do x <- get + -- we must seq x to avoid stack overflows due to laziness in + -- (>>=) + x `seq` go (x:xs) (i-1) +{-# INLINE getMany #-} + +instance (Binary a) => Binary (Maybe a) where + put Nothing = putWord8 0 + put (Just x) = putWord8 1 >> put x + get = do + w <- getWord8 + case w of + 0 -> return Nothing + _ -> liftM Just get + +instance (Binary a, Binary b) => Binary (Either a b) where + put (Left a) = putWord8 0 >> put a + put (Right b) = putWord8 1 >> put b + get = do + w <- getWord8 + case w of + 0 -> liftM Left get + _ -> liftM Right get + +------------------------------------------------------------------------ +-- ByteStrings (have specially efficient instances) + +instance Binary B.ByteString where + put bs = do put (B.length bs) + putByteString bs + get = get >>= getByteString + +-- +-- Using old versions of fps, this is a type synonym, and non portable +-- +-- Requires 'flexible instances' +-- +instance Binary ByteString where + put bs = do put (fromIntegral (L.length bs) :: Int) + putLazyByteString bs + get = get >>= getLazyByteString + +------------------------------------------------------------------------ +-- Maps and Sets + +instance (Binary a) => Binary (Set.Set a) where + put s = put (Set.size s) >> mapM_ put (Set.toAscList s) + get = liftM Set.fromDistinctAscList get + +instance (Binary k, Binary e) => Binary (Map.Map k e) where + put m = put (Map.size m) >> mapM_ put (Map.toAscList m) + get = liftM Map.fromDistinctAscList get + +instance Binary IntSet.IntSet where + put s = put (IntSet.size s) >> mapM_ put (IntSet.toAscList s) + get = liftM IntSet.fromDistinctAscList get + +instance (Binary e) => Binary (IntMap.IntMap e) where + put m = put (IntMap.size m) >> mapM_ put (IntMap.toAscList m) + get = liftM IntMap.fromDistinctAscList get + +------------------------------------------------------------------------ +-- Queues and Sequences + +instance (Binary e) => Binary (Seq.Seq e) where + put s = put (Seq.length s) >> Fold.mapM_ put s + get = do n <- get :: Get Int + rep Seq.empty n get + where rep xs 0 _ = return $! xs + rep xs n g = xs `seq` n `seq` do + x <- g + rep (xs Seq.|> x) (n-1) g + +------------------------------------------------------------------------ +-- Floating point + +instance Binary Double where + put d = put (decodeFloat d) + get = liftM2 encodeFloat get get + +instance Binary Float where + put f = put (decodeFloat f) + get = liftM2 encodeFloat get get + +------------------------------------------------------------------------ +-- Trees + +instance (Binary e) => Binary (T.Tree e) where + put (T.Node r s) = put r >> put s + get = liftM2 T.Node get get + +------------------------------------------------------------------------ +-- Arrays + +instance (Binary i, Ix i, Binary e) => Binary (Array i e) where + put a = do + put (bounds a) + put (rangeSize $ bounds a) -- write the length + mapM_ put (elems a) -- now the elems. + get = do + bs <- get + n <- get -- read the length + xs <- getMany n -- now the elems. + return (listArray bs xs) + +-- +-- The IArray UArray e constraint is non portable. 
Requires flexible instances +-- +instance (Binary i, Ix i, Binary e, IArray UArray e) => Binary (UArray i e) where + put a = do + put (bounds a) + put (rangeSize $ bounds a) -- now write the length + mapM_ put (elems a) + get = do + bs <- get + n <- get + xs <- getMany n + return (listArray bs xs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Generic.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Generic.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Generic.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary/Generic.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,128 @@ +{-# LANGUAGE BangPatterns, CPP, FlexibleInstances, KindSignatures, + ScopedTypeVariables, Trustworthy, TypeOperators, TypeSynonymInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Binary.Generic +-- Copyright : Bryan O'Sullivan +-- License : BSD3-style (see LICENSE) +-- +-- Maintainer : Bryan O'Sullivan +-- Stability : unstable +-- Portability : Only works with GHC 7.2 and newer +-- +-- Instances for supporting GHC generics. +-- +----------------------------------------------------------------------------- +module Distribution.Compat.Binary.Generic + ( + ) where + +import Control.Applicative +import Distribution.Compat.Binary.Class +import Data.Binary.Get +import Data.Binary.Put +import Data.Bits +import Data.Word +import GHC.Generics + +-- Type without constructors +instance GBinary V1 where + gput _ = return () + gget = return undefined + +-- Constructor without arguments +instance GBinary U1 where + gput U1 = return () + gget = return U1 + +-- Product: constructor with parameters +instance (GBinary a, GBinary b) => GBinary (a :*: b) where + gput (x :*: y) = gput x >> gput y + gget = (:*:) <$> gget <*> gget + +-- Metadata (constructor name, etc) +instance GBinary a => GBinary (M1 i c a) where + gput = gput . unM1 + gget = M1 <$> gget + +-- Constants, additional parameters, and rank-1 recursion +instance Binary a => GBinary (K1 i a) where + gput = put . unK1 + gget = K1 <$> get + +-- Borrowed from the cereal package. + +-- The following GBinary instance for sums has support for serializing +-- types with up to 2^64-1 constructors. It will use the minimal +-- number of bytes needed to encode the constructor. For example when +-- a type has 2^8 constructors or less it will use a single byte to +-- encode the constructor. If it has 2^16 constructors or less it will +-- use two bytes, and so on till 2^64-1. 
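-- Editor's note (illustrative sketch, not part of the patch): the tag-width
-- rule described above amounts to choosing the smallest Word type whose
-- maxBound covers (size - 1). 'tagBytes' is a hypothetical standalone
-- restatement of that rule; Data.Word is already imported by this module.
tagBytes :: Integer -> Int
tagBytes size
  | size - 1 <= toInteger (maxBound :: Word8)  = 1
  | size - 1 <= toInteger (maxBound :: Word16) = 2
  | size - 1 <= toInteger (maxBound :: Word32) = 4
  | otherwise                                  = 8
-- For example, a type with 300 constructors gets a two-byte tag, which is the
-- same dispatch the GUARD/PUTSUM/GETSUM macros below perform.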
+ +#define GUARD(WORD) (size - 1) <= fromIntegral (maxBound :: WORD) +#define PUTSUM(WORD) GUARD(WORD) = putSum (0 :: WORD) (fromIntegral size) +#define GETSUM(WORD) GUARD(WORD) = (get :: Get WORD) >>= checkGetSum (fromIntegral size) + +instance ( GSum a, GSum b + , GBinary a, GBinary b + , SumSize a, SumSize b) => GBinary (a :+: b) where + gput | PUTSUM(Word8) | PUTSUM(Word16) | PUTSUM(Word32) | PUTSUM(Word64) + | otherwise = sizeError "encode" size + where + size = unTagged (sumSize :: Tagged (a :+: b) Word64) + + gget | GETSUM(Word8) | GETSUM(Word16) | GETSUM(Word32) | GETSUM(Word64) + | otherwise = sizeError "decode" size + where + size = unTagged (sumSize :: Tagged (a :+: b) Word64) + +sizeError :: Show size => String -> size -> error +sizeError s size = + error $ "Can't " ++ s ++ " a type with " ++ show size ++ " constructors" + +------------------------------------------------------------------------ + +checkGetSum :: (Ord word, Num word, Bits word, GSum f) + => word -> word -> Get (f a) +checkGetSum size code | code < size = getSum code size + | otherwise = fail "Unknown encoding for constructor" +{-# INLINE checkGetSum #-} + +class GSum f where + getSum :: (Ord word, Num word, Bits word) => word -> word -> Get (f a) + putSum :: (Num w, Bits w, Binary w) => w -> w -> f a -> Put + +instance (GSum a, GSum b, GBinary a, GBinary b) => GSum (a :+: b) where + getSum !code !size | code < sizeL = L1 <$> getSum code sizeL + | otherwise = R1 <$> getSum (code - sizeL) sizeR + where + sizeL = size `shiftR` 1 + sizeR = size - sizeL + + putSum !code !size s = case s of + L1 x -> putSum code sizeL x + R1 x -> putSum (code + sizeL) sizeR x + where + sizeL = size `shiftR` 1 + sizeR = size - sizeL + +instance GBinary a => GSum (C1 c a) where + getSum _ _ = gget + + putSum !code _ x = put code *> gput x + +------------------------------------------------------------------------ + +class SumSize f where + sumSize :: Tagged f Word64 + +newtype Tagged (s :: * -> *) b = Tagged {unTagged :: b} + +instance (SumSize a, SumSize b) => SumSize (a :+: b) where + sumSize = Tagged $ unTagged (sumSize :: Tagged a Word64) + + unTagged (sumSize :: Tagged b Word64) + +instance SumSize (C1 c a) where + sumSize = Tagged 1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Binary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Binary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,60 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 711 +{-# LANGUAGE PatternSynonyms #-} +#endif + +#ifndef MIN_VERSION_binary +#define MIN_VERSION_binary(x, y, z) 0 +#endif + +module Distribution.Compat.Binary + ( decodeOrFailIO +#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) + , module Data.Binary +#else + , Binary(..) 
+ , decode, encode +#endif + ) where + +import Control.Exception (catch, evaluate) +#if __GLASGOW_HASKELL__ >= 711 +import Control.Exception (pattern ErrorCall) +#else +import Control.Exception (ErrorCall(..)) +#endif +import Data.ByteString.Lazy (ByteString) + +#if __GLASGOW_HASKELL__ < 706 +import Prelude hiding (catch) +#endif + +#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) + +import Data.Binary + +#else + +import Data.Binary.Get +import Data.Binary.Put + +import Distribution.Compat.Binary.Class +import Distribution.Compat.Binary.Generic () + +-- | Decode a value from a lazy ByteString, reconstructing the original structure. +-- +decode :: Binary a => ByteString -> a +decode = runGet get + +-- | Encode a value using binary serialisation to a lazy ByteString. +-- +encode :: Binary a => a -> ByteString +encode = runPut . put +{-# INLINE encode #-} + +#endif + +decodeOrFailIO :: Binary a => ByteString -> IO (Either String a) +decodeOrFailIO bs = + catch (evaluate (decode bs) >>= return . Right) + $ \(ErrorCall str) -> return $ Left str diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/CopyFile.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/CopyFile.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/CopyFile.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/CopyFile.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,106 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Compat.CopyFile ( + copyFile, + copyFileChanged, + filesEqual, + copyOrdinaryFile, + copyExecutableFile, + setFileOrdinary, + setFileExecutable, + setDirOrdinary, + ) where + +import Distribution.Compat.Exception +import Distribution.Compat.Internal.TempFile + +import Control.Monad + ( when, unless ) +import Control.Exception + ( bracketOnError, throwIO ) +import qualified Data.ByteString.Lazy as BSL +import System.IO.Error + ( ioeSetLocation ) +import System.Directory + ( doesFileExist, renameFile, removeFile ) +import System.FilePath + ( takeDirectory ) +import System.IO + ( IOMode(ReadMode), hClose, hGetBuf, hPutBuf + , withBinaryFile ) +import Foreign + ( allocaBytes ) + +#ifndef mingw32_HOST_OS +import System.Posix.Internals (withFilePath) +import System.Posix.Types + ( FileMode ) +import System.Posix.Internals + ( c_chmod ) +import Foreign.C + ( throwErrnoPathIfMinus1_ ) +#endif /* mingw32_HOST_OS */ + +copyOrdinaryFile, copyExecutableFile :: FilePath -> FilePath -> IO () +copyOrdinaryFile src dest = copyFile src dest >> setFileOrdinary dest +copyExecutableFile src dest = copyFile src dest >> setFileExecutable dest + +setFileOrdinary, setFileExecutable, setDirOrdinary :: FilePath -> IO () +#ifndef mingw32_HOST_OS +setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- +setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x + +setFileMode :: FilePath -> FileMode -> IO () +setFileMode name m = + withFilePath name $ \s -> do + throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) +#else +setFileOrdinary _ = return () +setFileExecutable _ = return () +#endif +-- This happens to be true on Unix and currently on Windows too: +setDirOrdinary = setFileExecutable + +-- | Copies a file to a new destination. +-- Often you should use `copyFileChanged` instead. 
+copyFile :: FilePath -> FilePath -> IO () +copyFile fromFPath toFPath = + copy + `catchIO` (\ioe -> throwIO (ioeSetLocation ioe "copyFile")) + where copy = withBinaryFile fromFPath ReadMode $ \hFrom -> + bracketOnError openTmp cleanTmp $ \(tmpFPath, hTmp) -> + do allocaBytes bufferSize $ copyContents hFrom hTmp + hClose hTmp + renameFile tmpFPath toFPath + openTmp = openBinaryTempFile (takeDirectory toFPath) ".copyFile.tmp" + cleanTmp (tmpFPath, hTmp) = do + hClose hTmp `catchIO` \_ -> return () + removeFile tmpFPath `catchIO` \_ -> return () + bufferSize = 4096 + + copyContents hFrom hTo buffer = do + count <- hGetBuf hFrom buffer bufferSize + when (count > 0) $ do + hPutBuf hTo buffer count + copyContents hFrom hTo buffer + +-- | Like `copyFile`, but does not touch the target if source and destination +-- are already byte-identical. This is recommended as it is useful for +-- time-stamp based recompilation avoidance. +copyFileChanged :: FilePath -> FilePath -> IO () +copyFileChanged src dest = do + equal <- filesEqual src dest + unless equal $ copyFile src dest + +-- | Checks if two files are byte-identical. +-- Returns False if either of the files do not exist. +filesEqual :: FilePath -> FilePath -> IO Bool +filesEqual f1 f2 = do + ex1 <- doesFileExist f1 + ex2 <- doesFileExist f2 + if not (ex1 && ex2) then return False else + withBinaryFile f1 ReadMode $ \h1 -> + withBinaryFile f2 ReadMode $ \h2 -> do + c1 <- BSL.hGetContents h1 + c2 <- BSL.hGetContents h2 + return $! c1 == c2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/CreatePipe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/CreatePipe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/CreatePipe.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,62 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} +module Distribution.Compat.CreatePipe (createPipe) where + +import System.IO (Handle, hSetEncoding, localeEncoding) + +-- The mingw32_HOST_OS CPP macro is GHC-specific +#if mingw32_HOST_OS +import Control.Exception (onException) +import Foreign.C.Error (throwErrnoIfMinus1_) +import Foreign.C.Types (CInt(..), CUInt(..)) +import Foreign.Ptr (Ptr) +import Foreign.Marshal.Array (allocaArray) +import Foreign.Storable (peek, peekElemOff) +import GHC.IO.FD (mkFD) +import GHC.IO.Device (IODeviceType(Stream)) +import GHC.IO.Handle.FD (mkHandleFromFD) +import System.IO (IOMode(ReadMode, WriteMode)) +#elif ghcjs_HOST_OS +#else +import System.Posix.IO (fdToHandle) +import qualified System.Posix.IO as Posix +#endif + +createPipe :: IO (Handle, Handle) +-- The mingw32_HOST_OS CPP macro is GHC-specific +#if mingw32_HOST_OS +createPipe = do + (readfd, writefd) <- allocaArray 2 $ \ pfds -> do + throwErrnoIfMinus1_ "_pipe" $ c__pipe pfds 2 ({- _O_BINARY -} 32768) + readfd <- peek pfds + writefd <- peekElemOff pfds 1 + return (readfd, writefd) + (do readh <- fdToHandle readfd ReadMode + writeh <- fdToHandle writefd WriteMode + hSetEncoding readh localeEncoding + hSetEncoding writeh localeEncoding + return (readh, writeh)) `onException` (close readfd >> close writefd) + where + fdToHandle :: CInt -> IOMode -> IO Handle + fdToHandle fd mode = do + (fd', deviceType) <- mkFD fd mode (Just (Stream, 0, 0)) False False + mkHandleFromFD fd' deviceType "" mode False Nothing + + 
close :: CInt -> IO () + close = throwErrnoIfMinus1_ "_close" . c__close + +foreign import ccall "io.h _pipe" c__pipe :: + Ptr CInt -> CUInt -> CInt -> IO CInt + +foreign import ccall "io.h _close" c__close :: + CInt -> IO CInt +#elif ghcjs_HOST_OS +createPipe = error "createPipe" +#else +createPipe = do + (readfd, writefd) <- Posix.createPipe + readh <- fdToHandle readfd + writeh <- fdToHandle writefd + hSetEncoding readh localeEncoding + hSetEncoding writeh localeEncoding + return (readh, writeh) +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Environment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Environment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Environment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Environment.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,89 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ForeignFunctionInterface #-} +{-# OPTIONS_HADDOCK hide #-} + +module Distribution.Compat.Environment + ( getEnvironment, lookupEnv, setEnv ) + where + +import qualified System.Environment as System +#if __GLASGOW_HASKELL__ >= 706 +import System.Environment (lookupEnv) +#else +import Distribution.Compat.Exception (catchIO) +#endif + +#ifdef mingw32_HOST_OS +import Control.Monad +import qualified Data.Char as Char (toUpper) +import Foreign.C +import GHC.Windows +#else +import Foreign.C.Types +import Foreign.C.String +import Foreign.C.Error (throwErrnoIfMinus1_) +import System.Posix.Internals ( withFilePath ) +#endif /* mingw32_HOST_OS */ + +getEnvironment :: IO [(String, String)] +#ifdef mingw32_HOST_OS +-- On Windows, the names of environment variables are case-insensitive, but are +-- often given in mixed-case (e.g. "PATH" is "Path"), so we have to normalise +-- them. +getEnvironment = fmap upcaseVars System.getEnvironment + where + upcaseVars = map upcaseVar + upcaseVar (var, val) = (map Char.toUpper var, val) +#else +getEnvironment = System.getEnvironment +#endif + +#if __GLASGOW_HASKELL__ < 706 +-- | @lookupEnv var@ returns the value of the environment variable @var@, or +-- @Nothing@ if there is no such value. +lookupEnv :: String -> IO (Maybe String) +lookupEnv name = (Just `fmap` System.getEnv name) `catchIO` const (return Nothing) +#endif /* __GLASGOW_HASKELL__ < 706 */ + +-- | @setEnv name value@ sets the specified environment variable to @value@. +-- +-- Throws `Control.Exception.IOException` if either @name@ or @value@ is the +-- empty string or contains an equals sign. +setEnv :: String -> String -> IO () +setEnv key value_ + | null value = error "Distribuiton.Compat.setEnv: empty string" + | otherwise = setEnv_ key value + where + -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. We + -- still strip it manually so that the null check above succeeds if a value + -- starts with NUL. 
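-- Editor's note (illustrative): with this stripping, setEnv "FOO" "bar\NULbaz"
-- stores "bar", while setEnv "FOO" "\NULbar" trips the empty-string guard
-- above instead of silently setting an empty variable.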
+ value = takeWhile (/= '\NUL') value_ + +setEnv_ :: String -> String -> IO () + +#ifdef mingw32_HOST_OS + +setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do + success <- c_SetEnvironmentVariable k v + unless success (throwGetLastError "setEnv") + +# if defined(i386_HOST_ARCH) +# define WINDOWS_CCONV stdcall +# elif defined(x86_64_HOST_ARCH) +# define WINDOWS_CCONV ccall +# else +# error Unknown mingw32 arch +# endif /* i386_HOST_ARCH */ + +foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" + c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> IO Bool +#else +setEnv_ key value = do + withFilePath key $ \ keyP -> + withFilePath value $ \ valueP -> + throwErrnoIfMinus1_ "setenv" $ + c_setenv keyP valueP (fromIntegral (fromEnum True)) + +foreign import ccall unsafe "setenv" + c_setenv :: CString -> CString -> CInt -> IO CInt +#endif /* mingw32_HOST_OS */ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Exception.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Exception.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Exception.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Exception.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,17 @@ +module Distribution.Compat.Exception ( + catchIO, + catchExit, + tryIO, + ) where + +import System.Exit +import qualified Control.Exception as Exception + +tryIO :: IO a -> IO (Either Exception.IOException a) +tryIO = Exception.try + +catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a +catchIO = Exception.catch + +catchExit :: IO a -> (ExitCode -> IO a) -> IO a +catchExit = Exception.catch diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/GetShortPathName.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/GetShortPathName.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/GetShortPathName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/GetShortPathName.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,56 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.GetShortPathName +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : Windows-only +-- +-- Win32 API 'GetShortPathName' function. + +module Distribution.Compat.GetShortPathName ( getShortPathName ) + where + +#ifdef mingw32_HOST_OS +import Control.Monad (void) + +import qualified System.Win32 as Win32 +import System.Win32 (LPCTSTR, LPTSTR, DWORD) +import Foreign.Marshal.Array (allocaArray) + +#ifdef x86_64_HOST_ARCH +#define WINAPI ccall +#else +#define WINAPI stdcall +#endif + +foreign import WINAPI unsafe "windows.h GetShortPathNameW" + c_GetShortPathName :: LPCTSTR -> LPTSTR -> DWORD -> IO DWORD + +-- | On Windows, retrieves the short path form of the specified path. On +-- non-Windows, does nothing. See https://github.com/haskell/cabal/issues/3185. +-- +-- From MS's GetShortPathName docs: +-- +-- Passing NULL for [the second] parameter and zero for cchBuffer +-- will always return the required buffer size for a +-- specified lpszLongPath. 
+-- +getShortPathName :: FilePath -> IO FilePath +getShortPathName path = + Win32.withTString path $ \c_path -> do + c_len <- Win32.failIfZero "GetShortPathName #1 failed!" $ + c_GetShortPathName c_path Win32.nullPtr 0 + let arr_len = fromIntegral c_len + allocaArray arr_len $ \c_out -> do + void $ Win32.failIfZero "GetShortPathName #2 failed!" $ + c_GetShortPathName c_path c_out c_len + Win32.peekTString c_out + +#else + +getShortPathName :: FilePath -> IO FilePath +getShortPathName path = return path + +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Internal/TempFile.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Internal/TempFile.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Internal/TempFile.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Internal/TempFile.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,124 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Compat.Internal.TempFile ( + openTempFile, + openBinaryTempFile, + openNewBinaryFile, + createTempDirectory, + ) where + +import Distribution.Compat.Exception + +import System.FilePath (()) +import Foreign.C (CInt, eEXIST, getErrno, errnoToIOError) + +import System.IO (Handle, openTempFile, openBinaryTempFile) +import Data.Bits ((.|.)) +import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR, + o_BINARY, o_NONBLOCK, o_NOCTTY, + withFilePath, c_getpid) +import System.IO.Error (isAlreadyExistsError) +import GHC.IO.Handle.FD (fdToHandle) +import Control.Exception (onException) + +#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) +import System.Directory ( createDirectory ) +#else +import qualified System.Posix +#endif + +-- ------------------------------------------------------------ +-- * temporary files +-- ------------------------------------------------------------ + +-- This is here for Haskell implementations that do not come with +-- System.IO.openTempFile. This includes nhc-1.20, hugs-2006.9. +-- TODO: Not sure about JHC +-- TODO: This file should probably be removed. + +-- This is a copy/paste of the openBinaryTempFile definition, but +-- if uses 666 rather than 600 for the permissions. The base library +-- needs to be changed to make this better. +openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle) +openNewBinaryFile dir template = do + pid <- c_getpid + findTempName pid + where + -- We split off the last extension, so we can use .foo.ext files + -- for temporary files (hidden on Unix OSes). Unfortunately we're + -- below file path in the hierarchy here. + (prefix,suffix) = + case break (== '.') $ reverse template of + -- First case: template contains no '.'s. Just re-reverse it. + (rev_suffix, "") -> (reverse rev_suffix, "") + -- Second case: template contains at least one '.'. Strip the + -- dot from the prefix and prepend it to the suffix (if we don't + -- do this, the unique number will get added after the '.' and + -- thus be part of the extension, which is wrong.) + (rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix) + -- Otherwise, something is wrong, because (break (== '.')) should + -- always return a pair with either the empty string or a string + -- beginning with '.' as the second component. + _ -> error "bug in System.IO.openTempFile" + + oflags = rw_flags .|. o_EXCL .|. 
o_BINARY + + findTempName x = do + fd <- withFilePath filepath $ \ f -> + c_open f oflags 0o666 + if fd < 0 + then do + errno <- getErrno + if errno == eEXIST + then findTempName (x+1) + else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir)) + else do + -- TODO: We want to tell fdToHandle what the file path is, + -- as any exceptions etc will only be able to report the + -- FD currently + h <- fdToHandle fd `onException` c_close fd + return (filepath, h) + where + filename = prefix ++ show x ++ suffix + filepath = dir `combine` filename + + -- FIXME: bits copied from System.FilePath + combine a b + | null b = a + | null a = b + | last a == pathSeparator = a ++ b + | otherwise = a ++ [pathSeparator] ++ b + +-- FIXME: Should use System.FilePath library +pathSeparator :: Char +#ifdef mingw32_HOST_OS +pathSeparator = '\\' +#else +pathSeparator = '/' +#endif + +-- FIXME: Copied from GHC.Handle +std_flags, output_flags, rw_flags :: CInt +std_flags = o_NONBLOCK .|. o_NOCTTY +output_flags = std_flags .|. o_CREAT +rw_flags = output_flags .|. o_RDWR + +createTempDirectory :: FilePath -> String -> IO FilePath +createTempDirectory dir template = do + pid <- c_getpid + findTempName pid + where + findTempName x = do + let dirpath = dir template ++ "-" ++ show x + r <- tryIO $ mkPrivateDir dirpath + case r of + Right _ -> return dirpath + Left e | isAlreadyExistsError e -> findTempName (x+1) + | otherwise -> ioError e + +mkPrivateDir :: String -> IO () +#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) +mkPrivateDir s = createDirectory s +#else +mkPrivateDir s = System.Posix.createDirectory s 0o700 +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/MonadFail.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/MonadFail.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/MonadFail.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/MonadFail.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,36 @@ +{-# LANGUAGE CPP #-} + +-- | Compatibility layer for "Control.Monad.Fail" +module Distribution.Compat.MonadFail ( MonadFail(fail) ) where +#if __GLASGOW_HASKELL__ >= 800 +-- provided by base-4.9.0.0 and later +import Control.Monad.Fail (MonadFail(fail)) +#else +-- the following code corresponds to +-- http://hackage.haskell.org/package/fail-4.9.0.0 +import qualified Prelude as P +import Prelude hiding (fail) + +import Text.ParserCombinators.ReadP +import Text.ParserCombinators.ReadPrec + +class Monad m => MonadFail m where + fail :: String -> m a + +-- instances provided by base-4.9 + +instance MonadFail Maybe where + fail _ = Nothing + +instance MonadFail [] where + fail _ = [] + +instance MonadFail IO where + fail = P.fail + +instance MonadFail ReadPrec where + fail = P.fail -- = P (\_ -> fail s) + +instance MonadFail ReadP where + fail = P.fail +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/ReadP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/ReadP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/ReadP.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,402 @@ 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.ReadP +-- Copyright : (c) The University of Glasgow 2002 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This is a library of parser combinators, originally written by Koen Claessen. +-- It parses all alternatives in parallel, so it never keeps hold of +-- the beginning of the input string, a common source of space leaks with +-- other parsers. The '(+++)' choice combinator is genuinely commutative; +-- it makes no difference which branch is \"shorter\". +-- +-- See also Koen's paper /Parallel Parsing Processes/ +-- (). +-- +-- This version of ReadP has been locally hacked to make it H98, by +-- Martin Sjögren +-- +-- The unit tests have been moved to UnitTest.Distribution.Compat.ReadP, by +-- Mark Lentczner +----------------------------------------------------------------------------- + +module Distribution.Compat.ReadP + ( + -- * The 'ReadP' type + ReadP, -- :: * -> *; instance Functor, Monad, MonadPlus + + -- * Primitive operations + get, -- :: ReadP Char + look, -- :: ReadP String + (+++), -- :: ReadP a -> ReadP a -> ReadP a + (<++), -- :: ReadP a -> ReadP a -> ReadP a + gather, -- :: ReadP a -> ReadP (String, a) + + -- * Other operations + pfail, -- :: ReadP a + satisfy, -- :: (Char -> Bool) -> ReadP Char + char, -- :: Char -> ReadP Char + string, -- :: String -> ReadP String + munch, -- :: (Char -> Bool) -> ReadP String + munch1, -- :: (Char -> Bool) -> ReadP String + skipSpaces, -- :: ReadP () + choice, -- :: [ReadP a] -> ReadP a + count, -- :: Int -> ReadP a -> ReadP [a] + between, -- :: ReadP open -> ReadP close -> ReadP a -> ReadP a + option, -- :: a -> ReadP a -> ReadP a + optional, -- :: ReadP a -> ReadP () + many, -- :: ReadP a -> ReadP [a] + many1, -- :: ReadP a -> ReadP [a] + skipMany, -- :: ReadP a -> ReadP () + skipMany1, -- :: ReadP a -> ReadP () + sepBy, -- :: ReadP a -> ReadP sep -> ReadP [a] + sepBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] + endBy, -- :: ReadP a -> ReadP sep -> ReadP [a] + endBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] + chainr, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a + chainl, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a + chainl1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a + chainr1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a + manyTill, -- :: ReadP a -> ReadP end -> ReadP [a] + + -- * Running a parser + ReadS, -- :: *; = String -> [(a,String)] + readP_to_S, -- :: ReadP a -> ReadS a + readS_to_P -- :: ReadS a -> ReadP a + ) + where + +import qualified Distribution.Compat.MonadFail as Fail + +import Control.Monad( MonadPlus(..), liftM, liftM2, replicateM, ap, (>=>) ) +import Data.Char (isSpace) +import Control.Applicative as AP (Applicative(..), Alternative(empty, (<|>))) + +infixr 5 +++, <++ + +-- --------------------------------------------------------------------------- +-- The P type +-- is representation type -- should be kept abstract + +data P s a + = Get (s -> P s a) + | Look ([s] -> P s a) + | Fail + | Result a (P s a) + | Final [(a,[s])] -- invariant: list is non-empty! 
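-- Editor's note (illustrative sketch, not part of the patch): for intuition,
-- a parser that consumes one character and succeeds only on 'x' can be built
-- directly from these constructors; 'onlyX' is a hypothetical name.
onlyX :: P Char Char
onlyX = Get (\c -> if c == 'x' then Result c Fail else Fail)
-- With 'run' (defined further below): run onlyX "xyz" == [('x',"yz")],
-- and run onlyX "abc" == [].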
+ +-- Monad, MonadPlus + +instance Functor (P s) where + fmap = liftM + +instance Applicative (P s) where + pure x = Result x Fail + (<*>) = ap + +instance Monad (P s) where + return = AP.pure + + (Get f) >>= k = Get (f >=> k) + (Look f) >>= k = Look (f >=> k) + Fail >>= _ = Fail + (Result x p) >>= k = k x `mplus` (p >>= k) + (Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s] + + fail = Fail.fail + +instance Fail.MonadFail (P s) where + fail _ = Fail + +instance Alternative (P s) where + empty = mzero + (<|>) = mplus + +instance MonadPlus (P s) where + mzero = Fail + + -- most common case: two gets are combined + Get f1 `mplus` Get f2 = Get (\c -> f1 c `mplus` f2 c) + + -- results are delivered as soon as possible + Result x p `mplus` q = Result x (p `mplus` q) + p `mplus` Result x q = Result x (p `mplus` q) + + -- fail disappears + Fail `mplus` p = p + p `mplus` Fail = p + + -- two finals are combined + -- final + look becomes one look and one final (=optimization) + -- final + sthg else becomes one look and one final + Final r `mplus` Final t = Final (r ++ t) + Final r `mplus` Look f = Look (\s -> Final (r ++ run (f s) s)) + Final r `mplus` p = Look (\s -> Final (r ++ run p s)) + Look f `mplus` Final r = Look (\s -> Final (run (f s) s ++ r)) + p `mplus` Final r = Look (\s -> Final (run p s ++ r)) + + -- two looks are combined (=optimization) + -- look + sthg else floats upwards + Look f `mplus` Look g = Look (\s -> f s `mplus` g s) + Look f `mplus` p = Look (\s -> f s `mplus` p) + p `mplus` Look f = Look (\s -> p `mplus` f s) + +-- --------------------------------------------------------------------------- +-- The ReadP type + +newtype Parser r s a = R ((a -> P s r) -> P s r) +type ReadP r a = Parser r Char a + +-- Functor, Monad, MonadPlus + +instance Functor (Parser r s) where + fmap h (R f) = R (\k -> f (k . h)) + +instance Applicative (Parser r s) where + pure x = R (\k -> k x) + (<*>) = ap + +instance Monad (Parser r s) where + return = AP.pure + fail = Fail.fail + R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k)) + +instance Fail.MonadFail (Parser r s) where + fail _ = R (const Fail) + +--instance MonadPlus (Parser r s) where +-- mzero = pfail +-- mplus = (+++) + +-- --------------------------------------------------------------------------- +-- Operations over P + +final :: [(a,[s])] -> P s a +-- Maintains invariant for Final constructor +final [] = Fail +final r = Final r + +run :: P c a -> ([c] -> [(a, [c])]) +run (Get f) (c:s) = run (f c) s +run (Look f) s = run (f s) s +run (Result x p) s = (x,s) : run p s +run (Final r) _ = r +run _ _ = [] + +-- --------------------------------------------------------------------------- +-- Operations over ReadP + +get :: ReadP r Char +-- ^ Consumes and returns the next character. +-- Fails if there is no input left. +get = R Get + +look :: ReadP r String +-- ^ Look-ahead: returns the part of the input that is left, without +-- consuming it. +look = R Look + +pfail :: ReadP r a +-- ^ Always fails. +pfail = R (const Fail) + +(+++) :: ReadP r a -> ReadP r a -> ReadP r a +-- ^ Symmetric choice. +R f1 +++ R f2 = R (\k -> f1 k `mplus` f2 k) + +(<++) :: ReadP a a -> ReadP r a -> ReadP r a +-- ^ Local, exclusive, left-biased choice: If left parser +-- locally produces any result at all, then right parser is +-- not used. 
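-- Editor's note (illustrative): e.g. readP_to_S (string "ab" <++ string "a")
-- applied to "abc" yields only [("ab","c")], whereas the symmetric '+++'
-- would also offer the ("a","bc") parse.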
+R f <++ q = + do s <- look + probe (f return) s 0 + where + probe (Get f') (c:s) n = probe (f' c) s (n+1 :: Int) + probe (Look f') s n = probe (f' s) s n + probe p@(Result _ _) _ n = discard n >> R (p >>=) + probe (Final r) _ _ = R (Final r >>=) + probe _ _ _ = q + + discard 0 = return () + discard n = get >> discard (n-1 :: Int) + +gather :: ReadP (String -> P Char r) a -> ReadP r (String, a) +-- ^ Transforms a parser into one that does the same, but +-- in addition returns the exact characters read. +-- IMPORTANT NOTE: 'gather' gives a runtime error if its first argument +-- is built using any occurrences of readS_to_P. +gather (R m) = + R (\k -> gath id (m (\a -> return (\s -> k (s,a))))) + where + gath l (Get f) = Get (\c -> gath (l.(c:)) (f c)) + gath _ Fail = Fail + gath l (Look f) = Look (gath l . f) + gath l (Result k p) = k (l []) `mplus` gath l p + gath _ (Final _) = error "do not use readS_to_P in gather!" + +-- --------------------------------------------------------------------------- +-- Derived operations + +satisfy :: (Char -> Bool) -> ReadP r Char +-- ^ Consumes and returns the next character, if it satisfies the +-- specified predicate. +satisfy p = do c <- get; if p c then return c else pfail + +char :: Char -> ReadP r Char +-- ^ Parses and returns the specified character. +char c = satisfy (c ==) + +string :: String -> ReadP r String +-- ^ Parses and returns the specified string. +string this = do s <- look; scan this s + where + scan [] _ = return this + scan (x:xs) (y:ys) | x == y = get >> scan xs ys + scan _ _ = pfail + +munch :: (Char -> Bool) -> ReadP r String +-- ^ Parses the first zero or more characters satisfying the predicate. +munch p = + do s <- look + scan s + where + scan (c:cs) | p c = do _ <- get; s <- scan cs; return (c:s) + scan _ = do return "" + +munch1 :: (Char -> Bool) -> ReadP r String +-- ^ Parses the first one or more characters satisfying the predicate. +munch1 p = + do c <- get + if p c then do s <- munch p; return (c:s) + else pfail + +choice :: [ReadP r a] -> ReadP r a +-- ^ Combines all parsers in the specified list. +choice [] = pfail +choice [p] = p +choice (p:ps) = p +++ choice ps + +skipSpaces :: ReadP r () +-- ^ Skips all whitespace. +skipSpaces = + do s <- look + skip s + where + skip (c:s) | isSpace c = do _ <- get; skip s + skip _ = do return () + +count :: Int -> ReadP r a -> ReadP r [a] +-- ^ @ count n p @ parses @n@ occurrences of @p@ in sequence. A list of +-- results is returned. +count n p = replicateM n p + +between :: ReadP r open -> ReadP r close -> ReadP r a -> ReadP r a +-- ^ @ between open close p @ parses @open@, followed by @p@ and finally +-- @close@. Only the value of @p@ is returned. +between open close p = do _ <- open + x <- p + _ <- close + return x + +option :: a -> ReadP r a -> ReadP r a +-- ^ @option x p@ will either parse @p@ or return @x@ without consuming +-- any input. +option x p = p +++ return x + +optional :: ReadP r a -> ReadP r () +-- ^ @optional p@ optionally parses @p@ and always returns @()@. +optional p = (p >> return ()) +++ return () + +many :: ReadP r a -> ReadP r [a] +-- ^ Parses zero or more occurrences of the given parser. +many p = return [] +++ many1 p + +many1 :: ReadP r a -> ReadP r [a] +-- ^ Parses one or more occurrences of the given parser. +many1 p = liftM2 (:) p (many p) + +skipMany :: ReadP r a -> ReadP r () +-- ^ Like 'many', but discards the result. +skipMany p = many p >> return () + +skipMany1 :: ReadP r a -> ReadP r () +-- ^ Like 'many1', but discards the result. 
+skipMany1 p = p >> skipMany p + +sepBy :: ReadP r a -> ReadP r sep -> ReadP r [a] +-- ^ @sepBy p sep@ parses zero or more occurrences of @p@, separated by @sep@. +-- Returns a list of values returned by @p@. +sepBy p sep = sepBy1 p sep +++ return [] + +sepBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] +-- ^ @sepBy1 p sep@ parses one or more occurrences of @p@, separated by @sep@. +-- Returns a list of values returned by @p@. +sepBy1 p sep = liftM2 (:) p (many (sep >> p)) + +endBy :: ReadP r a -> ReadP r sep -> ReadP r [a] +-- ^ @endBy p sep@ parses zero or more occurrences of @p@, separated and ended +-- by @sep@. +endBy p sep = many (do x <- p ; _ <- sep ; return x) + +endBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] +-- ^ @endBy p sep@ parses one or more occurrences of @p@, separated and ended +-- by @sep@. +endBy1 p sep = many1 (do x <- p ; _ <- sep ; return x) + +chainr :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a +-- ^ @chainr p op x@ parses zero or more occurrences of @p@, separated by @op@. +-- Returns a value produced by a /right/ associative application of all +-- functions returned by @op@. If there are no occurrences of @p@, @x@ is +-- returned. +chainr p op x = chainr1 p op +++ return x + +chainl :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a +-- ^ @chainl p op x@ parses zero or more occurrences of @p@, separated by @op@. +-- Returns a value produced by a /left/ associative application of all +-- functions returned by @op@. If there are no occurrences of @p@, @x@ is +-- returned. +chainl p op x = chainl1 p op +++ return x + +chainr1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a +-- ^ Like 'chainr', but parses one or more occurrences of @p@. +chainr1 p op = scan + where scan = p >>= rest + rest x = do f <- op + y <- scan + return (f x y) + +++ return x + +chainl1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a +-- ^ Like 'chainl', but parses one or more occurrences of @p@. +chainl1 p op = p >>= rest + where rest x = do f <- op + y <- p + rest (f x y) + +++ return x + +manyTill :: ReadP r a -> ReadP [a] end -> ReadP r [a] +-- ^ @manyTill p end@ parses zero or more occurrences of @p@, until @end@ +-- succeeds. Returns a list of values returned by @p@. +manyTill p end = scan + where scan = (end >> return []) <++ (liftM2 (:) p scan) + +-- --------------------------------------------------------------------------- +-- Converting between ReadP and Read + +readP_to_S :: ReadP a a -> ReadS a +-- ^ Converts a parser into a Haskell ReadS-style function. +-- This is the main way in which you can \"run\" a 'ReadP' parser: +-- the expanded type is +-- @ readP_to_S :: ReadP a -> String -> [(a,String)] @ +readP_to_S (R f) = run (f return) + +readS_to_P :: ReadS a -> ReadP r a +-- ^ Converts a Haskell ReadS-style function into a parser. +-- Warning: This introduces local backtracking in the resulting +-- parser, and therefore a possible inefficiency. 
+readS_to_P r = + R (\k -> Look (\s -> final [bs'' | (a,s') <- r s, bs'' <- run (k a) s'])) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Semigroup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Semigroup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compat/Semigroup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compat/Semigroup.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,171 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE TypeOperators #-} + +-- | Compatibility layer for "Data.Semigroup" +module Distribution.Compat.Semigroup + ( Semigroup((<>)) + , Mon.Monoid(..) + , All(..) + , Any(..) + + , Last'(..) + + , gmappend + , gmempty + ) where + +import Distribution.Compat.Binary (Binary) + +import Control.Applicative as App +import GHC.Generics +#if __GLASGOW_HASKELL__ >= 711 +-- Data.Semigroup is available since GHC 8.0/base-4.9 +import Data.Semigroup +import qualified Data.Monoid as Mon +#else +-- provide internal simplified non-exposed class for older GHCs +import Data.Monoid as Mon (Monoid(..), All(..), Any(..), Dual(..)) +-- containers +import Data.Set (Set) +import Data.IntSet (IntSet) +import Data.Map (Map) +import Data.IntMap (IntMap) + + +class Semigroup a where + (<>) :: a -> a -> a + +-- several primitive instances +instance Semigroup () where + _ <> _ = () + +instance Semigroup [a] where + (<>) = (++) + +instance Semigroup a => Semigroup (Dual a) where + Dual a <> Dual b = Dual (b <> a) + +instance Semigroup a => Semigroup (Maybe a) where + Nothing <> b = b + a <> Nothing = a + Just a <> Just b = Just (a <> b) + +instance Semigroup (Either a b) where + Left _ <> b = b + a <> _ = a + +instance Semigroup Ordering where + LT <> _ = LT + EQ <> y = y + GT <> _ = GT + +instance Semigroup b => Semigroup (a -> b) where + f <> g = \a -> f a <> g a + +instance Semigroup All where + All a <> All b = All (a && b) + +instance Semigroup Any where + Any a <> Any b = Any (a || b) + +instance (Semigroup a, Semigroup b) => Semigroup (a, b) where + (a,b) <> (a',b') = (a<>a',b<>b') + +instance (Semigroup a, Semigroup b, Semigroup c) + => Semigroup (a, b, c) where + (a,b,c) <> (a',b',c') = (a<>a',b<>b',c<>c') + +instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d) + => Semigroup (a, b, c, d) where + (a,b,c,d) <> (a',b',c',d') = (a<>a',b<>b',c<>c',d<>d') + +instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d, Semigroup e) + => Semigroup (a, b, c, d, e) where + (a,b,c,d,e) <> (a',b',c',d',e') = (a<>a',b<>b',c<>c',d<>d',e<>e') + +-- containers instances +instance Semigroup IntSet where + (<>) = mappend + +instance Ord a => Semigroup (Set a) where + (<>) = mappend + +instance Semigroup (IntMap v) where + (<>) = mappend + +instance Ord k => Semigroup (Map k v) where + (<>) = mappend +#endif + +-- | Cabal's own 'Data.Monoid.Last' copy to avoid requiring an orphan +-- 'Binary' instance. +-- +-- Once the oldest `binary` version we support provides a 'Binary' +-- instance for 'Data.Monoid.Last' we can remove this one here. 
+-- +-- NB: 'Data.Semigroup.Last' is defined differently and not a 'Monoid' +newtype Last' a = Last' { getLast' :: Maybe a } + deriving (Eq, Ord, Read, Show, Binary, + Functor, App.Applicative, Generic) + +instance Semigroup (Last' a) where + x <> Last' Nothing = x + _ <> x = x + +instance Monoid (Last' a) where + mempty = Last' Nothing + mappend = (<>) + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Stolen from Edward Kmett's BSD3-licensed `semigroups` package + +-- | Generically generate a 'Semigroup' ('<>') operation for any type +-- implementing 'Generic'. This operation will append two values +-- by point-wise appending their component fields. It is only defined +-- for product types. +-- +-- @ +-- 'gmappend' a ('gmappend' b c) = 'gmappend' ('gmappend' a b) c +-- @ +gmappend :: (Generic a, GSemigroup (Rep a)) => a -> a -> a +gmappend x y = to (gmappend' (from x) (from y)) + +class GSemigroup f where + gmappend' :: f p -> f p -> f p + +instance Semigroup a => GSemigroup (K1 i a) where + gmappend' (K1 x) (K1 y) = K1 (x <> y) + +instance GSemigroup f => GSemigroup (M1 i c f) where + gmappend' (M1 x) (M1 y) = M1 (gmappend' x y) + +instance (GSemigroup f, GSemigroup g) => GSemigroup (f :*: g) where + gmappend' (x1 :*: x2) (y1 :*: y2) = gmappend' x1 y1 :*: gmappend' x2 y2 + +-- | Generically generate a 'Monoid' 'mempty' for any product-like type +-- implementing 'Generic'. +-- +-- It is only defined for product types. +-- +-- @ +-- 'gmappend' 'gmempty' a = a = 'gmappend' a 'gmempty' +-- @ + +gmempty :: (Generic a, GMonoid (Rep a)) => a +gmempty = to gmempty' + +class GSemigroup f => GMonoid f where + gmempty' :: f p + +instance (Semigroup a, Monoid a) => GMonoid (K1 i a) where + gmempty' = K1 mempty + +instance GMonoid f => GMonoid (M1 i c f) where + gmempty' = M1 gmempty' + +instance (GMonoid f, GMonoid g) => GMonoid (f :*: g) where + gmempty' = gmempty' :*: gmempty' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compiler.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Compiler.hs 2016-12-23 10:35:20.000000000 +0000 @@ -0,0 +1,204 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compiler +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This has an enumeration of the various compilers that Cabal knows about. It +-- also specifies the default compiler. Sadly you'll often see code that does +-- case analysis on this compiler flavour enumeration like: +-- +-- > case compilerFlavor comp of +-- > GHC -> GHC.getInstalledPackages verbosity packageDb progconf +-- > JHC -> JHC.getInstalledPackages verbosity packageDb progconf +-- +-- Obviously it would be better to use the proper 'Compiler' abstraction +-- because that would keep all the compiler-specific code together. 
+-- Unfortunately we cannot make this change yet without breaking the +-- 'UserHooks' api, which would break all custom @Setup.hs@ files, so for the +-- moment we just have to live with this deficiency. If you're interested, see +-- ticket #57. + +module Distribution.Compiler ( + -- * Compiler flavor + CompilerFlavor(..), + buildCompilerId, + buildCompilerFlavor, + defaultCompilerFlavor, + parseCompilerFlavorCompat, + + -- * Compiler id + CompilerId(..), + + -- * Compiler info + CompilerInfo(..), + unknownCompilerInfo, + AbiTag(..), abiTagString + ) where + +import Distribution.Compat.Binary +import Language.Haskell.Extension + +import Data.Data (Data) +import Data.Typeable (Typeable) +import Data.Maybe (fromMaybe) +import Distribution.Version (Version(..)) +import GHC.Generics (Generic) + +import qualified System.Info (compilerName, compilerVersion) +import Distribution.Text (Text(..), display) +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ((<>)) + +import qualified Data.Char as Char (toLower, isDigit, isAlphaNum) +import Control.Monad (when) + +data CompilerFlavor = GHC | GHCJS | NHC | YHC | Hugs | HBC | Helium | JHC | LHC | UHC + | HaskellSuite String -- string is the id of the actual compiler + | OtherCompiler String + deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) + +instance Binary CompilerFlavor + +knownCompilerFlavors :: [CompilerFlavor] +knownCompilerFlavors = [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC] + +instance Text CompilerFlavor where + disp (OtherCompiler name) = Disp.text name + disp (HaskellSuite name) = Disp.text name + disp NHC = Disp.text "nhc98" + disp other = Disp.text (lowercase (show other)) + + parse = do + comp <- Parse.munch1 Char.isAlphaNum + when (all Char.isDigit comp) Parse.pfail + return (classifyCompilerFlavor comp) + +classifyCompilerFlavor :: String -> CompilerFlavor +classifyCompilerFlavor s = + fromMaybe (OtherCompiler s) $ lookup (lowercase s) compilerMap + where + compilerMap = [ (display compiler, compiler) + | compiler <- knownCompilerFlavors ] + + +--TODO: In some future release, remove 'parseCompilerFlavorCompat' and use +-- ordinary 'parse'. Also add ("nhc", NHC) to the above 'compilerMap'. + +-- | Like 'classifyCompilerFlavor' but compatible with the old ReadS parser. +-- +-- It is compatible in the sense that it accepts only the same strings, +-- eg "GHC" but not "ghc". However other strings get mapped to 'OtherCompiler'. +-- The point of this is that we do not allow extra valid values that would +-- upset older Cabal versions that had a stricter parser however we cope with +-- new values more gracefully so that we'll be able to introduce new value in +-- future without breaking things so much. +-- +parseCompilerFlavorCompat :: Parse.ReadP r CompilerFlavor +parseCompilerFlavorCompat = do + comp <- Parse.munch1 Char.isAlphaNum + when (all Char.isDigit comp) Parse.pfail + case lookup comp compilerMap of + Just compiler -> return compiler + Nothing -> return (OtherCompiler comp) + where + compilerMap = [ (show compiler, compiler) + | compiler <- knownCompilerFlavors + , compiler /= YHC ] + +buildCompilerFlavor :: CompilerFlavor +buildCompilerFlavor = classifyCompilerFlavor System.Info.compilerName + +buildCompilerVersion :: Version +buildCompilerVersion = System.Info.compilerVersion + +buildCompilerId :: CompilerId +buildCompilerId = CompilerId buildCompilerFlavor buildCompilerVersion + +-- | The default compiler flavour to pick when compiling stuff. 
This defaults +-- to the compiler used to build the Cabal lib. +-- +-- However if it's not a recognised compiler then it's 'Nothing' and the user +-- will have to specify which compiler they want. +-- +defaultCompilerFlavor :: Maybe CompilerFlavor +defaultCompilerFlavor = case buildCompilerFlavor of + OtherCompiler _ -> Nothing + _ -> Just buildCompilerFlavor + +-- ------------------------------------------------------------ +-- * Compiler Id +-- ------------------------------------------------------------ + +data CompilerId = CompilerId CompilerFlavor Version + deriving (Eq, Generic, Ord, Read, Show) + +instance Binary CompilerId + +instance Text CompilerId where + disp (CompilerId f (Version [] _)) = disp f + disp (CompilerId f v) = disp f <> Disp.char '-' <> disp v + + parse = do + flavour <- parse + version <- (Parse.char '-' >> parse) Parse.<++ return (Version [] []) + return (CompilerId flavour version) + +lowercase :: String -> String +lowercase = map Char.toLower + +-- ------------------------------------------------------------ +-- * Compiler Info +-- ------------------------------------------------------------ + +-- | Compiler information used for resolving configurations. Some fields can be +-- set to Nothing to indicate that the information is unknown. + +data CompilerInfo = CompilerInfo { + compilerInfoId :: CompilerId, + -- ^ Compiler flavour and version. + compilerInfoAbiTag :: AbiTag, + -- ^ Tag for distinguishing incompatible ABI's on the same architecture/os. + compilerInfoCompat :: Maybe [CompilerId], + -- ^ Other implementations that this compiler claims to be compatible with, if known. + compilerInfoLanguages :: Maybe [Language], + -- ^ Supported language standards, if known. + compilerInfoExtensions :: Maybe [Extension] + -- ^ Supported extensions, if known. + } + deriving (Generic, Show, Read) + +instance Binary CompilerInfo + +data AbiTag + = NoAbiTag + | AbiTag String + deriving (Eq, Generic, Show, Read) + +instance Binary AbiTag + +instance Text AbiTag where + disp NoAbiTag = Disp.empty + disp (AbiTag tag) = Disp.text tag + + parse = do + tag <- Parse.munch (\c -> Char.isAlphaNum c || c == '_') + if null tag then return NoAbiTag else return (AbiTag tag) + +abiTagString :: AbiTag -> String +abiTagString NoAbiTag = "" +abiTagString (AbiTag tag) = tag + +-- | Make a CompilerInfo of which only the known information is its CompilerId, +-- its AbiTag and that it does not claim to be compatible with other +-- compiler id's. +unknownCompilerInfo :: CompilerId -> AbiTag -> CompilerInfo +unknownCompilerInfo compilerId abiTag = + CompilerInfo compilerId abiTag (Just []) Nothing Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/GetOpt.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/GetOpt.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/GetOpt.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/GetOpt.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,335 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.GetOpt +-- Copyright : (c) Sven Panne 2002-2005 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This library provides facilities for parsing the command-line options +-- in a standalone program. 
It is essentially a Haskell port of the GNU +-- @getopt@ library. +-- +----------------------------------------------------------------------------- + +{- +Sven Panne Oct. 1996 (small +changes Dec. 1997) + +Two rather obscure features are missing: The Bash 2.0 non-option hack +(if you don't already know it, you probably don't want to hear about +it...) and the recognition of long options with a single dash +(e.g. '-help' is recognised as '--help', as long as there is no short +option 'h'). + +Other differences between GNU's getopt and this implementation: + +* To enforce a coherent description of options and arguments, there + are explanation fields in the option/argument descriptor. + +* Error messages are now more informative, but no longer POSIX + compliant... :-( + +And a final Haskell advertisement: The GNU C implementation uses well +over 1100 lines, we need only 195 here, including a 46 line example! +:-) +-} + +{-# OPTIONS_HADDOCK hide #-} +module Distribution.GetOpt ( + -- * GetOpt + getOpt, getOpt', + usageInfo, + ArgOrder(..), + OptDescr(..), + ArgDescr(..), + + -- * Example + + -- $example +) where + +import Data.List ( isPrefixOf, intercalate, find ) +import Data.Maybe ( isJust ) + +-- |What to do with options following non-options +data ArgOrder a + = RequireOrder -- ^ no option processing after first non-option + | Permute -- ^ freely intersperse options and non-options + | ReturnInOrder (String -> a) -- ^ wrap non-options into options + +{-| +Each 'OptDescr' describes a single option. + +The arguments to 'Option' are: + +* list of short option characters + +* list of long option strings (without \"--\") + +* argument descriptor + +* explanation of option for user +-} +data OptDescr a = -- description of a single options: + Option [Char] -- list of short option characters + [String] -- list of long option strings (without "--") + (ArgDescr a) -- argument descriptor + String -- explanation of option for user + +-- |Describes whether an option takes an argument or not, and if so +-- how the argument is injected into a value of type @a@. +data ArgDescr a + = NoArg a -- ^ no argument expected + | ReqArg (String -> a) String -- ^ option requires argument + | OptArg (Maybe String -> a) String -- ^ optional argument + +data OptKind a -- kind of cmd line arg (internal use only): + = Opt a -- an option + | UnreqOpt String -- an un-recognized option + | NonOpt String -- a non-option + | EndOfOpts -- end-of-options marker (i.e. "--") + | OptErr String -- something went wrong... + +-- | Return a string describing the usage of a command, derived from +-- the header (first argument) and the options described by the +-- second argument. +usageInfo :: String -- header + -> [OptDescr a] -- option descriptors + -> String -- nicely formatted decription of options +usageInfo header optDescr = unlines (header:table) + where (ss,ls,ds) = unzip3 [ (intercalate ", " (map (fmtShort ad) sos) + ,concatMap (fmtLong ad) (take 1 los) + ,d) + | Option sos los ad d <- optDescr ] + ssWidth = (maximum . map length) ss + lsWidth = (maximum . 
map length) ls + dsWidth = 30 `max` (80 - (ssWidth + lsWidth + 3)) + table = [ " " ++ padTo ssWidth so' ++ + " " ++ padTo lsWidth lo' ++ + " " ++ d' + | (so,lo,d) <- zip3 ss ls ds + , (so',lo',d') <- fmtOpt dsWidth so lo d ] + padTo n x = take n (x ++ repeat ' ') + +fmtOpt :: Int -> String -> String -> String -> [(String, String, String)] +fmtOpt descrWidth so lo descr = + case wrapText descrWidth descr of + [] -> [(so,lo,"")] + (d:ds) -> (so,lo,d) : [ ("","",d') | d' <- ds ] + +fmtShort :: ArgDescr a -> Char -> String +fmtShort (NoArg _ ) so = "-" ++ [so] +fmtShort (ReqArg _ _) so = "-" ++ [so] +fmtShort (OptArg _ _) so = "-" ++ [so] + +fmtLong :: ArgDescr a -> String -> String +fmtLong (NoArg _ ) lo = "--" ++ lo +fmtLong (ReqArg _ ad) lo = "--" ++ lo ++ "=" ++ ad +fmtLong (OptArg _ ad) lo = "--" ++ lo ++ "[=" ++ ad ++ "]" + +wrapText :: Int -> String -> [String] +wrapText width = map unwords . wrap 0 [] . words + where wrap :: Int -> [String] -> [String] -> [[String]] + wrap 0 [] (w:ws) + | length w + 1 > width + = wrap (length w) [w] ws + wrap col line (w:ws) + | col + length w + 1 > width + = reverse line : wrap 0 [] (w:ws) + wrap col line (w:ws) + = let col' = col + length w + 1 + in wrap col' (w:line) ws + wrap _ [] [] = [] + wrap _ line [] = [reverse line] + +{-| +Process the command-line, and return the list of values that matched +(and those that didn\'t). The arguments are: + +* The order requirements (see 'ArgOrder') + +* The option descriptions (see 'OptDescr') + +* The actual command line arguments (presumably got from + 'System.Environment.getArgs'). + +'getOpt' returns a triple consisting of the option arguments, a list +of non-options, and a list of error messages. +-} +getOpt :: ArgOrder a -- non-option handling + -> [OptDescr a] -- option descriptors + -> [String] -- the command-line arguments + -> ([a],[String],[String]) -- (options,non-options,error messages) +getOpt ordering optDescr args = (os,xs,es ++ map errUnrec us) + where (os,xs,us,es) = getOpt' ordering optDescr args + +{-| +This is almost the same as 'getOpt', but returns a quadruple +consisting of the option arguments, a list of non-options, a list of +unrecognized options, and a list of error messages. 
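For instance, with the @options@ list and @Flag@ type from the example at the end of
this module (an assumption; any 'OptDescr' list behaves the same way), an unrecognised
flag ends up in the third component rather than among the error messages:

> getOpt' Permute options ["foo", "-v", "--bogus"]
>    ==> ([Verbose], ["foo"], ["--bogus"], [])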
+-} +getOpt' :: ArgOrder a -- non-option handling + -> [OptDescr a] -- option descriptors + -> [String] -- the command-line arguments + -> ([a],[String], [String] ,[String]) -- (options,non-options,unrecognized,error messages) +getOpt' _ _ [] = ([],[],[],[]) +getOpt' ordering optDescr (arg:args) = procNextOpt opt ordering + where procNextOpt (Opt o) _ = (o:os,xs,us,es) + procNextOpt (UnreqOpt u) _ = (os,xs,u:us,es) + procNextOpt (NonOpt x) RequireOrder = ([],x:rest,[],[]) + procNextOpt (NonOpt x) Permute = (os,x:xs,us,es) + procNextOpt (NonOpt x) (ReturnInOrder f) = (f x :os, xs,us,es) + procNextOpt EndOfOpts RequireOrder = ([],rest,[],[]) + procNextOpt EndOfOpts Permute = ([],rest,[],[]) + procNextOpt EndOfOpts (ReturnInOrder f) = (map f rest,[],[],[]) + procNextOpt (OptErr e) _ = (os,xs,us,e:es) + + (opt,rest) = getNext arg args optDescr + (os,xs,us,es) = getOpt' ordering optDescr rest + +-- take a look at the next cmd line arg and decide what to do with it +getNext :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +getNext ('-':'-':[]) rest _ = (EndOfOpts,rest) +getNext ('-':'-':xs) rest optDescr = longOpt xs rest optDescr +getNext ('-': x :xs) rest optDescr = shortOpt x xs rest optDescr +getNext a rest _ = (NonOpt a,rest) + +-- handle long option +longOpt :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +longOpt ls rs optDescr = long ads arg rs + where (opt,arg) = break (=='=') ls + getWith p = [ o | o@(Option _ xs _ _) <- optDescr + , isJust (find (p opt) xs)] + exact = getWith (==) + options = if null exact then getWith isPrefixOf else exact + ads = [ ad | Option _ _ ad _ <- options ] + optStr = "--" ++ opt + + long (_:_:_) _ rest = (errAmbig options optStr,rest) + long [NoArg a ] [] rest = (Opt a,rest) + long [NoArg _ ] ('=':_) rest = (errNoArg optStr,rest) + long [ReqArg _ d] [] [] = (errReq d optStr,[]) + long [ReqArg f _] [] (r:rest) = (Opt (f r),rest) + long [ReqArg f _] ('=':xs) rest = (Opt (f xs),rest) + long [OptArg f _] [] rest = (Opt (f Nothing),rest) + long [OptArg f _] ('=':xs) rest = (Opt (f (Just xs)),rest) + long _ _ rest = (UnreqOpt ("--"++ls),rest) + +-- handle short option +shortOpt :: Char -> String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +shortOpt y ys rs optDescr = short ads ys rs + where options = [ o | o@(Option ss _ _ _) <- optDescr, s <- ss, y == s ] + ads = [ ad | Option _ _ ad _ <- options ] + optStr = '-':[y] + + short (_:_:_) _ rest = (errAmbig options optStr,rest) + short (NoArg a :_) [] rest = (Opt a,rest) + short (NoArg a :_) xs rest = (Opt a,('-':xs):rest) + short (ReqArg _ d:_) [] [] = (errReq d optStr,[]) + short (ReqArg f _:_) [] (r:rest) = (Opt (f r),rest) + short (ReqArg f _:_) xs rest = (Opt (f xs),rest) + short (OptArg f _:_) [] rest = (Opt (f Nothing),rest) + short (OptArg f _:_) xs rest = (Opt (f (Just xs)),rest) + short [] [] rest = (UnreqOpt optStr,rest) + short [] xs rest = (UnreqOpt (optStr++xs),rest) + +-- miscellaneous error formatting + +errAmbig :: [OptDescr a] -> String -> OptKind a +errAmbig ods optStr = OptErr (usageInfo header ods) + where header = "option `" ++ optStr ++ "' is ambiguous; could be one of:" + +errReq :: String -> String -> OptKind a +errReq d optStr = OptErr ("option `" ++ optStr ++ "' requires an argument " ++ d ++ "\n") + +errUnrec :: String -> String +errUnrec optStr = "unrecognized option `" ++ optStr ++ "'\n" + +errNoArg :: String -> OptKind a +errNoArg optStr = OptErr ("option `" ++ optStr ++ "' doesn't allow an argument\n") + +{- 
+----------------------------------------------------------------------------------------- +-- and here a small and hopefully enlightening example: + +data Flag = Verbose | Version | Name String | Output String | Arg String deriving Show + +options :: [OptDescr Flag] +options = + [Option ['v'] ["verbose"] (NoArg Verbose) "verbosely list files", + Option ['V','?'] ["version","release"] (NoArg Version) "show version info", + Option ['o'] ["output"] (OptArg out "FILE") "use FILE for dump", + Option ['n'] ["name"] (ReqArg Name "USER") "only dump USER's files"] + +out :: Maybe String -> Flag +out Nothing = Output "stdout" +out (Just o) = Output o + +test :: ArgOrder Flag -> [String] -> String +test order cmdline = case getOpt order options cmdline of + (o,n,[] ) -> "options=" ++ show o ++ " args=" ++ show n ++ "\n" + (_,_,errs) -> concat errs ++ usageInfo header options + where header = "Usage: foobar [OPTION...] files..." + +-- example runs: +-- putStr (test RequireOrder ["foo","-v"]) +-- ==> options=[] args=["foo", "-v"] +-- putStr (test Permute ["foo","-v"]) +-- ==> options=[Verbose] args=["foo"] +-- putStr (test (ReturnInOrder Arg) ["foo","-v"]) +-- ==> options=[Arg "foo", Verbose] args=[] +-- putStr (test Permute ["foo","--","-v"]) +-- ==> options=[] args=["foo", "-v"] +-- putStr (test Permute ["-?o","--name","bar","--na=baz"]) +-- ==> options=[Version, Output "stdout", Name "bar", Name "baz"] args=[] +-- putStr (test Permute ["--ver","foo"]) +-- ==> option `--ver' is ambiguous; could be one of: +-- -v --verbose verbosely list files +-- -V, -? --version, --release show version info +-- Usage: foobar [OPTION...] files... +-- -v --verbose verbosely list files +-- -V, -? --version, --release show version info +-- -o[FILE] --output[=FILE] use FILE for dump +-- -n USER --name=USER only dump USER's files +----------------------------------------------------------------------------------------- +-} + +{- $example + +To hopefully illuminate the role of the different data +structures, here\'s the command-line options for a (very simple) +compiler: + +> module Opts where +> +> import Distribution.GetOpt +> import Data.Maybe ( fromMaybe ) +> +> data Flag +> = Verbose | Version +> | Input String | Output String | LibDir String +> deriving Show +> +> options :: [OptDescr Flag] +> options = +> [ Option ['v'] ["verbose"] (NoArg Verbose) "chatty output on stderr" +> , Option ['V','?'] ["version"] (NoArg Version) "show version number" +> , Option ['o'] ["output"] (OptArg outp "FILE") "output FILE" +> , Option ['c'] [] (OptArg inp "FILE") "input FILE" +> , Option ['L'] ["libdir"] (ReqArg LibDir "DIR") "library directory" +> ] +> +> inp,outp :: Maybe String -> Flag +> outp = Output . fromMaybe "stdout" +> inp = Input . fromMaybe "stdin" +> +> compilerOpts :: [String] -> IO ([Flag], [String]) +> compilerOpts argv = +> case getOpt Permute options argv of +> (o,n,[] ) -> return (o,n) +> (_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options)) +> where header = "Usage: ic [OPTION...] files..." 
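Given those definitions, a @main@ that drives @compilerOpts@ might look like this
(a sketch; 'System.Environment.getArgs' is assumed to be in scope):

> main :: IO ()
> main = do
>   argv <- getArgs
>   (flags, files) <- compilerOpts argv
>   print flags            -- e.g. [Verbose, Output "stdout"]
>   mapM_ putStrLn files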
+ +-} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/InstalledPackageInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/InstalledPackageInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/InstalledPackageInfo.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,372 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.InstalledPackageInfo +-- Copyright : (c) The University of Glasgow 2004 +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This is the information about an /installed/ package that +-- is communicated to the @ghc-pkg@ program in order to register +-- a package. @ghc-pkg@ now consumes this package format (as of version +-- 6.4). This is specific to GHC at the moment. +-- +-- The @.cabal@ file format is for describing a package that is not yet +-- installed. It has a lot of flexibility, like conditionals and dependency +-- ranges. As such, that format is not at all suitable for describing a package +-- that has already been built and installed. By the time we get to that stage, +-- we have resolved all conditionals and resolved dependency version +-- constraints to exact versions of dependent packages. So, this module defines +-- the 'InstalledPackageInfo' data structure that contains all the info we keep +-- about an installed package. There is a parser and pretty printer. The +-- textual format is rather simpler than the @.cabal@ format: there are no +-- sections, for example. + +-- This module is meant to be local-only to Distribution... 
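As a rough sketch of how this module is typically driven (the file name below is
illustrative; real input normally comes from @ghc-pkg describe@ or a package
database entry in this textual format):

> import Distribution.InstalledPackageInfo
>
> main :: IO ()
> main = do
>   s <- readFile "example.conf"   -- illustrative path
>   case parseInstalledPackageInfo s of
>     ParseFailed _err   -> putStrLn "parse failed"
>     ParseOk _warns ipi -> putStrLn (showInstalledPackageInfo ipi)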
+ +module Distribution.InstalledPackageInfo ( + InstalledPackageInfo(..), + installedComponentId, + installedPackageId, + OriginalModule(..), ExposedModule(..), + ParseResult(..), PError(..), PWarning, + emptyInstalledPackageInfo, + parseInstalledPackageInfo, + showInstalledPackageInfo, + showInstalledPackageInfoField, + showSimpleInstalledPackageInfoField, + fieldsInstalledPackageInfo, + ) where + +import Distribution.ParseUtils +import Distribution.License +import Distribution.Package hiding (installedUnitId, installedPackageId) +import qualified Distribution.Package as Package +import Distribution.ModuleName +import Distribution.Version +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.Binary + +import Text.PrettyPrint as Disp +import Data.Maybe (fromMaybe) +import GHC.Generics (Generic) + +-- ----------------------------------------------------------------------------- +-- The InstalledPackageInfo type + +-- For BC reasons, we continue to name this record an InstalledPackageInfo; +-- but it would more accurately be called an InstalledUnitInfo with Backpack +data InstalledPackageInfo + = InstalledPackageInfo { + -- these parts are exactly the same as PackageDescription + sourcePackageId :: PackageId, + installedUnitId :: UnitId, + compatPackageKey :: String, + license :: License, + copyright :: String, + maintainer :: String, + author :: String, + stability :: String, + homepage :: String, + pkgUrl :: String, + synopsis :: String, + description :: String, + category :: String, + -- these parts are required by an installed package only: + abiHash :: AbiHash, + exposed :: Bool, + exposedModules :: [ExposedModule], + hiddenModules :: [ModuleName], + trusted :: Bool, + importDirs :: [FilePath], + libraryDirs :: [FilePath], + libraryDynDirs :: [FilePath], + dataDir :: FilePath, + hsLibraries :: [String], + extraLibraries :: [String], + extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi + includeDirs :: [FilePath], + includes :: [String], + depends :: [UnitId], + ccOptions :: [String], + ldOptions :: [String], + frameworkDirs :: [FilePath], + frameworks :: [String], + haddockInterfaces :: [FilePath], + haddockHTMLs :: [FilePath], + pkgRoot :: Maybe FilePath + } + deriving (Eq, Generic, Read, Show) + +installedComponentId :: InstalledPackageInfo -> ComponentId +installedComponentId ipi = case installedUnitId ipi of + SimpleUnitId cid -> cid + +{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} +-- | Backwards compatibility with Cabal pre-1.24. 
+installedPackageId :: InstalledPackageInfo -> UnitId +installedPackageId = installedUnitId + +instance Binary InstalledPackageInfo + +instance Package.Package InstalledPackageInfo where + packageId = sourcePackageId + +instance Package.HasUnitId InstalledPackageInfo where + installedUnitId = installedUnitId + +instance Package.PackageInstalled InstalledPackageInfo where + installedDepends = depends + +emptyInstalledPackageInfo :: InstalledPackageInfo +emptyInstalledPackageInfo + = InstalledPackageInfo { + sourcePackageId = PackageIdentifier (PackageName "") (Version [] []), + installedUnitId = mkUnitId "", + compatPackageKey = "", + license = UnspecifiedLicense, + copyright = "", + maintainer = "", + author = "", + stability = "", + homepage = "", + pkgUrl = "", + synopsis = "", + description = "", + category = "", + abiHash = AbiHash "", + exposed = False, + exposedModules = [], + hiddenModules = [], + trusted = False, + importDirs = [], + libraryDirs = [], + libraryDynDirs = [], + dataDir = "", + hsLibraries = [], + extraLibraries = [], + extraGHCiLibraries= [], + includeDirs = [], + includes = [], + depends = [], + ccOptions = [], + ldOptions = [], + frameworkDirs = [], + frameworks = [], + haddockInterfaces = [], + haddockHTMLs = [], + pkgRoot = Nothing + } + +-- ----------------------------------------------------------------------------- +-- Exposed modules + +data OriginalModule + = OriginalModule { + originalPackageId :: UnitId, + originalModuleName :: ModuleName + } + deriving (Generic, Eq, Read, Show) + +data ExposedModule + = ExposedModule { + exposedName :: ModuleName, + exposedReexport :: Maybe OriginalModule + } + deriving (Eq, Generic, Read, Show) + +instance Text OriginalModule where + disp (OriginalModule ipi m) = + disp ipi <> Disp.char ':' <> disp m + parse = do + ipi <- parse + _ <- Parse.char ':' + m <- parse + return (OriginalModule ipi m) + +instance Text ExposedModule where + disp (ExposedModule m reexport) = + Disp.hsep [ disp m + , case reexport of + Just m' -> Disp.hsep [Disp.text "from", disp m'] + Nothing -> Disp.empty + ] + parse = do + m <- parseModuleNameQ + Parse.skipSpaces + reexport <- Parse.option Nothing $ do + _ <- Parse.string "from" + Parse.skipSpaces + fmap Just parse + return (ExposedModule m reexport) + + +instance Binary OriginalModule + +instance Binary ExposedModule + +-- To maintain backwards-compatibility, we accept both comma/non-comma +-- separated variants of this field. You SHOULD use the comma syntax if you +-- use any new functions, although actually it's unambiguous due to a quirk +-- of the fact that modules must start with capital letters. 
+ +showExposedModules :: [ExposedModule] -> Disp.Doc +showExposedModules xs + | all isExposedModule xs = fsep (map disp xs) + | otherwise = fsep (Disp.punctuate comma (map disp xs)) + where isExposedModule (ExposedModule _ Nothing) = True + isExposedModule _ = False + +parseExposedModules :: Parse.ReadP r [ExposedModule] +parseExposedModules = parseOptCommaList parse + +-- ----------------------------------------------------------------------------- +-- Parsing + +parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo +parseInstalledPackageInfo = + parseFieldsFlat (fieldsInstalledPackageInfo ++ deprecatedFieldDescrs) + emptyInstalledPackageInfo + +-- ----------------------------------------------------------------------------- +-- Pretty-printing + +showInstalledPackageInfo :: InstalledPackageInfo -> String +showInstalledPackageInfo = showFields fieldsInstalledPackageInfo + +showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) +showInstalledPackageInfoField = showSingleNamedField fieldsInstalledPackageInfo + +showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) +showSimpleInstalledPackageInfoField = showSimpleSingleNamedField fieldsInstalledPackageInfo + +-- ----------------------------------------------------------------------------- +-- Description of the fields, for parsing/printing + +fieldsInstalledPackageInfo :: [FieldDescr InstalledPackageInfo] +fieldsInstalledPackageInfo = basicFieldDescrs ++ installedFieldDescrs + +basicFieldDescrs :: [FieldDescr InstalledPackageInfo] +basicFieldDescrs = + [ simpleField "name" + disp parsePackageNameQ + packageName (\name pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgName=name}}) + , simpleField "version" + disp parseOptVersion + packageVersion (\ver pkg -> pkg{sourcePackageId=(sourcePackageId pkg){pkgVersion=ver}}) + , simpleField "id" + disp parse + installedUnitId (\pk pkg -> pkg{installedUnitId=pk}) + -- NB: parse these as component IDs + , simpleField "key" + (disp . 
ComponentId) (fmap (\(ComponentId s) -> s) parse) + compatPackageKey (\pk pkg -> pkg{compatPackageKey=pk}) + , simpleField "license" + disp parseLicenseQ + license (\l pkg -> pkg{license=l}) + , simpleField "copyright" + showFreeText parseFreeText + copyright (\val pkg -> pkg{copyright=val}) + , simpleField "maintainer" + showFreeText parseFreeText + maintainer (\val pkg -> pkg{maintainer=val}) + , simpleField "stability" + showFreeText parseFreeText + stability (\val pkg -> pkg{stability=val}) + , simpleField "homepage" + showFreeText parseFreeText + homepage (\val pkg -> pkg{homepage=val}) + , simpleField "package-url" + showFreeText parseFreeText + pkgUrl (\val pkg -> pkg{pkgUrl=val}) + , simpleField "synopsis" + showFreeText parseFreeText + synopsis (\val pkg -> pkg{synopsis=val}) + , simpleField "description" + showFreeText parseFreeText + description (\val pkg -> pkg{description=val}) + , simpleField "category" + showFreeText parseFreeText + category (\val pkg -> pkg{category=val}) + , simpleField "author" + showFreeText parseFreeText + author (\val pkg -> pkg{author=val}) + ] + +installedFieldDescrs :: [FieldDescr InstalledPackageInfo] +installedFieldDescrs = [ + boolField "exposed" + exposed (\val pkg -> pkg{exposed=val}) + , simpleField "exposed-modules" + showExposedModules parseExposedModules + exposedModules (\xs pkg -> pkg{exposedModules=xs}) + , listField "hidden-modules" + disp parseModuleNameQ + hiddenModules (\xs pkg -> pkg{hiddenModules=xs}) + , simpleField "abi" + disp parse + abiHash (\abi pkg -> pkg{abiHash=abi}) + , boolField "trusted" + trusted (\val pkg -> pkg{trusted=val}) + , listField "import-dirs" + showFilePath parseFilePathQ + importDirs (\xs pkg -> pkg{importDirs=xs}) + , listField "library-dirs" + showFilePath parseFilePathQ + libraryDirs (\xs pkg -> pkg{libraryDirs=xs}) + , listField "dynamic-library-dirs" + showFilePath parseFilePathQ + libraryDynDirs (\xs pkg -> pkg{libraryDynDirs=xs}) + , simpleField "data-dir" + showFilePath (parseFilePathQ Parse.<++ return "") + dataDir (\val pkg -> pkg{dataDir=val}) + , listField "hs-libraries" + showFilePath parseTokenQ + hsLibraries (\xs pkg -> pkg{hsLibraries=xs}) + , listField "extra-libraries" + showToken parseTokenQ + extraLibraries (\xs pkg -> pkg{extraLibraries=xs}) + , listField "extra-ghci-libraries" + showToken parseTokenQ + extraGHCiLibraries (\xs pkg -> pkg{extraGHCiLibraries=xs}) + , listField "include-dirs" + showFilePath parseFilePathQ + includeDirs (\xs pkg -> pkg{includeDirs=xs}) + , listField "includes" + showFilePath parseFilePathQ + includes (\xs pkg -> pkg{includes=xs}) + , listField "depends" + disp parse + depends (\xs pkg -> pkg{depends=xs}) + , listField "cc-options" + showToken parseTokenQ + ccOptions (\path pkg -> pkg{ccOptions=path}) + , listField "ld-options" + showToken parseTokenQ + ldOptions (\path pkg -> pkg{ldOptions=path}) + , listField "framework-dirs" + showFilePath parseFilePathQ + frameworkDirs (\xs pkg -> pkg{frameworkDirs=xs}) + , listField "frameworks" + showToken parseTokenQ + frameworks (\xs pkg -> pkg{frameworks=xs}) + , listField "haddock-interfaces" + showFilePath parseFilePathQ + haddockInterfaces (\xs pkg -> pkg{haddockInterfaces=xs}) + , listField "haddock-html" + showFilePath parseFilePathQ + haddockHTMLs (\xs pkg -> pkg{haddockHTMLs=xs}) + , simpleField "pkgroot" + (const Disp.empty) parseFilePathQ + (fromMaybe "" . 
pkgRoot) (\xs pkg -> pkg{pkgRoot=Just xs}) + ] + +deprecatedFieldDescrs :: [FieldDescr InstalledPackageInfo] +deprecatedFieldDescrs = [ + listField "hugs-options" + showToken parseTokenQ + (const []) (const id) + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Lex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Lex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Lex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Lex.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,55 @@ +{-# LANGUAGE PatternGuards #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Lex +-- Copyright : Ben Gamari 2015-2019 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains a simple lexer supporting quoted strings + +module Distribution.Lex ( + tokenizeQuotedWords + ) where + +import Data.Char (isSpace) +import Distribution.Compat.Semigroup as Semi + +newtype DList a = DList ([a] -> [a]) + +runDList :: DList a -> [a] +runDList (DList run) = run [] + +singleton :: a -> DList a +singleton a = DList (a:) + +instance Monoid (DList a) where + mempty = DList id + mappend = (Semi.<>) + +instance Semigroup (DList a) where + DList a <> DList b = DList (a . b) + +tokenizeQuotedWords :: String -> [String] +tokenizeQuotedWords = filter (not . null) . go False mempty + where + go :: Bool -- ^ in quoted region + -> DList Char -- ^ accumulator + -> String -- ^ string to be parsed + -> [String] -- ^ parse result + go _ accum [] + | [] <- accum' = [] + | otherwise = [accum'] + where accum' = runDList accum + + go False accum (c:cs) + | isSpace c = runDList accum : go False mempty cs + | c == '"' = go True accum cs + + go True accum (c:cs) + | c == '"' = go False accum cs + + go quoted accum (c:cs) + = go quoted (accum `mappend` singleton c) cs + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/License.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/License.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/License.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/License.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,177 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.License +-- Description : The License data type. +-- Copyright : Isaac Jones 2003-2005 +-- Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Package descriptions contain fields for specifying the name of a software +-- license and the name of the file containing the text of that license. While +-- package authors may choose any license they like, Cabal provides an +-- enumeration of a small set of common free and open source software licenses. +-- This is done so that Hackage can recognise licenses, so that tools can detect +-- , +-- and to deter +-- . +-- +-- It is recommended that all package authors use the @license-file@ or +-- @license-files@ fields in their package descriptions. Further information +-- about these fields can be found in the +-- . 
+-- +-- = Additional resources +-- +-- The following websites provide information about free and open source +-- software licenses: +-- +-- * +-- +-- * +-- +-- = Disclaimer +-- +-- The descriptions of software licenses provided by this documentation are +-- intended for informational purposes only and in no way constitute legal +-- advice. Please read the text of the licenses and consult a lawyer for any +-- advice regarding software licensing. + +module Distribution.License ( + License(..), + knownLicenses, + ) where + +import Distribution.Version +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.Binary + +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ((<>)) +import qualified Data.Char as Char (isAlphaNum) +import Data.Data (Data) +import Data.Typeable (Typeable) +import GHC.Generics (Generic) + +-- | Indicates the license under which a package's source code is released. +-- Versions of the licenses not listed here will be rejected by Hackage and +-- cause @cabal check@ to issue a warning. +data License = + -- TODO: * remove BSD4 + + -- | GNU General Public License, + -- or + -- . + GPL (Maybe Version) + + -- | . + | AGPL (Maybe Version) + + -- | GNU Lesser General Public License, + -- or + -- . + | LGPL (Maybe Version) + + -- | . + | BSD2 + + -- | . + | BSD3 + + -- | . + -- This license has not been approved by the OSI and is incompatible with + -- the GNU GPL. It is provided for historical reasons and should be avoided. + | BSD4 + + -- | . + | MIT + + -- | + | ISC + + -- | . + | MPL Version + + -- | . + | Apache (Maybe Version) + + -- | The author of a package disclaims any copyright to its source code and + -- dedicates it to the public domain. This is not a software license. Please + -- note that it is not possible to dedicate works to the public domain in + -- every jurisdiction, nor is a work that is in the public domain in one + -- jurisdiction necessarily in the public domain elsewhere. + | PublicDomain + + -- | Explicitly 'All Rights Reserved', eg for proprietary software. The + -- package may not be legally modified or redistributed by anyone but the + -- rightsholder. + | AllRightsReserved + + -- | No license specified which legally defaults to 'All Rights Reserved'. + -- The package may not be legally modified or redistributed by anyone but + -- the rightsholder. + | UnspecifiedLicense + + -- | Any other software license. + | OtherLicense + + -- | Indicates an erroneous license name. + | UnknownLicense String + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary License + +-- | The list of all currently recognised licenses. 
+knownLicenses :: [License] +knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3]) + , LGPL unversioned, LGPL (version [2, 1]), LGPL (version [3]) + , AGPL unversioned, AGPL (version [3]) + , BSD2, BSD3, MIT, ISC + , MPL (Version [2, 0] []) + , Apache unversioned, Apache (version [2, 0]) + , PublicDomain, AllRightsReserved, OtherLicense] + where + unversioned = Nothing + version v = Just (Version v []) + +instance Text License where + disp (GPL version) = Disp.text "GPL" <> dispOptVersion version + disp (LGPL version) = Disp.text "LGPL" <> dispOptVersion version + disp (AGPL version) = Disp.text "AGPL" <> dispOptVersion version + disp (MPL version) = Disp.text "MPL" <> dispVersion version + disp (Apache version) = Disp.text "Apache" <> dispOptVersion version + disp (UnknownLicense other) = Disp.text other + disp other = Disp.text (show other) + + parse = do + name <- Parse.munch1 (\c -> Char.isAlphaNum c && c /= '-') + version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse) + return $! case (name, version :: Maybe Version) of + ("GPL", _ ) -> GPL version + ("LGPL", _ ) -> LGPL version + ("AGPL", _ ) -> AGPL version + ("BSD2", Nothing) -> BSD2 + ("BSD3", Nothing) -> BSD3 + ("BSD4", Nothing) -> BSD4 + ("ISC", Nothing) -> ISC + ("MIT", Nothing) -> MIT + ("MPL", Just version') -> MPL version' + ("Apache", _ ) -> Apache version + ("PublicDomain", Nothing) -> PublicDomain + ("AllRightsReserved", Nothing) -> AllRightsReserved + ("OtherLicense", Nothing) -> OtherLicense + _ -> UnknownLicense $ name ++ + maybe "" (('-':) . display) version + +dispOptVersion :: Maybe Version -> Disp.Doc +dispOptVersion Nothing = Disp.empty +dispOptVersion (Just v) = dispVersion v + +dispVersion :: Version -> Disp.Doc +dispVersion v = Disp.char '-' <> disp v diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Make.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Make.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Make.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Make.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,181 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Make +-- Copyright : Martin Sjögren 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is an alternative build system that delegates everything to the @make@ +-- program. All the commands just end up calling @make@ with appropriate +-- arguments. The intention was to allow preexisting packages that used +-- makefiles to be wrapped into Cabal packages. In practice essentially all +-- such packages were converted over to the \"Simple\" build system instead. +-- Consequently this module is not used much and it certainly only sees cursory +-- maintenance and no testing. Perhaps at some point we should stop pretending +-- that it works. +-- +-- Uses the parsed command-line from "Distribution.Simple.Setup" in order to build +-- Haskell tools using a back-end build system based on make. Obviously we +-- assume that there is a configure script, and that after the ConfigCmd has +-- been run, there is a Makefile. Further assumptions: +-- +-- [ConfigCmd] We assume the configure script accepts +-- @--with-hc@, +-- @--with-hc-pkg@, +-- @--prefix@, +-- @--bindir@, +-- @--libdir@, +-- @--libexecdir@, +-- @--datadir@. 
+-- +-- [BuildCmd] We assume that the default Makefile target will build everything. +-- +-- [InstallCmd] We assume there is an @install@ target. Note that we assume that +-- this does *not* register the package! +-- +-- [CopyCmd] We assume there is a @copy@ target, and a variable @$(destdir)@. +-- The @copy@ target should probably just invoke @make install@ +-- recursively (e.g. @$(MAKE) install prefix=$(destdir)\/$(prefix) +-- bindir=$(destdir)\/$(bindir)@. The reason we can\'t invoke @make +-- install@ directly here is that we don\'t know the value of @$(prefix)@. +-- +-- [SDistCmd] We assume there is a @dist@ target. +-- +-- [RegisterCmd] We assume there is a @register@ target and a variable @$(user)@. +-- +-- [UnregisterCmd] We assume there is an @unregister@ target. +-- +-- [HaddockCmd] We assume there is a @docs@ or @doc@ target. + + +-- copy : +-- $(MAKE) install prefix=$(destdir)/$(prefix) \ +-- bindir=$(destdir)/$(bindir) \ + +module Distribution.Make ( + module Distribution.Package, + License(..), Version(..), + defaultMain, defaultMainArgs, defaultMainNoRead + ) where + +-- local +import Distribution.Compat.Exception +import Distribution.Package +import Distribution.Simple.Program +import Distribution.PackageDescription +import Distribution.Simple.Setup +import Distribution.Simple.Command + +import Distribution.Simple.Utils + +import Distribution.License +import Distribution.Version +import Distribution.Text + +import System.Environment (getArgs, getProgName) +import System.Exit + +defaultMain :: IO () +defaultMain = getArgs >>= defaultMainArgs + +defaultMainArgs :: [String] -> IO () +defaultMainArgs = defaultMainHelper + +{-# DEPRECATED defaultMainNoRead "it ignores its PackageDescription arg" #-} +defaultMainNoRead :: PackageDescription -> IO () +defaultMainNoRead = const defaultMain + +defaultMainHelper :: [String] -> IO () +defaultMainHelper args = + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (flags, commandParse) -> + case commandParse of + _ | fromFlag (globalVersion flags) -> printVersion + | fromFlag (globalNumericVersion flags) -> printNumericVersion + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo action -> action + + where + printHelp help = getProgName >>= putStr . help + printOptionsList = putStr . 
unlines + printErrors errs = do + putStr (intercalate "\n" errs) + exitWith (ExitFailure 1) + printNumericVersion = putStrLn $ display cabalVersion + printVersion = putStrLn $ "Cabal library version " + ++ display cabalVersion + + progs = defaultProgramConfiguration + commands = + [configureCommand progs `commandAddAction` configureAction + ,buildCommand progs `commandAddAction` buildAction + ,installCommand `commandAddAction` installAction + ,copyCommand `commandAddAction` copyAction + ,haddockCommand `commandAddAction` haddockAction + ,cleanCommand `commandAddAction` cleanAction + ,sdistCommand `commandAddAction` sdistAction + ,registerCommand `commandAddAction` registerAction + ,unregisterCommand `commandAddAction` unregisterAction + ] + +configureAction :: ConfigFlags -> [String] -> IO () +configureAction flags args = do + noExtraFlags args + let verbosity = fromFlag (configVerbosity flags) + rawSystemExit verbosity "sh" $ + "configure" + : configureArgs backwardsCompatHack flags + where backwardsCompatHack = True + +copyAction :: CopyFlags -> [String] -> IO () +copyAction flags args = do + noExtraFlags args + let destArgs = case fromFlag $ copyDest flags of + NoCopyDest -> ["install"] + CopyTo path -> ["copy", "destdir=" ++ path] + rawSystemExit (fromFlag $ copyVerbosity flags) "make" destArgs + +installAction :: InstallFlags -> [String] -> IO () +installAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ installVerbosity flags) "make" ["install"] + rawSystemExit (fromFlag $ installVerbosity flags) "make" ["register"] + +haddockAction :: HaddockFlags -> [String] -> IO () +haddockAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["docs"] + `catchIO` \_ -> + rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["doc"] + +buildAction :: BuildFlags -> [String] -> IO () +buildAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ buildVerbosity flags) "make" [] + +cleanAction :: CleanFlags -> [String] -> IO () +cleanAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ cleanVerbosity flags) "make" ["clean"] + +sdistAction :: SDistFlags -> [String] -> IO () +sdistAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ sDistVerbosity flags) "make" ["dist"] + +registerAction :: RegisterFlags -> [String] -> IO () +registerAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ regVerbosity flags) "make" ["register"] + +unregisterAction :: RegisterFlags -> [String] -> IO () +unregisterAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ regVerbosity flags) "make" ["unregister"] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ModuleName.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ModuleName.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ModuleName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ModuleName.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,109 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ModuleName +-- Copyright : Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Data type for Haskell module names. 
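A small sketch of the intended division of labour between 'fromString' (for literals
known to be valid) and 'Distribution.Text.simpleParse' (for untrusted input):

> import Distribution.ModuleName (ModuleName, fromString, toFilePath)
> import Distribution.Text (simpleParse)
>
> main :: IO ()
> main = do
>   -- fromString calls 'error' on an invalid name, so only use it on literals.
>   putStrLn (toFilePath (fromString "Data.Map.Strict"))
>   -- simpleParse returns Nothing instead of failing.
>   print (simpleParse "not a module" :: Maybe ModuleName)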
+ +module Distribution.ModuleName ( + ModuleName, + fromString, + components, + toFilePath, + main, + simple, + ) where + +import Distribution.Text +import Distribution.Compat.Binary +import qualified Distribution.Compat.ReadP as Parse + +import qualified Data.Char as Char + ( isAlphaNum, isUpper ) +import Data.Data (Data) +import Data.Typeable (Typeable) +import qualified Text.PrettyPrint as Disp +import Data.List + ( intercalate, intersperse ) +import GHC.Generics (Generic) +import System.FilePath + ( pathSeparator ) + +-- | A valid Haskell module name. +-- +newtype ModuleName = ModuleName [String] + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary ModuleName + +instance Text ModuleName where + disp (ModuleName ms) = + Disp.hcat (intersperse (Disp.char '.') (map Disp.text ms)) + + parse = do + ms <- Parse.sepBy1 component (Parse.char '.') + return (ModuleName ms) + + where + component = do + c <- Parse.satisfy Char.isUpper + cs <- Parse.munch validModuleChar + return (c:cs) + +validModuleChar :: Char -> Bool +validModuleChar c = Char.isAlphaNum c || c == '_' || c == '\'' + +validModuleComponent :: String -> Bool +validModuleComponent [] = False +validModuleComponent (c:cs) = Char.isUpper c + && all validModuleChar cs + +{-# DEPRECATED simple "use ModuleName.fromString instead" #-} +simple :: String -> ModuleName +simple str = ModuleName [str] + +-- | Construct a 'ModuleName' from a valid module name 'String'. +-- +-- This is just a convenience function intended for valid module strings. It is +-- an error if it is used with a string that is not a valid module name. If you +-- are parsing user input then use 'Distribution.Text.simpleParse' instead. +-- +fromString :: String -> ModuleName +fromString string + | all validModuleComponent components' = ModuleName components' + | otherwise = error badName + + where + components' = split string + badName = "ModuleName.fromString: invalid module name " ++ show string + + split cs = case break (=='.') cs of + (chunk,[]) -> chunk : [] + (chunk,_:rest) -> chunk : split rest + +-- | The module name @Main@. +-- +main :: ModuleName +main = ModuleName ["Main"] + +-- | The individual components of a hierarchical module name. For example +-- +-- > components (fromString "A.B.C") = ["A", "B", "C"] +-- +components :: ModuleName -> [String] +components (ModuleName ms) = ms + +-- | Convert a module name to a file path, but without any file extension. +-- For example: +-- +-- > toFilePath (fromString "A.B.C") = "A/B/C" +-- +toFilePath :: ModuleName -> FilePath +toFilePath = intercalate [pathSeparator] . components diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Check.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,1737 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Check +-- Copyright : Lennart Kolmodin 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This has code for checking for various problems in packages. 
There is one +-- set of checks that just looks at a 'PackageDescription' in isolation and +-- another set of checks that also looks at files in the package. Some of the +-- checks are basic sanity checks, others are portability standards that we'd +-- like to encourage. There is a 'PackageCheck' type that distinguishes the +-- different kinds of check so we can see which ones are appropriate to report +-- in different situations. This code gets uses when configuring a package when +-- we consider only basic problems. The higher standard is uses when when +-- preparing a source tarball and by Hackage when uploading new packages. The +-- reason for this is that we want to hold packages that are expected to be +-- distributed to a higher standard than packages that are only ever expected +-- to be used on the author's own environment. + +module Distribution.PackageDescription.Check ( + -- * Package Checking + PackageCheck(..), + checkPackage, + checkConfiguredPackage, + + -- ** Checking package contents + checkPackageFiles, + checkPackageContent, + CheckPackageContentOps(..), + checkPackageFileNames, + ) where + +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration +import Distribution.Compiler +import Distribution.System +import Distribution.License +import Distribution.Simple.CCompiler +import Distribution.Simple.Utils hiding (findPackageDesc, notice) +import Distribution.Version +import Distribution.Package +import Distribution.Text +import Language.Haskell.Extension + +import Data.Maybe + ( isNothing, isJust, catMaybes, mapMaybe, maybeToList, fromMaybe ) +import Data.List (sort, group, isPrefixOf, nub, find) +import Control.Monad + ( filterM, liftM ) +import qualified System.Directory as System + ( doesFileExist, doesDirectoryExist ) +import qualified Data.Map as Map + +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ((<>), (<+>)) + +import qualified System.Directory (getDirectoryContents) +import System.IO (openBinaryFile, IOMode(ReadMode), hGetContents) +import System.FilePath + ( (), takeExtension, isRelative, isAbsolute + , splitDirectories, splitPath, splitExtension ) +import System.FilePath.Windows as FilePath.Windows + ( isValid ) + +-- | Results of some kind of failed package check. +-- +-- There are a range of severities, from merely dubious to totally insane. +-- All of them come with a human readable explanation. In future we may augment +-- them with more machine readable explanations, for example to help an IDE +-- suggest automatic corrections. +-- +data PackageCheck = + + -- | This package description is no good. There's no way it's going to + -- build sensibly. This should give an error at configure time. + PackageBuildImpossible { explanation :: String } + + -- | A problem that is likely to affect building the package, or an + -- issue that we'd like every package author to be aware of, even if + -- the package is never distributed. + | PackageBuildWarning { explanation :: String } + + -- | An issue that might not be a problem for the package author but + -- might be annoying or detrimental when the package is distributed to + -- users. We should encourage distributed packages to be free from these + -- issues, but occasionally there are justifiable reasons so we cannot + -- ban them entirely. + | PackageDistSuspicious { explanation :: String } + + -- | Like PackageDistSuspicious but will only display warnings + -- rather than causing abnormal exit when you run 'cabal check'. 
+ | PackageDistSuspiciousWarn { explanation :: String } + + -- | An issue that is OK in the author's environment but is almost + -- certain to be a portability problem for other environments. We can + -- quite legitimately refuse to publicly distribute packages with these + -- problems. + | PackageDistInexcusable { explanation :: String } + deriving (Eq) + +instance Show PackageCheck where + show notice = explanation notice + +check :: Bool -> PackageCheck -> Maybe PackageCheck +check False _ = Nothing +check True pc = Just pc + +checkSpecVersion :: PackageDescription -> [Int] -> Bool -> PackageCheck + -> Maybe PackageCheck +checkSpecVersion pkg specver cond pc + | specVersion pkg >= Version specver [] = Nothing + | otherwise = check cond pc + +-- ------------------------------------------------------------ +-- * Standard checks +-- ------------------------------------------------------------ + +-- | Check for common mistakes and problems in package descriptions. +-- +-- This is the standard collection of checks covering all aspects except +-- for checks that require looking at files within the package. For those +-- see 'checkPackageFiles'. +-- +-- It requires the 'GenericPackageDescription' and optionally a particular +-- configuration of that package. If you pass 'Nothing' then we just check +-- a version of the generic description using 'flattenPackageDescription'. +-- +checkPackage :: GenericPackageDescription + -> Maybe PackageDescription + -> [PackageCheck] +checkPackage gpkg mpkg = + checkConfiguredPackage pkg + ++ checkConditionals gpkg + ++ checkPackageVersions gpkg + ++ checkDevelopmentOnlyFlags gpkg + where + pkg = fromMaybe (flattenPackageDescription gpkg) mpkg + +--TODO: make this variant go away +-- we should always know the GenericPackageDescription +checkConfiguredPackage :: PackageDescription -> [PackageCheck] +checkConfiguredPackage pkg = + checkSanity pkg + ++ checkFields pkg + ++ checkLicense pkg + ++ checkSourceRepos pkg + ++ checkGhcOptions pkg + ++ checkCCOptions pkg + ++ checkCPPOptions pkg + ++ checkPaths pkg + ++ checkCabalVersion pkg + + +-- ------------------------------------------------------------ +-- * Basic sanity checks +-- ------------------------------------------------------------ + +-- | Check that this package description is sane. +-- +checkSanity :: PackageDescription -> [PackageCheck] +checkSanity pkg = + catMaybes [ + + check (null . (\(PackageName n) -> n) . packageName $ pkg) $ + PackageBuildImpossible "No 'name' field." + + , check (null . versionBranch . packageVersion $ pkg) $ + PackageBuildImpossible "No 'version' field." + + , check (all ($ pkg) [ null . executables + , null . testSuites + , null . benchmarks + , isNothing . library ]) $ + PackageBuildImpossible + "No executables, libraries, tests, or benchmarks found. Nothing to do." + + , check (not (null duplicateNames)) $ + PackageBuildImpossible $ "Duplicate sections: " ++ commaSep duplicateNames + ++ ". The name of every executable, test suite, and benchmark section in" + ++ " the package must be unique." + ] + --TODO: check for name clashes case insensitively: windows file systems cannot + --cope. 
+ + ++ maybe [] (checkLibrary pkg) (library pkg) + ++ concatMap (checkExecutable pkg) (executables pkg) + ++ concatMap (checkTestSuite pkg) (testSuites pkg) + ++ concatMap (checkBenchmark pkg) (benchmarks pkg) + + ++ catMaybes [ + + check (specVersion pkg > cabalVersion) $ + PackageBuildImpossible $ + "This package description follows version " + ++ display (specVersion pkg) ++ " of the Cabal specification. This " + ++ "tool only supports up to version " ++ display cabalVersion ++ "." + ] + where + exeNames = map exeName $ executables pkg + testNames = map testName $ testSuites pkg + bmNames = map benchmarkName $ benchmarks pkg + duplicateNames = dups $ exeNames ++ testNames ++ bmNames + +checkLibrary :: PackageDescription -> Library -> [PackageCheck] +checkLibrary pkg lib = + catMaybes [ + + check (not (null moduleDuplicates)) $ + PackageBuildImpossible $ + "Duplicate modules in library: " + ++ commaSep (map display moduleDuplicates) + + -- check use of required-signatures/exposed-signatures sections + , checkVersion [1,21] (not (null (requiredSignatures lib))) $ + PackageDistInexcusable $ + "To use the 'required-signatures' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.21'." + + , checkVersion [1,21] (not (null (exposedSignatures lib))) $ + PackageDistInexcusable $ + "To use the 'exposed-signatures' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.21'." + ] + + where + checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck + checkVersion ver cond pc + | specVersion pkg >= Version ver [] = Nothing + | otherwise = check cond pc + + moduleDuplicates = dups (libModules lib ++ + map moduleReexportName (reexportedModules lib)) + +checkExecutable :: PackageDescription -> Executable -> [PackageCheck] +checkExecutable pkg exe = + catMaybes [ + + check (null (modulePath exe)) $ + PackageBuildImpossible $ + "No 'main-is' field found for executable " ++ exeName exe + + , check (not (null (modulePath exe)) + && (not $ fileExtensionSupportedLanguage $ modulePath exe)) $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor), " + ++ "or it may specify a C/C++/obj-C source file." + + , checkSpecVersion pkg [1,17] + (fileExtensionSupportedLanguage (modulePath exe) + && takeExtension (modulePath exe) `notElem` [".hs", ".lhs"]) $ + PackageDistInexcusable $ + "The package uses a C/C++/obj-C source file for the 'main-is' field. " + ++ "To use this feature you must specify 'cabal-version: >= 1.18'." + + , check (not (null moduleDuplicates)) $ + PackageBuildImpossible $ + "Duplicate modules in executable '" ++ exeName exe ++ "': " + ++ commaSep (map display moduleDuplicates) + ] + where + moduleDuplicates = dups (exeModules exe) + +checkTestSuite :: PackageDescription -> TestSuite -> [PackageCheck] +checkTestSuite pkg test = + catMaybes [ + + case testInterface test of + TestSuiteUnsupported tt@(TestTypeUnknown _ _) -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a known type of test suite. " + ++ "The known test suite types are: " + ++ commaSep (map display knownTestTypes) + + TestSuiteUnsupported tt -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a supported test suite version. 
" + ++ "The known test suite types are: " + ++ commaSep (map display knownTestTypes) + _ -> Nothing + + , check (not $ null moduleDuplicates) $ + PackageBuildImpossible $ + "Duplicate modules in test suite '" ++ testName test ++ "': " + ++ commaSep (map display moduleDuplicates) + + , check mainIsWrongExt $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor), " + ++ "or it may specify a C/C++/obj-C source file." + + , checkSpecVersion pkg [1,17] (mainIsNotHsExt && not mainIsWrongExt) $ + PackageDistInexcusable $ + "The package uses a C/C++/obj-C source file for the 'main-is' field. " + ++ "To use this feature you must specify 'cabal-version: >= 1.18'." + ] + where + moduleDuplicates = dups $ testModules test + + mainIsWrongExt = case testInterface test of + TestSuiteExeV10 _ f -> not $ fileExtensionSupportedLanguage f + _ -> False + + mainIsNotHsExt = case testInterface test of + TestSuiteExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] + _ -> False + +checkBenchmark :: PackageDescription -> Benchmark -> [PackageCheck] +checkBenchmark _pkg bm = + catMaybes [ + + case benchmarkInterface bm of + BenchmarkUnsupported tt@(BenchmarkTypeUnknown _ _) -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a known type of benchmark. " + ++ "The known benchmark types are: " + ++ commaSep (map display knownBenchmarkTypes) + + BenchmarkUnsupported tt -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a supported benchmark version. " + ++ "The known benchmark types are: " + ++ commaSep (map display knownBenchmarkTypes) + _ -> Nothing + + , check (not $ null moduleDuplicates) $ + PackageBuildImpossible $ + "Duplicate modules in benchmark '" ++ benchmarkName bm ++ "': " + ++ commaSep (map display moduleDuplicates) + + , check mainIsWrongExt $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor)." + ] + where + moduleDuplicates = dups $ benchmarkModules bm + + mainIsWrongExt = case benchmarkInterface bm of + BenchmarkExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] + _ -> False + +-- ------------------------------------------------------------ +-- * Additional pure checks +-- ------------------------------------------------------------ + +checkFields :: PackageDescription -> [PackageCheck] +checkFields pkg = + catMaybes [ + + check (not . FilePath.Windows.isValid . display . packageName $ pkg) $ + PackageDistInexcusable $ + "Unfortunately, the package name '" ++ display (packageName pkg) + ++ "' is one of the reserved system file names on Windows. Many tools " + ++ "need to convert package names to file names so using this name " + ++ "would cause problems." + + , check (isNothing (buildType pkg)) $ + PackageBuildWarning $ + "No 'build-type' specified. If you do not need a custom Setup.hs or " + ++ "./configure script then use 'build-type: Simple'." + + , case buildType pkg of + Just (UnknownBuildType unknown) -> Just $ + PackageBuildWarning $ + quote unknown ++ " is not a known 'build-type'. " + ++ "The known build types are: " + ++ commaSep (map display knownBuildTypes) + _ -> Nothing + + , check (isJust (setupBuildInfo pkg) && buildType pkg /= Just Custom) $ + PackageBuildWarning $ + "Ignoring the 'custom-setup' section because the 'build-type' is " + ++ "not 'Custom'. Use 'build-type: Custom' if you need to use a " + ++ "custom Setup.hs script." 
+ + , check (not (null unknownCompilers)) $ + PackageBuildWarning $ + "Unknown compiler " ++ commaSep (map quote unknownCompilers) + ++ " in 'tested-with' field." + + , check (not (null unknownLanguages)) $ + PackageBuildWarning $ + "Unknown languages: " ++ commaSep unknownLanguages + + , check (not (null unknownExtensions)) $ + PackageBuildWarning $ + "Unknown extensions: " ++ commaSep unknownExtensions + + , check (not (null languagesUsedAsExtensions)) $ + PackageBuildWarning $ + "Languages listed as extensions: " + ++ commaSep languagesUsedAsExtensions + ++ ". Languages must be specified in either the 'default-language' " + ++ " or the 'other-languages' field." + + , check (not (null ourDeprecatedExtensions)) $ + PackageDistSuspicious $ + "Deprecated extensions: " + ++ commaSep (map (quote . display . fst) ourDeprecatedExtensions) + ++ ". " ++ unwords + [ "Instead of '" ++ display ext + ++ "' use '" ++ display replacement ++ "'." + | (ext, Just replacement) <- ourDeprecatedExtensions ] + + , check (null (category pkg)) $ + PackageDistSuspicious "No 'category' field." + + , check (null (maintainer pkg)) $ + PackageDistSuspicious "No 'maintainer' field." + + , check (null (synopsis pkg) && null (description pkg)) $ + PackageDistInexcusable "No 'synopsis' or 'description' field." + + , check (null (description pkg) && not (null (synopsis pkg))) $ + PackageDistSuspicious "No 'description' field." + + , check (null (synopsis pkg) && not (null (description pkg))) $ + PackageDistSuspicious "No 'synopsis' field." + + --TODO: recommend the bug reports URL, author and homepage fields + --TODO: recommend not using the stability field + --TODO: recommend specifying a source repo + + , check (length (synopsis pkg) >= 80) $ + PackageDistSuspicious + "The 'synopsis' field is rather long (max 80 chars is recommended)." + + -- check use of impossible constraints "tested-with: GHC== 6.10 && ==6.12" + , check (not (null testedWithImpossibleRanges)) $ + PackageDistInexcusable $ + "Invalid 'tested-with' version range: " + ++ commaSep (map display testedWithImpossibleRanges) + ++ ". To indicate that you have tested a package with multiple " + ++ "different versions of the same compiler use multiple entries, " + ++ "for example 'tested-with: GHC==6.10.4, GHC==6.12.3' and not " + ++ "'tested-with: GHC==6.10.4 && ==6.12.3'." + ] + where + unknownCompilers = [ name | (OtherCompiler name, _) <- testedWith pkg ] + unknownLanguages = [ name | bi <- allBuildInfo pkg + , UnknownLanguage name <- allLanguages bi ] + unknownExtensions = [ name | bi <- allBuildInfo pkg + , UnknownExtension name <- allExtensions bi + , name `notElem` map display knownLanguages ] + ourDeprecatedExtensions = nub $ catMaybes + [ find ((==ext) . fst) deprecatedExtensions + | bi <- allBuildInfo pkg + , ext <- allExtensions bi ] + languagesUsedAsExtensions = + [ name | bi <- allBuildInfo pkg + , UnknownExtension name <- allExtensions bi + , name `elem` map display knownLanguages ] + + testedWithImpossibleRanges = + [ Dependency (PackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , isNoVersion vr ] + + +checkLicense :: PackageDescription -> [PackageCheck] +checkLicense pkg = + catMaybes [ + + check (license pkg == UnspecifiedLicense) $ + PackageDistInexcusable + "The 'license' field is missing." + + , check (license pkg == AllRightsReserved) $ + PackageDistSuspicious + "The 'license' is AllRightsReserved. Is that really what you want?" 
+ , case license pkg of + UnknownLicense l -> Just $ + PackageBuildWarning $ + quote ("license: " ++ l) ++ " is not a recognised license. The " + ++ "known licenses are: " + ++ commaSep (map display knownLicenses) + _ -> Nothing + + , check (license pkg == BSD4) $ + PackageDistSuspicious $ + "Using 'license: BSD4' is almost always a misunderstanding. 'BSD4' " + ++ "refers to the old 4-clause BSD license with the advertising " + ++ "clause. 'BSD3' refers the new 3-clause BSD license." + + , case unknownLicenseVersion (license pkg) of + Just knownVersions -> Just $ + PackageDistSuspicious $ + "'license: " ++ display (license pkg) ++ "' is not a known " + ++ "version of that license. The known versions are " + ++ commaSep (map display knownVersions) + ++ ". If this is not a mistake and you think it should be a known " + ++ "version then please file a ticket." + _ -> Nothing + + , check (license pkg `notElem` [ AllRightsReserved + , UnspecifiedLicense, PublicDomain] + -- AllRightsReserved and PublicDomain are not strictly + -- licenses so don't need license files. + && null (licenseFiles pkg)) $ + PackageDistSuspicious "A 'license-file' is not specified." + ] + where + unknownLicenseVersion (GPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | GPL (Just v') <- knownLicenses ] + unknownLicenseVersion (LGPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | LGPL (Just v') <- knownLicenses ] + unknownLicenseVersion (AGPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | AGPL (Just v') <- knownLicenses ] + unknownLicenseVersion (Apache (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | Apache (Just v') <- knownLicenses ] + unknownLicenseVersion _ = Nothing + +checkSourceRepos :: PackageDescription -> [PackageCheck] +checkSourceRepos pkg = + catMaybes $ concat [[ + + case repoKind repo of + RepoKindUnknown kind -> Just $ PackageDistInexcusable $ + quote kind ++ " is not a recognised kind of source-repository. " + ++ "The repo kind is usually 'head' or 'this'" + _ -> Nothing + + , check (isNothing (repoType repo)) $ + PackageDistInexcusable + "The source-repository 'type' is a required field." + + , check (isNothing (repoLocation repo)) $ + PackageDistInexcusable + "The source-repository 'location' is a required field." + + , check (repoType repo == Just CVS && isNothing (repoModule repo)) $ + PackageDistInexcusable + "For a CVS source-repository, the 'module' is a required field." + + , check (repoKind repo == RepoThis && isNothing (repoTag repo)) $ + PackageDistInexcusable $ + "For the 'this' kind of source-repository, the 'tag' is a required " + ++ "field. It should specify the tag corresponding to this version " + ++ "or release of the package." + + , check (maybe False System.FilePath.isAbsolute (repoSubdir repo)) $ + PackageDistInexcusable + "The 'subdir' field of a source-repository must be a relative path." + ] + | repo <- sourceRepos pkg ] + +--TODO: check location looks like a URL for some repo types. + +checkGhcOptions :: PackageDescription -> [PackageCheck] +checkGhcOptions pkg = + catMaybes [ + + checkFlags ["-fasm"] $ + PackageDistInexcusable $ + "'ghc-options: -fasm' is unnecessary and will not work on CPU " + ++ "architectures other than x86, x86-64, ppc or sparc." + + , checkFlags ["-fvia-C"] $ + PackageDistSuspicious $ + "'ghc-options: -fvia-C' is usually unnecessary. 
If your package " + ++ "needs -via-C for correctness rather than performance then it " + ++ "is using the FFI incorrectly and will probably not work with GHC " + ++ "6.10 or later." + + , checkFlags ["-fhpc"] $ + PackageDistInexcusable $ + "'ghc-options: -fhpc' is not not necessary. Use the configure flag " + ++ " --enable-coverage instead." + + , checkFlags ["-prof"] $ + PackageBuildWarning $ + "'ghc-options: -prof' is not necessary and will lead to problems " + ++ "when used on a library. Use the configure flag " + ++ "--enable-library-profiling and/or --enable-profiling." + + , checkFlags ["-o"] $ + PackageBuildWarning $ + "'ghc-options: -o' is not needed. " + ++ "The output files are named automatically." + + , checkFlags ["-hide-package"] $ + PackageBuildWarning $ + "'ghc-options: -hide-package' is never needed. " + ++ "Cabal hides all packages." + + , checkFlags ["--make"] $ + PackageBuildWarning $ + "'ghc-options: --make' is never needed. Cabal uses this automatically." + + , checkFlags ["-main-is"] $ + PackageDistSuspicious $ + "'ghc-options: -main-is' is not portable." + + , checkFlags ["-O0", "-Onot"] $ + PackageDistSuspicious $ + "'ghc-options: -O0' is not needed. " + ++ "Use the --disable-optimization configure flag." + + , checkFlags [ "-O", "-O1"] $ + PackageDistInexcusable $ + "'ghc-options: -O' is not needed. " + ++ "Cabal automatically adds the '-O' flag. " + ++ "Setting it yourself interferes with the --disable-optimization flag." + + , checkFlags ["-O2"] $ + PackageDistSuspiciousWarn $ + "'ghc-options: -O2' is rarely needed. " + ++ "Check that it is giving a real benefit " + ++ "and not just imposing longer compile times on your users." + + , checkFlags ["-split-objs"] $ + PackageBuildWarning $ + "'ghc-options: -split-objs' is not needed. " + ++ "Use the --enable-split-objs configure flag." + + , checkFlags ["-optl-Wl,-s", "-optl-s"] $ + PackageDistInexcusable $ + "'ghc-options: -optl-Wl,-s' is not needed and is not portable to all" + ++ " operating systems. Cabal 1.4 and later automatically strip" + ++ " executables. Cabal also has a flag --disable-executable-stripping" + ++ " which is necessary when building packages for some Linux" + ++ " distributions and using '-optl-Wl,-s' prevents that from working." + + , checkFlags ["-fglasgow-exts"] $ + PackageDistSuspicious $ + "Instead of 'ghc-options: -fglasgow-exts' it is preferable to use " + ++ "the 'extensions' field." + + , check ("-threaded" `elem` lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -threaded' has no effect for libraries. It should " + ++ "only be used for executables." + + , check ("-rtsopts" `elem` lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -rtsopts' has no effect for libraries. It should " + ++ "only be used for executables." + + , check (any (\opt -> "-with-rtsopts" `isPrefixOf` opt) lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -with-rtsopts' has no effect for libraries. It " + ++ "should only be used for executables." 
+ + , checkAlternatives "ghc-options" "extensions" + [ (flag, display extension) | flag <- all_ghc_options + , Just extension <- [ghcExtension flag] ] + + , checkAlternatives "ghc-options" "extensions" + [ (flag, extension) | flag@('-':'X':extension) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "cpp-options" $ + [ (flag, flag) | flag@('-':'D':_) <- all_ghc_options ] + ++ [ (flag, flag) | flag@('-':'U':_) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "extra-lib-dirs" + [ (flag, dir) | flag@('-':'L':dir) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "frameworks" + [ (flag, fmwk) | (flag@"-framework", fmwk) <- + zip all_ghc_options (safeTail all_ghc_options) ] + + , checkAlternatives "ghc-options" "extra-framework-dirs" + [ (flag, dir) | (flag@"-framework-path", dir) <- + zip all_ghc_options (safeTail all_ghc_options) ] + ] + + where + all_ghc_options = concatMap get_ghc_options (allBuildInfo pkg) + lib_ghc_options = maybe [] (get_ghc_options . libBuildInfo) (library pkg) + get_ghc_options bi = hcOptions GHC bi ++ hcProfOptions GHC bi + ++ hcSharedOptions GHC bi + + checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkFlags flags = check (any (`elem` flags) all_ghc_options) + + ghcExtension ('-':'f':name) = case name of + "allow-overlapping-instances" -> enable OverlappingInstances + "no-allow-overlapping-instances" -> disable OverlappingInstances + "th" -> enable TemplateHaskell + "no-th" -> disable TemplateHaskell + "ffi" -> enable ForeignFunctionInterface + "no-ffi" -> disable ForeignFunctionInterface + "fi" -> enable ForeignFunctionInterface + "no-fi" -> disable ForeignFunctionInterface + "monomorphism-restriction" -> enable MonomorphismRestriction + "no-monomorphism-restriction" -> disable MonomorphismRestriction + "mono-pat-binds" -> enable MonoPatBinds + "no-mono-pat-binds" -> disable MonoPatBinds + "allow-undecidable-instances" -> enable UndecidableInstances + "no-allow-undecidable-instances" -> disable UndecidableInstances + "allow-incoherent-instances" -> enable IncoherentInstances + "no-allow-incoherent-instances" -> disable IncoherentInstances + "arrows" -> enable Arrows + "no-arrows" -> disable Arrows + "generics" -> enable Generics + "no-generics" -> disable Generics + "implicit-prelude" -> enable ImplicitPrelude + "no-implicit-prelude" -> disable ImplicitPrelude + "implicit-params" -> enable ImplicitParams + "no-implicit-params" -> disable ImplicitParams + "bang-patterns" -> enable BangPatterns + "no-bang-patterns" -> disable BangPatterns + "scoped-type-variables" -> enable ScopedTypeVariables + "no-scoped-type-variables" -> disable ScopedTypeVariables + "extended-default-rules" -> enable ExtendedDefaultRules + "no-extended-default-rules" -> disable ExtendedDefaultRules + _ -> Nothing + ghcExtension "-cpp" = enable CPP + ghcExtension _ = Nothing + + enable e = Just (EnableExtension e) + disable e = Just (DisableExtension e) + +checkCCOptions :: PackageDescription -> [PackageCheck] +checkCCOptions pkg = + catMaybes [ + + checkAlternatives "cc-options" "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_ccOptions ] + + , checkAlternatives "cc-options" "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_ccOptions ] + + , checkAlternatives "cc-options" "extra-lib-dirs" + [ (flag, dir) | 
flag@('-':'L':dir) <- all_ccOptions ] + + , checkAlternatives "ld-options" "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_ldOptions ] + + , checkAlternatives "ld-options" "extra-lib-dirs" + [ (flag, dir) | flag@('-':'L':dir) <- all_ldOptions ] + + , checkCCFlags [ "-O", "-Os", "-O0", "-O1", "-O2", "-O3" ] $ + PackageDistSuspicious $ + "'cc-options: -O[n]' is generally not needed. When building with " + ++ " optimisations Cabal automatically adds '-O2' for C code. " + ++ "Setting it yourself interferes with the --disable-optimization " + ++ "flag." + ] + + where all_ccOptions = [ opts | bi <- allBuildInfo pkg + , opts <- ccOptions bi ] + all_ldOptions = [ opts | bi <- allBuildInfo pkg + , opts <- ldOptions bi ] + + checkCCFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkCCFlags flags = check (any (`elem` flags) all_ccOptions) + +checkCPPOptions :: PackageDescription -> [PackageCheck] +checkCPPOptions pkg = + catMaybes [ + checkAlternatives "cpp-options" "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_cppOptions] + ] + where all_cppOptions = [ opts | bi <- allBuildInfo pkg + , opts <- cppOptions bi ] + +checkAlternatives :: String -> String -> [(String, String)] -> Maybe PackageCheck +checkAlternatives badField goodField flags = + check (not (null badFlags)) $ + PackageBuildWarning $ + "Instead of " ++ quote (badField ++ ": " ++ unwords badFlags) + ++ " use " ++ quote (goodField ++ ": " ++ unwords goodFlags) + + where (badFlags, goodFlags) = unzip flags + +checkPaths :: PackageDescription -> [PackageCheck] +checkPaths pkg = + [ PackageBuildWarning $ + quote (kind ++ ": " ++ path) + ++ " is a relative path outside of the source tree. " + ++ "This will not work when generating a tarball with 'sdist'." + | (path, kind) <- relPaths ++ absPaths + , isOutsideTree path ] + ++ + [ PackageDistInexcusable $ + quote (kind ++ ": " ++ path) ++ " is an absolute path." + | (path, kind) <- relPaths + , isAbsolute path ] + ++ + [ PackageDistInexcusable $ + quote (kind ++ ": " ++ path) ++ " points inside the 'dist' " + ++ "directory. This is not reliable because the location of this " + ++ "directory is configurable by the user (or package manager). In " + ++ "addition the layout of the 'dist' directory is subject to change " + ++ "in future versions of Cabal." + | (path, kind) <- relPaths ++ absPaths + , isInsideDist path ] + ++ + [ PackageDistInexcusable $ + "The 'ghc-options' contains the path '" ++ path ++ "' which points " + ++ "inside the 'dist' directory. This is not reliable because the " + ++ "location of this directory is configurable by the user (or package " + ++ "manager). In addition the layout of the 'dist' directory is subject " + ++ "to change in future versions of Cabal." 
+ | bi <- allBuildInfo pkg + , (GHC, flags) <- options bi + , path <- flags + , isInsideDist path ] + where + isOutsideTree path = case splitDirectories path of + "..":_ -> True + ".":"..":_ -> True + _ -> False + isInsideDist path = case map lowercase (splitDirectories path) of + "dist" :_ -> True + ".":"dist":_ -> True + _ -> False + -- paths that must be relative + relPaths = + [ (path, "extra-src-files") | path <- extraSrcFiles pkg ] + ++ [ (path, "extra-tmp-files") | path <- extraTmpFiles pkg ] + ++ [ (path, "extra-doc-files") | path <- extraDocFiles pkg ] + ++ [ (path, "data-files") | path <- dataFiles pkg ] + ++ [ (path, "data-dir") | path <- [dataDir pkg]] + ++ [ (path, "license-file") | path <- licenseFiles pkg ] + ++ concat + [ [ (path, "c-sources") | path <- cSources bi ] + ++ [ (path, "js-sources") | path <- jsSources bi ] + ++ [ (path, "install-includes") | path <- installIncludes bi ] + ++ [ (path, "hs-source-dirs") | path <- hsSourceDirs bi ] + | bi <- allBuildInfo pkg ] + -- paths that are allowed to be absolute + absPaths = concat + [ [ (path, "includes") | path <- includes bi ] + ++ [ (path, "include-dirs") | path <- includeDirs bi ] + ++ [ (path, "extra-lib-dirs") | path <- extraLibDirs bi ] + | bi <- allBuildInfo pkg ] + +--TODO: check sets of paths that would be interpreted differently between Unix +-- and windows, ie case-sensitive or insensitive. Things that might clash, or +-- conversely be distinguished. + +--TODO: use the tar path checks on all the above paths + +-- | Check that the package declares the version in the @\"cabal-version\"@ +-- field correctly. +-- +checkCabalVersion :: PackageDescription -> [PackageCheck] +checkCabalVersion pkg = + catMaybes [ + + -- check syntax of cabal-version field + check (specVersion pkg >= Version [1,10] [] + && not simpleSpecVersionRangeSyntax) $ + PackageBuildWarning $ + "Packages relying on Cabal 1.10 or later must only specify a " + ++ "version range of the form 'cabal-version: >= x.y'. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." + + -- check syntax of cabal-version field + , check (specVersion pkg < Version [1,9] [] + && not simpleSpecVersionRangeSyntax) $ + PackageDistSuspicious $ + "It is recommended that the 'cabal-version' field only specify a " + ++ "version range of the form '>= x.y'. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'. " + ++ "Tools based on Cabal 1.10 and later will ignore upper bounds." + + -- check syntax of cabal-version field + , checkVersion [1,12] simpleSpecVersionSyntax $ + PackageBuildWarning $ + "With Cabal 1.10 or earlier, the 'cabal-version' field must use " + ++ "range syntax rather than a simple version number. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." + + -- check use of test suite sections + , checkVersion [1,8] (not (null $ testSuites pkg)) $ + PackageDistInexcusable $ + "The 'test-suite' section is new in Cabal 1.10. " + ++ "Unfortunately it messes up the parser in older Cabal versions " + ++ "so you must specify at least 'cabal-version: >= 1.8', but note " + ++ "that only Cabal 1.10 and later can actually run such test suites." + + -- check use of default-language field + -- note that we do not need to do an equivalent check for the + -- other-language field since that one does not change behaviour + , checkVersion [1,10] (any isJust (buildInfoField defaultLanguage)) $ + PackageBuildWarning $ + "To use the 'default-language' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.10'." 
+ + , check (specVersion pkg >= Version [1,10] [] + && (any isNothing (buildInfoField defaultLanguage))) $ + PackageBuildWarning $ + "Packages using 'cabal-version: >= 1.10' must specify the " + ++ "'default-language' field for each component (e.g. Haskell98 or " + ++ "Haskell2010). If a component uses different languages in " + ++ "different modules then list the other ones in the " + ++ "'other-languages' field." + + -- check use of reexported-modules sections + , checkVersion [1,21] + (maybe False (not.null.reexportedModules) (library pkg)) $ + PackageDistInexcusable $ + "To use the 'reexported-module' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.21'." + + -- check use of thinning and renaming + , checkVersion [1,21] (not (null depsUsingThinningRenamingSyntax)) $ + PackageDistInexcusable $ + "The package uses " + ++ "thinning and renaming in the 'build-depends' field: " + ++ commaSep (map display depsUsingThinningRenamingSyntax) + ++ ". To use this new syntax, the package needs to specify at least" + ++ "'cabal-version: >= 1.21'." + + -- check use of 'extra-framework-dirs' field + , checkVersion [1,23] (any (not . null) (buildInfoField extraFrameworkDirs)) $ + -- Just a warning, because this won't break on old Cabal versions. + PackageDistSuspiciousWarn $ + "To use the 'extra-framework-dirs' field the package needs to specify" + ++ " at least 'cabal-version: >= 1.23'." + + -- check use of default-extensions field + -- don't need to do the equivalent check for other-extensions + , checkVersion [1,10] (any (not . null) (buildInfoField defaultExtensions)) $ + PackageBuildWarning $ + "To use the 'default-extensions' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.10'." + + -- check use of extensions field + , check (specVersion pkg >= Version [1,10] [] + && (any (not . null) (buildInfoField oldExtensions))) $ + PackageBuildWarning $ + "For packages using 'cabal-version: >= 1.10' the 'extensions' " + ++ "field is deprecated. The new 'default-extensions' field lists " + ++ "extensions that are used in all modules in the component, while " + ++ "the 'other-extensions' field lists extensions that are used in " + ++ "some modules, e.g. via the {-# LANGUAGE #-} pragma." + + -- check use of "foo (>= 1.0 && < 1.4) || >=1.8 " version-range syntax + , checkVersion [1,8] (not (null versionRangeExpressions)) $ + PackageDistInexcusable $ + "The package uses full version-range expressions " + ++ "in a 'build-depends' field: " + ++ commaSep (map displayRawDependency versionRangeExpressions) + ++ ". To use this new syntax the package needs to specify at least " + ++ "'cabal-version: >= 1.8'. Alternatively, if broader compatibility " + ++ "is important, then convert to conjunctive normal form, and use " + ++ "multiple 'build-depends:' lines, one conjunct per line." + + -- check use of "build-depends: foo == 1.*" syntax + , checkVersion [1,6] (not (null depsUsingWildcardSyntax)) $ + PackageDistInexcusable $ + "The package uses wildcard syntax in the 'build-depends' field: " + ++ commaSep (map display depsUsingWildcardSyntax) + ++ ". To use this new syntax the package need to specify at least " + ++ "'cabal-version: >= 1.6'. 
Alternatively, if broader compatibility " + ++ "is important then use: " ++ commaSep + [ display (Dependency name (eliminateWildcardSyntax versionRange)) + | Dependency name versionRange <- depsUsingWildcardSyntax ] + + -- check use of "tested-with: GHC (>= 1.0 && < 1.4) || >=1.8 " syntax + , checkVersion [1,8] (not (null testedWithVersionRangeExpressions)) $ + PackageDistInexcusable $ + "The package uses full version-range expressions " + ++ "in a 'tested-with' field: " + ++ commaSep (map displayRawDependency testedWithVersionRangeExpressions) + ++ ". To use this new syntax the package needs to specify at least " + ++ "'cabal-version: >= 1.8'." + + -- check use of "tested-with: GHC == 6.12.*" syntax + , checkVersion [1,6] (not (null testedWithUsingWildcardSyntax)) $ + PackageDistInexcusable $ + "The package uses wildcard syntax in the 'tested-with' field: " + ++ commaSep (map display testedWithUsingWildcardSyntax) + ++ ". To use this new syntax the package need to specify at least " + ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " + ++ "is important then use: " ++ commaSep + [ display (Dependency name (eliminateWildcardSyntax versionRange)) + | Dependency name versionRange <- testedWithUsingWildcardSyntax ] + + -- check use of "data-files: data/*.txt" syntax + , checkVersion [1,6] (not (null dataFilesUsingGlobSyntax)) $ + PackageDistInexcusable $ + "Using wildcards like " + ++ commaSep (map quote $ take 3 dataFilesUsingGlobSyntax) + ++ " in the 'data-files' field requires 'cabal-version: >= 1.6'. " + ++ "Alternatively if you require compatibility with earlier Cabal " + ++ "versions then list all the files explicitly." + + -- check use of "extra-source-files: mk/*.in" syntax + , checkVersion [1,6] (not (null extraSrcFilesUsingGlobSyntax)) $ + PackageDistInexcusable $ + "Using wildcards like " + ++ commaSep (map quote $ take 3 extraSrcFilesUsingGlobSyntax) + ++ " in the 'extra-source-files' field requires " + ++ "'cabal-version: >= 1.6'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then list all the files " + ++ "explicitly." + + -- check use of "source-repository" section + , checkVersion [1,6] (not (null (sourceRepos pkg))) $ + PackageDistInexcusable $ + "The 'source-repository' section is new in Cabal 1.6. " + ++ "Unfortunately it messes up the parser in earlier Cabal versions " + ++ "so you need to specify 'cabal-version: >= 1.6'." + + -- check for new licenses + , checkVersion [1,4] (license pkg `notElem` compatLicenses) $ + PackageDistInexcusable $ + "Unfortunately the license " ++ quote (display (license pkg)) + ++ " messes up the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then use 'OtherLicense'." + + -- check for new language extensions + , checkVersion [1,2,3] (not (null mentionedExtensionsThatNeedCabal12)) $ + PackageDistInexcusable $ + "Unfortunately the language extensions " + ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal12) + ++ " break the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.2.3'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then you may be able to " + ++ "use an equivalent compiler-specific flag." + + , checkVersion [1,4] (not (null mentionedExtensionsThatNeedCabal14)) $ + PackageDistInexcusable $ + "Unfortunately the language extensions " + ++ commaSep (map (quote . 
display) mentionedExtensionsThatNeedCabal14) + ++ " break the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then you may be able to " + ++ "use an equivalent compiler-specific flag." + + , check (specVersion pkg >= Version [1,23] [] + && isNothing (setupBuildInfo pkg) + && buildType pkg == Just Custom) $ + PackageBuildWarning $ + "Packages using 'cabal-version: >= 1.23' with 'build-type: Custom' " + ++ "must use a 'custom-setup' section with a 'setup-depends' field " + ++ "that specifies the dependencies of the Setup.hs script itself. " + ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " + ++ "so a simple example would be 'setup-depends: base, Cabal'." + + , check (specVersion pkg < Version [1,23] [] + && isNothing (setupBuildInfo pkg) + && buildType pkg == Just Custom) $ + PackageDistSuspiciousWarn $ + "From version 1.23 cabal supports specifiying explicit dependencies " + ++ "for Custom setup scripts. Consider using cabal-version >= 1.23 and " + ++ "adding a 'custom-setup' section with a 'setup-depends' field " + ++ "that specifies the dependencies of the Setup.hs script itself. " + ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " + ++ "so a simple example would be 'setup-depends: base, Cabal'." + ] + where + -- Perform a check on packages that use a version of the spec less than + -- the version given. This is for cases where a new Cabal version adds + -- a new feature and we want to check that it is not used prior to that + -- version. + checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck + checkVersion ver cond pc + | specVersion pkg >= Version ver [] = Nothing + | otherwise = check cond pc + + buildInfoField field = map field (allBuildInfo pkg) + dataFilesUsingGlobSyntax = filter usesGlobSyntax (dataFiles pkg) + extraSrcFilesUsingGlobSyntax = filter usesGlobSyntax (extraSrcFiles pkg) + usesGlobSyntax str = case parseFileGlob str of + Just (FileGlob _ _) -> True + _ -> False + + versionRangeExpressions = + [ dep | dep@(Dependency _ vr) <- buildDepends pkg + , usesNewVersionRangeSyntax vr ] + + testedWithVersionRangeExpressions = + [ Dependency (PackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , usesNewVersionRangeSyntax vr ] + + simpleSpecVersionRangeSyntax = + either (const True) + (foldVersionRange' + True + (\_ -> False) + (\_ -> False) (\_ -> False) + (\_ -> True) -- >= + (\_ -> False) + (\_ _ -> False) + (\_ _ -> False) (\_ _ -> False) + id) + (specVersionRaw pkg) + + -- is the cabal-version field a simple version number, rather than a range + simpleSpecVersionSyntax = + either (const True) (const False) (specVersionRaw pkg) + + usesNewVersionRangeSyntax :: VersionRange -> Bool + usesNewVersionRangeSyntax = + (> 2) -- uses the new syntax if depth is more than 2 + . 
foldVersionRange' + (1 :: Int) + (const 1) + (const 1) (const 1) + (const 1) (const 1) + (const (const 1)) + (+) (+) + (const 3) -- uses new ()'s syntax + + depsUsingWildcardSyntax = [ dep | dep@(Dependency _ vr) <- buildDepends pkg + , usesWildcardSyntax vr ] + + -- TODO: If the user writes build-depends: foo with (), this is + -- indistinguishable from build-depends: foo, so there won't be an + -- error even though there should be + depsUsingThinningRenamingSyntax = + [ name + | bi <- allBuildInfo pkg + , (name, _) <- Map.toList (targetBuildRenaming bi) ] + + testedWithUsingWildcardSyntax = + [ Dependency (PackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , usesWildcardSyntax vr ] + + usesWildcardSyntax :: VersionRange -> Bool + usesWildcardSyntax = + foldVersionRange' + False (const False) + (const False) (const False) + (const False) (const False) + (\_ _ -> True) -- the wildcard case + (||) (||) id + + eliminateWildcardSyntax = + foldVersionRange' + anyVersion thisVersion + laterVersion earlierVersion + orLaterVersion orEarlierVersion + (\v v' -> intersectVersionRanges (orLaterVersion v) (earlierVersion v')) + intersectVersionRanges unionVersionRanges id + + compatLicenses = [ GPL Nothing, LGPL Nothing, AGPL Nothing, BSD3, BSD4 + , PublicDomain, AllRightsReserved + , UnspecifiedLicense, OtherLicense ] + + mentionedExtensions = [ ext | bi <- allBuildInfo pkg + , ext <- allExtensions bi ] + mentionedExtensionsThatNeedCabal12 = + nub (filter (`elem` compatExtensionsExtra) mentionedExtensions) + + -- As of Cabal-1.4 we can add new extensions without worrying about + -- breaking old versions of cabal. + mentionedExtensionsThatNeedCabal14 = + nub (filter (`notElem` compatExtensions) mentionedExtensions) + + -- The known extensions in Cabal-1.2.3 + compatExtensions = + map EnableExtension + [ OverlappingInstances, UndecidableInstances, IncoherentInstances + , RecursiveDo, ParallelListComp, MultiParamTypeClasses + , FunctionalDependencies, Rank2Types + , RankNTypes, PolymorphicComponents, ExistentialQuantification + , ScopedTypeVariables, ImplicitParams, FlexibleContexts + , FlexibleInstances, EmptyDataDecls, CPP, BangPatterns + , TypeSynonymInstances, TemplateHaskell, ForeignFunctionInterface + , Arrows, Generics, NamedFieldPuns, PatternGuards + , GeneralizedNewtypeDeriving, ExtensibleRecords, RestrictedTypeSynonyms + , HereDocuments] ++ + map DisableExtension + [MonomorphismRestriction, ImplicitPrelude] ++ + compatExtensionsExtra + + -- The extra known extensions in Cabal-1.2.3 vs Cabal-1.1.6 + -- (Cabal-1.1.6 came with ghc-6.6. Cabal-1.2 came with ghc-6.8) + compatExtensionsExtra = + map EnableExtension + [ KindSignatures, MagicHash, TypeFamilies, StandaloneDeriving + , UnicodeSyntax, PatternSignatures, UnliftedFFITypes, LiberalTypeSynonyms + , TypeOperators, RecordWildCards, RecordPuns, DisambiguateRecordFields + , OverloadedStrings, GADTs, RelaxedPolyRec + , ExtendedDefaultRules, UnboxedTuples, DeriveDataTypeable + , ConstrainedClassMethods + ] ++ + map DisableExtension + [MonoPatBinds] + +-- | A variation on the normal 'Text' instance, shows any ()'s in the original +-- textual syntax. We need to show these otherwise it's confusing to users when +-- we complain of their presence but do not pretty print them! +-- +displayRawVersionRange :: VersionRange -> String +displayRawVersionRange = + Disp.render + . fst + . 
foldVersionRange' -- precedence: + -- All the same as the usual pretty printer, except for the parens + ( Disp.text "-any" , 0 :: Int) + (\v -> (Disp.text "==" <> disp v , 0)) + (\v -> (Disp.char '>' <> disp v , 0)) + (\v -> (Disp.char '<' <> disp v , 0)) + (\v -> (Disp.text ">=" <> disp v , 0)) + (\v -> (Disp.text "<=" <> disp v , 0)) + (\v _ -> (Disp.text "==" <> dispWild v , 0)) + (\(r1, p1) (r2, p2) -> + (punct 2 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2)) + (\(r1, p1) (r2, p2) -> + (punct 1 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1)) + (\(r, _ ) -> (Disp.parens r, 0)) -- parens + + where + dispWild (Version b _) = + Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) + <> Disp.text ".*" + punct p p' | p < p' = Disp.parens + | otherwise = id + +displayRawDependency :: Dependency -> String +displayRawDependency (Dependency pkg vr) = + display pkg ++ " " ++ displayRawVersionRange vr + + +-- ------------------------------------------------------------ +-- * Checks on the GenericPackageDescription +-- ------------------------------------------------------------ + +-- | Check the build-depends fields for any weirdness or bad practise. +-- +checkPackageVersions :: GenericPackageDescription -> [PackageCheck] +checkPackageVersions pkg = + catMaybes [ + + -- Check that the version of base is bounded above. + -- For example this bans "build-depends: base >= 3". + -- It should probably be "build-depends: base >= 3 && < 4" + -- which is the same as "build-depends: base == 3.*" + check (not (boundedAbove baseDependency)) $ + PackageDistInexcusable $ + "The dependency 'build-depends: base' does not specify an upper " + ++ "bound on the version number. Each major release of the 'base' " + ++ "package changes the API in various ways and most packages will " + ++ "need some changes to compile with it. The recommended practise " + ++ "is to specify an upper bound on the version of the 'base' " + ++ "package. This ensures your package will continue to build when a " + ++ "new major version of the 'base' package is released. If you are " + ++ "not sure what upper bound to use then use the next major " + ++ "version. For example if you have tested your package with 'base' " + ++ "version 4.5 and 4.6 then use 'build-depends: base >= 4.5 && < 4.7'." + + ] + where + -- TODO: What we really want to do is test if there exists any + -- configuration in which the base version is unbounded above. + -- However that's a bit tricky because there are many possible + -- configurations. As a cheap easy and safe approximation we will + -- pick a single "typical" configuration and check if that has an + -- open upper bound. To get a typical configuration we finalise + -- using no package index and the current platform. + finalised = finalizePackageDescription + [] (const True) buildPlatform + (unknownCompilerInfo + (CompilerId buildCompilerFlavor (Version [] [])) NoAbiTag) + [] pkg + baseDependency = case finalised of + Right (pkg', _) | not (null baseDeps) -> + foldr intersectVersionRanges anyVersion baseDeps + where + baseDeps = + [ vr | Dependency (PackageName "base") vr <- buildDepends pkg' ] + + -- Just in case finalizePackageDescription fails for any reason, + -- or if the package doesn't depend on the base package at all, + -- then we will just skip the check, since boundedAbove noVersion = True + _ -> noVersion + + boundedAbove :: VersionRange -> Bool + boundedAbove vr = case asVersionIntervals vr of + [] -> True -- this is the inconsistent version range. 
+ intervals -> case last intervals of + (_, UpperBound _ _) -> True + (_, NoUpperBound ) -> False + + +checkConditionals :: GenericPackageDescription -> [PackageCheck] +checkConditionals pkg = + catMaybes [ + + check (not $ null unknownOSs) $ + PackageDistInexcusable $ + "Unknown operating system name " + ++ commaSep (map quote unknownOSs) + + , check (not $ null unknownArches) $ + PackageDistInexcusable $ + "Unknown architecture name " + ++ commaSep (map quote unknownArches) + + , check (not $ null unknownImpls) $ + PackageDistInexcusable $ + "Unknown compiler name " + ++ commaSep (map quote unknownImpls) + ] + where + unknownOSs = [ os | OS (OtherOS os) <- conditions ] + unknownArches = [ arch | Arch (OtherArch arch) <- conditions ] + unknownImpls = [ impl | Impl (OtherCompiler impl) _ <- conditions ] + conditions = maybe [] fvs (condLibrary pkg) + ++ concatMap (fvs . snd) (condExecutables pkg) + fvs (CondNode _ _ ifs) = concatMap compfv ifs -- free variables + compfv (c, ct, mct) = condfv c ++ fvs ct ++ maybe [] fvs mct + condfv c = case c of + Var v -> [v] + Lit _ -> [] + CNot c1 -> condfv c1 + COr c1 c2 -> condfv c1 ++ condfv c2 + CAnd c1 c2 -> condfv c1 ++ condfv c2 + +checkDevelopmentOnlyFlagsBuildInfo :: BuildInfo -> [PackageCheck] +checkDevelopmentOnlyFlagsBuildInfo bi = + catMaybes [ + + check has_WerrorWall $ + PackageDistInexcusable $ + "'ghc-options: -Wall -Werror' makes the package very easy to " + ++ "break with future GHC versions because new GHC versions often " + ++ "add new warnings. Use just 'ghc-options: -Wall' instead." + ++ extraExplanation + + , check (not has_WerrorWall && has_Werror) $ + PackageDistInexcusable $ + "'ghc-options: -Werror' makes the package easy to " + ++ "break with future GHC versions because new GHC versions often " + ++ "add new warnings. " + ++ extraExplanation + + , checkFlags ["-fdefer-type-errors"] $ + PackageDistInexcusable $ + "'ghc-options: -fdefer-type-errors' is fine during development but " + ++ "is not appropriate for a distributed package. " + ++ extraExplanation + + -- -dynamic is not a debug flag + , check (any (\opt -> "-d" `isPrefixOf` opt && opt /= "-dynamic") + ghc_options) $ + PackageDistInexcusable $ + "'ghc-options: -d*' debug flags are not appropriate " + ++ "for a distributed package. " + ++ extraExplanation + + , checkFlags ["-fprof-auto", "-fprof-auto-top", "-fprof-auto-calls", + "-fprof-cafs", "-fno-prof-count-entries", + "-auto-all", "-auto", "-caf-all"] $ + PackageDistSuspicious $ + "'ghc-options: -fprof*' profiling flags are typically not " + ++ "appropriate for a distributed library package. These flags are " + ++ "useful to profile this package, but when profiling other packages " + ++ "that use this one these flags clutter the profile output with " + ++ "excessive detail. If you think other packages really want to see " + ++ "cost centres from this package then use '-fprof-auto-exported' " + ++ "which puts cost centres only on exported functions. " + ++ extraExplanation + ] + where + extraExplanation = + " Alternatively, if you want to use this, make it conditional based " + ++ "on a Cabal configuration flag (with 'manual: True' and 'default: " + ++ "False') and enable that flag during development." 
+
+    has_WerrorWall = has_Werror && ( has_Wall || has_W )
+    has_Werror     = "-Werror" `elem` ghc_options
+    has_Wall       = "-Wall"   `elem` ghc_options
+    has_W          = "-W" `elem` ghc_options
+    ghc_options = hcOptions GHC bi ++ hcProfOptions GHC bi
+                  ++ hcSharedOptions GHC bi
+
+    checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck
+    checkFlags flags = check (any (`elem` flags) ghc_options)
+
+checkDevelopmentOnlyFlags :: GenericPackageDescription -> [PackageCheck]
+checkDevelopmentOnlyFlags pkg =
+    concatMap checkDevelopmentOnlyFlagsBuildInfo
+              [ bi
+              | (conditions, bi) <- allConditionalBuildInfo
+              , not (any guardedByManualFlag conditions) ]
+  where
+    guardedByManualFlag = definitelyFalse
+
+    -- We've basically got three-valued logic here: True, False or unknown,
+    -- hence this pattern to propagate the unknown cases properly.
+    definitelyFalse (Var (Flag n)) = maybe False not (Map.lookup n manualFlags)
+    definitelyFalse (Var _)        = False
+    definitelyFalse (Lit  b)       = not b
+    definitelyFalse (CNot c)       = definitelyTrue c
+    definitelyFalse (COr  c1 c2)   = definitelyFalse c1 && definitelyFalse c2
+    definitelyFalse (CAnd c1 c2)   = definitelyFalse c1 || definitelyFalse c2
+
+    definitelyTrue (Var (Flag n)) = fromMaybe False (Map.lookup n manualFlags)
+    definitelyTrue (Var _)        = False
+    definitelyTrue (Lit  b)       = b
+    definitelyTrue (CNot c)       = definitelyFalse c
+    definitelyTrue (COr  c1 c2)   = definitelyTrue c1 || definitelyTrue c2
+    definitelyTrue (CAnd c1 c2)   = definitelyTrue c1 && definitelyTrue c2
+
+    manualFlags = Map.fromList
+                    [ (flagName flag, flagDefault flag)
+                    | flag <- genPackageFlags pkg
+                    , flagManual flag ]
+
+    allConditionalBuildInfo :: [([Condition ConfVar], BuildInfo)]
+    allConditionalBuildInfo =
+        concatMap (collectCondTreePaths libBuildInfo)
+                  (maybeToList (condLibrary pkg))
+
+     ++ concatMap (collectCondTreePaths buildInfo . snd)
+                  (condExecutables pkg)
+
+     ++ concatMap (collectCondTreePaths testBuildInfo . snd)
+                  (condTestSuites pkg)
+
+     ++ concatMap (collectCondTreePaths benchmarkBuildInfo . snd)
+                  (condBenchmarks pkg)
+
+    -- get all the leaf BuildInfo, paired up with the path (in the tree sense)
+    -- of if-conditions that guard it
+    collectCondTreePaths :: (a -> b)
+                         -> CondTree v c a
+                         -> [([Condition v], b)]
+    collectCondTreePaths mapData = go []
+      where
+        go conditions condNode =
+            -- the data at this level in the tree:
+            (reverse conditions, mapData (condTreeData condNode))
+
+          : concat
+              [ go (condition:conditions) ifThen
+              | (condition, ifThen, _) <- condTreeComponents condNode ]
+
+         ++ concat
+              [ go (condition:conditions) elseThen
+              | (condition, _, Just elseThen) <- condTreeComponents condNode ]
+
+
+-- ------------------------------------------------------------
+-- * Checks involving files in the package
+-- ------------------------------------------------------------
+
+-- | Sanity check things that require IO. It looks at the files in the
+-- package and expects to find the package unpacked at the given file path.
+--
+checkPackageFiles :: PackageDescription -> FilePath -> IO [PackageCheck]
+checkPackageFiles pkg root = checkPackageContent checkFilesIO pkg
+  where
+    checkFilesIO = CheckPackageContentOps {
+      doesFileExist        = System.doesFileExist                  . relative,
+      doesDirectoryExist   = System.doesDirectoryExist             . relative,
+      getDirectoryContents = System.Directory.getDirectoryContents . relative,
+      getFileContents      = \f -> openBinaryFile (relative f) ReadMode >>= hGetContents
+    }
+    relative path = root </> path
+
+-- | A record of operations needed to check the contents of packages.
+-- Used by 'checkPackageContent'.
+--
+data CheckPackageContentOps m = CheckPackageContentOps {
+    doesFileExist        :: FilePath -> m Bool,
+    doesDirectoryExist   :: FilePath -> m Bool,
+    getDirectoryContents :: FilePath -> m [FilePath],
+    getFileContents      :: FilePath -> m String
+  }
+
+-- | Sanity check things that require looking at files in the package.
+-- This is a generalised version of 'checkPackageFiles' that can work in any
+-- monad for which you can provide 'CheckPackageContentOps' operations.
+--
+-- The point of this extra generality is to allow doing checks in some virtual
+-- file system, for example a tarball in memory.
+--
+checkPackageContent :: Monad m => CheckPackageContentOps m
+                    -> PackageDescription
+                    -> m [PackageCheck]
+checkPackageContent ops pkg = do
+  cabalBomError   <- checkCabalFileBOM    ops
+  licenseErrors   <- checkLicensesExist   ops pkg
+  setupError      <- checkSetupExists     ops pkg
+  configureError  <- checkConfigureExists ops pkg
+  localPathErrors <- checkLocalPathsExist ops pkg
+  vcsLocation     <- checkMissingVcsInfo  ops pkg
+
+  return $ licenseErrors
+        ++ catMaybes [cabalBomError, setupError, configureError]
+        ++ localPathErrors
+        ++ vcsLocation
+
+checkCabalFileBOM :: Monad m => CheckPackageContentOps m
+                  -> m (Maybe PackageCheck)
+checkCabalFileBOM ops = do
+  epdfile <- findPackageDesc ops
+  case epdfile of
+    Left pc -> return $ Just pc
+    Right pdfile -> (flip check pc . startsWithBOM . fromUTF8) `liftM` (getFileContents ops pdfile)
+      where pc = PackageDistInexcusable $
+                 pdfile ++ " starts with a Unicode byte order mark (BOM). This may cause problems with older cabal versions."
+
+-- |Find a package description file in the given directory. Looks for
+-- @.cabal@ files. Like 'Distribution.Simple.Utils.findPackageDesc',
+-- but generalized over monads.
+findPackageDesc :: Monad m => CheckPackageContentOps m
+                -> m (Either PackageCheck FilePath) -- ^<pkgname>.cabal
+findPackageDesc ops
+ = do let dir = "."
+      files <- getDirectoryContents ops dir
+      -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal
+      -- file we filter to exclude dirs and null base file names:
+      cabalFiles <- filterM (doesFileExist ops)
+                       [ dir </> file
+                       | file <- files
+                       , let (name, ext) = splitExtension file
+                       , not (null name) && ext == ".cabal" ]
+      case cabalFiles of
+        []          -> return (Left $ PackageBuildImpossible noDesc)
+        [cabalFile] -> return (Right cabalFile)
+        multiple    -> return (Left $ PackageBuildImpossible $ multiDesc multiple)
+
+  where
+    noDesc :: String
+    noDesc = "No cabal file found.\n"
+             ++ "Please create a package description file <pkgname>.cabal"
+
+    multiDesc :: [String] -> String
+    multiDesc l = "Multiple cabal files found.\n"
+                  ++ "Please use only one of: "
+                  ++ intercalate ", " l
+
+checkLicensesExist :: Monad m => CheckPackageContentOps m
+                   -> PackageDescription
+                   -> m [PackageCheck]
+checkLicensesExist ops pkg = do
+    exists <- mapM (doesFileExist ops) (licenseFiles pkg)
+    return
+      [ PackageBuildWarning $
+           "The '" ++ fieldname ++ "' field refers to the file "
+        ++ quote file ++ " which does not exist."
+ | (file, False) <- zip (licenseFiles pkg) exists ] + where + fieldname | length (licenseFiles pkg) == 1 = "license-file" + | otherwise = "license-files" + +checkSetupExists :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m (Maybe PackageCheck) +checkSetupExists ops pkg = do + let simpleBuild = buildType pkg == Just Simple + hsexists <- doesFileExist ops "Setup.hs" + lhsexists <- doesFileExist ops "Setup.lhs" + return $ check (not simpleBuild && not hsexists && not lhsexists) $ + PackageDistInexcusable $ + "The package is missing a Setup.hs or Setup.lhs script." + +checkConfigureExists :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m (Maybe PackageCheck) +checkConfigureExists ops PackageDescription { buildType = Just Configure } = do + exists <- doesFileExist ops "configure" + return $ check (not exists) $ + PackageBuildWarning $ + "The 'build-type' is 'Configure' but there is no 'configure' script. " + ++ "You probably need to run 'autoreconf -i' to generate it." +checkConfigureExists _ _ = return Nothing + +checkLocalPathsExist :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkLocalPathsExist ops pkg = do + let dirs = [ (dir, kind) + | bi <- allBuildInfo pkg + , (dir, kind) <- + [ (dir, "extra-lib-dirs") | dir <- extraLibDirs bi ] + ++ [ (dir, "extra-framework-dirs") + | dir <- extraFrameworkDirs bi ] + ++ [ (dir, "include-dirs") | dir <- includeDirs bi ] + ++ [ (dir, "hs-source-dirs") | dir <- hsSourceDirs bi ] + , isRelative dir ] + missing <- filterM (liftM not . doesDirectoryExist ops . fst) dirs + return [ PackageBuildWarning { + explanation = quote (kind ++ ": " ++ dir) + ++ " directory does not exist." + } + | (dir, kind) <- missing ] + +checkMissingVcsInfo :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkMissingVcsInfo ops pkg | null (sourceRepos pkg) = do + vcsInUse <- liftM or $ mapM (doesDirectoryExist ops) repoDirnames + if vcsInUse + then return [ PackageDistSuspicious message ] + else return [] + where + repoDirnames = [ dirname | repo <- knownRepoTypes + , dirname <- repoTypeDirname repo ] + message = "When distributing packages it is encouraged to specify source " + ++ "control information in the .cabal file using one or more " + ++ "'source-repository' sections. See the Cabal user guide for " + ++ "details." + +checkMissingVcsInfo _ _ = return [] + +repoTypeDirname :: RepoType -> [FilePath] +repoTypeDirname Darcs = ["_darcs"] +repoTypeDirname Git = [".git"] +repoTypeDirname SVN = [".svn"] +repoTypeDirname CVS = ["CVS"] +repoTypeDirname Mercurial = [".hg"] +repoTypeDirname GnuArch = [".arch-params"] +repoTypeDirname Bazaar = [".bzr"] +repoTypeDirname Monotone = ["_MTN"] +repoTypeDirname _ = [] + +-- ------------------------------------------------------------ +-- * Checks involving files in the package +-- ------------------------------------------------------------ + +-- | Check the names of all files in a package for portability problems. This +-- should be done for example when creating or validating a package tarball. +-- +checkPackageFileNames :: [FilePath] -> [PackageCheck] +checkPackageFileNames files = + (take 1 . mapMaybe checkWindowsPath $ files) + ++ (take 1 . mapMaybe checkTarPath $ files) + -- If we get any of these checks triggering then we're likely to get + -- many, and that's probably not helpful, so return at most one. 
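+
+-- A rough illustration of the "return at most one" behaviour above, using
+-- purely hypothetical file names (not taken from any real package): even if
+-- several paths are problematic, only the first offender per category is
+-- reported.
+--
+-- > checkPackageFileNames ["src/what?.hs", "src/also|bad.hs"]
+-- >   -- yields a single warning, about the Windows-invalid name "src/what?.hs"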
+ +checkWindowsPath :: FilePath -> Maybe PackageCheck +checkWindowsPath path = + check (not $ FilePath.Windows.isValid path') $ + PackageDistInexcusable $ + "Unfortunately, the file " ++ quote path ++ " is not a valid file " + ++ "name on Windows which would cause portability problems for this " + ++ "package. Windows file names cannot contain any of the characters " + ++ "\":*?<>|\" and there are a few reserved names including \"aux\", " + ++ "\"nul\", \"con\", \"prn\", \"com1-9\", \"lpt1-9\" and \"clock$\"." + where + path' = ".\\" ++ path + -- force a relative name to catch invalid file names like "f:oo" which + -- otherwise parse as file "oo" in the current directory on the 'f' drive. + +-- | Check a file name is valid for the portable POSIX tar format. +-- +-- The POSIX tar format has a restriction on the length of file names. It is +-- unfortunately not a simple restriction like a maximum length. The exact +-- restriction is that either the whole path be 100 characters or less, or it +-- be possible to split the path on a directory separator such that the first +-- part is 155 characters or less and the second part 100 characters or less. +-- +checkTarPath :: FilePath -> Maybe PackageCheck +checkTarPath path + | length path > 255 = Just longPath + | otherwise = case pack nameMax (reverse (splitPath path)) of + Left err -> Just err + Right [] -> Nothing + Right (first:rest) -> case pack prefixMax remainder of + Left err -> Just err + Right [] -> Nothing + Right (_:_) -> Just noSplit + where + -- drop the '/' between the name and prefix: + remainder = init first : rest + + where + nameMax, prefixMax :: Int + nameMax = 100 + prefixMax = 155 + + pack _ [] = Left emptyName + pack maxLen (c:cs) + | n > maxLen = Left longName + | otherwise = Right (pack' maxLen n cs) + where n = length c + + pack' maxLen n (c:cs) + | n' <= maxLen = pack' maxLen n' cs + where n' = n + length c + pack' _ _ cs = cs + + longPath = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. The maximum length is 255 ASCII characters.\n" + ++ "The file in question is:\n " ++ path + longName = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. The maximum length for the name part (including " + ++ "extension) is 100 ASCII characters. The maximum length for any " + ++ "individual directory component is 155.\n" + ++ "The file in question is:\n " ++ path + noSplit = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. While the total length is less than 255 ASCII " + ++ "characters, there are unfortunately further restrictions. It has to " + ++ "be possible to split the file path on a directory separator into " + ++ "two parts such that the first part fits in 155 characters or less " + ++ "and the second part fits in 100 characters or less. Basically you " + ++ "have to make the file name or directory names shorter, or you could " + ++ "split a long directory name into nested subdirectories with shorter " + ++ "names.\nThe file in question is:\n " ++ path + emptyName = PackageDistInexcusable $ + "Encountered a file with an empty name, something is very wrong! " + ++ "Files with an empty name cannot be stored in a tar archive or in " + ++ "standard file systems." 
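+
+-- A rough illustration of the length rules above, using hypothetical paths
+-- built with 'replicate' and 'intercalate' (examples only, not real files):
+--
+-- > checkTarPath (replicate 260 'x')
+-- >   -- Just longPath: the whole path exceeds 255 characters
+-- >
+-- > checkTarPath (replicate 150 'd' ++ "/" ++ replicate 90 'f')
+-- >   -- Nothing: 241 characters, splitting as 150 + 90 fits the 155/100 limits
+-- >
+-- > checkTarPath (intercalate "/" (replicate 5 (replicate 50 'x')))
+-- >   -- Just noSplit: 254 characters in total, but no split point gives a
+-- >   -- prefix of at most 155 and a name of at most 100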
+ +-- ------------------------------------------------------------ +-- * Utils +-- ------------------------------------------------------------ + +quote :: String -> String +quote s = "'" ++ s ++ "'" + +commaSep :: [String] -> String +commaSep = intercalate ", " + +dups :: Ord a => [a] -> [a] +dups xs = [ x | (x:_:_) <- group (sort xs) ] + +fileExtensionSupportedLanguage :: FilePath -> Bool +fileExtensionSupportedLanguage path = + isHaskell || isC + where + extension = takeExtension path + isHaskell = extension `elem` [".hs", ".lhs"] + isC = isJust (filenameCDialect extension) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Configuration.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Configuration.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Configuration.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Configuration.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,781 @@ +{-# LANGUAGE CPP #-} +-- -fno-warn-deprecations for use of Map.foldWithKey +{-# OPTIONS_GHC -fno-warn-deprecations #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Configuration +-- Copyright : Thomas Schilling, 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is about the cabal configurations feature. It exports +-- 'finalizePackageDescription' and 'flattenPackageDescription' which are +-- functions for converting 'GenericPackageDescription's down to +-- 'PackageDescription's. It has code for working with the tree of conditions +-- and resolving or flattening conditions. + +module Distribution.PackageDescription.Configuration ( + finalizePackageDescription, + flattenPackageDescription, + + -- Utils + parseCondition, + freeVars, + extractCondition, + extractConditions, + addBuildableCondition, + mapCondTree, + mapTreeData, + mapTreeConds, + mapTreeConstrs, + transformAllBuildInfos, + transformAllBuildDepends, + ) where + +import Control.Applicative -- 7.10 -Werror workaround. +import Prelude + +import Distribution.Package +import Distribution.PackageDescription +import Distribution.PackageDescription.Utils +import Distribution.Version +import Distribution.Compiler +import Distribution.System +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Compat.ReadP as ReadP hiding ( char ) +import qualified Distribution.Compat.ReadP as ReadP ( char ) +import Distribution.Compat.Semigroup as Semi + +import Control.Arrow (first) +import Data.Char ( isAlphaNum ) +import Data.Maybe ( mapMaybe, maybeToList ) +import Data.Map ( Map, fromListWith, toList ) +import qualified Data.Map as Map +import Data.Tree ( Tree(Node) ) + +------------------------------------------------------------------------------ + +-- | Simplify the condition and return its free variables. +simplifyCondition :: Condition c + -> (c -> Either d Bool) -- ^ (partial) variable assignment + -> (Condition d, [d]) +simplifyCondition cond i = fv . 
walk $ cond + where + walk cnd = case cnd of + Var v -> either Var Lit (i v) + Lit b -> Lit b + CNot c -> case walk c of + Lit True -> Lit False + Lit False -> Lit True + c' -> CNot c' + COr c d -> case (walk c, walk d) of + (Lit False, d') -> d' + (Lit True, _) -> Lit True + (c', Lit False) -> c' + (_, Lit True) -> Lit True + (c',d') -> COr c' d' + CAnd c d -> case (walk c, walk d) of + (Lit False, _) -> Lit False + (Lit True, d') -> d' + (_, Lit False) -> Lit False + (c', Lit True) -> c' + (c',d') -> CAnd c' d' + -- gather free vars + fv c = (c, fv' c) + fv' c = case c of + Var v -> [v] + Lit _ -> [] + CNot c' -> fv' c' + COr c1 c2 -> fv' c1 ++ fv' c2 + CAnd c1 c2 -> fv' c1 ++ fv' c2 + +-- | Simplify a configuration condition using the OS and arch names. Returns +-- the names of all the flags occurring in the condition. +simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar + -> (Condition FlagName, [FlagName]) +simplifyWithSysParams os arch cinfo cond = (cond', flags) + where + (cond', flags) = simplifyCondition cond interp + interp (OS os') = Right $ os' == os + interp (Arch arch') = Right $ arch' == arch + interp (Impl comp vr) + | matchImpl (compilerInfoId cinfo) = Right True + | otherwise = case compilerInfoCompat cinfo of + -- fixme: treat Nothing as unknown, rather than empty list once we + -- support partial resolution of system parameters + Nothing -> Right False + Just compat -> Right (any matchImpl compat) + where + matchImpl (CompilerId c v) = comp == c && v `withinRange` vr + interp (Flag f) = Left f + +-- TODO: Add instances and check +-- +-- prop_sC_idempotent cond a o = cond' == cond'' +-- where +-- cond' = simplifyCondition cond a o +-- cond'' = simplifyCondition cond' a o +-- +-- prop_sC_noLits cond a o = isLit res || not (hasLits res) +-- where +-- res = simplifyCondition cond a o +-- hasLits (Lit _) = True +-- hasLits (CNot c) = hasLits c +-- hasLits (COr l r) = hasLits l || hasLits r +-- hasLits (CAnd l r) = hasLits l || hasLits r +-- hasLits _ = False +-- + +-- | Parse a configuration condition from a string. +parseCondition :: ReadP r (Condition ConfVar) +parseCondition = condOr + where + condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr + condAnd = sepBy1 cond (oper "&&")>>= return . foldl1 CAnd + cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond + +++ archCond +++ flagCond +++ implCond ) + inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp) + notCond = ReadP.char '!' >> sp >> cond >>= return . CNot + osCond = string "os" >> sp >> inparens osIdent >>= return . Var + archCond = string "arch" >> sp >> inparens archIdent >>= return . Var + flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var + implCond = string "impl" >> sp >> inparens implIdent >>= return . Var + boolLiteral = fmap Lit parse + archIdent = fmap Arch parse + osIdent = fmap OS parse + flagIdent = fmap (Flag . FlagName . 
lowercase) (munch1 isIdentChar) + isIdentChar c = isAlphaNum c || c == '_' || c == '-' + oper s = sp >> string s >> sp + sp = skipSpaces + implIdent = do i <- parse + vr <- sp >> option anyVersion parse + return $ Impl i vr + +------------------------------------------------------------------------------ + +mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w) + -> CondTree v c a -> CondTree w d b +mapCondTree fa fc fcnd (CondNode a c ifs) = + CondNode (fa a) (fc c) (map g ifs) + where + g (cnd, t, me) = (fcnd cnd, mapCondTree fa fc fcnd t, + fmap (mapCondTree fa fc fcnd) me) + +mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a +mapTreeConstrs f = mapCondTree id f id + +mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a +mapTreeConds f = mapCondTree id id f + +mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b +mapTreeData f = mapCondTree f id id + +-- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for +-- clarity. +data DepTestRslt d = DepOk | MissingDeps d + +instance Semigroup d => Monoid (DepTestRslt d) where + mempty = DepOk + mappend = (Semi.<>) + +instance Semigroup d => Semigroup (DepTestRslt d) where + DepOk <> x = x + x <> DepOk = x + (MissingDeps d) <> (MissingDeps d') = MissingDeps (d <> d') + + +-- | Try to find a flag assignment that satisfies the constraints of all trees. +-- +-- Returns either the missing dependencies, or a tuple containing the +-- resulting data, the associated dependencies, and the chosen flag +-- assignments. +-- +-- In case of failure, the union of the dependencies that led to backtracking +-- on all branches is returned. +-- [TODO: Could also be specified with a function argument.] +-- +-- TODO: The current algorithm is rather naive. A better approach would be to: +-- +-- * Rule out possible paths, by taking a look at the associated dependencies. +-- +-- * Infer the required values for the conditions of these paths, and +-- calculate the required domains for the variables used in these +-- conditions. Then picking a flag assignment would be linear (I guess). +-- +-- This would require some sort of SAT solving, though, thus it's not +-- implemented unless we really need it. +-- +resolveWithFlags :: + [(FlagName,[Bool])] + -- ^ Domain for each flag name, will be tested in order. + -> OS -- ^ OS as returned by Distribution.System.buildOS + -> Arch -- ^ Arch as returned by Distribution.System.buildArch + -> CompilerInfo -- ^ Compiler information + -> [Dependency] -- ^ Additional constraints + -> [CondTree ConfVar [Dependency] PDTagged] + -> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function. + -> Either [Dependency] (TargetSet PDTagged, FlagAssignment) + -- ^ Either the missing dependencies (error case), or a pair of + -- (set of build targets with dependencies, chosen flag assignments) +resolveWithFlags dom os arch impl constrs trees checkDeps = + either (Left . fromDepMapUnion) Right $ explore (build [] dom) + where + extraConstrs = toDepMap constrs + + -- simplify trees by (partially) evaluating all conditions and converting + -- dependencies to dependency maps. + simplifiedTrees :: [CondTree FlagName DependencyMap PDTagged] + simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps + . addBuildableConditionPDTagged + . mapTreeConds (fst . simplifyWithSysParams os arch impl)) + trees + + -- @explore@ searches a tree of assignments, backtracking whenever a flag + -- introduces a dependency that cannot be satisfied. 
If there is no + -- solution, @explore@ returns the union of all dependencies that caused + -- it to backtrack. Since the tree is constructed lazily, we avoid some + -- computation overhead in the successful case. + explore :: Tree FlagAssignment + -> Either DepMapUnion (TargetSet PDTagged, FlagAssignment) + explore (Node flags ts) = + let targetSet = TargetSet $ flip map simplifiedTrees $ + -- apply additional constraints to all dependencies + first (`constrainBy` extraConstrs) . + simplifyCondTree (env flags) + deps = overallDependencies targetSet + in case checkDeps (fromDepMap deps) of + DepOk | null ts -> Right (targetSet, flags) + | otherwise -> tryAll $ map explore ts + MissingDeps mds -> Left (toDepMapUnion mds) + + -- Builds a tree of all possible flag assignments. Internal nodes + -- have only partial assignments. + build :: FlagAssignment -> [(FlagName, [Bool])] -> Tree FlagAssignment + build assigned [] = Node assigned [] + build assigned ((fn, vals) : unassigned) = + Node assigned $ map (\v -> build ((fn, v) : assigned) unassigned) vals + + tryAll :: [Either DepMapUnion a] -> Either DepMapUnion a + tryAll = foldr mp mz + + -- special version of `mplus' for our local purposes + mp :: Either DepMapUnion a -> Either DepMapUnion a -> Either DepMapUnion a + mp m@(Right _) _ = m + mp _ m@(Right _) = m + mp (Left xs) (Left ys) = + let union = Map.foldrWithKey (Map.insertWith' combine) + (unDepMapUnion xs) (unDepMapUnion ys) + combine x y = simplifyVersionRange $ unionVersionRanges x y + in union `seq` Left (DepMapUnion union) + + -- `mzero' + mz :: Either DepMapUnion a + mz = Left (DepMapUnion Map.empty) + + env :: FlagAssignment -> FlagName -> Either FlagName Bool + env flags flag = (maybe (Left flag) Right . lookup flag) flags + +-- | Transforms a 'CondTree' by putting the input under the "then" branch of a +-- conditional that is True when Buildable is True. If 'addBuildableCondition' +-- can determine that Buildable is always True, it returns the input unchanged. +-- If Buildable is always False, it returns the empty 'CondTree'. +addBuildableCondition :: (Eq v, Monoid a, Monoid c) => (a -> BuildInfo) + -> CondTree v c a + -> CondTree v c a +addBuildableCondition getInfo t = + case extractCondition (buildable . getInfo) t of + Lit True -> t + Lit False -> CondNode mempty mempty [] + c -> CondNode mempty mempty [(c, t, Nothing)] + +-- | This is a special version of 'addBuildableCondition' for the 'PDTagged' +-- type. +-- +-- It is not simply a specialisation. It is more complicated than it +-- ought to be because of the way the 'PDTagged' monoid instance works. The +-- @mempty = 'PDNull'@ forgets the component type, which has the effect of +-- completely deleting components that are not buildable. +-- +-- See for more details. +-- +addBuildableConditionPDTagged :: (Eq v, Monoid c) => + CondTree v c PDTagged + -> CondTree v c PDTagged +addBuildableConditionPDTagged t = + case extractCondition (buildable . getInfo) t of + Lit True -> t + Lit False -> deleteConstraints t + c -> CondNode mempty mempty [(c, t, Just (deleteConstraints t))] + where + deleteConstraints = mapTreeConstrs (const mempty) + + getInfo :: PDTagged -> BuildInfo + getInfo (Lib l) = libBuildInfo l + getInfo (Exe _ e) = buildInfo e + getInfo (Test _ test) = testBuildInfo test + getInfo (Bench _ b) = benchmarkBuildInfo b + getInfo PDNull = mempty + + +-- Note: extracting buildable conditions. 
+-- -------------------------------------- +-- +-- If the conditions in a cond tree lead to Buildable being set to False, then +-- none of the dependencies for this cond tree should actually be taken into +-- account. On the other hand, some of the flags may only be decided in the +-- solver, so we cannot necessarily make the decision whether a component is +-- Buildable or not prior to solving. +-- +-- What we are doing here is to partially evaluate a condition tree in order to +-- extract the condition under which Buildable is True. The predicate determines +-- whether data under a 'CondTree' is buildable. + + +-- | Extract the condition matched by the given predicate from a cond tree. +-- +-- We use this mainly for extracting buildable conditions (see the Note above), +-- but the function is in fact more general. +extractCondition :: Eq v => (a -> Bool) -> CondTree v c a -> Condition v +extractCondition p = go + where + go (CondNode x _ cs) | not (p x) = Lit False + | otherwise = goList cs + + goList [] = Lit True + goList ((c, t, e) : cs) = + let + ct = go t + ce = maybe (Lit True) go e + in + ((c `cAnd` ct) `cOr` (CNot c `cAnd` ce)) `cAnd` goList cs + +-- | Extract conditions matched by the given predicate from all cond trees in a +-- 'GenericPackageDescription'. +extractConditions :: (BuildInfo -> Bool) -> GenericPackageDescription + -> [Condition ConfVar] +extractConditions f gpkg = + concat [ + maybeToList $ extractCondition (f . libBuildInfo) <$> condLibrary gpkg + , extractCondition (f . buildInfo) . snd <$> condExecutables gpkg + , extractCondition (f . testBuildInfo) . snd <$> condTestSuites gpkg + , extractCondition (f . benchmarkBuildInfo) . snd <$> condBenchmarks gpkg + ] + + +-- | A map of dependencies that combines version ranges using 'unionVersionRanges'. +newtype DepMapUnion = DepMapUnion { unDepMapUnion :: Map PackageName VersionRange } + +toDepMapUnion :: [Dependency] -> DepMapUnion +toDepMapUnion ds = + DepMapUnion $ fromListWith unionVersionRanges [ (p,vr) | Dependency p vr <- ds ] + +fromDepMapUnion :: DepMapUnion -> [Dependency] +fromDepMapUnion m = [ Dependency p vr | (p,vr) <- toList (unDepMapUnion m) ] + +-- | A map of dependencies. Newtyped since the default monoid instance is not +-- appropriate. The monoid instance uses 'intersectVersionRanges'. +newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange } + deriving (Show, Read) + +instance Monoid DependencyMap where + mempty = DependencyMap Map.empty + mappend = (Semi.<>) + +instance Semigroup DependencyMap where + (DependencyMap a) <> (DependencyMap b) = + DependencyMap (Map.unionWith intersectVersionRanges a b) + +toDepMap :: [Dependency] -> DependencyMap +toDepMap ds = + DependencyMap $ fromListWith intersectVersionRanges [ (p,vr) | Dependency p vr <- ds ] + +fromDepMap :: DependencyMap -> [Dependency] +fromDepMap m = [ Dependency p vr | (p,vr) <- toList (unDependencyMap m) ] + +-- | Flattens a CondTree using a partial flag assignment. When a condition +-- cannot be evaluated, both branches are ignored. +simplifyCondTree :: (Monoid a, Monoid d) => + (v -> Either v Bool) + -> CondTree v d a + -> (d, a) +simplifyCondTree env (CondNode a d ifs) = + mconcat $ (d, a) : mapMaybe simplifyIf ifs + where + simplifyIf (cnd, t, me) = + case simplifyCondition cnd env of + (Lit True, _) -> Just $ simplifyCondTree env t + (Lit False, _) -> fmap (simplifyCondTree env) me + _ -> Nothing + +-- | Flatten a CondTree. This will resolve the CondTree by taking all +-- possible paths into account. 
Note that since branches represent exclusive +-- choices this may not result in a \"sane\" result. +ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c) +ignoreConditions (CondNode a c ifs) = (a, c) `mappend` mconcat (concatMap f ifs) + where f (_, t, me) = ignoreConditions t + : maybeToList (fmap ignoreConditions me) + +freeVars :: CondTree ConfVar c a -> [FlagName] +freeVars t = [ f | Flag f <- freeVars' t ] + where + freeVars' (CondNode _ _ ifs) = concatMap compfv ifs + compfv (c, ct, mct) = condfv c ++ freeVars' ct ++ maybe [] freeVars' mct + condfv c = case c of + Var v -> [v] + Lit _ -> [] + CNot c' -> condfv c' + COr c1 c2 -> condfv c1 ++ condfv c2 + CAnd c1 c2 -> condfv c1 ++ condfv c2 + + +------------------------------------------------------------------------------ + +-- | A set of targets with their package dependencies +newtype TargetSet a = TargetSet [(DependencyMap, a)] + +-- | Combine the target-specific dependencies in a TargetSet to give the +-- dependencies for the package as a whole. +overallDependencies :: TargetSet PDTagged -> DependencyMap +overallDependencies (TargetSet targets) = mconcat depss + where + (depss, _) = unzip $ filter (removeDisabledSections . snd) targets + removeDisabledSections :: PDTagged -> Bool + removeDisabledSections (Lib l) = buildable (libBuildInfo l) + removeDisabledSections (Exe _ e) = buildable (buildInfo e) + removeDisabledSections (Test _ t) = testEnabled t && buildable (testBuildInfo t) + removeDisabledSections (Bench _ b) = benchmarkEnabled b && buildable (benchmarkBuildInfo b) + removeDisabledSections PDNull = True + +-- Apply extra constraints to a dependency map. +-- Combines dependencies where the result will only contain keys from the left +-- (first) map. If a key also exists in the right map, both constraints will +-- be intersected. +constrainBy :: DependencyMap -- ^ Input map + -> DependencyMap -- ^ Extra constraints + -> DependencyMap +constrainBy left extra = + DependencyMap $ + Map.foldWithKey tightenConstraint (unDependencyMap left) + (unDependencyMap extra) + where tightenConstraint n c l = + case Map.lookup n l of + Nothing -> l + Just vr -> Map.insert n (intersectVersionRanges vr c) l + +-- | Collect up the targets in a TargetSet of tagged targets, storing the +-- dependencies as we go. +flattenTaggedTargets :: TargetSet PDTagged -> + (Maybe Library, [(String, Executable)], [(String, TestSuite)] + , [(String, Benchmark)]) +flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, [], [], []) targets + where + untag (_, Lib _) (Just _, _, _, _) = userBug "Only one library expected" + untag (deps, Lib l) (Nothing, exes, tests, bms) = + (Just l', exes, tests, bms) + where + l' = l { + libBuildInfo = (libBuildInfo l) { targetBuildDepends = fromDepMap deps } + } + untag (deps, Exe n e) (mlib, exes, tests, bms) + | any ((== n) . fst) exes = + userBug $ "There exist several exes with the same name: '" ++ n ++ "'" + | any ((== n) . fst) tests = + userBug $ "There exists a test with the same name as an exe: '" ++ n ++ "'" + | any ((== n) . fst) bms = + userBug $ "There exists a benchmark with the same name as an exe: '" ++ n ++ "'" + | otherwise = (mlib, (n, e'):exes, tests, bms) + where + e' = e { + buildInfo = (buildInfo e) { targetBuildDepends = fromDepMap deps } + } + untag (deps, Test n t) (mlib, exes, tests, bms) + | any ((== n) . fst) tests = + userBug $ "There exist several tests with the same name: '" ++ n ++ "'" + | any ((== n) . 
fst) exes = + userBug $ "There exists an exe with the same name as the test: '" ++ n ++ "'" + | any ((== n) . fst) bms = + userBug $ "There exists a benchmark with the same name as the test: '" ++ n ++ "'" + | otherwise = (mlib, exes, (n, t'):tests, bms) + where + t' = t { + testBuildInfo = (testBuildInfo t) + { targetBuildDepends = fromDepMap deps } + } + untag (deps, Bench n b) (mlib, exes, tests, bms) + | any ((== n) . fst) bms = + userBug $ "There exist several benchmarks with the same name: '" ++ n ++ "'" + | any ((== n) . fst) exes = + userBug $ "There exists an exe with the same name as the benchmark: '" ++ n ++ "'" + | any ((== n) . fst) tests = + userBug $ "There exists a test with the same name as the benchmark: '" ++ n ++ "'" + | otherwise = (mlib, exes, tests, (n, b'):bms) + where + b' = b { + benchmarkBuildInfo = (benchmarkBuildInfo b) + { targetBuildDepends = fromDepMap deps } + } + untag (_, PDNull) x = x -- actually this should not happen, but let's be liberal + + +------------------------------------------------------------------------------ +-- Convert GenericPackageDescription to PackageDescription +-- + +data PDTagged = Lib Library + | Exe String Executable + | Test String TestSuite + | Bench String Benchmark + | PDNull + deriving Show + +instance Monoid PDTagged where + mempty = PDNull + mappend = (Semi.<>) + +instance Semigroup PDTagged where + PDNull <> x = x + x <> PDNull = x + Lib l <> Lib l' = Lib (l <> l') + Exe n e <> Exe n' e' | n == n' = Exe n (e <> e') + Test n t <> Test n' t' | n == n' = Test n (t <> t') + Bench n b <> Bench n' b' | n == n' = Bench n (b <> b') + _ <> _ = cabalBug "Cannot combine incompatible tags" + +-- | Create a package description with all configurations resolved. +-- +-- This function takes a `GenericPackageDescription` and several environment +-- parameters and tries to generate `PackageDescription` by finding a flag +-- assignment that result in satisfiable dependencies. +-- +-- It takes as inputs a not necessarily complete specifications of flags +-- assignments, an optional package index as well as platform parameters. If +-- some flags are not assigned explicitly, this function will try to pick an +-- assignment that causes this function to succeed. The package index is +-- optional since on some platforms we cannot determine which packages have +-- been installed before. When no package index is supplied, every dependency +-- is assumed to be satisfiable, therefore all not explicitly assigned flags +-- will get their default values. +-- +-- This function will fail if it cannot find a flag assignment that leads to +-- satisfiable dependencies. (It will not try alternative assignments for +-- explicitly specified flags.) In case of failure it will return the missing +-- dependencies that it encountered when trying different flag assignments. +-- On success, it will return the package description and the full flag +-- assignment chosen. +-- +finalizePackageDescription :: + FlagAssignment -- ^ Explicitly specified flag assignments + -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of + -- available packages? If this is unknown then use + -- True. + -> Platform -- ^ The 'Arch' and 'OS' + -> CompilerInfo -- ^ Compiler information + -> [Dependency] -- ^ Additional constraints + -> GenericPackageDescription + -> Either [Dependency] + (PackageDescription, FlagAssignment) + -- ^ Either missing dependencies or the resolved package + -- description along with the flag assignments chosen. 
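-- A minimal usage sketch, kept in a comment like the resolveWithFlags example
-- further below (illustrative only; 'buildPlatform' is assumed to come from
-- Distribution.System, and 'unknownCompilerInfo', 'AbiTag', 'CompilerId' and
-- 'GHC' from Distribution.Compiler, as in the Cabal-1.24 API):
--
-- > finalizeExample :: GenericPackageDescription -> IO ()
-- > finalizeExample gpd =
-- >   case finalizePackageDescription [] (const True) buildPlatform
-- >          cinfo [] gpd of
-- >     Right (pd, flags) -> print (package pd, flags)
-- >     Left missing      -> putStrLn ("missing deps: " ++ show missing)
-- >   where
-- >     cinfo = unknownCompilerInfo
-- >               (CompilerId GHC (Version [8,0,1] [])) NoAbiTag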
+finalizePackageDescription userflags satisfyDep + (Platform arch os) impl constraints + (GenericPackageDescription pkg flags mlib0 exes0 tests0 bms0) = + case resolveFlags of + Right ((mlib, exes', tests', bms'), targetSet, flagVals) -> + Right ( pkg { library = mlib + , executables = exes' + , testSuites = tests' + , benchmarks = bms' + , buildDepends = fromDepMap (overallDependencies targetSet) + } + , flagVals ) + + Left missing -> Left missing + where + -- Combine lib, exes, and tests into one list of @CondTree@s with tagged data + condTrees = maybeToList (fmap (mapTreeData Lib) mlib0 ) + ++ map (\(name,tree) -> mapTreeData (Exe name) tree) exes0 + ++ map (\(name,tree) -> mapTreeData (Test name) tree) tests0 + ++ map (\(name,tree) -> mapTreeData (Bench name) tree) bms0 + + resolveFlags = + case resolveWithFlags flagChoices os arch impl constraints condTrees check of + Right (targetSet, fs) -> + let (mlib, exes, tests, bms) = flattenTaggedTargets targetSet in + Right ( (fmap libFillInDefaults mlib, + map (\(n,e) -> (exeFillInDefaults e) { exeName = n }) exes, + map (\(n,t) -> (testFillInDefaults t) { testName = n }) tests, + map (\(n,b) -> (benchFillInDefaults b) { benchmarkName = n }) bms), + targetSet, fs) + Left missing -> Left missing + + flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags + d2c manual n b = case lookup n userflags of + Just val -> [val] + Nothing + | manual -> [b] + | otherwise -> [b, not b] + --flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices + check ds = let missingDeps = filter (not . satisfyDep) ds + in if null missingDeps + then DepOk + else MissingDeps missingDeps + +{- +let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] []) +let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] []) + +let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])] +let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index +let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds +resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ... +resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ... +-} + +-- | Flatten a generic package description by ignoring all conditions and just +-- join the field descriptors into on package description. Note, however, +-- that this may lead to inconsistent field values, since all values are +-- joined into one field, which may not be possible in the original package +-- description, due to the use of exclusive choices (if ... else ...). +-- +-- TODO: One particularly tricky case is defaulting. In the original package +-- description, e.g., the source directory might either be the default or a +-- certain, explicitly set path. Since defaults are filled in only after the +-- package has been resolved and when no explicit value has been set, the +-- default path will be missing from the package description returned by this +-- function. 
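-- To illustrate the exclusive-choices caveat above (illustrative): for a
-- package containing
--
-- >   if os(windows)
-- >     build-depends: Win32
-- >   else
-- >     build-depends: unix
--
-- the flattened description lists both Win32 and unix in 'buildDepends'.
-- That is fine for tools that want a superset view of the package, but not
-- for actually configuring a build; use 'finalizePackageDescription' for
-- that.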
+flattenPackageDescription :: GenericPackageDescription -> PackageDescription +flattenPackageDescription (GenericPackageDescription pkg _ mlib0 exes0 tests0 bms0) = + pkg { library = mlib + , executables = reverse exes + , testSuites = reverse tests + , benchmarks = reverse bms + , buildDepends = ldeps ++ reverse edeps ++ reverse tdeps ++ reverse bdeps + } + where + (mlib, ldeps) = case mlib0 of + Just lib -> let (l,ds) = ignoreConditions lib in + (Just (libFillInDefaults l), ds) + Nothing -> (Nothing, []) + (exes, edeps) = foldr flattenExe ([],[]) exes0 + (tests, tdeps) = foldr flattenTst ([],[]) tests0 + (bms, bdeps) = foldr flattenBm ([],[]) bms0 + flattenExe (n, t) (es, ds) = + let (e, ds') = ignoreConditions t in + ( (exeFillInDefaults $ e { exeName = n }) : es, ds' ++ ds ) + flattenTst (n, t) (es, ds) = + let (e, ds') = ignoreConditions t in + ( (testFillInDefaults $ e { testName = n }) : es, ds' ++ ds ) + flattenBm (n, t) (es, ds) = + let (e, ds') = ignoreConditions t in + ( (benchFillInDefaults $ e { benchmarkName = n }) : es, ds' ++ ds ) + +-- This is in fact rather a hack. The original version just overrode the +-- default values, however, when adding conditions we had to switch to a +-- modifier-based approach. There, nothing is ever overwritten, but only +-- joined together. +-- +-- This is the cleanest way i could think of, that doesn't require +-- changing all field parsing functions to return modifiers instead. +libFillInDefaults :: Library -> Library +libFillInDefaults lib@(Library { libBuildInfo = bi }) = + lib { libBuildInfo = biFillInDefaults bi } + +exeFillInDefaults :: Executable -> Executable +exeFillInDefaults exe@(Executable { buildInfo = bi }) = + exe { buildInfo = biFillInDefaults bi } + +testFillInDefaults :: TestSuite -> TestSuite +testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) = + tst { testBuildInfo = biFillInDefaults bi } + +benchFillInDefaults :: Benchmark -> Benchmark +benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) = + bm { benchmarkBuildInfo = biFillInDefaults bi } + +biFillInDefaults :: BuildInfo -> BuildInfo +biFillInDefaults bi = + if null (hsSourceDirs bi) + then bi { hsSourceDirs = [currentDir] } + else bi + +-- Walk a 'GenericPackageDescription' and apply @onBuildInfo@/@onSetupBuildInfo@ +-- to all nested 'BuildInfo'/'SetupBuildInfo' values. +transformAllBuildInfos :: (BuildInfo -> BuildInfo) + -> (SetupBuildInfo -> SetupBuildInfo) + -> GenericPackageDescription + -> GenericPackageDescription +transformAllBuildInfos onBuildInfo onSetupBuildInfo gpd = gpd' + where + onLibrary lib = lib { libBuildInfo = onBuildInfo $ libBuildInfo lib } + onExecutable exe = exe { buildInfo = onBuildInfo $ buildInfo exe } + onTestSuite tst = tst { testBuildInfo = onBuildInfo $ testBuildInfo tst } + onBenchmark bmk = bmk { benchmarkBuildInfo = + onBuildInfo $ benchmarkBuildInfo bmk } + + pd = packageDescription gpd + pd' = pd { + library = fmap onLibrary (library pd), + executables = map onExecutable (executables pd), + testSuites = map onTestSuite (testSuites pd), + benchmarks = map onBenchmark (benchmarks pd), + setupBuildInfo = fmap onSetupBuildInfo (setupBuildInfo pd) + } + + gpd' = transformAllCondTrees onLibrary onExecutable + onTestSuite onBenchmark id + $ gpd { packageDescription = pd' } + +-- | Walk a 'GenericPackageDescription' and apply @f@ to all nested +-- @build-depends@ fields. 
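-- A small usage sketch (illustrative; 'anyVersion' comes from
-- Distribution.Version): drop every version constraint from every
-- build-depends field, e.g. to experiment with ignoring bounds.
--
-- > allowAny :: Dependency -> Dependency
-- > allowAny (Dependency pn _) = Dependency pn anyVersion
-- >
-- > ignoreBounds :: GenericPackageDescription -> GenericPackageDescription
-- > ignoreBounds = transformAllBuildDepends allowAny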
+transformAllBuildDepends :: (Dependency -> Dependency) + -> GenericPackageDescription + -> GenericPackageDescription +transformAllBuildDepends f gpd = gpd' + where + onBI bi = bi { targetBuildDepends = map f $ targetBuildDepends bi } + onSBI stp = stp { setupDepends = map f $ setupDepends stp } + onPD pd = pd { buildDepends = map f $ buildDepends pd } + + pd' = onPD $ packageDescription gpd + gpd' = transformAllCondTrees id id id id (map f) + . transformAllBuildInfos onBI onSBI + $ gpd { packageDescription = pd' } + +-- | Walk all 'CondTree's inside a 'GenericPackageDescription' and apply +-- appropriate transformations to all nodes. Helper function used by +-- 'transformAllBuildDepends' and 'transformAllBuildInfos'. +transformAllCondTrees :: (Library -> Library) + -> (Executable -> Executable) + -> (TestSuite -> TestSuite) + -> (Benchmark -> Benchmark) + -> ([Dependency] -> [Dependency]) + -> GenericPackageDescription -> GenericPackageDescription +transformAllCondTrees onLibrary onExecutable + onTestSuite onBenchmark onDepends gpd = gpd' + where + gpd' = gpd { + condLibrary = condLib', + condExecutables = condExes', + condTestSuites = condTests', + condBenchmarks = condBenchs' + } + + condLib = condLibrary gpd + condExes = condExecutables gpd + condTests = condTestSuites gpd + condBenchs = condBenchmarks gpd + + condLib' = fmap (onCondTree onLibrary) condLib + condExes' = map (mapSnd $ onCondTree onExecutable) condExes + condTests' = map (mapSnd $ onCondTree onTestSuite) condTests + condBenchs' = map (mapSnd $ onCondTree onBenchmark) condBenchs + + mapSnd :: (a -> b) -> (c,a) -> (c,b) + mapSnd = fmap + + onCondTree :: (a -> b) -> CondTree v [Dependency] a + -> CondTree v [Dependency] b + onCondTree g = mapCondTree g onDepends id diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Parse.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Parse.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Parse.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Parse.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,1282 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveDataTypeable #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Parse +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defined parsers and partial pretty printers for the @.cabal@ format. +-- Some of the complexity in this module is due to the fact that we have to be +-- backwards compatible with old @.cabal@ files, so there's code to translate +-- into the newer structure. 
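-- A typical entry point into this module (illustrative; 'normal' comes from
-- Distribution.Verbosity):
--
-- > main :: IO ()
-- > main = do
-- >   gpd <- readPackageDescription normal "example.cabal"
-- >   print (package (packageDescription gpd))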
+ +module Distribution.PackageDescription.Parse ( + -- * Package descriptions + readPackageDescription, + writePackageDescription, + parsePackageDescription, + showPackageDescription, + + -- ** Parsing + ParseResult(..), + FieldDescr(..), + LineNo, + + -- ** Supplementary build information + readHookedBuildInfo, + parseHookedBuildInfo, + writeHookedBuildInfo, + showHookedBuildInfo, + + pkgDescrFieldDescrs, + libFieldDescrs, + executableFieldDescrs, + binfoFieldDescrs, + sourceRepoFieldDescrs, + testSuiteFieldDescrs, + flagFieldDescrs + ) where + +import Distribution.ParseUtils hiding (parseFields) +import Distribution.PackageDescription +import Distribution.PackageDescription.Utils +import Distribution.Package +import Distribution.ModuleName +import Distribution.Version +import Distribution.Verbosity +import Distribution.Compiler +import Distribution.PackageDescription.Configuration +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Compat.ReadP hiding (get) + +import Data.Char (isSpace) +import Data.Foldable (traverse_) +import Data.Maybe (listToMaybe, isJust) +import Data.List (nub, unfoldr, partition, (\\)) +import Control.Monad (liftM, foldM, when, unless, ap) +#if __GLASGOW_HASKELL__ < 710 +import Data.Monoid (Monoid(..)) +import Control.Applicative (Applicative(..)) +#endif +import Control.Arrow (first) +import System.Directory (doesFileExist) +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + +import Text.PrettyPrint + + +-- ----------------------------------------------------------------------------- +-- The PackageDescription type + +pkgDescrFieldDescrs :: [FieldDescr PackageDescription] +pkgDescrFieldDescrs = + [ simpleField "name" + disp parse + packageName (\name pkg -> pkg{package=(package pkg){pkgName=name}}) + , simpleField "version" + disp parse + packageVersion (\ver pkg -> pkg{package=(package pkg){pkgVersion=ver}}) + , simpleField "cabal-version" + (either disp disp) (liftM Left parse +++ liftM Right parse) + specVersionRaw (\v pkg -> pkg{specVersionRaw=v}) + , simpleField "build-type" + (maybe empty disp) (fmap Just parse) + buildType (\t pkg -> pkg{buildType=t}) + , simpleField "license" + disp parseLicenseQ + license (\l pkg -> pkg{license=l}) + -- We have both 'license-file' and 'license-files' fields. + -- Rather than declaring license-file to be deprecated, we will continue + -- to allow both. The 'license-file' will continue to only allow single + -- tokens, while 'license-files' allows multiple. On pretty-printing, we + -- will use 'license-file' if there's just one, and use 'license-files' + -- otherwise. 
+ , simpleField "license-file" + showFilePath parseFilePathQ + (\pkg -> case licenseFiles pkg of + [x] -> x + _ -> "") + (\l pkg -> pkg{licenseFiles=licenseFiles pkg ++ [l]}) + , listField "license-files" + showFilePath parseFilePathQ + (\pkg -> case licenseFiles pkg of + [_] -> [] + xs -> xs) + (\ls pkg -> pkg{licenseFiles=ls}) + , simpleField "copyright" + showFreeText parseFreeText + copyright (\val pkg -> pkg{copyright=val}) + , simpleField "maintainer" + showFreeText parseFreeText + maintainer (\val pkg -> pkg{maintainer=val}) + , simpleField "stability" + showFreeText parseFreeText + stability (\val pkg -> pkg{stability=val}) + , simpleField "homepage" + showFreeText parseFreeText + homepage (\val pkg -> pkg{homepage=val}) + , simpleField "package-url" + showFreeText parseFreeText + pkgUrl (\val pkg -> pkg{pkgUrl=val}) + , simpleField "bug-reports" + showFreeText parseFreeText + bugReports (\val pkg -> pkg{bugReports=val}) + , simpleField "synopsis" + showFreeText parseFreeText + synopsis (\val pkg -> pkg{synopsis=val}) + , simpleField "description" + showFreeText parseFreeText + description (\val pkg -> pkg{description=val}) + , simpleField "category" + showFreeText parseFreeText + category (\val pkg -> pkg{category=val}) + , simpleField "author" + showFreeText parseFreeText + author (\val pkg -> pkg{author=val}) + , listField "tested-with" + showTestedWith parseTestedWithQ + testedWith (\val pkg -> pkg{testedWith=val}) + , listFieldWithSep vcat "data-files" + showFilePath parseFilePathQ + dataFiles (\val pkg -> pkg{dataFiles=val}) + , simpleField "data-dir" + showFilePath parseFilePathQ + dataDir (\val pkg -> pkg{dataDir=val}) + , listFieldWithSep vcat "extra-source-files" + showFilePath parseFilePathQ + extraSrcFiles (\val pkg -> pkg{extraSrcFiles=val}) + , listFieldWithSep vcat "extra-tmp-files" + showFilePath parseFilePathQ + extraTmpFiles (\val pkg -> pkg{extraTmpFiles=val}) + , listFieldWithSep vcat "extra-doc-files" + showFilePath parseFilePathQ + extraDocFiles (\val pkg -> pkg{extraDocFiles=val}) + ] + +-- | Store any fields beginning with "x-" in the customFields field of +-- a PackageDescription. All other fields will generate a warning. 
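-- For example (illustrative):
--
-- > storeXFieldsPD ("x-revision", "1") pkg
-- >   -- Just pkg{ customFieldsPD = customFieldsPD pkg ++ [("x-revision", "1")] }
-- >
-- > storeXFieldsPD ("revision", "1") pkg
-- >   -- Nothing: not an "x-" field, so the caller reports it as unknown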
+storeXFieldsPD :: UnrecFieldParser PackageDescription +storeXFieldsPD (f@('x':'-':_),val) pkg = + Just pkg{ customFieldsPD = + customFieldsPD pkg ++ [(f,val)]} +storeXFieldsPD _ _ = Nothing + +-- --------------------------------------------------------------------------- +-- The Library type + +libFieldDescrs :: [FieldDescr Library] +libFieldDescrs = + [ listFieldWithSep vcat "exposed-modules" disp parseModuleNameQ + exposedModules (\mods lib -> lib{exposedModules=mods}) + + , commaListFieldWithSep vcat "reexported-modules" disp parse + reexportedModules (\mods lib -> lib{reexportedModules=mods}) + + , listFieldWithSep vcat "required-signatures" disp parseModuleNameQ + requiredSignatures (\mods lib -> lib{requiredSignatures=mods}) + + , listFieldWithSep vcat "exposed-signatures" disp parseModuleNameQ + exposedSignatures (\mods lib -> lib{exposedSignatures=mods}) + + , boolField "exposed" + libExposed (\val lib -> lib{libExposed=val}) + ] ++ map biToLib binfoFieldDescrs + where biToLib = liftField libBuildInfo (\bi lib -> lib{libBuildInfo=bi}) + +storeXFieldsLib :: UnrecFieldParser Library +storeXFieldsLib (f@('x':'-':_), val) l@(Library { libBuildInfo = bi }) = + Just $ l {libBuildInfo = + bi{ customFieldsBI = customFieldsBI bi ++ [(f,val)]}} +storeXFieldsLib _ _ = Nothing + +-- --------------------------------------------------------------------------- +-- The Executable type + + +executableFieldDescrs :: [FieldDescr Executable] +executableFieldDescrs = + [ -- note ordering: configuration must come first, for + -- showPackageDescription. + simpleField "executable" + showToken parseTokenQ + exeName (\xs exe -> exe{exeName=xs}) + , simpleField "main-is" + showFilePath parseFilePathQ + modulePath (\xs exe -> exe{modulePath=xs}) + ] + ++ map biToExe binfoFieldDescrs + where biToExe = liftField buildInfo (\bi exe -> exe{buildInfo=bi}) + +storeXFieldsExe :: UnrecFieldParser Executable +storeXFieldsExe (f@('x':'-':_), val) e@(Executable { buildInfo = bi }) = + Just $ e {buildInfo = bi{ customFieldsBI = (f,val):customFieldsBI bi}} +storeXFieldsExe _ _ = Nothing + +-- --------------------------------------------------------------------------- +-- The TestSuite type + +-- | An intermediate type just used for parsing the test-suite stanza. +-- After validation it is converted into the proper 'TestSuite' type. 
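-- For reference (illustrative), a stanza that parses into a 'TestSuiteStanza'
-- and passes 'validateTestSuite' below: the exitcode-stdio-1.0 type requires
-- a main-is field (and warns about a stray test-module), whereas the
-- detailed-0.9 type requires test-module instead.
--
-- > test-suite unit-tests
-- >   type:           exitcode-stdio-1.0
-- >   main-is:        Main.hs
-- >   hs-source-dirs: tests
-- >   build-depends:  base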
+data TestSuiteStanza = TestSuiteStanza { + testStanzaTestType :: Maybe TestType, + testStanzaMainIs :: Maybe FilePath, + testStanzaTestModule :: Maybe ModuleName, + testStanzaBuildInfo :: BuildInfo + } + +emptyTestStanza :: TestSuiteStanza +emptyTestStanza = TestSuiteStanza Nothing Nothing Nothing mempty + +testSuiteFieldDescrs :: [FieldDescr TestSuiteStanza] +testSuiteFieldDescrs = + [ simpleField "type" + (maybe empty disp) (fmap Just parse) + testStanzaTestType (\x suite -> suite { testStanzaTestType = x }) + , simpleField "main-is" + (maybe empty showFilePath) (fmap Just parseFilePathQ) + testStanzaMainIs (\x suite -> suite { testStanzaMainIs = x }) + , simpleField "test-module" + (maybe empty disp) (fmap Just parseModuleNameQ) + testStanzaTestModule (\x suite -> suite { testStanzaTestModule = x }) + ] + ++ map biToTest binfoFieldDescrs + where + biToTest = liftField testStanzaBuildInfo + (\bi suite -> suite { testStanzaBuildInfo = bi }) + +storeXFieldsTest :: UnrecFieldParser TestSuiteStanza +storeXFieldsTest (f@('x':'-':_), val) t@(TestSuiteStanza { testStanzaBuildInfo = bi }) = + Just $ t {testStanzaBuildInfo = bi{ customFieldsBI = (f,val):customFieldsBI bi}} +storeXFieldsTest _ _ = Nothing + +validateTestSuite :: LineNo -> TestSuiteStanza -> ParseResult TestSuite +validateTestSuite line stanza = + case testStanzaTestType stanza of + Nothing -> return $ + emptyTestSuite { testBuildInfo = testStanzaBuildInfo stanza } + + Just tt@(TestTypeUnknown _ _) -> + return emptyTestSuite { + testInterface = TestSuiteUnsupported tt, + testBuildInfo = testStanzaBuildInfo stanza + } + + Just tt | tt `notElem` knownTestTypes -> + return emptyTestSuite { + testInterface = TestSuiteUnsupported tt, + testBuildInfo = testStanzaBuildInfo stanza + } + + Just tt@(TestTypeExe ver) -> + case testStanzaMainIs stanza of + Nothing -> syntaxError line (missingField "main-is" tt) + Just file -> do + when (isJust (testStanzaTestModule stanza)) $ + warning (extraField "test-module" tt) + return emptyTestSuite { + testInterface = TestSuiteExeV10 ver file, + testBuildInfo = testStanzaBuildInfo stanza + } + + Just tt@(TestTypeLib ver) -> + case testStanzaTestModule stanza of + Nothing -> syntaxError line (missingField "test-module" tt) + Just module_ -> do + when (isJust (testStanzaMainIs stanza)) $ + warning (extraField "main-is" tt) + return emptyTestSuite { + testInterface = TestSuiteLibV09 ver module_, + testBuildInfo = testStanzaBuildInfo stanza + } + + where + missingField name tt = "The '" ++ name ++ "' field is required for the " + ++ display tt ++ " test suite type." + + extraField name tt = "The '" ++ name ++ "' field is not used for the '" + ++ display tt ++ "' test suite type." + + +-- --------------------------------------------------------------------------- +-- The Benchmark type + +-- | An intermediate type just used for parsing the benchmark stanza. +-- After validation it is converted into the proper 'Benchmark' type. 
+data BenchmarkStanza = BenchmarkStanza { + benchmarkStanzaBenchmarkType :: Maybe BenchmarkType, + benchmarkStanzaMainIs :: Maybe FilePath, + benchmarkStanzaBenchmarkModule :: Maybe ModuleName, + benchmarkStanzaBuildInfo :: BuildInfo + } + +emptyBenchmarkStanza :: BenchmarkStanza +emptyBenchmarkStanza = BenchmarkStanza Nothing Nothing Nothing mempty + +benchmarkFieldDescrs :: [FieldDescr BenchmarkStanza] +benchmarkFieldDescrs = + [ simpleField "type" + (maybe empty disp) (fmap Just parse) + benchmarkStanzaBenchmarkType + (\x suite -> suite { benchmarkStanzaBenchmarkType = x }) + , simpleField "main-is" + (maybe empty showFilePath) (fmap Just parseFilePathQ) + benchmarkStanzaMainIs + (\x suite -> suite { benchmarkStanzaMainIs = x }) + ] + ++ map biToBenchmark binfoFieldDescrs + where + biToBenchmark = liftField benchmarkStanzaBuildInfo + (\bi suite -> suite { benchmarkStanzaBuildInfo = bi }) + +storeXFieldsBenchmark :: UnrecFieldParser BenchmarkStanza +storeXFieldsBenchmark (f@('x':'-':_), val) + t@(BenchmarkStanza { benchmarkStanzaBuildInfo = bi }) = + Just $ t {benchmarkStanzaBuildInfo = + bi{ customFieldsBI = (f,val):customFieldsBI bi}} +storeXFieldsBenchmark _ _ = Nothing + +validateBenchmark :: LineNo -> BenchmarkStanza -> ParseResult Benchmark +validateBenchmark line stanza = + case benchmarkStanzaBenchmarkType stanza of + Nothing -> return $ + emptyBenchmark { benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza } + + Just tt@(BenchmarkTypeUnknown _ _) -> + return emptyBenchmark { + benchmarkInterface = BenchmarkUnsupported tt, + benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza + } + + Just tt | tt `notElem` knownBenchmarkTypes -> + return emptyBenchmark { + benchmarkInterface = BenchmarkUnsupported tt, + benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza + } + + Just tt@(BenchmarkTypeExe ver) -> + case benchmarkStanzaMainIs stanza of + Nothing -> syntaxError line (missingField "main-is" tt) + Just file -> do + when (isJust (benchmarkStanzaBenchmarkModule stanza)) $ + warning (extraField "benchmark-module" tt) + return emptyBenchmark { + benchmarkInterface = BenchmarkExeV10 ver file, + benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza + } + + where + missingField name tt = "The '" ++ name ++ "' field is required for the " + ++ display tt ++ " benchmark type." + + extraField name tt = "The '" ++ name ++ "' field is not used for the '" + ++ display tt ++ "' benchmark type." 
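-- For reference (illustrative), the benchmark analogue of a test-suite
-- stanza; exitcode-stdio-1.0 is the only benchmark type for which
-- 'validateBenchmark' above has a specific case, and it requires main-is:
--
-- > benchmark bench-fib
-- >   type:          exitcode-stdio-1.0
-- >   main-is:       Bench.hs
-- >   build-depends: base, criterion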
+ +-- --------------------------------------------------------------------------- +-- The BuildInfo type + + +binfoFieldDescrs :: [FieldDescr BuildInfo] +binfoFieldDescrs = + [ boolField "buildable" + buildable (\val binfo -> binfo{buildable=val}) + , commaListField "build-tools" + disp parseBuildTool + buildTools (\xs binfo -> binfo{buildTools=xs}) + , commaListFieldWithSep vcat "build-depends" + disp parse + targetBuildDepends (\xs binfo -> binfo{targetBuildDepends=xs}) + , spaceListField "cpp-options" + showToken parseTokenQ' + cppOptions (\val binfo -> binfo{cppOptions=val}) + , spaceListField "cc-options" + showToken parseTokenQ' + ccOptions (\val binfo -> binfo{ccOptions=val}) + , spaceListField "ld-options" + showToken parseTokenQ' + ldOptions (\val binfo -> binfo{ldOptions=val}) + , commaListField "pkgconfig-depends" + disp parsePkgconfigDependency + pkgconfigDepends (\xs binfo -> binfo{pkgconfigDepends=xs}) + , listField "frameworks" + showToken parseTokenQ + frameworks (\val binfo -> binfo{frameworks=val}) + , listField "extra-framework-dirs" + showToken parseFilePathQ + extraFrameworkDirs (\val binfo -> binfo{extraFrameworkDirs=val}) + , listFieldWithSep vcat "c-sources" + showFilePath parseFilePathQ + cSources (\paths binfo -> binfo{cSources=paths}) + , listFieldWithSep vcat "js-sources" + showFilePath parseFilePathQ + jsSources (\paths binfo -> binfo{jsSources=paths}) + , simpleField "default-language" + (maybe empty disp) (option Nothing (fmap Just parseLanguageQ)) + defaultLanguage (\lang binfo -> binfo{defaultLanguage=lang}) + , listField "other-languages" + disp parseLanguageQ + otherLanguages (\langs binfo -> binfo{otherLanguages=langs}) + , listField "default-extensions" + disp parseExtensionQ + defaultExtensions (\exts binfo -> binfo{defaultExtensions=exts}) + , listField "other-extensions" + disp parseExtensionQ + otherExtensions (\exts binfo -> binfo{otherExtensions=exts}) + , listField "extensions" + disp parseExtensionQ + oldExtensions (\exts binfo -> binfo{oldExtensions=exts}) + + , listFieldWithSep vcat "extra-libraries" + showToken parseTokenQ + extraLibs (\xs binfo -> binfo{extraLibs=xs}) + , listFieldWithSep vcat "extra-ghci-libraries" + showToken parseTokenQ + extraGHCiLibs (\xs binfo -> binfo{extraGHCiLibs=xs}) + , listField "extra-lib-dirs" + showFilePath parseFilePathQ + extraLibDirs (\xs binfo -> binfo{extraLibDirs=xs}) + , listFieldWithSep vcat "includes" + showFilePath parseFilePathQ + includes (\paths binfo -> binfo{includes=paths}) + , listFieldWithSep vcat "install-includes" + showFilePath parseFilePathQ + installIncludes (\paths binfo -> binfo{installIncludes=paths}) + , listField "include-dirs" + showFilePath parseFilePathQ + includeDirs (\paths binfo -> binfo{includeDirs=paths}) + , listField "hs-source-dirs" + showFilePath parseFilePathQ + hsSourceDirs (\paths binfo -> binfo{hsSourceDirs=paths}) + , listFieldWithSep vcat "other-modules" + disp parseModuleNameQ + otherModules (\val binfo -> binfo{otherModules=val}) + , optsField "ghc-prof-options" GHC + profOptions (\val binfo -> binfo{profOptions=val}) + , optsField "ghcjs-prof-options" GHCJS + profOptions (\val binfo -> binfo{profOptions=val}) + , optsField "ghc-shared-options" GHC + sharedOptions (\val binfo -> binfo{sharedOptions=val}) + , optsField "ghcjs-shared-options" GHCJS + sharedOptions (\val binfo -> binfo{sharedOptions=val}) + , optsField "ghc-options" GHC + options (\path binfo -> binfo{options=path}) + , optsField "ghcjs-options" GHCJS + options (\path binfo -> binfo{options=path}) + 
, optsField "jhc-options" JHC + options (\path binfo -> binfo{options=path}) + + -- NOTE: Hugs and NHC are not supported anymore, but these fields are kept + -- around for backwards compatibility. + , optsField "hugs-options" Hugs + options (const id) + , optsField "nhc98-options" NHC + options (const id) + ] + +storeXFieldsBI :: UnrecFieldParser BuildInfo +storeXFieldsBI (f@('x':'-':_),val) bi = Just bi{ customFieldsBI = (f,val):customFieldsBI bi } +storeXFieldsBI _ _ = Nothing + +------------------------------------------------------------------------------ + +flagFieldDescrs :: [FieldDescr Flag] +flagFieldDescrs = + [ simpleField "description" + showFreeText parseFreeText + flagDescription (\val fl -> fl{ flagDescription = val }) + , boolField "default" + flagDefault (\val fl -> fl{ flagDefault = val }) + , boolField "manual" + flagManual (\val fl -> fl{ flagManual = val }) + ] + +------------------------------------------------------------------------------ + +sourceRepoFieldDescrs :: [FieldDescr SourceRepo] +sourceRepoFieldDescrs = + [ simpleField "type" + (maybe empty disp) (fmap Just parse) + repoType (\val repo -> repo { repoType = val }) + , simpleField "location" + (maybe empty showFreeText) (fmap Just parseFreeText) + repoLocation (\val repo -> repo { repoLocation = val }) + , simpleField "module" + (maybe empty showToken) (fmap Just parseTokenQ) + repoModule (\val repo -> repo { repoModule = val }) + , simpleField "branch" + (maybe empty showToken) (fmap Just parseTokenQ) + repoBranch (\val repo -> repo { repoBranch = val }) + , simpleField "tag" + (maybe empty showToken) (fmap Just parseTokenQ) + repoTag (\val repo -> repo { repoTag = val }) + , simpleField "subdir" + (maybe empty showFilePath) (fmap Just parseFilePathQ) + repoSubdir (\val repo -> repo { repoSubdir = val }) + ] + +------------------------------------------------------------------------------ + +setupBInfoFieldDescrs :: [FieldDescr SetupBuildInfo] +setupBInfoFieldDescrs = + [ commaListFieldWithSep vcat "setup-depends" + disp parse + setupDepends (\xs binfo -> binfo{setupDepends=xs}) + ] + +-- --------------------------------------------------------------- +-- Parsing + +-- | Given a parser and a filename, return the parse of the file, +-- after checking if the file exists. +readAndParseFile :: (FilePath -> (String -> IO a) -> IO a) + -> (String -> ParseResult a) + -> Verbosity + -> FilePath -> IO a +readAndParseFile withFileContents' parser verbosity fpath = do + exists <- doesFileExist fpath + unless exists + (die $ "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue.") + withFileContents' fpath $ \str -> case parser str of + ParseFailed e -> do + let (line, message) = locatedErrorMsg e + dieWithLocation fpath line message + ParseOk warnings x -> do + mapM_ (warn verbosity . showPWarning fpath) $ reverse warnings + return x + +readHookedBuildInfo :: Verbosity -> FilePath -> IO HookedBuildInfo +readHookedBuildInfo = + readAndParseFile withFileContents parseHookedBuildInfo + +-- |Parse the given package file. 
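-- For reference (illustrative), stanzas matching the flag and
-- source-repository field descriptors defined above:
--
-- > flag debug
-- >   description: Emit extra tracing output
-- >   default:     False
-- >   manual:      True
-- >
-- > source-repository head
-- >   type:     git
-- >   location: https://github.com/haskell/cabal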
+readPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription +readPackageDescription = + readAndParseFile withUTF8FileContents parsePackageDescription + +stanzas :: [Field] -> [[Field]] +stanzas [] = [] +stanzas (f:fields) = (f:this) : stanzas rest + where + (this, rest) = break isStanzaHeader fields + +isStanzaHeader :: Field -> Bool +isStanzaHeader (F _ f _) = f == "executable" +isStanzaHeader _ = False + +------------------------------------------------------------------------------ + + +mapSimpleFields :: (Field -> ParseResult Field) -> [Field] + -> ParseResult [Field] +mapSimpleFields f = mapM walk + where + walk fld@F{} = f fld + walk (IfBlock l c fs1 fs2) = do + fs1' <- mapM walk fs1 + fs2' <- mapM walk fs2 + return (IfBlock l c fs1' fs2') + walk (Section ln n l fs1) = do + fs1' <- mapM walk fs1 + return (Section ln n l fs1') + +-- prop_isMapM fs = mapSimpleFields return fs == return fs + + +-- names of fields that represents dependencies +-- TODO: maybe build-tools should go here too? +constraintFieldNames :: [String] +constraintFieldNames = ["build-depends"] + +-- Possible refactoring would be to have modifiers be explicit about what +-- they add and define an accessor that specifies what the dependencies +-- are. This way we would completely reuse the parsing knowledge from the +-- field descriptor. +parseConstraint :: Field -> ParseResult [Dependency] +parseConstraint (F l n v) + | n `elem` constraintFieldNames = runP l n (parseCommaList parse) v +parseConstraint f = userBug $ "Constraint was expected (got: " ++ show f ++ ")" + +{- +headerFieldNames :: [String] +headerFieldNames = filter (\n -> not (n `elem` constraintFieldNames)) + . map fieldName $ pkgDescrFieldDescrs +-} + +libFieldNames :: [String] +libFieldNames = map fieldName libFieldDescrs + ++ buildInfoNames ++ constraintFieldNames + +-- exeFieldNames :: [String] +-- exeFieldNames = map fieldName executableFieldDescrs +-- ++ buildInfoNames + +buildInfoNames :: [String] +buildInfoNames = map fieldName binfoFieldDescrs + ++ map fst deprecatedFieldsBuildInfo + +-- A minimal implementation of the StateT monad transformer to avoid depending +-- on the 'mtl' package. +newtype StT s m a = StT { runStT :: s -> m (a,s) } + +instance Functor f => Functor (StT s f) where + fmap g (StT f) = StT $ fmap (first g) . f + +#if __GLASGOW_HASKELL__ >= 710 +instance (Monad m) => Applicative (StT s m) where +#else +instance (Monad m, Functor m) => Applicative (StT s m) where +#endif + pure a = StT (\s -> return (a,s)) + (<*>) = ap + + +instance Monad m => Monad (StT s m) where +#if __GLASGOW_HASKELL__ < 710 + return a = StT (\s -> return (a,s)) +#endif + StT f >>= g = StT $ \s -> do + (a,s') <- f s + runStT (g a) s' + +get :: Monad m => StT s m s +get = StT $ \s -> return (s, s) + +modify :: Monad m => (s -> s) -> StT s m () +modify f = StT $ \s -> return ((),f s) + +lift :: Monad m => m a -> StT s m a +lift m = StT $ \s -> m >>= \a -> return (a,s) + +evalStT :: Monad m => StT s m a -> s -> m a +evalStT st s = liftM fst $ runStT st s + +-- Our monad for parsing a list/tree of fields. +-- +-- The state represents the remaining fields to be processed. +type PM a = StT [Field] ParseResult a + + + +-- return look-ahead field or nothing if we're at the end of the file +peekField :: PM (Maybe Field) +peekField = liftM listToMaybe get + +-- Unconditionally discard the first field in our state. Will error when it +-- reaches end of file. (Yes, that's evil.) 
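-- A tiny illustration of the parsing-state monad above (illustrative only):
-- 'get' exposes the not-yet-parsed fields, 'modify' changes them, and
-- 'evalStT' discards the final state, so a helper counting the remaining
-- fields could be written as
--
-- > remainingFields :: PM Int
-- > remainingFields = liftM length get
--
-- and run with @evalStT remainingFields fields@ inside 'ParseResult'.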
+skipField :: PM () +skipField = modify tail + +--FIXME: this should take a ByteString, not a String. We have to be able to +-- decode UTF8 and handle the BOM. + +-- | Parses the given file into a 'GenericPackageDescription'. +-- +-- In Cabal 1.2 the syntax for package descriptions was changed to a format +-- with sections and possibly indented property descriptions. +parsePackageDescription :: String -> ParseResult GenericPackageDescription +parsePackageDescription file = do + + -- This function is quite complex because it needs to be able to parse + -- both pre-Cabal-1.2 and post-Cabal-1.2 files. Additionally, it contains + -- a lot of parser-related noise since we do not want to depend on Parsec. + -- + -- If we detect an pre-1.2 file we implicitly convert it to post-1.2 + -- style. See 'sectionizeFields' below for details about the conversion. + + fields0 <- readFields file `catchParseError` \err -> + let tabs = findIndentTabs file in + case err of + -- In case of a TabsError report them all at once. + TabsError tabLineNo -> reportTabsError + -- but only report the ones including and following + -- the one that caused the actual error + [ t | t@(lineNo',_) <- tabs + , lineNo' >= tabLineNo ] + _ -> parseFail err + + let cabalVersionNeeded = + head $ [ minVersionBound versionRange + | Just versionRange <- [ simpleParse v + | F _ "cabal-version" v <- fields0 ] ] + ++ [Version [0] []] + minVersionBound versionRange = + case asVersionIntervals versionRange of + [] -> Version [0] [] + ((LowerBound version _, _):_) -> version + + handleFutureVersionParseFailure cabalVersionNeeded $ do + + let sf = sectionizeFields fields0 -- ensure 1.2 format + + -- figure out and warn about deprecated stuff (warnings are collected + -- inside our parsing monad) + fields <- mapSimpleFields deprecField sf + + -- Our parsing monad takes the not-yet-parsed fields as its state. + -- After each successful parse we remove the field from the state + -- ('skipField') and move on to the next one. + -- + -- Things are complicated a bit, because fields take a tree-like + -- structure -- they can be sections or "if"/"else" conditionals. + + flip evalStT fields $ do + + -- The header consists of all simple fields up to the first section + -- (flag, library, executable). + header_fields <- getHeader [] + + -- Parses just the header fields and stores them in a + -- 'PackageDescription'. Note that our final result is a + -- 'GenericPackageDescription'; for pragmatic reasons we just store + -- the partially filled-out 'PackageDescription' inside the + -- 'GenericPackageDescription'. + pkg <- lift $ parseFields pkgDescrFieldDescrs + storeXFieldsPD + emptyPackageDescription + header_fields + + -- 'getBody' assumes that the remaining fields only consist of + -- flags, lib and exe sections. + (repos, flags, mcsetup, mlib, exes, tests, bms) <- getBody + warnIfRest -- warn if getBody did not parse up to the last field. 
+ -- warn about using old/new syntax with wrong cabal-version: + maybeWarnCabalVersion (not $ oldSyntax fields0) pkg + checkForUndefinedFlags flags mlib exes tests + return $ GenericPackageDescription + pkg { sourceRepos = repos, setupBuildInfo = mcsetup } + flags mlib exes tests bms + + where + oldSyntax = all isSimpleField + reportTabsError tabs = + syntaxError (fst (head tabs)) $ + "Do not use tabs for indentation (use spaces instead)\n" + ++ " Tabs were used at (line,column): " ++ show tabs + + maybeWarnCabalVersion newsyntax pkg + | newsyntax && specVersion pkg < Version [1,2] [] + = lift $ warning $ + "A package using section syntax must specify at least\n" + ++ "'cabal-version: >= 1.2'." + + maybeWarnCabalVersion newsyntax pkg + | not newsyntax && specVersion pkg >= Version [1,2] [] + = lift $ warning $ + "A package using 'cabal-version: " + ++ displaySpecVersion (specVersionRaw pkg) + ++ "' must use section syntax. See the Cabal user guide for details." + where + displaySpecVersion (Left version) = display version + displaySpecVersion (Right versionRange) = + case asVersionIntervals versionRange of + [] {- impossible -} -> display versionRange + ((LowerBound version _, _):_) -> display (orLaterVersion version) + + maybeWarnCabalVersion _ _ = return () + + + handleFutureVersionParseFailure cabalVersionNeeded parseBody = + (unless versionOk (warning message) >> parseBody) + `catchParseError` \parseError -> case parseError of + TabsError _ -> parseFail parseError + _ | versionOk -> parseFail parseError + | otherwise -> fail message + where versionOk = cabalVersionNeeded <= cabalVersion + message = "This package requires at least Cabal version " + ++ display cabalVersionNeeded + + -- "Sectionize" an old-style Cabal file. A sectionized file has: + -- + -- * all global fields at the beginning, followed by + -- + -- * all flag declarations, followed by + -- + -- * an optional library section, and an arbitrary number of executable + -- sections (in any order). + -- + -- The current implementation just gathers all library-specific fields + -- in a library section and wraps all executable stanzas in an executable + -- section. + sectionizeFields :: [Field] -> [Field] + sectionizeFields fs + | oldSyntax fs = + let + -- "build-depends" is a local field now. To be backwards + -- compatible, we still allow it as a global field in old-style + -- package description files and translate it to a local field by + -- adding it to every non-empty section + (hdr0, exes0) = break ((=="executable") . fName) fs + (hdr, libfs0) = partition (not . (`elem` libFieldNames) . fName) hdr0 + + (deps, libfs) = partition ((== "build-depends") . fName) + libfs0 + + exes = unfoldr toExe exes0 + toExe [] = Nothing + toExe (F l e n : r) + | e == "executable" = + let (efs, r') = break ((=="executable") . fName) r + in Just (Section l "executable" n (deps ++ efs), r') + toExe _ = cabalBug "unexpected input to 'toExe'" + in + hdr ++ + (if null libfs then [] + else [Section (lineNo (head libfs)) "library" "" (deps ++ libfs)]) + ++ exes + | otherwise = fs + + isSimpleField F{} = True + isSimpleField _ = False + + -- warn if there's something at the end of the file + warnIfRest :: PM () + warnIfRest = do + s <- get + case s of + [] -> return () + _ -> lift $ warning "Ignoring trailing declarations." -- add line no. 
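    -- To illustrate 'sectionizeFields' above (illustrative): an old-style,
    -- pre-Cabal-1.2 file such as
    --
    -- > name:            foo
    -- > build-depends:   base
    -- > exposed-modules: Foo
    --
    -- is implicitly rewritten into the sectioned form
    --
    -- > name: foo
    -- >
    -- > library
    -- >   build-depends:   base
    -- >   exposed-modules: Foo
    --
    -- before 'getHeader' and 'getBody' below do the normal section-based
    -- parsing.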
+ + -- all simple fields at the beginning of the file are (considered) header + -- fields + getHeader :: [Field] -> PM [Field] + getHeader acc = peekField >>= \mf -> case mf of + Just f@F{} -> skipField >> getHeader (f:acc) + _ -> return (reverse acc) + + -- + -- body ::= { repo | flag | library | executable | test }+ -- at most one lib + -- + -- The body consists of an optional sequence of declarations of flags and + -- an arbitrary number of executables and at most one library. + getBody :: PM ([SourceRepo], [Flag] + ,Maybe SetupBuildInfo + ,Maybe (CondTree ConfVar [Dependency] Library) + ,[(String, CondTree ConfVar [Dependency] Executable)] + ,[(String, CondTree ConfVar [Dependency] TestSuite)] + ,[(String, CondTree ConfVar [Dependency] Benchmark)]) + getBody = peekField >>= \mf -> case mf of + Just (Section line_no sec_type sec_label sec_fields) + | sec_type == "executable" -> do + when (null sec_label) $ lift $ syntaxError line_no + "'executable' needs one argument (the executable's name)" + exename <- lift $ runP line_no "executable" parseTokenQ sec_label + flds <- collectFields parseExeFields sec_fields + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + return (repos, flags, csetup, lib, (exename, flds): exes, tests, bms) + + | sec_type == "test-suite" -> do + when (null sec_label) $ lift $ syntaxError line_no + "'test-suite' needs one argument (the test suite's name)" + testname <- lift $ runP line_no "test" parseTokenQ sec_label + flds <- collectFields (parseTestFields line_no) sec_fields + + -- Check that a valid test suite type has been chosen. A type + -- field may be given inside a conditional block, so we must + -- check for that before complaining that a type field has not + -- been given. The test suite must always have a valid type, so + -- we need to check both the 'then' and 'else' blocks, though + -- the blocks need not have the same type. + let checkTestType ts ct = + let ts' = mappend ts $ condTreeData ct + -- If a conditional has only a 'then' block and no + -- 'else' block, then it cannot have a valid type + -- in every branch, unless the type is specified at + -- a higher level in the tree. + checkComponent (_, _, Nothing) = False + -- If a conditional has a 'then' block and an 'else' + -- block, both must specify a test type, unless the + -- type is specified higher in the tree. + checkComponent (_, t, Just e) = + checkTestType ts' t && checkTestType ts' e + -- Does the current node specify a test type? + hasTestType = testInterface ts' + /= testInterface emptyTestSuite + -- If the current level of the tree specifies a type, + -- then we are done. If not, then one of the conditional + -- branches below the current node must specify a type. + -- Each node may have multiple immediate children; we + -- only one need one to specify a type because the + -- configure step uses 'mappend' to join together the + -- results of flag resolution. + in hasTestType || any checkComponent (condTreeComponents ct) + if checkTestType emptyTestSuite flds + then do + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + return (repos, flags, csetup, lib, exes, + (testname, flds) : tests, bms) + else lift $ syntaxError line_no $ + "Test suite \"" ++ testname + ++ "\" is missing required field \"type\" or the field " + ++ "is not present in all conditional branches. 
The " + ++ "available test types are: " + ++ intercalate ", " (map display knownTestTypes) + + | sec_type == "benchmark" -> do + when (null sec_label) $ lift $ syntaxError line_no + "'benchmark' needs one argument (the benchmark's name)" + benchname <- lift $ runP line_no "benchmark" parseTokenQ sec_label + flds <- collectFields (parseBenchmarkFields line_no) sec_fields + + -- Check that a valid benchmark type has been chosen. A type + -- field may be given inside a conditional block, so we must + -- check for that before complaining that a type field has not + -- been given. The benchmark must always have a valid type, so + -- we need to check both the 'then' and 'else' blocks, though + -- the blocks need not have the same type. + let checkBenchmarkType ts ct = + let ts' = mappend ts $ condTreeData ct + -- If a conditional has only a 'then' block and no + -- 'else' block, then it cannot have a valid type + -- in every branch, unless the type is specified at + -- a higher level in the tree. + checkComponent (_, _, Nothing) = False + -- If a conditional has a 'then' block and an 'else' + -- block, both must specify a benchmark type, unless the + -- type is specified higher in the tree. + checkComponent (_, t, Just e) = + checkBenchmarkType ts' t && checkBenchmarkType ts' e + -- Does the current node specify a benchmark type? + hasBenchmarkType = benchmarkInterface ts' + /= benchmarkInterface emptyBenchmark + -- If the current level of the tree specifies a type, + -- then we are done. If not, then one of the conditional + -- branches below the current node must specify a type. + -- Each node may have multiple immediate children; we + -- only one need one to specify a type because the + -- configure step uses 'mappend' to join together the + -- results of flag resolution. + in hasBenchmarkType || any checkComponent (condTreeComponents ct) + if checkBenchmarkType emptyBenchmark flds + then do + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + return (repos, flags, csetup, lib, exes, + tests, (benchname, flds) : bms) + else lift $ syntaxError line_no $ + "Benchmark \"" ++ benchname + ++ "\" is missing required field \"type\" or the field " + ++ "is not present in all conditional branches. The " + ++ "available benchmark types are: " + ++ intercalate ", " (map display knownBenchmarkTypes) + + | sec_type == "library" -> do + unless (null sec_label) $ lift $ + syntaxError line_no "'library' expects no argument" + flds <- collectFields parseLibFields sec_fields + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + when (isJust lib) $ lift $ syntaxError line_no + "There can only be one library section in a package description." 
+ return (repos, flags, csetup, Just flds, exes, tests, bms) + + | sec_type == "flag" -> do + when (null sec_label) $ lift $ + syntaxError line_no "'flag' needs one argument (the flag's name)" + flag <- lift $ parseFields + flagFieldDescrs + warnUnrec + (MkFlag (FlagName (lowercase sec_label)) "" True False) + sec_fields + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + return (repos, flag:flags, csetup, lib, exes, tests, bms) + + | sec_type == "source-repository" -> do + when (null sec_label) $ lift $ syntaxError line_no $ + "'source-repository' needs one argument, " + ++ "the repo kind which is usually 'head' or 'this'" + kind <- case simpleParse sec_label of + Just kind -> return kind + Nothing -> lift $ syntaxError line_no $ + "could not parse repo kind: " ++ sec_label + repo <- lift $ parseFields + sourceRepoFieldDescrs + warnUnrec + SourceRepo { + repoKind = kind, + repoType = Nothing, + repoLocation = Nothing, + repoModule = Nothing, + repoBranch = Nothing, + repoTag = Nothing, + repoSubdir = Nothing + } + sec_fields + skipField + (repos, flags, csetup, lib, exes, tests, bms) <- getBody + return (repo:repos, flags, csetup, lib, exes, tests, bms) + + | sec_type == "custom-setup" -> do + unless (null sec_label) $ lift $ + syntaxError line_no "'setup' expects no argument" + flds <- lift $ parseFields + setupBInfoFieldDescrs + warnUnrec + mempty + sec_fields + skipField + (repos, flags, csetup0, lib, exes, tests, bms) <- getBody + when (isJust csetup0) $ lift $ syntaxError line_no + "There can only be one 'custom-setup' section in a package description." + return (repos, flags, Just flds, lib, exes, tests, bms) + + | otherwise -> do + lift $ warning $ "Ignoring unknown section type: " ++ sec_type + skipField + getBody + Just f@(F {}) -> do + _ <- lift $ syntaxError (lineNo f) $ + "Plain fields are not allowed in between stanzas: " ++ show f + skipField + getBody + Just f@(IfBlock {}) -> do + _ <- lift $ syntaxError (lineNo f) $ + "If-blocks are not allowed in between stanzas: " ++ show f + skipField + getBody + Nothing -> return ([], [], Nothing, Nothing, [], [], []) + + -- Extracts all fields in a block and returns a 'CondTree'. + -- + -- We have to recurse down into conditionals and we treat fields that + -- describe dependencies specially. + collectFields :: ([Field] -> PM a) -> [Field] + -> PM (CondTree ConfVar [Dependency] a) + collectFields parser allflds = do + + let simplFlds = [ F l n v | F l n v <- allflds ] + condFlds = [ f | f@IfBlock{} <- allflds ] + sections = [ s | s@Section{} <- allflds ] + + mapM_ + (\(Section l n _ _) -> lift . warning $ + "Unexpected section '" ++ n ++ "' on line " ++ show l) + sections + + a <- parser simplFlds + + -- Dependencies must be treated specially: when we + -- parse into a CondTree, not only do we parse them into + -- the targetBuildDepends/etc field inside the + -- PackageDescription, but we also have to put the + -- combined dependencies into CondTree. + -- + -- This information is, in principle, redundant, but + -- putting it here makes it easier for the constraint + -- solver to pick a flag assignment which supports + -- all of the dependencies (because it only has + -- to check the CondTree, rather than grovel everywhere + -- inside the conditional bits). + deps <- liftM concat + . mapM (lift . parseConstraint) + . 
filter isConstraint + $ simplFlds + + ifs <- mapM processIfs condFlds + + return (CondNode a deps ifs) + where + isConstraint (F _ n _) = n `elem` constraintFieldNames + isConstraint _ = False + + processIfs (IfBlock l c t e) = do + cnd <- lift $ runP l "if" parseCondition c + t' <- collectFields parser t + e' <- case e of + [] -> return Nothing + es -> do fs <- collectFields parser es + return (Just fs) + return (cnd, t', e') + processIfs _ = cabalBug "processIfs called with wrong field type" + + parseLibFields :: [Field] -> PM Library + parseLibFields = lift . parseFields libFieldDescrs storeXFieldsLib emptyLibrary + + -- Note: we don't parse the "executable" field here, hence the tail hack. + parseExeFields :: [Field] -> PM Executable + parseExeFields = lift . parseFields (tail executableFieldDescrs) + storeXFieldsExe emptyExecutable + + parseTestFields :: LineNo -> [Field] -> PM TestSuite + parseTestFields line fields = do + x <- lift $ parseFields testSuiteFieldDescrs storeXFieldsTest + emptyTestStanza fields + lift $ validateTestSuite line x + + parseBenchmarkFields :: LineNo -> [Field] -> PM Benchmark + parseBenchmarkFields line fields = do + x <- lift $ parseFields benchmarkFieldDescrs storeXFieldsBenchmark + emptyBenchmarkStanza fields + lift $ validateBenchmark line x + + checkForUndefinedFlags :: + [Flag] -> + Maybe (CondTree ConfVar [Dependency] Library) -> + [(String, CondTree ConfVar [Dependency] Executable)] -> + [(String, CondTree ConfVar [Dependency] TestSuite)] -> + PM () + checkForUndefinedFlags flags mlib exes tests = do + let definedFlags = map flagName flags + traverse_ (checkCondTreeFlags definedFlags) mlib + mapM_ (checkCondTreeFlags definedFlags . snd) exes + mapM_ (checkCondTreeFlags definedFlags . snd) tests + + checkCondTreeFlags :: [FlagName] -> CondTree ConfVar c a -> PM () + checkCondTreeFlags definedFlags ct = do + let fv = nub $ freeVars ct + unless (all (`elem` definedFlags) fv) $ + fail $ "These flags are used without having been defined: " + ++ intercalate ", " [ n | FlagName n <- fv \\ definedFlags ] + + +-- | Parse a list of fields, given a list of field descriptions, +-- a structure to accumulate the parsed fields, and a function +-- that can decide what to do with fields which don't match any +-- of the field descriptions. +parseFields :: [FieldDescr a] -- ^ descriptions of fields we know how to + -- parse + -> UnrecFieldParser a -- ^ possibly do something with + -- unrecognized fields + -> a -- ^ accumulator + -> [Field] -- ^ fields to be parsed + -> ParseResult a +parseFields descrs unrec ini fields = + do (a, unknowns) <- foldM (parseField descrs unrec) (ini, []) fields + unless (null unknowns) $ warning $ render $ + text "Unknown fields:" <+> + commaSep (map (\(l,u) -> u ++ " (line " ++ show l ++ ")") + (reverse unknowns)) + $+$ + text "Fields allowed in this section:" $$ + nest 4 (commaSep $ map fieldName descrs) + return a + where + commaSep = fsep . punctuate comma . 
map text + +parseField :: [FieldDescr a] -- ^ list of parseable fields + -> UnrecFieldParser a -- ^ possibly do something with + -- unrecognized fields + -> (a,[(Int,String)]) -- ^ accumulated result and warnings + -> Field -- ^ the field to be parsed + -> ParseResult (a, [(Int,String)]) +parseField (FieldDescr name _ parser : fields) unrec (a, us) (F line f val) + | name == f = parser line val a >>= \a' -> return (a',us) + | otherwise = parseField fields unrec (a,us) (F line f val) +parseField [] unrec (a,us) (F l f val) = return $ + case unrec (f,val) a of -- no fields matched, see if the 'unrec' + Just a' -> (a',us) -- function wants to do anything with it + Nothing -> (a, (l,f):us) +parseField _ _ _ _ = cabalBug "'parseField' called on a non-field" + +deprecatedFields :: [(String,String)] +deprecatedFields = + deprecatedFieldsPkgDescr ++ deprecatedFieldsBuildInfo + +deprecatedFieldsPkgDescr :: [(String,String)] +deprecatedFieldsPkgDescr = [ ("other-files", "extra-source-files") ] + +deprecatedFieldsBuildInfo :: [(String,String)] +deprecatedFieldsBuildInfo = [ ("hs-source-dir","hs-source-dirs") ] + +-- Handle deprecated fields +deprecField :: Field -> ParseResult Field +deprecField (F line fld val) = do + fld' <- case lookup fld deprecatedFields of + Nothing -> return fld + Just newName -> do + warning $ "The field \"" ++ fld + ++ "\" is deprecated, please use \"" ++ newName ++ "\"" + return newName + return (F line fld' val) +deprecField _ = cabalBug "'deprecField' called on a non-field" + + +parseHookedBuildInfo :: String -> ParseResult HookedBuildInfo +parseHookedBuildInfo inp = do + fields <- readFields inp + let ss@(mLibFields:exes) = stanzas fields + mLib <- parseLib mLibFields + biExes <- mapM parseExe (maybe ss (const exes) mLib) + return (mLib, biExes) + where + parseLib :: [Field] -> ParseResult (Maybe BuildInfo) + parseLib (bi@(F _ inFieldName _:_)) + | lowercase inFieldName /= "executable" = liftM Just (parseBI bi) + parseLib _ = return Nothing + + parseExe :: [Field] -> ParseResult (String, BuildInfo) + parseExe (F line inFieldName mName:bi) + | lowercase inFieldName == "executable" + = do bis <- parseBI bi + return (mName, bis) + | otherwise = syntaxError line "expecting 'executable' at top of stanza" + parseExe (_:_) = cabalBug "`parseExe' called on a non-field" + parseExe [] = syntaxError 0 "error in parsing buildinfo file. 
Expected executable stanza" + + parseBI st = parseFields binfoFieldDescrs storeXFieldsBI emptyBuildInfo st + +-- --------------------------------------------------------------------------- +-- Pretty printing + +writePackageDescription :: FilePath -> PackageDescription -> IO () +writePackageDescription fpath pkg = writeUTF8File fpath (showPackageDescription pkg) + +--TODO: make this use section syntax +-- add equivalent for GenericPackageDescription +showPackageDescription :: PackageDescription -> String +showPackageDescription pkg = render $ + ppPackage pkg + $$ ppCustomFields (customFieldsPD pkg) + $$ (case library pkg of + Nothing -> empty + Just lib -> ppLibrary lib) + $$ vcat [ space $$ ppExecutable exe | exe <- executables pkg ] + where + ppPackage = ppFields pkgDescrFieldDescrs + ppLibrary = ppFields libFieldDescrs + ppExecutable = ppFields executableFieldDescrs + +ppCustomFields :: [(String,String)] -> Doc +ppCustomFields flds = vcat (map ppCustomField flds) + +ppCustomField :: (String,String) -> Doc +ppCustomField (name,val) = text name <> colon <+> showFreeText val + +writeHookedBuildInfo :: FilePath -> HookedBuildInfo -> IO () +writeHookedBuildInfo fpath = writeFileAtomic fpath . BS.Char8.pack + . showHookedBuildInfo + +showHookedBuildInfo :: HookedBuildInfo -> String +showHookedBuildInfo (mb_lib_bi, ex_bis) = render $ + (case mb_lib_bi of + Nothing -> empty + Just bi -> ppBuildInfo bi) + $$ vcat [ space + $$ text "executable:" <+> text name + $$ ppBuildInfo bi + | (name, bi) <- ex_bis ] + where + ppBuildInfo bi = ppFields binfoFieldDescrs bi + $$ ppCustomFields (customFieldsBI bi) + +-- replace all tabs used as indentation with whitespace, also return where +-- tabs were found +findIndentTabs :: String -> [(Int,Int)] +findIndentTabs = concatMap checkLine + . zip [1..] + . lines + where + checkLine (lineno, l) = + let (indent, _content) = span isSpace l + tabCols = map fst . filter ((== '\t') . snd) . zip [0..] 
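+            -- A worked example of the whole function (illustrative, not from
+            -- the original source):
+            --   findIndentTabs "  \tx\n\ty\n" == [(1,2),(2,0)]
+            -- line numbers are 1-based, column offsets 0-based.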
+ addLineNo = map (\col -> (lineno,col)) + in addLineNo (tabCols indent) + +--test_findIndentTabs = findIndentTabs $ unlines $ +-- [ "foo", " bar", " \t baz", "\t biz\t", "\t\t \t mib" ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/PrettyPrint.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/PrettyPrint.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/PrettyPrint.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/PrettyPrint.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,252 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.PrettyPrint +-- Copyright : Jürgen Nicklisch-Franken 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Pretty printing for cabal files +-- +----------------------------------------------------------------------------- + +module Distribution.PackageDescription.PrettyPrint ( + writeGenericPackageDescription, + showGenericPackageDescription, +) where + +import Distribution.PackageDescription +import Distribution.Simple.Utils +import Distribution.ParseUtils +import Distribution.PackageDescription.Parse +import Distribution.Package +import Distribution.Text + +import Data.Monoid as Mon (Monoid(mempty)) +import Data.Maybe (isJust) +import Text.PrettyPrint + (hsep, parens, char, nest, empty, isEmpty, ($$), (<+>), + colon, (<>), text, vcat, ($+$), Doc, render) + +-- | Recompile with false for regression testing +simplifiedPrinting :: Bool +simplifiedPrinting = False + +-- | Writes a .cabal file from a generic package description +writeGenericPackageDescription :: FilePath -> GenericPackageDescription -> IO () +writeGenericPackageDescription fpath pkg = writeUTF8File fpath (showGenericPackageDescription pkg) + +-- | Writes a generic package description to a string +showGenericPackageDescription :: GenericPackageDescription -> String +showGenericPackageDescription = render . ppGenericPackageDescription + +ppGenericPackageDescription :: GenericPackageDescription -> Doc +ppGenericPackageDescription gpd = + ppPackageDescription (packageDescription gpd) + $+$ ppGenPackageFlags (genPackageFlags gpd) + $+$ ppLibrary (condLibrary gpd) + $+$ ppExecutables (condExecutables gpd) + $+$ ppTestSuites (condTestSuites gpd) + $+$ ppBenchmarks (condBenchmarks gpd) + +ppPackageDescription :: PackageDescription -> Doc +ppPackageDescription pd = ppFields pkgDescrFieldDescrs pd + $+$ ppCustomFields (customFieldsPD pd) + $+$ ppSourceRepos (sourceRepos pd) + +ppSourceRepos :: [SourceRepo] -> Doc +ppSourceRepos [] = empty +ppSourceRepos (hd:tl) = ppSourceRepo hd $+$ ppSourceRepos tl + +ppSourceRepo :: SourceRepo -> Doc +ppSourceRepo repo = + emptyLine $ text "source-repository" <+> disp (repoKind repo) $+$ + (nest indentWith (ppFields sourceRepoFieldDescrs' repo)) + where + sourceRepoFieldDescrs' = [fd | fd <- sourceRepoFieldDescrs, fieldName fd /= "kind"] + +-- TODO: this is a temporary hack. Ideally, fields containing default values +-- would be filtered out when the @FieldDescr a@ list is generated. 
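+--
+-- An illustrative example (not from the original source): with
+-- @binfoDefaults = [("buildable", "True")]@ below, a field rendering as
+-- @buildable: True@ is dropped from the output, while @buildable: False@
+-- is still printed.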
+ppFieldsFiltered :: [(String, String)] -> [FieldDescr a] -> a -> Doc +ppFieldsFiltered removable fields x = ppFields (filter nondefault fields) x + where + nondefault (FieldDescr name getter _) = + maybe True (render (getter x) /=) (lookup name removable) + +binfoDefaults :: [(String, String)] +binfoDefaults = [("buildable", "True")] + +libDefaults :: [(String, String)] +libDefaults = ("exposed", "True") : binfoDefaults + +flagDefaults :: [(String, String)] +flagDefaults = [("default", "True"), ("manual", "False")] + +ppDiffFields :: [FieldDescr a] -> a -> a -> Doc +ppDiffFields fields x y = + vcat [ ppField name (getter x) + | FieldDescr name getter _ <- fields + , render (getter x) /= render (getter y) + ] + +ppCustomFields :: [(String,String)] -> Doc +ppCustomFields flds = vcat [ppCustomField f | f <- flds] + +ppCustomField :: (String,String) -> Doc +ppCustomField (name,val) = text name <> colon <+> showFreeText val + +ppGenPackageFlags :: [Flag] -> Doc +ppGenPackageFlags flds = vcat [ppFlag f | f <- flds] + +ppFlag :: Flag -> Doc +ppFlag flag@(MkFlag name _ _ _) = + emptyLine $ text "flag" <+> ppFlagName name $+$ nest indentWith fields + where + fields = ppFieldsFiltered flagDefaults flagFieldDescrs flag + +ppLibrary :: (Maybe (CondTree ConfVar [Dependency] Library)) -> Doc +ppLibrary Nothing = empty +ppLibrary (Just condTree) = + emptyLine $ text "library" $+$ nest indentWith (ppCondTree condTree Nothing ppLib) + where + ppLib lib Nothing = ppFieldsFiltered libDefaults libFieldDescrs lib + $$ ppCustomFields (customFieldsBI (libBuildInfo lib)) + ppLib lib (Just plib) = ppDiffFields libFieldDescrs lib plib + $$ ppCustomFields (customFieldsBI (libBuildInfo lib)) + +ppExecutables :: [(String, CondTree ConfVar [Dependency] Executable)] -> Doc +ppExecutables exes = + vcat [emptyLine $ text ("executable " ++ n) + $+$ nest indentWith (ppCondTree condTree Nothing ppExe)| (n,condTree) <- exes] + where + ppExe (Executable _ modulePath' buildInfo') Nothing = + (if modulePath' == "" then empty else text "main-is:" <+> text modulePath') + $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs buildInfo' + $+$ ppCustomFields (customFieldsBI buildInfo') + ppExe (Executable _ modulePath' buildInfo') + (Just (Executable _ modulePath2 buildInfo2)) = + (if modulePath' == "" || modulePath' == modulePath2 + then empty else text "main-is:" <+> text modulePath') + $+$ ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 + $+$ ppCustomFields (customFieldsBI buildInfo') + +ppTestSuites :: [(String, CondTree ConfVar [Dependency] TestSuite)] -> Doc +ppTestSuites suites = + emptyLine $ vcat [ text ("test-suite " ++ n) + $+$ nest indentWith (ppCondTree condTree Nothing ppTestSuite) + | (n,condTree) <- suites] + where + ppTestSuite testsuite Nothing = + maybe empty (\t -> text "type:" <+> disp t) + maybeTestType + $+$ maybe empty (\f -> text "main-is:" <+> text f) + (testSuiteMainIs testsuite) + $+$ maybe empty (\m -> text "test-module:" <+> disp m) + (testSuiteModule testsuite) + $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs (testBuildInfo testsuite) + $+$ ppCustomFields (customFieldsBI (testBuildInfo testsuite)) + where + maybeTestType | testInterface testsuite == mempty = Nothing + | otherwise = Just (testType testsuite) + + ppTestSuite (TestSuite _ _ buildInfo' _) + (Just (TestSuite _ _ buildInfo2 _)) = + ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 + $+$ ppCustomFields (customFieldsBI buildInfo') + + testSuiteMainIs test = case testInterface test of + TestSuiteExeV10 _ f -> Just f + _ -> Nothing + + 
testSuiteModule test = case testInterface test of + TestSuiteLibV09 _ m -> Just m + _ -> Nothing + +ppBenchmarks :: [(String, CondTree ConfVar [Dependency] Benchmark)] -> Doc +ppBenchmarks suites = + emptyLine $ vcat [ text ("benchmark " ++ n) + $+$ nest indentWith (ppCondTree condTree Nothing ppBenchmark) + | (n,condTree) <- suites] + where + ppBenchmark benchmark Nothing = + maybe empty (\t -> text "type:" <+> disp t) + maybeBenchmarkType + $+$ maybe empty (\f -> text "main-is:" <+> text f) + (benchmarkMainIs benchmark) + $+$ ppFieldsFiltered binfoDefaults binfoFieldDescrs (benchmarkBuildInfo benchmark) + $+$ ppCustomFields (customFieldsBI (benchmarkBuildInfo benchmark)) + where + maybeBenchmarkType | benchmarkInterface benchmark == mempty = Nothing + | otherwise = Just (benchmarkType benchmark) + + ppBenchmark (Benchmark _ _ buildInfo' _) + (Just (Benchmark _ _ buildInfo2 _)) = + ppDiffFields binfoFieldDescrs buildInfo' buildInfo2 + $+$ ppCustomFields (customFieldsBI buildInfo') + + benchmarkMainIs benchmark = case benchmarkInterface benchmark of + BenchmarkExeV10 _ f -> Just f + _ -> Nothing + +ppCondition :: Condition ConfVar -> Doc +ppCondition (Var x) = ppConfVar x +ppCondition (Lit b) = text (show b) +ppCondition (CNot c) = char '!' <> (ppCondition c) +ppCondition (COr c1 c2) = parens (hsep [ppCondition c1, text "||" + <+> ppCondition c2]) +ppCondition (CAnd c1 c2) = parens (hsep [ppCondition c1, text "&&" + <+> ppCondition c2]) +ppConfVar :: ConfVar -> Doc +ppConfVar (OS os) = text "os" <> parens (disp os) +ppConfVar (Arch arch) = text "arch" <> parens (disp arch) +ppConfVar (Flag name) = text "flag" <> parens (ppFlagName name) +ppConfVar (Impl c v) = text "impl" <> parens (disp c <+> disp v) + +ppFlagName :: FlagName -> Doc +ppFlagName (FlagName name) = text name + +ppCondTree :: CondTree ConfVar [Dependency] a -> Maybe a -> (a -> Maybe a -> Doc) -> Doc +ppCondTree ct@(CondNode it _ ifs) mbIt ppIt = + let res = (vcat $ map ppIf ifs) + $+$ ppIt it mbIt + in if isJust mbIt && isEmpty res + then ppCondTree ct Nothing ppIt + else res + where + -- TODO: this ends up printing trailing spaces when combined with nest. 
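+        -- Roughly (illustrative, not from the original source), a node whose
+        -- only conditional is on @flag(debug)@ renders as
+        --
+        -- >   if flag(debug)
+        -- >     cpp-options: -DDEBUG
+        --
+        -- with an @else@ part emitted by ppIfElse when the else tree is
+        -- non-empty.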
+ ppIf (c, thenTree, Just elseTree) = ppIfElse it ppIt c thenTree elseTree + ppIf (c, thenTree, Nothing) = ppIf' it ppIt c thenTree + +ppIfCondition :: (Condition ConfVar) -> Doc +ppIfCondition c = (emptyLine $ text "if" <+> ppCondition c) + +ppIf' :: a -> (a -> Maybe a -> Doc) + -> Condition ConfVar + -> CondTree ConfVar [Dependency] a + -> Doc +ppIf' it ppIt c thenTree = + if isEmpty thenDoc + then Mon.mempty + else ppIfCondition c $$ nest indentWith thenDoc + where thenDoc = ppCondTree thenTree (if simplifiedPrinting then (Just it) else Nothing) ppIt + +ppIfElse :: a -> (a -> Maybe a -> Doc) + -> Condition ConfVar + -> CondTree ConfVar [Dependency] a + -> CondTree ConfVar [Dependency] a + -> Doc +ppIfElse it ppIt c thenTree elseTree = + case (isEmpty thenDoc, isEmpty elseDoc) of + (True, True) -> Mon.mempty + (False, True) -> ppIfCondition c $$ nest indentWith thenDoc + (True, False) -> ppIfCondition (cNot c) $$ nest indentWith elseDoc + (False, False) -> (ppIfCondition c $$ nest indentWith thenDoc) + $+$ (text "else" $$ nest indentWith elseDoc) + where thenDoc = ppCondTree thenTree (if simplifiedPrinting then (Just it) else Nothing) ppIt + elseDoc = ppCondTree elseTree (if simplifiedPrinting then (Just it) else Nothing) ppIt + +emptyLine :: Doc -> Doc +emptyLine d = text "" $+$ d + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription/Utils.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,23 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Utils +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Common utils used by modules under Distribution.PackageDescription.*. + +module Distribution.PackageDescription.Utils ( + cabalBug, userBug + ) where + +-- ---------------------------------------------------------------------------- +-- Exception and logging utils + +userBug :: String -> a +userBug msg = error $ msg ++ ". This is a bug in your .cabal file." + +cabalBug :: String -> a +cabalBug msg = error $ msg ++ ". This is possibly a bug in Cabal.\n" + ++ "Please report it to the developers: " + ++ "https://github.com/haskell/cabal/issues/new" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/PackageDescription.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,1289 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines the data structure for the @.cabal@ file format. 
There are +-- several parts to this structure. It has top level info and then 'Library', +-- 'Executable', 'TestSuite', and 'Benchmark' sections each of which have +-- associated 'BuildInfo' data that's used to build the library, exe, test, or +-- benchmark. To further complicate things there is both a 'PackageDescription' +-- and a 'GenericPackageDescription'. This distinction relates to cabal +-- configurations. When we initially read a @.cabal@ file we get a +-- 'GenericPackageDescription' which has all the conditional sections. +-- Before actually building a package we have to decide +-- on each conditional. Once we've done that we get a 'PackageDescription'. +-- It was done this way initially to avoid breaking too much stuff when the +-- feature was introduced. It could probably do with being rationalised at some +-- point to make it simpler. + +module Distribution.PackageDescription ( + -- * Package descriptions + PackageDescription(..), + emptyPackageDescription, + specVersion, + descCabalVersion, + BuildType(..), + knownBuildTypes, + + -- ** Renaming + ModuleRenaming(..), + defaultRenaming, + lookupRenaming, + + -- ** Libraries + Library(..), + ModuleReexport(..), + emptyLibrary, + withLib, + hasLibs, + libModules, + + -- ** Executables + Executable(..), + emptyExecutable, + withExe, + hasExes, + exeModules, + + -- * Tests + TestSuite(..), + TestSuiteInterface(..), + TestType(..), + testType, + knownTestTypes, + emptyTestSuite, + hasTests, + withTest, + testModules, + enabledTests, + + -- * Benchmarks + Benchmark(..), + BenchmarkInterface(..), + BenchmarkType(..), + benchmarkType, + knownBenchmarkTypes, + emptyBenchmark, + hasBenchmarks, + withBenchmark, + benchmarkModules, + enabledBenchmarks, + + -- * Build information + BuildInfo(..), + emptyBuildInfo, + allBuildInfo, + allLanguages, + allExtensions, + usedExtensions, + hcOptions, + hcProfOptions, + hcSharedOptions, + + -- ** Supplementary build information + HookedBuildInfo, + emptyHookedBuildInfo, + updatePackageDescription, + + -- * package configuration + GenericPackageDescription(..), + Flag(..), FlagName(..), FlagAssignment, + CondTree(..), ConfVar(..), Condition(..), + cNot, cAnd, cOr, + + -- * Source repositories + SourceRepo(..), + RepoKind(..), + RepoType(..), + knownRepoTypes, + + -- * Custom setup build information + SetupBuildInfo(..), + ) where + +import Distribution.Compat.Binary +import qualified Distribution.Compat.Semigroup as Semi ((<>)) +import Distribution.Compat.Semigroup as Semi (Monoid(..), Semigroup, gmempty) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP ((<++)) +import Distribution.Package +import Distribution.ModuleName +import Distribution.Version +import Distribution.License +import Distribution.Compiler +import Distribution.System +import Distribution.Text +import Language.Haskell.Extension + +import Data.Data (Data) +import Data.Foldable (traverse_) +import Data.List (nub, intercalate) +import Data.Maybe (fromMaybe, maybeToList) +import Data.Foldable as Fold (Foldable(foldMap)) +import Data.Traversable as Trav (Traversable(traverse)) +import Data.Typeable ( Typeable ) +import Control.Applicative as AP (Alternative(..), Applicative(..)) +import Control.Monad (MonadPlus(mplus,mzero), ap) +import GHC.Generics (Generic) +import Text.PrettyPrint as Disp +import qualified Data.Char as Char (isAlphaNum, isDigit, toLower) +import qualified Data.Map as Map +import Data.Map (Map) + +-- ----------------------------------------------------------------------------- +-- 
The PackageDescription type + +-- | This data type is the internal representation of the file @pkg.cabal@. +-- It contains two kinds of information about the package: information +-- which is needed for all packages, such as the package name and version, and +-- information which is needed for the simple build system only, such as +-- the compiler options and library name. +-- +data PackageDescription + = PackageDescription { + -- the following are required by all packages: + package :: PackageIdentifier, + license :: License, + licenseFiles :: [FilePath], + copyright :: String, + maintainer :: String, + author :: String, + stability :: String, + testedWith :: [(CompilerFlavor,VersionRange)], + homepage :: String, + pkgUrl :: String, + bugReports :: String, + sourceRepos :: [SourceRepo], + synopsis :: String, -- ^A one-line summary of this package + description :: String, -- ^A more verbose description of this package + category :: String, + customFieldsPD :: [(String,String)], -- ^Custom fields starting + -- with x-, stored in a + -- simple assoc-list. + + -- | YOU PROBABLY DON'T WANT TO USE THIS FIELD. This field is + -- special! Depending on how far along processing the + -- PackageDescription we are, the contents of this field are + -- either nonsense, or the collected dependencies of *all* the + -- components in this package. buildDepends is initialized by + -- 'finalizePackageDescription' and 'flattenPackageDescription'; + -- prior to that, dependency info is stored in the 'CondTree' + -- built around a 'GenericPackageDescription'. When this + -- resolution is done, dependency info is written to the inner + -- 'BuildInfo' and this field. This is all horrible, and #2066 + -- tracks progress to get rid of this field. + buildDepends :: [Dependency], + -- | The version of the Cabal spec that this package description uses. + -- For historical reasons this is specified with a version range but + -- only ranges of the form @>= v@ make sense. We are in the process of + -- transitioning to specifying just a single version, not a range. + specVersionRaw :: Either Version VersionRange, + buildType :: Maybe BuildType, + setupBuildInfo :: Maybe SetupBuildInfo, + -- components + library :: Maybe Library, + executables :: [Executable], + testSuites :: [TestSuite], + benchmarks :: [Benchmark], + dataFiles :: [FilePath], + dataDir :: FilePath, + extraSrcFiles :: [FilePath], + extraTmpFiles :: [FilePath], + extraDocFiles :: [FilePath] + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary PackageDescription + +instance Package PackageDescription where + packageId = package + +-- | The version of the Cabal spec that this package should be interpreted +-- against. +-- +-- Historically we used a version range but we are switching to using a single +-- version. Currently we accept either. This function converts into a single +-- version by ignoring upper bounds in the version range. +-- +specVersion :: PackageDescription -> Version +specVersion pkg = case specVersionRaw pkg of + Left version -> version + Right versionRange -> case asVersionIntervals versionRange of + [] -> Version [0] [] + ((LowerBound version _, _):_) -> version + +-- | The range of versions of the Cabal tools that this package is intended to +-- work with. +-- +-- This function is deprecated and should not be used for new purposes, only to +-- support old packages that rely on the old interpretation. 
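+--
+-- As a rough illustration (not from the original source): for a package
+-- declaring @cabal-version: >= 1.10@, 'specVersion' gives @Version [1,10] []@
+-- while 'descCabalVersion' returns the raw range @>= 1.10@.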
+-- +descCabalVersion :: PackageDescription -> VersionRange +descCabalVersion pkg = case specVersionRaw pkg of + Left version -> orLaterVersion version + Right versionRange -> versionRange +{-# DEPRECATED descCabalVersion "Use specVersion instead" #-} + +emptyPackageDescription :: PackageDescription +emptyPackageDescription + = PackageDescription { + package = PackageIdentifier (PackageName "") + (Version [] []), + license = UnspecifiedLicense, + licenseFiles = [], + specVersionRaw = Right anyVersion, + buildType = Nothing, + copyright = "", + maintainer = "", + author = "", + stability = "", + testedWith = [], + buildDepends = [], + homepage = "", + pkgUrl = "", + bugReports = "", + sourceRepos = [], + synopsis = "", + description = "", + category = "", + customFieldsPD = [], + setupBuildInfo = Nothing, + library = Nothing, + executables = [], + testSuites = [], + benchmarks = [], + dataFiles = [], + dataDir = "", + extraSrcFiles = [], + extraTmpFiles = [], + extraDocFiles = [] + } + +-- | The type of build system used by this package. +data BuildType + = Simple -- ^ calls @Distribution.Simple.defaultMain@ + | Configure -- ^ calls @Distribution.Simple.defaultMainWithHooks defaultUserHooks@, + -- which invokes @configure@ to generate additional build + -- information used by later phases. + | Make -- ^ calls @Distribution.Make.defaultMain@ + | Custom -- ^ uses user-supplied @Setup.hs@ or @Setup.lhs@ (default) + | UnknownBuildType String + -- ^ a package that uses an unknown build type cannot actually + -- be built. Doing it this way rather than just giving a + -- parse error means we get better error messages and allows + -- you to inspect the rest of the package description. + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary BuildType + +knownBuildTypes :: [BuildType] +knownBuildTypes = [Simple, Configure, Make, Custom] + +instance Text BuildType where + disp (UnknownBuildType other) = Disp.text other + disp other = Disp.text (show other) + + parse = do + name <- Parse.munch1 Char.isAlphaNum + return $ case name of + "Simple" -> Simple + "Configure" -> Configure + "Custom" -> Custom + "Make" -> Make + _ -> UnknownBuildType name + +-- --------------------------------------------------------------------------- +-- The SetupBuildInfo type + +-- One can see this as a very cut-down version of BuildInfo below. +-- To keep things simple for tools that compile Setup.hs we limit the +-- options authors can specify to just Haskell package dependencies. + +data SetupBuildInfo = SetupBuildInfo { + setupDepends :: [Dependency], + defaultSetupDepends :: Bool + -- ^ Is this a default 'custom-setup' section added by the cabal-install + -- code (as opposed to user-provided)? This field is only used + -- internally, and doesn't correspond to anything in the .cabal + -- file. See #3199. + } + deriving (Generic, Show, Eq, Read, Typeable, Data) + +instance Binary SetupBuildInfo + +instance Semi.Monoid SetupBuildInfo where + mempty = SetupBuildInfo [] False + mappend = (Semi.<>) + +instance Semigroup SetupBuildInfo where + a <> b = SetupBuildInfo (setupDepends a Semi.<> setupDepends b) + (defaultSetupDepends a || defaultSetupDepends b) + +-- --------------------------------------------------------------------------- +-- Module renaming + +-- | Renaming applied to the modules provided by a package. +-- The boolean indicates whether or not to also include all of the +-- original names of modules. 
Thus, @ModuleRenaming False []@ is +-- "don't expose any modules, and @ModuleRenaming True [("Data.Bool", "Bool")]@ +-- is, "expose all modules, but also expose @Data.Bool@ as @Bool@". +-- +data ModuleRenaming = ModuleRenaming Bool [(ModuleName, ModuleName)] + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +defaultRenaming :: ModuleRenaming +defaultRenaming = ModuleRenaming True [] + +lookupRenaming :: Package pkg => pkg -> Map PackageName ModuleRenaming -> ModuleRenaming +lookupRenaming = Map.findWithDefault defaultRenaming . packageName + +instance Binary ModuleRenaming where + +instance Monoid ModuleRenaming where + mempty = ModuleRenaming False [] + mappend = (Semi.<>) + +instance Semigroup ModuleRenaming where + ModuleRenaming b rns <> ModuleRenaming b' rns' + = ModuleRenaming (b || b') (rns ++ rns') -- ToDo: dedupe? + +-- NB: parentheses are mandatory, because later we may extend this syntax +-- to allow "hiding (A, B)" or other modifier words. +instance Text ModuleRenaming where + disp (ModuleRenaming True []) = Disp.empty + disp (ModuleRenaming b vs) = (if b then text "with" else Disp.empty) <+> dispRns + where dispRns = Disp.parens + (Disp.hsep + (Disp.punctuate Disp.comma (map dispEntry vs))) + dispEntry (orig, new) + | orig == new = disp orig + | otherwise = disp orig <+> text "as" <+> disp new + + parse = do Parse.string "with" >> Parse.skipSpaces + fmap (ModuleRenaming True) parseRns + <++ fmap (ModuleRenaming False) parseRns + <++ return (ModuleRenaming True []) + where parseRns = do + rns <- Parse.between (Parse.char '(') (Parse.char ')') parseList + Parse.skipSpaces + return rns + parseList = + Parse.sepBy parseEntry (Parse.char ',' >> Parse.skipSpaces) + parseEntry :: Parse.ReadP r (ModuleName, ModuleName) + parseEntry = do + orig <- parse + Parse.skipSpaces + (do _ <- Parse.string "as" + Parse.skipSpaces + new <- parse + Parse.skipSpaces + return (orig, new) + <++ + return (orig, orig)) + +-- --------------------------------------------------------------------------- +-- The Library type + +data Library = Library { + exposedModules :: [ModuleName], + reexportedModules :: [ModuleReexport], + requiredSignatures:: [ModuleName], -- ^ What sigs need implementations? + exposedSignatures:: [ModuleName], -- ^ What sigs are visible to users? + libExposed :: Bool, -- ^ Is the lib to be exposed by default? + libBuildInfo :: BuildInfo + } + deriving (Generic, Show, Eq, Read, Typeable, Data) + +instance Binary Library + +instance Monoid Library where + mempty = Library { + exposedModules = mempty, + reexportedModules = mempty, + requiredSignatures = mempty, + exposedSignatures = mempty, + libExposed = True, + libBuildInfo = mempty + } + mappend = (Semi.<>) + +instance Semigroup Library where + a <> b = Library { + exposedModules = combine exposedModules, + reexportedModules = combine reexportedModules, + requiredSignatures = combine requiredSignatures, + exposedSignatures = combine exposedSignatures, + libExposed = libExposed a && libExposed b, -- so False propagates + libBuildInfo = combine libBuildInfo + } + where combine field = field a `mappend` field b + +emptyLibrary :: Library +emptyLibrary = mempty + +-- |does this package have any libraries? +hasLibs :: PackageDescription -> Bool +hasLibs p = maybe False (buildable . 
libBuildInfo) (library p) + +-- |'Maybe' version of 'hasLibs' +maybeHasLibs :: PackageDescription -> Maybe Library +maybeHasLibs p = + library p >>= \lib -> if buildable (libBuildInfo lib) + then Just lib + else Nothing + +-- |If the package description has a library section, call the given +-- function with the library build info as argument. +withLib :: PackageDescription -> (Library -> IO ()) -> IO () +withLib pkg_descr f = + traverse_ f (maybeHasLibs pkg_descr) + +-- | Get all the module names from the library (exposed and internal modules) +-- which need to be compiled. (This does not include reexports, which +-- do not need to be compiled.) +libModules :: Library -> [ModuleName] +libModules lib = exposedModules lib + ++ otherModules (libBuildInfo lib) + ++ exposedSignatures lib + ++ requiredSignatures lib + +-- ----------------------------------------------------------------------------- +-- Module re-exports + +data ModuleReexport = ModuleReexport { + moduleReexportOriginalPackage :: Maybe PackageName, + moduleReexportOriginalName :: ModuleName, + moduleReexportName :: ModuleName + } + deriving (Eq, Generic, Read, Show, Typeable, Data) + +instance Binary ModuleReexport + +instance Text ModuleReexport where + disp (ModuleReexport mpkgname origname newname) = + maybe Disp.empty (\pkgname -> disp pkgname <> Disp.char ':') mpkgname + <> disp origname + <+> if newname == origname + then Disp.empty + else Disp.text "as" <+> disp newname + + parse = do + mpkgname <- Parse.option Nothing $ do + pkgname <- parse + _ <- Parse.char ':' + return (Just pkgname) + origname <- parse + newname <- Parse.option origname $ do + Parse.skipSpaces + _ <- Parse.string "as" + Parse.skipSpaces + parse + return (ModuleReexport mpkgname origname newname) + +-- --------------------------------------------------------------------------- +-- The Executable type + +data Executable = Executable { + exeName :: String, + modulePath :: FilePath, + buildInfo :: BuildInfo + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary Executable + +instance Monoid Executable where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup Executable where + a <> b = Executable{ + exeName = combine' exeName, + modulePath = combine modulePath, + buildInfo = combine buildInfo + } + where combine field = field a `mappend` field b + combine' field = case (field a, field b) of + ("","") -> "" + ("", x) -> x + (x, "") -> x + (x, y) -> error $ "Ambiguous values for executable field: '" + ++ x ++ "' and '" ++ y ++ "'" + +emptyExecutable :: Executable +emptyExecutable = mempty + +-- |does this package have any executables? +hasExes :: PackageDescription -> Bool +hasExes p = any (buildable . buildInfo) (executables p) + +-- | Perform the action on each buildable 'Executable' in the package +-- description. +withExe :: PackageDescription -> (Executable -> IO ()) -> IO () +withExe pkg_descr f = + sequence_ [f exe | exe <- executables pkg_descr, buildable (buildInfo exe)] + +-- | Get all the module names from an exe +exeModules :: Executable -> [ModuleName] +exeModules exe = otherModules (buildInfo exe) + +-- --------------------------------------------------------------------------- +-- The TestSuite type + +-- | A \"test-suite\" stanza in a cabal file. 
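+--
+-- A rough sketch of such a stanza (illustrative, not taken from the original
+-- source):
+--
+-- > test-suite unit-tests
+-- >   type:          exitcode-stdio-1.0
+-- >   main-is:       Tests.hs
+-- >   build-depends: base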
+--
+data TestSuite = TestSuite {
+        testName      :: String,
+        testInterface :: TestSuiteInterface,
+        testBuildInfo :: BuildInfo,
+        testEnabled   :: Bool
+        -- TODO: By having a 'testEnabled' field in the PackageDescription, we
+        -- are mixing build status information (i.e., arguments to 'configure')
+        -- with static package description information. This is undesirable, but
+        -- a better solution is waiting on the next overhaul to the
+        -- GenericPackageDescription -> PackageDescription resolution process.
+    }
+    deriving (Generic, Show, Read, Eq, Typeable, Data)
+
+instance Binary TestSuite
+
+-- | The test suite interfaces that are currently defined. Each test suite must
+-- specify which interface it supports.
+--
+-- More interfaces may be defined in future, either new revisions or totally
+-- new interfaces.
+--
+data TestSuiteInterface =
+
+     -- | Test interface \"exitcode-stdio-1.0\". The test-suite takes the form
+     -- of an executable. It returns a zero exit code for success, non-zero for
+     -- failure. The stdout and stderr channels may be logged. It takes no
+     -- command line parameters and nothing on stdin.
+     --
+     TestSuiteExeV10 Version FilePath
+
+     -- | Test interface \"detailed-0.9\". The test-suite takes the form of a
+     -- library containing a designated module that exports \"tests :: [Test]\".
+     --
+   | TestSuiteLibV09 Version ModuleName
+
+     -- | A test suite that does not conform to one of the above interfaces for
+     -- the given reason (e.g. unknown test type).
+     --
+   | TestSuiteUnsupported TestType
+   deriving (Eq, Generic, Read, Show, Typeable, Data)
+
+instance Binary TestSuiteInterface
+
+instance Monoid TestSuite where
+    mempty = TestSuite {
+        testName      = mempty,
+        testInterface = mempty,
+        testBuildInfo = mempty,
+        testEnabled   = False
+    }
+    mappend = (Semi.<>)
+
+instance Semigroup TestSuite where
+    a <> b = TestSuite {
+        testName      = combine' testName,
+        testInterface = combine  testInterface,
+        testBuildInfo = combine  testBuildInfo,
+        testEnabled   = testEnabled a || testEnabled b
+    }
+        where combine   field = field a `mappend` field b
+              combine' f = case (f a, f b) of
+                        ("", x) -> x
+                        (x, "") -> x
+                        (x, y) -> error $ "Ambiguous values for test field: '"
+                            ++ x ++ "' and '" ++ y ++ "'"
+
+instance Monoid TestSuiteInterface where
+    mempty  = TestSuiteUnsupported (TestTypeUnknown mempty (Version [] []))
+    mappend = (Semi.<>)
+
+instance Semigroup TestSuiteInterface where
+    a <> (TestSuiteUnsupported _) = a
+    _ <> b                        = b
+
+emptyTestSuite :: TestSuite
+emptyTestSuite = mempty
+
+-- | Does this package have any test suites?
+hasTests :: PackageDescription -> Bool
+hasTests = any (buildable . testBuildInfo) . testSuites
+
+-- | Get all the enabled test suites from a package.
+enabledTests :: PackageDescription -> [TestSuite]
+enabledTests = filter testEnabled . testSuites
+
+-- | Perform an action on each buildable 'TestSuite' in a package.
+withTest :: PackageDescription -> (TestSuite -> IO ()) -> IO ()
+withTest pkg_descr f =
+    mapM_ f $ filter (buildable . testBuildInfo) $ enabledTests pkg_descr
+
+-- | Get all the module names from a test suite.
+testModules :: TestSuite -> [ModuleName]
+testModules test = (case testInterface test of
+                      TestSuiteLibV09 _ m -> [m]
+                      _                   -> [])
+                ++ otherModules (testBuildInfo test)
+
+-- | The \"test-type\" field in the test suite stanza.
+--
+data TestType = TestTypeExe Version     -- ^ \"type: exitcode-stdio-x.y\"
+              | TestTypeLib Version     -- ^ \"type: detailed-x.y\"
+              | TestTypeUnknown String Version -- ^ Some unknown test type e.g. 
\"type: foo\" + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary TestType + +knownTestTypes :: [TestType] +knownTestTypes = [ TestTypeExe (Version [1,0] []) + , TestTypeLib (Version [0,9] []) ] + +stdParse :: Text ver => (ver -> String -> res) -> Parse.ReadP r res +stdParse f = do + cs <- Parse.sepBy1 component (Parse.char '-') + _ <- Parse.char '-' + ver <- parse + let name = intercalate "-" cs + return $! f ver (lowercase name) + where + component = do + cs <- Parse.munch1 Char.isAlphaNum + if all Char.isDigit cs then Parse.pfail else return cs + -- each component must contain an alphabetic character, to avoid + -- ambiguity in identifiers like foo-1 (the 1 is the version number). + +instance Text TestType where + disp (TestTypeExe ver) = text "exitcode-stdio-" <> disp ver + disp (TestTypeLib ver) = text "detailed-" <> disp ver + disp (TestTypeUnknown name ver) = text name <> char '-' <> disp ver + + parse = stdParse $ \ver name -> case name of + "exitcode-stdio" -> TestTypeExe ver + "detailed" -> TestTypeLib ver + _ -> TestTypeUnknown name ver + + +testType :: TestSuite -> TestType +testType test = case testInterface test of + TestSuiteExeV10 ver _ -> TestTypeExe ver + TestSuiteLibV09 ver _ -> TestTypeLib ver + TestSuiteUnsupported testtype -> testtype + +-- --------------------------------------------------------------------------- +-- The Benchmark type + +-- | A \"benchmark\" stanza in a cabal file. +-- +data Benchmark = Benchmark { + benchmarkName :: String, + benchmarkInterface :: BenchmarkInterface, + benchmarkBuildInfo :: BuildInfo, + benchmarkEnabled :: Bool + -- TODO: See TODO for 'testEnabled'. + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary Benchmark + +-- | The benchmark interfaces that are currently defined. Each +-- benchmark must specify which interface it supports. +-- +-- More interfaces may be defined in future, either new revisions or +-- totally new interfaces. +-- +data BenchmarkInterface = + + -- | Benchmark interface \"exitcode-stdio-1.0\". The benchmark + -- takes the form of an executable. It returns a zero exit code + -- for success, non-zero for failure. The stdout and stderr + -- channels may be logged. It takes no command line parameters + -- and nothing on stdin. + -- + BenchmarkExeV10 Version FilePath + + -- | A benchmark that does not conform to one of the above + -- interfaces for the given reason (e.g. unknown benchmark type). 
+     --
+   | BenchmarkUnsupported BenchmarkType
+   deriving (Eq, Generic, Read, Show, Typeable, Data)
+
+instance Binary BenchmarkInterface
+
+instance Monoid Benchmark where
+    mempty = Benchmark {
+        benchmarkName      = mempty,
+        benchmarkInterface = mempty,
+        benchmarkBuildInfo = mempty,
+        benchmarkEnabled   = False
+    }
+    mappend = (Semi.<>)
+
+instance Semigroup Benchmark where
+    a <> b = Benchmark {
+        benchmarkName      = combine' benchmarkName,
+        benchmarkInterface = combine  benchmarkInterface,
+        benchmarkBuildInfo = combine  benchmarkBuildInfo,
+        benchmarkEnabled   = benchmarkEnabled a || benchmarkEnabled b
+    }
+        where combine   field = field a `mappend` field b
+              combine' f = case (f a, f b) of
+                        ("", x) -> x
+                        (x, "") -> x
+                        (x, y) -> error $ "Ambiguous values for benchmark field: '"
+                            ++ x ++ "' and '" ++ y ++ "'"
+
+instance Monoid BenchmarkInterface where
+    mempty  = BenchmarkUnsupported (BenchmarkTypeUnknown mempty (Version [] []))
+    mappend = (Semi.<>)
+
+instance Semigroup BenchmarkInterface where
+    a <> (BenchmarkUnsupported _) = a
+    _ <> b                        = b
+
+emptyBenchmark :: Benchmark
+emptyBenchmark = mempty
+
+-- | Does this package have any benchmarks?
+hasBenchmarks :: PackageDescription -> Bool
+hasBenchmarks = any (buildable . benchmarkBuildInfo) . benchmarks
+
+-- | Get all the enabled benchmarks from a package.
+enabledBenchmarks :: PackageDescription -> [Benchmark]
+enabledBenchmarks = filter benchmarkEnabled . benchmarks
+
+-- | Perform an action on each buildable 'Benchmark' in a package.
+withBenchmark :: PackageDescription -> (Benchmark -> IO ()) -> IO ()
+withBenchmark pkg_descr f =
+    mapM_ f $ filter (buildable . benchmarkBuildInfo) $ enabledBenchmarks pkg_descr
+
+-- | Get all the module names from a benchmark.
+benchmarkModules :: Benchmark -> [ModuleName]
+benchmarkModules benchmark = otherModules (benchmarkBuildInfo benchmark)
+
+-- | The \"benchmark-type\" field in the benchmark stanza.
+--
+data BenchmarkType = BenchmarkTypeExe Version
+                     -- ^ \"type: exitcode-stdio-x.y\"
+                   | BenchmarkTypeUnknown String Version
+                     -- ^ Some unknown benchmark type e.g. \"type: foo\"
+                   deriving (Generic, Show, Read, Eq, Typeable, Data)
+
+instance Binary BenchmarkType
+
+knownBenchmarkTypes :: [BenchmarkType]
+knownBenchmarkTypes = [ BenchmarkTypeExe (Version [1,0] []) ]
+
+instance Text BenchmarkType where
+  disp (BenchmarkTypeExe ver)          = text "exitcode-stdio-" <> disp ver
+  disp (BenchmarkTypeUnknown name ver) = text name <> char '-' <> disp ver
+
+  parse = stdParse $ \ver name -> case name of
+    "exitcode-stdio" -> BenchmarkTypeExe ver
+    _                -> BenchmarkTypeUnknown name ver
+
+
+benchmarkType :: Benchmark -> BenchmarkType
+benchmarkType benchmark = case benchmarkInterface benchmark of
+  BenchmarkExeV10 ver _              -> BenchmarkTypeExe ver
+  BenchmarkUnsupported benchmarktype -> benchmarktype
+
+-- ---------------------------------------------------------------------------
+-- The BuildInfo type
+
+-- Consider refactoring into executable and library versions.
+data BuildInfo = BuildInfo {
+        buildable         :: Bool,         -- ^ component is buildable here
+        buildTools        :: [Dependency], -- ^ tools needed to build this bit
+        cppOptions        :: [String],     -- ^ options for pre-processing Haskell code
+        ccOptions         :: [String],     -- ^ options for C compiler
+        ldOptions         :: [String],     -- ^ options for linker
+        pkgconfigDepends  :: [Dependency], -- ^ pkg-config packages that are used
+        frameworks        :: [String],     -- ^support frameworks for Mac OS X
+        extraFrameworkDirs:: [String],     -- ^ extra locations to find frameworks. 
+ cSources :: [FilePath], + jsSources :: [FilePath], + hsSourceDirs :: [FilePath], -- ^ where to look for the Haskell module hierarchy + otherModules :: [ModuleName], -- ^ non-exposed or non-main modules + + defaultLanguage :: Maybe Language,-- ^ language used when not explicitly specified + otherLanguages :: [Language], -- ^ other languages used within the package + defaultExtensions :: [Extension], -- ^ language extensions used by all modules + otherExtensions :: [Extension], -- ^ other language extensions used within the package + oldExtensions :: [Extension], -- ^ the old extensions field, treated same as 'defaultExtensions' + + extraLibs :: [String], -- ^ what libraries to link with when compiling a program that uses your package + extraGHCiLibs :: [String], -- ^ if present, overrides extraLibs when package is loaded with GHCi. + extraLibDirs :: [String], + includeDirs :: [FilePath], -- ^directories to find .h files + includes :: [FilePath], -- ^ The .h files to be found in includeDirs + installIncludes :: [FilePath], -- ^ .h files to install with the package + options :: [(CompilerFlavor,[String])], + profOptions :: [(CompilerFlavor,[String])], + sharedOptions :: [(CompilerFlavor,[String])], + customFieldsBI :: [(String,String)], -- ^Custom fields starting + -- with x-, stored in a + -- simple assoc-list. + targetBuildDepends :: [Dependency], -- ^ Dependencies specific to a library or executable target + targetBuildRenaming :: Map PackageName ModuleRenaming + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary BuildInfo + +instance Monoid BuildInfo where + mempty = BuildInfo { + buildable = True, + buildTools = [], + cppOptions = [], + ccOptions = [], + ldOptions = [], + pkgconfigDepends = [], + frameworks = [], + extraFrameworkDirs = [], + cSources = [], + jsSources = [], + hsSourceDirs = [], + otherModules = [], + defaultLanguage = Nothing, + otherLanguages = [], + defaultExtensions = [], + otherExtensions = [], + oldExtensions = [], + extraLibs = [], + extraGHCiLibs = [], + extraLibDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + options = [], + profOptions = [], + sharedOptions = [], + customFieldsBI = [], + targetBuildDepends = [], + targetBuildRenaming = Map.empty + } + mappend = (Semi.<>) + +instance Semigroup BuildInfo where + a <> b = BuildInfo { + buildable = buildable a && buildable b, + buildTools = combine buildTools, + cppOptions = combine cppOptions, + ccOptions = combine ccOptions, + ldOptions = combine ldOptions, + pkgconfigDepends = combine pkgconfigDepends, + frameworks = combineNub frameworks, + extraFrameworkDirs = combineNub extraFrameworkDirs, + cSources = combineNub cSources, + jsSources = combineNub jsSources, + hsSourceDirs = combineNub hsSourceDirs, + otherModules = combineNub otherModules, + defaultLanguage = combineMby defaultLanguage, + otherLanguages = combineNub otherLanguages, + defaultExtensions = combineNub defaultExtensions, + otherExtensions = combineNub otherExtensions, + oldExtensions = combineNub oldExtensions, + extraLibs = combine extraLibs, + extraGHCiLibs = combine extraGHCiLibs, + extraLibDirs = combineNub extraLibDirs, + includeDirs = combineNub includeDirs, + includes = combineNub includes, + installIncludes = combineNub installIncludes, + options = combine options, + profOptions = combine profOptions, + sharedOptions = combine sharedOptions, + customFieldsBI = combine customFieldsBI, + targetBuildDepends = combineNub targetBuildDepends, + targetBuildRenaming = combineMap targetBuildRenaming 
+ } + where + combine field = field a `mappend` field b + combineNub field = nub (combine field) + combineMby field = field b `mplus` field a + combineMap field = Map.unionWith mappend (field a) (field b) + +emptyBuildInfo :: BuildInfo +emptyBuildInfo = mempty + +-- | The 'BuildInfo' for the library (if there is one and it's buildable), and +-- all buildable executables, test suites and benchmarks. Useful for gathering +-- dependencies. +allBuildInfo :: PackageDescription -> [BuildInfo] +allBuildInfo pkg_descr = [ bi | Just lib <- [library pkg_descr] + , let bi = libBuildInfo lib + , buildable bi ] + ++ [ bi | exe <- executables pkg_descr + , let bi = buildInfo exe + , buildable bi ] + ++ [ bi | tst <- testSuites pkg_descr + , let bi = testBuildInfo tst + , buildable bi + , testEnabled tst ] + ++ [ bi | tst <- benchmarks pkg_descr + , let bi = benchmarkBuildInfo tst + , buildable bi + , benchmarkEnabled tst ] + --FIXME: many of the places where this is used, we actually want to look at + -- unbuildable bits too, probably need separate functions + +-- | The 'Language's used by this component +-- +allLanguages :: BuildInfo -> [Language] +allLanguages bi = maybeToList (defaultLanguage bi) + ++ otherLanguages bi + +-- | The 'Extension's that are used somewhere by this component +-- +allExtensions :: BuildInfo -> [Extension] +allExtensions bi = usedExtensions bi + ++ otherExtensions bi + +-- | The 'Extensions' that are used by all modules in this component +-- +usedExtensions :: BuildInfo -> [Extension] +usedExtensions bi = oldExtensions bi + ++ defaultExtensions bi + +type HookedBuildInfo = (Maybe BuildInfo, [(String, BuildInfo)]) + +emptyHookedBuildInfo :: HookedBuildInfo +emptyHookedBuildInfo = (Nothing, []) + +-- |Select options for a particular Haskell compiler. +hcOptions :: CompilerFlavor -> BuildInfo -> [String] +hcOptions = lookupHcOptions options + +hcProfOptions :: CompilerFlavor -> BuildInfo -> [String] +hcProfOptions = lookupHcOptions profOptions + +hcSharedOptions :: CompilerFlavor -> BuildInfo -> [String] +hcSharedOptions = lookupHcOptions sharedOptions + +lookupHcOptions :: (BuildInfo -> [(CompilerFlavor,[String])]) + -> CompilerFlavor -> BuildInfo -> [String] +lookupHcOptions f hc bi = [ opt | (hc',opts) <- f bi + , hc' == hc + , opt <- opts ] + +-- ------------------------------------------------------------ +-- * Source repos +-- ------------------------------------------------------------ + +-- | Information about the source revision control system for a package. +-- +-- When specifying a repo it is useful to know the meaning or intention of the +-- information as doing so enables automation. There are two obvious common +-- purposes: one is to find the repo for the latest development version, the +-- other is to find the repo for this specific release. The 'ReopKind' +-- specifies which one we mean (or another custom one). +-- +-- A package can specify one or the other kind or both. Most will specify just +-- a head repo but some may want to specify a repo to reconstruct the sources +-- for this package release. +-- +-- The required information is the 'RepoType' which tells us if it's using +-- 'Darcs', 'Git' for example. The 'repoLocation' and other details are +-- interpreted according to the repo type. +-- +data SourceRepo = SourceRepo { + -- | The kind of repo. This field is required. + repoKind :: RepoKind, + + -- | The type of the source repository system for this repo, eg 'Darcs' or + -- 'Git'. This field is required. 
+ repoType :: Maybe RepoType, + + -- | The location of the repository. For most 'RepoType's this is a URL. + -- This field is required. + repoLocation :: Maybe String, + + -- | 'CVS' can put multiple \"modules\" on one server and requires a + -- module name in addition to the location to identify a particular repo. + -- Logically this is part of the location but unfortunately has to be + -- specified separately. This field is required for the 'CVS' 'RepoType' and + -- should not be given otherwise. + repoModule :: Maybe String, + + -- | The name or identifier of the branch, if any. Many source control + -- systems have the notion of multiple branches in a repo that exist in the + -- same location. For example 'Git' and 'CVS' use this while systems like + -- 'Darcs' use different locations for different branches. This field is + -- optional but should be used if necessary to identify the sources, + -- especially for the 'RepoThis' repo kind. + repoBranch :: Maybe String, + + -- | The tag identify a particular state of the repository. This should be + -- given for the 'RepoThis' repo kind and not for 'RepoHead' kind. + -- + repoTag :: Maybe String, + + -- | Some repositories contain multiple projects in different subdirectories + -- This field specifies the subdirectory where this packages sources can be + -- found, eg the subdirectory containing the @.cabal@ file. It is interpreted + -- relative to the root of the repository. This field is optional. If not + -- given the default is \".\" ie no subdirectory. + repoSubdir :: Maybe FilePath +} + deriving (Eq, Generic, Read, Show, Typeable, Data) + +instance Binary SourceRepo + +-- | What this repo info is for, what it represents. +-- +data RepoKind = + -- | The repository for the \"head\" or development version of the project. + -- This repo is where we should track the latest development activity or + -- the usual repo people should get to contribute patches. + RepoHead + + -- | The repository containing the sources for this exact package version + -- or release. For this kind of repo a tag should be given to give enough + -- information to re-create the exact sources. + | RepoThis + + | RepoKindUnknown String + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary RepoKind + +-- | An enumeration of common source control systems. The fields used in the +-- 'SourceRepo' depend on the type of repo. The tools and methods used to +-- obtain and track the repo depend on the repo type. 
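As an illustration of the record just defined, this is roughly the value that a "source-repository head" stanza corresponds to, using the 'RepoKind' above and the 'RepoType' declared just below; the URL and subdirectory are made-up examples.

import Distribution.PackageDescription (SourceRepo (..), RepoKind (..), RepoType (..))

headRepo :: SourceRepo
headRepo = SourceRepo
  { repoKind     = RepoHead
  , repoType     = Just Git
  , repoLocation = Just "https://github.com/simonmar/async.git"
  , repoModule   = Nothing          -- only needed for the CVS repo type
  , repoBranch   = Nothing
  , repoTag      = Nothing          -- only expected for the RepoThis kind
  , repoSubdir   = Nothing
  }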
+-- +data RepoType = Darcs | Git | SVN | CVS + | Mercurial | GnuArch | Bazaar | Monotone + | OtherRepoType String + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary RepoType + +knownRepoTypes :: [RepoType] +knownRepoTypes = [Darcs, Git, SVN, CVS + ,Mercurial, GnuArch, Bazaar, Monotone] + +repoTypeAliases :: RepoType -> [String] +repoTypeAliases Bazaar = ["bzr"] +repoTypeAliases Mercurial = ["hg"] +repoTypeAliases GnuArch = ["arch"] +repoTypeAliases _ = [] + +instance Text RepoKind where + disp RepoHead = Disp.text "head" + disp RepoThis = Disp.text "this" + disp (RepoKindUnknown other) = Disp.text other + + parse = do + name <- ident + return $ case lowercase name of + "head" -> RepoHead + "this" -> RepoThis + _ -> RepoKindUnknown name + +instance Text RepoType where + disp (OtherRepoType other) = Disp.text other + disp other = Disp.text (lowercase (show other)) + parse = fmap classifyRepoType ident + +classifyRepoType :: String -> RepoType +classifyRepoType s = + fromMaybe (OtherRepoType s) $ lookup (lowercase s) repoTypeMap + where + repoTypeMap = [ (name, repoType') + | repoType' <- knownRepoTypes + , name <- display repoType' : repoTypeAliases repoType' ] + +ident :: Parse.ReadP r String +ident = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') + +lowercase :: String -> String +lowercase = map Char.toLower + +-- ------------------------------------------------------------ +-- * Utils +-- ------------------------------------------------------------ + +updatePackageDescription :: HookedBuildInfo -> PackageDescription -> PackageDescription +updatePackageDescription (mb_lib_bi, exe_bi) p + = p{ executables = updateExecutables exe_bi (executables p) + , library = updateLibrary mb_lib_bi (library p) + } + where + updateLibrary :: Maybe BuildInfo -> Maybe Library -> Maybe Library + updateLibrary (Just bi) (Just lib) = Just (lib{libBuildInfo = bi `mappend` libBuildInfo lib}) + updateLibrary Nothing mb_lib = mb_lib + updateLibrary (Just _) Nothing = Nothing + + updateExecutables :: [(String, BuildInfo)] -- ^[(exeName, new buildinfo)] + -> [Executable] -- ^list of executables to update + -> [Executable] -- ^list with exeNames updated + updateExecutables exe_bi' executables' = foldr updateExecutable executables' exe_bi' + + updateExecutable :: (String, BuildInfo) -- ^(exeName, new buildinfo) + -> [Executable] -- ^list of executables to update + -> [Executable] -- ^list with exeName updated + updateExecutable _ [] = [] + updateExecutable exe_bi'@(name,bi) (exe:exes) + | exeName exe == name = exe{buildInfo = bi `mappend` buildInfo exe} : exes + | otherwise = exe : updateExecutable exe_bi' exes + +-- --------------------------------------------------------------------------- +-- The GenericPackageDescription type + +data GenericPackageDescription = + GenericPackageDescription { + packageDescription :: PackageDescription, + genPackageFlags :: [Flag], + condLibrary :: Maybe (CondTree ConfVar [Dependency] Library), + condExecutables :: [(String, CondTree ConfVar [Dependency] Executable)], + condTestSuites :: [(String, CondTree ConfVar [Dependency] TestSuite)], + condBenchmarks :: [(String, CondTree ConfVar [Dependency] Benchmark)] + } + deriving (Show, Eq, Typeable, Data, Generic) + +instance Package GenericPackageDescription where + packageId = packageId . 
packageDescription + +instance Binary GenericPackageDescription + +-- | A flag can represent a feature to be included, or a way of linking +-- a target against its dependencies, or in fact whatever you can think of. +data Flag = MkFlag + { flagName :: FlagName + , flagDescription :: String + , flagDefault :: Bool + , flagManual :: Bool + } + deriving (Show, Eq, Typeable, Data, Generic) + +instance Binary Flag + +-- | A 'FlagName' is the name of a user-defined configuration flag +newtype FlagName = FlagName String + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary FlagName + +-- | A 'FlagAssignment' is a total or partial mapping of 'FlagName's to +-- 'Bool' flag values. It represents the flags chosen by the user or +-- discovered during configuration. For example @--flags=foo --flags=-bar@ +-- becomes @[("foo", True), ("bar", False)]@ +-- +type FlagAssignment = [(FlagName, Bool)] + +-- | A @ConfVar@ represents the variable type used. +data ConfVar = OS OS + | Arch Arch + | Flag FlagName + | Impl CompilerFlavor VersionRange + deriving (Eq, Show, Typeable, Data, Generic) + +instance Binary ConfVar + +-- | A boolean expression parameterized over the variable type used. +data Condition c = Var c + | Lit Bool + | CNot (Condition c) + | COr (Condition c) (Condition c) + | CAnd (Condition c) (Condition c) + deriving (Show, Eq, Typeable, Data, Generic) + +-- | Boolean negation of a 'Condition' value. +cNot :: Condition a -> Condition a +cNot (Lit b) = Lit (not b) +cNot (CNot c) = c +cNot c = CNot c + +-- | Boolean AND of two 'Condtion' values. +cAnd :: Condition a -> Condition a -> Condition a +cAnd (Lit False) _ = Lit False +cAnd _ (Lit False) = Lit False +cAnd (Lit True) x = x +cAnd x (Lit True) = x +cAnd x y = CAnd x y + +-- | Boolean OR of two 'Condition' values. 
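A small sketch of the condition that an "if os(windows) && !flag(debug)" line in a .cabal file denotes, written directly with the 'Condition', 'ConfVar' and 'FlagName' constructors above; the flag name is an invented example, and the real translation is performed by Cabal's .cabal parser, not shown here.

import Distribution.PackageDescription (Condition (..), ConfVar (..), FlagName (..))
import Distribution.System (OS (Windows))

windowsNoDebug :: Condition ConfVar
windowsNoDebug = CAnd (Var (OS Windows)) (CNot (Var (Flag (FlagName "debug"))))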
+cOr :: Eq v => Condition v -> Condition v -> Condition v +cOr (Lit True) _ = Lit True +cOr _ (Lit True) = Lit True +cOr (Lit False) x = x +cOr x (Lit False) = x +cOr c (CNot d) + | c == d = Lit True +cOr (CNot c) d + | c == d = Lit True +cOr x y = COr x y + +instance Functor Condition where + f `fmap` Var c = Var (f c) + _ `fmap` Lit c = Lit c + f `fmap` CNot c = CNot (fmap f c) + f `fmap` COr c d = COr (fmap f c) (fmap f d) + f `fmap` CAnd c d = CAnd (fmap f c) (fmap f d) + +instance Foldable Condition where + f `foldMap` Var c = f c + _ `foldMap` Lit _ = mempty + f `foldMap` CNot c = Fold.foldMap f c + f `foldMap` COr c d = foldMap f c `mappend` foldMap f d + f `foldMap` CAnd c d = foldMap f c `mappend` foldMap f d + +instance Traversable Condition where + f `traverse` Var c = Var `fmap` f c + _ `traverse` Lit c = pure $ Lit c + f `traverse` CNot c = CNot `fmap` Trav.traverse f c + f `traverse` COr c d = COr `fmap` traverse f c <*> traverse f d + f `traverse` CAnd c d = CAnd `fmap` traverse f c <*> traverse f d + +instance Applicative Condition where + pure = Var + (<*>) = ap + +instance Monad Condition where + return = AP.pure + -- Terminating cases + (>>=) (Lit x) _ = Lit x + (>>=) (Var x) f = f x + -- Recursing cases + (>>=) (CNot x ) f = CNot (x >>= f) + (>>=) (COr x y) f = COr (x >>= f) (y >>= f) + (>>=) (CAnd x y) f = CAnd (x >>= f) (y >>= f) + +instance Monoid (Condition a) where + mempty = Lit False + mappend = (Semi.<>) + +instance Semigroup (Condition a) where + (<>) = COr + +instance Alternative Condition where + empty = mempty + (<|>) = mappend + +instance MonadPlus Condition where + mzero = mempty + mplus = mappend + +instance Binary c => Binary (Condition c) + +data CondTree v c a = CondNode + { condTreeData :: a + , condTreeConstraints :: c + , condTreeComponents :: [( Condition v + , CondTree v c a + , Maybe (CondTree v c a))] + } + deriving (Show, Eq, Typeable, Data, Generic) + +instance (Binary v, Binary c, Binary a) => Binary (CondTree v c a) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Package.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Package.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Package.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Package.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,242 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Package +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Defines a package identifier along with a parser and pretty printer for it. +-- 'PackageIdentifier's consist of a name and an exact version. It also defines +-- a 'Dependency' data type. A dependency is a package name and a version +-- range, like @\"foo >= 1.2 && < 2\"@. 
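Stepping back to the 'CondTree' type that closed the previous hunk: a compact sketch of how it can represent a conditional build-depends block. The dependencies and the choice of a dependency list as the leaf type are chosen only for illustration.

import Distribution.PackageDescription (CondTree (..), Condition (..), ConfVar (..))
import Distribution.Package (Dependency (..), PackageName (..))
import Distribution.System (OS (Windows))
import Distribution.Version (anyVersion)

depsTree :: CondTree ConfVar [Dependency] [Dependency]
depsTree = CondNode
  { condTreeData        = [base]
  , condTreeConstraints = [base]
  , condTreeComponents  =
      [ (Var (OS Windows), CondNode [win32] [win32] [], Nothing) ]   -- no else branch
  }
  where
    base  = Dependency (PackageName "base")  anyVersion
    win32 = Dependency (PackageName "Win32") anyVersion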
+ +module Distribution.Package ( + -- * Package ids + PackageName(..), + PackageIdentifier(..), + PackageId, + + -- * Package keys/installed package IDs (used for linker symbols) + ComponentId(..), + UnitId(..), + mkUnitId, + mkLegacyUnitId, + getHSLibraryName, + InstalledPackageId, -- backwards compat + + -- * ABI hash + AbiHash(..), + + -- * Package source dependencies + Dependency(..), + thisPackageVersion, + notThisPackageVersion, + simplifyDependency, + + -- * Package classes + Package(..), packageName, packageVersion, + HasUnitId(..), + installedPackageId, + PackageInstalled(..), + ) where + +import Distribution.Version + ( Version(..), VersionRange, anyVersion, thisVersion + , notThisVersion, simplifyVersionRange ) + +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import Distribution.Compat.ReadP +import Distribution.Compat.Binary +import Distribution.Text + +import Control.DeepSeq (NFData(..)) +import qualified Data.Char as Char + ( isDigit, isAlphaNum, ) +import Data.Data ( Data ) +import Data.List ( intercalate ) +import Data.Typeable ( Typeable ) +import GHC.Generics (Generic) +import Text.PrettyPrint ((<>), (<+>), text) + +newtype PackageName = PackageName { unPackageName :: String } + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary PackageName + +instance Text PackageName where + disp (PackageName n) = Disp.text n + parse = do + ns <- Parse.sepBy1 component (Parse.char '-') + return (PackageName (intercalate "-" ns)) + where + component = do + cs <- Parse.munch1 Char.isAlphaNum + if all Char.isDigit cs then Parse.pfail else return cs + -- each component must contain an alphabetic character, to avoid + -- ambiguity in identifiers like foo-1 (the 1 is the version number). + +instance NFData PackageName where + rnf (PackageName pkg) = rnf pkg + +-- | Type alias so we can use the shorter name PackageId. +type PackageId = PackageIdentifier + +-- | The name and version of a package. +data PackageIdentifier + = PackageIdentifier { + pkgName :: PackageName, -- ^The name of this package, eg. foo + pkgVersion :: Version -- ^the version of this package, eg 1.2 + } + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary PackageIdentifier + +instance Text PackageIdentifier where + disp (PackageIdentifier n v) = case v of + Version [] _ -> disp n -- if no version, don't show version. + _ -> disp n <> Disp.char '-' <> disp v + + parse = do + n <- parse + v <- (Parse.char '-' >> parse) <++ return (Version [] []) + return (PackageIdentifier n v) + +instance NFData PackageIdentifier where + rnf (PackageIdentifier name version) = rnf name `seq` rnf version + +-- ------------------------------------------------------------ +-- * Component Source Hash +-- ------------------------------------------------------------ + +-- | A 'ComponentId' uniquely identifies the transitive source +-- code closure of a component. For non-Backpack components, it also +-- serves as the basis for install paths, symbols, etc. +-- +data ComponentId + = ComponentId String + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +{-# DEPRECATED InstalledPackageId "Use UnitId instead" #-} +type InstalledPackageId = UnitId + +instance Binary ComponentId + +instance Text ComponentId where + disp (ComponentId str) = text str + + parse = ComponentId `fmap` Parse.munch1 abi_char + where abi_char c = Char.isAlphaNum c || c `elem` "-_." 
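A short sketch of the 'Text' instances in this module in use, via 'display' and 'simpleParse' from Distribution.Text; the package name and version are arbitrary.

import Distribution.Package (Dependency, PackageIdentifier (..), PackageName (..))
import Distribution.Text (display, simpleParse)
import Distribution.Version (Version (..))

fooId :: PackageIdentifier
fooId = PackageIdentifier (PackageName "foo") (Version [1,2,0] [])

fooIdAgain :: Maybe PackageIdentifier
fooIdAgain = simpleParse (display fooId)     -- "foo-1.2.0" round-trips to Just fooId

fooDep :: Maybe Dependency
fooDep = simpleParse "foo >= 1.2 && < 2"     -- a package name plus a version range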
+ +instance NFData ComponentId where + rnf (ComponentId pk) = rnf pk + +-- | Returns library name prefixed with HS, suitable for filenames +getHSLibraryName :: UnitId -> String +getHSLibraryName (SimpleUnitId (ComponentId s)) = "HS" ++ s + +-- | For now, there is no distinction between component IDs +-- and unit IDs in Cabal. +newtype UnitId = SimpleUnitId ComponentId + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, Binary, Text, NFData) + +-- | Makes a simple-style UnitId from a string. +mkUnitId :: String -> UnitId +mkUnitId = SimpleUnitId . ComponentId + +-- | Make an old-style UnitId from a package identifier +mkLegacyUnitId :: PackageId -> UnitId +mkLegacyUnitId = SimpleUnitId . ComponentId . display + +-- ------------------------------------------------------------ +-- * Package source dependencies +-- ------------------------------------------------------------ + +-- | Describes a dependency on a source package (API) +-- +data Dependency = Dependency PackageName VersionRange + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary Dependency + +instance Text Dependency where + disp (Dependency name ver) = + disp name <+> disp ver + + parse = do name <- parse + Parse.skipSpaces + ver <- parse <++ return anyVersion + Parse.skipSpaces + return (Dependency name ver) + +thisPackageVersion :: PackageIdentifier -> Dependency +thisPackageVersion (PackageIdentifier n v) = + Dependency n (thisVersion v) + +notThisPackageVersion :: PackageIdentifier -> Dependency +notThisPackageVersion (PackageIdentifier n v) = + Dependency n (notThisVersion v) + +-- | Simplify the 'VersionRange' expression in a 'Dependency'. +-- See 'simplifyVersionRange'. +-- +simplifyDependency :: Dependency -> Dependency +simplifyDependency (Dependency name range) = + Dependency name (simplifyVersionRange range) + +-- | Class of things that have a 'PackageIdentifier' +-- +-- Types in this class are all notions of a package. This allows us to have +-- different types for the different phases that packages go though, from +-- simple name\/id, package description, configured or installed packages. +-- +-- Not all kinds of packages can be uniquely identified by a +-- 'PackageIdentifier'. In particular, installed packages cannot, there may be +-- many installed instances of the same source package. +-- +class Package pkg where + packageId :: pkg -> PackageIdentifier + +packageName :: Package pkg => pkg -> PackageName +packageName = pkgName . packageId + +packageVersion :: Package pkg => pkg -> Version +packageVersion = pkgVersion . packageId + +instance Package PackageIdentifier where + packageId = id + +-- | Packages that have an installed package ID +class Package pkg => HasUnitId pkg where + installedUnitId :: pkg -> UnitId + +{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} +-- | Compatibility wrapper for Cabal pre-1.24. +installedPackageId :: HasUnitId pkg => pkg -> UnitId +installedPackageId = installedUnitId + +-- | Class of installed packages. +-- +-- The primary data type which is an instance of this package is +-- 'InstalledPackageInfo', but when we are doing install plans in Cabal install +-- we may have other, installed package-like things which contain more metadata. +-- Installed packages have exact dependencies 'installedDepends'. 
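And a sketch of the unit-id and dependency helpers defined above; the identifier strings are invented.

import Distribution.Package
         (Dependency, PackageIdentifier (..), PackageName (..),
          getHSLibraryName, mkUnitId, thisPackageVersion)
import Distribution.Version (Version (..))

hsLibName :: String
hsLibName = getHSLibraryName (mkUnitId "foo-1.2.0-inplace")
-- == "HSfoo-1.2.0-inplace"

exactFoo :: Dependency
exactFoo = thisPackageVersion (PackageIdentifier (PackageName "foo") (Version [1,2,0] []))
-- constrains "foo" to exactly version 1.2.0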
+class (HasUnitId pkg) => PackageInstalled pkg where + installedDepends :: pkg -> [UnitId] + +-- ----------------------------------------------------------------------------- +-- ABI hash + +newtype AbiHash = AbiHash String + deriving (Eq, Show, Read, Generic) +instance Binary AbiHash + +instance Text AbiHash where + disp (AbiHash abi) = Disp.text abi + parse = fmap AbiHash (Parse.munch Char.isAlphaNum) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ParseUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ParseUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ParseUtils.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,750 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ParseUtils +-- Copyright : (c) The University of Glasgow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Utilities for parsing 'PackageDescription' and 'InstalledPackageInfo'. +-- +-- The @.cabal@ file format is not trivial, especially with the introduction +-- of configurations and the section syntax that goes with that. This module +-- has a bunch of parsing functions that is used by the @.cabal@ parser and a +-- couple others. It has the parsing framework code and also little parsers for +-- many of the formats we get in various @.cabal@ file fields, like module +-- names, comma separated lists etc. + +-- This module is meant to be local-only to Distribution... + +{-# OPTIONS_HADDOCK hide #-} +module Distribution.ParseUtils ( + LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning, + runP, runE, ParseResult(..), catchParseError, parseFail, showPWarning, + Field(..), fName, lineNo, + FieldDescr(..), ppField, ppFields, readFields, readFieldsFlat, + showFields, showSingleNamedField, showSimpleSingleNamedField, + parseFields, parseFieldsFlat, + parseFilePathQ, parseTokenQ, parseTokenQ', + parseModuleNameQ, parseBuildTool, parsePkgconfigDependency, + parseOptVersion, parsePackageNameQ, parseVersionRangeQ, + parseTestedWithQ, parseLicenseQ, parseLanguageQ, parseExtensionQ, + parseSepList, parseCommaList, parseOptCommaList, + showFilePath, showToken, showTestedWith, showFreeText, parseFreeText, + field, simpleField, listField, listFieldWithSep, spaceListField, + commaListField, commaListFieldWithSep, commaNewLineListField, + optsField, liftField, boolField, parseQuoted, indentWith, + + UnrecFieldParser, warnUnrec, ignoreUnrec, + ) where + +import Distribution.Compiler +import Distribution.License +import Distribution.Version +import Distribution.Package +import Distribution.ModuleName +import qualified Distribution.Compat.MonadFail as Fail +import Distribution.Compat.ReadP as ReadP hiding (get) +import Distribution.ReadE +import Distribution.Text +import Distribution.Simple.Utils +import Language.Haskell.Extension + +import Text.PrettyPrint hiding (braces) +import Data.Char (isSpace, toLower, isAlphaNum, isDigit) +import Data.Maybe (fromMaybe) +import Data.Tree as Tree (Tree(..), flatten) +import qualified Data.Map as Map +import Control.Monad (foldM, ap) +import Control.Applicative as AP (Applicative(..)) +import System.FilePath (normalise) +import Data.List (sortBy) + +-- 
----------------------------------------------------------------------------- + +type LineNo = Int +type Separator = ([Doc] -> Doc) + +data PError = AmbiguousParse String LineNo + | NoParse String LineNo + | TabsError LineNo + | FromString String (Maybe LineNo) + deriving (Eq, Show) + +data PWarning = PWarning String + | UTFWarning LineNo String + deriving (Eq, Show) + +showPWarning :: FilePath -> PWarning -> String +showPWarning fpath (PWarning msg) = + normalise fpath ++ ": " ++ msg +showPWarning fpath (UTFWarning line fname) = + normalise fpath ++ ":" ++ show line + ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field." + +data ParseResult a = ParseFailed PError | ParseOk [PWarning] a + deriving Show + +instance Functor ParseResult where + fmap _ (ParseFailed err) = ParseFailed err + fmap f (ParseOk ws x) = ParseOk ws $ f x + +instance Applicative ParseResult where + pure = ParseOk [] + (<*>) = ap + + +instance Monad ParseResult where + return = AP.pure + ParseFailed err >>= _ = ParseFailed err + ParseOk ws x >>= f = case f x of + ParseFailed err -> ParseFailed err + ParseOk ws' x' -> ParseOk (ws'++ws) x' + fail = Fail.fail + +instance Fail.MonadFail ParseResult where + fail s = ParseFailed (FromString s Nothing) + +catchParseError :: ParseResult a -> (PError -> ParseResult a) + -> ParseResult a +p@(ParseOk _ _) `catchParseError` _ = p +ParseFailed e `catchParseError` k = k e + +parseFail :: PError -> ParseResult a +parseFail = ParseFailed + +runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a +runP line fieldname p s = + case [ x | (x,"") <- results ] of + [a] -> ParseOk (utf8Warnings line fieldname s) a + --TODO: what is this double parse thing all about? + -- Can't we just do the all isSpace test the first time? + [] -> case [ x | (x,ys) <- results, all isSpace ys ] of + [a] -> ParseOk (utf8Warnings line fieldname s) a + [] -> ParseFailed (NoParse fieldname line) + _ -> ParseFailed (AmbiguousParse fieldname line) + _ -> ParseFailed (AmbiguousParse fieldname line) + where results = readP_to_S p s + +runE :: LineNo -> String -> ReadE a -> String -> ParseResult a +runE line fieldname p s = + case runReadE p s of + Right a -> ParseOk (utf8Warnings line fieldname s) a + Left e -> syntaxError line $ + "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s + +utf8Warnings :: LineNo -> String -> String -> [PWarning] +utf8Warnings line fieldname s = + take 1 [ UTFWarning n fieldname + | (n,l) <- zip [line..] (lines s) + , '\xfffd' `elem` l ] + +locatedErrorMsg :: PError -> (Maybe LineNo, String) +locatedErrorMsg (AmbiguousParse f n) = (Just n, + "Ambiguous parse in field '"++f++"'.") +locatedErrorMsg (NoParse f n) = (Just n, + "Parse of field '"++f++"' failed.") +locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.") +locatedErrorMsg (FromString s n) = (n, s) + +syntaxError :: LineNo -> String -> ParseResult a +syntaxError n s = ParseFailed $ FromString s (Just n) + +tabsError :: LineNo -> ParseResult a +tabsError ln = ParseFailed $ TabsError ln + +warning :: String -> ParseResult () +warning s = ParseOk [PWarning s] () + +-- | Field descriptor. The parameter @a@ parameterizes over where the field's +-- value is stored in. +data FieldDescr a + = FieldDescr + { fieldName :: String + , fieldGet :: a -> Doc + , fieldSet :: LineNo -> String -> a -> ParseResult a + -- ^ @fieldSet n str x@ Parses the field value from the given input + -- string @str@ and stores the result in @x@ if the parse was + -- successful. 
Otherwise, reports an error on line number @n@. + } + +field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a +field name showF readF = + FieldDescr name showF (\line val _st -> runP line name readF val) + +-- Lift a field descriptor storing into an 'a' to a field descriptor storing +-- into a 'b'. +liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b +liftField get set (FieldDescr name showF parseF) + = FieldDescr name (showF . get) + (\line str b -> do + a <- parseF line str (get b) + return (set a b)) + +-- Parser combinator for simple fields. Takes a field name, a pretty printer, +-- a parser function, an accessor, and a setter, returns a FieldDescr over the +-- compoid structure. +simpleField :: String -> (a -> Doc) -> ReadP a a + -> (b -> a) -> (a -> b -> b) -> FieldDescr b +simpleField name showF readF get set + = liftField get set $ field name showF readF + +commaListFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaListFieldWithSep separator name showF readF get set = + liftField get set' $ + field name showF' (parseCommaList readF) + where + set' xs b = set (get b ++ xs) b + showF' = separator . punctuate comma . map showF + +commaListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaListField = commaListFieldWithSep fsep + +commaNewLineListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaNewLineListField = commaListFieldWithSep sep + +spaceListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +spaceListField name showF readF get set = + liftField get set' $ + field name showF' (parseSpaceList readF) + where + set' xs b = set (get b ++ xs) b + showF' = fsep . map showF + +listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listFieldWithSep separator name showF readF get set = + liftField get set' $ + field name showF' (parseOptCommaList readF) + where + set' xs b = set (get b ++ xs) b + showF' = separator . map showF + +listField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listField = listFieldWithSep fsep + +optsField :: String -> CompilerFlavor -> (b -> [(CompilerFlavor,[String])]) + -> ([(CompilerFlavor,[String])] -> b -> b) -> FieldDescr b +optsField name flavor get set = + liftField (fromMaybe [] . lookup flavor . get) + (\opts b -> set (reorder (update flavor opts (get b))) b) $ + field name showF (sepBy parseTokenQ' (munch1 isSpace)) + where + update _ opts l | all null opts = l --empty opts as if no opts + update f opts [] = [(f,opts)] + update f opts ((f',opts'):rest) + | f == f' = (f, opts' ++ opts) : rest + | otherwise = (f',opts') : update f opts rest + reorder = sortBy (comparing fst) + showF = hsep . map text + +-- TODO: this is a bit smelly hack. It's because we want to parse bool fields +-- liberally but not accept new parses. We cannot do that with ReadP +-- because it does not support warnings. We need a new parser framework! +boolField :: String -> (b -> Bool) -> (Bool -> b -> b) -> FieldDescr b +boolField name get set = liftField get set (FieldDescr name showF readF) + where + showF = text . 
show + readF line str _ + | str == "True" = ParseOk [] True + | str == "False" = ParseOk [] False + | lstr == "true" = ParseOk [caseWarning] True + | lstr == "false" = ParseOk [caseWarning] False + | otherwise = ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'." + +ppFields :: [FieldDescr a] -> a -> Doc +ppFields fields x = + vcat [ ppField name (getter x) | FieldDescr name getter _ <- fields ] + +ppField :: String -> Doc -> Doc +ppField name fielddoc + | isEmpty fielddoc = empty + | name `elem` nestedFields = text name <> colon $+$ nest indentWith fielddoc + | otherwise = text name <> colon <+> fielddoc + where + nestedFields = + [ "description" + , "build-depends" + , "data-files" + , "extra-source-files" + , "extra-tmp-files" + , "exposed-modules" + , "c-sources" + , "js-sources" + , "extra-libraries" + , "includes" + , "install-includes" + , "other-modules" + , "depends" + ] + +showFields :: [FieldDescr a] -> a -> String +showFields fields = render . ($+$ text "") . ppFields fields + +showSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) +showSingleNamedField fields f = + case [ get | (FieldDescr f' get _) <- fields, f' == f ] of + [] -> Nothing + (get:_) -> Just (render . ppField f . get) + +showSimpleSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) +showSimpleSingleNamedField fields f = + case [ get | (FieldDescr f' get _) <- fields, f' == f ] of + [] -> Nothing + (get:_) -> Just (renderStyle myStyle . get) + where myStyle = style { mode = LeftMode } + +parseFields :: [FieldDescr a] -> a -> String -> ParseResult a +parseFields fields initial str = + readFields str >>= accumFields fields initial + +parseFieldsFlat :: [FieldDescr a] -> a -> String -> ParseResult a +parseFieldsFlat fields initial str = + readFieldsFlat str >>= accumFields fields initial + +accumFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a +accumFields fields = foldM setField + where + fieldMap = Map.fromList + [ (name, f) | f@(FieldDescr name _ _) <- fields ] + setField accum (F line name value) = case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value accum + Nothing -> do + warning ("Unrecognized field " ++ name ++ " on line " ++ show line) + return accum + setField accum f = do + warning ("Unrecognized stanza on line " ++ show (lineNo f)) + return accum + +-- | The type of a function which, given a name-value pair of an +-- unrecognized field, and the current structure being built, +-- decides whether to incorporate the unrecognized field +-- (by returning Just x, where x is a possibly modified version +-- of the structure being built), or not (by returning Nothing). +type UnrecFieldParser a = (String,String) -> a -> Maybe a + +-- | A default unrecognized field parser which simply returns Nothing, +-- i.e. ignores all unrecognized fields, so warnings will be generated. +warnUnrec :: UnrecFieldParser a +warnUnrec _ _ = Nothing + +-- | A default unrecognized field parser which silently (i.e. no +-- warnings will be generated) ignores unrecognized fields, by +-- returning the structure being built unmodified. 
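To make the combinators above concrete, a minimal sketch of a two-field stanza parser. 'MyCfg' and its fields are invented for illustration, while 'simpleField', 'boolField' and 'parseFields' are the functions defined in this module.

import Distribution.ParseUtils (FieldDescr, ParseResult, boolField, parseFields, simpleField)
import Distribution.Compat.ReadP (munch1)
import Data.Char (isAlphaNum)
import Text.PrettyPrint (text)

data MyCfg = MyCfg { cfgName :: String, cfgVerbose :: Bool }

myFields :: [FieldDescr MyCfg]
myFields =
  [ simpleField "name"    text (munch1 isAlphaNum)
                cfgName   (\v c -> c { cfgName = v })
  , boolField   "verbose" cfgVerbose (\v c -> c { cfgVerbose = v })
  ]

parseMyCfg :: String -> ParseResult MyCfg
parseMyCfg = parseFields myFields (MyCfg "" False)
-- parseMyCfg "name: demo\nverbose: True\n" yields ParseOk [] (MyCfg "demo" True);
-- showFields myFields renders such a value back out as a stanza.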
+ignoreUnrec :: UnrecFieldParser a +ignoreUnrec _ = Just + +------------------------------------------------------------------------------ + +-- The data type for our three syntactic categories +data Field + = F LineNo String String + -- ^ A regular @: @ field + | Section LineNo String String [Field] + -- ^ A section with a name and possible parameter. The syntactic + -- structure is: + -- + -- @ + -- { + -- * + -- } + -- @ + | IfBlock LineNo String [Field] [Field] + -- ^ A conditional block with an optional else branch: + -- + -- @ + -- if { + -- * + -- } else { + -- * + -- } + -- @ + deriving (Show + ,Eq) -- for testing + +lineNo :: Field -> LineNo +lineNo (F n _ _) = n +lineNo (Section n _ _ _) = n +lineNo (IfBlock n _ _ _) = n + +fName :: Field -> String +fName (F _ n _) = n +fName (Section _ n _ _) = n +fName _ = error "fname: not a field or section" + +readFields :: String -> ParseResult [Field] +readFields input = ifelse + =<< mapM (mkField 0) + =<< mkTree tokens + + where ls = (lines . normaliseLineEndings) input + tokens = (concatMap tokeniseLine . trimLines) ls + +readFieldsFlat :: String -> ParseResult [Field] +readFieldsFlat input = mapM (mkField 0) + =<< mkTree tokens + where ls = (lines . normaliseLineEndings) input + tokens = (concatMap tokeniseLineFlat . trimLines) ls + +-- attach line number and determine indentation +trimLines :: [String] -> [(LineNo, Indent, HasTabs, String)] +trimLines ls = [ (lineno, indent, hastabs, trimTrailing l') + | (lineno, l) <- zip [1..] ls + , let (sps, l') = span isSpace l + indent = length sps + hastabs = '\t' `elem` sps + , validLine l' ] + where validLine ('-':'-':_) = False -- Comment + validLine [] = False -- blank line + validLine _ = True + +-- | We parse generically based on indent level and braces '{' '}'. To do that +-- we split into lines and then '{' '}' tokens and other spans within a line. +data Token = + -- | The 'Line' token is for bits that /start/ a line, eg: + -- + -- > "\n blah blah { blah" + -- + -- tokenises to: + -- + -- > [Line n 2 False "blah blah", OpenBracket, Span n "blah"] + -- + -- so lines are the only ones that can have nested layout, since they + -- have a known indentation level. + -- + -- eg: we can't have this: + -- + -- > if ... { + -- > } else + -- > other + -- + -- because other cannot nest under else, since else doesn't start a line + -- so cannot have nested layout. It'd have to be: + -- + -- > if ... { + -- > } + -- > else + -- > other + -- + -- but that's not so common, people would normally use layout or + -- brackets not both in a single @if else@ construct. + -- + -- > if ... { foo : bar } + -- > else + -- > other + -- + -- this is OK + Line LineNo Indent HasTabs String + | Span LineNo String -- ^ span in a line, following brackets + | OpenBracket LineNo | CloseBracket LineNo + +type Indent = Int +type HasTabs = Bool + +-- | Tokenise a single line, splitting on '{' '}' and the spans in between. +-- Also trims leading & trailing space on those spans within the line. 
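A sketch of what 'readFields' above produces for a tiny input; the input string is made up, and note that field and section names come back lower-cased.

import Distribution.ParseUtils (Field (..), ParseResult (..), readFields)

demoFields :: ParseResult [Field]
demoFields = readFields "name: demo\nlibrary\n  build-depends: base\n"
-- == ParseOk [] [ F 1 "name" "demo"
--               , Section 2 "library" "" [F 3 "build-depends" "base"] ]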
+tokeniseLine :: (LineNo, Indent, HasTabs, String) -> [Token] +tokeniseLine (n0, i, t, l) = case split n0 l of + (Span _ l':ss) -> Line n0 i t l' :ss + cs -> cs + where split _ "" = [] + split n s = case span (\c -> c /='}' && c /= '{') s of + ("", '{' : s') -> OpenBracket n : split n s' + (w , '{' : s') -> mkspan n w (OpenBracket n : split n s') + ("", '}' : s') -> CloseBracket n : split n s' + (w , '}' : s') -> mkspan n w (CloseBracket n : split n s') + (w , _) -> mkspan n w [] + + mkspan n s ss | null s' = ss + | otherwise = Span n s' : ss + where s' = trimTrailing (trimLeading s) + +tokeniseLineFlat :: (LineNo, Indent, HasTabs, String) -> [Token] +tokeniseLineFlat (n0, i, t, l) + | null l' = [] + | otherwise = [Line n0 i t l'] + where + l' = trimTrailing (trimLeading l) + +trimLeading, trimTrailing :: String -> String +trimLeading = dropWhile isSpace +trimTrailing = dropWhileEndLE isSpace + + +type SyntaxTree = Tree (LineNo, HasTabs, String) + +-- | Parse the stream of tokens into a tree of them, based on indent \/ layout +mkTree :: [Token] -> ParseResult [SyntaxTree] +mkTree toks = + layout 0 [] toks >>= \(trees, trailing) -> case trailing of + [] -> return trees + OpenBracket n:_ -> syntaxError n "mismatched brackets, unexpected {" + CloseBracket n:_ -> syntaxError n "mismatched brackets, unexpected }" + -- the following two should never happen: + Span n l :_ -> syntaxError n $ "unexpected span: " ++ show l + Line n _ _ l :_ -> syntaxError n $ "unexpected line: " ++ show l + + +-- | Parse the stream of tokens into a tree of them, based on indent +-- This parse state expect to be in a layout context, though possibly +-- nested within a braces context so we may still encounter closing braces. +layout :: Indent -- ^ indent level of the parent\/previous line + -> [SyntaxTree] -- ^ accumulating param, trees in this level + -> [Token] -- ^ remaining tokens + -> ParseResult ([SyntaxTree], [Token]) + -- ^ collected trees on this level and trailing tokens +layout _ a [] = return (reverse a, []) +layout i a (s@(Line _ i' _ _):ss) | i' < i = return (reverse a, s:ss) +layout i a (Line n _ t l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + layout i (Node (n,t,l) sub:a) ss' + +layout i a (Span n l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + layout i (Node (n,False,l) sub:a) ss' + +-- look ahead to see if following lines are more indented, giving a sub-tree +layout i a (Line n i' t l:ss) = do + lookahead <- layout (i'+1) [] ss + case lookahead of + ([], _) -> layout i (Node (n,t,l) [] :a) ss + (ts, ss') -> layout i (Node (n,t,l) ts :a) ss' + +layout _ _ ( OpenBracket n :_) = syntaxError n "unexpected '{'" +layout _ a (s@(CloseBracket _):ss) = return (reverse a, s:ss) +layout _ _ ( Span n l : _) = syntaxError n $ "unexpected span: " + ++ show l + +-- | Parse the stream of tokens into a tree of them, based on explicit braces +-- This parse state expects to find a closing bracket. 
+braces :: LineNo -- ^ line of the '{', used for error messages + -> [SyntaxTree] -- ^ accumulating param, trees in this level + -> [Token] -- ^ remaining tokens + -> ParseResult ([SyntaxTree],[Token]) + -- ^ collected trees on this level and trailing tokens +braces m a (Line n _ t l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + braces m (Node (n,t,l) sub:a) ss' + +braces m a (Span n l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + braces m (Node (n,False,l) sub:a) ss' + +braces m a (Line n i t l:ss) = do + lookahead <- layout (i+1) [] ss + case lookahead of + ([], _) -> braces m (Node (n,t,l) [] :a) ss + (ts, ss') -> braces m (Node (n,t,l) ts :a) ss' + +braces m a (Span n l:ss) = braces m (Node (n,False,l) []:a) ss +braces _ a (CloseBracket _:ss) = return (reverse a, ss) +braces n _ [] = syntaxError n $ "opening brace '{'" + ++ "has no matching closing brace '}'" +braces _ _ (OpenBracket n:_) = syntaxError n "unexpected '{'" + +-- | Convert the parse tree into the Field AST +-- Also check for dodgy uses of tabs in indentation. +mkField :: Int -> SyntaxTree -> ParseResult Field +mkField d (Node (n,t,_) _) | d >= 1 && t = tabsError n +mkField d (Node (n,_,l) ts) = case span (\c -> isAlphaNum c || c == '-') l of + ([], _) -> syntaxError n $ "unrecognised field or section: " ++ show l + (name, rest) -> case trimLeading rest of + (':':rest') -> do let followingLines = concatMap Tree.flatten ts + tabs = not (null [()| (_,True,_) <- followingLines ]) + if tabs && d >= 1 + then tabsError n + else return $ F n (map toLower name) + (fieldValue rest' followingLines) + rest' -> do ts' <- mapM (mkField (d+1)) ts + return (Section n (map toLower name) rest' ts') + where fieldValue firstLine followingLines = + let firstLine' = trimLeading firstLine + followingLines' = map (\(_,_,s) -> stripDot s) followingLines + allLines | null firstLine' = followingLines' + | otherwise = firstLine' : followingLines' + in intercalate "\n" allLines + stripDot "." 
= "" + stripDot s = s + +-- | Convert if/then/else 'Section's to 'IfBlock's +ifelse :: [Field] -> ParseResult [Field] +ifelse [] = return [] +ifelse (Section n "if" cond thenpart + :Section _ "else" as elsepart:fs) + | null cond = syntaxError n "'if' with missing condition" + | null thenpart = syntaxError n "'then' branch of 'if' is empty" + | not (null as) = syntaxError n "'else' takes no arguments" + | null elsepart = syntaxError n "'else' branch of 'if' is empty" + | otherwise = do tp <- ifelse thenpart + ep <- ifelse elsepart + fs' <- ifelse fs + return (IfBlock n cond tp ep:fs') +ifelse (Section n "if" cond thenpart:fs) + | null cond = syntaxError n "'if' with missing condition" + | null thenpart = syntaxError n "'then' branch of 'if' is empty" + | otherwise = do tp <- ifelse thenpart + fs' <- ifelse fs + return (IfBlock n cond tp []:fs') +ifelse (Section n "else" _ _:_) = syntaxError n + "stray 'else' with no preceding 'if'" +ifelse (Section n s a fs':fs) = do fs'' <- ifelse fs' + fs''' <- ifelse fs + return (Section n s a fs'' : fs''') +ifelse (f:fs) = do fs' <- ifelse fs + return (f : fs') + +------------------------------------------------------------------------------ + +-- |parse a module name +parseModuleNameQ :: ReadP r ModuleName +parseModuleNameQ = parseQuoted parse <++ parse + +parseFilePathQ :: ReadP r FilePath +parseFilePathQ = parseTokenQ + -- removed until normalise is no longer broken, was: + -- liftM normalise parseTokenQ + +betweenSpaces :: ReadP r a -> ReadP r a +betweenSpaces act = do skipSpaces + res <- act + skipSpaces + return res + +parseBuildTool :: ReadP r Dependency +parseBuildTool = do name <- parseBuildToolNameQ + ver <- betweenSpaces $ + parseVersionRangeQ <++ return anyVersion + return $ Dependency name ver + +parseBuildToolNameQ :: ReadP r PackageName +parseBuildToolNameQ = parseQuoted parseBuildToolName <++ parseBuildToolName + +-- like parsePackageName but accepts symbols in components +parseBuildToolName :: ReadP r PackageName +parseBuildToolName = do ns <- sepBy1 component (ReadP.char '-') + return (PackageName (intercalate "-" ns)) + where component = do + cs <- munch1 (\c -> isAlphaNum c || c == '+' || c == '_') + if all isDigit cs then pfail else return cs + +-- pkg-config allows versions and other letters in package names, +-- eg "gtk+-2.0" is a valid pkg-config package _name_. +-- It then has a package version number like 2.10.13 +parsePkgconfigDependency :: ReadP r Dependency +parsePkgconfigDependency = do name <- munch1 + (\c -> isAlphaNum c || c `elem` "+-._") + ver <- betweenSpaces $ + parseVersionRangeQ <++ return anyVersion + return $ Dependency (PackageName name) ver + +parsePackageNameQ :: ReadP r PackageName +parsePackageNameQ = parseQuoted parse <++ parse + +parseVersionRangeQ :: ReadP r VersionRange +parseVersionRangeQ = parseQuoted parse <++ parse + +parseOptVersion :: ReadP r Version +parseOptVersion = parseQuoted ver <++ ver + where ver :: ReadP r Version + ver = parse <++ return (Version [] []) + +parseTestedWithQ :: ReadP r (CompilerFlavor,VersionRange) +parseTestedWithQ = parseQuoted tw <++ tw + where + tw :: ReadP r (CompilerFlavor,VersionRange) + tw = do compiler <- parseCompilerFlavorCompat + version <- betweenSpaces $ parse <++ return anyVersion + return (compiler,version) + +parseLicenseQ :: ReadP r License +parseLicenseQ = parseQuoted parse <++ parse + +-- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a +-- because the "compat" version of ReadP isn't quite powerful enough. 
In +-- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a +-- Hence the trick above to make 'lic' polymorphic. + +parseLanguageQ :: ReadP r Language +parseLanguageQ = parseQuoted parse <++ parse + +parseExtensionQ :: ReadP r Extension +parseExtensionQ = parseQuoted parse <++ parse + +parseHaskellString :: ReadP r String +parseHaskellString = readS_to_P reads + +parseTokenQ :: ReadP r String +parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',') + +parseTokenQ' :: ReadP r String +parseTokenQ' = parseHaskellString <++ munch1 (not . isSpace) + +parseSepList :: ReadP r b + -> ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseSepList sepr p = sepBy p separator + where separator = betweenSpaces sepr + +parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseSpaceList p = sepBy p skipSpaces + +parseCommaList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseCommaList = parseSepList (ReadP.char ',') + +parseOptCommaList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseOptCommaList = parseSepList (optional (ReadP.char ',')) + +parseQuoted :: ReadP r a -> ReadP r a +parseQuoted = between (ReadP.char '"') (ReadP.char '"') + +parseFreeText :: ReadP.ReadP s String +parseFreeText = ReadP.munch (const True) + +-- -------------------------------------------- +-- ** Pretty printing + +showFilePath :: FilePath -> Doc +showFilePath "" = empty +showFilePath x = showToken x + +showToken :: String -> Doc +showToken str + | not (any dodgy str) && + not (null str) = text str + | otherwise = text (show str) + where dodgy c = isSpace c || c == ',' + +showTestedWith :: (CompilerFlavor,VersionRange) -> Doc +showTestedWith (compiler, version) = text (show compiler) <+> disp version + +-- | Pretty-print free-format text, ensuring that it is vertically aligned, +-- and with blank lines replaced by dots for correct re-parsing. +showFreeText :: String -> Doc +showFreeText "" = empty +showFreeText s = vcat [text (if null l then "." else l) | l <- lines_ s] + +-- | 'lines_' breaks a string up into a list of strings at newline +-- characters. The resulting strings do not contain newlines. 
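A sketch of running one of the quoting-aware list parsers above outside the full field framework, using 'runP'; the field name and line number only matter for error reporting, and the input is an invented example.

import Distribution.ParseUtils (ParseResult, parseCommaList, parseTokenQ, runP)

demoTokens :: ParseResult [String]
demoTokens = runP 1 "extra-libraries" (parseCommaList parseTokenQ) "z, \"c++\", m"
-- == ParseOk [] ["z", "c++", "m"]   (quoted tokens may contain spaces or commas)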
+lines_ :: String -> [String] +lines_ [] = [""] +lines_ s = let (l, s') = break (== '\n') s + in l : case s' of + [] -> [] + (_:s'') -> lines_ s'' + +-- | the indentation used for pretty printing +indentWith :: Int +indentWith = 4 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ReadE.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ReadE.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/ReadE.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/ReadE.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,51 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ReadE +-- Copyright : Jose Iborra 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Simple parsing with failure + +module Distribution.ReadE ( + -- * ReadE + ReadE(..), succeedReadE, failReadE, + -- * Projections + parseReadE, readEOrFail, + readP_to_E + ) where + +import Distribution.Compat.ReadP +import Data.Char ( isSpace ) + +-- | Parser with simple error reporting +newtype ReadE a = ReadE {runReadE :: String -> Either ErrorMsg a} +type ErrorMsg = String + +instance Functor ReadE where + fmap f (ReadE p) = ReadE $ \txt -> case p txt of + Right a -> Right (f a) + Left err -> Left err + +succeedReadE :: (String -> a) -> ReadE a +succeedReadE f = ReadE (Right . f) + +failReadE :: ErrorMsg -> ReadE a +failReadE = ReadE . const . Left + +parseReadE :: ReadE a -> ReadP r a +parseReadE (ReadE p) = do + txt <- look + either fail return (p txt) + +readEOrFail :: ReadE a -> String -> a +readEOrFail r = either error id . runReadE r + +readP_to_E :: (String -> ErrorMsg) -> ReadP a a -> ReadE a +readP_to_E err r = + ReadE $ \txt -> case [ p | (p, s) <- readP_to_S r txt + , all isSpace s ] + of [] -> Left (err txt) + (p:_) -> Right p diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Bench.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Bench.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Bench.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Bench.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,123 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Bench +-- Copyright : Johan Tibell 2011 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into running the benchmarks in a built +-- package. It performs the \"@.\/setup bench@\" action. It runs +-- benchmarks designated in the package description. 
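Stepping back to the Distribution.ReadE hunk just above: a small sketch of wrapping a ReadP parser as a 'ReadE' with a custom error message. The parser and the message are invented examples, not part of the patch.

import Distribution.ReadE (ReadE (..), readP_to_E)
import Distribution.Compat.ReadP (munch1)
import Data.Char (isDigit)

digitsE :: ReadE String
digitsE = readP_to_E (\s -> "expected digits, got: " ++ s) (munch1 isDigit)
-- runReadE digitsE "123" == Right "123"
-- runReadE digitsE "12a" == Left "expected digits, got: 12a"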
+
+module Distribution.Simple.Bench
+    ( bench
+    ) where
+
+import qualified Distribution.PackageDescription as PD
+import Distribution.Simple.BuildPaths
+import Distribution.Simple.Compiler
+import Distribution.Simple.InstallDirs
+import qualified Distribution.Simple.LocalBuildInfo as LBI
+import Distribution.Simple.Setup
+import Distribution.Simple.UserHooks
+import Distribution.Simple.Utils
+import Distribution.Text
+
+import Control.Monad ( when, unless, forM )
+import System.Exit ( ExitCode(..), exitFailure, exitSuccess )
+import System.Directory ( doesFileExist )
+import System.FilePath ( (</>), (<.>) )
+
+-- | Perform the \"@.\/setup bench@\" action.
+bench :: Args                    -- ^positional command-line arguments
+      -> PD.PackageDescription   -- ^information from the .cabal file
+      -> LBI.LocalBuildInfo      -- ^information from the configure step
+      -> BenchmarkFlags          -- ^flags sent to benchmark
+      -> IO ()
+bench args pkg_descr lbi flags = do
+    let verbosity         = fromFlag $ benchmarkVerbosity flags
+        benchmarkNames    = args
+        pkgBenchmarks     = PD.benchmarks pkg_descr
+        enabledBenchmarks = [ t | t <- pkgBenchmarks
+                                , PD.benchmarkEnabled t
+                                , PD.buildable (PD.benchmarkBuildInfo t) ]
+
+        -- Run the benchmark
+        doBench :: PD.Benchmark -> IO ExitCode
+        doBench bm =
+            case PD.benchmarkInterface bm of
+              PD.BenchmarkExeV10 _ _ -> do
+                  let cmd = LBI.buildDir lbi </> PD.benchmarkName bm
+                            </> PD.benchmarkName bm <.> exeExtension
+                      options = map (benchOption pkg_descr lbi bm) $
+                                benchmarkOptions flags
+                      name = PD.benchmarkName bm
+                  -- Check that the benchmark executable exists.
+                  exists <- doesFileExist cmd
+                  unless exists $ die $
+                      "Error: Could not find benchmark program \""
+                      ++ cmd ++ "\". Did you build the package first?"
+
+                  notice verbosity $ startMessage name
+                  -- This will redirect the child process
+                  -- stdout/stderr to the parent process.
+                  exitcode <- rawSystemExitCode verbosity cmd options
+                  notice verbosity $ finishMessage name exitcode
+                  return exitcode
+
+              _ -> do
+                  notice verbosity $ "No support for running "
+                      ++ "benchmark " ++ PD.benchmarkName bm ++ " of type: "
+                      ++ show (disp $ PD.benchmarkType bm)
+                  exitFailure
+
+    unless (PD.hasBenchmarks pkg_descr) $ do
+        notice verbosity "Package has no benchmarks."
+        exitSuccess
+
+    when (PD.hasBenchmarks pkg_descr && null enabledBenchmarks) $
+        die $ "No benchmarks enabled. Did you remember to configure with "
+              ++ "\'--enable-benchmarks\'?"
+
+    bmsToRun <- case benchmarkNames of
+            [] -> return enabledBenchmarks
+            names -> forM names $ \bmName ->
+                let benchmarkMap = zip enabledNames enabledBenchmarks
+                    enabledNames = map PD.benchmarkName enabledBenchmarks
+                    allNames = map PD.benchmarkName pkgBenchmarks
+                in case lookup bmName benchmarkMap of
+                    Just t -> return t
+                    _ | bmName `elem` allNames ->
+                          die $ "Package configured with benchmark "
+                                ++ bmName ++ " disabled."
+                      | otherwise -> die $ "no such benchmark: " ++ bmName
+
+    let totalBenchmarks = length bmsToRun
+    notice verbosity $ "Running " ++ show totalBenchmarks ++ " benchmarks..."
+    exitcodes <- mapM doBench bmsToRun
+    let allOk = totalBenchmarks == length (filter (== ExitSuccess) exitcodes)
+    unless allOk exitFailure
+  where
+    startMessage name = "Benchmark " ++ name ++ ": RUNNING...\n"
+    finishMessage name exitcode = "Benchmark " ++ name ++ ": "
+                               ++ (case exitcode of
+                                        ExitSuccess -> "FINISH"
+                                        ExitFailure _ -> "ERROR")
+
+
+-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
+-- necessarily a path.
+benchOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.Benchmark + -> PathTemplate + -> String +benchOption pkg_descr lbi bm template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(BenchmarkNameVar, toPathTemplate $ PD.benchmarkName bm)] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build/Macros.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build/Macros.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build/Macros.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build/Macros.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,110 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build.Macros +-- Copyright : Simon Marlow 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Generate cabal_macros.h - CPP macros for package version testing +-- +-- When using CPP you get +-- +-- > VERSION_ +-- > MIN_VERSION_(A,B,C) +-- +-- for each /package/ in @build-depends@, which is true if the version of +-- /package/ in use is @>= A.B.C@, using the normal ordering on version +-- numbers. +-- +module Distribution.Simple.Build.Macros ( + generate, + generatePackageVersionMacros, + ) where + +import Data.Maybe + ( isJust ) +import Distribution.Package + ( PackageIdentifier(PackageIdentifier) ) +import Distribution.Version + ( Version(versionBranch) ) +import Distribution.PackageDescription + ( PackageDescription ( package ) ) +import Distribution.Simple.LocalBuildInfo + ( LocalBuildInfo(withPrograms), externalPackageDeps + , localComponentId, localCompatPackageKey ) +import Distribution.Simple.Program.Db + ( configuredPrograms ) +import Distribution.Simple.Program.Types + ( ConfiguredProgram(programId, programVersion) ) +import Distribution.Text + ( display ) + +-- ------------------------------------------------------------ +-- * Generate cabal_macros.h +-- ------------------------------------------------------------ + +-- | The contents of the @cabal_macros.h@ for the given configured package. +-- +generate :: PackageDescription -> LocalBuildInfo -> String +generate pkg_descr lbi = + "/* DO NOT EDIT: This file is automatically generated by Cabal */\n\n" ++ + generatePackageVersionMacros + (package pkg_descr : map snd (externalPackageDeps lbi)) ++ + generateToolVersionMacros (configuredPrograms . withPrograms $ lbi) ++ + generateComponentIdMacro lbi + +-- | Helper function that generates just the @VERSION_pkg@ and @MIN_VERSION_pkg@ +-- macros for a list of package ids (usually used with the specific deps of +-- a configured package). +-- +generatePackageVersionMacros :: [PackageIdentifier] -> String +generatePackageVersionMacros pkgids = concat + [ "/* package " ++ display pkgid ++ " */\n" + ++ generateMacros "" pkgname version + | pkgid@(PackageIdentifier name version) <- pkgids + , let pkgname = map fixchar (display name) + ] + +-- | Helper function that generates just the @TOOL_VERSION_pkg@ and +-- @MIN_TOOL_VERSION_pkg@ macros for a list of configured programs. 
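A sketch of what 'generatePackageVersionMacros' above emits for a single package id; the package and version are arbitrary and the generated text is abbreviated in the comment.

import Distribution.Package (PackageIdentifier (..), PackageName (..))
import Distribution.Simple.Build.Macros (generatePackageVersionMacros)
import Distribution.Version (Version (..))

bytestringMacros :: String
bytestringMacros = generatePackageVersionMacros
  [PackageIdentifier (PackageName "bytestring") (Version [0,10,8,1] [])]
-- Roughly:
--   /* package bytestring-0.10.8.1 */
--   #define VERSION_bytestring "0.10.8.1"
--   #define MIN_VERSION_bytestring(major1,major2,minor) ( ... )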
+-- +generateToolVersionMacros :: [ConfiguredProgram] -> String +generateToolVersionMacros progs = concat + [ "/* tool " ++ progid ++ " */\n" + ++ generateMacros "TOOL_" progname version + | prog <- progs + , isJust . programVersion $ prog + , let progid = programId prog ++ "-" ++ display version + progname = map fixchar (programId prog) + Just version = programVersion prog + ] + +-- | Common implementation of 'generatePackageVersionMacros' and +-- 'generateToolVersionMacros'. +-- +generateMacros :: String -> String -> Version -> String +generateMacros prefix name version = + concat + ["#define ", prefix, "VERSION_",name," ",show (display version),"\n" + ,"#define MIN_", prefix, "VERSION_",name,"(major1,major2,minor) (\\\n" + ," (major1) < ",major1," || \\\n" + ," (major1) == ",major1," && (major2) < ",major2," || \\\n" + ," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")" + ,"\n\n" + ] + where + (major1:major2:minor:_) = map show (versionBranch version ++ repeat 0) + +-- | Generate the @CURRENT_COMPONENT_ID@ definition for the component ID +-- of the current package. +generateComponentIdMacro :: LocalBuildInfo -> String +generateComponentIdMacro lbi = + concat + [ "#define CURRENT_COMPONENT_ID \"" ++ display (localComponentId lbi) ++ "\"\n\n" + , "#define CURRENT_PACKAGE_KEY \"" ++ localCompatPackageKey lbi ++ "\"\n\n" + ] + +fixchar :: Char -> Char +fixchar '-' = '_' +fixchar c = c diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build/PathsModule.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build/PathsModule.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build/PathsModule.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build/PathsModule.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,325 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build.Macros +-- Copyright : Isaac Jones 2003-2005, +-- Ross Paterson 2006, +-- Duncan Coutts 2007-2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Generating the Paths_pkgname module. +-- +-- This is a module that Cabal generates for the benefit of packages. It +-- enables them to find their version number and find any installed data files +-- at runtime. This code should probably be split off into another module. 
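For context on the module that follows, this is how a package typically consumes the generated Paths_pkgname module; "demo" is a made-up package name, so the Paths_demo import is hypothetical.

import Paths_demo (getDataFileName, version)   -- generated by Cabal for a package named "demo"
import Data.Version (showVersion)

main :: IO ()
main = do
  putStrLn ("demo " ++ showVersion version)
  cfg <- getDataFileName "defaults.conf"       -- resolves a file listed in data-files
  putStrLn ("default config at: " ++ cfg)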
+-- +module Distribution.Simple.Build.PathsModule ( + generate, pkgPathEnvVar + ) where + +import Distribution.System +import Distribution.Simple.Compiler +import Distribution.Package +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Version + +import System.FilePath + ( pathSeparator ) +import Data.Maybe + ( fromJust, isNothing ) + +-- ------------------------------------------------------------ +-- * Building Paths_.hs +-- ------------------------------------------------------------ + +generate :: PackageDescription -> LocalBuildInfo -> String +generate pkg_descr lbi = + let pragmas = cpp_pragma ++ ffi_pragmas ++ warning_pragmas + + cpp_pragma | supports_cpp = "{-# LANGUAGE CPP #-}\n" + | otherwise = "" + + ffi_pragmas + | absolute = "" + | supports_language_pragma = + "{-# LANGUAGE ForeignFunctionInterface #-}\n" + | otherwise = + "{-# OPTIONS_GHC -fffi #-}\n"++ + "{-# OPTIONS_JHC -fffi #-}\n" + + warning_pragmas = + "{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}\n"++ + "{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}\n" + + foreign_imports + | absolute = "" + | otherwise = + "import Foreign\n"++ + "import Foreign.C\n" + + reloc_imports + | reloc = + "import System.Environment (getExecutablePath)\n" + | otherwise = "" + + header = + pragmas++ + "module " ++ display paths_modulename ++ " (\n"++ + " version,\n"++ + " getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,\n"++ + " getDataFileName, getSysconfDir\n"++ + " ) where\n"++ + "\n"++ + foreign_imports++ + "import qualified Control.Exception as Exception\n"++ + "import Data.Version (Version(..))\n"++ + "import System.Environment (getEnv)\n"++ + reloc_imports ++ + "import Prelude\n"++ + "\n"++ + (if supports_cpp + then + ("#if defined(VERSION_base)\n"++ + "\n"++ + "#if MIN_VERSION_base(4,0,0)\n"++ + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ + "#else\n"++ + "catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a\n"++ + "#endif\n"++ + "\n"++ + "#else\n"++ + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ + "#endif\n") + else + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n")++ + "catchIO = Exception.catch\n" ++ + "\n"++ + "version :: Version"++ + "\nversion = Version " ++ show branch ++ " " ++ show tags + where Version branch tags = packageVersion pkg_descr + + body + | reloc = + "\n\nbindirrel :: FilePath\n" ++ + "bindirrel = " ++ show flat_bindirreloc ++ + "\n"++ + "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ + "getBinDir = "++mkGetEnvOrReloc "bindir" flat_bindirreloc++"\n"++ + "getLibDir = "++mkGetEnvOrReloc "libdir" flat_libdirreloc++"\n"++ + "getDynLibDir = "++mkGetEnvOrReloc "dynlibdir" flat_dynlibdirreloc++"\n"++ + "getDataDir = "++mkGetEnvOrReloc "datadir" flat_datadirreloc++"\n"++ + "getLibexecDir = "++mkGetEnvOrReloc "libexecdir" flat_libexecdirreloc++"\n"++ + "getSysconfDir = "++mkGetEnvOrReloc "sysconfdir" flat_sysconfdirreloc++"\n"++ + "\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir `joinFileName` name)\n"++ + "\n"++ + get_prefix_reloc_stuff++ + "\n"++ + filename_stuff + | absolute = + "\nbindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath\n"++ + "\nbindir = " ++ show flat_bindir ++ + "\nlibdir = " ++ show flat_libdir ++ + "\ndynlibdir = " ++ show 
flat_dynlibdir ++ + "\ndatadir = " ++ show flat_datadir ++ + "\nlibexecdir = " ++ show flat_libexecdir ++ + "\nsysconfdir = " ++ show flat_sysconfdir ++ + "\n"++ + "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ + "getBinDir = "++mkGetEnvOr "bindir" "return bindir"++"\n"++ + "getLibDir = "++mkGetEnvOr "libdir" "return libdir"++"\n"++ + "getDynLibDir = "++mkGetEnvOr "dynlibdir" "return dynlibdir"++"\n"++ + "getDataDir = "++mkGetEnvOr "datadir" "return datadir"++"\n"++ + "getLibexecDir = "++mkGetEnvOr "libexecdir" "return libexecdir"++"\n"++ + "getSysconfDir = "++mkGetEnvOr "sysconfdir" "return sysconfdir"++"\n"++ + "\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir ++ "++path_sep++" ++ name)\n" + | otherwise = + "\nprefix, bindirrel :: FilePath" ++ + "\nprefix = " ++ show flat_prefix ++ + "\nbindirrel = " ++ show (fromJust flat_bindirrel) ++ + "\n\n"++ + "getBinDir :: IO FilePath\n"++ + "getBinDir = getPrefixDirRel bindirrel\n\n"++ + "getLibDir :: IO FilePath\n"++ + "getLibDir = "++mkGetDir flat_libdir flat_libdirrel++"\n\n"++ + "getDynLibDir :: IO FilePath\n"++ + "getDynLibDir = "++mkGetDir flat_dynlibdir flat_dynlibdirrel++"\n\n"++ + "getDataDir :: IO FilePath\n"++ + "getDataDir = "++ mkGetEnvOr "datadir" + (mkGetDir flat_datadir flat_datadirrel)++"\n\n"++ + "getLibexecDir :: IO FilePath\n"++ + "getLibexecDir = "++mkGetDir flat_libexecdir flat_libexecdirrel++"\n\n"++ + "getSysconfDir :: IO FilePath\n"++ + "getSysconfDir = "++mkGetDir flat_sysconfdir flat_sysconfdirrel++"\n\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir `joinFileName` name)\n"++ + "\n"++ + get_prefix_stuff++ + "\n"++ + filename_stuff + in header++body + + where + InstallDirs { + prefix = flat_prefix, + bindir = flat_bindir, + libdir = flat_libdir, + dynlibdir = flat_dynlibdir, + datadir = flat_datadir, + libexecdir = flat_libexecdir, + sysconfdir = flat_sysconfdir + } = absoluteInstallDirs pkg_descr lbi NoCopyDest + InstallDirs { + bindir = flat_bindirrel, + libdir = flat_libdirrel, + dynlibdir = flat_dynlibdirrel, + datadir = flat_datadirrel, + libexecdir = flat_libexecdirrel, + sysconfdir = flat_sysconfdirrel + } = prefixRelativeInstallDirs (packageId pkg_descr) lbi + + flat_bindirreloc = shortRelativePath flat_prefix flat_bindir + flat_libdirreloc = shortRelativePath flat_prefix flat_libdir + flat_dynlibdirreloc = shortRelativePath flat_prefix flat_dynlibdir + flat_datadirreloc = shortRelativePath flat_prefix flat_datadir + flat_libexecdirreloc = shortRelativePath flat_prefix flat_libexecdir + flat_sysconfdirreloc = shortRelativePath flat_prefix flat_sysconfdir + + mkGetDir _ (Just dirrel) = "getPrefixDirRel " ++ show dirrel + mkGetDir dir Nothing = "return " ++ show dir + + mkGetEnvOrReloc var dirrel = "catchIO (getEnv \""++var'++"\")" ++ + " (\\_ -> getPrefixDirReloc \"" ++ dirrel ++ + "\")" + where var' = pkgPathEnvVar pkg_descr var + + mkGetEnvOr var expr = "catchIO (getEnv \""++var'++"\")"++ + " (\\_ -> "++expr++")" + where var' = pkgPathEnvVar pkg_descr var + + -- In several cases we cannot make relocatable installations + absolute = + hasLibs pkg_descr -- we can only make progs relocatable + || isNothing flat_bindirrel -- if the bin dir is an absolute path + || not (supportsRelocatableProgs (compilerFlavor (compiler lbi))) + + reloc = relocatable lbi + + supportsRelocatableProgs GHC = case 
buildOS of + Windows -> True + _ -> False + supportsRelocatableProgs GHCJS = case buildOS of + Windows -> True + _ -> False + supportsRelocatableProgs _ = False + + paths_modulename = autogenModuleName pkg_descr + + get_prefix_stuff = get_prefix_win32 buildArch + + path_sep = show [pathSeparator] + + supports_cpp = compilerFlavor (compiler lbi) == GHC + + supports_language_pragma = + (compilerFlavor (compiler lbi) == GHC && + (compilerVersion (compiler lbi) + `withinRange` orLaterVersion (Version [6,6,1] []))) || + compilerFlavor (compiler lbi) == GHCJS + +-- | Generates the name of the environment variable controlling the path +-- component of interest. +pkgPathEnvVar :: PackageDescription + -> String -- ^ path component; one of \"bindir\", \"libdir\", + -- \"datadir\", \"libexecdir\", or \"sysconfdir\" + -> String -- ^ environment variable name +pkgPathEnvVar pkg_descr var = + showPkgName (packageName pkg_descr) ++ "_" ++ var + where + showPkgName = map fixchar . display + fixchar '-' = '_' + fixchar c = c + +get_prefix_reloc_stuff :: String +get_prefix_reloc_stuff = + "getPrefixDirReloc :: FilePath -> IO FilePath\n"++ + "getPrefixDirReloc dirRel = do\n"++ + " exePath <- getExecutablePath\n"++ + " let (bindir,_) = splitFileName exePath\n"++ + " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n" + +get_prefix_win32 :: Arch -> String +get_prefix_win32 arch = + "getPrefixDirRel :: FilePath -> IO FilePath\n"++ + "getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.\n"++ + " where\n"++ + " try_size size = allocaArray (fromIntegral size) $ \\buf -> do\n"++ + " ret <- c_GetModuleFileName nullPtr buf size\n"++ + " case ret of\n"++ + " 0 -> return (prefix `joinFileName` dirRel)\n"++ + " _ | ret < size -> do\n"++ + " exePath <- peekCWString buf\n"++ + " let (bindir,_) = splitFileName exePath\n"++ + " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n"++ + " | otherwise -> try_size (size * 2)\n"++ + "\n"++ + "foreign import " ++ cconv ++ " unsafe \"windows.h GetModuleFileNameW\"\n"++ + " c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32\n" + where cconv = case arch of + I386 -> "stdcall" + X86_64 -> "ccall" + _ -> error "win32 supported only with I386, X86_64" + +filename_stuff :: String +filename_stuff = + "minusFileName :: FilePath -> String -> FilePath\n"++ + "minusFileName dir \"\" = dir\n"++ + "minusFileName dir \".\" = dir\n"++ + "minusFileName dir suffix =\n"++ + " minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))\n"++ + "\n"++ + "joinFileName :: String -> String -> FilePath\n"++ + "joinFileName \"\" fname = fname\n"++ + "joinFileName \".\" fname = fname\n"++ + "joinFileName dir \"\" = dir\n"++ + "joinFileName dir fname\n"++ + " | isPathSeparator (last dir) = dir++fname\n"++ + " | otherwise = dir++pathSeparator:fname\n"++ + "\n"++ + "splitFileName :: FilePath -> (String, String)\n"++ + "splitFileName p = (reverse (path2++drive), reverse fname)\n"++ + " where\n"++ + " (path,drive) = case p of\n"++ + " (c:':':p') -> (reverse p',[':',c])\n"++ + " _ -> (reverse p ,\"\")\n"++ + " (fname,path1) = break isPathSeparator path\n"++ + " path2 = case path1 of\n"++ + " [] -> \".\"\n"++ + " [_] -> path1 -- don't remove the trailing slash if \n"++ + " -- there is only one character\n"++ + " (c:path') | isPathSeparator c -> path'\n"++ + " _ -> path1\n"++ + "\n"++ + "pathSeparator :: Char\n"++ + (case buildOS of + Windows -> "pathSeparator = '\\\\'\n" + _ -> "pathSeparator = '/'\n") ++ + "\n"++ + "isPathSeparator :: 
Char -> Bool\n"++ + (case buildOS of + Windows -> "isPathSeparator c = c == '/' || c == '\\\\'\n" + _ -> "isPathSeparator c = c == '/'\n") diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Build.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Build.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,606 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build +-- Copyright : Isaac Jones 2003-2005, +-- Ross Paterson 2006, +-- Duncan Coutts 2007-2008, 2012 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point to actually building the modules in a package. It +-- doesn't actually do much itself, most of the work is delegated to +-- compiler-specific actions. It does do some non-compiler specific bits like +-- running pre-processors. +-- + +module Distribution.Simple.Build ( + build, repl, + startInterpreter, + + initialBuildSteps, + writeAutogenFiles, + ) where + +import Distribution.Package +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.JHC as JHC +import qualified Distribution.Simple.LHC as LHC +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite + +import qualified Distribution.Simple.Build.Macros as Build.Macros +import qualified Distribution.Simple.Build.PathsModule as Build.PathsModule + +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.PackageDescription hiding (Flag) +import qualified Distribution.InstalledPackageInfo as IPI +import qualified Distribution.ModuleName as ModuleName +import Distribution.ModuleName (ModuleName) + +import Distribution.Simple.Setup +import Distribution.Simple.BuildTarget +import Distribution.Simple.PreProcess +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Db +import Distribution.Simple.BuildPaths +import Distribution.Simple.Configure +import Distribution.Simple.Register +import Distribution.Simple.Test.LibV09 +import Distribution.Simple.Utils + +import Distribution.System +import Distribution.Text +import Distribution.Verbosity + +import qualified Data.Map as Map +import qualified Data.Set as Set +import Data.Either + ( partitionEithers ) +import Data.List + ( intersect ) +import Control.Monad + ( when, unless, forM_ ) +import System.FilePath + ( (), (<.>) ) +import System.Directory + ( getCurrentDirectory ) + +-- ----------------------------------------------------------------------------- +-- |Build the libraries and executables in this package. 
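For orientation, this module is normally reached through the Simple build type: the package's Setup.hs hands control to defaultMain, which dispatches the build command to the code below. A minimal sketch of such a Setup.hs:

-- Standard Setup.hs for build-type: Simple; "runghc Setup.hs build"
-- ends up in Distribution.Simple.Build.build.
import Distribution.Simple (defaultMain)

main :: IO ()
main = defaultMain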
+ +build :: PackageDescription -- ^ Mostly information from the .cabal file + -> LocalBuildInfo -- ^ Configuration information + -> BuildFlags -- ^ Flags that the user passed to build + -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling + -> IO () +build pkg_descr lbi flags suffixes = do + let distPref = fromFlag (buildDistPref flags) + verbosity = fromFlag (buildVerbosity flags) + + targets <- readBuildTargets pkg_descr (buildArgs flags) + targets' <- checkBuildTargets verbosity pkg_descr targets + let componentsToBuild = map fst (componentsInBuildOrder lbi (map fst targets')) + info verbosity $ "Component build order: " + ++ intercalate ", " (map showComponentName componentsToBuild) + + initialBuildSteps distPref pkg_descr lbi verbosity + when (null targets) $ + -- Only bother with this message if we're building the whole package + setupMessage verbosity "Building" (packageId pkg_descr) + + internalPackageDB <- createInternalPackageDB verbosity lbi distPref + + withComponentsInBuildOrder pkg_descr lbi componentsToBuild $ \comp clbi -> + let bi = componentBuildInfo comp + progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi) + lbi' = lbi { + withPrograms = progs', + withPackageDB = withPackageDB lbi ++ [internalPackageDB] + } + in buildComponent verbosity (buildNumJobs flags) pkg_descr + lbi' suffixes comp clbi distPref + + +repl :: PackageDescription -- ^ Mostly information from the .cabal file + -> LocalBuildInfo -- ^ Configuration information + -> ReplFlags -- ^ Flags that the user passed to build + -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling + -> [String] + -> IO () +repl pkg_descr lbi flags suffixes args = do + let distPref = fromFlag (replDistPref flags) + verbosity = fromFlag (replVerbosity flags) + + targets <- readBuildTargets pkg_descr args + targets' <- case targets of + [] -> return $ take 1 [ componentName c + | c <- pkgEnabledComponents pkg_descr ] + [target] -> fmap (map fst) (checkBuildTargets verbosity pkg_descr [target]) + _ -> die $ "The 'repl' command does not support multiple targets at once." + let componentsToBuild = componentsInBuildOrder lbi targets' + componentForRepl = last componentsToBuild + debug verbosity $ "Component build order: " + ++ intercalate ", " + [ showComponentName c | (c,_) <- componentsToBuild ] + + initialBuildSteps distPref pkg_descr lbi verbosity + + internalPackageDB <- createInternalPackageDB verbosity lbi distPref + + let lbiForComponent comp lbi' = + lbi' { + withPackageDB = withPackageDB lbi ++ [internalPackageDB], + withPrograms = addInternalBuildTools pkg_descr lbi' + (componentBuildInfo comp) (withPrograms lbi') + } + + -- build any dependent components + sequence_ + [ let comp = getComponent pkg_descr cname + lbi' = lbiForComponent comp lbi + in buildComponent verbosity NoFlag + pkg_descr lbi' suffixes comp clbi distPref + | (cname, clbi) <- init componentsToBuild ] + + -- REPL for target components + let (cname, clbi) = componentForRepl + comp = getComponent pkg_descr cname + lbi' = lbiForComponent comp lbi + in replComponent verbosity pkg_descr lbi' suffixes comp clbi distPref + + +-- | Start an interpreter without loading any package files. 
+startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity programDb comp platform packageDBs = + case compilerFlavor comp of + GHC -> GHC.startInterpreter verbosity programDb comp platform packageDBs + GHCJS -> GHCJS.startInterpreter verbosity programDb comp platform packageDBs + _ -> die "A REPL is not supported with this compiler." + +buildComponent :: Verbosity + -> Flag (Maybe Int) + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> Component + -> ComponentLocalBuildInfo + -> FilePath + -> IO () +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CLib lib) clbi distPref = do + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + info verbosity "Building library..." + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = addExtraCSources libbi extras } + buildLib verbosity numJobs pkg_descr lbi lib' clbi + + -- Register the library in-place, so exes can depend + -- on internally defined libraries. + pwd <- getCurrentDirectory + let -- The in place registration uses the "-inplace" suffix, not an ABI hash + installedPkgInfo = inplaceInstalledPackageInfo pwd distPref pkg_descr + (AbiHash "") lib' lbi clbi + + registerPackage verbosity (compiler lbi) (withPrograms lbi) False + (withPackageDB lbi) installedPkgInfo + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CExe exe) clbi _ = do + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + info verbosity $ "Building executable " ++ exeName exe ++ "..." + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' clbi + + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) + clbi _distPref = do + let exe = testSuiteExeV10AsExe test + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + info verbosity $ "Building test suite " ++ testName test ++ "..." + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' clbi + + +buildComponent verbosity numJobs pkg_descr lbi0 suffixes + comp@(CTest + test@TestSuite { testInterface = TestSuiteLibV09{} }) + clbi -- This ComponentLocalBuildInfo corresponds to a detailed + -- test suite and not a real component. It should not + -- be used, except to construct the CLBIs for the + -- library and stub executable that will actually be + -- built. + distPref = do + pwd <- getCurrentDirectory + let (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) = + testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + info verbosity $ "Building test suite " ++ testName test ++ "..." + buildLib verbosity numJobs pkg lbi lib libClbi + -- NB: need to enable multiple instances here, because on 7.10+ + -- the package name is the same as the library, and we still + -- want the registration to go through. 
+ registerPackage verbosity (compiler lbi) (withPrograms lbi) True + (withPackageDB lbi) ipi + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' exeClbi + + +buildComponent _ _ _ _ _ + (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) + _ _ = + die $ "No support for building test suite type " ++ display tt + + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) + clbi _ = do + let (exe, exeClbi) = benchmarkExeV10asExe bm clbi + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + info verbosity $ "Building benchmark " ++ benchmarkName bm ++ "..." + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' exeClbi + + +buildComponent _ _ _ _ _ + (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) + _ _ = + die $ "No support for building benchmark type " ++ display tt + + +-- | Add extra C sources generated by preprocessing to build +-- information. +addExtraCSources :: BuildInfo -> [FilePath] -> BuildInfo +addExtraCSources bi extras = bi { cSources = new } + where new = Set.toList $ old `Set.union` exs + old = Set.fromList $ cSources bi + exs = Set.fromList extras + + +replComponent :: Verbosity + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> Component + -> ComponentLocalBuildInfo + -> FilePath + -> IO () +replComponent verbosity pkg_descr lbi suffixes + comp@(CLib lib) clbi _ = do + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } + replLib verbosity pkg_descr lbi lib' clbi + +replComponent verbosity pkg_descr lbi suffixes + comp@(CExe exe) clbi _ = do + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } + replExe verbosity pkg_descr lbi exe' clbi + + +replComponent verbosity pkg_descr lbi suffixes + comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) + clbi _distPref = do + let exe = testSuiteExeV10AsExe test + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } + replExe verbosity pkg_descr lbi exe' clbi + + +replComponent verbosity pkg_descr lbi0 suffixes + comp@(CTest + test@TestSuite { testInterface = TestSuiteLibV09{} }) + clbi distPref = do + pwd <- getCurrentDirectory + let (pkg, lib, libClbi, lbi, _, _, _) = + testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd + preprocessComponent pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } + replLib verbosity pkg lbi lib' libClbi + + +replComponent _ _ _ _ + (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) + _ _ = + die $ "No support for building test suite type " ++ display tt + + +replComponent verbosity pkg_descr lbi suffixes + comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) + clbi _ = do + let (exe, exeClbi) = benchmarkExeV10asExe bm clbi + preprocessComponent 
pkg_descr comp lbi False verbosity suffixes + extras <- preprocessExtras comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } + replExe verbosity pkg_descr lbi exe' exeClbi + + +replComponent _ _ _ _ + (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) + _ _ = + die $ "No support for building benchmark type " ++ display tt + +---------------------------------------------------- +-- Shared code for buildComponent and replComponent +-- + +-- | Translate a exe-style 'TestSuite' component into an exe for building +testSuiteExeV10AsExe :: TestSuite -> Executable +testSuiteExeV10AsExe test@TestSuite { testInterface = TestSuiteExeV10 _ mainFile } = + Executable { + exeName = testName test, + modulePath = mainFile, + buildInfo = testBuildInfo test + } +testSuiteExeV10AsExe TestSuite{} = error "testSuiteExeV10AsExe: wrong kind" + +-- | Translate a lib-style 'TestSuite' component into a lib + exe for building +testSuiteLibV09AsLibAndExe :: PackageDescription + -> TestSuite + -> ComponentLocalBuildInfo + -> LocalBuildInfo + -> FilePath + -> FilePath + -> (PackageDescription, + Library, ComponentLocalBuildInfo, + LocalBuildInfo, + IPI.InstalledPackageInfo, + Executable, ComponentLocalBuildInfo) +testSuiteLibV09AsLibAndExe pkg_descr + test@TestSuite { testInterface = TestSuiteLibV09 _ m } + clbi lbi distPref pwd = + (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) + where + bi = testBuildInfo test + lib = Library { + exposedModules = [ m ], + reexportedModules = [], + requiredSignatures = [], + exposedSignatures = [], + libExposed = True, + libBuildInfo = bi + } + -- NB: temporary hack; I have a refactor which solves this + cid = computeComponentId (package pkg_descr) + (CTestName (testName test)) + (map ((\(SimpleUnitId cid0) -> cid0) . fst) + (componentPackageDeps clbi)) + (flagAssignment lbi) + uid = SimpleUnitId cid + (pkg_name, compat_key) = computeCompatPackageKey + (compiler lbi) (package pkg_descr) + (CTestName (testName test)) uid + libClbi = LibComponentLocalBuildInfo + { componentPackageDeps = componentPackageDeps clbi + , componentPackageRenaming = componentPackageRenaming clbi + , componentUnitId = uid + , componentCompatPackageKey = compat_key + , componentExposedModules = [IPI.ExposedModule m Nothing] + } + pkg = pkg_descr { + package = (package pkg_descr) { pkgName = pkg_name } + , buildDepends = targetBuildDepends $ testBuildInfo test + , executables = [] + , testSuites = [] + , library = Just lib + } + ipi = inplaceInstalledPackageInfo pwd distPref pkg (AbiHash "") lib lbi libClbi + testDir = buildDir lbi stubName test + stubName test ++ "-tmp" + testLibDep = thisPackageVersion $ package pkg + exe = Executable { + exeName = stubName test, + modulePath = stubFilePath test, + buildInfo = (testBuildInfo test) { + hsSourceDirs = [ testDir ], + targetBuildDepends = testLibDep + : (targetBuildDepends $ testBuildInfo test), + targetBuildRenaming = Map.empty + } + } + -- | The stub executable needs a new 'ComponentLocalBuildInfo' + -- that exposes the relevant test suite library. 
+    exeClbi = ExeComponentLocalBuildInfo {
+                componentPackageDeps =
+                    (IPI.installedUnitId ipi, packageId ipi)
+                  : (filter (\(_, x) -> let PackageName name = pkgName x
+                                        in name == "Cabal" || name == "base")
+                    (componentPackageDeps clbi)),
+                componentPackageRenaming = Map.empty
+              }
+testSuiteLibV09AsLibAndExe _ TestSuite{} _ _ _ _ = error "testSuiteLibV09AsLibAndExe: wrong kind"
+
+
+-- | Translate an exe-style 'Benchmark' component into an exe for building
+benchmarkExeV10asExe :: Benchmark -> ComponentLocalBuildInfo
+                     -> (Executable, ComponentLocalBuildInfo)
+benchmarkExeV10asExe bm@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f }
+                     clbi =
+    (exe, exeClbi)
+  where
+    exe = Executable {
+            exeName    = benchmarkName bm,
+            modulePath = f,
+            buildInfo  = benchmarkBuildInfo bm
+          }
+    exeClbi = ExeComponentLocalBuildInfo {
+                componentPackageDeps     = componentPackageDeps clbi,
+                componentPackageRenaming = componentPackageRenaming clbi
+              }
+benchmarkExeV10asExe Benchmark{} _ = error "benchmarkExeV10asExe: wrong kind"
+
+-- | Initialize a new package db file for libraries defined
+-- internally to the package.
+createInternalPackageDB :: Verbosity -> LocalBuildInfo -> FilePath
+                        -> IO PackageDB
+createInternalPackageDB verbosity lbi distPref = do
+    existsAlready <- doesPackageDBExist dbPath
+    when existsAlready $ deletePackageDB dbPath
+    createPackageDB verbosity (compiler lbi) (withPrograms lbi) False dbPath
+    return (SpecificPackageDB dbPath)
+  where
+    dbPath = case compilerFlavor (compiler lbi) of
+      UHC -> UHC.inplacePackageDbPath lbi
+      _   -> distPref </> "package.conf.inplace"
+
+addInternalBuildTools :: PackageDescription -> LocalBuildInfo -> BuildInfo
+                      -> ProgramDb -> ProgramDb
+addInternalBuildTools pkg lbi bi progs =
+    foldr updateProgram progs internalBuildTools
+  where
+    internalBuildTools =
+      [ simpleConfiguredProgram toolName (FoundOnSystem toolLocation)
+      | toolName <- toolNames
+      , let toolLocation = buildDir lbi </> toolName </> toolName <.> exeExtension ]
+    toolNames = intersect buildToolNames internalExeNames
+    internalExeNames = map exeName (executables pkg)
+    buildToolNames   = map buildToolName (buildTools bi)
+      where
+        buildToolName (Dependency (PackageName name) _ ) = name
+
+
+-- TODO: build separate libs in separate dirs so that we can build
+-- multiple libs, e.g. for 'LibTest' library-style test suites
+buildLib :: Verbosity -> Flag (Maybe Int)
+         -> PackageDescription -> LocalBuildInfo
+         -> Library -> ComponentLocalBuildInfo -> IO ()
+buildLib verbosity numJobs pkg_descr lbi lib clbi =
+  case compilerFlavor (compiler lbi) of
+    GHC   -> GHC.buildLib   verbosity numJobs pkg_descr lbi lib clbi
+    GHCJS -> GHCJS.buildLib verbosity numJobs pkg_descr lbi lib clbi
+    JHC   -> JHC.buildLib   verbosity pkg_descr lbi lib clbi
+    LHC   -> LHC.buildLib   verbosity pkg_descr lbi lib clbi
+    UHC   -> UHC.buildLib   verbosity pkg_descr lbi lib clbi
+    HaskellSuite {} -> HaskellSuite.buildLib verbosity pkg_descr lbi lib clbi
+    _     -> die "Building is not supported with this compiler."
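To make the internal build-tool resolution in addInternalBuildTools above concrete, here is a standalone sketch using only base and filepath; the tool and executable names are hypothetical. Only build-tools entries that name one of the package's own executables are redirected to the in-place build directory.

import Data.List (intersect)
import System.FilePath ((</>), (<.>))

-- Sketch of the name intersection and path construction used above.
internalToolPaths :: FilePath    -- ^ build dir, e.g. "dist/build"
                  -> [String]    -- ^ executables defined by this package
                  -> [String]    -- ^ names listed in build-tools
                  -> [(String, FilePath)]
internalToolPaths bdir exes tools =
  [ (name, bdir </> name </> name <.> ext)
  | name <- tools `intersect` exes ]
  where ext = ""   -- exeExtension: "" on Unix, "exe" on Windows

main :: IO ()
main = print (internalToolPaths "dist/build" ["my-pp"] ["alex", "my-pp"])
-- prints [("my-pp","dist/build/my-pp/my-pp")]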
+
+buildExe :: Verbosity -> Flag (Maybe Int)
+         -> PackageDescription -> LocalBuildInfo
+         -> Executable -> ComponentLocalBuildInfo -> IO ()
+buildExe verbosity numJobs pkg_descr lbi exe clbi =
+  case compilerFlavor (compiler lbi) of
+    GHC   -> GHC.buildExe   verbosity numJobs pkg_descr lbi exe clbi
+    GHCJS -> GHCJS.buildExe verbosity numJobs pkg_descr lbi exe clbi
+    JHC   -> JHC.buildExe   verbosity pkg_descr lbi exe clbi
+    LHC   -> LHC.buildExe   verbosity pkg_descr lbi exe clbi
+    UHC   -> UHC.buildExe   verbosity pkg_descr lbi exe clbi
+    _     -> die "Building is not supported with this compiler."
+
+replLib :: Verbosity -> PackageDescription -> LocalBuildInfo
+        -> Library -> ComponentLocalBuildInfo -> IO ()
+replLib verbosity pkg_descr lbi lib clbi =
+  case compilerFlavor (compiler lbi) of
+    -- 'cabal repl' doesn't need to support 'ghc --make -j', so we just pass
+    -- NoFlag as the numJobs parameter.
+    GHC   -> GHC.replLib   verbosity NoFlag pkg_descr lbi lib clbi
+    GHCJS -> GHCJS.replLib verbosity NoFlag pkg_descr lbi lib clbi
+    _     -> die "A REPL is not supported for this compiler."
+
+replExe :: Verbosity -> PackageDescription -> LocalBuildInfo
+        -> Executable -> ComponentLocalBuildInfo -> IO ()
+replExe verbosity pkg_descr lbi exe clbi =
+  case compilerFlavor (compiler lbi) of
+    GHC   -> GHC.replExe   verbosity NoFlag pkg_descr lbi exe clbi
+    GHCJS -> GHCJS.replExe verbosity NoFlag pkg_descr lbi exe clbi
+    _     -> die "A REPL is not supported for this compiler."
+
+
+initialBuildSteps :: FilePath -- ^"dist" prefix
+                  -> PackageDescription  -- ^mostly information from the .cabal file
+                  -> LocalBuildInfo -- ^Configuration information
+                  -> Verbosity -- ^The verbosity to use
+                  -> IO ()
+initialBuildSteps _distPref pkg_descr lbi verbosity = do
+  -- check that there's something to build
+  unless (not . null $ allBuildInfo pkg_descr) $ do
+    let name = display (packageId pkg_descr)
+    die $ "No libraries, executables, tests, or benchmarks "
+       ++ "are enabled for package " ++ name ++ "."
+
+  createDirectoryIfMissingVerbose verbosity True (buildDir lbi)
+
+  writeAutogenFiles verbosity pkg_descr lbi
+
+-- | Generate and write out the Paths_<pkg>.hs and cabal_macros.h files
+--
+writeAutogenFiles :: Verbosity
+                  -> PackageDescription
+                  -> LocalBuildInfo
+                  -> IO ()
+writeAutogenFiles verbosity pkg lbi = do
+  createDirectoryIfMissingVerbose verbosity True (autogenModulesDir lbi)
+
+  let pathsModulePath = autogenModulesDir lbi
+                        </> ModuleName.toFilePath (autogenModuleName pkg) <.> "hs"
+  rewriteFile pathsModulePath (Build.PathsModule.generate pkg lbi)
+
+  let cppHeaderPath = autogenModulesDir lbi </> cppHeaderName
+  rewriteFile cppHeaderPath (Build.Macros.generate pkg lbi)
+
+-- | Check that the given build targets are valid in the current context.
+--
+-- Also swizzle into a more convenient form.
+--
+checkBuildTargets :: Verbosity -> PackageDescription -> [BuildTarget]
+                  -> IO [(ComponentName, Maybe (Either ModuleName FilePath))]
+checkBuildTargets _ pkg [] =
+    return [ (componentName c, Nothing) | c <- pkgEnabledComponents pkg ]
+
+checkBuildTargets verbosity pkg targets = do
+
+    let (enabled, disabled) =
+          partitionEithers
+            [ case componentDisabledReason (getComponent pkg cname) of
+                Nothing     -> Left  target'
+                Just reason -> Right (cname, reason)
+            | target <- targets
+            , let target'@(cname,_) = swizzleTarget target ]
+
+    case disabled of
+      []                 -> return ()
+      ((cname,reason):_) -> die $ formatReason (showComponentName cname) reason
+
+    forM_ [ (c, t) | (c, Just t) <- enabled ] $ \(c, t) ->
+      warn verbosity $ "Ignoring '" ++ either display id t ++ "'. The whole "
+                    ++ showComponentName c ++ " will be built. (Support for "
+                    ++ "module and file targets has not been implemented yet.)"
+
+    return enabled
+
+  where
+    swizzleTarget (BuildTargetComponent c)   = (c, Nothing)
+    swizzleTarget (BuildTargetModule    c m) = (c, Just (Left  m))
+    swizzleTarget (BuildTargetFile      c f) = (c, Just (Right f))
+
+    formatReason cn DisabledComponent =
+        "Cannot build the " ++ cn ++ " because the component is marked "
+     ++ "as disabled in the .cabal file."
+    formatReason cn DisabledAllTests =
+        "Cannot build the " ++ cn ++ " because test suites are not "
+     ++ "enabled. Run configure with the flag --enable-tests"
+    formatReason cn DisabledAllBenchmarks =
+        "Cannot build the " ++ cn ++ " because benchmarks are not "
+     ++ "enabled. Re-run configure with the flag --enable-benchmarks"
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/BuildPaths.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/BuildPaths.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/BuildPaths.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/BuildPaths.hs 2016-12-23 10:35:21.000000000 +0000
@@ -0,0 +1,127 @@
+-----------------------------------------------------------------------------
+-- |
+-- Module      : Distribution.Simple.BuildPaths
+-- Copyright   : Isaac Jones 2003-2004,
+--               Duncan Coutts 2008
+-- License     : BSD3
+--
+-- Maintainer  : cabal-devel@haskell.org
+-- Portability : portable
+--
+-- A bunch of dirs, paths and file names used for intermediate build steps.
+--
+
+module Distribution.Simple.BuildPaths (
+    defaultDistPref, srcPref,
+    haddockDirName,
+    hscolourPref, hscolourPref', haddockPref, haddockPref',
+    autogenModulesDir,
+
+    autogenModuleName,
+    cppHeaderName,
+    haddockName,
+
+    mkLibName,
+    mkProfLibName,
+    mkSharedLibName,
+
+    exeExtension,
+    objExtension,
+    dllExtension,
+
+  ) where
+
+
+import Distribution.Package
+import Distribution.ModuleName as ModuleName
+import Distribution.Compiler
+import Distribution.PackageDescription
+import Distribution.Simple.LocalBuildInfo
+import Distribution.Simple.Setup
+import Distribution.Text
+import Distribution.System
+
+import System.FilePath ((</>), (<.>))
+
+-- ---------------------------------------------------------------------------
+-- Build directories and files
+
+srcPref :: FilePath -> FilePath
+srcPref distPref = distPref </> "src"
+
+-- Backwards compat, see #4123.
+haddockPref, hscolourPref :: FilePath -> PackageDescription -> FilePath
+haddockPref  = haddockPref' ForDevelopment
+hscolourPref = haddockPref' ForDevelopment
+
+hscolourPref' :: HaddockTarget -> FilePath -> PackageDescription -> FilePath
+hscolourPref' = haddockPref'
+
+-- | This is the name of the directory in which the generated haddocks
+-- should be stored. It does not include the @/doc/html@ prefix.
+haddockDirName :: HaddockTarget -> PackageDescription -> FilePath
+haddockDirName ForDevelopment = display . packageName
+haddockDirName ForHackage     = (++ "-docs") . display . packageId
+
+-- | The directory to which generated haddock documentation should be written.
+haddockPref' :: HaddockTarget -> FilePath -> PackageDescription -> FilePath
+haddockPref' haddockTarget distPref pkg_descr
+    = distPref </> "doc" </> "html" </> haddockDirName haddockTarget pkg_descr
+
+-- |The directory in which we put auto-generated modules
+autogenModulesDir :: LocalBuildInfo -> String
+autogenModulesDir lbi = buildDir lbi </> "autogen"
+
+cppHeaderName :: String
+cppHeaderName = "cabal_macros.h"
+
+-- |The name of the auto-generated module associated with a package
+autogenModuleName :: PackageDescription -> ModuleName
+autogenModuleName pkg_descr =
+  ModuleName.fromString $
+    "Paths_" ++ map fixchar (display (packageName pkg_descr))
+  where fixchar '-' = '_'
+        fixchar c   = c
+
+haddockName :: PackageDescription -> FilePath
+haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock"
+
+-- ---------------------------------------------------------------------------
+-- Library file names
+
+mkLibName :: UnitId -> String
+mkLibName lib = "lib" ++ getHSLibraryName lib <.> "a"
+
+mkProfLibName :: UnitId -> String
+mkProfLibName lib = "lib" ++ getHSLibraryName lib ++ "_p" <.> "a"
+
+-- Implement proper name mangling for dynamical shared objects
+-- libHS<packagename>-<compilerFlavour><compilerVersion>
+-- e.g. libHSbase-2.1-ghc6.6.1.so
+mkSharedLibName :: CompilerId -> UnitId -> String
+mkSharedLibName (CompilerId compilerFlavor compilerVersion) lib
+  = "lib" ++ getHSLibraryName lib ++ "-" ++ comp <.> dllExtension
+  where comp = display compilerFlavor ++ display compilerVersion
+
+-- ------------------------------------------------------------
+-- * Platform file extensions
+-- ------------------------------------------------------------
+
+-- | Default extension for executable files on the current platform.
+-- (typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2)
+exeExtension :: String
+exeExtension = case buildOS of
+                   Windows -> "exe"
+                   _       -> ""
+
+-- | Extension for object files. For GHC the extension is @\"o\"@.
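A standalone sketch of the name mangling defined by autogenModuleName above: the Paths_* module name is the package name with dashes rewritten to underscores, and with the default dist layout the generated file lands under the autogen directory. The package name below is hypothetical.

import System.FilePath ((</>), (<.>))

-- Mirrors the '-' to '_' mangling; "my-data-files" is a made-up package name
-- and "dist/build" the default build directory.
pathsModuleFor :: String -> String
pathsModuleFor pkg = "Paths_" ++ map fixchar pkg
  where fixchar '-' = '_'
        fixchar c   = c

main :: IO ()
main = putStrLn
  (("dist" </> "build" </> "autogen" </> pathsModuleFor "my-data-files") <.> "hs")
-- prints dist/build/autogen/Paths_my_data_files.hs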
+objExtension :: String +objExtension = "o" + +-- | Extension for dynamically linked (or shared) libraries +-- (typically @\"so\"@ on Unix and @\"dll\"@ on Windows) +dllExtension :: String +dllExtension = case buildOS of + Windows -> "dll" + OSX -> "dylib" + _ -> "so" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/BuildTarget.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/BuildTarget.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/BuildTarget.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,940 @@ +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.BuildTargets +-- Copyright : (c) Duncan Coutts 2012 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified build targets +----------------------------------------------------------------------------- +module Distribution.Simple.BuildTarget ( + + -- * Build targets + BuildTarget(..), + readBuildTargets, + showBuildTarget, + QualLevel(..), + buildTargetComponentName, + + -- * Parsing user build targets + UserBuildTarget, + readUserBuildTargets, + showUserBuildTarget, + UserBuildTargetProblem(..), + reportUserBuildTargetProblems, + + -- * Resolving build targets + resolveBuildTargets, + BuildTargetProblem(..), + reportBuildTargetProblems, + ) where + +import Distribution.Package +import Distribution.PackageDescription +import Distribution.ModuleName +import Distribution.Simple.LocalBuildInfo +import Distribution.Text +import Distribution.Simple.Utils + +import Distribution.Compat.Binary (Binary) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( (+++), (<++) ) + +import Data.List + ( nub, stripPrefix, sortBy, groupBy, partition ) +import Data.Maybe + ( listToMaybe, catMaybes ) +import Data.Either + ( partitionEithers ) +import GHC.Generics (Generic) +import qualified Data.Map as Map +import Control.Monad +import Control.Applicative as AP (Alternative(..), Applicative(..)) +import Data.Char + ( isSpace, isAlphaNum ) +import System.FilePath as FilePath + ( dropExtension, normalise, splitDirectories, joinPath, splitPath + , hasTrailingPathSeparator ) +import System.Directory + ( doesFileExist, doesDirectoryExist ) + +-- ------------------------------------------------------------ +-- * User build targets +-- ------------------------------------------------------------ + +-- | Various ways that a user may specify a build target. +-- +data UserBuildTarget = + + -- | A target specified by a single name. This could be a component + -- module or file. + -- + -- > cabal build foo + -- > cabal build Data.Foo + -- > cabal build Data/Foo.hs Data/Foo.hsc + -- + UserBuildTargetSingle String + + -- | A target specified by a qualifier and name. This could be a component + -- name qualified by the component namespace kind, or a module or file + -- qualified by the component name. + -- + -- > cabal build lib:foo exe:foo + -- > cabal build foo:Data.Foo + -- > cabal build foo:Data/Foo.hs + -- + | UserBuildTargetDouble String String + + -- A fully qualified target, either a module or file qualified by a + -- component name with the component namespace kind. 
+ -- + -- > cabal build lib:foo:Data/Foo.hs exe:foo:Data/Foo.hs + -- > cabal build lib:foo:Data.Foo exe:foo:Data.Foo + -- + | UserBuildTargetTriple String String String + deriving (Show, Eq, Ord) + + +-- ------------------------------------------------------------ +-- * Resolved build targets +-- ------------------------------------------------------------ + +-- | A fully resolved build target. +-- +data BuildTarget = + + -- | A specific component + -- + BuildTargetComponent ComponentName + + -- | A specific module within a specific component. + -- + | BuildTargetModule ComponentName ModuleName + + -- | A specific file within a specific component. + -- + | BuildTargetFile ComponentName FilePath + deriving (Eq, Show, Generic) + +instance Binary BuildTarget + +buildTargetComponentName :: BuildTarget -> ComponentName +buildTargetComponentName (BuildTargetComponent cn) = cn +buildTargetComponentName (BuildTargetModule cn _) = cn +buildTargetComponentName (BuildTargetFile cn _) = cn + +-- | Read a list of user-supplied build target strings and resolve them to +-- 'BuildTarget's according to a 'PackageDescription'. If there are problems +-- with any of the targets e.g. they don't exist or are misformatted, throw an +-- 'IOException'. +readBuildTargets :: PackageDescription -> [String] -> IO [BuildTarget] +readBuildTargets pkg targetStrs = do + let (uproblems, utargets) = readUserBuildTargets targetStrs + reportUserBuildTargetProblems uproblems + + utargets' <- mapM checkTargetExistsAsFile utargets + + let (bproblems, btargets) = resolveBuildTargets pkg utargets' + reportBuildTargetProblems bproblems + + return btargets + +checkTargetExistsAsFile :: UserBuildTarget -> IO (UserBuildTarget, Bool) +checkTargetExistsAsFile t = do + fexists <- existsAsFile (fileComponentOfTarget t) + return (t, fexists) + + where + existsAsFile f = do + exists <- doesFileExist f + case splitPath f of + (d:_) | hasTrailingPathSeparator d -> doesDirectoryExist d + (d:_:_) | not exists -> doesDirectoryExist d + _ -> return exists + + fileComponentOfTarget (UserBuildTargetSingle s1) = s1 + fileComponentOfTarget (UserBuildTargetDouble _ s2) = s2 + fileComponentOfTarget (UserBuildTargetTriple _ _ s3) = s3 + + +-- ------------------------------------------------------------ +-- * Parsing user targets +-- ------------------------------------------------------------ + +readUserBuildTargets :: [String] -> ([UserBuildTargetProblem] + ,[UserBuildTarget]) +readUserBuildTargets = partitionEithers . 
map readUserBuildTarget + +readUserBuildTarget :: String -> Either UserBuildTargetProblem + UserBuildTarget +readUserBuildTarget targetstr = + case readPToMaybe parseTargetApprox targetstr of + Nothing -> Left (UserBuildTargetUnrecognised targetstr) + Just tgt -> Right tgt + + where + parseTargetApprox :: Parse.ReadP r UserBuildTarget + parseTargetApprox = + (do a <- tokenQ + return (UserBuildTargetSingle a)) + +++ (do a <- token + _ <- Parse.char ':' + b <- tokenQ + return (UserBuildTargetDouble a b)) + +++ (do a <- token + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + return (UserBuildTargetTriple a b c)) + + token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') + tokenQ = parseHaskellString <++ token + parseHaskellString :: Parse.ReadP r String + parseHaskellString = Parse.readS_to_P reads + + readPToMaybe :: Parse.ReadP a a -> String -> Maybe a + readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str + , all isSpace s ] + +data UserBuildTargetProblem + = UserBuildTargetUnrecognised String + deriving Show + +reportUserBuildTargetProblems :: [UserBuildTargetProblem] -> IO () +reportUserBuildTargetProblems problems = do + case [ target | UserBuildTargetUnrecognised target <- problems ] of + [] -> return () + target -> + die $ unlines + [ "Unrecognised build target '" ++ name ++ "'." + | name <- target ] + ++ "Examples:\n" + ++ " - build foo -- component name " + ++ "(library, executable, test-suite or benchmark)\n" + ++ " - build Data.Foo -- module name\n" + ++ " - build Data/Foo.hsc -- file name\n" + ++ " - build lib:foo exe:foo -- component qualified by kind\n" + ++ " - build foo:Data.Foo -- module qualified by component\n" + ++ " - build foo:Data/Foo.hsc -- file qualified by component" + +showUserBuildTarget :: UserBuildTarget -> String +showUserBuildTarget = intercalate ":" . getComponents + where + getComponents (UserBuildTargetSingle s1) = [s1] + getComponents (UserBuildTargetDouble s1 s2) = [s1,s2] + getComponents (UserBuildTargetTriple s1 s2 s3) = [s1,s2,s3] + +showBuildTarget :: QualLevel -> PackageId -> BuildTarget -> String +showBuildTarget ql pkgid bt = + showUserBuildTarget (renderBuildTarget ql bt pkgid) + + +-- ------------------------------------------------------------ +-- * Resolving user targets to build targets +-- ------------------------------------------------------------ + +{- +stargets = + [ BuildTargetComponent (CExeName "foo") + , BuildTargetModule (CExeName "foo") (mkMn "Foo") + , BuildTargetModule (CExeName "tst") (mkMn "Foo") + ] + where + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + +ex_pkgid :: PackageIdentifier +Just ex_pkgid = simpleParse "thelib" +-} + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. +-- +resolveBuildTargets :: PackageDescription + -> [(UserBuildTarget, Bool)] + -> ([BuildTargetProblem], [BuildTarget]) +resolveBuildTargets pkg = partitionEithers + . 
map (uncurry (resolveBuildTarget pkg)) + +resolveBuildTarget :: PackageDescription -> UserBuildTarget -> Bool + -> Either BuildTargetProblem BuildTarget +resolveBuildTarget pkg userTarget fexists = + case findMatch (matchBuildTarget pkg userTarget fexists) of + Unambiguous target -> Right target + Ambiguous targets -> Left (BuildTargetAmbiguous userTarget targets') + where targets' = disambiguateBuildTargets + (packageId pkg) userTarget + targets + None errs -> Left (classifyMatchErrors errs) + + where + classifyMatchErrors errs + | not (null expected) = let (things, got:_) = unzip expected in + BuildTargetExpected userTarget things got + | not (null nosuch) = BuildTargetNoSuch userTarget nosuch + | otherwise = error $ "resolveBuildTarget: internal error in matching" + where + expected = [ (thing, got) | MatchErrorExpected thing got <- errs ] + nosuch = [ (thing, got) | MatchErrorNoSuch thing got <- errs ] + + +data BuildTargetProblem + = BuildTargetExpected UserBuildTarget [String] String + -- ^ [expected thing] (actually got) + | BuildTargetNoSuch UserBuildTarget [(String, String)] + -- ^ [(no such thing, actually got)] + | BuildTargetAmbiguous UserBuildTarget [(UserBuildTarget, BuildTarget)] + deriving Show + + +disambiguateBuildTargets :: PackageId -> UserBuildTarget -> [BuildTarget] + -> [(UserBuildTarget, BuildTarget)] +disambiguateBuildTargets pkgid original = + disambiguate (userTargetQualLevel original) + where + disambiguate ql ts + | null amb = unamb + | otherwise = unamb ++ disambiguate (succ ql) amb + where + (amb, unamb) = step ql ts + + userTargetQualLevel (UserBuildTargetSingle _ ) = QL1 + userTargetQualLevel (UserBuildTargetDouble _ _ ) = QL2 + userTargetQualLevel (UserBuildTargetTriple _ _ _) = QL3 + + step :: QualLevel -> [BuildTarget] + -> ([BuildTarget], [(UserBuildTarget, BuildTarget)]) + step ql = (\(amb, unamb) -> (map snd $ concat amb, concat unamb)) + . partition (\g -> length g > 1) + . groupBy (equating fst) + . sortBy (comparing fst) + . map (\t -> (renderBuildTarget ql t pkgid, t)) + +data QualLevel = QL1 | QL2 | QL3 + deriving (Enum, Show) + +renderBuildTarget :: QualLevel -> BuildTarget -> PackageId -> UserBuildTarget +renderBuildTarget ql target pkgid = + case ql of + QL1 -> UserBuildTargetSingle s1 where s1 = single target + QL2 -> UserBuildTargetDouble s1 s2 where (s1, s2) = double target + QL3 -> UserBuildTargetTriple s1 s2 s3 where (s1, s2, s3) = triple target + + where + single (BuildTargetComponent cn ) = dispCName cn + single (BuildTargetModule _ m) = display m + single (BuildTargetFile _ f) = f + + double (BuildTargetComponent cn ) = (dispKind cn, dispCName cn) + double (BuildTargetModule cn m) = (dispCName cn, display m) + double (BuildTargetFile cn f) = (dispCName cn, f) + + triple (BuildTargetComponent _ ) = error "triple BuildTargetComponent" + triple (BuildTargetModule cn m) = (dispKind cn, dispCName cn, display m) + triple (BuildTargetFile cn f) = (dispKind cn, dispCName cn, f) + + dispCName = componentStringName pkgid + dispKind = showComponentKindShort . componentKind + +reportBuildTargetProblems :: [BuildTargetProblem] -> IO () +reportBuildTargetProblems problems = do + + case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Unrecognised build target '" ++ showUserBuildTarget target + ++ "'.\n" + ++ "Expected a " ++ intercalate " or " expected + ++ ", rather than '" ++ got ++ "'." 
+ | (target, expected, got) <- targets ] + + case [ (t, e) | BuildTargetNoSuch t e <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Unknown build target '" ++ showUserBuildTarget target + ++ "'.\nThere is no " + ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" + | (thing, got) <- nosuch ] ++ "." + | (target, nosuch) <- targets ] + where + mungeThing "file" = "file target" + mungeThing thing = thing + + case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Ambiguous build target '" ++ showUserBuildTarget target + ++ "'. It could be:\n " + ++ unlines [ " "++ showUserBuildTarget ut ++ + " (" ++ showBuildTargetKind bt ++ ")" + | (ut, bt) <- amb ] + | (target, amb) <- targets ] + + where + showBuildTargetKind (BuildTargetComponent _ ) = "component" + showBuildTargetKind (BuildTargetModule _ _) = "module" + showBuildTargetKind (BuildTargetFile _ _) = "file" + + +---------------------------------- +-- Top level BuildTarget matcher +-- + +matchBuildTarget :: PackageDescription + -> UserBuildTarget -> Bool -> Match BuildTarget +matchBuildTarget pkg = \utarget fexists -> + case utarget of + UserBuildTargetSingle str1 -> + matchBuildTarget1 cinfo str1 fexists + + UserBuildTargetDouble str1 str2 -> + matchBuildTarget2 cinfo str1 str2 fexists + + UserBuildTargetTriple str1 str2 str3 -> + matchBuildTarget3 cinfo str1 str2 str3 fexists + where + cinfo = pkgComponentInfo pkg + +matchBuildTarget1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget +matchBuildTarget1 cinfo str1 fexists = + matchComponent1 cinfo str1 + `matchPlusShadowing` matchModule1 cinfo str1 + `matchPlusShadowing` matchFile1 cinfo str1 fexists + + +matchBuildTarget2 :: [ComponentInfo] -> String -> String -> Bool + -> Match BuildTarget +matchBuildTarget2 cinfo str1 str2 fexists = + matchComponent2 cinfo str1 str2 + `matchPlusShadowing` matchModule2 cinfo str1 str2 + `matchPlusShadowing` matchFile2 cinfo str1 str2 fexists + + +matchBuildTarget3 :: [ComponentInfo] -> String -> String -> String -> Bool + -> Match BuildTarget +matchBuildTarget3 cinfo str1 str2 str3 fexists = + matchModule3 cinfo str1 str2 str3 + `matchPlusShadowing` matchFile3 cinfo str1 str2 str3 fexists + + +data ComponentInfo = ComponentInfo { + cinfoName :: ComponentName, + cinfoStrName :: ComponentStringName, + cinfoSrcDirs :: [FilePath], + cinfoModules :: [ModuleName], + cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) + cinfoCFiles :: [FilePath], + cinfoJsFiles :: [FilePath] + } + +type ComponentStringName = String + +pkgComponentInfo :: PackageDescription -> [ComponentInfo] +pkgComponentInfo pkg = + [ ComponentInfo { + cinfoName = componentName c, + cinfoStrName = componentStringName pkg (componentName c), + cinfoSrcDirs = hsSourceDirs bi, + cinfoModules = componentModules c, + cinfoHsFiles = componentHsFiles c, + cinfoCFiles = cSources bi, + cinfoJsFiles = jsSources bi + } + | c <- pkgComponents pkg + , let bi = componentBuildInfo c ] + +componentStringName :: Package pkg => pkg -> ComponentName -> ComponentStringName +componentStringName pkg CLibName = display (packageName pkg) +componentStringName _ (CExeName name) = name +componentStringName _ (CTestName name) = name +componentStringName _ (CBenchName name) = name + +componentModules :: Component -> [ModuleName] +componentModules (CLib lib) = libModules lib +componentModules (CExe exe) = exeModules exe +componentModules (CTest test) = testModules test +componentModules (CBench bench) = benchmarkModules 
bench + +componentHsFiles :: Component -> [FilePath] +componentHsFiles (CExe exe) = [modulePath exe] +componentHsFiles (CTest TestSuite { + testInterface = TestSuiteExeV10 _ mainfile + }) = [mainfile] +componentHsFiles (CBench Benchmark { + benchmarkInterface = BenchmarkExeV10 _ mainfile + }) = [mainfile] +componentHsFiles _ = [] + +{- +ex_cs :: [ComponentInfo] +ex_cs = + [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) + , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) + ] + where + mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + pkgid :: PackageIdentifier + Just pkgid = simpleParse "thelib" +-} + +------------------------------ +-- Matching component kinds +-- + +data ComponentKind = LibKind | ExeKind | TestKind | BenchKind + deriving (Eq, Ord, Show) + +componentKind :: ComponentName -> ComponentKind +componentKind CLibName = LibKind +componentKind (CExeName _) = ExeKind +componentKind (CTestName _) = TestKind +componentKind (CBenchName _) = BenchKind + +cinfoKind :: ComponentInfo -> ComponentKind +cinfoKind = componentKind . cinfoName + +matchComponentKind :: String -> Match ComponentKind +matchComponentKind s + | s `elem` ["lib", "library"] = increaseConfidence >> return LibKind + | s `elem` ["exe", "executable"] = increaseConfidence >> return ExeKind + | s `elem` ["tst", "test", "test-suite"] = increaseConfidence + >> return TestKind + | s `elem` ["bench", "benchmark"] = increaseConfidence + >> return BenchKind + | otherwise = matchErrorExpected + "component kind" s + +showComponentKind :: ComponentKind -> String +showComponentKind LibKind = "library" +showComponentKind ExeKind = "executable" +showComponentKind TestKind = "test-suite" +showComponentKind BenchKind = "benchmark" + +showComponentKindShort :: ComponentKind -> String +showComponentKindShort LibKind = "lib" +showComponentKindShort ExeKind = "exe" +showComponentKindShort TestKind = "test" +showComponentKindShort BenchKind = "bench" + +------------------------------ +-- Matching component targets +-- + +matchComponent1 :: [ComponentInfo] -> String -> Match BuildTarget +matchComponent1 cs = \str1 -> do + guardComponentName str1 + c <- matchComponentName cs str1 + return (BuildTargetComponent (cinfoName c)) + +matchComponent2 :: [ComponentInfo] -> String -> String -> Match BuildTarget +matchComponent2 cs = \str1 str2 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + return (BuildTargetComponent (cinfoName c)) + +-- utils: + +guardComponentName :: String -> Match () +guardComponentName s + | all validComponentChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "component name" s + where + validComponentChar c = isAlphaNum c || c == '.' 
+ || c == '_' || c == '-' || c == '\'' + +matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo +matchComponentName cs str = + orNoSuchThing "component" str + $ increaseConfidenceFor + $ matchInexactly caseFold + [ (cinfoStrName c, c) | c <- cs ] + str + +matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String + -> Match ComponentInfo +matchComponentKindAndName cs ckind str = + orNoSuchThing (showComponentKind ckind ++ " component") str + $ increaseConfidenceFor + $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) + [ ((cinfoKind c, cinfoStrName c), c) | c <- cs ] + (ckind, str) + + +------------------------------ +-- Matching module targets +-- + +matchModule1 :: [ComponentInfo] -> String -> Match BuildTarget +matchModule1 cs = \str1 -> do + guardModuleName str1 + nubMatchErrors $ do + c <- tryEach cs + let ms = cinfoModules c + m <- matchModuleName ms str1 + return (BuildTargetModule (cinfoName c) m) + +matchModule2 :: [ComponentInfo] -> String -> String -> Match BuildTarget +matchModule2 cs = \str1 str2 -> do + guardComponentName str1 + guardModuleName str2 + c <- matchComponentName cs str1 + let ms = cinfoModules c + m <- matchModuleName ms str2 + return (BuildTargetModule (cinfoName c) m) + +matchModule3 :: [ComponentInfo] -> String -> String -> String + -> Match BuildTarget +matchModule3 cs str1 str2 str3 = do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + guardModuleName str3 + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (BuildTargetModule (cinfoName c) m) + +-- utils: + +guardModuleName :: String -> Match () +guardModuleName s + | all validModuleChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "module name" s + where + validModuleChar c = isAlphaNum c || c == '.' 
|| c == '_' || c == '\'' + +matchModuleName :: [ModuleName] -> String -> Match ModuleName +matchModuleName ms str = + orNoSuchThing "module" str + $ increaseConfidenceFor + $ matchInexactly caseFold + [ (display m, m) + | m <- ms ] + str + + +------------------------------ +-- Matching file targets +-- + +matchFile1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget +matchFile1 cs str1 exists = + nubMatchErrors $ do + c <- tryEach cs + filepath <- matchComponentFile c str1 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchFile2 :: [ComponentInfo] -> String -> String -> Bool -> Match BuildTarget +matchFile2 cs str1 str2 exists = do + guardComponentName str1 + c <- matchComponentName cs str1 + filepath <- matchComponentFile c str2 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchFile3 :: [ComponentInfo] -> String -> String -> String -> Bool + -> Match BuildTarget +matchFile3 cs str1 str2 str3 exists = do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + filepath <- matchComponentFile c str3 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchComponentFile :: ComponentInfo -> String -> Bool -> Match FilePath +matchComponentFile c str fexists = + expecting "file" str $ + matchPlus + (matchFileExists str fexists) + (matchPlusShadowing + (msum [ matchModuleFileRooted dirs ms str + , matchOtherFileRooted dirs hsFiles str ]) + (msum [ matchModuleFileUnrooted ms str + , matchOtherFileUnrooted hsFiles str + , matchOtherFileUnrooted cFiles str + , matchOtherFileUnrooted jsFiles str ])) + where + dirs = cinfoSrcDirs c + ms = cinfoModules c + hsFiles = cinfoHsFiles c + cFiles = cinfoCFiles c + jsFiles = cinfoJsFiles c + + +-- utils + +matchFileExists :: FilePath -> Bool -> Match a +matchFileExists _ False = mzero +matchFileExists fname True = do increaseConfidence + matchErrorNoSuch "file" fname + +matchModuleFileUnrooted :: [ModuleName] -> String -> Match FilePath +matchModuleFileUnrooted ms str = do + let filepath = normalise str + _ <- matchModuleFileStem ms filepath + return filepath + +matchModuleFileRooted :: [FilePath] -> [ModuleName] -> String -> Match FilePath +matchModuleFileRooted dirs ms str = nubMatches $ do + let filepath = normalise str + filepath' <- matchDirectoryPrefix dirs filepath + _ <- matchModuleFileStem ms filepath' + return filepath + +matchModuleFileStem :: [ModuleName] -> FilePath -> Match ModuleName +matchModuleFileStem ms = + increaseConfidenceFor + . matchInexactly caseFold + [ (toFilePath m, m) | m <- ms ] + . dropExtension + +matchOtherFileRooted :: [FilePath] -> [FilePath] -> FilePath -> Match FilePath +matchOtherFileRooted dirs fs str = do + let filepath = normalise str + filepath' <- matchDirectoryPrefix dirs filepath + _ <- matchFile fs filepath' + return filepath + +matchOtherFileUnrooted :: [FilePath] -> FilePath -> Match FilePath +matchOtherFileUnrooted fs str = do + let filepath = normalise str + _ <- matchFile fs filepath + return filepath + +matchFile :: [FilePath] -> FilePath -> Match FilePath +matchFile fs = increaseConfidenceFor + . 
matchInexactly caseFold [ (f, f) | f <- fs ] + +matchDirectoryPrefix :: [FilePath] -> FilePath -> Match FilePath +matchDirectoryPrefix dirs filepath = + exactMatches $ + catMaybes + [ stripDirectory (normalise dir) filepath | dir <- dirs ] + where + stripDirectory :: FilePath -> FilePath -> Maybe FilePath + stripDirectory dir fp = + joinPath `fmap` stripPrefix (splitDirectories dir) (splitDirectories fp) + + +------------------------------ +-- Matching monad +-- + +-- | A matcher embodies a way to match some input as being some recognised +-- value. In particular it deals with multiple and ambiguous matches. +-- +-- There are various matcher primitives ('matchExactly', 'matchInexactly'), +-- ways to combine matchers ('ambiguousWith', 'shadows') and finally we can +-- run a matcher against an input using 'findMatch'. +-- + +data Match a = NoMatch Confidence [MatchError] + | ExactMatch Confidence [a] + | InexactMatch Confidence [a] + deriving Show + +type Confidence = Int + +data MatchError = MatchErrorExpected String String + | MatchErrorNoSuch String String + deriving (Show, Eq) + + +instance Alternative Match where + empty = mzero + (<|>) = mplus + +instance MonadPlus Match where + mzero = matchZero + mplus = matchPlus + +matchZero :: Match a +matchZero = NoMatch 0 [] + +-- | Combine two matchers. Exact matches are used over inexact matches +-- but if we have multiple exact, or inexact then the we collect all the +-- ambiguous matches. +-- +matchPlus :: Match a -> Match a -> Match a +matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') = + ExactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a +matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') = + InexactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b +matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms') + | d1 > d2 = a + | d1 < d2 = b + | otherwise = NoMatch d1 (ms ++ ms') + +-- | Combine two matchers. This is similar to 'ambiguousWith' with the +-- difference that an exact match from the left matcher shadows any exact +-- match on the right. Inexact matches are still collected however. +-- +matchPlusShadowing :: Match a -> Match a -> Match a +matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a +matchPlusShadowing a b = matchPlus a b + +instance Functor Match where + fmap _ (NoMatch d ms) = NoMatch d ms + fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs) + fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs) + +instance Applicative Match where + pure a = ExactMatch 0 [a] + (<*>) = ap + +instance Monad Match where + return = AP.pure + + NoMatch d ms >>= _ = NoMatch d ms + ExactMatch d xs >>= f = addDepth d + $ foldr matchPlus matchZero (map f xs) + InexactMatch d xs >>= f = addDepth d . 
forceInexact + $ foldr matchPlus matchZero (map f xs) + +addDepth :: Confidence -> Match a -> Match a +addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs +addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs +addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs + +forceInexact :: Match a -> Match a +forceInexact (ExactMatch d ys) = InexactMatch d ys +forceInexact m = m + +------------------------------ +-- Various match primitives +-- + +matchErrorExpected, matchErrorNoSuch :: String -> String -> Match a +matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] +matchErrorNoSuch thing got = NoMatch 0 [MatchErrorNoSuch thing got] + +expecting :: String -> String -> Match a -> Match a +expecting thing got (NoMatch 0 _) = matchErrorExpected thing got +expecting _ _ m = m + +orNoSuchThing :: String -> String -> Match a -> Match a +orNoSuchThing thing got (NoMatch 0 _) = matchErrorNoSuch thing got +orNoSuchThing _ _ m = m + +increaseConfidence :: Match () +increaseConfidence = ExactMatch 1 [()] + +increaseConfidenceFor :: Match a -> Match a +increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r + +nubMatches :: Eq a => Match a -> Match a +nubMatches (NoMatch d msgs) = NoMatch d msgs +nubMatches (ExactMatch d xs) = ExactMatch d (nub xs) +nubMatches (InexactMatch d xs) = InexactMatch d (nub xs) + +nubMatchErrors :: Match a -> Match a +nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs) +nubMatchErrors (ExactMatch d xs) = ExactMatch d xs +nubMatchErrors (InexactMatch d xs) = InexactMatch d xs + +-- | Lift a list of matches to an exact match. +-- +exactMatches, inexactMatches :: [a] -> Match a + +exactMatches [] = matchZero +exactMatches xs = ExactMatch 0 xs + +inexactMatches [] = matchZero +inexactMatches xs = InexactMatch 0 xs + +tryEach :: [a] -> Match a +tryEach = exactMatches + + +------------------------------ +-- Top level match runner +-- + +-- | Given a matcher and a key to look up, use the matcher to find all the +-- possible matches. There may be 'None', a single 'Unambiguous' match or +-- you may have an 'Ambiguous' match with several possibilities. +-- +findMatch :: Eq b => Match b -> MaybeAmbiguous b +findMatch match = + case match of + NoMatch _ msgs -> None (nub msgs) + ExactMatch _ xs -> checkAmbiguous xs + InexactMatch _ xs -> checkAmbiguous xs + where + checkAmbiguous xs = case nub xs of + [x] -> Unambiguous x + xs' -> Ambiguous xs' + +data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous [a] + deriving Show + + +------------------------------ +-- Basic matchers +-- + +{- +-- | A primitive matcher that looks up a value in a finite 'Map'. The +-- value must match exactly. +-- +matchExactly :: forall a b. Ord a => [(a, b)] -> (a -> Match b) +matchExactly xs = + \x -> case Map.lookup x m of + Nothing -> matchZero + Just ys -> ExactMatch 0 ys + where + m :: Ord a => Map a [b] + m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] +-} + +-- | A primitive matcher that looks up a value in a finite 'Map'. It checks +-- for an exact or inexact match. We get an inexact match if the match +-- is not exact, but the canonical forms match. It takes a canonicalisation +-- function for this purpose. +-- +-- So for example if we used string case fold as the canonicalisation +-- function, then we would get case insensitive matching (but it will still +-- report an exact match when the case matches too). 
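+-- As a rough illustration (not from the upstream module), using the
+-- 'caseFold' helper defined at the end of this module as the
+-- canonicalisation function:
+--
+-- > matchInexactly caseFold [("Foo", 1)] "Foo"  -- ExactMatch   0 [1]
+-- > matchInexactly caseFold [("Foo", 1)] "FOO"  -- InexactMatch 0 [1]
+-- > matchInexactly caseFold [("Foo", 1)] "bar"  -- NoMatch      0 []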
+-- +matchInexactly :: (Ord a, Ord a') => + (a -> a') -> + [(a, b)] -> (a -> Match b) +matchInexactly cannonicalise xs = + \x -> case Map.lookup x m of + Just ys -> exactMatches ys + Nothing -> case Map.lookup (cannonicalise x) m' of + Just ys -> inexactMatches ys + Nothing -> matchZero + where + m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] + + -- the map of canonicalised keys to groups of inexact matches + m' = Map.mapKeysWith (++) cannonicalise m + + + +------------------------------ +-- Utils +-- + +caseFold :: String -> String +caseFold = lowercase diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/CCompiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/CCompiler.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/CCompiler.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/CCompiler.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,122 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.CCompiler +-- Copyright : 2011, Dan Knapp +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This simple package provides types and functions for interacting with +-- C compilers. Currently it's just a type enumerating extant C-like +-- languages, which we call dialects. + +{- +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} + +module Distribution.Simple.CCompiler ( + CDialect(..), + cSourceExtensions, + cDialectFilenameExtension, + filenameCDialect + ) where + +import Distribution.Compat.Semigroup as Semi + +import System.FilePath + ( takeExtension ) + + +-- | Represents a dialect of C. The Monoid instance expresses backward +-- compatibility, in the sense that 'mappend a b' is the least inclusive +-- dialect which both 'a' and 'b' can be correctly interpreted as. 
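+-- For instance, per the instances declared below (illustrative, not from
+-- the upstream module):
+--
+-- > C          <> CPlusPlus  -- CPlusPlus
+-- > ObjectiveC <> CPlusPlus  -- ObjectiveCPlusPlus
+-- > mempty     <> ObjectiveC -- ObjectiveC, since 'mempty' is plain 'C'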
+data CDialect = C + | ObjectiveC + | CPlusPlus + | ObjectiveCPlusPlus + deriving (Eq, Show) + +instance Monoid CDialect where + mempty = C + mappend = (Semi.<>) + +instance Semigroup CDialect where + C <> anything = anything + ObjectiveC <> CPlusPlus = ObjectiveCPlusPlus + CPlusPlus <> ObjectiveC = ObjectiveCPlusPlus + _ <> ObjectiveCPlusPlus = ObjectiveCPlusPlus + ObjectiveC <> _ = ObjectiveC + CPlusPlus <> _ = CPlusPlus + ObjectiveCPlusPlus <> _ = ObjectiveCPlusPlus + +-- | A list of all file extensions which are recognized as possibly containing +-- some dialect of C code. Note that this list is only for source files, +-- not for header files. +cSourceExtensions :: [String] +cSourceExtensions = ["c", "i", "ii", "m", "mi", "mm", "M", "mii", "cc", "cp", + "cxx", "cpp", "CPP", "c++", "C"] + + +-- | Takes a dialect of C and whether code is intended to be passed through +-- the preprocessor, and returns a filename extension for containing that +-- code. +cDialectFilenameExtension :: CDialect -> Bool -> String +cDialectFilenameExtension C True = "c" +cDialectFilenameExtension C False = "i" +cDialectFilenameExtension ObjectiveC True = "m" +cDialectFilenameExtension ObjectiveC False = "mi" +cDialectFilenameExtension CPlusPlus True = "cpp" +cDialectFilenameExtension CPlusPlus False = "ii" +cDialectFilenameExtension ObjectiveCPlusPlus True = "mm" +cDialectFilenameExtension ObjectiveCPlusPlus False = "mii" + + +-- | Infers from a filename's extension the dialect of C which it contains, +-- and whether it is intended to be passed through the preprocessor. +filenameCDialect :: String -> Maybe (CDialect, Bool) +filenameCDialect filename = do + extension <- case takeExtension filename of + '.':ext -> Just ext + _ -> Nothing + case extension of + "c" -> return (C, True) + "i" -> return (C, False) + "ii" -> return (CPlusPlus, False) + "m" -> return (ObjectiveC, True) + "mi" -> return (ObjectiveC, False) + "mm" -> return (ObjectiveCPlusPlus, True) + "M" -> return (ObjectiveCPlusPlus, True) + "mii" -> return (ObjectiveCPlusPlus, False) + "cc" -> return (CPlusPlus, True) + "cp" -> return (CPlusPlus, True) + "cxx" -> return (CPlusPlus, True) + "cpp" -> return (CPlusPlus, True) + "CPP" -> return (CPlusPlus, True) + "c++" -> return (CPlusPlus, True) + "C" -> return (CPlusPlus, True) + _ -> Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Command.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Command.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Command.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Command.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,620 @@ +{-# LANGUAGE ExistentialQuantification #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Command +-- Copyright : Duncan Coutts 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : non-portable (ExistentialQuantification) +-- +-- This is to do with command line handling. The Cabal command line is +-- organised into a number of named sub-commands (much like darcs). The +-- 'CommandUI' abstraction represents one of these sub-commands, with a name, +-- description, a set of flags. Commands can be associated with actions and +-- run. 
It handles some common stuff automatically, like the @--help@ and +-- command line completion flags. It is designed to allow other tools make +-- derived commands. This feature is used heavily in @cabal-install@. + +module Distribution.Simple.Command ( + + -- * Command interface + CommandUI(..), + commandShowOptions, + CommandParse(..), + commandParseArgs, + getNormalCommandDescriptions, + helpCommandUI, + + -- ** Constructing commands + ShowOrParseArgs(..), + usageDefault, + usageAlternatives, + mkCommandUI, + hiddenCommand, + + -- ** Associating actions with commands + Command, + commandAddAction, + noExtraFlags, + + -- ** Building lists of commands + CommandType(..), + CommandSpec(..), + commandFromSpec, + + -- ** Running commands + commandsRun, + +-- * Option Fields + OptionField(..), Name, + +-- ** Constructing Option Fields + option, multiOption, + +-- ** Liftings & Projections + liftOption, viewAsFieldDescr, + +-- * Option Descriptions + OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder, + +-- ** OptDescr 'smart' constructors + MkOptDescr, + reqArg, reqArg', optArg, optArg', noArg, + boolOpt, boolOpt', choiceOpt, choiceOptFromEnum + + ) where + +import qualified Distribution.GetOpt as GetOpt +import Distribution.Text +import Distribution.ParseUtils +import Distribution.ReadE +import Distribution.Simple.Utils + +import Control.Monad +import Data.Char (isAlpha, toLower) +import Data.List (sortBy) +import Data.Maybe +import Data.Monoid as Mon +import Text.PrettyPrint ( punctuate, cat, comma, text ) +import Text.PrettyPrint as PP ( empty ) + +data CommandUI flags = CommandUI { + -- | The name of the command as it would be entered on the command line. + -- For example @\"build\"@. + commandName :: String, + -- | A short, one line description of the command to use in help texts. + commandSynopsis :: String, + -- | A function that maps a program name to a usage summary for this + -- command. + commandUsage :: String -> String, + -- | Additional explanation of the command to use in help texts. + commandDescription :: Maybe (String -> String), + -- | Post-Usage notes and examples in help texts + commandNotes :: Maybe (String -> String), + -- | Initial \/ empty flags + commandDefaultFlags :: flags, + -- | All the Option fields for this command + commandOptions :: ShowOrParseArgs -> [OptionField flags] + } + +data ShowOrParseArgs = ShowArgs | ParseArgs +type Name = String +type Description = String + +-- | We usually have a data type for storing configuration values, where +-- every field stores a configuration option, and the user sets +-- the value either via command line flags or a configuration file. +-- An individual OptionField models such a field, and we usually +-- build a list of options associated to a configuration data type. +data OptionField a = OptionField { + optionName :: Name, + optionDescr :: [OptDescr a] } + +-- | An OptionField takes one or more OptDescrs, describing the command line +-- interface for the field. 
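+-- A minimal sketch (illustrative only; @Opts@ and @optOut@ are hypothetical
+-- names, not part of this module): a record field @optOut :: Opts -> [String]@
+-- could be exposed as a @--output@ flag using the @option@ and @reqArg'@
+-- smart constructors defined below:
+--
+-- > option "o" ["output"] "Output directory"
+-- >        optOut (\v flags -> flags { optOut = v })
+-- >        (reqArg' "DIR" (\d -> [d]) id)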
+data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder + (ReadE (a->a)) (a -> [String]) + + | OptArg Description OptFlags ArgPlaceHolder + (ReadE (a->a)) (a->a) (a -> [Maybe String]) + + | ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)] + + | BoolOpt Description OptFlags{-True-} OptFlags{-False-} + (Bool -> a -> a) (a-> Maybe Bool) + +-- | Short command line option strings +type SFlags = [Char] +-- | Long command line option strings +type LFlags = [String] +type OptFlags = (SFlags,LFlags) +type ArgPlaceHolder = String + + +-- | Create an option taking a single OptDescr. +-- No explicit Name is given for the Option, the name is the first LFlag given. +option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a + -> OptionField a +option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set] +option _ _ _ _ _ _ = error $ "Distribution.command.option: " + ++ "An OptionField must have at least one LFlag" + +-- | Create an option taking several OptDescrs. +-- You will have to give the flags and description individually to the +-- OptDescr constructor. +multiOption :: Name -> get -> set + -> [get -> set -> OptDescr a] -- ^MkOptDescr constructors partially + -- applied to flags and description. + -> OptionField a +multiOption n get set args = OptionField n [arg get set | arg <- args] + +type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set + -> OptDescr a + +-- | Create a string-valued command line interface. +reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +reqArg ad mkflag showflag sf lf d get set = + ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) + (showflag . get) + +-- | Create a string-valued command line interface with a default value. +optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +optArg ad mkflag def showflag sf lf d get set = + OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) + (\b -> set (get b `mappend` def) b) + (showflag . get) + +-- | (String -> a) variant of "reqArg" +reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +reqArg' ad mkflag showflag = + reqArg ad (succeedReadE mkflag) showflag + +-- | (String -> a) variant of "optArg" +optArg' :: Mon.Monoid b => ArgPlaceHolder -> (Maybe String -> b) + -> (b -> [Maybe String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +optArg' ad mkflag showflag = + optArg ad (succeedReadE (mkflag . Just)) def showflag + where def = mkflag Nothing + +noArg :: (Eq b) => b -> MkOptDescr (a -> b) (b -> a -> a) a +noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d + +boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags + -> MkOptDescr (a -> b) (b -> a -> a) a +boolOpt g s sfT sfF _sf _lf@(n:_) d get set = + BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get) +boolOpt _ _ _ _ _ _ _ _ _ = error + "Distribution.Simple.Setup.boolOpt: unreachable" + +boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags + -> MkOptDescr (a -> b) (b -> a -> a) a +boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get) + +-- | create a Choice option +choiceOpt :: Eq b => [(b,OptFlags,Description)] + -> MkOptDescr (a -> b) (b -> a -> a) a +choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts + where alts = [(d,flags, set alt, (==alt) . 
get) | (alt,flags,d) <- aa_ff] + +-- | create a Choice option out of an enumeration type. +-- As long flags, the Show output is used. As short flags, the first character +-- which does not conflict with a previous one is used. +choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => + MkOptDescr (a -> b) (b -> a -> a) a +choiceOptFromEnum _sf _lf d get = + choiceOpt [ (x, (sf, [map toLower $ show x]), d') + | (x, sf) <- sflags' + , let d' = d ++ show x] + _sf _lf d get + where sflags' = foldl f [] [firstOne..] + f prev x = let prevflags = concatMap snd prev in + prev ++ take 1 [(x, [toLower sf]) + | sf <- show x, isAlpha sf + , toLower sf `notElem` prevflags] + firstOne = minBound `asTypeOf` get undefined + +commandGetOpts :: ShowOrParseArgs -> CommandUI flags + -> [GetOpt.OptDescr (flags -> flags)] +commandGetOpts showOrParse command = + concatMap viewAsGetOpt (commandOptions command showOrParse) + +viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)] +viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa + where + optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) = + [GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d] + where set' = readEOrFail set + optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) = + [GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d] + where set' Nothing = def + set' (Just txt) = readEOrFail set txt + optDescrToGetOpt (ChoiceOpt alts) = + [GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ] + optDescrToGetOpt (BoolOpt d (sfT, lfT) ([], []) set _) = + [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) d ] + optDescrToGetOpt (BoolOpt d ([], []) (sfF, lfF) set _) = + [ GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) d ] + optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) = + [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d) + , GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ] + +-- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > +-- Choice > Opt) and consider only the first one. +viewAsFieldDescr :: OptionField a -> FieldDescr a +viewAsFieldDescr (OptionField _n []) = + error "Distribution.command.viewAsFieldDescr: unexpected" +viewAsFieldDescr (OptionField n dd) = FieldDescr n get set + where + optDescr = head $ sortBy cmp dd + + cmp :: OptDescr a -> OptDescr a -> Ordering + ReqArg{} `cmp` ReqArg{} = EQ + ReqArg{} `cmp` _ = GT + BoolOpt{} `cmp` ReqArg{} = LT + BoolOpt{} `cmp` BoolOpt{} = EQ + BoolOpt{} `cmp` _ = GT + ChoiceOpt{} `cmp` ReqArg{} = LT + ChoiceOpt{} `cmp` BoolOpt{} = LT + ChoiceOpt{} `cmp` ChoiceOpt{} = EQ + ChoiceOpt{} `cmp` _ = GT + OptArg{} `cmp` OptArg{} = EQ + OptArg{} `cmp` _ = LT + +-- get :: a -> Doc + get t = case optDescr of + ReqArg _ _ _ _ ppr -> + (cat . punctuate comma . map text . ppr) t + + OptArg _ _ _ _ _ ppr -> + case ppr t of [] -> PP.empty + (Nothing : _) -> text "True" + (Just a : _) -> text a + + ChoiceOpt alts -> + fromMaybe PP.empty $ listToMaybe + [ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t] + + BoolOpt _ _ _ _ enabled -> (maybe PP.empty disp . 
enabled) t + +-- set :: LineNo -> String -> a -> ParseResult a + set line val a = + case optDescr of + ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val + -- We parse for a single value instead of a + -- list, as one can't really implement + -- parseList :: ReadE a -> ReadE [a] with + -- the current ReadE definition + ChoiceOpt{} -> + case getChoiceByLongFlag optDescr val of + Just f -> return (f a) + _ -> syntaxError line val + + BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val + + OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val + -- Optional arguments are parsed just like + -- required arguments here; we don't + -- provide a method to set an OptArg field + -- to the default value. + +getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b) +getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe + [ set | (_,(_sf,lf:_), set, _) <- alts + , lf == val] + +getChoiceByLongFlag _ _ = + error "Distribution.command.getChoiceByLongFlag: expected a choice option" + +getCurrentChoice :: OptDescr a -> a -> [String] +getCurrentChoice (ChoiceOpt alts) a = + [ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a] + +getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr" + + +liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b +liftOption get' set' opt = + opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt} + + +liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b +liftOptDescr get' set' (ChoiceOpt opts) = + ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get')) + | (d, ff, set, get) <- opts] + +liftOptDescr get' set' (OptArg d ff ad set def get) = + OptArg d ff ad (liftSet get' set' `fmap` set) + (liftSet get' set' def) (get . get') + +liftOptDescr get' set' (ReqArg d ff ad set get) = + ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get') + +liftOptDescr get' set' (BoolOpt d ffT ffF set get) = + BoolOpt d ffT ffF (liftSet get' set' . set) (get . 
get') + +liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b +liftSet get' set' set x = set' (set $ get' x) x + +-- | Show flags in the standard long option command line format +commandShowOptions :: CommandUI flags -> flags -> [String] +commandShowOptions command v = concat + [ showOptDescr v od | o <- commandOptions command ParseArgs + , od <- optionDescr o] + where + maybePrefix [] = [] + maybePrefix (lOpt:_) = ["--" ++ lOpt] + + showOptDescr :: a -> OptDescr a -> [String] + showOptDescr x (BoolOpt _ (_,lfTs) (_,lfFs) _ enabled) + = case enabled x of + Nothing -> [] + Just True -> maybePrefix lfTs + Just False -> maybePrefix lfFs + showOptDescr x c@ChoiceOpt{} + = ["--" ++ val | val <- getCurrentChoice c x] + showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag) + = [ "--"++lf++"="++flag + | flag <- showflag x ] + showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag) + = [ case flag of + Just s -> "--"++lf++"="++s + Nothing -> "--"++lf + | flag <- showflag x ] + showOptDescr _ _ + = error "Distribution.Simple.Command.showOptDescr: unreachable" + + +commandListOptions :: CommandUI flags -> [String] +commandListOptions command = + concatMap listOption $ + addCommonFlags ShowArgs $ -- This is a slight hack, we don't want + -- "--list-options" showing up in the + -- list options output, so use ShowArgs + commandGetOpts ShowArgs command + where + listOption (GetOpt.Option shortNames longNames _ _) = + [ "-" ++ [name] | name <- shortNames ] + ++ [ "--" ++ name | name <- longNames ] + +-- | The help text for this command with descriptions of all the options. +commandHelp :: CommandUI flags -> String -> String +commandHelp command pname = + commandSynopsis command + ++ "\n\n" + ++ commandUsage command pname + ++ ( case commandDescription command of + Nothing -> "" + Just desc -> '\n': desc pname) + ++ "\n" + ++ ( if cname == "" + then "Global flags:" + else "Flags for " ++ cname ++ ":" ) + ++ ( GetOpt.usageInfo "" + . addCommonFlags ShowArgs + $ commandGetOpts ShowArgs command ) + ++ ( case commandNotes command of + Nothing -> "" + Just notes -> '\n': notes pname) + where cname = commandName command + +-- | Default "usage" documentation text for commands. +usageDefault :: String -> String -> String +usageDefault name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n" + ++ "Flags for " ++ name ++ ":" + +-- | Create "usage" documentation from a list of parameter +-- configurations. +usageAlternatives :: String -> [String] -> String -> String +usageAlternatives name strs pname = unlines + [ start ++ pname ++ " " ++ name ++ " " ++ s + | let starts = "Usage: " : repeat " or: " + , (start, s) <- zip starts strs + ] + +-- | Make a Command from standard 'GetOpt' options. 
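+-- For example (illustrative only; the command name is invented), a command
+-- with no flags of its own could be declared as
+--
+-- > mkCommandUI "clean" "Remove build artefacts." ["[FLAGS]"] () (const [])
+--
+-- which mirrors how 'helpCommandUI' is constructed further down this module.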
+mkCommandUI :: String -- ^ name + -> String -- ^ synopsis + -> [String] -- ^ usage alternatives + -> flags -- ^ initial\/empty flags + -> (ShowOrParseArgs -> [OptionField flags]) -- ^ options + -> CommandUI flags +mkCommandUI name synopsis usages flags options = CommandUI + { commandName = name + , commandSynopsis = synopsis + , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = usageAlternatives name usages + , commandDefaultFlags = flags + , commandOptions = options + } + +-- | Common flags that apply to every command +data CommonFlag = HelpFlag | ListOptionsFlag + +commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag] +commonFlags showOrParseArgs = case showOrParseArgs of + ShowArgs -> [help] + ParseArgs -> [help, list] + where + help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag) + "Show this help text" + helpShortFlags = case showOrParseArgs of + ShowArgs -> ['h'] + ParseArgs -> ['h', '?'] + list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag) + "Print a list of command line flags" + +addCommonFlags :: ShowOrParseArgs + -> [GetOpt.OptDescr a] + -> [GetOpt.OptDescr (Either CommonFlag a)] +addCommonFlags showOrParseArgs options = + map (fmapOptDesc Left) (commonFlags showOrParseArgs) + ++ map (fmapOptDesc Right) options + where fmapOptDesc f (GetOpt.Option s l d m) = + GetOpt.Option s l (fmapArgDesc f d) m + fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a) + fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d + fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d + +-- | Parse a bunch of command line arguments +-- +commandParseArgs :: CommandUI flags + -> Bool -- ^ Is the command a global or subcommand? + -> [String] + -> CommandParse (flags -> flags, [String]) +commandParseArgs command global args = + let options = addCommonFlags ParseArgs + $ commandGetOpts ParseArgs command + order | global = GetOpt.RequireOrder + | otherwise = GetOpt.Permute + in case GetOpt.getOpt' order options args of + (flags, _, _, _) + | any listFlag flags -> CommandList (commandListOptions command) + | any helpFlag flags -> CommandHelp (commandHelp command) + where listFlag (Left ListOptionsFlag) = True; listFlag _ = False + helpFlag (Left HelpFlag) = True; helpFlag _ = False + (flags, opts, opts', []) + | global || null opts' -> CommandReadyToGo (accum flags, mix opts opts') + | otherwise -> CommandErrors (unrecognised opts') + (_, _, _, errs) -> CommandErrors errs + + where -- Note: It is crucial to use reverse function composition here or to + -- reverse the flags here as we want to process the flags left to right + -- but data flow in function composition is right to left. + accum flags = foldr (flip (.)) id [ f | Right f <- flags ] + unrecognised opts = [ "unrecognized " + ++ "'" ++ (commandName command) ++ "'" + ++ " option `" ++ opt ++ "'\n" + | opt <- opts ] + -- For unrecognised global flags we put them in the position just after + -- the command, if there is one. This gives us a chance to parse them + -- as sub-command rather than global flags. 
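+  -- For example (illustrative input, not from the upstream source):
+  --   mix ["build", "all"] ["--unknown"]  ==  ["build", "--unknown", "all"]
+  -- i.e. the unrecognised flag is re-inserted directly after the sub-command.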
+ mix [] ys = ys + mix (x:xs) ys = x:ys++xs + +data CommandParse flags = CommandHelp (String -> String) + | CommandList [String] + | CommandErrors [String] + | CommandReadyToGo flags +instance Functor CommandParse where + fmap _ (CommandHelp help) = CommandHelp help + fmap _ (CommandList opts) = CommandList opts + fmap _ (CommandErrors errs) = CommandErrors errs + fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags) + + +data CommandType = NormalCommand | HiddenCommand +data Command action = + Command String String ([String] -> CommandParse action) CommandType + +-- | Mark command as hidden. Hidden commands don't show up in the 'progname +-- help' or 'progname --help' output. +hiddenCommand :: Command action -> Command action +hiddenCommand (Command name synopsys f _cmdType) = + Command name synopsys f HiddenCommand + +commandAddAction :: CommandUI flags + -> (flags -> [String] -> action) + -> Command action +commandAddAction command action = + Command (commandName command) + (commandSynopsis command) + (fmap (uncurry applyDefaultArgs) . commandParseArgs command False) + NormalCommand + + where applyDefaultArgs mkflags args = + let flags = mkflags (commandDefaultFlags command) + in action flags args + +commandsRun :: CommandUI a + -> [Command action] + -> [String] + -> CommandParse (a, CommandParse action) +commandsRun globalCommand commands args = + case commandParseArgs globalCommand True args of + CommandHelp help -> CommandHelp help + CommandList opts -> CommandList (opts ++ commandNames) + CommandErrors errs -> CommandErrors errs + CommandReadyToGo (mkflags, args') -> case args' of + ("help":cmdArgs) -> handleHelpCommand cmdArgs + (name:cmdArgs) -> case lookupCommand name of + [Command _ _ action _] + -> CommandReadyToGo (flags, action cmdArgs) + _ -> CommandReadyToGo (flags, badCommand name) + [] -> CommandReadyToGo (flags, noCommand) + where flags = mkflags (commandDefaultFlags globalCommand) + + where + lookupCommand cname = [ cmd | cmd@(Command cname' _ _ _) <- commands' + , cname' == cname ] + noCommand = CommandErrors ["no command given (try --help)\n"] + badCommand cname = CommandErrors ["unrecognised command: " ++ cname + ++ " (try --help)\n"] + commands' = commands ++ [commandAddAction helpCommandUI undefined] + commandNames = [ name | (Command name _ _ NormalCommand) <- commands' ] + + -- A bit of a hack: support "prog help" as a synonym of "prog --help" + -- furthermore, support "prog help command" as "prog command --help" + handleHelpCommand cmdArgs = + case commandParseArgs helpCommandUI True cmdArgs of + CommandHelp help -> CommandHelp help + CommandList list -> CommandList (list ++ commandNames) + CommandErrors _ -> CommandHelp globalHelp + CommandReadyToGo (_,[]) -> CommandHelp globalHelp + CommandReadyToGo (_,(name:cmdArgs')) -> + case lookupCommand name of + [Command _ _ action _] -> + case action ("--help":cmdArgs') of + CommandHelp help -> CommandHelp help + CommandList _ -> CommandList [] + _ -> CommandHelp globalHelp + _ -> badCommand name + + where globalHelp = commandHelp globalCommand + +-- | Utility function, many commands do not accept additional flags. This +-- action fails with a helpful error message if the user supplies any extra. 
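+-- For instance (illustrative only): @noExtraFlags []@ simply returns @()@,
+-- while @noExtraFlags ["all"]@ dies with the message
+-- @"Unrecognised flags: all"@.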
+--
+noExtraFlags :: [String] -> IO ()
+noExtraFlags [] = return ()
+noExtraFlags extraFlags =
+ die $ "Unrecognised flags: " ++ intercalate ", " extraFlags
+--TODO: eliminate this function and turn it into a variant on commandAddAction
+-- instead like commandAddActionNoArgs that doesn't supply the [String]
+
+-- | Helper function for creating globalCommand description
+getNormalCommandDescriptions :: [Command action] -> [(String, String)]
+getNormalCommandDescriptions cmds =
+ [ (name, description)
+ | Command name description _ NormalCommand <- cmds ]
+
+helpCommandUI :: CommandUI ()
+helpCommandUI =
+ (mkCommandUI
+ "help"
+ "Help about commands."
+ ["[FLAGS]", "COMMAND [FLAGS]"]
+ ()
+ (const []))
+ {
+ commandNotes = Just $ \pname ->
+ "Examples:\n"
+ ++ " " ++ pname ++ " help help\n"
+ ++ " Oh, apparently you already know this.\n"
+ }
+
+-- | Wraps a @CommandUI@ together with a function that turns it into a @Command@.
+-- Hiding the type of flags for the UI allows construction of a list of all UIs at the
+-- top level of the program. That list can then be used for generation of the manual page
+-- as well as for executing the selected command.
+data CommandSpec action
+ = forall flags. CommandSpec (CommandUI flags) (CommandUI flags -> Command action) CommandType
+
+commandFromSpec :: CommandSpec a -> Command a
+commandFromSpec (CommandSpec ui action _) = action ui
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Compiler.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Compiler.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Compiler.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Compiler.hs 2016-12-23 10:35:21.000000000 +0000
@@ -0,0 +1,362 @@
+{-# LANGUAGE DeriveGeneric #-}
+
+-----------------------------------------------------------------------------
+-- |
+-- Module : Distribution.Simple.Compiler
+-- Copyright : Isaac Jones 2003-2004
+-- License : BSD3
+--
+-- Maintainer : cabal-devel@haskell.org
+-- Portability : portable
+--
+-- This should be a much more sophisticated abstraction than it is. Currently
+-- it's just a bit of data about the compiler, like its flavour and name and
+-- version. The reason it's just data is because currently it has to be in
+-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
+-- only interesting bit of info it contains is a mapping between language
+-- extensions and compiler command line flags. This module also defines a
+-- 'PackageDB' type which is used to refer to package databases. Most compilers
+-- only know about a single global package collection but GHC has a global and
+-- per-user one and it lets you create arbitrary other package databases. We do
+-- not yet fully support this latter feature.
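+--
+-- As a rough, illustrative example (@ghc@ here stands for whatever 'Compiler'
+-- value the configure step produced, and the exact flag text depends on that
+-- compiler's reported extension table): for GHC one would expect something
+-- like
+--
+-- > extensionsToFlags ghc [EnableExtension OverloadedStrings]
+-- >   -- ["-XOverloadedStrings"]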
+ +module Distribution.Simple.Compiler ( + -- * Haskell implementations + module Distribution.Compiler, + Compiler(..), + showCompilerId, showCompilerIdWithAbi, + compilerFlavor, compilerVersion, + compilerCompatVersion, + compilerInfo, + + -- * Support for package databases + PackageDB(..), + PackageDBStack, + registrationPackageDB, + absolutePackageDBPaths, + absolutePackageDBPath, + + -- * Support for optimisation levels + OptimisationLevel(..), + flagToOptimisationLevel, + + -- * Support for debug info levels + DebugInfoLevel(..), + flagToDebugInfoLevel, + + -- * Support for language extensions + Flag, + languageToFlags, + unsupportedLanguages, + extensionsToFlags, + unsupportedExtensions, + parmakeSupported, + reexportedModulesSupported, + renamingPackageFlagsSupported, + unifiedIPIDRequired, + packageKeySupported, + unitIdSupported, + libraryDynDirSupported, + + -- * Support for profiling detail levels + ProfDetailLevel(..), + knownProfDetailLevels, + flagToProfDetailLevel, + showProfDetailLevel, + ) where + +import Distribution.Compiler +import Distribution.Version +import Distribution.Text +import Language.Haskell.Extension +import Distribution.Simple.Utils +import Distribution.Compat.Binary + +import Control.Monad (liftM) +import Data.List (nub) +import qualified Data.Map as M (Map, lookup) +import Data.Maybe (catMaybes, isNothing, listToMaybe) +import GHC.Generics (Generic) +import System.Directory (canonicalizePath) + +data Compiler = Compiler { + compilerId :: CompilerId, + -- ^ Compiler flavour and version. + compilerAbiTag :: AbiTag, + -- ^ Tag for distinguishing incompatible ABI's on the same architecture/os. + compilerCompat :: [CompilerId], + -- ^ Other implementations that this compiler claims to be compatible with. + compilerLanguages :: [(Language, Flag)], + -- ^ Supported language standards. + compilerExtensions :: [(Extension, Flag)], + -- ^ Supported extensions. + compilerProperties :: M.Map String String + -- ^ A key-value map for properties not covered by the above fields. + } + deriving (Eq, Generic, Show, Read) + +instance Binary Compiler + +showCompilerId :: Compiler -> String +showCompilerId = display . compilerId + +showCompilerIdWithAbi :: Compiler -> String +showCompilerIdWithAbi comp = + display (compilerId comp) ++ + case compilerAbiTag comp of + NoAbiTag -> [] + AbiTag xs -> '-':xs + +compilerFlavor :: Compiler -> CompilerFlavor +compilerFlavor = (\(CompilerId f _) -> f) . compilerId + +compilerVersion :: Compiler -> Version +compilerVersion = (\(CompilerId _ v) -> v) . compilerId + +compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version +compilerCompatVersion flavor comp + | compilerFlavor comp == flavor = Just (compilerVersion comp) + | otherwise = + listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ] + +compilerInfo :: Compiler -> CompilerInfo +compilerInfo c = CompilerInfo (compilerId c) + (compilerAbiTag c) + (Just . compilerCompat $ c) + (Just . map fst . compilerLanguages $ c) + (Just . map fst . compilerExtensions $ c) + +-- ------------------------------------------------------------ +-- * Package databases +-- ------------------------------------------------------------ + +-- |Some compilers have a notion of a database of available packages. +-- For some there is just one global db of packages, other compilers +-- support a per-user or an arbitrary db specified at some location in +-- the file system. 
This can be used to build isolated environments of
+-- packages, for example to build a collection of related packages
+-- without installing them globally.
+--
+data PackageDB = GlobalPackageDB
+ | UserPackageDB
+ | SpecificPackageDB FilePath
+ deriving (Eq, Generic, Ord, Show, Read)
+
+instance Binary PackageDB
+
+-- | We typically get packages from several databases, and stack them
+-- together. This type lets us be explicit about that stacking. For example
+-- typical stacks include:
+--
+-- > [GlobalPackageDB]
+-- > [GlobalPackageDB, UserPackageDB]
+-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"]
+--
+-- Note that the 'GlobalPackageDB' is invariably at the bottom since it
+-- contains the rts, base and other special compiler-specific packages.
+--
+-- We are not restricted to using just the above combinations. In particular
+-- we can use several custom package dbs and the user package db together.
+--
+-- When it comes to writing, the topmost (last) package db is used.
+--
+type PackageDBStack = [PackageDB]
+
+-- | Return the package db that we should register into. This is the package db at
+-- the top of the stack.
+--
+registrationPackageDB :: PackageDBStack -> PackageDB
+registrationPackageDB [] = error "internal error: empty package db set"
+registrationPackageDB dbs = last dbs
+
+-- | Make package paths absolute
+
+
+absolutePackageDBPaths :: PackageDBStack -> IO PackageDBStack
+absolutePackageDBPaths = mapM absolutePackageDBPath
+
+absolutePackageDBPath :: PackageDB -> IO PackageDB
+absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB
+absolutePackageDBPath UserPackageDB = return UserPackageDB
+absolutePackageDBPath (SpecificPackageDB db) =
+ SpecificPackageDB `liftM` canonicalizePath db
+
+-- ------------------------------------------------------------
+-- * Optimisation levels
+-- ------------------------------------------------------------
+
+-- | Some compilers support optimising. Some have different levels.
+-- For compilers that do not the level is just capped to the level
+-- they do support.
+--
+data OptimisationLevel = NoOptimisation
+ | NormalOptimisation
+ | MaximumOptimisation
+ deriving (Bounded, Enum, Eq, Generic, Read, Show)
+
+instance Binary OptimisationLevel
+
+flagToOptimisationLevel :: Maybe String -> OptimisationLevel
+flagToOptimisationLevel Nothing = NormalOptimisation
+flagToOptimisationLevel (Just s) = case reads s of
+ [(i, "")]
+ | i >= fromEnum (minBound :: OptimisationLevel)
+ && i <= fromEnum (maxBound :: OptimisationLevel)
+ -> toEnum i
+ | otherwise -> error $ "Bad optimisation level: " ++ show i
+ ++ ". Valid values are 0..2"
+ _ -> error $ "Can't parse optimisation level " ++ s
+
+-- ------------------------------------------------------------
+-- * Debug info levels
+-- ------------------------------------------------------------
+
+-- | Some compilers support emitting debug info. Some have different
+-- levels. For compilers that do not the level is just capped to the
+-- level they do support.
+--
+data DebugInfoLevel = NoDebugInfo
+ | MinimalDebugInfo
+ | NormalDebugInfo
+ | MaximalDebugInfo
+ deriving (Bounded, Enum, Eq, Generic, Read, Show)
+
+instance Binary DebugInfoLevel
+
+flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel
+flagToDebugInfoLevel Nothing = NormalDebugInfo
+flagToDebugInfoLevel (Just s) = case reads s of
+ [(i, "")]
+ | i >= fromEnum (minBound :: DebugInfoLevel)
+ && i <= fromEnum (maxBound :: DebugInfoLevel)
+ -> toEnum i
+ | otherwise -> error $ "Bad debug info level: " ++ show i
+ ++ ". 
Valid values are 0..3" + _ -> error $ "Can't parse debug info level " ++ s + +-- ------------------------------------------------------------ +-- * Languages and Extensions +-- ------------------------------------------------------------ + +unsupportedLanguages :: Compiler -> [Language] -> [Language] +unsupportedLanguages comp langs = + [ lang | lang <- langs + , isNothing (languageToFlag comp lang) ] + +languageToFlags :: Compiler -> Maybe Language -> [Flag] +languageToFlags comp = filter (not . null) + . catMaybes . map (languageToFlag comp) + . maybe [Haskell98] (\x->[x]) + +languageToFlag :: Compiler -> Language -> Maybe Flag +languageToFlag comp ext = lookup ext (compilerLanguages comp) + + +-- |For the given compiler, return the extensions it does not support. +unsupportedExtensions :: Compiler -> [Extension] -> [Extension] +unsupportedExtensions comp exts = + [ ext | ext <- exts + , isNothing (extensionToFlag comp ext) ] + +type Flag = String + +-- |For the given compiler, return the flags for the supported extensions. +extensionsToFlags :: Compiler -> [Extension] -> [Flag] +extensionsToFlags comp = nub . filter (not . null) + . catMaybes . map (extensionToFlag comp) + +extensionToFlag :: Compiler -> Extension -> Maybe Flag +extensionToFlag comp ext = lookup ext (compilerExtensions comp) + +-- | Does this compiler support parallel --make mode? +parmakeSupported :: Compiler -> Bool +parmakeSupported = ghcSupported "Support parallel --make" + +-- | Does this compiler support reexported-modules? +reexportedModulesSupported :: Compiler -> Bool +reexportedModulesSupported = ghcSupported "Support reexported-modules" + +-- | Does this compiler support thinning/renaming on package flags? +renamingPackageFlagsSupported :: Compiler -> Bool +renamingPackageFlagsSupported = ghcSupported "Support thinning and renaming package flags" + +-- | Does this compiler have unified IPIDs (so no package keys) +unifiedIPIDRequired :: Compiler -> Bool +unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs" + +-- | Does this compiler support package keys? +packageKeySupported :: Compiler -> Bool +packageKeySupported = ghcSupported "Uses package keys" + +-- | Does this compiler support unit IDs? +unitIdSupported :: Compiler -> Bool +unitIdSupported = ghcSupported "Uses unit IDs" + +-- | Does this compiler support a package database entry with: +-- "dynamic-library-dirs"? +libraryDynDirSupported :: Compiler -> Bool +libraryDynDirSupported comp = case compilerFlavor comp of + GHC -> compilerVersion comp >= Version [8,0,1,20161021] [] + _ -> False + +-- | Utility function for GHC only features +ghcSupported :: String -> Compiler -> Bool +ghcSupported key comp = + case compilerFlavor comp of + GHC -> checkProp + GHCJS -> checkProp + _ -> False + where checkProp = + case M.lookup key (compilerProperties comp) of + Just "YES" -> True + _ -> False + +-- ------------------------------------------------------------ +-- * Profiling detail level +-- ------------------------------------------------------------ + +-- | Some compilers (notably GHC) support profiling and can instrument +-- programs so the system can account costs to different functions. There are +-- different levels of detail that can be used for this accounting. +-- For compilers that do not support this notion or the particular detail +-- levels, this is either ignored or just capped to some similar level +-- they do support. 
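+-- For example, using 'flagToProfDetailLevel' and the alias table defined
+-- below (illustrative values only):
+--
+-- > flagToProfDetailLevel ""          -- ProfDetailDefault
+-- > flagToProfDetailLevel "toplevel"  -- ProfDetailToplevelFunctions
+-- > flagToProfDetailLevel "late"      -- ProfDetailOther "late"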
+-- +data ProfDetailLevel = ProfDetailNone + | ProfDetailDefault + | ProfDetailExportedFunctions + | ProfDetailToplevelFunctions + | ProfDetailAllFunctions + | ProfDetailOther String + deriving (Eq, Generic, Read, Show) + +instance Binary ProfDetailLevel + +flagToProfDetailLevel :: String -> ProfDetailLevel +flagToProfDetailLevel "" = ProfDetailDefault +flagToProfDetailLevel s = + case lookup (lowercase s) + [ (name, value) + | (primary, aliases, value) <- knownProfDetailLevels + , name <- primary : aliases ] + of Just value -> value + Nothing -> ProfDetailOther s + +knownProfDetailLevels :: [(String, [String], ProfDetailLevel)] +knownProfDetailLevels = + [ ("default", [], ProfDetailDefault) + , ("none", [], ProfDetailNone) + , ("exported-functions", ["exported"], ProfDetailExportedFunctions) + , ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions) + , ("all-functions", ["all"], ProfDetailAllFunctions) + ] + +showProfDetailLevel :: ProfDetailLevel -> String +showProfDetailLevel dl = case dl of + ProfDetailNone -> "none" + ProfDetailDefault -> "default" + ProfDetailExportedFunctions -> "exported-functions" + ProfDetailToplevelFunctions -> "toplevel-functions" + ProfDetailAllFunctions -> "all-functions" + ProfDetailOther other -> other + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Configure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Configure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Configure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Configure.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,2037 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Configure +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This deals with the /configure/ phase. It provides the 'configure' action +-- which is given the package description and configure flags. It then tries +-- to: configure the compiler; resolves any conditionals in the package +-- description; resolve the package dependencies; check if all the extensions +-- used by this package are supported by the compiler; check that all the build +-- tools are available (including version checks if appropriate); checks for +-- any required @pkg-config@ packages (updating the 'BuildInfo' with the +-- results) +-- +-- Then based on all this it saves the info in the 'LocalBuildInfo' and writes +-- it out to the @dist\/setup-config@ file. It also displays various details to +-- the user, the amount of information displayed depending on the verbosity +-- level. 
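+--
+-- A small usage sketch (illustrative, not part of the upstream text): a
+-- custom @Setup.hs@ hook would typically reload the saved configuration with
+--
+-- > lbi <- getPersistBuildConfig "dist"
+--
+-- which throws a 'ConfigStateFileError' if @dist\/setup-config@ is missing,
+-- unreadable, or was written by an older version of Cabal.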
+ +module Distribution.Simple.Configure (configure, + writePersistBuildConfig, + getConfigStateFile, + getPersistBuildConfig, + checkPersistBuildConfigOutdated, + tryGetPersistBuildConfig, + maybeGetPersistBuildConfig, + findDistPref, findDistPrefOrDefault, + computeComponentId, + computeCompatPackageKey, + localBuildInfoFile, + getInstalledPackages, + getInstalledPackagesMonitorFiles, + getPackageDBContents, + configCompiler, configCompilerAux, + configCompilerEx, configCompilerAuxEx, + computeEffectiveProfiling, + ccLdOptionsBuildInfo, + checkForeignDeps, + interpretPackageDbFlags, + ConfigStateFileError(..), + tryGetConfigStateFile, + platformDefines, + relaxPackageDeps, + ) + where + +import Distribution.Compiler +import Distribution.Utils.NubList +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.PreProcess +import Distribution.Package +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.InstalledPackageInfo (InstalledPackageInfo + ,emptyInstalledPackageInfo) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.ModuleName +import Distribution.PackageDescription.Configuration +import Distribution.PackageDescription.Check hiding (doesFileExist) +import Distribution.Simple.Program +import Distribution.Simple.Setup as Setup +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Version +import Distribution.Verbosity + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.JHC as JHC +import qualified Distribution.Simple.LHC as LHC +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite + +-- Prefer the more generic Data.Traversable.mapM to Prelude.mapM +import Prelude hiding ( mapM ) +import Control.Exception + ( Exception, evaluate, throw, throwIO, try ) +import Control.Exception ( ErrorCall ) +import Control.Monad + ( liftM, when, unless, foldM, filterM, mplus ) +import Distribution.Compat.Binary ( decodeOrFailIO, encode ) +import GHC.Fingerprint ( Fingerprint(..), fingerprintString ) +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy.Char8 as BLC8 +import Data.List + ( (\\), nub, partition, isPrefixOf, inits, stripPrefix ) +import Data.Maybe + ( isNothing, catMaybes, fromMaybe, mapMaybe, isJust ) +import Data.Either + ( partitionEithers ) +import qualified Data.Set as Set +import Data.Monoid as Mon ( Monoid(..) 
) +import qualified Data.Map as Map +import Data.Map (Map) +import Data.Traversable + ( mapM ) +import Data.Typeable +import Data.Char ( chr, isAlphaNum ) +import Numeric ( showIntAtBase ) +import System.Directory + ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) +import System.FilePath + ( (), isAbsolute ) +import qualified System.Info + ( compilerName, compilerVersion ) +import System.IO + ( hPutStrLn, hClose ) +import Distribution.Text + ( Text(disp), defaultStyle, display, simpleParse ) +import Text.PrettyPrint + ( Doc, (<>), (<+>), ($+$), char, comma, empty, hsep, nest + , punctuate, quotes, render, renderStyle, sep, text ) +import Distribution.Compat.Environment ( lookupEnv ) +import Distribution.Compat.Exception ( catchExit, catchIO ) + +-- | The errors that can be thrown when reading the @setup-config@ file. +data ConfigStateFileError + = ConfigStateFileNoHeader -- ^ No header found. + | ConfigStateFileBadHeader -- ^ Incorrect header. + | ConfigStateFileNoParse -- ^ Cannot parse file contents. + | ConfigStateFileMissing -- ^ No file! + | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier + (Either ConfigStateFileError LocalBuildInfo) -- ^ Mismatched version. + deriving (Typeable) + +-- | Format a 'ConfigStateFileError' as a user-facing error message. +dispConfigStateFileError :: ConfigStateFileError -> Doc +dispConfigStateFileError ConfigStateFileNoHeader = + text "Saved package config file header is missing." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileBadHeader = + text "Saved package config file header is corrupt." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileNoParse = + text "Saved package config file is corrupt." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileMissing = + text "Run the 'configure' command first." +dispConfigStateFileError (ConfigStateFileBadVersion oldCabal oldCompiler _) = + text "Saved package config file is outdated:" + $+$ badCabal $+$ badCompiler + $+$ text "Re-run the 'configure' command." + where + badCabal = + text "• the Cabal version changed from" + <+> disp oldCabal <+> "to" <+> disp currentCabalId + badCompiler + | oldCompiler == currentCompilerId = empty + | otherwise = + text "• the compiler changed from" + <+> disp oldCompiler <+> "to" <+> disp currentCompilerId + +instance Show ConfigStateFileError where + show = renderStyle defaultStyle . dispConfigStateFileError + +instance Exception ConfigStateFileError + +-- | Read the 'localBuildInfoFile'. Throw an exception if the file is +-- missing, if the file cannot be read, or if the file was created by an older +-- version of Cabal. +getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. + -> IO LocalBuildInfo +getConfigStateFile filename = do + exists <- doesFileExist filename + unless exists $ throwIO ConfigStateFileMissing + -- Read the config file into a strict ByteString to avoid problems with + -- lazy I/O, then convert to lazy because the binary package needs that. 
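-- Aside: an illustrative helper (hypothetical, not part of this module)
-- showing the strict-read-then-convert pattern used just below, assuming
-- only the @bytestring@ package:
--
-- > import qualified Data.ByteString as BS
-- > import qualified Data.ByteString.Lazy.Char8 as BLC8
-- >
-- > readWithoutLazyIO :: FilePath -> IO BLC8.ByteString
-- > readWithoutLazyIO path = do
-- >   strict <- BS.readFile path          -- file fully read and closed here
-- >   return (BLC8.fromChunks [strict])   -- cheap wrapper, no lazy file handle
--
-- This avoids the dangling file handle that lazy I/O would otherwise leave
-- open, while still producing the lazy 'ByteString' the binary decoder wants.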
+ contents <- BS.readFile filename + let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents]) + + headerParseResult <- try $ evaluate $ parseHeader header + let (cabalId, compId) = + case headerParseResult of + Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader + Right x -> x + + let getStoredValue = do + result <- decodeOrFailIO (BLC8.tail body) + case result of + Left _ -> throw ConfigStateFileNoParse + Right x -> return x + deferErrorIfBadVersion act + | cabalId /= currentCabalId = do + eResult <- try act + throw $ ConfigStateFileBadVersion cabalId compId eResult + | otherwise = act + deferErrorIfBadVersion getStoredValue + +-- | Read the 'localBuildInfoFile', returning either an error or the local build +-- info. +tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. + -> IO (Either ConfigStateFileError LocalBuildInfo) +tryGetConfigStateFile = try . getConfigStateFile + +-- | Try to read the 'localBuildInfoFile'. +tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO (Either ConfigStateFileError LocalBuildInfo) +tryGetPersistBuildConfig = try . getPersistBuildConfig + +-- | Read the 'localBuildInfoFile'. Throw an exception if the file is +-- missing, if the file cannot be read, or if the file was created by an older +-- version of Cabal. +getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO LocalBuildInfo +getPersistBuildConfig = getConfigStateFile . localBuildInfoFile + +-- | Try to read the 'localBuildInfoFile'. +maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO (Maybe LocalBuildInfo) +maybeGetPersistBuildConfig = + liftM (either (const Nothing) Just) . tryGetPersistBuildConfig + +-- | After running configure, output the 'LocalBuildInfo' to the +-- 'localBuildInfoFile'. +writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write. + -> IO () +writePersistBuildConfig distPref lbi = do + createDirectoryIfMissing False distPref + writeFileAtomic (localBuildInfoFile distPref) $ + BLC8.unlines [showHeader pkgId, encode lbi] + where + pkgId = packageId $ localPkgDescr lbi + +-- | Identifier of the current Cabal package. +currentCabalId :: PackageIdentifier +currentCabalId = PackageIdentifier (PackageName "Cabal") cabalVersion + +-- | Identifier of the current compiler package. +currentCompilerId :: PackageIdentifier +currentCompilerId = PackageIdentifier (PackageName System.Info.compilerName) + System.Info.compilerVersion + +-- | Parse the @setup-config@ file header, returning the package identifiers +-- for Cabal and the compiler. +parseHeader :: ByteString -- ^ The file contents. + -> (PackageIdentifier, PackageIdentifier) +parseHeader header = case BLC8.words header of + ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, + "using", compId] -> + fromMaybe (throw ConfigStateFileBadHeader) $ do + _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier + cabalId' <- simpleParse (BLC8.unpack cabalId) + compId' <- simpleParse (BLC8.unpack compId) + return (cabalId', compId') + _ -> throw ConfigStateFileNoHeader + +-- | Generate the @setup-config@ file header. +showHeader :: PackageIdentifier -- ^ The processed package. 
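-- Aside (illustrative, not taken from the code): the header that
-- 'parseHeader' above matches on is a fixed ten-word line.  For a
-- hypothetical package @foo-1.0@ built with this Cabal and GHC it would
-- read roughly:
--
-- > Saved package config for foo-1.0 written by Cabal-1.24.2.0 using ghc-8.0.1
--
-- 'showHeader' below emits exactly this shape.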
+ -> ByteString +showHeader pkgId = BLC8.unwords + [ "Saved", "package", "config", "for" + , BLC8.pack $ display pkgId + , "written", "by" + , BLC8.pack $ display currentCabalId + , "using" + , BLC8.pack $ display currentCompilerId + ] + +-- | Check that localBuildInfoFile is up-to-date with respect to the +-- .cabal file. +checkPersistBuildConfigOutdated :: FilePath -> FilePath -> IO Bool +checkPersistBuildConfigOutdated distPref pkg_descr_file = do + pkg_descr_file `moreRecentFile` (localBuildInfoFile distPref) + +-- | Get the path of @dist\/setup-config@. +localBuildInfoFile :: FilePath -- ^ The @dist@ directory path. + -> FilePath +localBuildInfoFile distPref = distPref "setup-config" + +-- ----------------------------------------------------------------------------- +-- * Configuration +-- ----------------------------------------------------------------------------- + +-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken +-- from (in order of highest to lowest preference) the override prefix, the +-- \"CABAL_BUILDDIR\" environment variable, or the default prefix. +findDistPref :: FilePath -- ^ default \"dist\" prefix + -> Setup.Flag FilePath -- ^ override \"dist\" prefix + -> IO FilePath +findDistPref defDistPref overrideDistPref = do + envDistPref <- liftM parseEnvDistPref (lookupEnv "CABAL_BUILDDIR") + return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref) + where + parseEnvDistPref env = + case env of + Just distPref | not (null distPref) -> toFlag distPref + _ -> NoFlag + +-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken +-- from (in order of highest to lowest preference) the override prefix, the +-- \"CABAL_BUILDDIR\" environment variable, or 'defaultDistPref' is used. Call +-- this function to resolve a @*DistPref@ flag whenever it is not known to be +-- set. (The @*DistPref@ flags are always set to a definite value before +-- invoking 'UserHooks'.) +findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix + -> IO FilePath +findDistPrefOrDefault = findDistPref defaultDistPref + +-- | Compute the effective value of the profiling flags +-- @--enable-library-profiling@ and @--enable-executable-profiling@ +-- from the specified 'ConfigFlags'. This may be useful for +-- external Cabal tools which need to interact with Setup in +-- a backwards-compatible way: the most predictable mechanism +-- for enabling profiling across many legacy versions is to +-- NOT use @--enable-profiling@ and use those two flags instead. +-- +-- Note that @--enable-executable-profiling@ also affects profiling +-- of benchmarks and (non-detailed) test suites. +computeEffectiveProfiling :: ConfigFlags -> (Bool {- lib -}, Bool {- exe -}) +computeEffectiveProfiling cfg = + -- The --profiling flag sets the default for both libs and exes, + -- but can be overidden by --library-profiling, or the old deprecated + -- --executable-profiling flag. + -- + -- The --profiling-detail and --library-profiling-detail flags behave + -- similarly + let profEnabledBoth = fromFlagOrDefault False (configProf cfg) + profEnabledLib = fromFlagOrDefault profEnabledBoth (configProfLib cfg) + profEnabledExe = fromFlagOrDefault profEnabledBoth (configProfExe cfg) + in (profEnabledLib, profEnabledExe) + +-- |Perform the \"@.\/setup configure@\" action. +-- Returns the @.setup-config@ file. 
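-- Aside: two illustrative cases (not from the code itself) for
-- 'computeEffectiveProfiling' above, assuming the 'Monoid' instance of
-- 'ConfigFlags' for the otherwise-empty flag set:
--
-- > computeEffectiveProfiling mempty { configProf = toFlag True }
-- >   == (True, True)    -- profiling enabled for both library and executables
-- >
-- > computeEffectiveProfiling mempty { configProf    = toFlag True
-- >                                  , configProfLib = toFlag False }
-- >   == (False, True)   -- the more specific library flag wins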
+configure :: (GenericPackageDescription, HookedBuildInfo) + -> ConfigFlags -> IO LocalBuildInfo +configure (pkg_descr0', pbi) cfg = do + let pkg_descr0 = + -- Ignore '--allow-newer' when we're given '--exact-configuration'. + if fromFlagOrDefault False (configExactConfiguration cfg) + then pkg_descr0' + else relaxPackageDeps + (fromMaybe AllowNewerNone $ configAllowNewer cfg) + pkg_descr0' + + setupMessage verbosity "Configuring" (packageId pkg_descr0) + + checkDeprecatedFlags verbosity cfg + checkExactConfiguration pkg_descr0 cfg + + -- Where to build the package + let buildDir :: FilePath -- e.g. dist/build + -- fromFlag OK due to Distribution.Simple calling + -- findDistPrefOrDefault to fill it in + buildDir = fromFlag (configDistPref cfg) "build" + createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir + + -- What package database(s) to use + let packageDbs + = interpretPackageDbFlags + (fromFlag (configUserInstall cfg)) + (configPackageDBs cfg) + + -- comp: the compiler we're building with + -- compPlatform: the platform we're building for + -- programsConfig: location and args of all programs we're + -- building with + (comp, compPlatform, programsConfig) + <- configCompilerEx + (flagToMaybe (configHcFlavor cfg)) + (flagToMaybe (configHcPath cfg)) + (flagToMaybe (configHcPkg cfg)) + (mkProgramsConfig cfg (configPrograms cfg)) + (lessVerbose verbosity) + + -- The InstalledPackageIndex of all installed packages + installedPackageSet <- getInstalledPackages (lessVerbose verbosity) comp + packageDbs programsConfig + + -- The InstalledPackageIndex of all (possible) internal packages + let internalPackageSet = getInternalPackages pkg_descr0 + + -- allConstraints: The set of all 'Dependency's we have. Used ONLY + -- to 'configureFinalizedPackage'. + -- requiredDepsMap: A map from 'PackageName' to the specifically + -- required 'InstalledPackageInfo', due to --dependency + -- + -- NB: These constraints are to be applied to ALL components of + -- a package. Thus, it's not an error if allConstraints contains + -- more constraints than is necessary for a component (another + -- component might need it.) + -- + -- NB: The fact that we bundle all the constraints together means + -- that is not possible to configure a test-suite to use one + -- version of a dependency, and the executable to use another. + (allConstraints, requiredDepsMap) <- either die return $ + combinedConstraints (configConstraints cfg) + (configDependencies cfg) + installedPackageSet + + -- pkg_descr: The resolved package description, that does not contain any + -- conditionals, because we have have an assignment for + -- every flag, either picking them ourselves using a + -- simple naive algorithm, or having them be passed to + -- us by 'configConfigurationsFlags') + -- flags: The 'FlagAssignment' that the conditionals were + -- resolved with. + -- + -- NB: Why doesn't finalizing a package also tell us what the + -- dependencies are (e.g. when we run the naive algorithm, + -- we are checking if dependencies are satisfiable)? The + -- primary reason is that we may NOT have done any solving: + -- if the flags are all chosen for us, this step is a simple + -- matter of flattening according to that assignment. It's + -- cleaner to then configure the dependencies afterwards. 
+ (pkg_descr, flags) + <- configureFinalizedPackage verbosity cfg + allConstraints + (dependencySatisfiable + (fromFlagOrDefault False (configExactConfiguration cfg)) + installedPackageSet + internalPackageSet + requiredDepsMap) + comp + compPlatform + pkg_descr0 + + checkCompilerProblems comp pkg_descr + checkPackageProblems verbosity pkg_descr0 + (updatePackageDescription pbi pkg_descr) + + -- The list of 'InstalledPackageInfo' recording the selected + -- dependencies... + -- internalPkgDeps: ...on internal packages (these are fake!) + -- externalPkgDeps: ...on external packages + -- + -- Invariant: For any package name, there is at most one package + -- in externalPackageDeps which has that name. + -- + -- NB: The dependency selection is global over ALL components + -- in the package (similar to how allConstraints and + -- requiredDepsMap are global over all components). In particular, + -- if *any* component (post-flag resolution) has an unsatisfiable + -- dependency, we will fail. This can sometimes be undesirable + -- for users, see #1786 (benchmark conflicts with executable), + (internalPkgDeps, externalPkgDeps) + <- configureDependencies + verbosity + internalPackageSet + installedPackageSet + requiredDepsMap + pkg_descr + + let installDeps = Map.elems -- deduplicate + . Map.fromList + . map (\v -> (Installed.installedUnitId v, v)) + $ externalPkgDeps + + packageDependsIndex <- + case PackageIndex.dependencyClosure installedPackageSet + (map Installed.installedUnitId installDeps) of + Left packageDependsIndex -> return packageDependsIndex + Right broken -> + die $ "The following installed packages are broken because other" + ++ " packages they depend on are missing. These broken " + ++ "packages must be rebuilt before they can be used.\n" + ++ unlines [ "package " + ++ display (packageId pkg) + ++ " is broken due to missing package " + ++ intercalate ", " (map display deps) + | (pkg, deps) <- broken ] + + let pseudoTopPkg = emptyInstalledPackageInfo { + Installed.installedUnitId = + mkLegacyUnitId (packageId pkg_descr), + Installed.sourcePackageId = packageId pkg_descr, + Installed.depends = + map Installed.installedUnitId installDeps + } + case PackageIndex.dependencyInconsistencies + . PackageIndex.insert pseudoTopPkg + $ packageDependsIndex of + [] -> return () + inconsistencies -> + warn verbosity $ + "This package indirectly depends on multiple versions of the same " + ++ "package. 
This is highly likely to cause a compile failure.\n" + ++ unlines [ "package " ++ display pkg ++ " requires " + ++ display (PackageIdentifier name ver) + | (name, uses) <- inconsistencies + , (pkg, ver) <- uses ] + + -- installation directories + defaultDirs <- defaultInstallDirs (compilerFlavor comp) + (fromFlag (configUserInstall cfg)) (hasLibs pkg_descr) + let installDirs = combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs cfg) + + -- check languages and extensions + let langlist = nub $ catMaybes $ map defaultLanguage + (allBuildInfo pkg_descr) + let langs = unsupportedLanguages comp langlist + when (not (null langs)) $ + die $ "The package " ++ display (packageId pkg_descr0) + ++ " requires the following languages which are not " + ++ "supported by " ++ display (compilerId comp) ++ ": " + ++ intercalate ", " (map display langs) + let extlist = nub $ concatMap allExtensions (allBuildInfo pkg_descr) + let exts = unsupportedExtensions comp extlist + when (not (null exts)) $ + die $ "The package " ++ display (packageId pkg_descr0) + ++ " requires the following language extensions which are not " + ++ "supported by " ++ display (compilerId comp) ++ ": " + ++ intercalate ", " (map display exts) + + -- configured known/required programs & external build tools + -- exclude build-tool deps on "internal" exes in the same package + let requiredBuildTools = + [ buildTool + | let exeNames = map exeName (executables pkg_descr) + , bi <- allBuildInfo pkg_descr + , buildTool@(Dependency (PackageName toolName) reqVer) + <- buildTools bi + , let isInternal = + toolName `elem` exeNames + -- we assume all internal build-tools are + -- versioned with the package: + && packageVersion pkg_descr `withinRange` reqVer + , not isInternal ] + + programsConfig' <- + configureAllKnownPrograms (lessVerbose verbosity) programsConfig + >>= configureRequiredPrograms verbosity requiredBuildTools + + (pkg_descr', programsConfig'') <- + configurePkgconfigPackages verbosity pkg_descr programsConfig' + + -- internal component graph + buildComponents <- + case mkComponentsGraph pkg_descr internalPkgDeps of + Left componentCycle -> reportComponentCycle componentCycle + Right comps -> + mkComponentsLocalBuildInfo cfg comp packageDependsIndex pkg_descr + internalPkgDeps externalPkgDeps + comps (configConfigurationsFlags cfg) + + split_objs <- + if not (fromFlag $ configSplitObjs cfg) + then return False + else case compilerFlavor comp of + GHC | compilerVersion comp >= Version [6,5] [] + -> return True + GHCJS + -> return True + _ -> do warn verbosity + ("this compiler does not support " ++ + "--enable-split-objs; ignoring") + return False + + let ghciLibByDefault = + case compilerId comp of + CompilerId GHC _ -> + -- If ghc is non-dynamic, then ghci needs object files, + -- so we build one by default. + -- + -- Technically, archive files should be sufficient for ghci, + -- but because of GHC bug #8942, it has never been safe to + -- rely on them. By the time that bug was fixed, ghci had + -- been changed to read shared libraries instead of archive + -- files (see next code block). + not (GHC.isDynamic comp) + CompilerId GHCJS _ -> + not (GHCJS.isDynamic comp) + _ -> False + + let sharedLibsByDefault + | fromFlag (configDynExe cfg) = + -- build a shared library if dynamically-linked + -- executables are requested + True + | otherwise = case compilerId comp of + CompilerId GHC _ -> + -- if ghc is dynamic, then ghci needs a shared + -- library, so we build one by default. 
+ GHC.isDynamic comp + CompilerId GHCJS _ -> + GHCJS.isDynamic comp + _ -> False + withSharedLib_ = + -- build shared libraries if required by GHC or by the + -- executable linking mode, but allow the user to force + -- building only static library archives with + -- --disable-shared. + fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg + withDynExe_ = fromFlag $ configDynExe cfg + when (withDynExe_ && not withSharedLib_) $ warn verbosity $ + "Executables will use dynamic linking, but a shared library " + ++ "is not being built. Linking will fail if any executables " + ++ "depend on the library." + + let (profEnabledLib, profEnabledExe) = computeEffectiveProfiling cfg + + profDetailLibOnly <- checkProfDetail (configProfLibDetail cfg) + profDetailBoth <- liftM (fromFlagOrDefault ProfDetailDefault) + (checkProfDetail (configProfDetail cfg)) + let profDetailLib = fromFlagOrDefault profDetailBoth profDetailLibOnly + profDetailExe = profDetailBoth + + when (profEnabledExe && not profEnabledLib) $ + warn verbosity $ + "Executables will be built with profiling, but library " + ++ "profiling is disabled. Linking will fail if any executables " + ++ "depend on the library." + + let configCoverage_ = + mappend (configCoverage cfg) (configLibCoverage cfg) + + cfg' = cfg { configCoverage = configCoverage_ } + + reloc <- + if not (fromFlag $ configRelocatable cfg) + then return False + else return True + + let lbi = LocalBuildInfo { + configFlags = cfg', + flagAssignment = flags, + extraConfigArgs = [], -- Currently configure does not + -- take extra args, but if it + -- did they would go here. + installDirTemplates = installDirs, + compiler = comp, + hostPlatform = compPlatform, + buildDir = buildDir, + componentsConfigs = buildComponents, + installedPkgs = packageDependsIndex, + pkgDescrFile = Nothing, + localPkgDescr = pkg_descr', + withPrograms = programsConfig'', + withVanillaLib = fromFlag $ configVanillaLib cfg, + withProfLib = profEnabledLib, + withSharedLib = withSharedLib_, + withDynExe = withDynExe_, + withProfExe = profEnabledExe, + withProfLibDetail = profDetailLib, + withProfExeDetail = profDetailExe, + withOptimization = fromFlag $ configOptimization cfg, + withDebugInfo = fromFlag $ configDebugInfo cfg, + withGHCiLib = fromFlagOrDefault ghciLibByDefault $ + configGHCiLib cfg, + splitObjs = split_objs, + stripExes = fromFlag $ configStripExes cfg, + stripLibs = fromFlag $ configStripLibs cfg, + withPackageDB = packageDbs, + progPrefix = fromFlag $ configProgPrefix cfg, + progSuffix = fromFlag $ configProgSuffix cfg, + relocatable = reloc + } + + when reloc (checkRelocatable verbosity pkg_descr lbi) + + let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest + relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi + + unless (isAbsolute (prefix dirs)) $ die $ + "expected an absolute directory name for --prefix: " ++ prefix dirs + + info verbosity $ "Using " ++ display currentCabalId + ++ " compiled by " ++ display currentCompilerId + info verbosity $ "Using compiler: " ++ showCompilerId comp + info verbosity $ "Using install prefix: " ++ prefix dirs + + let dirinfo name dir isPrefixRelative = + info verbosity $ name ++ " installed in: " ++ dir ++ relNote + where relNote = case buildOS of + Windows | not (hasLibs pkg_descr) + && isNothing isPrefixRelative + -> " (fixed location)" + _ -> "" + + dirinfo "Binaries" (bindir dirs) (bindir relative) + dirinfo "Libraries" (libdir dirs) (libdir relative) + dirinfo "Dynamic libraries" (dynlibdir dirs) (dynlibdir relative) + dirinfo 
"Private binaries" (libexecdir dirs) (libexecdir relative) + dirinfo "Data files" (datadir dirs) (datadir relative) + dirinfo "Documentation" (docdir dirs) (docdir relative) + dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative) + + sequence_ [ reportProgram verbosity prog configuredProg + | (prog, configuredProg) <- knownPrograms programsConfig'' ] + + return lbi + + where + verbosity = fromFlag (configVerbosity cfg) + + checkProfDetail (Flag (ProfDetailOther other)) = do + warn verbosity $ + "Unknown profiling detail level '" ++ other + ++ "', using default.\n" + ++ "The profiling detail levels are: " ++ intercalate ", " + [ name | (name, _, _) <- knownProfDetailLevels ] + return (Flag ProfDetailDefault) + checkProfDetail other = return other + +mkProgramsConfig :: ConfigFlags -> ProgramConfiguration -> ProgramConfiguration +mkProgramsConfig cfg initialProgramsConfig = programsConfig + where + programsConfig = userSpecifyArgss (configProgramArgs cfg) + . userSpecifyPaths (configProgramPaths cfg) + . setProgramSearchPath searchpath + $ initialProgramsConfig + searchpath = getProgramSearchPath (initialProgramsConfig) + ++ map ProgramSearchPathDir + (fromNubList $ configProgramPathExtra cfg) + +-- ----------------------------------------------------------------------------- +-- Helper functions for configure + +-- | Check if the user used any deprecated flags. +checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO () +checkDeprecatedFlags verbosity cfg = do + unless (configProfExe cfg == NoFlag) $ do + let enable | fromFlag (configProfExe cfg) = "enable" + | otherwise = "disable" + warn verbosity + ("The flag --" ++ enable ++ "-executable-profiling is deprecated. " + ++ "Please use --" ++ enable ++ "-profiling instead.") + + unless (configLibCoverage cfg == NoFlag) $ do + let enable | fromFlag (configLibCoverage cfg) = "enable" + | otherwise = "disable" + warn verbosity + ("The flag --" ++ enable ++ "-library-coverage is deprecated. " + ++ "Please use --" ++ enable ++ "-coverage instead.") + +-- | Sanity check: if '--exact-configuration' was given, ensure that the +-- complete flag assignment was specified on the command line. +checkExactConfiguration :: GenericPackageDescription -> ConfigFlags -> IO () +checkExactConfiguration pkg_descr0 cfg = do + when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do + let cmdlineFlags = map fst (configConfigurationsFlags cfg) + allFlags = map flagName . genPackageFlags $ pkg_descr0 + diffFlags = allFlags \\ cmdlineFlags + when (not . null $ diffFlags) $ + die $ "'--exact-configuration' was given, " + ++ "but the following flags were not specified: " + ++ intercalate ", " (map show diffFlags) + +-- | Create a PackageIndex that makes *any libraries that might be* +-- defined internally to this package look like installed packages, in +-- case an executable should refer to any of them as dependencies. +-- +-- It must be *any libraries that might be* defined rather than the +-- actual definitions, because these depend on conditionals in the .cabal +-- file, and we haven't resolved them yet. finalizePackageDescription +-- does the resolution of conditionals, and it takes internalPackageSet +-- as part of its input. +-- +-- Currently a package can define no more than one library (which has +-- the same name as the package) but we could extend this later. 
+-- If we later allowed private internal libraries, then here we would +-- need to pre-scan the conditional data to make a list of all private +-- libraries that could possibly be defined by the .cabal file. +getInternalPackages :: GenericPackageDescription + -> InstalledPackageIndex +getInternalPackages pkg_descr0 = + let pid :: PackageIdentifier -- e.g. foo-0.1 + pid = packageId pkg_descr0 + internalPackage = emptyInstalledPackageInfo { + --TODO: should use a per-compiler method to map the source + -- package ID into an installed package id we can use + -- for the internal package set. The use of + -- mkLegacyUnitId here is a hack. + Installed.installedUnitId = mkLegacyUnitId pid, + Installed.sourcePackageId = pid + } + in PackageIndex.fromList [internalPackage] + + +-- | Returns true if a dependency is satisfiable. This is to be passed +-- to finalizePackageDescription. +dependencySatisfiable + :: Bool + -> InstalledPackageIndex -- ^ installed set + -> InstalledPackageIndex -- ^ internal set + -> Map PackageName InstalledPackageInfo -- ^ required dependencies + -> (Dependency -> Bool) +dependencySatisfiable + exact_config installedPackageSet internalPackageSet requiredDepsMap + d@(Dependency depName _) + | exact_config = + -- When we're given '--exact-configuration', we assume that all + -- dependencies and flags are exactly specified on the command + -- line. Thus we only consult the 'requiredDepsMap'. Note that + -- we're not doing the version range check, so if there's some + -- dependency that wasn't specified on the command line, + -- 'finalizePackageDescription' will fail. + -- + -- TODO: mention '--exact-configuration' in the error message + -- when this fails? + -- + -- (However, note that internal deps don't have to be + -- specified!) + (depName `Map.member` requiredDepsMap) || isInternalDep + + | otherwise = + -- Normal operation: just look up dependency in the combined + -- package index. + not . null . PackageIndex.lookupDependency pkgs $ d + where + pkgs = PackageIndex.merge internalPackageSet installedPackageSet + isInternalDep = not . null + $ PackageIndex.lookupDependency internalPackageSet d + +-- | Relax the dependencies of this package if needed. +relaxPackageDeps :: AllowNewer -> GenericPackageDescription + -> GenericPackageDescription +relaxPackageDeps AllowNewerNone gpd = gpd +relaxPackageDeps AllowNewerAll gpd = transformAllBuildDepends relaxAll gpd + where + relaxAll = \(Dependency pkgName verRange) -> + Dependency pkgName (removeUpperBound verRange) +relaxPackageDeps (AllowNewerSome allowNewerDeps') gpd = + transformAllBuildDepends relaxSome gpd + where + thisPkgName = packageName gpd + allowNewerDeps = mapMaybe f allowNewerDeps' + + f (Setup.AllowNewerDep p) = Just p + f (Setup.AllowNewerDepScoped scope p) | scope == thisPkgName = Just p + | otherwise = Nothing + + relaxSome = \d@(Dependency depName verRange) -> + if depName `elem` allowNewerDeps + then Dependency depName (removeUpperBound verRange) + else d + +-- | Finalize a generic package description. The workhorse is +-- 'finalizePackageDescription' but there's a bit of other nattering +-- about necessary. +-- +-- TODO: what exactly is the business with @flaggedTests@ and +-- @flaggedBenchmarks@? +configureFinalizedPackage + :: Verbosity + -> ConfigFlags + -> [Dependency] + -> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable. + -- Might say it's satisfiable even when not. 
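-- Aside (illustrative, hypothetical dependency): the effect of
-- 'relaxPackageDeps' above is just to drop upper bounds, via
-- 'removeUpperBound' from "Distribution.Version".  A declaration such as
--
-- >   build-depends: bytestring >= 0.10 && < 0.11
--
-- is treated under @--allow-newer@ (or @--allow-newer=bytestring@) as if it
-- had been written
--
-- >   build-depends: bytestring >= 0.10
--
-- while 'AllowNewerNone' leaves the package description untouched.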
+ -> Compiler + -> Platform + -> GenericPackageDescription + -> IO (PackageDescription, FlagAssignment) +configureFinalizedPackage verbosity cfg + allConstraints satisfies comp compPlatform pkg_descr0 = do + let enableTest t = t { testEnabled = fromFlag (configTests cfg) } + flaggedTests = map (\(n, t) -> (n, mapTreeData enableTest t)) + (condTestSuites pkg_descr0) + enableBenchmark bm = bm { benchmarkEnabled = + fromFlag (configBenchmarks cfg) } + flaggedBenchmarks = map (\(n, bm) -> + (n, mapTreeData enableBenchmark bm)) + (condBenchmarks pkg_descr0) + pkg_descr0'' = pkg_descr0 { condTestSuites = flaggedTests + , condBenchmarks = flaggedBenchmarks } + + (pkg_descr0', flags) <- + case finalizePackageDescription + (configConfigurationsFlags cfg) + satisfies + compPlatform + (compilerInfo comp) + allConstraints + pkg_descr0'' + of Right r -> return r + Left missing -> + die $ "Encountered missing dependencies:\n" + ++ (render . nest 4 . sep . punctuate comma + . map (disp . simplifyDependency) + $ missing) + + -- add extra include/lib dirs as specified in cfg + -- we do it here so that those get checked too + let pkg_descr = addExtraIncludeLibDirs pkg_descr0' + + when (not (null flags)) $ + info verbosity $ "Flags chosen: " + ++ intercalate ", " [ name ++ "=" ++ display value + | (FlagName name, value) <- flags ] + + return (pkg_descr, flags) + where + addExtraIncludeLibDirs pkg_descr = + let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg + , extraFrameworkDirs = configExtraFrameworkDirs cfg + , PD.includeDirs = configExtraIncludeDirs cfg} + modifyLib l = l{ libBuildInfo = libBuildInfo l + `mappend` extraBi } + modifyExecutable e = e{ buildInfo = buildInfo e + `mappend` extraBi} + in pkg_descr{ library = modifyLib `fmap` library pkg_descr + , executables = modifyExecutable `map` + executables pkg_descr} + +-- | Check for use of Cabal features which require compiler support +checkCompilerProblems :: Compiler -> PackageDescription -> IO () +checkCompilerProblems comp pkg_descr = do + unless (renamingPackageFlagsSupported comp || + and [ True + | bi <- allBuildInfo pkg_descr + , _ <- Map.elems (targetBuildRenaming bi)]) $ + die $ "Your compiler does not support thinning and renaming on " + ++ "package flags. To use this feature you probably must use " + ++ "GHC 7.9 or later." + + when (maybe False (not.null.PD.reexportedModules) (PD.library pkg_descr) + && not (reexportedModulesSupported comp)) $ do + die $ "Your compiler does not support module re-exports. To use " + ++ "this feature you probably must use GHC 7.9 or later." + +-- | Select dependencies for the package. +configureDependencies + :: Verbosity + -> InstalledPackageIndex -- ^ internal packages + -> InstalledPackageIndex -- ^ installed packages + -> Map PackageName InstalledPackageInfo -- ^ required deps + -> PackageDescription + -> IO ([PackageId], [InstalledPackageInfo]) +configureDependencies verbosity + internalPackageSet installedPackageSet requiredDepsMap pkg_descr = do + let selectDependencies :: [Dependency] -> + ([FailedDependency], [ResolvedDependency]) + selectDependencies = + partitionEithers + . 
map (selectDependency internalPackageSet installedPackageSet + requiredDepsMap) + + (failedDeps, allPkgDeps) = + selectDependencies (buildDepends pkg_descr) + + internalPkgDeps = [ pkgid + | InternalDependency _ pkgid <- allPkgDeps ] + externalPkgDeps = [ pkg + | ExternalDependency _ pkg <- allPkgDeps ] + + when (not (null internalPkgDeps) + && not (newPackageDepsBehaviour pkg_descr)) $ + die $ "The field 'build-depends: " + ++ intercalate ", " (map (display . packageName) internalPkgDeps) + ++ "' refers to a library which is defined within the same " + ++ "package. To use this feature the package must specify at " + ++ "least 'cabal-version: >= 1.8'." + + reportFailedDependencies failedDeps + reportSelectedDependencies verbosity allPkgDeps + + return (internalPkgDeps, externalPkgDeps) + +-- ----------------------------------------------------------------------------- +-- Configuring package dependencies + +reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO () +reportProgram verbosity prog Nothing + = info verbosity $ "No " ++ programName prog ++ " found" +reportProgram verbosity prog (Just configuredProg) + = info verbosity $ "Using " ++ programName prog ++ version ++ location + where location = case programLocation configuredProg of + FoundOnSystem p -> " found on system at: " ++ p + UserSpecified p -> " given by user at: " ++ p + version = case programVersion configuredProg of + Nothing -> "" + Just v -> " version " ++ display v + +hackageUrl :: String +hackageUrl = "http://hackage.haskell.org/package/" + +data ResolvedDependency = ExternalDependency Dependency InstalledPackageInfo + | InternalDependency Dependency PackageId -- should be a + -- lib name + +data FailedDependency = DependencyNotExists PackageName + | DependencyNoVersion Dependency + +-- | Test for a package dependency and record the version we have installed. +selectDependency :: InstalledPackageIndex -- ^ Internally defined packages + -> InstalledPackageIndex -- ^ Installed packages + -> Map PackageName InstalledPackageInfo + -- ^ Packages for which we have been given specific deps to + -- use + -> Dependency + -> Either FailedDependency ResolvedDependency +selectDependency internalIndex installedIndex requiredDepsMap + dep@(Dependency pkgname vr) = + -- If the dependency specification matches anything in the internal package + -- index, then we prefer that match to anything in the second. + -- For example: + -- + -- Name: MyLibrary + -- Version: 0.1 + -- Library + -- .. + -- Executable my-exec + -- build-depends: MyLibrary + -- + -- We want "build-depends: MyLibrary" always to match the internal library + -- even if there is a newer installed library "MyLibrary-0.2". + -- However, "build-depends: MyLibrary >= 0.2" should match the installed one. + case PackageIndex.lookupPackageName internalIndex pkgname of + [(_,[pkg])] | packageVersion pkg `withinRange` vr + -> Right $ InternalDependency dep (packageId pkg) + + _ -> case Map.lookup pkgname requiredDepsMap of + -- If we know the exact pkg to use, then use it. + Just pkginstance -> Right (ExternalDependency dep pkginstance) + -- Otherwise we just pick an arbitrary instance of the latest version. 
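-- Aside: a hypothetical package (not from the Cabal sources) exercising the
-- internal-dependency branch above; the executable depends on the package's
-- own library:
--
-- >   name:          foo
-- >   version:       0.1
-- >   cabal-version: >= 1.8
-- >
-- >   library
-- >     exposed-modules: Foo
-- >     build-depends:   base
-- >
-- >   executable foo-cli
-- >     main-is:       Main.hs
-- >     build-depends: base, foo
--
-- Here @foo@ resolves to the internal library, and 'configureDependencies'
-- earlier in this file additionally insists on @cabal-version: >= 1.8@
-- before accepting such a dependency.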
+ Nothing -> case PackageIndex.lookupDependency installedIndex dep of + [] -> Left $ DependencyNotExists pkgname + pkgs -> Right $ ExternalDependency dep $ + case last pkgs of + (_ver, pkginstances) -> head pkginstances + +reportSelectedDependencies :: Verbosity + -> [ResolvedDependency] -> IO () +reportSelectedDependencies verbosity deps = + info verbosity $ unlines + [ "Dependency " ++ display (simplifyDependency dep) + ++ ": using " ++ display pkgid + | resolved <- deps + , let (dep, pkgid) = case resolved of + ExternalDependency dep' pkg' -> (dep', packageId pkg') + InternalDependency dep' pkgid' -> (dep', pkgid') ] + +reportFailedDependencies :: [FailedDependency] -> IO () +reportFailedDependencies [] = return () +reportFailedDependencies failed = + die (intercalate "\n\n" (map reportFailedDependency failed)) + + where + reportFailedDependency (DependencyNotExists pkgname) = + "there is no version of " ++ display pkgname ++ " installed.\n" + ++ "Perhaps you need to download and install it from\n" + ++ hackageUrl ++ display pkgname ++ "?" + + reportFailedDependency (DependencyNoVersion dep) = + "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n" + +-- | List all installed packages in the given package databases. +getInstalledPackages :: Verbosity -> Compiler + -> PackageDBStack -- ^ The stack of package databases. + -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packageDBs progconf = do + when (null packageDBs) $ + die $ "No package databases have been specified. If you use " + ++ "--package-db=clear, you must follow it with --package-db= " + ++ "with 'global', 'user' or a specific file." + + info verbosity "Reading installed packages..." + case compilerFlavor comp of + GHC -> GHC.getInstalledPackages verbosity comp packageDBs progconf + GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs progconf + JHC -> JHC.getInstalledPackages verbosity packageDBs progconf + LHC -> LHC.getInstalledPackages verbosity packageDBs progconf + UHC -> UHC.getInstalledPackages verbosity comp packageDBs progconf + HaskellSuite {} -> + HaskellSuite.getInstalledPackages verbosity packageDBs progconf + flv -> die $ "don't know how to find the installed packages for " + ++ display flv + +-- | Like 'getInstalledPackages', but for a single package DB. +-- +-- NB: Why isn't this always a fall through to 'getInstalledPackages'? +-- That is because 'getInstalledPackages' performs some sanity checks +-- on the package database stack in question. However, when sandboxes +-- are involved these sanity checks are not desirable. +getPackageDBContents :: Verbosity -> Compiler + -> PackageDB -> ProgramConfiguration + -> IO InstalledPackageIndex +getPackageDBContents verbosity comp packageDB progconf = do + info verbosity "Reading installed packages..." + case compilerFlavor comp of + GHC -> GHC.getPackageDBContents verbosity packageDB progconf + GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progconf + -- For other compilers, try to fall back on 'getInstalledPackages'. + _ -> getInstalledPackages verbosity comp [packageDB] progconf + + +-- | A set of files (or directories) that can be monitored to detect when +-- there might have been a change in the installed packages. 
+-- +getInstalledPackagesMonitorFiles :: Verbosity -> Compiler + -> PackageDBStack + -> ProgramConfiguration -> Platform + -> IO [FilePath] +getInstalledPackagesMonitorFiles verbosity comp packageDBs progconf platform = + case compilerFlavor comp of + GHC -> GHC.getInstalledPackagesMonitorFiles + verbosity platform progconf packageDBs + other -> do + warn verbosity $ "don't know how to find change monitoring files for " + ++ "the installed package databases for " ++ display other + return [] + +-- | The user interface specifies the package dbs to use with a combination of +-- @--global@, @--user@ and @--package-db=global|user|clear|$file@. +-- This function combines the global/user flag and interprets the package-db +-- flag into a single package db stack. +-- +interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack +interpretPackageDbFlags userInstall specificDBs = + extra initialStack specificDBs + where + initialStack | userInstall = [GlobalPackageDB, UserPackageDB] + | otherwise = [GlobalPackageDB] + + extra dbs' [] = dbs' + extra _ (Nothing:dbs) = extra [] dbs + extra dbs' (Just db:dbs) = extra (dbs' ++ [db]) dbs + +newPackageDepsBehaviourMinVersion :: Version +newPackageDepsBehaviourMinVersion = Version [1,7,1] [] + +-- In older cabal versions, there was only one set of package dependencies for +-- the whole package. In this version, we can have separate dependencies per +-- target, but we only enable this behaviour if the minimum cabal version +-- specified is >= a certain minimum. Otherwise, for compatibility we use the +-- old behaviour. +newPackageDepsBehaviour :: PackageDescription -> Bool +newPackageDepsBehaviour pkg = + specVersion pkg >= newPackageDepsBehaviourMinVersion + +-- We are given both --constraint="foo < 2.0" style constraints and also +-- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581". +-- +-- When finalising the package we have to take into account the specific +-- installed deps we've been given, and the finalise function expects +-- constraints, so we have to translate these deps into version constraints. +-- +-- But after finalising we then have to make sure we pick the right specific +-- deps in the end. So we still need to remember which installed packages to +-- pick. +combinedConstraints :: [Dependency] -> + [(PackageName, UnitId)] -> + InstalledPackageIndex -> + Either String ([Dependency], + Map PackageName InstalledPackageInfo) +combinedConstraints constraints dependencies installedPackages = do + + when (not (null badUnitIds)) $ + Left $ render $ text "The following package dependencies were requested" + $+$ nest 4 (dispDependencies badUnitIds) + $+$ text "however the given installed package instance does not exist." + + when (not (null badNames)) $ + Left $ render $ text "The following package dependencies were requested" + $+$ nest 4 (dispDependencies badNames) + $+$ text ("however the installed package's name does not match " + ++ "the name given.") + + --TODO: we don't check that all dependencies are used! 
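-- Aside: some illustrative evaluations (not from the code itself) of
-- 'interpretPackageDbFlags' above, using the 'PackageDB' constructors from
-- "Distribution.Simple.Compiler":
--
-- > interpretPackageDbFlags True  []
-- >   == [GlobalPackageDB, UserPackageDB]
-- >
-- > interpretPackageDbFlags False [Just (SpecificPackageDB "local.db")]
-- >   == [GlobalPackageDB, SpecificPackageDB "local.db"]
-- >
-- > -- 'Nothing' corresponds to @--package-db=clear@ and resets the stack:
-- > interpretPackageDbFlags True  [Nothing, Just UserPackageDB]
-- >   == [UserPackageDB]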
+ + return (allConstraints, idConstraintMap) + + where + allConstraints :: [Dependency] + allConstraints = constraints + ++ [ thisPackageVersion (packageId pkg) + | (_, _, Just pkg) <- dependenciesPkgInfo ] + + idConstraintMap :: Map PackageName InstalledPackageInfo + idConstraintMap = Map.fromList + [ (packageName pkg, pkg) + | (_, _, Just pkg) <- dependenciesPkgInfo ] + + -- The dependencies along with the installed package info, if it exists + dependenciesPkgInfo :: [(PackageName, UnitId, + Maybe InstalledPackageInfo)] + dependenciesPkgInfo = + [ (pkgname, ipkgid, mpkg) + | (pkgname, ipkgid) <- dependencies + , let mpkg = PackageIndex.lookupUnitId + installedPackages ipkgid + ] + + -- If we looked up a package specified by an installed package id + -- (i.e. someone has written a hash) and didn't find it then it's + -- an error. + badUnitIds = + [ (pkgname, ipkgid) + | (pkgname, ipkgid, Nothing) <- dependenciesPkgInfo ] + + -- If someone has written e.g. + -- --dependency="foo=MyOtherLib-1.0-07...5bf30" then they have + -- probably made a mistake. + badNames = + [ (requestedPkgName, ipkgid) + | (requestedPkgName, ipkgid, Just pkg) <- dependenciesPkgInfo + , let foundPkgName = packageName pkg + , requestedPkgName /= foundPkgName ] + + dispDependencies deps = + hsep [ text "--dependency=" + <> quotes (disp pkgname <> char '=' <> disp ipkgid) + | (pkgname, ipkgid) <- deps ] + +-- ----------------------------------------------------------------------------- +-- Configuring program dependencies + +configureRequiredPrograms :: Verbosity -> [Dependency] -> ProgramConfiguration + -> IO ProgramConfiguration +configureRequiredPrograms verbosity deps conf = + foldM (configureRequiredProgram verbosity) conf deps + +configureRequiredProgram :: Verbosity -> ProgramConfiguration -> Dependency + -> IO ProgramConfiguration +configureRequiredProgram verbosity conf + (Dependency (PackageName progName) verRange) = + case lookupKnownProgram progName conf of + Nothing -> die ("Unknown build tool " ++ progName) + Just prog + -- requireProgramVersion always requires the program have a version + -- but if the user says "build-depends: foo" ie no version constraint + -- then we should not fail if we cannot discover the program version. 
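-- Aside (hypothetical @.cabal@ fragment, not from the sources): a field like
--
-- >   build-tools: happy, alex >= 3.1
--
-- arrives here as two 'Dependency' values.  For @happy@, whose range is
-- 'anyVersion', only 'requireProgram' is used below, so a @happy@ whose
-- version cannot be discovered is still accepted; for @alex >= 3.1@ the
-- second guard uses 'requireProgramVersion' and fails if the detected
-- version falls outside the range.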
+ | verRange == anyVersion -> do + (_, conf') <- requireProgram verbosity prog conf + return conf' + | otherwise -> do + (_, _, conf') <- requireProgramVersion verbosity prog verRange conf + return conf' + +-- ----------------------------------------------------------------------------- +-- Configuring pkg-config package dependencies + +configurePkgconfigPackages :: Verbosity -> PackageDescription + -> ProgramConfiguration + -> IO (PackageDescription, ProgramConfiguration) +configurePkgconfigPackages verbosity pkg_descr conf + | null allpkgs = return (pkg_descr, conf) + | otherwise = do + (_, _, conf') <- requireProgramVersion + (lessVerbose verbosity) pkgConfigProgram + (orLaterVersion $ Version [0,9,0] []) conf + mapM_ requirePkg allpkgs + lib' <- mapM addPkgConfigBILib (library pkg_descr) + exes' <- mapM addPkgConfigBIExe (executables pkg_descr) + tests' <- mapM addPkgConfigBITest (testSuites pkg_descr) + benches' <- mapM addPkgConfigBIBench (benchmarks pkg_descr) + let pkg_descr' = pkg_descr { library = lib', executables = exes', + testSuites = tests', benchmarks = benches' } + return (pkg_descr', conf') + + where + allpkgs = concatMap pkgconfigDepends (allBuildInfo pkg_descr) + pkgconfig = rawSystemProgramStdoutConf (lessVerbose verbosity) + pkgConfigProgram conf + + requirePkg dep@(Dependency (PackageName pkg) range) = do + version <- pkgconfig ["--modversion", pkg] + `catchIO` (\_ -> die notFound) + `catchExit` (\_ -> die notFound) + case simpleParse version of + Nothing -> die "parsing output of pkg-config --modversion failed" + Just v | not (withinRange v range) -> die (badVersion v) + | otherwise -> info verbosity (depSatisfied v) + where + notFound = "The pkg-config package '" ++ pkg ++ "'" + ++ versionRequirement + ++ " is required but it could not be found." + badVersion v = "The pkg-config package '" ++ pkg ++ "'" + ++ versionRequirement + ++ " is required but the version installed on the" + ++ " system is version " ++ display v + depSatisfied v = "Dependency " ++ display dep + ++ ": using version " ++ display v + + versionRequirement + | isAnyVersion range = "" + | otherwise = " version " ++ display range + + -- Adds pkgconfig dependencies to the build info for a component + addPkgConfigBI compBI setCompBI comp = do + bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp)) + return $ setCompBI comp (compBI comp `mappend` bi) + + -- Adds pkgconfig dependencies to the build info for a library + addPkgConfigBILib = addPkgConfigBI libBuildInfo $ + \lib bi -> lib { libBuildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for an executable + addPkgConfigBIExe = addPkgConfigBI buildInfo $ + \exe bi -> exe { buildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for a test suite + addPkgConfigBITest = addPkgConfigBI testBuildInfo $ + \test bi -> test { testBuildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for a benchmark + addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $ + \bench bi -> bench { benchmarkBuildInfo = bi } + + pkgconfigBuildInfo :: [Dependency] -> IO BuildInfo + pkgconfigBuildInfo [] = return Mon.mempty + pkgconfigBuildInfo pkgdeps = do + let pkgs = nub [ display pkg | Dependency pkg _ <- pkgdeps ] + ccflags <- pkgconfig ("--cflags" : pkgs) + ldflags <- pkgconfig ("--libs" : pkgs) + return (ccLdOptionsBuildInfo (words ccflags) (words ldflags)) + +-- | Makes a 'BuildInfo' from C compiler and linker flags. 
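-- Aside (hypothetical example, not from the sources): a component declaring
--
-- >   pkgconfig-depends: zlib >= 1.2
--
-- is handled by 'configurePkgconfigPackages' above by first running
-- @pkg-config --modversion zlib@ to check the version requirement, and then
-- @pkg-config --cflags zlib@ and @pkg-config --libs zlib@; the resulting
-- flags are folded into the component's 'BuildInfo' by
-- 'ccLdOptionsBuildInfo', whose documentation continues just below.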
+-- +-- This can be used with the output from configuration programs like pkg-config +-- and similar package-specific programs like mysql-config, freealut-config etc. +-- For example: +-- +-- > ccflags <- rawSystemProgramStdoutConf verbosity prog conf ["--cflags"] +-- > ldflags <- rawSystemProgramStdoutConf verbosity prog conf ["--libs"] +-- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags)) +-- +ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo +ccLdOptionsBuildInfo cflags ldflags = + let (includeDirs', cflags') = partition ("-I" `isPrefixOf`) cflags + (extraLibs', ldflags') = partition ("-l" `isPrefixOf`) ldflags + (extraLibDirs', ldflags'') = partition ("-L" `isPrefixOf`) ldflags' + in mempty { + PD.includeDirs = map (drop 2) includeDirs', + PD.extraLibs = map (drop 2) extraLibs', + PD.extraLibDirs = map (drop 2) extraLibDirs', + PD.ccOptions = cflags', + PD.ldOptions = ldflags'' + } + +-- ----------------------------------------------------------------------------- +-- Determining the compiler details + +configCompilerAuxEx :: ConfigFlags + -> IO (Compiler, Platform, ProgramConfiguration) +configCompilerAuxEx cfg = configCompilerEx (flagToMaybe $ configHcFlavor cfg) + (flagToMaybe $ configHcPath cfg) + (flagToMaybe $ configHcPkg cfg) + programsConfig + (fromFlag (configVerbosity cfg)) + where + programsConfig = mkProgramsConfig cfg defaultProgramConfiguration + +configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> Verbosity + -> IO (Compiler, Platform, ProgramConfiguration) +configCompilerEx Nothing _ _ _ _ = die "Unknown compiler" +configCompilerEx (Just hcFlavor) hcPath hcPkg conf verbosity = do + (comp, maybePlatform, programsConfig) <- case hcFlavor of + GHC -> GHC.configure verbosity hcPath hcPkg conf + GHCJS -> GHCJS.configure verbosity hcPath hcPkg conf + JHC -> JHC.configure verbosity hcPath hcPkg conf + LHC -> do (_, _, ghcConf) <- GHC.configure verbosity Nothing hcPkg conf + LHC.configure verbosity hcPath Nothing ghcConf + UHC -> UHC.configure verbosity hcPath hcPkg conf + HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg conf + _ -> die "Unknown compiler" + return (comp, fromMaybe buildPlatform maybePlatform, programsConfig) + +-- Ideally we would like to not have separate configCompiler* and +-- configCompiler*Ex sets of functions, but there are many custom setup scripts +-- in the wild that are using them, so the versions with old types are kept for +-- backwards compatibility. Platform was added to the return triple in 1.18. + +{-# DEPRECATED configCompiler + "'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-} +configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> Verbosity + -> IO (Compiler, ProgramConfiguration) +configCompiler mFlavor hcPath hcPkg conf verbosity = + fmap (\(a,_,b) -> (a,b)) $ configCompilerEx mFlavor hcPath hcPkg conf verbosity + +{-# DEPRECATED configCompilerAux + "configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-} +configCompilerAux :: ConfigFlags + -> IO (Compiler, ProgramConfiguration) +configCompilerAux = fmap (\(a,_,b) -> (a,b)) . 
configCompilerAuxEx + +-- ----------------------------------------------------------------------------- +-- Making the internal component graph + + +mkComponentsGraph :: PackageDescription + -> [PackageId] + -> Either [ComponentName] + [(Component, [ComponentName])] +mkComponentsGraph pkg_descr internalPkgDeps = + let graph = [ (c, componentName c, componentDeps c) + | c <- pkgEnabledComponents pkg_descr ] + in case checkComponentsCyclic graph of + Just ccycle -> Left [ cname | (_,cname,_) <- ccycle ] + Nothing -> Right [ (c, cdeps) | (c, _, cdeps) <- graph ] + where + -- The dependencies for the given component + componentDeps component = + [ CExeName toolname | Dependency (PackageName toolname) _ + <- buildTools bi + , toolname `elem` map exeName + (executables pkg_descr) ] + + ++ [ CLibName | Dependency pkgname _ <- targetBuildDepends bi + , pkgname `elem` map packageName internalPkgDeps ] + where + bi = componentBuildInfo component + +reportComponentCycle :: [ComponentName] -> IO a +reportComponentCycle cnames = + die $ "Components in the package depend on each other in a cyclic way:\n " + ++ intercalate " depends on " + [ "'" ++ showComponentName cname ++ "'" + | cname <- cnames ++ [head cnames] ] + +-- | This method computes a default, "good enough" 'ComponentId' +-- for a package. The intent is that cabal-install (or the user) will +-- specify a more detailed IPID via the @--ipid@ flag if necessary. +computeComponentId :: PackageIdentifier + -> ComponentName + -- TODO: careful here! + -> [ComponentId] -- IPIDs of the component dependencies + -> FlagAssignment + -> ComponentId +computeComponentId pid cname dep_ipids flagAssignment = do + -- show is found to be faster than intercalate and then replacement of + -- special character used in intercalating. We cannot simply hash by + -- doubly concating list, as it just flatten out the nested list, so + -- different sources can produce same hash + let hash = hashToBase62 $ + -- For safety, include the package + version here + -- for GHC 7.10, where just the hash is used as + -- the package key + (display pid) + ++ (show $ dep_ipids) + ++ show flagAssignment + ComponentId $ + display pid + ++ "-" ++ hash + ++ (case cname of + CLibName -> "" + -- TODO: these could result in non-parseable IPIDs + -- since the component name format is very flexible + CExeName s -> "-" ++ s ++ ".exe" + CTestName s -> "-" ++ s ++ ".test" + CBenchName s -> "-" ++ s ++ ".bench") + +hashToBase62 :: String -> String +hashToBase62 s = showFingerprint $ fingerprintString s + where + showIntAtBase62 x = showIntAtBase 62 representBase62 x "" + representBase62 x + | x < 10 = chr (48 + x) + | x < 36 = chr (65 + x - 10) + | x < 62 = chr (97 + x - 36) + | otherwise = '@' + showFingerprint (Fingerprint a b) = showIntAtBase62 a ++ showIntAtBase62 b + +-- | In GHC 8.0, the string we pass to GHC to use for symbol +-- names for a package can be an arbitrary, IPID-compatible string. +-- However, prior to GHC 8.0 there are some restrictions on what +-- format this string can be (due to how ghc-pkg parsed the key): +-- +-- 1. In GHC 7.10, the string had either be of the form +-- foo_ABCD, where foo is a non-semantic alphanumeric/hyphenated +-- prefix and ABCD is two base-64 encoded 64-bit integers, +-- or a GHC 7.8 style identifier. +-- +-- 2. In GHC 7.8, the string had to be a valid package identifier +-- like foo-0.1. 
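-- Aside (illustrative, with a made-up hash): for a hypothetical package
-- @foo-0.1@, 'computeComponentId' above yields component ids of the shape
--
-- >   foo-0.1-<hash>            -- the library
-- >   foo-0.1-<hash>-bar.exe    -- an executable named "bar"
--
-- where @<hash>@ is the two base-62 encoded fingerprint words produced by
-- 'hashToBase62', covering the package id, the component's dependency ids
-- and the flag assignment.  The discussion below is about fitting ids of
-- this shape into the formats that older GHC releases accept.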
+-- +-- So, the problem is that Cabal, in general, has a general IPID, +-- but needs to figure out a package key / package ID that the +-- old ghc-pkg will actually accept. But there's an EVERY WORSE +-- problem: if ghc-pkg decides to parse an identifier foo-0.1-xxx +-- as if it were a package identifier, which means it will SILENTLY +-- DROP the "xxx" (because it's a tag, and Cabal does not allow tags.) +-- So we must CONNIVE to ensure that we don't pick something that +-- looks like this. +-- +-- So this function attempts to define a mapping into the old formats. +-- +-- The mapping for GHC 7.8 and before: +-- +-- * For CLibName, we unconditionally use the 'PackageIdentifier'. +-- +-- * For sub-components, we create a new 'PackageIdentifier' which +-- is encoded in the following way. The test suite "qux" in package +-- "foobar-0.2" gets this package identifier "z-foobar-z-test-qux-0.2". +-- These package IDs have the form: +-- +-- cpid ::= "z-" package-id "-z-" component-type "-" component-name +-- component-type ::= "test" | "bench" | "exe" | "lib" +-- package-id and component-name have "-" ( "z" + ) "-" +-- segments encoded by adding an extra "z". +-- +-- The mapping for GHC 7.10: +-- +-- * For CLibName: +-- If the IPID is of the form foo-0.1-ABCDEF where foo_ABCDEF would +-- validly parse as a package key, we pass "ABCDEF". (NB: not +-- all hashes parse this way, because GHC 7.10 mandated that +-- these hashes be two base-62 encoded 64 bit integers), +-- but hashes that Cabal generated using 'computeComponentId' +-- are guaranteed to have this form. +-- +-- If it is not of this form, we rehash the IPID into the +-- correct form and pass that. +-- +-- * For sub-components, we rehash the IPID into the correct format +-- and pass that. +-- +computeCompatPackageKey + :: Compiler + -> PackageIdentifier + -> ComponentName + -> UnitId + -> (PackageName, String) +computeCompatPackageKey comp pid cname uid@(SimpleUnitId (ComponentId str)) + | not (packageKeySupported comp || unitIdSupported comp) = + -- NB: the package ID in the database entry has to follow this + let zdashcode s = go s (Nothing :: Maybe Int) [] + where go [] _ r = reverse r + go ('-':z) (Just n) r | n > 0 = go z (Just 0) ('-':'z':r) + go ('-':z) _ r = go z (Just 0) ('-':r) + go ('z':z) (Just n) r = go z (Just (n+1)) ('z':r) + go (c:z) _ r = go z Nothing (c:r) + cname_str = case cname of + CLibName -> error "computeCompatPackageKey" + CTestName n -> "-z-test-" ++ zdashcode n + CBenchName n -> "-z-bench-" ++ zdashcode n + CExeName n -> "-z-exe-" ++ zdashcode n + package_name + | cname == CLibName = pkgName pid + | otherwise = PackageName $ "z-" + ++ zdashcode (display (pkgName pid)) + ++ zdashcode cname_str + old_style_key + | cname == CLibName = display pid + | otherwise = display package_name ++ "-" + ++ display (pkgVersion pid) + in (package_name, old_style_key) + | not (unifiedIPIDRequired comp) = + let mb_verbatim_key + = case simpleParse str :: Maybe PackageId of + -- Something like 'foo-0.1', use it verbatim. + -- (NB: hash tags look like tags, so they are parsed, + -- so the extra equality check tests if a tag was dropped.) 
+ Just pid0 | display pid0 == str -> Just str + _ -> Nothing + mb_truncated_key + = let cand = reverse (takeWhile isAlphaNum (reverse str)) + in if length cand == 22 && all isAlphaNum cand + then Just cand + else Nothing + rehashed_key = hashToBase62 str + in (pkgName pid, fromMaybe rehashed_key + (mb_verbatim_key `mplus` mb_truncated_key)) + | otherwise = (pkgName pid, display uid) + +mkComponentsLocalBuildInfo :: ConfigFlags + -> Compiler + -> InstalledPackageIndex + -> PackageDescription + -> [PackageId] -- internal package deps + -> [InstalledPackageInfo] -- external package deps + -> [(Component, [ComponentName])] + -> FlagAssignment + -> IO [(ComponentName, ComponentLocalBuildInfo, + [ComponentName])] +mkComponentsLocalBuildInfo cfg comp installedPackages pkg_descr + internalPkgDeps externalPkgDeps + graph flagAssignment = do + -- Pre-compute library hash so we can setup internal deps + -- TODO configIPID should have name changed + let cid = case configIPID cfg of + Flag cid0 -> + -- Hack to reuse install dirs machinery + -- NB: no real IPID available at this point + let env = packageTemplateEnv (package pkg_descr) + (mkUnitId "") + str = fromPathTemplate + (InstallDirs.substPathTemplate env + (toPathTemplate cid0)) + in ComponentId str + _ -> + computeComponentId (package pkg_descr) CLibName + (getDeps CLibName) flagAssignment + uid = SimpleUnitId cid + (_, compat_key) = computeCompatPackageKey comp + (package pkg_descr) CLibName uid + sequence + [ do clbi <- componentLocalBuildInfo uid compat_key c + return (componentName c, clbi, cdeps) + | (c, cdeps) <- graph ] + where + getDeps cname = + let externalPkgs = maybe [] (\lib -> selectSubset + (componentBuildInfo lib) + externalPkgDeps) + (lookupComponent pkg_descr cname) + in map Installed.installedComponentId externalPkgs + + -- The allPkgDeps contains all the package deps for the whole package + -- but we need to select the subset for this specific component. + -- we just take the subset for the package names this component + -- needs. Note, this only works because we cannot yet depend on two + -- versions of the same package. + componentLocalBuildInfo uid compat_key component = + case component of + CLib lib -> do + let exports = map (\n -> Installed.ExposedModule n Nothing) + (PD.exposedModules lib) + let mb_reexports = resolveModuleReexports installedPackages + (packageId pkg_descr) + uid + externalPkgDeps lib + reexports <- case mb_reexports of + Left problems -> reportModuleReexportProblems problems + Right r -> return r + + return LibComponentLocalBuildInfo { + componentPackageDeps = cpds, + componentUnitId = uid, + componentCompatPackageKey = compat_key, + componentPackageRenaming = cprns, + componentExposedModules = exports ++ reexports + } + CExe _ -> + return ExeComponentLocalBuildInfo { + componentPackageDeps = cpds, + componentPackageRenaming = cprns + } + CTest _ -> + return TestComponentLocalBuildInfo { + componentPackageDeps = cpds, + componentPackageRenaming = cprns + } + CBench _ -> + return BenchComponentLocalBuildInfo { + componentPackageDeps = cpds, + componentPackageRenaming = cprns + } + where + bi = componentBuildInfo component + dedup = Map.toList . 
Map.fromList + cpds = if newPackageDepsBehaviour pkg_descr + then dedup $ + [ (Installed.installedUnitId pkg, packageId pkg) + | pkg <- selectSubset bi externalPkgDeps ] + ++ [ (uid, pkgid) + | pkgid <- selectSubset bi internalPkgDeps ] + else [ (Installed.installedUnitId pkg, packageId pkg) + | pkg <- externalPkgDeps ] + cprns = if newPackageDepsBehaviour pkg_descr + then targetBuildRenaming bi + -- Hack: if we have old package-deps behavior, it's impossible + -- for non-default renamings to be used, because the Cabal + -- version is too early. This is a good, because while all the + -- deps were bundled up in buildDepends, we didn't do this for + -- renamings, so it's not even clear how to get the merged + -- version. So just assume that all of them are the default.. + else Map.fromList (map (\(_,pid) -> + (packageName pid, defaultRenaming)) cpds) + + selectSubset :: Package pkg => BuildInfo -> [pkg] -> [pkg] + selectSubset bi pkgs = + [ pkg | pkg <- pkgs, packageName pkg `elem` names bi ] + + names bi = [ name | Dependency name _ <- targetBuildDepends bi ] + +-- | Given the author-specified re-export declarations from the .cabal file, +-- resolve them to the form that we need for the package database. +-- +-- An invariant of the package database is that we always link the re-export +-- directly to its original defining location (rather than indirectly via a +-- chain of re-exporting packages). +-- +resolveModuleReexports :: InstalledPackageIndex + -> PackageId + -> UnitId + -> [InstalledPackageInfo] + -> Library + -> Either [(ModuleReexport, String)] -- errors + [Installed.ExposedModule] -- ok +resolveModuleReexports installedPackages srcpkgid key externalPkgDeps lib = + case partitionEithers + (map resolveModuleReexport (PD.reexportedModules lib)) of + ([], ok) -> Right ok + (errs, _) -> Left errs + where + -- A mapping from visible module names to their original defining + -- module name. We also record the package name of the package which + -- *immediately* provided the module (not the original) to handle if the + -- user explicitly says which build-depends they want to reexport from. + visibleModules :: Map ModuleName [(PackageName, Installed.ExposedModule)] + visibleModules = + Map.fromListWith (++) $ + [ (Installed.exposedName exposedModule, [(exportingPackageName, + exposedModule)]) + -- The package index here contains all the indirect deps of the + -- package we're configuring, but we want just the direct deps + | let directDeps = Set.fromList + (map Installed.installedUnitId externalPkgDeps) + , pkg <- PackageIndex.allPackages installedPackages + , Installed.installedUnitId pkg `Set.member` directDeps + , let exportingPackageName = packageName pkg + , exposedModule <- visibleModuleDetails pkg + ] + ++ [ (visibleModuleName, [(exportingPackageName, exposedModule)]) + | visibleModuleName <- PD.exposedModules lib + ++ otherModules (libBuildInfo lib) + , let exportingPackageName = packageName srcpkgid + definingModuleName = visibleModuleName + definingPackageId = key + originalModule = Installed.OriginalModule definingPackageId + definingModuleName + exposedModule = Installed.ExposedModule visibleModuleName + (Just originalModule) + ] + + -- All the modules exported from this package and their defining name and + -- package (either defined here in this package or re-exported from some + -- other package). Return an ExposedModule because we want to hold onto + -- signature information. 
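Before the helpers below, here is a minimal self-contained sketch of the resolution rule the comments above describe: every visible module maps to the location that originally defines it, and a re-export either resolves to that original location, fails because nothing exports the name, or is rejected as ambiguous. The types are simplified stand-ins, not Cabal's ModuleReexport or ExposedModule API.

import           Data.Map (Map)
import qualified Data.Map as Map

type PkgName = String
type ModName = String

-- Original defining location of a module (simplified).
data Origin = Origin { origUnit :: String, origMod :: ModName }
  deriving (Eq, Show)

-- Visible modules, keyed by name, each with the package that *immediately*
-- provides it and the original location it ultimately resolves to.
type Visible = Map ModName [(PkgName, Origin)]

-- Resolve "package:Original as New", always pointing the result at the
-- original defining location (the package-database invariant noted above).
resolveReexport :: Visible -> Maybe PkgName -> ModName -> ModName
                -> Either String (ModName, Origin)
resolveReexport visible mbPkg orig new =
  case filter fromWantedPackage (Map.findWithDefault [] orig visible) of
    []            -> Left ("no package in scope exports a module " ++ orig)
    [(_, origin)] -> Right (new, origin)
    ms | allSame (map snd ms) -> Right (new, snd (head ms))
       | otherwise            -> Left (orig ++ " is ambiguous; qualify it by package name")
  where
    fromWantedPackage (p, _) = maybe True (== p) mbPkg
    allSame xs = and (zipWith (==) xs (drop 1 xs))

main :: IO ()
main = do
  let visible = Map.fromList
        [ ("Data.Thing", [ ("thing-lib", Origin "thing-lib-1.0-abc" "Data.Thing") ]) ]
  print (resolveReexport visible Nothing        "Data.Thing" "MyPrelude.Thing")
  print (resolveReexport visible (Just "other") "Data.Thing" "MyPrelude.Thing")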
+ visibleModuleDetails :: InstalledPackageInfo -> [Installed.ExposedModule] + visibleModuleDetails pkg = do + exposedModule <- Installed.exposedModules pkg + case Installed.exposedReexport exposedModule of + -- The first case is the modules actually defined in this package. + -- In this case the reexport will point to this package. + Nothing -> return exposedModule { + Installed.exposedReexport = + Just (Installed.OriginalModule + (Installed.installedUnitId pkg) + (Installed.exposedName exposedModule)) } + -- On the other hand, a visible module might actually be itself + -- a re-export! In this case, the re-export info for the package + -- doing the re-export will point us to the original defining + -- module name and package, so we can reuse the entry. + Just _ -> return exposedModule + + resolveModuleReexport reexport@ModuleReexport { + moduleReexportOriginalPackage = moriginalPackageName, + moduleReexportOriginalName = originalName, + moduleReexportName = newName + } = + + let filterForSpecificPackage = + case moriginalPackageName of + Nothing -> id + Just originalPackageName -> + filter (\(pkgname, _) -> pkgname == originalPackageName) + + matches = filterForSpecificPackage + (Map.findWithDefault [] originalName visibleModules) + in + case (matches, moriginalPackageName) of + ((_, exposedModule):rest, _) + -- TODO: Refine this check for signatures + | all (\(_, exposedModule') -> + Installed.exposedReexport exposedModule + == Installed.exposedReexport exposedModule') rest + -> Right exposedModule { Installed.exposedName = newName } + + ([], Just originalPackageName) + -> Left $ (,) reexport + $ "The package " ++ display originalPackageName + ++ " does not export a module " ++ display originalName + + ([], Nothing) + -> Left $ (,) reexport + $ "The module " ++ display originalName + ++ " is not exported by any suitable package (this package " + ++ "itself nor any of its 'build-depends' dependencies)." + + (ms, _) + -> Left $ (,) reexport + $ "The module " ++ display originalName ++ " is exported " + ++ "by more than one package (" + ++ intercalate ", " [ display pkgname | (pkgname,_) <- ms ] + ++ ") and so the re-export is ambiguous. The ambiguity can " + ++ "be resolved by qualifying by the package name. The " + ++ "syntax is 'packagename:moduleName [as newname]'." + + -- Note: if in future Cabal allows directly depending on multiple + -- instances of the same package (e.g. backpack) then an additional + -- ambiguity case is possible here: (_, Just originalPackageName) + -- with the module being ambiguous despite being qualified by a + -- package name. Presumably by that time we'll have a mechanism to + -- qualify the instance we're referring to. + +reportModuleReexportProblems :: [(ModuleReexport, String)] -> IO a +reportModuleReexportProblems reexportProblems = + die $ unlines + [ "Problem with the module re-export '" ++ display reexport ++ "': " ++ msg + | (reexport, msg) <- reexportProblems ] + +-- ----------------------------------------------------------------------------- +-- Testing C lib and header dependencies + +-- Try to build a test C program which includes every header and links every +-- lib. If that fails, try to narrow it down by preprocessing (only) and linking +-- with individual headers and libs. If none is the obvious culprit then give a +-- generic error message. +-- TODO: produce a log file from the compiler errors, if any. 
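As a rough illustration of the probing strategy described above, this standalone sketch writes a stub C file that includes a set of headers and asks the C compiler to build and link it with the given extra arguments. It shells out to a gcc assumed to be on PATH via System.Process rather than going through Cabal's configured program database, and the zlib names in main are only an example.

import Control.Exception (SomeException, try)
import System.Directory (getTemporaryDirectory, removeFile)
import System.Exit (ExitCode (..))
import System.IO (hClose, hPutStr, openTempFile)
import System.Process (readProcessWithExitCode)

-- Write a stub C program including the given headers and check whether gcc
-- accepts it together with the given arguments (e.g. "-c" to stop before
-- linking, or "-lfoo" to test that a library resolves).
buildsWith :: [String] -> [String] -> IO Bool
buildsWith headers args = do
  tmp <- getTemporaryDirectory
  (cFile, h) <- openTempFile tmp "check.c"
  hPutStr h $ unlines $
    [ "#include \"" ++ hdr ++ "\"" | hdr <- headers ] ++
    [ "int main(int argc, char** argv) { return 0; }" ]
  hClose h
  let outFile = cFile ++ ".out"
  result <- try (readProcessWithExitCode "gcc" (cFile : "-o" : outFile : args) "")
              :: IO (Either SomeException (ExitCode, String, String))
  mapM_ (\f -> try (removeFile f) :: IO (Either SomeException ())) [cFile, outFile]
  return $ case result of
    Right (ExitSuccess, _, _) -> True
    _                         -> False

main :: IO ()
main = do
  ok <- buildsWith ["zlib.h"] ["-lz"]
  putStrLn $ if ok then "zlib header and library found"
                   else "missing zlib header or library"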
+checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO () +checkForeignDeps pkg lbi verbosity = do + ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling + -- lucky + (return ()) + (do missingLibs <- findMissingLibs + missingHdr <- findOffendingHdr + explainErrors missingHdr missingLibs) + where + allHeaders = collectField PD.includes + allLibs = collectField PD.extraLibs + + ifBuildsWith headers args success failure = do + ok <- builds (makeProgram headers) args + if ok then success else failure + + findOffendingHdr = + ifBuildsWith allHeaders ccArgs + (return Nothing) + (go . tail . inits $ allHeaders) + where + go [] = return Nothing -- cannot happen + go (hdrs:hdrsInits) = + -- Try just preprocessing first + ifBuildsWith hdrs cppArgs + -- If that works, try compiling too + (ifBuildsWith hdrs ccArgs + (go hdrsInits) + (return . Just . Right . last $ hdrs)) + (return . Just . Left . last $ hdrs) + + cppArgs = "-E":commonCppArgs -- preprocess only + ccArgs = "-c":commonCcArgs -- don't try to link + + findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs) + (return []) + (filterM (fmap not . libExists) allLibs) + + libExists lib = builds (makeProgram []) (makeLdArgs [lib]) + + commonCppArgs = platformDefines lbi + ++ [ "-I" ++ autogenModulesDir lbi ] + ++ [ "-I" ++ dir | dir <- collectField PD.includeDirs ] + ++ ["-I."] + ++ collectField PD.cppOptions + ++ collectField PD.ccOptions + ++ [ "-I" ++ dir + | dep <- deps + , dir <- Installed.includeDirs dep ] + ++ [ opt + | dep <- deps + , opt <- Installed.ccOptions dep ] + + commonCcArgs = commonCppArgs + ++ collectField PD.ccOptions + ++ [ opt + | dep <- deps + , opt <- Installed.ccOptions dep ] + + commonLdArgs = [ "-L" ++ dir | dir <- collectField PD.extraLibDirs ] + ++ collectField PD.ldOptions + ++ [ "-L" ++ dir + | dep <- deps + , dir <- Installed.libraryDirs dep ] + --TODO: do we also need dependent packages' ld options? + makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs + + makeProgram hdrs = unlines $ + [ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++ + ["int main(int argc, char** argv) { return 0; }"] + + collectField f = concatMap f allBi + allBi = allBuildInfo pkg + deps = PackageIndex.topologicalOrder (installedPkgs lbi) + + builds program args = do + tempDir <- getTemporaryDirectory + withTempFile tempDir ".c" $ \cName cHnd -> + withTempFile tempDir "" $ \oNname oHnd -> do + hPutStrLn cHnd program + hClose cHnd + hClose oHnd + _ <- rawSystemProgramStdoutConf verbosity + gccProgram (withPrograms lbi) (cName:"-o":oNname:args) + return True + `catchIO` (\_ -> return False) + `catchExit` (\_ -> return False) + + explainErrors Nothing [] = return () -- should be impossible! + explainErrors _ _ + | isNothing . lookupProgram gccProgram . withPrograms $ lbi + + = die $ unlines $ + [ "No working gcc", + "This package depends on foreign library but we cannot " + ++ "find a working C compiler. If you have it in a " + ++ "non-standard location you can use the --with-gcc " + ++ "flag to specify it." 
] + + explainErrors hdr libs = die $ unlines $ + [ if plural + then "Missing dependencies on foreign libraries:" + else "Missing dependency on a foreign library:" + | missing ] + ++ case hdr of + Just (Left h) -> ["* Missing (or bad) header file: " ++ h ] + _ -> [] + ++ case libs of + [] -> [] + [lib] -> ["* Missing C library: " ++ lib] + _ -> ["* Missing C libraries: " ++ intercalate ", " libs] + ++ [if plural then messagePlural else messageSingular | missing] + ++ case hdr of + Just (Left _) -> [ headerCppMessage ] + Just (Right h) -> [ (if missing then "* " else "") + ++ "Bad header file: " ++ h + , headerCcMessage ] + _ -> [] + + where + plural = length libs >= 2 + -- Is there something missing? (as opposed to broken) + missing = not (null libs) + || case hdr of Just (Left _) -> True; _ -> False + + messageSingular = + "This problem can usually be solved by installing the system " + ++ "package that provides this library (you may need the " + ++ "\"-dev\" version). If the library is already installed " + ++ "but in a non-standard location then you can use the flags " + ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " + ++ "where it is." + messagePlural = + "This problem can usually be solved by installing the system " + ++ "packages that provide these libraries (you may need the " + ++ "\"-dev\" versions). If the libraries are already installed " + ++ "but in a non-standard location then you can use the flags " + ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " + ++ "where they are." + headerCppMessage = + "If the header file does exist, it may contain errors that " + ++ "are caught by the C compiler at the preprocessing stage. " + ++ "In this case you can re-run configure with the verbosity " + ++ "flag -v3 to see the error messages." + headerCcMessage = + "The header file contains a compile error. " + ++ "You can re-run configure with the verbosity flag " + ++ "-v3 to see the error messages from the C compiler." + +-- | Output package check warnings and errors. Exit if any errors. +checkPackageProblems :: Verbosity + -> GenericPackageDescription + -> PackageDescription + -> IO () +checkPackageProblems verbosity gpkg pkg = do + ioChecks <- checkPackageFiles pkg "." + let pureChecks = checkPackage gpkg (Just pkg) + errors = [ e | PackageBuildImpossible e <- pureChecks ++ ioChecks ] + warnings = [ w | PackageBuildWarning w <- pureChecks ++ ioChecks ] + if null errors + then mapM_ (warn verbosity) warnings + else die (intercalate "\n\n" errors) + +-- | Preform checks if a relocatable build is allowed +checkRelocatable :: Verbosity + -> PackageDescription + -> LocalBuildInfo + -> IO () +checkRelocatable verbosity pkg lbi + = sequence_ [ checkOS + , checkCompiler + , packagePrefixRelative + , depsPrefixRelative + ] + where + -- Check if the OS support relocatable builds. 
+ -- + -- If you add new OS' to this list, and your OS supports dynamic libraries + -- and RPATH, make sure you add your OS to RPATH-support list of: + -- Distribution.Simple.GHC.getRPaths + checkOS + = unless (os `elem` [ OSX, Linux ]) + $ die $ "Operating system: " ++ display os ++ + ", does not support relocatable builds" + where + (Platform _ os) = hostPlatform lbi + + -- Check if the Compiler support relocatable builds + checkCompiler + = unless (compilerFlavor comp `elem` [ GHC ]) + $ die $ "Compiler: " ++ show comp ++ + ", does not support relocatable builds" + where + comp = compiler lbi + + -- Check if all the install dirs are relative to same prefix + packagePrefixRelative + = unless (relativeInstallDirs installDirs) + $ die $ "Installation directories are not prefix_relative:\n" ++ + show installDirs + where + installDirs = absoluteInstallDirs pkg lbi NoCopyDest + p = prefix installDirs + relativeInstallDirs (InstallDirs {..}) = + all isJust + (fmap (stripPrefix p) + [ bindir, libdir, dynlibdir, libexecdir, includedir, datadir + , docdir, mandir, htmldir, haddockdir, sysconfdir] ) + + -- Check if the library dirs of the dependencies that are in the package + -- database to which the package is installed are relative to the + -- prefix of the package + depsPrefixRelative = do + pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi)) + mapM_ (doCheck pkgr) ipkgs + where + doCheck pkgr ipkg + | maybe False (== pkgr) (Installed.pkgRoot ipkg) + = mapM_ (\l -> when (isNothing $ stripPrefix p l) (die (msg l))) + (Installed.libraryDirs ipkg) + | otherwise + = return () + installDirs = absoluteInstallDirs pkg lbi NoCopyDest + p = prefix installDirs + ipkgs = PackageIndex.allPackages (installedPkgs lbi) + msg l = "Library directory of a dependency: " ++ show l ++ + "\nis not relative to the installation prefix:\n" ++ + show p diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/ImplInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/ImplInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/ImplInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/ImplInfo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,109 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.ImplInfo +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains the data structure describing invocation +-- details for a GHC or GHC-derived compiler, such as supported flags +-- and workarounds for bugs. + +module Distribution.Simple.GHC.ImplInfo ( + GhcImplInfo(..), getImplInfo, + ghcVersionImplInfo, ghcjsVersionImplInfo, lhcVersionImplInfo + ) where + +import Distribution.Simple.Compiler +import Distribution.Version + +{- | + Information about features and quirks of a GHC-based implementation. + + Compiler flavors based on GHC behave similarly enough that some of + the support code for them is shared. Every implementation has its + own peculiarities, that may or may not be a direct result of the + underlying GHC version. This record keeps track of these differences. + + All shared code (i.e. everything not in the Distribution.Simple.FLAVOR + module) should use implementation info rather than version numbers + to test for supported features. 
+-} + +data GhcImplInfo = GhcImplInfo + { hasCcOdirBug :: Bool -- ^ bug in -odir handling for C compilations. + , flagInfoLanguages :: Bool -- ^ --info and --supported-languages flags + , fakeRecordPuns :: Bool -- ^ use -XRecordPuns for NamedFieldPuns + , flagStubdir :: Bool -- ^ -stubdir flag supported + , flagOutputDir :: Bool -- ^ -outputdir flag supported + , noExtInSplitSuffix :: Bool -- ^ split-obj suffix does not contain p_o ext + , flagFfiIncludes :: Bool -- ^ -#include on command line for FFI includes + , flagBuildingCabalPkg :: Bool -- ^ -fbuilding-cabal-package flag supported + , flagPackageId :: Bool -- ^ -package-id / -package flags supported + , separateGccMingw :: Bool -- ^ mingw and gcc are in separate directories + , supportsHaskell2010 :: Bool -- ^ -XHaskell2010 and -XHaskell98 flags + , reportsNoExt :: Bool -- ^ --supported-languages gives Ext and NoExt + , alwaysNondecIndent :: Bool -- ^ NondecreasingIndentation is always on + , flagGhciScript :: Bool -- ^ -ghci-script flag supported + , flagProfAuto :: Bool -- ^ new style -fprof-auto* flags + , flagPackageConf :: Bool -- ^ use package-conf instead of package-db + , flagDebugInfo :: Bool -- ^ -g flag supported + } + +getImplInfo :: Compiler -> GhcImplInfo +getImplInfo comp = + case compilerFlavor comp of + GHC -> ghcVersionImplInfo (compilerVersion comp) + LHC -> lhcVersionImplInfo (compilerVersion comp) + GHCJS -> case compilerCompatVersion GHC comp of + Just ghcVer -> ghcjsVersionImplInfo (compilerVersion comp) ghcVer + _ -> error ("Distribution.Simple.GHC.Props.getImplProps: " ++ + "could not find GHC version for GHCJS compiler") + x -> error ("Distribution.Simple.GHC.Props.getImplProps only works" ++ + "for GHC-like compilers (GHC, GHCJS, LHC)" ++ + ", but found " ++ show x) + +ghcVersionImplInfo :: Version -> GhcImplInfo +ghcVersionImplInfo (Version v _) = GhcImplInfo + { hasCcOdirBug = v < [6,4,1] + , flagInfoLanguages = v >= [6,7] + , fakeRecordPuns = v >= [6,8] && v < [6,10] + , flagStubdir = v >= [6,8] + , flagOutputDir = v >= [6,10] + , noExtInSplitSuffix = v < [6,11] + , flagFfiIncludes = v < [6,11] + , flagBuildingCabalPkg = v >= [6,11] + , flagPackageId = v > [6,11] + , separateGccMingw = v < [6,12] + , supportsHaskell2010 = v >= [7] + , reportsNoExt = v >= [7] + , alwaysNondecIndent = v < [7,1] + , flagGhciScript = v >= [7,2] + , flagProfAuto = v >= [7,4] + , flagPackageConf = v < [7,5] + , flagDebugInfo = v >= [7,10] + } + +ghcjsVersionImplInfo :: Version -> Version -> GhcImplInfo +ghcjsVersionImplInfo _ghcjsVer _ghcVer = GhcImplInfo + { hasCcOdirBug = False + , flagInfoLanguages = True + , fakeRecordPuns = False + , flagStubdir = True + , flagOutputDir = True + , noExtInSplitSuffix = False + , flagFfiIncludes = False + , flagBuildingCabalPkg = True + , flagPackageId = True + , separateGccMingw = False + , supportsHaskell2010 = True + , reportsNoExt = True + , alwaysNondecIndent = False + , flagGhciScript = True + , flagProfAuto = True + , flagPackageConf = False + , flagDebugInfo = False + } + +lhcVersionImplInfo :: Version -> GhcImplInfo +lhcVersionImplInfo = ghcVersionImplInfo diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/Internal.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,535 @@ +{-# LANGUAGE PatternGuards #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.Internal +-- Copyright : Isaac Jones 2003-2007 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains functions shared by GHC (Distribution.Simple.GHC) +-- and GHC-derived compilers. + +module Distribution.Simple.GHC.Internal ( + configureToolchain, + getLanguages, + getExtensions, + targetPlatform, + getGhcInfo, + componentCcGhcOptions, + componentGhcOptions, + mkGHCiLibName, + filterGhciFlags, + ghcLookupProperty, + getHaskellObjects, + mkGhcOptPackages, + substTopDir, + checkPackageDbEnvVar, + profDetailLevelFlag, + showArchString, + showOsString, + ) where + +import Distribution.Simple.GHC.ImplInfo +import Distribution.Package +import Distribution.InstalledPackageInfo +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Compat.Exception +import Distribution.Lex +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup +import qualified Distribution.ModuleName as ModuleName +import Distribution.Simple.Program +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Utils +import Distribution.Simple.BuildPaths +import Distribution.System +import Distribution.Text ( display, simpleParse ) +import Distribution.Utils.NubList ( toNubListR ) +import Distribution.Verbosity +import Language.Haskell.Extension + +import qualified Data.Map as M +import Data.Char ( isSpace ) +import Data.Maybe ( fromMaybe, maybeToList, isJust ) +import Control.Monad ( unless, when ) +import Data.Monoid as Mon ( Monoid(..) ) +import System.Directory ( getDirectoryContents, getTemporaryDirectory ) +import System.Environment ( getEnv ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, takeFileName) +import System.IO ( hClose, hPutStrLn ) + +targetPlatform :: [(String, String)] -> Maybe Platform +targetPlatform ghcInfo = platformFromTriple =<< lookup "Target platform" ghcInfo + +-- | Adjust the way we find and configure gcc and ld +-- +configureToolchain :: GhcImplInfo + -> ConfiguredProgram + -> M.Map String String + -> ProgramConfiguration + -> ProgramConfiguration +configureToolchain implInfo ghcProg ghcInfo = + addKnownProgram gccProgram { + programFindLocation = findProg gccProgramName extraGccPath, + programPostConf = configureGcc + } + . addKnownProgram ldProgram { + programFindLocation = findProg ldProgramName extraLdPath, + programPostConf = configureLd + } + . addKnownProgram arProgram { + programFindLocation = findProg arProgramName extraArPath + } + . addKnownProgram stripProgram { + programFindLocation = findProg stripProgramName extraStripPath + } + where + compilerDir = takeDirectory (programPath ghcProg) + baseDir = takeDirectory compilerDir + mingwBinDir = baseDir "mingw" "bin" + libDir = baseDir "gcc-lib" + includeDir = baseDir "include" "mingw" + isWindows = case buildOS of Windows -> True; _ -> False + binPrefix = "" + + maybeName :: Program -> Maybe FilePath -> String + maybeName prog = maybe (programName prog) (dropExeExtension . 
takeFileName) + + gccProgramName = maybeName gccProgram mbGccLocation + ldProgramName = maybeName ldProgram mbLdLocation + arProgramName = maybeName arProgram mbArLocation + stripProgramName = maybeName stripProgram mbStripLocation + + mkExtraPath :: Maybe FilePath -> FilePath -> [FilePath] + mkExtraPath mbPath mingwPath | isWindows = mbDir ++ [mingwPath] + | otherwise = mbDir + where + mbDir = maybeToList . fmap takeDirectory $ mbPath + + extraGccPath = mkExtraPath mbGccLocation windowsExtraGccDir + extraLdPath = mkExtraPath mbLdLocation windowsExtraLdDir + extraArPath = mkExtraPath mbArLocation windowsExtraArDir + extraStripPath = mkExtraPath mbStripLocation windowsExtraStripDir + + -- on Windows finding and configuring ghc's gcc & binutils is a bit special + (windowsExtraGccDir, windowsExtraLdDir, + windowsExtraArDir, windowsExtraStripDir) + | separateGccMingw implInfo = (baseDir, libDir, libDir, libDir) + | otherwise = -- GHC >= 6.12 + let b = mingwBinDir binPrefix + in (b, b, b, b) + + findProg :: String -> [FilePath] + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) + findProg progName extraPath v searchpath = + findProgramOnSearchPath v searchpath' progName + where + searchpath' = (map ProgramSearchPathDir extraPath) ++ searchpath + + -- Read tool locations from the 'ghc --info' output. Useful when + -- cross-compiling. + mbGccLocation = M.lookup "C compiler command" ghcInfo + mbLdLocation = M.lookup "ld command" ghcInfo + mbArLocation = M.lookup "ar command" ghcInfo + mbStripLocation = M.lookup "strip command" ghcInfo + + ccFlags = getFlags "C compiler flags" + -- GHC 7.8 renamed "Gcc Linker flags" to "C compiler link flags" + -- and "Ld Linker flags" to "ld flags" (GHC #4862). + gccLinkerFlags = getFlags "Gcc Linker flags" ++ getFlags "C compiler link flags" + ldLinkerFlags = getFlags "Ld Linker flags" ++ getFlags "ld flags" + + -- It appears that GHC 7.6 and earlier encode the tokenized flags as a + -- [String] in these settings whereas later versions just encode the flags as + -- String. + -- + -- We first try to parse as a [String] and if this fails then tokenize the + -- flags ourself. 
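The dual encoding described just above can be handled by trying Read first and falling back to tokenization. The sketch below does exactly that, but with plain words as the fallback; the real code uses tokenizeQuotedWords from Distribution.Lex, which also understands quoting.

import Data.Maybe (fromMaybe, listToMaybe)

-- A settings value is either a showed [String] (GHC <= 7.6) or a plain,
-- space-separated String (later GHCs); accept both.
parseFlagsField :: String -> [String]
parseFlagsField raw =
  fromMaybe (words raw) $
    listToMaybe [ flags | (flags, "") <- reads raw ]

main :: IO ()
main = do
  print (parseFlagsField "[\"-fno-stack-protector\",\"-D_GNU_SOURCE\"]")
  print (parseFlagsField "-fno-stack-protector -D_GNU_SOURCE")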
+ getFlags :: String -> [String] + getFlags key = + case M.lookup key ghcInfo of + Nothing -> [] + Just flags + | (flags', ""):_ <- reads flags -> flags' + | otherwise -> tokenizeQuotedWords flags + + configureGcc :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureGcc v gccProg = do + gccProg' <- configureGcc' v gccProg + return gccProg' { + programDefaultArgs = programDefaultArgs gccProg' + ++ ccFlags ++ gccLinkerFlags + } + + configureGcc' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureGcc' + | isWindows = \_ gccProg -> case programLocation gccProg of + -- if it's found on system then it means we're using the result + -- of programFindLocation above rather than a user-supplied path + -- Pre GHC 6.12, that meant we should add these flags to tell + -- ghc's gcc where it lives and thus where gcc can find its + -- various files: + FoundOnSystem {} + | separateGccMingw implInfo -> + return gccProg { programDefaultArgs = ["-B" ++ libDir, + "-I" ++ includeDir] } + _ -> return gccProg + | otherwise = \_ gccProg -> return gccProg + + configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureLd v ldProg = do + ldProg' <- configureLd' v ldProg + return ldProg' { + programDefaultArgs = programDefaultArgs ldProg' ++ ldLinkerFlags + } + + -- we need to find out if ld supports the -x flag + configureLd' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureLd' verbosity ldProg = do + tempDir <- getTemporaryDirectory + ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> + withTempFile tempDir ".o" $ \testofile testohnd -> do + hPutStrLn testchnd "int foo() { return 0; }" + hClose testchnd; hClose testohnd + rawSystemProgram verbosity ghcProg + [ "-hide-all-packages" + , "-c", testcfile + , "-o", testofile + ] + withTempFile tempDir ".o" $ \testofile' testohnd' -> + do + hClose testohnd' + _ <- rawSystemProgramStdout verbosity ldProg + ["-x", "-r", testofile, "-o", testofile'] + return True + `catchIO` (\_ -> return False) + `catchExit` (\_ -> return False) + if ldx + then return ldProg { programDefaultArgs = ["-x"] } + else return ldProg + +getLanguages :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> IO [(Language, String)] +getLanguages _ implInfo _ + -- TODO: should be using --supported-languages rather than hard coding + | supportsHaskell2010 implInfo = return [(Haskell98, "-XHaskell98") + ,(Haskell2010, "-XHaskell2010")] + | otherwise = return [(Haskell98, "")] + +getGhcInfo :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> IO [(String, String)] +getGhcInfo verbosity implInfo ghcProg + | flagInfoLanguages implInfo = do + xs <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) + ["--info"] + case reads xs of + [(i, ss)] + | all isSpace ss -> + return i + _ -> + die "Can't parse --info output of GHC" + | otherwise = + return [] + +getExtensions :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> IO [(Extension, String)] +getExtensions verbosity implInfo ghcProg + | flagInfoLanguages implInfo = do + str <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) + ["--supported-languages"] + let extStrs = if reportsNoExt implInfo + then lines str + else -- Older GHCs only gave us either Foo or NoFoo, + -- so we have to work out the other one ourselves + [ extStr'' + | extStr <- lines str + , let extStr' = case extStr of + 'N' : 'o' : xs -> xs + _ -> "No" ++ extStr + , extStr'' <- [extStr, extStr'] + ] + let extensions0 = [ (ext, "-X" ++ display ext) + | Just ext <- map simpleParse extStrs ] + 
extensions1 = if fakeRecordPuns implInfo + then -- ghc-6.8 introduced RecordPuns however it + -- should have been NamedFieldPuns. We now + -- encourage packages to use NamedFieldPuns + -- so for compatibility we fake support for + -- it in ghc-6.8 by making it an alias for + -- the old RecordPuns extension. + (EnableExtension NamedFieldPuns, "-XRecordPuns") : + (DisableExtension NamedFieldPuns, "-XNoRecordPuns") : + extensions0 + else extensions0 + extensions2 = if alwaysNondecIndent implInfo + then -- ghc-7.2 split NondecreasingIndentation off + -- into a proper extension. Before that it + -- was always on. + (EnableExtension NondecreasingIndentation, "") : + (DisableExtension NondecreasingIndentation, "") : + extensions1 + else extensions1 + return extensions2 + + | otherwise = return oldLanguageExtensions + +-- | For GHC 6.6.x and earlier, the mapping from supported extensions to flags +oldLanguageExtensions :: [(Extension, String)] +oldLanguageExtensions = + let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), + (DisableExtension f, disable)] + fglasgowExts = ("-fglasgow-exts", + "") -- This is wrong, but we don't want to turn + -- all the extensions off when asked to just + -- turn one off + fFlag flag = ("-f" ++ flag, "-fno-" ++ flag) + in concatMap doFlag + [(OverlappingInstances , fFlag "allow-overlapping-instances") + ,(TypeSynonymInstances , fglasgowExts) + ,(TemplateHaskell , fFlag "th") + ,(ForeignFunctionInterface , fFlag "ffi") + ,(MonomorphismRestriction , fFlag "monomorphism-restriction") + ,(MonoPatBinds , fFlag "mono-pat-binds") + ,(UndecidableInstances , fFlag "allow-undecidable-instances") + ,(IncoherentInstances , fFlag "allow-incoherent-instances") + ,(Arrows , fFlag "arrows") + ,(Generics , fFlag "generics") + ,(ImplicitPrelude , fFlag "implicit-prelude") + ,(ImplicitParams , fFlag "implicit-params") + ,(CPP , ("-cpp", ""{- Wrong -})) + ,(BangPatterns , fFlag "bang-patterns") + ,(KindSignatures , fglasgowExts) + ,(RecursiveDo , fglasgowExts) + ,(ParallelListComp , fglasgowExts) + ,(MultiParamTypeClasses , fglasgowExts) + ,(FunctionalDependencies , fglasgowExts) + ,(Rank2Types , fglasgowExts) + ,(RankNTypes , fglasgowExts) + ,(PolymorphicComponents , fglasgowExts) + ,(ExistentialQuantification , fglasgowExts) + ,(ScopedTypeVariables , fFlag "scoped-type-variables") + ,(FlexibleContexts , fglasgowExts) + ,(FlexibleInstances , fglasgowExts) + ,(EmptyDataDecls , fglasgowExts) + ,(PatternGuards , fglasgowExts) + ,(GeneralizedNewtypeDeriving , fglasgowExts) + ,(MagicHash , fglasgowExts) + ,(UnicodeSyntax , fglasgowExts) + ,(PatternSignatures , fglasgowExts) + ,(UnliftedFFITypes , fglasgowExts) + ,(LiberalTypeSynonyms , fglasgowExts) + ,(TypeOperators , fglasgowExts) + ,(GADTs , fglasgowExts) + ,(RelaxedPolyRec , fglasgowExts) + ,(ExtendedDefaultRules , fFlag "extended-default-rules") + ,(UnboxedTuples , fglasgowExts) + ,(DeriveDataTypeable , fglasgowExts) + ,(ConstrainedClassMethods , fglasgowExts) + ] + +componentCcGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath + -> GhcOptions +componentCcGhcOptions verbosity implInfo lbi bi clbi pref filename = + mempty { + ghcOptVerbosity = toFlag verbosity, + ghcOptMode = toFlag GhcModeCompile, + ghcOptInputFiles = toNubListR [filename], + + ghcOptCppIncludePath = toNubListR $ [autogenModulesDir lbi, odir] + ++ PD.includeDirs bi, + ghcOptHideAllPackages= toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ 
mkGhcOptPackages clbi, + ghcOptCcOptions = toNubListR $ + (case withOptimization lbi of + NoOptimisation -> [] + _ -> ["-O2"]) ++ + (case withDebugInfo lbi of + NoDebugInfo -> [] + MinimalDebugInfo -> ["-g1"] + NormalDebugInfo -> ["-g"] + MaximalDebugInfo -> ["-g3"]) ++ + PD.ccOptions bi, + ghcOptObjDir = toFlag odir + } + where + odir | hasCcOdirBug implInfo = pref takeDirectory filename + | otherwise = pref + -- ghc 6.4.0 had a bug in -odir handling for C compilations. + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + mempty { + ghcOptVerbosity = toFlag verbosity, + ghcOptCabal = toFlag True, + ghcOptThisUnitId = case clbi of + LibComponentLocalBuildInfo { componentCompatPackageKey = pk } + -> toFlag pk + _ -> Mon.mempty, + ghcOptHideAllPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, + ghcOptSplitObjs = toFlag (splitObjs lbi), + ghcOptSourcePathClear = toFlag True, + ghcOptSourcePath = toNubListR $ [odir] ++ (hsSourceDirs bi) + ++ [autogenModulesDir lbi], + ghcOptCppIncludePath = toNubListR $ [autogenModulesDir lbi, odir] + ++ PD.includeDirs bi, + ghcOptCppOptions = toNubListR $ cppOptions bi, + ghcOptCppIncludes = toNubListR $ + [autogenModulesDir lbi cppHeaderName], + ghcOptFfiIncludes = toNubListR $ PD.includes bi, + ghcOptObjDir = toFlag odir, + ghcOptHiDir = toFlag odir, + ghcOptStubDir = toFlag odir, + ghcOptOutputDir = toFlag odir, + ghcOptOptimisation = toGhcOptimisation (withOptimization lbi), + ghcOptDebugInfo = toGhcDebugInfo (withDebugInfo lbi), + ghcOptExtra = toNubListR $ hcOptions GHC bi, + ghcOptLanguage = toFlag (fromMaybe Haskell98 (defaultLanguage bi)), + -- Unsupported extensions have already been checked by configure + ghcOptExtensions = toNubListR $ usedExtensions bi, + ghcOptExtensionMap = M.fromList . compilerExtensions $ (compiler lbi) + } + where + toGhcOptimisation NoOptimisation = mempty --TODO perhaps override? + toGhcOptimisation NormalOptimisation = toFlag GhcNormalOptimisation + toGhcOptimisation MaximumOptimisation = toFlag GhcMaximumOptimisation + + -- GHC doesn't support debug info levels yet. + toGhcDebugInfo NoDebugInfo = mempty + toGhcDebugInfo MinimalDebugInfo = toFlag True + toGhcDebugInfo NormalDebugInfo = toFlag True + toGhcDebugInfo MaximalDebugInfo = toFlag True + +-- | Strip out flags that are not supported in ghci +filterGhciFlags :: [String] -> [String] +filterGhciFlags = filter supported + where + supported ('-':'O':_) = False + supported "-debug" = False + supported "-threaded" = False + supported "-ticky" = False + supported "-eventlog" = False + supported "-prof" = False + supported "-unreg" = False + supported _ = True + +mkGHCiLibName :: UnitId -> String +mkGHCiLibName lib = getHSLibraryName lib <.> "o" + +ghcLookupProperty :: String -> Compiler -> Bool +ghcLookupProperty prop comp = + case M.lookup prop (compilerProperties comp) of + Just "YES" -> True + _ -> False + +-- when using -split-objs, we need to search for object files in the +-- Module_split directory for each module. 
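The directory layout mentioned in the comment above is simple enough to sketch with plain strings: each module's split objects live in a sibling <Module>_<ext>_split directory, or just <Module>_split on very old GHCs (the noExtInSplitSuffix case). The helper below only illustrates that layout; Cabal itself goes through ModuleName.toFilePath and System.FilePath, and the paths here assume '/' separators.

-- Compute the split-objects directory for a module, mirroring the layout
-- that getHaskellObjects searches below (illustrative only).
splitObjDir :: Bool      -- ^ old-style suffix without the object extension?
            -> FilePath  -- ^ build prefix, e.g. dist/build
            -> String    -- ^ wanted object extension, e.g. "o"
            -> String    -- ^ module name, e.g. "Data.Foo"
            -> FilePath
splitObjDir noExtSuffix pref objExt modName =
  pref ++ "/" ++ moduleToPath modName ++ suffix
  where
    moduleToPath = map (\c -> if c == '.' then '/' else c)
    suffix | noExtSuffix = "_split"
           | otherwise   = "_" ++ objExt ++ "_split"

main :: IO ()
main = mapM_ putStrLn
  [ splitObjDir False "dist/build" "o"   "Data.Foo"          -- dist/build/Data/Foo_o_split
  , splitObjDir False "dist/build" "p_o" "Data.Foo"          -- profiling objects
  , splitObjDir True  "dist/build" "o"   "Data.Foo.Internal" -- old GHC: .../Internal_split
  ]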
+getHaskellObjects :: GhcImplInfo -> Library -> LocalBuildInfo + -> FilePath -> String -> Bool -> IO [FilePath] +getHaskellObjects implInfo lib lbi pref wanted_obj_ext allow_split_objs + | splitObjs lbi && allow_split_objs = do + let splitSuffix = if noExtInSplitSuffix implInfo + then "_split" + else "_" ++ wanted_obj_ext ++ "_split" + dirs = [ pref (ModuleName.toFilePath x ++ splitSuffix) + | x <- libModules lib ] + objss <- mapM getDirectoryContents dirs + let objs = [ dir obj + | (objs',dir) <- zip objss dirs, obj <- objs', + let obj_ext = takeExtension obj, + '.':wanted_obj_ext == obj_ext ] + return objs + | otherwise = + return [ pref ModuleName.toFilePath x <.> wanted_obj_ext + | x <- libModules lib ] + +mkGhcOptPackages :: ComponentLocalBuildInfo + -> [(UnitId, PackageId, ModuleRenaming)] +mkGhcOptPackages clbi = + map (\(i,p) -> (i,p,lookupRenaming p (componentPackageRenaming clbi))) + (componentPackageDeps clbi) + +substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo +substTopDir topDir ipo + = ipo { + InstalledPackageInfo.importDirs + = map f (InstalledPackageInfo.importDirs ipo), + InstalledPackageInfo.libraryDirs + = map f (InstalledPackageInfo.libraryDirs ipo), + InstalledPackageInfo.includeDirs + = map f (InstalledPackageInfo.includeDirs ipo), + InstalledPackageInfo.frameworkDirs + = map f (InstalledPackageInfo.frameworkDirs ipo), + InstalledPackageInfo.haddockInterfaces + = map f (InstalledPackageInfo.haddockInterfaces ipo), + InstalledPackageInfo.haddockHTMLs + = map f (InstalledPackageInfo.haddockHTMLs ipo) + } + where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest + f x = x + +-- Cabal does not use the environment variable GHC{,JS}_PACKAGE_PATH; let +-- users know that this is the case. See ticket #335. Simply ignoring it is +-- not a good idea, since then ghc and cabal are looking at different sets +-- of package DBs and chaos is likely to ensue. +-- +-- An exception to this is when running cabal from within a `cabal exec` +-- environment. In this case, `cabal exec` will set the +-- CABAL_SANDBOX_PACKAGE_PATH to the same value that it set +-- GHC{,JS}_PACKAGE_PATH to. If that is the case it is OK to allow +-- GHC{,JS}_PACKAGE_PATH. +checkPackageDbEnvVar :: String -> String -> IO () +checkPackageDbEnvVar compilerName packagePathEnvVar = do + mPP <- lookupEnv packagePathEnvVar + when (isJust mPP) $ do + mcsPP <- lookupEnv "CABAL_SANDBOX_PACKAGE_PATH" + unless (mPP == mcsPP) abort + where + lookupEnv :: String -> IO (Maybe String) + lookupEnv name = (Just `fmap` getEnv name) + `catchIO` const (return Nothing) + abort = + die $ "Use of " ++ compilerName ++ "'s environment variable " + ++ packagePathEnvVar ++ " is incompatible with Cabal. Use the " + ++ "flag --package-db to specify a package database (it can be " + ++ "used multiple times)." + +profDetailLevelFlag :: Bool -> ProfDetailLevel -> Flag GhcProfAuto +profDetailLevelFlag forLib mpl = + case mpl of + ProfDetailNone -> mempty + ProfDetailDefault | forLib -> toFlag GhcProfAutoExported + | otherwise -> toFlag GhcProfAutoToplevel + ProfDetailExportedFunctions -> toFlag GhcProfAutoExported + ProfDetailToplevelFunctions -> toFlag GhcProfAutoToplevel + ProfDetailAllFunctions -> toFlag GhcProfAutoAll + ProfDetailOther _ -> mempty + +-- | GHC's rendering of it's host or target 'Arch' as used in its platform +-- strings and certain file locations (such as user package db location). 
+-- +showArchString :: Arch -> String +showArchString PPC = "powerpc" +showArchString PPC64 = "powerpc64" +showArchString other = display other + +-- | GHC's rendering of it's host or target 'OS' as used in its platform +-- strings and certain file locations (such as user package db location). +-- +showOsString :: OS -> String +showOsString Windows = "mingw32" +showOsString OSX = "darwin" +showOsString Solaris = "solaris2" +showOsString other = display other diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPI642.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPI642.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPI642.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPI642.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,102 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.IPI642 +-- Copyright : (c) The University of Glasgow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- + +module Distribution.Simple.GHC.IPI642 ( + InstalledPackageInfo(..), + toCurrent, + ) where + +import qualified Distribution.InstalledPackageInfo as Current +import qualified Distribution.Package as Current hiding (installedUnitId) +import Distribution.Simple.GHC.IPIConvert + +-- | This is the InstalledPackageInfo type used by ghc-6.4.2 and later. +-- +-- It's here purely for the 'Read' instance so that we can read the package +-- database used by those ghc versions. It is a little hacky to read the +-- package db directly, but we do need the info and until ghc-6.9 there was +-- no better method. +-- +-- In ghc-6.4.1 and before the format was slightly different. +-- See "Distribution.Simple.GHC.IPI642" +-- +data InstalledPackageInfo = InstalledPackageInfo { + package :: PackageIdentifier, + license :: License, + copyright :: String, + maintainer :: String, + author :: String, + stability :: String, + homepage :: String, + pkgUrl :: String, + description :: String, + category :: String, + exposed :: Bool, + exposedModules :: [String], + hiddenModules :: [String], + importDirs :: [FilePath], + libraryDirs :: [FilePath], + hsLibraries :: [String], + extraLibraries :: [String], + extraGHCiLibraries:: [String], + includeDirs :: [FilePath], + includes :: [String], + depends :: [PackageIdentifier], + hugsOptions :: [String], + ccOptions :: [String], + ldOptions :: [String], + frameworkDirs :: [FilePath], + frameworks :: [String], + haddockInterfaces :: [FilePath], + haddockHTMLs :: [FilePath] + } + deriving Read + +toCurrent :: InstalledPackageInfo -> Current.InstalledPackageInfo +toCurrent ipi@InstalledPackageInfo{} = + let pid = convertPackageId (package ipi) + mkExposedModule m = Current.ExposedModule m Nothing + in Current.InstalledPackageInfo { + Current.sourcePackageId = pid, + Current.installedUnitId = Current.mkLegacyUnitId pid, + Current.compatPackageKey = "", + Current.abiHash = Current.AbiHash "", -- bogus but old GHCs don't care. 
+ Current.license = convertLicense (license ipi), + Current.copyright = copyright ipi, + Current.maintainer = maintainer ipi, + Current.author = author ipi, + Current.stability = stability ipi, + Current.homepage = homepage ipi, + Current.pkgUrl = pkgUrl ipi, + Current.synopsis = "", + Current.description = description ipi, + Current.category = category ipi, + Current.exposed = exposed ipi, + Current.exposedModules = map (mkExposedModule . convertModuleName) (exposedModules ipi), + Current.hiddenModules = map convertModuleName (hiddenModules ipi), + Current.trusted = Current.trusted Current.emptyInstalledPackageInfo, + Current.importDirs = importDirs ipi, + Current.libraryDirs = libraryDirs ipi, + Current.libraryDynDirs = [], + Current.dataDir = "", + Current.hsLibraries = hsLibraries ipi, + Current.extraLibraries = extraLibraries ipi, + Current.extraGHCiLibraries = extraGHCiLibraries ipi, + Current.includeDirs = includeDirs ipi, + Current.includes = includes ipi, + Current.depends = map (Current.mkLegacyUnitId . convertPackageId) (depends ipi), + Current.ccOptions = ccOptions ipi, + Current.ldOptions = ldOptions ipi, + Current.frameworkDirs = frameworkDirs ipi, + Current.frameworks = frameworks ipi, + Current.haddockInterfaces = haddockInterfaces ipi, + Current.haddockHTMLs = haddockHTMLs ipi, + Current.pkgRoot = Nothing + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPIConvert.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPIConvert.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPIConvert.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC/IPIConvert.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,50 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.IPI642 +-- Copyright : (c) The University of Glasgow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Helper functions for 'Distribution.Simple.GHC.IPI642'. 
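A minimal sketch of the Read-based approach described for IPI642 above: an old-style record type derives Read, an entry is parsed with reads, and the result is converted into a current representation. Both record types below are invented stand-ins kept deliberately tiny; the real conversion is toCurrent and convertPackageId.

-- Invented, simplified stand-ins for the old and new package-id records.
data OldPackageId = OldPackageId { oldName :: String, oldVersion :: [Int] }
  deriving (Read, Show)

newtype NewPackageId = NewPackageId (String, [Int])
  deriving Show

convert :: OldPackageId -> NewPackageId
convert (OldPackageId n v) = NewPackageId (n, v)

main :: IO ()
main =
  case reads "OldPackageId {oldName = \"base\", oldVersion = [4,9,0,0]}" of
    [(old, "")] -> print (convert old)
    _           -> putStrLn "could not parse the old-style entry"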
+module Distribution.Simple.GHC.IPIConvert ( + PackageIdentifier, convertPackageId, + License, convertLicense, + convertModuleName + ) where + +import qualified Distribution.Package as Current hiding (installedUnitId) +import qualified Distribution.License as Current + +import Distribution.Version +import Distribution.ModuleName +import Distribution.Text + +import Data.Maybe + +data PackageIdentifier = PackageIdentifier { + pkgName :: String, + pkgVersion :: Version + } + deriving Read + +convertPackageId :: PackageIdentifier -> Current.PackageIdentifier +convertPackageId PackageIdentifier { pkgName = n, pkgVersion = v } = + Current.PackageIdentifier (Current.PackageName n) v + +data License = GPL | LGPL | BSD3 | BSD4 + | PublicDomain | AllRightsReserved | OtherLicense + deriving Read + +convertModuleName :: String -> ModuleName +convertModuleName s = fromJust $ simpleParse s + +convertLicense :: License -> Current.License +convertLicense GPL = Current.GPL Nothing +convertLicense LGPL = Current.LGPL Nothing +convertLicense BSD3 = Current.BSD3 +convertLicense BSD4 = Current.BSD4 +convertLicense PublicDomain = Current.PublicDomain +convertLicense AllRightsReserved = Current.AllRightsReserved +convertLicense OtherLicense = Current.OtherLicense diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHC.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,1237 @@ +{-# LANGUAGE PatternGuards #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC +-- Copyright : Isaac Jones 2003-2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is a fairly large module. It contains most of the GHC-specific code for +-- configuring, building and installing packages. It also exports a function +-- for finding out what packages are already installed. Configuring involves +-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions +-- this version of ghc supports and returning a 'Compiler' value. +-- +-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out +-- what packages are installed. +-- +-- Building is somewhat complex as there is quite a bit of information to take +-- into account. We have to build libs and programs, possibly for profiling and +-- shared libs. We have to support building libraries that will be usable by +-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files +-- using ghc. Linking, especially for @split-objs@ is remarkably complex, +-- partly because there tend to be 1,000's of @.o@ files and this can often be +-- more than we can pass to the @ld@ or @ar@ programs in one go. +-- +-- Installing for libs and exes involves finding the right files and copying +-- them to the right places. One of the more tricky things about this module is +-- remembering the layout of files in the build directory (which is not +-- explicitly documented) and thus what search dirs are used for various kinds +-- of files. 
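One concrete consequence of the point above about having more .o files than fit on one command line is that archive and link steps must be batched. Cabal's real handling lives in Distribution.Simple.Program.Ar and Ld, imported further down; the batching idea itself can be sketched as a small pure function, where the 32 KiB budget and file names are arbitrary.

-- Split a long list of object files into batches whose combined argument
-- length stays under a byte budget, so each ar/ld invocation fits within
-- the command-line length limit (illustrative policy only).
chunkArgs :: Int -> [FilePath] -> [[FilePath]]
chunkArgs _     []    = []
chunkArgs limit files = batch : chunkArgs limit rest
  where
    (batch, rest) = go 0 files
    go _ [] = ([], [])
    go used (f:fs)
      | used + cost > limit && used > 0 = ([], f:fs)   -- close the current batch
      | otherwise = let (more, remaining) = go (used + cost) fs
                    in (f : more, remaining)
      where
        cost = length f + 1                            -- argument plus a separating space

main :: IO ()
main = do
  let objs = [ "dist/build/Module" ++ show n ++ ".o" | n <- [1 .. 2000 :: Int] ]
  mapM_ (putStrLn . ("objects in this ar/ld batch: " ++) . show . length)
        (chunkArgs 32768 objs)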
+ +module Distribution.Simple.GHC ( + getGhcInfo, + configure, + getInstalledPackages, + getInstalledPackagesMonitorFiles, + getPackageDBContents, + buildLib, buildExe, + replLib, replExe, + startInterpreter, + installLib, installExe, + libAbiHash, + hcPkgInfo, + registerPackage, + componentGhcOptions, + componentCcGhcOptions, + getLibDir, + isDynamic, + getGlobalPackageDB, + pkgRoot + ) where + +import Control.Applicative -- 7.10 -Werror workaround +import Prelude -- https://ghc.haskell.org/trac/ghc/wiki/Migration/7.10#GHCsaysTheimportof...isredundant + +import qualified Distribution.Simple.GHC.IPI642 as IPI642 +import qualified Distribution.Simple.GHC.Internal as Internal +import Distribution.Simple.GHC.ImplInfo +import Distribution.PackageDescription as PD +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import qualified Distribution.Simple.Hpc as Hpc +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.Simple.Program +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import qualified Distribution.Simple.Program.Ar as Ar +import qualified Distribution.Simple.Program.Ld as Ld +import qualified Distribution.Simple.Program.Strip as Strip +import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Version +import Distribution.System +import Distribution.Verbosity +import Distribution.Text +import Distribution.Utils.NubList +import Language.Haskell.Extension + +import Control.Monad ( unless, when ) +import Data.Char ( isDigit, isSpace ) +import Data.List +import qualified Data.Map as M ( fromList, lookup ) +import Data.Maybe ( catMaybes ) +import Data.Monoid as Mon ( Monoid(..) 
) +import Data.Version ( showVersion ) +import System.Directory + ( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing + , canonicalizePath ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, replaceExtension + , isRelative ) +import qualified System.Info + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration + -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity hcPath hcPkgPath conf0 = do + + (ghcProg, ghcVersion, conf1) <- + requireProgramVersion verbosity ghcProgram + (orLaterVersion (Version [6,4] [])) + (userMaybeSpecifyPath "ghc" hcPath conf0) + let implInfo = ghcVersionImplInfo ghcVersion + + -- This is slightly tricky, we have to configure ghc first, then we use the + -- location of ghc to help find ghc-pkg in the case that the user did not + -- specify the location of ghc-pkg directly: + (ghcPkgProg, ghcPkgVersion, conf2) <- + requireProgramVersion verbosity ghcPkgProgram { + programFindLocation = guessGhcPkgFromGhcPath ghcProg + } + anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf1) + + when (ghcVersion /= ghcPkgVersion) $ die $ + "Version mismatch between ghc and ghc-pkg: " + ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " " + ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion + + -- Likewise we try to find the matching hsc2hs and haddock programs. + let hsc2hsProgram' = hsc2hsProgram { + programFindLocation = guessHsc2hsFromGhcPath ghcProg + } + haddockProgram' = haddockProgram { + programFindLocation = guessHaddockFromGhcPath ghcProg + } + conf3 = addKnownProgram haddockProgram' $ + addKnownProgram hsc2hsProgram' conf2 + + languages <- Internal.getLanguages verbosity implInfo ghcProg + extensions0 <- Internal.getExtensions verbosity implInfo ghcProg + + ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg + let ghcInfoMap = M.fromList ghcInfo + extensions = -- workaround https://ghc.haskell.org/ticket/11214 + filterExt JavaScriptFFI $ + -- see 'filterExtTH' comment below + filterExtTH $ extensions0 + + -- starting with GHC 8.0, `TemplateHaskell` will be omitted from + -- `--supported-extensions` when it's not available. + -- for older GHCs we can use the "Have interpreter" property to + -- filter out `TemplateHaskell` + filterExtTH | ghcVersion < Version [8] [] + , Just "NO" <- M.lookup "Have interpreter" ghcInfoMap + = filterExt TemplateHaskell + | otherwise = id + + filterExt ext = filter ((/= EnableExtension ext) . fst) + + let comp = Compiler { + compilerId = CompilerId GHC ghcVersion, + compilerAbiTag = NoAbiTag, + compilerCompat = [], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = ghcInfoMap + } + compPlatform = Internal.targetPlatform ghcInfo + -- configure gcc and ld + conf4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap conf3 + return (comp, compPlatform, conf4) + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find +-- the corresponding tool; e.g. 
if the tool is ghc-pkg, we try looking +-- for a versioned or unversioned ghc-pkg in the same dir, that is: +-- +-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg(.exe) +-- +guessToolFromGhcPath :: Program -> ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessToolFromGhcPath tool ghcProg verbosity searchpath + = do let toolname = programName tool + given_path = programPath ghcProg + given_dir = takeDirectory given_path + real_path <- canonicalizePath given_path + let real_dir = takeDirectory real_path + versionSuffix path = takeVersionSuffix (dropExeExtension path) + given_suf = versionSuffix given_path + real_suf = versionSuffix real_path + guessNormal dir = dir toolname <.> exeExtension + guessGhcVersioned dir suf = dir (toolname ++ "-ghc" ++ suf) + <.> exeExtension + guessVersioned dir suf = dir (toolname ++ suf) + <.> exeExtension + mkGuesses dir suf | null suf = [guessNormal dir] + | otherwise = [guessGhcVersioned dir suf, + guessVersioned dir suf, + guessNormal dir] + guesses = mkGuesses given_dir given_suf ++ + if real_path == given_path + then [] + else mkGuesses real_dir real_suf + info verbosity $ "looking for tool " ++ toolname + ++ " near compiler in " ++ given_dir + debug verbosity $ "candidate locations: " ++ show guesses + exists <- mapM doesFileExist guesses + case [ file | (file, True) <- zip guesses exists ] of + -- If we can't find it near ghc, fall back to the usual + -- method. + [] -> programFindLocation tool verbosity searchpath + (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp + let lookedAt = map fst + . takeWhile (\(_file, exist) -> not exist) + $ zip guesses exists + return (Just (fp, lookedAt)) + + where takeVersionSuffix :: FilePath -> String + takeVersionSuffix = takeWhileEndLE isSuffixChar + + isSuffixChar :: Char -> Bool + isSuffixChar c = isDigit c || c == '.' 
|| c == '-' + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding ghc-pkg, we try looking for both a versioned and unversioned +-- ghc-pkg in the same dir, that is: +-- +-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg(.exe) +-- +guessGhcPkgFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding hsc2hs, we try looking for both a versioned and unversioned +-- hsc2hs in the same dir, that is: +-- +-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe) +-- > /usr/local/bin/hsc2hs-6.6.1(.exe) +-- > /usr/local/bin/hsc2hs(.exe) +-- +guessHsc2hsFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding haddock, we try looking for both a versioned and unversioned +-- haddock in the same dir, that is: +-- +-- > /usr/local/bin/haddock-ghc-6.6.1(.exe) +-- > /usr/local/bin/haddock-6.6.1(.exe) +-- > /usr/local/bin/haddock(.exe) +-- +guessHaddockFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram + +getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)] +getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg + where + Just version = programVersion ghcProg + implInfo = ghcVersionImplInfo version + +-- | Given a single package DB, return all installed packages. +getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration + -> IO InstalledPackageIndex +getPackageDBContents verbosity packagedb conf = do + pkgss <- getInstalledPackages' verbosity [packagedb] conf + toPackageIndex verbosity pkgss conf + +-- | Given a package DB stack, return all installed packages. +getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack + -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packagedbs conf = do + checkPackageDbEnvVar + checkPackageDbStack comp packagedbs + pkgss <- getInstalledPackages' verbosity packagedbs conf + index <- toPackageIndex verbosity pkgss conf + return $! hackRtsPackage index + + where + hackRtsPackage index = + case PackageIndex.lookupPackageName index (PackageName "rts") of + [(_,[rts])] + -> PackageIndex.insert (removeMingwIncludeDir rts) index + _ -> index -- No (or multiple) ghc rts package is registered!! + -- Feh, whatever, the ghc test suite does some crazy stuff. + +-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a +-- @PackageIndex@. Helper function used by 'getPackageDBContents' and +-- 'getInstalledPackages'. +toPackageIndex :: Verbosity + -> [(PackageDB, [InstalledPackageInfo])] + -> ProgramConfiguration + -> IO InstalledPackageIndex +toPackageIndex verbosity pkgss conf = do + -- On Windows, various fields have $topdir/foo rather than full + -- paths. We need to substitute the right value in so that when + -- we, for example, call gcc, we have proper paths to give it. + topDir <- getLibDir' verbosity ghcProg + let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) + | (_, pkgs) <- pkgss ] + return $! 
mconcat indices + + where + Just ghcProg = lookupProgram ghcProgram conf + +getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath +getLibDir verbosity lbi = + dropWhileEndLE isSpace `fmap` + rawSystemProgramStdoutConf verbosity ghcProgram + (withPrograms lbi) ["--print-libdir"] + +getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath +getLibDir' verbosity ghcProg = + dropWhileEndLE isSpace `fmap` + rawSystemProgramStdout verbosity ghcProg ["--print-libdir"] + + +-- | Return the 'FilePath' to the global GHC package database. +getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath +getGlobalPackageDB verbosity ghcProg = + dropWhileEndLE isSpace `fmap` + rawSystemProgramStdout verbosity ghcProg ["--print-global-package-db"] + +-- | Return the 'FilePath' to the per-user GHC package database. +getUserPackageDB :: Verbosity -> ConfiguredProgram -> Platform -> IO FilePath +getUserPackageDB _verbosity ghcProg (Platform arch os) = do + -- It's rather annoying that we have to reconstruct this, because ghc + -- hides this information from us otherwise. But for certain use cases + -- like change monitoring it really can't remain hidden. + appdir <- getAppUserDataDirectory "ghc" + return (appdir platformAndVersion packageConfFileName) + where + platformAndVersion = intercalate "-" [ Internal.showArchString arch + , Internal.showOsString os + , display ghcVersion ] + packageConfFileName + | ghcVersion >= Version [6,12] [] = "package.conf.d" + | otherwise = "package.conf" + Just ghcVersion = programVersion ghcProg + +checkPackageDbEnvVar :: IO () +checkPackageDbEnvVar = + Internal.checkPackageDbEnvVar "GHC" "GHC_PACKAGE_PATH" + +checkPackageDbStack :: Compiler -> PackageDBStack -> IO () +checkPackageDbStack comp = if flagPackageConf implInfo + then checkPackageDbStackPre76 + else checkPackageDbStackPost76 + where implInfo = ghcVersionImplInfo (compilerVersion comp) + +checkPackageDbStackPost76 :: PackageDBStack -> IO () +checkPackageDbStackPost76 (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStackPost76 rest + | GlobalPackageDB `elem` rest = + die $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" +checkPackageDbStackPost76 _ = return () + +checkPackageDbStackPre76 :: PackageDBStack -> IO () +checkPackageDbStackPre76 (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStackPre76 rest + | GlobalPackageDB `notElem` rest = + die $ "With current ghc versions the global package db is always used " + ++ "and must be listed first. This ghc limitation is lifted in GHC 7.6," + ++ "see http://hackage.haskell.org/trac/ghc/ticket/5977" +checkPackageDbStackPre76 _ = + die $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" + +-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This +-- breaks when you want to use a different gcc, so we need to filter +-- it out. +removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo +removeMingwIncludeDir pkg = + let ids = InstalledPackageInfo.includeDirs pkg + ids' = filter (not . ("mingw" `isSuffixOf`)) ids + in pkg { InstalledPackageInfo.includeDirs = ids' } + +-- | Get the packages from specific PackageDBs, not cumulative. 
+-- +getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration + -> IO [(PackageDB, [InstalledPackageInfo])] +getInstalledPackages' verbosity packagedbs conf + | ghcVersion >= Version [6,9] [] = + sequence + [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb + return (packagedb, pkgs) + | packagedb <- packagedbs ] + + where + Just ghcProg = lookupProgram ghcProgram conf + Just ghcVersion = programVersion ghcProg + +getInstalledPackages' verbosity packagedbs conf = do + str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"] + let pkgFiles = [ init line | line <- lines str, last line == ':' ] + dbFile packagedb = case (packagedb, pkgFiles) of + (GlobalPackageDB, global:_) -> return $ Just global + (UserPackageDB, _global:user:_) -> return $ Just user + (UserPackageDB, _global:_) -> return $ Nothing + (SpecificPackageDB specific, _) -> return $ Just specific + _ -> die "cannot read ghc-pkg package listing" + pkgFiles' <- mapM dbFile packagedbs + sequence [ withFileContents file $ \content -> do + pkgs <- readPackages file content + return (db, pkgs) + | (db , Just file) <- zip packagedbs pkgFiles' ] + where + -- Depending on the version of ghc we use a different type's Read + -- instance to parse the package file and then convert. + -- It's a bit yuck. But that's what we get for using Read/Show. + readPackages + | ghcVersion >= Version [6,4,2] [] + = \file content -> case reads content of + [(pkgs, _)] -> return (map IPI642.toCurrent pkgs) + _ -> failToRead file + -- We dropped support for 6.4.2 and earlier. + | otherwise + = \file _ -> failToRead file + Just ghcProg = lookupProgram ghcProgram conf + Just ghcVersion = programVersion ghcProg + failToRead file = die $ "cannot read ghc package database " ++ file + +getInstalledPackagesMonitorFiles :: Verbosity -> Platform + -> ProgramConfiguration + -> [PackageDB] + -> IO [FilePath] +getInstalledPackagesMonitorFiles verbosity platform progdb = + mapM getPackageDBPath + where + getPackageDBPath :: PackageDB -> IO FilePath + getPackageDBPath GlobalPackageDB = + selectMonitorFile =<< getGlobalPackageDB verbosity ghcProg + + getPackageDBPath UserPackageDB = + selectMonitorFile =<< getUserPackageDB verbosity ghcProg platform + + getPackageDBPath (SpecificPackageDB path) = selectMonitorFile path + + -- GHC has old style file dbs, and new style directory dbs. + -- Note that for dir style dbs, we only need to monitor the cache file, not + -- the whole directory. The ghc program itself only reads the cache file + -- so it's safe to only monitor this one file. + selectMonitorFile path = do + isFileStyle <- doesFileExist path + if isFileStyle then return path + else return (path "package.cache") + + Just ghcProg = lookupProgram ghcProgram progdb + + +-- ----------------------------------------------------------------------------- +-- Building + +-- | Build a library with GHC. 
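-- [Editorial note, not part of the patch] The library build code that follows
-- drives one compiler invocation per enabled library "way" (vanilla, profiling,
-- shared, GHCi), and Template Haskell can force an extra way so that splices can
-- be loaded with the compiler's default ABI. Below is a minimal standalone sketch
-- of that selection logic; the names Way and selectWays are hypothetical, and
-- plain Bools stand in for the LocalBuildInfo queries used in the real code.

data Way = Vanilla | Profiling | Shared | GHCi deriving (Eq, Show)

selectWays :: Bool  -- ^ withVanillaLib
           -> Bool  -- ^ withProfLib
           -> Bool  -- ^ withSharedLib
           -> Bool  -- ^ withGHCiLib
           -> Bool  -- ^ component uses TemplateHaskell
           -> Bool  -- ^ compiler is dynamic by default
           -> [Way]
selectWays vanilla prof shared ghci doingTH ghcDynamic =
     [ Vanilla   | vanilla || (doingTH && not ghcDynamic) ]
  ++ [ Shared    | shared  || (doingTH && ghcDynamic)     ]
  ++ [ Profiling | prof ]
  ++ [ GHCi      | ghci && vanilla ]

-- For example, selectWays True False False True True True yields
-- [Vanilla, Shared, GHCi]: the shared way is forced because, with a
-- dynamic-by-default compiler, TH needs the dynamic (dyn_o) objects,
-- mirroring the forceSharedLib logic in buildOrReplLib below.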
+--
+buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int)
+                  -> PackageDescription -> LocalBuildInfo
+                  -> Library -> ComponentLocalBuildInfo -> IO ()
+buildLib = buildOrReplLib False
+replLib  = buildOrReplLib True
+
+buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
+               -> PackageDescription -> LocalBuildInfo
+               -> Library -> ComponentLocalBuildInfo -> IO ()
+buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do
+  let libName = componentUnitId clbi
+      libTargetDir
+        | componentUnitId clbi == localUnitId lbi = buildDir lbi
+        | otherwise = buildDir lbi </> display libName
+      whenVanillaLib forceVanilla =
+        when (forceVanilla || withVanillaLib lbi)
+      whenProfLib = when (withProfLib lbi)
+      whenSharedLib forceShared =
+        when (forceShared || withSharedLib lbi)
+      whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
+      ifReplLib = when forRepl
+      comp = compiler lbi
+      ghcVersion = compilerVersion comp
+      implInfo = getImplInfo comp
+      platform@(Platform _hostArch hostOS) = hostPlatform lbi
+
+  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
+  let runGhcProg = runGHC verbosity ghcProg comp platform
+
+  libBi <- hackThreadedFlag verbosity
+             comp (withProfLib lbi) (libBuildInfo lib)
+
+  let isGhcDynamic        = isDynamic comp
+      dynamicTooSupported = supportsDynamicToo comp
+      doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi
+      forceVanillaLib = doingTH && not isGhcDynamic
+      forceSharedLib  = doingTH && isGhcDynamic
+      -- TH always needs default libs, even when building for profiling
+
+  -- Determine if program coverage should be enabled and if so, what
+  -- '-hpcdir' should be.
+  let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
+      -- Component name. Not 'libName' because that has the "HS" prefix
+      -- that GHC gives Haskell libraries.
+      cname = display $ PD.package $ localPkgDescr lbi
+      distPref = fromFlag $ configDistPref $ configFlags lbi
+      hpcdir way
+        | forRepl = Mon.mempty -- HPC is not supported in ghci
+        | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname
+        | otherwise = mempty
+
+  createDirectoryIfMissingVerbose verbosity True libTargetDir
+  -- TODO: do we need to put hs-boot files into place for mutually recursive
+  -- modules?
+ let cObjs = map (`replaceExtension` objExtension) (cSources libBi) + baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir + vanillaOpts = baseOpts `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptNumJobs = numJobs, + ghcOptInputModules = toNubListR $ libModules lib, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + + profOpts = vanillaOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag True + (withProfLibDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = toNubListR $ hcProfOptions GHC libBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + + sharedOpts = vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi, + ghcOptLinkLibs = toNubListR $ extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + ghcOptLinkFrameworks = toNubListR $ + PD.frameworks libBi, + ghcOptLinkFrameworkDirs = toNubListR $ + PD.extraFrameworkDirs libBi, + ghcOptInputFiles = toNubListR + [libTargetDir x | x <- cObjs] + } + replOpts = vanillaOpts { + ghcOptExtra = overNubListR + Internal.filterGhciFlags $ + ghcOptExtra vanillaOpts, + ghcOptNumJobs = mempty + } + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + + vanillaSharedOpts = vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptDynHiSuffix = toFlag "dyn_hi", + ghcOptDynObjSuffix = toFlag "dyn_o", + ghcOptHPCDir = hpcdir Hpc.Dyn + } + + unless (forRepl || null (libModules lib)) $ + do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts) + shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts) + useDynToo = dynamicTooSupported && + (forceVanillaLib || withVanillaLib lbi) && + (forceSharedLib || withSharedLib lbi) && + null (hcSharedOptions GHC libBi) + if useDynToo + then do + runGhcProg vanillaSharedOpts + case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of + (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> + -- When the vanilla and shared library builds are done + -- in one pass, only one set of HPC module interfaces + -- are generated. This set should suffice for both + -- static and dynamically linked executables. We copy + -- the modules interfaces so they are available under + -- both ways. + copyDirectoryRecursive verbosity dynDir vanillaDir + _ -> return () + else if isGhcDynamic + then do shared; vanilla + else do vanilla; shared + whenProfLib (runGhcProg profOpts) + + -- build any C sources + unless (null (cSources libBi)) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo + lbi libBi clbi libTargetDir filename + vanillaCcOpts = if isGhcDynamic + -- Dynamic GHC requires C sources to be built + -- with -fPIC for REPL to work. See #2207. 
+ then baseCcOpts { ghcOptFPic = toFlag True } + else baseCcOpts + profCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptObjSuffix = toFlag "p_o" + } + sharedCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptObjSuffix = toFlag "dyn_o" + } + odir = fromFlag (ghcOptObjDir vanillaCcOpts) + createDirectoryIfMissingVerbose verbosity True odir + let runGhcProgIfNeeded ccOpts = do + needsRecomp <- checkNeedsRecompilation filename ccOpts + when needsRecomp $ runGhcProg ccOpts + runGhcProgIfNeeded vanillaCcOpts + unless forRepl $ + whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts) + unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts) + | filename <- cSources libBi] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + + ifReplLib $ do + when (null (libModules lib)) $ warn verbosity "No exposed modules" + ifReplLib (runGhcProg replOpts) + + -- link: + unless forRepl $ do + info verbosity "Linking..." + let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) + (cSources libBi) + cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) + (cSources libBi) + cid = compilerId (compiler lbi) + vanillaLibFilePath = libTargetDir mkLibName libName + profileLibFilePath = libTargetDir mkProfLibName libName + sharedLibFilePath = libTargetDir mkSharedLibName cid libName + ghciLibFilePath = libTargetDir Internal.mkGHCiLibName libName + libInstallPath = libdir $ absoluteInstallDirs pkg_descr lbi NoCopyDest + sharedLibInstallPath = libInstallPath mkSharedLibName cid libName + + stubObjs <- catMaybes <$> sequence + [ findFileWithExtension [objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files + , x <- libModules lib ] + stubProfObjs <- catMaybes <$> sequence + [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files + , x <- libModules lib ] + stubSharedObjs <- catMaybes <$> sequence + [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files + , x <- libModules lib ] + + hObjs <- Internal.getHaskellObjects implInfo lib lbi + libTargetDir objExtension True + hProfObjs <- + if withProfLib lbi + then Internal.getHaskellObjects implInfo lib lbi + libTargetDir ("p_" ++ objExtension) True + else return [] + hSharedObjs <- + if withSharedLib lbi + then Internal.getHaskellObjects implInfo lib lbi + libTargetDir ("dyn_" ++ objExtension) False + else return [] + + unless (null hObjs && null cObjs && null stubObjs) $ do + rpaths <- getRPaths lbi clbi + + let staticObjectFiles = + hObjs + ++ map (libTargetDir ) cObjs + ++ stubObjs + profObjectFiles = + hProfObjs + ++ map (libTargetDir ) cProfObjs + ++ stubProfObjs + ghciObjFiles = + hObjs + ++ map (libTargetDir ) cObjs + ++ stubObjs + dynamicObjectFiles = + hSharedObjs + ++ map (libTargetDir ) cSharedObjs + ++ stubSharedObjs + -- After the relocation lib is created we invoke ghc -shared + -- with the dependencies spelled out as -package arguments + -- and ghc invokes the linker with the proper library paths + ghcSharedLinkArgs = + mempty { + ghcOptShared = toFlag True, + ghcOptDynLinkMode = toFlag 
GhcDynamicOnly, + ghcOptInputFiles = toNubListR dynamicObjectFiles, + ghcOptOutputFile = toFlag sharedLibFilePath, + ghcOptExtra = toNubListR $ + hcSharedOptions GHC libBi, + -- For dynamic libs, Mac OS/X needs to know the install location + -- at build time. This only applies to GHC < 7.8 - see the + -- discussion in #1660. + ghcOptDylibName = if hostOS == OSX + && ghcVersion < Version [7,8] [] + then toFlag sharedLibInstallPath + else mempty, + ghcOptHideAllPackages = toFlag True, + ghcOptNoAutoLinkPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ + Internal.mkGhcOptPackages clbi , + ghcOptLinkLibs = toNubListR $ extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, + ghcOptLinkFrameworkDirs = + toNubListR $ PD.extraFrameworkDirs libBi, + ghcOptRPaths = rpaths + } + + info verbosity (show (ghcOptPackages ghcSharedLinkArgs)) + + whenVanillaLib False $ + Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles + + whenProfLib $ + Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles + + whenGHCiLib $ do + (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity ldProg + ghciLibFilePath ghciObjFiles + + whenSharedLib False $ + runGhcProg ghcSharedLinkArgs + +-- | Start a REPL without loading any source files. +startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity conf comp platform packageDBs = do + let replOpts = mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptPackageDBs = packageDBs + } + checkPackageDbStack comp packageDBs + (ghcProg, _) <- requireProgram verbosity ghcProgram conf + runGHC verbosity ghcProg comp platform replOpts + +-- | Build an executable with GHC. +-- +buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe = buildOrReplExe False +replExe = buildOrReplExe True + +buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi + exe@Executable { exeName = exeName', modulePath = modPath } clbi = do + + (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) + let comp = compiler lbi + platform = hostPlatform lbi + implInfo = getImplInfo comp + runGhcProg = runGHC verbosity ghcProg comp platform + + exeBi <- hackThreadedFlag verbosity + comp (withProfExe lbi) (buildInfo exe) + + -- exeNameReal, the name that GHC really uses (with .exe on Windows) + let exeNameReal = exeName' <.> + (if takeExtension exeName' /= ('.':exeExtension) + then exeExtension + else "") + + let targetDir = buildDir lbi exeName' + let exeDir = targetDir (exeName' ++ "-tmp") + createDirectoryIfMissingVerbose verbosity True targetDir + createDirectoryIfMissingVerbose verbosity True exeDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? FIX: what about exeName.hi-boot? + + -- Determine if program coverage should be enabled and if so, what + -- '-hpcdir' should be. 
+ let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | forRepl = mempty -- HPC is not supported in ghci + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName' + | otherwise = mempty + + -- build executables + + srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath + rpaths <- getRPaths lbi clbi + + let isGhcDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] + cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] + cObjs = map (`replaceExtension` objExtension) cSrcs + baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) + `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptInputFiles = toNubListR + [ srcMainFile | isHaskellMain], + ghcOptInputModules = toNubListR + [ m | not isHaskellMain, m <- exeModules exe] + } + staticOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticOnly, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + profOpts = baseOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag False + (withProfExeDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = toNubListR + (hcProfOptions GHC exeBi), + ghcOptHPCDir = hpcdir Hpc.Prof + } + dynOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = toNubListR $ + hcSharedOptions GHC exeBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + dynTooOpts = staticOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptDynHiSuffix = toFlag "dyn_hi", + ghcOptDynObjSuffix = toFlag "dyn_o", + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi, + ghcOptLinkLibs = toNubListR $ extraLibs exeBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, + ghcOptLinkFrameworks = toNubListR $ + PD.frameworks exeBi, + ghcOptLinkFrameworkDirs = toNubListR $ + PD.extraFrameworkDirs exeBi, + ghcOptInputFiles = toNubListR + [exeDir x | x <- cObjs] + } + dynLinkerOpts = mempty { + ghcOptRPaths = rpaths + } + replOpts = baseOpts { + ghcOptExtra = overNubListR + Internal.filterGhciFlags + (ghcOptExtra baseOpts) + } + -- For a normal compile we do separate invocations of ghc for + -- compiling as for linking. But for repl we have to do just + -- the one invocation, so that one has to include all the + -- linker stuff too, like -l flags and any .o files from C + -- files etc. + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + commonOpts | withProfExe lbi = profOpts + | withDynExe lbi = dynOpts + | otherwise = staticOpts + compileOpts | useDynToo = dynTooOpts + | otherwise = commonOpts + withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) + + -- For building exe's that use TH with -prof or -dynamic we actually have + -- to build twice, once without -prof/-dynamic and then again with + -- -prof/-dynamic. This is because the code that TH needs to run at + -- compile time needs to be the vanilla ABI so it can be loaded up and run + -- by the compiler. + -- With dynamic-by-default GHC the TH object files loaded at compile-time + -- need to be .dyn_o instead of .o. 
+ doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi + -- Should we use -dynamic-too instead of compiling twice? + useDynToo = dynamicTooSupported && isGhcDynamic + && doingTH && withStaticExe + && null (hcSharedOptions GHC exeBi) + compileTHOpts | isGhcDynamic = dynOpts + | otherwise = staticOpts + compileForTH + | forRepl = False + | useDynToo = False + | isGhcDynamic = doingTH && (withProfExe lbi || withStaticExe) + | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) + + linkOpts = + commonOpts `mappend` + linkerOpts `mappend` + mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } `mappend` + (if withDynExe lbi then dynLinkerOpts else mempty) + + -- Build static/dynamic object files for TH, if needed. + when compileForTH $ + runGhcProg compileTHOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + unless forRepl $ + runGhcProg compileOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + -- build any C sources + unless (null cSrcs) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi + clbi exeDir filename) `mappend` mempty { + ghcOptDynLinkMode = toFlag (if withDynExe lbi + then GhcDynamicOnly + else GhcStaticOnly), + ghcOptProfilingMode = toFlag (withProfExe lbi) + } + odir = fromFlag (ghcOptObjDir opts) + createDirectoryIfMissingVerbose verbosity True odir + needsRecomp <- checkNeedsRecompilation filename opts + when needsRecomp $ + runGhcProg opts + | filename <- cSrcs ] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + when forRepl $ runGhcProg replOpts + + -- link: + unless forRepl $ do + info verbosity "Linking..." + runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } + +-- | Returns True if the modification date of the given source file is newer than +-- the object file we last compiled for it, or if no object file exists yet. +checkNeedsRecompilation :: FilePath -> GhcOptions -> IO Bool +checkNeedsRecompilation filename opts = filename `moreRecentFile` oname + where oname = getObjectFileName filename opts + +-- | Finds the object file name of the given source file +getObjectFileName :: FilePath -> GhcOptions -> FilePath +getObjectFileName filename opts = oname + where odir = fromFlag (ghcOptObjDir opts) + oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts) + oname = odir replaceExtension filename oext + +-- | Calculate the RPATHs for the component we are building. +-- +-- Calculates relative RPATHs when 'relocatable' is set. +getRPaths :: LocalBuildInfo + -> ComponentLocalBuildInfo -- ^ Component we are building + -> IO (NubListR FilePath) +getRPaths lbi clbi | supportRPaths hostOS = do + libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi + let hostPref = case hostOS of + OSX -> "@loader_path" + _ -> "$ORIGIN" + relPath p = if isRelative p then hostPref p else p + rpaths = toNubListR (map relPath libraryPaths) + return rpaths + where + (Platform _ hostOS) = hostPlatform lbi + + -- The list of RPath-supported operating systems below reflects the + -- platforms on which Cabal's RPATH handling is tested. It does _NOT_ + -- reflect whether the OS supports RPATH. + + -- E.g. when this comment was written, the *BSD operating systems were + -- untested with regards to Cabal RPATH handling, and were hence set to + -- 'False', while those operating systems themselves do support RPATH. 
+ supportRPaths Linux   = True + supportRPaths Windows = False + supportRPaths OSX   = True + supportRPaths FreeBSD   = False + supportRPaths OpenBSD   = False + supportRPaths NetBSD   = False + supportRPaths DragonFly = False + supportRPaths Solaris = False + supportRPaths AIX = False + supportRPaths HPUX = False + supportRPaths IRIX = False + supportRPaths HaLVM = False + supportRPaths IOS = False + supportRPaths Android = False + supportRPaths Ghcjs = False + supportRPaths Hurd = False + supportRPaths (OtherOS _) = False + -- Do _not_ add a default case so that we get a warning here when a new OS + -- is added. + +getRPaths _ _ = return mempty + +-- | Filter the "-threaded" flag when profiling as it does not +-- work with ghc-6.8 and older. +hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo +hackThreadedFlag verbosity comp prof bi + | not mustFilterThreaded = return bi + | otherwise = do + warn verbosity $ "The ghc flag '-threaded' is not compatible with " + ++ "profiling in ghc-6.8 and older. It will be disabled." + return bi { options = filterHcOptions (/= "-threaded") (options bi) } + where + mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] [] + && "-threaded" `elem` hcOptions GHC bi + filterHcOptions p hcoptss = + [ (hc, if hc == GHC then filter p opts else opts) + | (hc, opts) <- hcoptss ] + + +-- | Extracts a String representing a hash of the ABI of a built +-- library. It can fail if the library has not yet been built. +-- +libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO String +libAbiHash verbosity _pkg_descr lbi lib clbi = do + libBi <- hackThreadedFlag verbosity + (compiler lbi) (withProfLib lbi) (libBuildInfo lib) + let + comp = compiler lbi + platform = hostPlatform lbi + vanillaArgs = + (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) + `mappend` mempty { + ghcOptMode = toFlag GhcModeAbiHash, + ghcOptInputModules = toNubListR $ exposedModules lib + } + sharedArgs = vanillaArgs `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi + } + profArgs = vanillaArgs `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag True + (withProfLibDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = toNubListR $ hcProfOptions GHC libBi + } + ghcArgs + | withVanillaLib lbi = vanillaArgs + | withSharedLib lbi = sharedArgs + | withProfLib lbi = profArgs + | otherwise = error "libAbiHash: Can't find an enabled library way" + + (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) + hash <- getProgramInvocationOutput verbosity + (ghcInvocation ghcProg comp platform ghcArgs) + return (takeWhile (not . 
isSpace) hash) + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions = Internal.componentGhcOptions + +componentCcGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath + -> GhcOptions +componentCcGhcOptions verbosity lbi = + Internal.componentCcGhcOptions verbosity implInfo lbi + where + comp = compiler lbi + implInfo = getImplInfo comp + +-- ----------------------------------------------------------------------------- +-- Installing + +-- |Install executables for GHC. +installExe :: Verbosity + -> LocalBuildInfo + -> InstallDirs FilePath -- ^Where to copy the files to + -> FilePath -- ^Build location + -> (FilePath, FilePath) -- ^Executable (prefix,suffix) + -> PackageDescription + -> Executable + -> IO () +installExe verbosity lbi installDirs buildPref + (progprefix, progsuffix) _pkg exe = do + let binDir = bindir installDirs + createDirectoryIfMissingVerbose verbosity True binDir + let exeFileName = exeName exe <.> exeExtension + fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix + installBinary dest = do + installExecutableFile verbosity + (buildPref exeName exe exeFileName) + (dest <.> exeExtension) + when (stripExes lbi) $ + Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi) + (dest <.> exeExtension) + installBinary (binDir fixedExeBaseName) + +-- |Install for ghc, .hi, .a and, if --with-ghci given, .o +installLib :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do + -- copy .hi files over: + whenVanilla $ copyModuleFiles "hi" + whenProf $ copyModuleFiles "p_hi" + whenShared $ copyModuleFiles "dyn_hi" + + -- copy the built library files over: + whenVanilla $ installOrdinary builtDir targetDir vanillaLibName + whenProf $ installOrdinary builtDir targetDir profileLibName + whenGHCi $ installOrdinary builtDir targetDir ghciLibName + whenShared $ installShared builtDir dynlibTargetDir sharedLibName + + where + install isShared srcDir dstDir name = do + let src = srcDir name + dst = dstDir name + createDirectoryIfMissingVerbose verbosity True dstDir + + if isShared + then installExecutableFile verbosity src dst + else installOrdinaryFile verbosity src dst + + when (stripLibs lbi) $ Strip.stripLib verbosity + (hostPlatform lbi) (withPrograms lbi) dst + + installOrdinary = install False + installShared = install True + + copyModuleFiles ext = + findModuleFiles [builtDir] [ext] (libModules lib) + >>= installOrdinaryFiles verbosity targetDir + + cid = compilerId (compiler lbi) + libName = componentUnitId clbi + vanillaLibName = mkLibName libName + profileLibName = mkProfLibName libName + ghciLibName = Internal.mkGHCiLibName libName + sharedLibName = (mkSharedLibName cid) libName + + hasLib = not $ null (libModules lib) + && null (cSources (libBuildInfo lib)) + whenVanilla = when (hasLib && withVanillaLib lbi) + whenProf = when (hasLib && withProfLib lbi) + whenGHCi = when (hasLib && withGHCiLib lbi) + whenShared = when (hasLib && withSharedLib lbi) + +-- ----------------------------------------------------------------------------- +-- Registering + +hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo +hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg + 
, HcPkg.noPkgDbStack = v < [6,9] + , HcPkg.noVerboseFlag = v < [6,11] + , HcPkg.flagPackageConf = v < [7,5] + , HcPkg.supportsDirDbs = v >= [6,8] + , HcPkg.requiresDirDbs = v >= [7,10] + , HcPkg.nativeMultiInstance = v >= [7,10] + , HcPkg.recacheMultiInstance = v >= [6,12] + } + where + v = versionBranch ver + Just ghcPkgProg = lookupProgram ghcPkgProgram conf + Just ver = programVersion ghcPkgProg + +registerPackage + :: Verbosity + -> ProgramConfiguration + -> Bool + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo + | multiInstance + = HcPkg.registerMultiInstance (hcPkgInfo progdb) verbosity + packageDbs installedPkgInfo + + | otherwise + = HcPkg.reregister (hcPkgInfo progdb) verbosity + packageDbs (Right installedPkgInfo) + +pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath +pkgRoot verbosity lbi = pkgRoot' + where + pkgRoot' GlobalPackageDB = + let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi) + in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg) + pkgRoot' UserPackageDB = do + appDir <- getAppUserDataDirectory "ghc" + let ver = compilerVersion (compiler lbi) + subdir = System.Info.arch ++ '-':System.Info.os + ++ '-':showVersion ver + rootDir = appDir subdir + -- We must create the root directory for the user package database if it + -- does not yet exists. Otherwise '${pkgroot}' will resolve to a + -- directory at the time of 'ghc-pkg register', and registration will + -- fail. + createDirectoryIfMissing True rootDir + return rootDir + pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp) + +-- ----------------------------------------------------------------------------- +-- Utils + +isDynamic :: Compiler -> Bool +isDynamic = Internal.ghcLookupProperty "GHC Dynamic" + +supportsDynamicToo :: Compiler -> Bool +supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHCJS.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHCJS.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/GHCJS.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/GHCJS.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,879 @@ +module Distribution.Simple.GHCJS ( + configure, getInstalledPackages, getPackageDBContents, + buildLib, buildExe, + replLib, replExe, + startInterpreter, + installLib, installExe, + libAbiHash, + hcPkgInfo, + registerPackage, + componentGhcOptions, + getLibDir, + isDynamic, + getGlobalPackageDB, + runCmd + ) where + +import Distribution.Simple.GHC.ImplInfo +import qualified Distribution.Simple.GHC.Internal as Internal +import Distribution.PackageDescription as PD +import Distribution.InstalledPackageInfo +import Distribution.Package +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import qualified Distribution.Simple.Hpc as Hpc +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Simple.Program +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import qualified Distribution.Simple.Program.Ar as Ar +import qualified Distribution.Simple.Program.Ld as Ld +import qualified Distribution.Simple.Program.Strip as Strip 
+import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup hiding ( Flag ) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Compiler hiding ( Flag ) +import Distribution.Version +import Distribution.System +import Distribution.Verbosity +import Distribution.Utils.NubList +import Distribution.Text +import Language.Haskell.Extension + +import Control.Monad ( unless, when ) +import Data.Char ( isSpace ) +import qualified Data.Map as M ( fromList ) +import Data.Monoid as Mon ( Monoid(..) ) +import System.Directory ( doesFileExist ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, replaceExtension ) + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration + -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity hcPath hcPkgPath conf0 = do + (ghcjsProg, ghcjsVersion, conf1) <- + requireProgramVersion verbosity ghcjsProgram + (orLaterVersion (Version [0,1] [])) + (userMaybeSpecifyPath "ghcjs" hcPath conf0) + Just ghcjsGhcVersion <- findGhcjsGhcVersion verbosity (programPath ghcjsProg) + let implInfo = ghcjsVersionImplInfo ghcjsVersion ghcjsGhcVersion + + -- This is slightly tricky, we have to configure ghcjs first, then we use the + -- location of ghcjs to help find ghcjs-pkg in the case that the user did not + -- specify the location of ghc-pkg directly: + (ghcjsPkgProg, ghcjsPkgVersion, conf2) <- + requireProgramVersion verbosity ghcjsPkgProgram { + programFindLocation = guessGhcjsPkgFromGhcjsPath ghcjsProg + } + anyVersion (userMaybeSpecifyPath "ghcjs-pkg" hcPkgPath conf1) + + Just ghcjsPkgGhcjsVersion <- findGhcjsPkgGhcjsVersion + verbosity (programPath ghcjsPkgProg) + + when (ghcjsVersion /= ghcjsPkgGhcjsVersion) $ die $ + "Version mismatch between ghcjs and ghcjs-pkg: " + ++ programPath ghcjsProg ++ " is version " ++ display ghcjsVersion ++ " " + ++ programPath ghcjsPkgProg ++ " is version " ++ display ghcjsPkgGhcjsVersion + + when (ghcjsGhcVersion /= ghcjsPkgVersion) $ die $ + "Version mismatch between ghcjs and ghcjs-pkg: " + ++ programPath ghcjsProg + ++ " was built with GHC version " ++ display ghcjsGhcVersion ++ " " + ++ programPath ghcjsPkgProg + ++ " was built with GHC version " ++ display ghcjsPkgVersion + + -- be sure to use our versions of hsc2hs, c2hs, haddock and ghc + let hsc2hsProgram' = + hsc2hsProgram { programFindLocation = + guessHsc2hsFromGhcjsPath ghcjsProg } + c2hsProgram' = + c2hsProgram { programFindLocation = + guessC2hsFromGhcjsPath ghcjsProg } + + haddockProgram' = + haddockProgram { programFindLocation = + guessHaddockFromGhcjsPath ghcjsProg } + conf3 = addKnownPrograms [ hsc2hsProgram', c2hsProgram', haddockProgram' ] conf2 + + languages <- Internal.getLanguages verbosity implInfo ghcjsProg + extensions <- Internal.getExtensions verbosity implInfo ghcjsProg + + ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcjsProg + let ghcInfoMap = M.fromList ghcInfo + + let comp = Compiler { + compilerId = CompilerId GHCJS ghcjsVersion, + compilerAbiTag = AbiTag $ + "ghc" ++ intercalate "_" (map show . 
versionBranch $ ghcjsGhcVersion), + compilerCompat = [CompilerId GHC ghcjsGhcVersion], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = ghcInfoMap + } + compPlatform = Internal.targetPlatform ghcInfo + -- configure gcc and ld + let conf4 = if ghcjsNativeToo comp + then Internal.configureToolchain implInfo + ghcjsProg ghcInfoMap conf3 + else conf3 + return (comp, compPlatform, conf4) + +ghcjsNativeToo :: Compiler -> Bool +ghcjsNativeToo = Internal.ghcLookupProperty "Native Too" + +guessGhcjsPkgFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessGhcjsPkgFromGhcjsPath = guessToolFromGhcjsPath ghcjsPkgProgram + +guessHsc2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessHsc2hsFromGhcjsPath = guessToolFromGhcjsPath hsc2hsProgram + +guessC2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessC2hsFromGhcjsPath = guessToolFromGhcjsPath c2hsProgram + +guessHaddockFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessHaddockFromGhcjsPath = guessToolFromGhcjsPath haddockProgram + +guessToolFromGhcjsPath :: Program -> ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessToolFromGhcjsPath tool ghcjsProg verbosity searchpath + = do let toolname = programName tool + path = programPath ghcjsProg + dir = takeDirectory path + versionSuffix = takeVersionSuffix (dropExeExtension path) + guessNormal = dir toolname <.> exeExtension + guessGhcjsVersioned = dir (toolname ++ "-ghcjs" ++ versionSuffix) + <.> exeExtension + guessGhcjs = dir (toolname ++ "-ghcjs") + <.> exeExtension + guessVersioned = dir (toolname ++ versionSuffix) <.> exeExtension + guesses | null versionSuffix = [guessGhcjs, guessNormal] + | otherwise = [guessGhcjsVersioned, + guessGhcjs, + guessVersioned, + guessNormal] + info verbosity $ "looking for tool " ++ toolname + ++ " near compiler in " ++ dir + exists <- mapM doesFileExist guesses + case [ file | (file, True) <- zip guesses exists ] of + -- If we can't find it near ghc, fall back to the usual + -- method. + [] -> programFindLocation tool verbosity searchpath + (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp + let lookedAt = map fst + . takeWhile (\(_file, exist) -> not exist) + $ zip guesses exists + return (Just (fp, lookedAt)) + + where takeVersionSuffix :: FilePath -> String + takeVersionSuffix = reverse . takeWhile (`elem ` "0123456789.-") . + reverse + +-- | Given a single package DB, return all installed packages. +getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration + -> IO InstalledPackageIndex +getPackageDBContents verbosity packagedb conf = do + pkgss <- getInstalledPackages' verbosity [packagedb] conf + toPackageIndex verbosity pkgss conf + +-- | Given a package DB stack, return all installed packages. +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity packagedbs conf = do + checkPackageDbEnvVar + checkPackageDbStack packagedbs + pkgss <- getInstalledPackages' verbosity packagedbs conf + index <- toPackageIndex verbosity pkgss conf + return $! 
index + +toPackageIndex :: Verbosity + -> [(PackageDB, [InstalledPackageInfo])] + -> ProgramConfiguration + -> IO InstalledPackageIndex +toPackageIndex verbosity pkgss conf = do + -- On Windows, various fields have $topdir/foo rather than full + -- paths. We need to substitute the right value in so that when + -- we, for example, call gcc, we have proper paths to give it. + topDir <- getLibDir' verbosity ghcjsProg + let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) + | (_, pkgs) <- pkgss ] + return $! (mconcat indices) + + where + Just ghcjsProg = lookupProgram ghcjsProgram conf + +checkPackageDbEnvVar :: IO () +checkPackageDbEnvVar = + Internal.checkPackageDbEnvVar "GHCJS" "GHCJS_PACKAGE_PATH" + +checkPackageDbStack :: PackageDBStack -> IO () +checkPackageDbStack (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStack rest + | GlobalPackageDB `notElem` rest = + die $ "With current ghc versions the global package db is always used " + ++ "and must be listed first. This ghc limitation may be lifted in " + ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977" +checkPackageDbStack _ = + die $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" + +getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration + -> IO [(PackageDB, [InstalledPackageInfo])] +getInstalledPackages' verbosity packagedbs conf = + sequence + [ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb + return (packagedb, pkgs) + | packagedb <- packagedbs ] + +getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath +getLibDir verbosity lbi = + (reverse . dropWhile isSpace . reverse) `fmap` + rawSystemProgramStdoutConf verbosity ghcjsProgram + (withPrograms lbi) ["--print-libdir"] + +getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath +getLibDir' verbosity ghcjsProg = + (reverse . dropWhile isSpace . reverse) `fmap` + rawSystemProgramStdout verbosity ghcjsProg ["--print-libdir"] + +-- | Return the 'FilePath' to the global GHC package database. +getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath +getGlobalPackageDB verbosity ghcjsProg = + (reverse . dropWhile isSpace . 
reverse) `fmap` + rawSystemProgramStdout verbosity ghcjsProg ["--print-global-package-db"] + +toJSLibName :: String -> String +toJSLibName lib + | takeExtension lib `elem` [".dll",".dylib",".so"] + = replaceExtension lib "js_so" + | takeExtension lib == ".a" = replaceExtension lib "js_a" + | otherwise = lib <.> "js_a" + +buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo + -> IO () +buildLib = buildOrReplLib False +replLib = buildOrReplLib True + +buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildOrReplLib forRepl verbosity numJobs _pkg_descr lbi lib clbi = do + let libName = componentUnitId clbi + libTargetDir = buildDir lbi + whenVanillaLib forceVanilla = + when (not forRepl && (forceVanilla || withVanillaLib lbi)) + whenProfLib = when (not forRepl && withProfLib lbi) + whenSharedLib forceShared = + when (not forRepl && (forceShared || withSharedLib lbi)) + whenGHCiLib = when (not forRepl && withGHCiLib lbi && withVanillaLib lbi) + ifReplLib = when forRepl + comp = compiler lbi + platform = hostPlatform lbi + implInfo = getImplInfo comp + nativeToo = ghcjsNativeToo comp + + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + let runGhcjsProg = runGHC verbosity ghcjsProg comp platform + libBi = libBuildInfo lib + isGhcjsDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi + forceVanillaLib = doingTH && not isGhcjsDynamic + forceSharedLib = doingTH && isGhcjsDynamic + -- TH always needs default libs, even when building for profiling + + -- Determine if program coverage should be enabled and if so, what + -- '-hpcdir' should be. + let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi + -- Component name. Not 'libName' because that has the "HS" prefix + -- that GHC gives Haskell libraries. + cname = display $ PD.package $ localPkgDescr lbi + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname + | otherwise = Mon.mempty + + createDirectoryIfMissingVerbose verbosity True libTargetDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? 
+ let cObjs = map (`replaceExtension` objExtension) (cSources libBi) + jsSrcs = jsSources libBi + baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir + linkJsLibOpts = mempty { + ghcOptExtra = toNubListR $ + [ "-link-js-lib" , getHSLibraryName libName + , "-js-lib-outputdir", libTargetDir ] ++ + concatMap (\x -> ["-js-lib-src",x]) jsSrcs + } + vanillaOptsNoJsLib = baseOpts `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptNumJobs = numJobs, + ghcOptInputModules = toNubListR $ libModules lib, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + vanillaOpts = vanillaOptsNoJsLib `mappend` linkJsLibOpts + + profOpts = adjustExts "p_hi" "p_o" vanillaOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = toNubListR $ + ghcjsProfOptions libBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + sharedOpts = adjustExts "dyn_hi" "dyn_o" vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptExtra = toNubListR $ + ghcjsSharedOptions libBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi, + ghcOptLinkLibs = toNubListR $ extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, + ghcOptInputFiles = + toNubListR $ [libTargetDir x | x <- cObjs] ++ jsSrcs + } + replOpts = vanillaOptsNoJsLib { + ghcOptExtra = overNubListR + Internal.filterGhciFlags + (ghcOptExtra vanillaOpts), + ghcOptNumJobs = mempty + } + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + + vanillaSharedOpts = vanillaOpts `mappend` + mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptDynHiSuffix = toFlag "dyn_hi", + ghcOptDynObjSuffix = toFlag "dyn_o", + ghcOptHPCDir = hpcdir Hpc.Dyn + } + + unless (forRepl || (null (libModules lib) && null jsSrcs && null cObjs)) $ + do let vanilla = whenVanillaLib forceVanillaLib (runGhcjsProg vanillaOpts) + shared = whenSharedLib forceSharedLib (runGhcjsProg sharedOpts) + useDynToo = dynamicTooSupported && + (forceVanillaLib || withVanillaLib lbi) && + (forceSharedLib || withSharedLib lbi) && + null (ghcjsSharedOptions libBi) + if useDynToo + then do + runGhcjsProg vanillaSharedOpts + case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of + (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do + -- When the vanilla and shared library builds are done + -- in one pass, only one set of HPC module interfaces + -- are generated. This set should suffice for both + -- static and dynamically linked executables. We copy + -- the modules interfaces so they are available under + -- both ways. + copyDirectoryRecursive verbosity dynDir vanillaDir + _ -> return () + else if isGhcjsDynamic + then do shared; vanilla + else do vanilla; shared + whenProfLib (runGhcjsProg profOpts) + + -- build any C sources + unless (null (cSources libBi) || not nativeToo) $ do + info verbosity "Building C Sources..." 
+ sequence_ + [ do let vanillaCcOpts = + (Internal.componentCcGhcOptions verbosity implInfo + lbi libBi clbi libTargetDir filename) + profCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptObjSuffix = toFlag "p_o" + } + sharedCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptObjSuffix = toFlag "dyn_o" + } + odir = fromFlag (ghcOptObjDir vanillaCcOpts) + createDirectoryIfMissingVerbose verbosity True odir + runGhcjsProg vanillaCcOpts + whenSharedLib forceSharedLib (runGhcjsProg sharedCcOpts) + whenProfLib (runGhcjsProg profCcOpts) + | filename <- cSources libBi] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + unless (null (libModules lib)) $ + ifReplLib (runGhcjsProg replOpts) + + -- link: + when (nativeToo && not forRepl) $ do + info verbosity "Linking..." + let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) + (cSources libBi) + cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) + (cSources libBi) + cid = compilerId (compiler lbi) + vanillaLibFilePath = libTargetDir mkLibName libName + profileLibFilePath = libTargetDir mkProfLibName libName + sharedLibFilePath = libTargetDir mkSharedLibName cid libName + ghciLibFilePath = libTargetDir Internal.mkGHCiLibName libName + + hObjs <- Internal.getHaskellObjects implInfo lib lbi + libTargetDir objExtension True + hProfObjs <- + if (withProfLib lbi) + then Internal.getHaskellObjects implInfo lib lbi + libTargetDir ("p_" ++ objExtension) True + else return [] + hSharedObjs <- + if (withSharedLib lbi) + then Internal.getHaskellObjects implInfo lib lbi + libTargetDir ("dyn_" ++ objExtension) False + else return [] + + unless (null hObjs && null cObjs) $ do + + let staticObjectFiles = + hObjs + ++ map (libTargetDir ) cObjs + profObjectFiles = + hProfObjs + ++ map (libTargetDir ) cProfObjs + ghciObjFiles = + hObjs + ++ map (libTargetDir ) cObjs + dynamicObjectFiles = + hSharedObjs + ++ map (libTargetDir ) cSharedObjs + -- After the relocation lib is created we invoke ghc -shared + -- with the dependencies spelled out as -package arguments + -- and ghc invokes the linker with the proper library paths + ghcSharedLinkArgs = + mempty { + ghcOptShared = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptInputFiles = toNubListR dynamicObjectFiles, + ghcOptOutputFile = toFlag sharedLibFilePath, + ghcOptExtra = toNubListR $ + ghcjsSharedOptions libBi, + ghcOptNoAutoLinkPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ + Internal.mkGhcOptPackages clbi, + ghcOptLinkLibs = toNubListR $ extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi + } + + whenVanillaLib False $ do + Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles + + whenProfLib $ do + Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles + + whenGHCiLib $ do + (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity ldProg + ghciLibFilePath ghciObjFiles + + whenSharedLib False $ + runGhcjsProg ghcSharedLinkArgs + +-- | Start a REPL without loading any source files. 
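-- [Editorial note, not part of the patch] The startInterpreter function below
-- only sets the interactive mode flag and the package DB stack before handing
-- off to runGHC with the configured ghcjs program. The helper replFromLBI is a
-- hypothetical sketch (not part of Cabal) showing how a caller holding a
-- configured LocalBuildInfo would supply its arguments, using accessors that
-- already appear elsewhere in this patch; it assumes the surrounding module's
-- imports are in scope.

replFromLBI :: Verbosity -> LocalBuildInfo -> IO ()
replFromLBI verbosity lbi =
  startInterpreter verbosity
                   (withPrograms lbi)   -- program db holding the configured ghcjs
                   (compiler lbi)       -- the GHCJS Compiler record from configure
                   (hostPlatform lbi)   -- target platform
                   (withPackageDB lbi)  -- package DB stack to expose in the REPL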
+startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity conf comp platform packageDBs = do + let replOpts = mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptPackageDBs = packageDBs + } + checkPackageDbStack packageDBs + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram conf + runGHC verbosity ghcjsProg comp platform replOpts + +buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe = buildOrReplExe False +replExe = buildOrReplExe True + +buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi + exe@Executable { exeName = exeName', modulePath = modPath } clbi = do + + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + let comp = compiler lbi + platform = hostPlatform lbi + implInfo = getImplInfo comp + runGhcjsProg = runGHC verbosity ghcjsProg comp platform + exeBi = buildInfo exe + + -- exeNameReal, the name that GHC really uses (with .exe on Windows) + let exeNameReal = exeName' <.> + (if takeExtension exeName' /= ('.':exeExtension) + then exeExtension + else "") + + let targetDir = (buildDir lbi) exeName' + let exeDir = targetDir (exeName' ++ "-tmp") + createDirectoryIfMissingVerbose verbosity True targetDir + createDirectoryIfMissingVerbose verbosity True exeDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? FIX: what about exeName.hi-boot? + + -- Determine if program coverage should be enabled and if so, what + -- '-hpcdir' should be. 
+ let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName' + | otherwise = mempty + + -- build executables + + srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath + let isGhcjsDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + buildRunner = case clbi of + ExeComponentLocalBuildInfo {} -> False + _ -> True + isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] + jsSrcs = jsSources exeBi + cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] + cObjs = map (`replaceExtension` objExtension) cSrcs + nativeToo = ghcjsNativeToo comp + baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) + `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptInputFiles = toNubListR $ + [ srcMainFile | isHaskellMain], + ghcOptInputModules = toNubListR $ + [ m | not isHaskellMain, m <- exeModules exe], + ghcOptExtra = + if buildRunner then toNubListR ["-build-runner"] + else mempty + } + staticOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticOnly, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + profOpts = adjustExts "p_hi" "p_o" baseOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = toNubListR $ ghcjsProfOptions exeBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + dynOpts = adjustExts "dyn_hi" "dyn_o" baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptExtra = toNubListR $ + ghcjsSharedOptions exeBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + dynTooOpts = adjustExts "dyn_hi" "dyn_o" staticOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi, + ghcOptLinkLibs = toNubListR $ extraLibs exeBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi, + ghcOptInputFiles = toNubListR $ + [exeDir x | x <- cObjs] ++ jsSrcs + } + replOpts = baseOpts { + ghcOptExtra = overNubListR + Internal.filterGhciFlags + (ghcOptExtra baseOpts) + } + -- For a normal compile we do separate invocations of ghc for + -- compiling as for linking. But for repl we have to do just + -- the one invocation, so that one has to include all the + -- linker stuff too, like -l flags and any .o files from C + -- files etc. + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + commonOpts | withProfExe lbi = profOpts + | withDynExe lbi = dynOpts + | otherwise = staticOpts + compileOpts | useDynToo = dynTooOpts + | otherwise = commonOpts + withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) + + -- For building exe's that use TH with -prof or -dynamic we actually have + -- to build twice, once without -prof/-dynamic and then again with + -- -prof/-dynamic. This is because the code that TH needs to run at + -- compile time needs to be the vanilla ABI so it can be loaded up and run + -- by the compiler. + -- With dynamic-by-default GHC the TH object files loaded at compile-time + -- need to be .dyn_o instead of .o. + doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi + -- Should we use -dynamic-too instead of compiling twice? 
+ useDynToo = dynamicTooSupported && isGhcjsDynamic + && doingTH && withStaticExe && null (ghcjsSharedOptions exeBi) + compileTHOpts | isGhcjsDynamic = dynOpts + | otherwise = staticOpts + compileForTH + | forRepl = False + | useDynToo = False + | isGhcjsDynamic = doingTH && (withProfExe lbi || withStaticExe) + | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) + + linkOpts = commonOpts `mappend` + linkerOpts `mappend` mempty { + ghcOptLinkNoHsMain = toFlag (not isHaskellMain) + } + + -- Build static/dynamic object files for TH, if needed. + when compileForTH $ + runGhcjsProg compileTHOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + unless forRepl $ + runGhcjsProg compileOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + -- build any C sources + unless (null cSrcs || not nativeToo) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi + clbi exeDir filename) `mappend` mempty { + ghcOptDynLinkMode = toFlag (if withDynExe lbi + then GhcDynamicOnly + else GhcStaticOnly), + ghcOptProfilingMode = toFlag (withProfExe lbi) + } + odir = fromFlag (ghcOptObjDir opts) + createDirectoryIfMissingVerbose verbosity True odir + runGhcjsProg opts + | filename <- cSrcs ] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + when forRepl $ runGhcjsProg replOpts + + -- link: + unless forRepl $ do + info verbosity "Linking..." + runGhcjsProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } + +-- |Install for ghc, .hi, .a and, if --with-ghci given, .o +installLib :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do + whenVanilla $ copyModuleFiles "js_hi" + whenProf $ copyModuleFiles "js_p_hi" + whenShared $ copyModuleFiles "js_dyn_hi" + + whenVanilla $ installOrdinary builtDir targetDir $ toJSLibName vanillaLibName + whenProf $ installOrdinary builtDir targetDir $ toJSLibName profileLibName + whenShared $ installShared builtDir dynlibTargetDir $ toJSLibName sharedLibName + + when (ghcjsNativeToo $ compiler lbi) $ do + -- copy .hi files over: + whenVanilla $ copyModuleFiles "hi" + whenProf $ copyModuleFiles "p_hi" + whenShared $ copyModuleFiles "dyn_hi" + + -- copy the built library files over: + whenVanilla $ installOrdinaryNative builtDir targetDir vanillaLibName + whenProf $ installOrdinaryNative builtDir targetDir profileLibName + whenGHCi $ installOrdinaryNative builtDir targetDir ghciLibName + whenShared $ installSharedNative builtDir dynlibTargetDir sharedLibName + + where + install isShared isJS srcDir dstDir name = do + let src = srcDir name + dst = dstDir name + createDirectoryIfMissingVerbose verbosity True dstDir + + if isShared + then installExecutableFile verbosity src dst + else installOrdinaryFile verbosity src dst + + when (stripLibs lbi && not isJS) $ + Strip.stripLib verbosity + (hostPlatform lbi) (withPrograms lbi) dst + + installOrdinary = install False True + installShared = install True True + + installOrdinaryNative = install False False + installSharedNative = install True False + + copyModuleFiles ext = + findModuleFiles [builtDir] [ext] (libModules lib) + >>= 
installOrdinaryFiles verbosity targetDir + + cid = compilerId (compiler lbi) + libName = componentUnitId clbi + vanillaLibName = mkLibName libName + profileLibName = mkProfLibName libName + ghciLibName = Internal.mkGHCiLibName libName + sharedLibName = (mkSharedLibName cid) libName + + hasLib = not $ null (libModules lib) + && null (cSources (libBuildInfo lib)) + whenVanilla = when (hasLib && withVanillaLib lbi) + whenProf = when (hasLib && withProfLib lbi) + whenGHCi = when (hasLib && withGHCiLib lbi) + whenShared = when (hasLib && withSharedLib lbi) + +installExe :: Verbosity + -> LocalBuildInfo + -> InstallDirs FilePath -- ^Where to copy the files to + -> FilePath -- ^Build location + -> (FilePath, FilePath) -- ^Executable (prefix,suffix) + -> PackageDescription + -> Executable + -> IO () +installExe verbosity lbi installDirs buildPref + (progprefix, progsuffix) _pkg exe = do + let binDir = bindir installDirs + createDirectoryIfMissingVerbose verbosity True binDir + let exeFileName = exeName exe + fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix + installBinary dest = do + rawSystemProgramConf verbosity ghcjsProgram (withPrograms lbi) $ + [ "--install-executable" + , buildPref exeName exe exeFileName + , "-o", dest + ] ++ + case (stripExes lbi, lookupProgram stripProgram $ withPrograms lbi) of + (True, Just strip) -> ["-strip-program", programPath strip] + _ -> [] + installBinary (binDir fixedExeBaseName) + +libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO String +libAbiHash verbosity _pkg_descr lbi lib clbi = do + let + libBi = libBuildInfo lib + comp = compiler lbi + platform = hostPlatform lbi + vanillaArgs = + (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) + `mappend` mempty { + ghcOptMode = toFlag GhcModeAbiHash, + ghcOptInputModules = toNubListR $ PD.exposedModules lib + } + profArgs = adjustExts "js_p_hi" "js_p_o" vanillaArgs `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = toNubListR (ghcjsProfOptions libBi) + } + ghcArgs = if withVanillaLib lbi then vanillaArgs + else if withProfLib lbi then profArgs + else error "libAbiHash: Can't find an enabled library way" + -- + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + hash <- getProgramInvocationOutput verbosity + (ghcInvocation ghcjsProg comp platform ghcArgs) + return (takeWhile (not . 
isSpace) hash) + +adjustExts :: String -> String -> GhcOptions -> GhcOptions +adjustExts hiSuf objSuf opts = + opts `mappend` mempty { + ghcOptHiSuffix = toFlag hiSuf, + ghcOptObjSuffix = toFlag objSuf + } + +registerPackage :: Verbosity + -> ProgramConfiguration + -> Bool + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo + | multiInstance + = HcPkg.registerMultiInstance (hcPkgInfo progdb) verbosity + packageDbs installedPkgInfo + + | otherwise + = HcPkg.reregister (hcPkgInfo progdb) verbosity + packageDbs (Right installedPkgInfo) + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + let opts = Internal.componentGhcOptions verbosity lbi bi clbi odir + in opts { ghcOptExtra = ghcOptExtra opts `mappend` toNubListR + (hcOptions GHCJS bi) + } + +ghcjsProfOptions :: BuildInfo -> [String] +ghcjsProfOptions bi = + hcProfOptions GHC bi `mappend` hcProfOptions GHCJS bi + +ghcjsSharedOptions :: BuildInfo -> [String] +ghcjsSharedOptions bi = + hcSharedOptions GHC bi `mappend` hcSharedOptions GHCJS bi + +isDynamic :: Compiler -> Bool +isDynamic = Internal.ghcLookupProperty "GHC Dynamic" + +supportsDynamicToo :: Compiler -> Bool +supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" + +findGhcjsGhcVersion :: Verbosity -> FilePath -> IO (Maybe Version) +findGhcjsGhcVersion verbosity pgm = + findProgramVersion "--numeric-ghc-version" id verbosity pgm + +findGhcjsPkgGhcjsVersion :: Verbosity -> FilePath -> IO (Maybe Version) +findGhcjsPkgGhcjsVersion verbosity pgm = + findProgramVersion "--numeric-ghcjs-version" id verbosity pgm + +-- ----------------------------------------------------------------------------- +-- Registering + +hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo +hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcjsPkgProg + , HcPkg.noPkgDbStack = False + , HcPkg.noVerboseFlag = False + , HcPkg.flagPackageConf = False + , HcPkg.supportsDirDbs = True + , HcPkg.requiresDirDbs = v >= [7,10] + , HcPkg.nativeMultiInstance = v >= [7,10] + , HcPkg.recacheMultiInstance = True + } + where + v = versionBranch ver + Just ghcjsPkgProg = lookupProgram ghcjsPkgProgram conf + Just ver = programVersion ghcjsPkgProg + +-- | Get the JavaScript file name and command and arguments to run a +-- program compiled by GHCJS +-- the exe should be the base program name without exe extension +runCmd :: ProgramConfiguration -> FilePath + -> (FilePath, FilePath, [String]) +runCmd conf exe = + ( script + , programPath ghcjsProg + , programDefaultArgs ghcjsProg ++ programOverrideArgs ghcjsProg ++ ["--run"] + ) + where + script = exe <.> "jsexe" "all" <.> "js" + Just ghcjsProg = lookupProgram ghcjsProgram conf diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Haddock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Haddock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Haddock.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,782 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Haddock +-- Copyright : Isaac Jones 
2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module deals with the @haddock@ and @hscolour@ commands. +-- It uses information about installed packages (from @ghc-pkg@) to find the +-- locations of documentation for dependent packages, so it can create links. +-- +-- The @hscolour@ support allows generating HTML versions of the original +-- source, with coloured syntax highlighting. + +module Distribution.Simple.Haddock ( + haddock, hscolour, + + haddockPackagePaths + ) where + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS + +-- local +import Distribution.Compat.Semigroup as Semi +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Program.GHC +import Distribution.Simple.Program +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.Build +import Distribution.Simple.InstallDirs +import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) +import Distribution.Simple.BuildPaths +import qualified Distribution.Simple.PackageIndex as PackageIndex +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Text +import Distribution.Utils.NubList +import Distribution.Version +import Distribution.Verbosity +import Language.Haskell.Extension + + +import Control.Monad ( when, forM_ ) +import Data.Char ( isSpace ) +import Data.Either ( rights ) +import Data.Foldable ( traverse_, foldl' ) +import Data.Maybe ( fromMaybe, listToMaybe ) +import GHC.Generics ( Generic ) + +import System.Directory (doesFileExist) +import System.FilePath ( (), (<.>) + , normalise, splitPath, joinPath, isAbsolute ) +import System.IO (hClose, hPutStr, hPutStrLn, hSetEncoding, utf8) + +-- ------------------------------------------------------------------------------ +-- Types + +-- | A record that represents the arguments to the haddock executable, a product +-- monoid. +data HaddockArgs = HaddockArgs { + argInterfaceFile :: Flag FilePath, + -- ^ Path to the interface file, relative to argOutputDir, required. + argPackageName :: Flag PackageIdentifier, + -- ^ Package name, required. + argHideModules :: (All,[ModuleName.ModuleName]), + -- ^ (Hide modules ?, modules to hide) + argIgnoreExports :: Any, + -- ^ Ignore export lists in modules? + argLinkSource :: Flag (Template,Template,Template), + -- ^ (Template for modules, template for symbols, template for lines). + argCssFile :: Flag FilePath, + -- ^ Optional custom CSS file. + argContents :: Flag String, + -- ^ Optional URL to contents page. + argVerbose :: Any, + argOutput :: Flag [Output], + -- ^ HTML or Hoogle doc or both? Required. + argInterfaces :: [(FilePath, Maybe String)], + -- ^ [(Interface file, URL to the HTML docs for links)]. + argOutputDir :: Directory, + -- ^ Where to generate the documentation. + argTitle :: Flag String, + -- ^ Page title, required. + argPrologue :: Flag String, + -- ^ Prologue text, required. + argGhcOptions :: Flag (GhcOptions, Version), + -- ^ Additional flags to pass to GHC. + argGhcLibDir :: Flag FilePath, + -- ^ To find the correct GHC, required. + argTargets :: [FilePath] + -- ^ Modules to process. 
+} deriving Generic + +-- | The FilePath of a directory, it's a monoid under '()'. +newtype Directory = Dir { unDir' :: FilePath } deriving (Read,Show,Eq,Ord) + +unDir :: Directory -> FilePath +unDir = joinPath . filter (\p -> p /="./" && p /= ".") . splitPath . unDir' + +type Template = String + +data Output = Html | Hoogle + +-- ------------------------------------------------------------------------------ +-- Haddock support + +haddock :: PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HaddockFlags + -> IO () +haddock pkg_descr _ _ haddockFlags + | not (hasLibs pkg_descr) + && not (fromFlag $ haddockExecutables haddockFlags) + && not (fromFlag $ haddockTestSuites haddockFlags) + && not (fromFlag $ haddockBenchmarks haddockFlags) = + warn (fromFlag $ haddockVerbosity haddockFlags) $ + "No documentation was generated as this package does not contain " + ++ "a library. Perhaps you want to use the --executables, --tests or" + ++ " --benchmarks flags." + +haddock pkg_descr lbi suffixes flags' = do + let verbosity = flag haddockVerbosity + comp = compiler lbi + platform = hostPlatform lbi + + flags = case haddockTarget of + ForDevelopment -> flags' + ForHackage -> flags' + { haddockHoogle = Flag True + , haddockHtml = Flag True + , haddockHtmlLocation = Flag (pkg_url ++ "/docs") + , haddockContents = Flag (toPathTemplate pkg_url) + , haddockHscolour = Flag True + } + pkg_url = "/package/$pkg-$version" + flag f = fromFlag $ f flags + + tmpFileOpts = defaultTempFileOptions + { optKeepTempFiles = flag haddockKeepTempFiles } + htmlTemplate = fmap toPathTemplate . flagToMaybe . haddockHtmlLocation + $ flags + haddockTarget = + haddockTargetFromFlag (haddockForHackage flags') + + setupMessage verbosity "Running Haddock for" (packageId pkg_descr) + (confHaddock, version, _) <- + requireProgramVersion verbosity haddockProgram + (orLaterVersion (Version [2,0] [])) (withPrograms lbi) + + -- various sanity checks + when ( flag haddockHoogle + && version < Version [2,2] []) $ + die "haddock 2.0 and 2.1 do not support the --hoogle flag." 
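The Directory newtype above wraps a FilePath, and unDir strips redundant "." components so that concatenated output directories stay tidy. A short usage sketch of that normalisation, self-contained apart from System.FilePath:

import System.FilePath (splitPath, joinPath)

-- Drop "." components, as unDir does above.
unDir :: FilePath -> FilePath
unDir = joinPath . filter (\p -> p /= "./" && p /= ".") . splitPath

main :: IO ()
main = putStrLn (unDir "./dist/./doc/html")   -- dist/doc/html
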
+ + haddockGhcVersionStr <- rawSystemProgramStdout verbosity confHaddock + ["--ghc-version"] + case (simpleParse haddockGhcVersionStr, compilerCompatVersion GHC comp) of + (Nothing, _) -> die "Could not get GHC version from Haddock" + (_, Nothing) -> die "Could not get GHC version from compiler" + (Just haddockGhcVersion, Just ghcVersion) + | haddockGhcVersion == ghcVersion -> return () + | otherwise -> die $ + "Haddock's internal GHC version must match the configured " + ++ "GHC version.\n" + ++ "The GHC version is " ++ display ghcVersion ++ " but " + ++ "haddock is using GHC version " ++ display haddockGhcVersion + + -- the tools match the requests, we can proceed + + initialBuildSteps (flag haddockDistPref) pkg_descr lbi verbosity + + when (flag haddockHscolour) $ + hscolour' (warn verbosity) haddockTarget pkg_descr lbi suffixes + (defaultHscolourFlags `mappend` haddockToHscolour flags) + + libdirArgs <- getGhcLibDir verbosity lbi + let commonArgs = mconcat + [ libdirArgs + , fromFlags (haddockTemplateEnv lbi (packageId pkg_descr)) flags + , fromPackageDescription haddockTarget pkg_descr ] + + let pre c = preprocessComponent pkg_descr c lbi False verbosity suffixes + withAllComponentsInBuildOrder pkg_descr lbi $ \component clbi -> do + pre component + let + doExe com = case (compToExe com) of + Just exe -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ + \tmp -> do + exeArgs <- fromExecutable verbosity tmp lbi exe clbi htmlTemplate + version + let exeArgs' = commonArgs `mappend` exeArgs + runHaddock verbosity tmpFileOpts comp platform + confHaddock exeArgs' + Nothing -> do + warn (fromFlag $ haddockVerbosity flags) + "Unsupported component, skipping..." + return () + case component of + CLib lib -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ + \tmp -> do + libArgs <- fromLibrary verbosity tmp lbi lib clbi htmlTemplate + version + let libArgs' = commonArgs `mappend` libArgs + runHaddock verbosity tmpFileOpts comp platform confHaddock libArgs' + CExe _ -> when (flag haddockExecutables) $ doExe component + CTest _ -> when (flag haddockTestSuites) $ doExe component + CBench _ -> when (flag haddockBenchmarks) $ doExe component + + forM_ (extraDocFiles pkg_descr) $ \ fpath -> do + files <- matchFileGlob fpath + forM_ files $ copyFileTo verbosity (unDir $ argOutputDir commonArgs) + +-- ------------------------------------------------------------------------------ +-- Contributions to HaddockArgs. + +fromFlags :: PathTemplateEnv -> HaddockFlags -> HaddockArgs +fromFlags env flags = + mempty { + argHideModules = (maybe mempty (All . not) + $ flagToMaybe (haddockInternal flags), mempty), + argLinkSource = if fromFlag (haddockHscolour flags) + then Flag ("src/%{MODULE/./-}.html" + ,"src/%{MODULE/./-}.html#%{NAME}" + ,"src/%{MODULE/./-}.html#line-%{LINE}") + else NoFlag, + argCssFile = haddockCss flags, + argContents = fmap (fromPathTemplate . substPathTemplate env) + (haddockContents flags), + argVerbose = maybe mempty (Any . (>= deafening)) + . flagToMaybe $ haddockVerbosity flags, + argOutput = + Flag $ case [ Html | Flag True <- [haddockHtml flags] ] ++ + [ Hoogle | Flag True <- [haddockHoogle flags] ] + of [] -> [ Html ] + os -> os, + argOutputDir = maybe mempty Dir . 
flagToMaybe $ haddockDistPref flags + } + +fromPackageDescription :: HaddockTarget -> PackageDescription -> HaddockArgs +fromPackageDescription haddockTarget pkg_descr = + mempty { argInterfaceFile = Flag $ haddockName pkg_descr, + argPackageName = Flag $ packageId $ pkg_descr, + argOutputDir = Dir $ + "doc" "html" haddockDirName haddockTarget pkg_descr, + argPrologue = Flag $ if null desc then synopsis pkg_descr + else desc, + argTitle = Flag $ showPkg ++ subtitle + } + where + desc = PD.description pkg_descr + showPkg = display (packageId pkg_descr) + subtitle | null (synopsis pkg_descr) = "" + | otherwise = ": " ++ synopsis pkg_descr + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + let f = case compilerFlavor (compiler lbi) of + GHC -> GHC.componentGhcOptions + GHCJS -> GHCJS.componentGhcOptions + _ -> error $ + "Distribution.Simple.Haddock.componentGhcOptions:" ++ + "haddock only supports GHC and GHCJS" + in f verbosity lbi bi clbi odir + +fromLibrary :: Verbosity + -> FilePath + -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> IO HaddockArgs +fromLibrary verbosity tmp lbi lib clbi htmlTemplate haddockVersion = do + inFiles <- map snd `fmap` getLibSourceFiles lbi lib + ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate + let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { + -- Noooooooooo!!!!!111 + -- haddock stomps on our precious .hi + -- and .o files. Workaround by telling + -- haddock to write them elsewhere. + ghcOptObjDir = toFlag tmp, + ghcOptHiDir = toFlag tmp, + ghcOptStubDir = toFlag tmp + } `mappend` getGhcCppOpts haddockVersion bi + sharedOpts = vanillaOpts { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = + toNubListR $ hcSharedOptions GHC bi + + } + opts <- if withVanillaLib lbi + then return vanillaOpts + else if withSharedLib lbi + then return sharedOpts + else die $ "Must have vanilla or shared libraries " + ++ "enabled in order to run haddock" + ghcVersion <- maybe (die "Compiler has no GHC version") + return + (compilerCompatVersion GHC (compiler lbi)) + + return ifaceArgs { + argHideModules = (mempty,otherModules $ bi), + argGhcOptions = toFlag (opts, ghcVersion), + argTargets = inFiles + } + where + bi = libBuildInfo lib + +fromExecutable :: Verbosity + -> FilePath + -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> IO HaddockArgs +fromExecutable verbosity tmp lbi exe clbi htmlTemplate haddockVersion = do + inFiles <- map snd `fmap` getExeSourceFiles lbi exe + ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate + let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { + -- Noooooooooo!!!!!111 + -- haddock stomps on our precious .hi + -- and .o files. Workaround by telling + -- haddock to write them elsewhere. 
+ ghcOptObjDir = toFlag tmp, + ghcOptHiDir = toFlag tmp, + ghcOptStubDir = toFlag tmp + } `mappend` getGhcCppOpts haddockVersion bi + sharedOpts = vanillaOpts { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = + toNubListR $ hcSharedOptions GHC bi + } + opts <- if withVanillaLib lbi + then return vanillaOpts + else if withSharedLib lbi + then return sharedOpts + else die $ "Must have vanilla or shared libraries " + ++ "enabled in order to run haddock" + ghcVersion <- maybe (die "Compiler has no GHC version") + return + (compilerCompatVersion GHC (compiler lbi)) + + return ifaceArgs { + argGhcOptions = toFlag (opts, ghcVersion), + argOutputDir = Dir (exeName exe), + argTitle = Flag (exeName exe), + argTargets = inFiles + } + where + bi = buildInfo exe + +compToExe :: Component -> Maybe Executable +compToExe comp = + case comp of + CTest test@TestSuite { testInterface = TestSuiteExeV10 _ f } -> + Just Executable { + exeName = testName test, + modulePath = f, + buildInfo = testBuildInfo test + } + CBench bench@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } -> + Just Executable { + exeName = benchmarkName bench, + modulePath = f, + buildInfo = benchmarkBuildInfo bench + } + CExe exe -> Just exe + _ -> Nothing + +getInterfaces :: Verbosity + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> IO HaddockArgs +getInterfaces verbosity lbi clbi htmlTemplate = do + (packageFlags, warnings) <- haddockPackageFlags lbi clbi htmlTemplate + traverse_ (warn verbosity) warnings + return $ mempty { + argInterfaces = packageFlags + } + +getGhcCppOpts :: Version + -> BuildInfo + -> GhcOptions +getGhcCppOpts haddockVersion bi = + mempty { + ghcOptExtensions = toNubListR [EnableExtension CPP | needsCpp], + ghcOptCppOptions = toNubListR defines + } + where + needsCpp = EnableExtension CPP `elem` usedExtensions bi + defines = [haddockVersionMacro] + haddockVersionMacro = "-D__HADDOCK_VERSION__=" + ++ show (v1 * 1000 + v2 * 10 + v3) + where + [v1, v2, v3] = take 3 $ versionBranch haddockVersion ++ [0,0] + +getGhcLibDir :: Verbosity -> LocalBuildInfo + -> IO HaddockArgs +getGhcLibDir verbosity lbi = do + l <- case compilerFlavor (compiler lbi) of + GHC -> GHC.getLibDir verbosity lbi + GHCJS -> GHCJS.getLibDir verbosity lbi + _ -> error "haddock only supports GHC and GHCJS" + return $ mempty { argGhcLibDir = Flag l } + +-- ------------------------------------------------------------------------------ +-- | Call haddock with the specified arguments. 
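getGhcCppOpts above switches CPP on only when the component itself uses the extension, and then defines __HADDOCK_VERSION__ from the first three components of the haddock version. A small standalone sketch of that encoding (same arithmetic as above; the function name is made up for illustration):

-- For haddock a.b.c the macro value is a*1000 + b*10 + c,
-- e.g. version 2.17.3 encodes to 2173.
haddockVersionMacro :: [Int] -> String
haddockVersionMacro branch =
  "-D__HADDOCK_VERSION__=" ++ show (v1 * 1000 + v2 * 10 + v3)
  where
    [v1, v2, v3] = take 3 (branch ++ [0, 0])

main :: IO ()
main = putStrLn (haddockVersionMacro [2, 17, 3])
-- -D__HADDOCK_VERSION__=2173
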
+runHaddock :: Verbosity + -> TempFileOptions + -> Compiler + -> Platform + -> ConfiguredProgram + -> HaddockArgs + -> IO () +runHaddock verbosity tmpFileOpts comp platform confHaddock args = do + let haddockVersion = fromMaybe (error "unable to determine haddock version") + (programVersion confHaddock) + renderArgs verbosity tmpFileOpts haddockVersion comp platform args $ + \(flags,result)-> do + + rawSystemProgram verbosity confHaddock flags + + notice verbosity $ "Documentation created: " ++ result + + +renderArgs :: Verbosity + -> TempFileOptions + -> Version + -> Compiler + -> Platform + -> HaddockArgs + -> (([String], FilePath) -> IO a) + -> IO a +renderArgs verbosity tmpFileOpts version comp platform args k = do + let haddockSupportsUTF8 = version >= Version [2,14,4] [] + haddockSupportsResponseFiles = version > Version [2,16,2] [] + createDirectoryIfMissingVerbose verbosity True outputDir + withTempFileEx tmpFileOpts outputDir "haddock-prologue.txt" $ + \prologueFileName h -> do + do + when haddockSupportsUTF8 (hSetEncoding h utf8) + hPutStrLn h $ fromFlag $ argPrologue args + hClose h + let pflag = "--prologue=" ++ prologueFileName + renderedArgs = pflag : renderPureArgs version comp platform args + if haddockSupportsResponseFiles + then + withTempFileEx tmpFileOpts outputDir "haddock-response.txt" $ + \responseFileName hf -> do + when haddockSupportsUTF8 (hSetEncoding hf utf8) + let responseContents = + unlines $ map escapeArg renderedArgs + hPutStr hf responseContents + hClose hf + info verbosity $ responseFileName ++ " contents: <<<" + info verbosity responseContents + info verbosity $ ">>> " ++ responseFileName + let respFile = "@" ++ responseFileName + k ([respFile], result) + else + k (renderedArgs, result) + where + outputDir = (unDir $ argOutputDir args) + result = intercalate ", " + . map (\o -> outputDir + case o of + Html -> "index.html" + Hoogle -> pkgstr <.> "txt") + $ arg argOutput + where + pkgstr = display $ packageName pkgid + pkgid = arg argPackageName + arg f = fromFlag $ f args + -- Support a gcc-like response file syntax. Each separate + -- argument and its possible parameter(s), will be separated in the + -- response file by an actual newline; all other whitespace, + -- single quotes, double quotes, and the character used for escaping + -- (backslash) are escaped. The called program will need to do a similar + -- inverse operation to de-escape and re-constitute the argument list. + escape cs c + | isSpace c + || '\\' == c + || '\'' == c + || '"' == c = c:'\\':cs -- n.b., our caller must reverse the result + | otherwise = c:cs + escapeArg = reverse . foldl' escape [] + +renderPureArgs :: Version -> Compiler -> Platform -> HaddockArgs -> [String] +renderPureArgs version comp platform args = concat + [ (:[]) . (\f -> "--dump-interface="++ unDir (argOutputDir args) f) + . fromFlag . argInterfaceFile $ args + + , if isVersion 2 16 + then (\pkg -> [ "--package-name=" ++ display (pkgName pkg) + , "--package-version="++display (pkgVersion pkg) + ]) + . fromFlag . argPackageName $ args + else [] + + , (\(All b,xs) -> bool (map (("--hide=" ++). display) xs) [] b) + . argHideModules $ args + + , bool ["--ignore-all-exports"] [] . getAny . argIgnoreExports $ args + + , maybe [] (\(m,e,l) -> + ["--source-module=" ++ m + ,"--source-entity=" ++ e] + ++ if isVersion 2 14 then ["--source-entity-line=" ++ l] + else [] + ) . flagToMaybe . argLinkSource $ args + + , maybe [] ((:[]) . ("--css="++)) . flagToMaybe . argCssFile $ args + + , maybe [] ((:[]) . ("--use-contents="++)) . 
flagToMaybe . argContents $ args + + , bool [] [verbosityFlag] . getAny . argVerbose $ args + + , map (\o -> case o of Hoogle -> "--hoogle"; Html -> "--html") + . fromFlag . argOutput $ args + + , renderInterfaces . argInterfaces $ args + + , (:[]) . ("--odir="++) . unDir . argOutputDir $ args + + , (:[]) . ("--title="++) + . (bool (++" (internal documentation)") + id (getAny $ argIgnoreExports args)) + . fromFlag . argTitle $ args + + , [ "--optghc=" ++ opt | (opts, _ghcVer) <- flagToList (argGhcOptions args) + , opt <- renderGhcOptions comp platform opts ] + + , maybe [] (\l -> ["-B"++l]) $ + flagToMaybe (argGhcLibDir args) -- error if Nothing? + + , argTargets $ args + ] + where + renderInterfaces = + map (\(i,mh) -> "--read-interface=" ++ + maybe "" (++",") mh ++ i) + bool a b c = if c then a else b + isVersion major minor = version >= Version [major,minor] [] + verbosityFlag + | isVersion 2 5 = "--verbosity=1" + | otherwise = "--verbose" + +--------------------------------------------------------------------------------- + +-- | Given a list of 'InstalledPackageInfo's, return a list of interfaces and +-- HTML paths, and an optional warning for packages with missing documentation. +haddockPackagePaths :: [InstalledPackageInfo] + -> Maybe (InstalledPackageInfo -> FilePath) + -> IO ([(FilePath, Maybe FilePath)], Maybe String) +haddockPackagePaths ipkgs mkHtmlPath = do + interfaces <- sequence + [ case interfaceAndHtmlPath ipkg of + Nothing -> return (Left (packageId ipkg)) + Just (interface, html) -> do + exists <- doesFileExist interface + if exists + then return (Right (interface, html)) + else return (Left pkgid) + | ipkg <- ipkgs, let pkgid = packageId ipkg + , pkgName pkgid `notElem` noHaddockWhitelist + ] + + let missing = [ pkgid | Left pkgid <- interfaces ] + warning = "The documentation for the following packages are not " + ++ "installed. No links will be generated to these packages: " + ++ intercalate ", " (map display missing) + flags = rights interfaces + + return (flags, if null missing then Nothing else Just warning) + + where + -- Don't warn about missing documentation for these packages. See #1231. + noHaddockWhitelist = map PackageName [ "rts" ] + + -- Actually extract interface and HTML paths from an 'InstalledPackageInfo'. + interfaceAndHtmlPath :: InstalledPackageInfo + -> Maybe (FilePath, Maybe FilePath) + interfaceAndHtmlPath pkg = do + interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg) + html <- case mkHtmlPath of + Nothing -> fmap fixFileUrl + (listToMaybe (InstalledPackageInfo.haddockHTMLs pkg)) + Just mkPath -> Just (mkPath pkg) + return (interface, if null html then Nothing else Just html) + where + -- The 'haddock-html' field in the hc-pkg output is often set as a + -- native path, but we need it as a URL. See #1064. 
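renderArgs above writes long argument lists to a gcc-style response file, one argument per line, with whitespace, quotes and backslashes escaped so haddock can reconstruct the original arguments. A self-contained sketch of that escaping step:

import Data.Char (isSpace)
import Data.List (foldl')

-- The accumulator is built in reverse, so the backslash is emitted after
-- the escaped character and the final reverse puts it in front.
escapeArg :: String -> String
escapeArg = reverse . foldl' escape []
  where
    escape cs c
      | isSpace c || c `elem` "\\'\"" = c : '\\' : cs
      | otherwise                     = c : cs

main :: IO ()
main = putStr (unlines (map escapeArg ["--title=My Package (internal)"]))
-- --title=My\ Package\ (internal)
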
+ fixFileUrl f | isAbsolute f = "file://" ++ f + | otherwise = f + +haddockPackageFlags :: LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate + -> IO ([(FilePath, Maybe FilePath)], Maybe String) +haddockPackageFlags lbi clbi htmlTemplate = do + let allPkgs = installedPkgs lbi + directDeps = map fst (componentPackageDeps clbi) + transitiveDeps <- case PackageIndex.dependencyClosure allPkgs directDeps of + Left x -> return x + Right inf -> die $ "internal error when calculating transitive " + ++ "package dependencies.\nDebug info: " ++ show inf + haddockPackagePaths (PackageIndex.allPackages transitiveDeps) mkHtmlPath + where + mkHtmlPath = fmap expandTemplateVars htmlTemplate + expandTemplateVars tmpl pkg = + fromPathTemplate . substPathTemplate (env pkg) $ tmpl + env pkg = haddockTemplateEnv lbi (packageId pkg) + + +haddockTemplateEnv :: LocalBuildInfo -> PackageIdentifier -> PathTemplateEnv +haddockTemplateEnv lbi pkg_id = + (PrefixVar, prefix (installDirTemplates lbi)) + -- We want the legacy unit ID here, because it gives us nice paths + -- (Haddock people don't care about the dependencies) + : initialPathTemplateEnv pkg_id (mkLegacyUnitId pkg_id) (compilerInfo (compiler lbi)) + (hostPlatform lbi) + +-- ------------------------------------------------------------------------------ +-- hscolour support. + +hscolour :: PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HscolourFlags + -> IO () +hscolour pkg_descr lbi suffixes flags = do + -- we preprocess even if hscolour won't be found on the machine + -- will this upset someone? + initialBuildSteps distPref pkg_descr lbi verbosity + hscolour' die ForDevelopment pkg_descr lbi suffixes flags + where + verbosity = fromFlag (hscolourVerbosity flags) + distPref = fromFlag $ hscolourDistPref flags + +hscolour' :: (String -> IO ()) -- ^ Called when the 'hscolour' exe is not found. + -> HaddockTarget + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HscolourFlags + -> IO () +hscolour' onNoHsColour haddockTarget pkg_descr lbi suffixes flags = + either onNoHsColour (\(hscolourProg, _, _) -> go hscolourProg) =<< + lookupProgramVersion verbosity hscolourProgram + (orLaterVersion (Version [1,8] [])) (withPrograms lbi) + where + go :: ConfiguredProgram -> IO () + go hscolourProg = do + setupMessage verbosity "Running hscolour for" (packageId pkg_descr) + createDirectoryIfMissingVerbose verbosity True $ + hscolourPref' haddockTarget distPref pkg_descr + + let pre c = preprocessComponent pkg_descr c lbi False verbosity suffixes + withAllComponentsInBuildOrder pkg_descr lbi $ \comp _ -> do + pre comp + let + doExe com = case (compToExe com) of + Just exe -> do + let outputDir = hscolourPref' haddockTarget distPref pkg_descr + exeName exe "src" + runHsColour hscolourProg outputDir =<< getExeSourceFiles lbi exe + Nothing -> do + warn (fromFlag $ hscolourVerbosity flags) + "Unsupported component, skipping..." 
+ return () + case comp of + CLib lib -> do + let outputDir = hscolourPref' haddockTarget distPref pkg_descr "src" + runHsColour hscolourProg outputDir =<< getLibSourceFiles lbi lib + CExe _ -> when (fromFlag (hscolourExecutables flags)) $ doExe comp + CTest _ -> when (fromFlag (hscolourTestSuites flags)) $ doExe comp + CBench _ -> when (fromFlag (hscolourBenchmarks flags)) $ doExe comp + + stylesheet = flagToMaybe (hscolourCSS flags) + + verbosity = fromFlag (hscolourVerbosity flags) + distPref = fromFlag (hscolourDistPref flags) + + runHsColour prog outputDir moduleFiles = do + createDirectoryIfMissingVerbose verbosity True outputDir + + case stylesheet of -- copy the CSS file + Nothing | programVersion prog >= Just (Version [1,9] []) -> + rawSystemProgram verbosity prog + ["-print-css", "-o" ++ outputDir "hscolour.css"] + | otherwise -> return () + Just s -> copyFileVerbose verbosity s (outputDir "hscolour.css") + + forM_ moduleFiles $ \(m, inFile) -> + rawSystemProgram verbosity prog + ["-css", "-anchor", "-o" ++ outFile m, inFile] + where + outFile m = outputDir + intercalate "-" (ModuleName.components m) <.> "html" + +haddockToHscolour :: HaddockFlags -> HscolourFlags +haddockToHscolour flags = + HscolourFlags { + hscolourCSS = haddockHscolourCss flags, + hscolourExecutables = haddockExecutables flags, + hscolourTestSuites = haddockTestSuites flags, + hscolourBenchmarks = haddockBenchmarks flags, + hscolourVerbosity = haddockVerbosity flags, + hscolourDistPref = haddockDistPref flags + } +--------------------------------------------------------------------------------- +-- TODO these should be moved elsewhere. + +getLibSourceFiles :: LocalBuildInfo + -> Library + -> IO [(ModuleName.ModuleName, FilePath)] +getLibSourceFiles lbi lib = getSourceFiles searchpaths modules + where + bi = libBuildInfo lib + modules = PD.exposedModules lib ++ otherModules bi + searchpaths = autogenModulesDir lbi : buildDir lbi : hsSourceDirs bi + +getExeSourceFiles :: LocalBuildInfo + -> Executable + -> IO [(ModuleName.ModuleName, FilePath)] +getExeSourceFiles lbi exe = do + moduleFiles <- getSourceFiles searchpaths modules + srcMainPath <- findFile (hsSourceDirs bi) (modulePath exe) + return ((ModuleName.main, srcMainPath) : moduleFiles) + where + bi = buildInfo exe + modules = otherModules bi + searchpaths = autogenModulesDir lbi : exeBuildDir lbi exe : hsSourceDirs bi + +getSourceFiles :: [FilePath] + -> [ModuleName.ModuleName] + -> IO [(ModuleName.ModuleName, FilePath)] +getSourceFiles dirs modules = flip mapM modules $ \m -> fmap ((,) m) $ + findFileWithExtension ["hs", "lhs"] dirs (ModuleName.toFilePath m) + >>= maybe (notFound m) (return . normalise) + where + notFound module_ = die $ "can't find source for module " ++ display module_ + +-- | The directory where we put build results for an executable +exeBuildDir :: LocalBuildInfo -> Executable -> FilePath +exeBuildDir lbi exe = buildDir lbi exeName exe exeName exe ++ "-tmp" + +-- ------------------------------------------------------------------------------ +-- Boilerplate Monoid instance. +instance Monoid HaddockArgs where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup HaddockArgs where + (<>) = gmappend + +instance Monoid Directory where + mempty = Dir "." 
+ mappend = (Semi.<>) + +instance Semigroup Directory where + Dir m <> Dir n = Dir $ m n diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/HaskellSuite.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/HaskellSuite.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/HaskellSuite.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/HaskellSuite.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,222 @@ +module Distribution.Simple.HaskellSuite where + +import Control.Monad +import Data.Maybe +import Data.Version +import qualified Data.Map as M (empty) + +import Distribution.Simple.Program +import Distribution.Simple.Compiler as Compiler +import Distribution.Simple.Utils +import Distribution.Simple.BuildPaths +import Distribution.Verbosity +import Distribution.Text +import Distribution.Package +import Distribution.InstalledPackageInfo hiding (includeDirs) +import Distribution.Simple.PackageIndex as PackageIndex +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.System (Platform) +import Distribution.Compat.Exception +import Language.Haskell.Extension +import Distribution.Simple.Program.Builtin + +configure + :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity mbHcPath hcPkgPath conf0 = do + + -- We have no idea how a haskell-suite tool is named, so we require at + -- least some information from the user. + hcPath <- + let msg = "You have to provide name or path of a haskell-suite tool (-w PATH)" + in maybe (die msg) return mbHcPath + + when (isJust hcPkgPath) $ + warn verbosity "--with-hc-pkg option is ignored for haskell-suite" + + (comp, confdCompiler, conf1) <- configureCompiler hcPath conf0 + + -- Update our pkg tool. It uses the same executable as the compiler, but + -- all command start with "pkg" + (confdPkg, _) <- requireProgram verbosity haskellSuitePkgProgram conf1 + let conf2 = + updateProgram + confdPkg + { programLocation = programLocation confdCompiler + , programDefaultArgs = ["pkg"] + } + conf1 + + return (comp, Nothing, conf2) + + where + configureCompiler hcPath conf0' = do + let + haskellSuiteProgram' = + haskellSuiteProgram + { programFindLocation = \v p -> findProgramOnSearchPath v p hcPath } + + -- NB: cannot call requireProgram right away — it'd think that + -- the program is already configured and won't reconfigure it again. + -- Instead, call configureProgram directly first. 
+ conf1 <- configureProgram verbosity haskellSuiteProgram' conf0' + (confdCompiler, conf2) <- requireProgram verbosity haskellSuiteProgram' conf1 + + extensions <- getExtensions verbosity confdCompiler + languages <- getLanguages verbosity confdCompiler + (compName, compVersion) <- + getCompilerVersion verbosity confdCompiler + + let + comp = Compiler { + compilerId = CompilerId (HaskellSuite compName) compVersion, + compilerAbiTag = Compiler.NoAbiTag, + compilerCompat = [], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = M.empty + } + + return (comp, confdCompiler, conf2) + +hstoolVersion :: Verbosity -> FilePath -> IO (Maybe Version) +hstoolVersion = findProgramVersion "--hspkg-version" id + +numericVersion :: Verbosity -> FilePath -> IO (Maybe Version) +numericVersion = findProgramVersion "--compiler-version" (last . words) + +getCompilerVersion :: Verbosity -> ConfiguredProgram -> IO (String, Version) +getCompilerVersion verbosity prog = do + output <- rawSystemStdout verbosity (programPath prog) ["--compiler-version"] + let + parts = words output + name = concat $ init parts -- there shouldn't be any spaces in the name anyway + versionStr = last parts + version <- + maybe (die "haskell-suite: couldn't determine compiler version") return $ + simpleParse versionStr + return (name, version) + +getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Compiler.Flag)] +getExtensions verbosity prog = do + extStrs <- + lines `fmap` + rawSystemStdout verbosity (programPath prog) ["--supported-extensions"] + return + [ (ext, "-X" ++ display ext) | Just ext <- map simpleParse extStrs ] + +getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Compiler.Flag)] +getLanguages verbosity prog = do + langStrs <- + lines `fmap` + rawSystemStdout verbosity (programPath prog) ["--supported-languages"] + return + [ (ext, "-G" ++ display ext) | Just ext <- map simpleParse langStrs ] + +-- Other compilers do some kind of a packagedb stack check here. Not sure +-- if we need something like that as well. +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity packagedbs conf = + liftM (PackageIndex.fromList . concat) $ forM packagedbs $ \packagedb -> + do str <- + getDbProgramOutput verbosity haskellSuitePkgProgram conf + ["dump", packageDbOpt packagedb] + `catchExit` \_ -> die $ "pkg dump failed" + case parsePackages str of + Right ok -> return ok + _ -> die "failed to parse output of 'pkg dump'" + + where + parsePackages str = + let parsed = map parseInstalledPackageInfo (splitPkgs str) + in case [ msg | ParseFailed msg <- parsed ] of + [] -> Right [ pkg | ParseOk _ pkg <- parsed ] + msgs -> Left msgs + + splitPkgs :: String -> [String] + splitPkgs = map unlines . splitWith ("---" ==) . lines + where + splitWith :: (a -> Bool) -> [a] -> [[a]] + splitWith p xs = ys : case zs of + [] -> [] + _:ws -> splitWith p ws + where (ys,zs) = break p xs + +buildLib + :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + -- In future, there should be a mechanism for the compiler to request any + -- number of the above parameters (or their parts) — in particular, + -- pieces of PackageDescription. + -- + -- For now, we only pass those that we know are used. 
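getInstalledPackages above parses the textual dump produced by the tool's pkg command by cutting it at the "---" separator lines and feeding each chunk to parseInstalledPackageInfo. A usage sketch of just that splitting step:

-- Split a dump into per-package chunks at "---" lines, as splitPkgs does above.
splitPkgs :: String -> [String]
splitPkgs = map unlines . splitWith ("---" ==) . lines
  where
    splitWith :: (a -> Bool) -> [a] -> [[a]]
    splitWith p xs = ys : case zs of
                            []     -> []
                            _ : ws -> splitWith p ws
      where (ys, zs) = break p xs

main :: IO ()
main = mapM_ print (splitPkgs "name: foo\nversion: 1\n---\nname: bar\n")
-- "name: foo\nversion: 1\n"
-- "name: bar\n"
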
+ + let odir = buildDir lbi + bi = libBuildInfo lib + srcDirs = hsSourceDirs bi ++ [odir] + dbStack = withPackageDB lbi + language = fromMaybe Haskell98 (defaultLanguage bi) + conf = withPrograms lbi + pkgid = packageId pkg_descr + + runDbProgram verbosity haskellSuiteProgram conf $ + [ "compile", "--build-dir", odir ] ++ + concat [ ["-i", d] | d <- srcDirs ] ++ + concat [ ["-I", d] | d <- [autogenModulesDir lbi, odir] ++ includeDirs bi ] ++ + [ packageDbOpt pkgDb | pkgDb <- dbStack ] ++ + [ "--package-name", display pkgid ] ++ + concat [ ["--package-id", display ipkgid ] + | (ipkgid, _) <- componentPackageDeps clbi ] ++ + ["-G", display language] ++ + concat [ ["-X", display ex] | ex <- usedExtensions bi ] ++ + cppOptions (libBuildInfo lib) ++ + [ display modu | modu <- libModules lib ] + + + +installLib + :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir pkg lib _clbi = do + let conf = withPrograms lbi + runDbProgram verbosity haskellSuitePkgProgram conf $ + [ "install-library" + , "--build-dir", builtDir + , "--target-dir", targetDir + , "--dynlib-target-dir", dynlibTargetDir + , "--package-id", display $ packageId pkg + ] ++ map display (libModules lib) + +registerPackage + :: Verbosity + -> ProgramConfiguration + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity progdb packageDbs installedPkgInfo = do + (hspkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb + + runProgramInvocation verbosity $ + (programInvocation hspkg + ["update", packageDbOpt $ last packageDbs]) + { progInvokeInput = Just $ showInstalledPackageInfo installedPkgInfo } + +initPackageDB :: Verbosity -> ProgramConfiguration -> FilePath -> IO () +initPackageDB verbosity conf dbPath = + runDbProgram verbosity haskellSuitePkgProgram conf + ["init", dbPath] + +packageDbOpt :: PackageDB -> String +packageDbOpt GlobalPackageDB = "--global" +packageDbOpt UserPackageDB = "--user" +packageDbOpt (SpecificPackageDB db) = "--package-db=" ++ db diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Hpc.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Hpc.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Hpc.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,141 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Hpc +-- Copyright : Thomas Tuegel 2011 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides functions for locating various HPC-related paths and +-- a function for adding the necessary options to a PackageDescription to +-- build test suites with HPC enabled. + +module Distribution.Simple.Hpc + ( Way(..), guessWay + , htmlDir + , mixDir + , tixDir + , tixFilePath + , markupPackage + , markupTest + ) where + +import Control.Monad ( when ) +import Distribution.ModuleName ( main ) +import Distribution.PackageDescription + ( TestSuite(..) + , testModules + ) +import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
) +import Distribution.Simple.Program + ( hpcProgram + , requireProgramVersion + ) +import Distribution.Simple.Program.Hpc ( markup, union ) +import Distribution.Simple.Utils ( notice ) +import Distribution.Version ( anyVersion ) +import Distribution.Verbosity ( Verbosity() ) +import System.Directory ( createDirectoryIfMissing, doesFileExist ) +import System.FilePath + +-- ------------------------------------------------------------------------- +-- Haskell Program Coverage + +data Way = Vanilla | Prof | Dyn + deriving (Bounded, Enum, Eq, Read, Show) + +hpcDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Directory containing component's HPC .mix files +hpcDir distPref way = distPref "hpc" wayDir + where + wayDir = case way of + Vanilla -> "vanilla" + Prof -> "prof" + Dyn -> "dyn" + +mixDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Directory containing test suite's .mix files +mixDir distPref way name = hpcDir distPref way "mix" name + +tixDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Directory containing test suite's .tix files +tixDir distPref way name = hpcDir distPref way "tix" name + +-- | Path to the .tix file containing a test suite's sum statistics. +tixFilePath :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Path to test suite's .tix file +tixFilePath distPref way name = tixDir distPref way name name <.> "tix" + +htmlDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Path to test suite's HTML markup directory +htmlDir distPref way name = hpcDir distPref way "html" name + +-- | Attempt to guess the way the test suites in this package were compiled +-- and linked with the library so the correct module interfaces are found. +guessWay :: LocalBuildInfo -> Way +guessWay lbi + | withProfExe lbi = Prof + | withDynExe lbi = Dyn + | otherwise = Vanilla + +-- | Generate the HTML markup for a test suite. +markupTest :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^ \"dist/\" prefix + -> String -- ^ Library name + -> TestSuite + -> IO () +markupTest verbosity lbi distPref libName suite = do + tixFileExists <- doesFileExist $ tixFilePath distPref way $ testName suite + when tixFileExists $ do + -- behaviour of 'markup' depends on version, so we need *a* version + -- but no particular one + (hpc, hpcVer, _) <- requireProgramVersion verbosity + hpcProgram anyVersion (withPrograms lbi) + let htmlDir_ = htmlDir distPref way $ testName suite + markup hpc hpcVer verbosity + (tixFilePath distPref way $ testName suite) mixDirs + htmlDir_ + (testModules suite ++ [ main ]) + notice verbosity $ "Test coverage report written to " + ++ htmlDir_ "hpc_index" <.> "html" + where + way = guessWay lbi + mixDirs = map (mixDir distPref way) [ testName suite, libName ] + +-- | Generate the HTML markup for all of a package's test suites. +markupPackage :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^ \"dist/\" prefix + -> String -- ^ Library name + -> [TestSuite] + -> IO () +markupPackage verbosity lbi distPref libName suites = do + let tixFiles = map (tixFilePath distPref way . 
testName) suites + tixFilesExist <- mapM doesFileExist tixFiles + when (and tixFilesExist) $ do + -- behaviour of 'markup' depends on version, so we need *a* version + -- but no particular one + (hpc, hpcVer, _) <- requireProgramVersion verbosity + hpcProgram anyVersion (withPrograms lbi) + let outFile = tixFilePath distPref way libName + htmlDir' = htmlDir distPref way libName + excluded = concatMap testModules suites ++ [ main ] + createDirectoryIfMissing True $ takeDirectory outFile + union hpc verbosity tixFiles outFile excluded + markup hpc hpcVer verbosity outFile mixDirs htmlDir' excluded + notice verbosity $ "Package coverage report written to " + ++ htmlDir' "hpc_index.html" + where + way = guessWay lbi + mixDirs = map (mixDir distPref way) $ libName : map testName suites diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/InstallDirs.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/InstallDirs.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/InstallDirs.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/InstallDirs.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,580 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ForeignFunctionInterface #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.InstallDirs +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This manages everything to do with where files get installed (though does +-- not get involved with actually doing any installation). It provides an +-- 'InstallDirs' type which is a set of directories for where to install +-- things. It also handles the fact that we use templates in these install +-- dirs. For example most install dirs are relative to some @$prefix@ and by +-- changing the prefix all other dirs still end up changed appropriately. So it +-- provides a 'PathTemplate' type and functions for substituting for these +-- templates. + +module Distribution.Simple.InstallDirs ( + InstallDirs(..), + InstallDirTemplates, + defaultInstallDirs, + combineInstallDirs, + absoluteInstallDirs, + CopyDest(..), + prefixRelativeInstallDirs, + substituteInstallDirTemplates, + + PathTemplate, + PathTemplateVariable(..), + PathTemplateEnv, + toPathTemplate, + fromPathTemplate, + substPathTemplate, + initialPathTemplateEnv, + platformTemplateEnv, + compilerTemplateEnv, + packageTemplateEnv, + abiTemplateEnv, + installDirsTemplateEnv, + ) where + + +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup as Semi +import Distribution.Package +import Distribution.System +import Distribution.Compiler +import Distribution.Text + +import Data.List (isPrefixOf) +import Data.Maybe (fromMaybe) +import GHC.Generics (Generic) +import System.Directory (getAppUserDataDirectory) +import System.FilePath ((), isPathSeparator, pathSeparator) +import System.FilePath (dropDrive) + +#if mingw32_HOST_OS +import Foreign +import Foreign.C +#endif + +-- --------------------------------------------------------------------------- +-- Installation directories + + +-- | The directories where we will install files for packages. 
+-- +-- We have several different directories for different types of files since +-- many systems have conventions whereby different types of files in a package +-- are installed in different directories. This is particularly the case on +-- Unix style systems. +-- +data InstallDirs dir = InstallDirs { + prefix :: dir, + bindir :: dir, + libdir :: dir, + libsubdir :: dir, + dynlibdir :: dir, + libexecdir :: dir, + includedir :: dir, + datadir :: dir, + datasubdir :: dir, + docdir :: dir, + mandir :: dir, + htmldir :: dir, + haddockdir :: dir, + sysconfdir :: dir + } deriving (Eq, Read, Show, Functor, Generic) + +instance Binary dir => Binary (InstallDirs dir) + +instance (Semigroup dir, Monoid dir) => Monoid (InstallDirs dir) where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup dir => Semigroup (InstallDirs dir) where + (<>) = gmappend + +combineInstallDirs :: (a -> b -> c) + -> InstallDirs a + -> InstallDirs b + -> InstallDirs c +combineInstallDirs combine a b = InstallDirs { + prefix = prefix a `combine` prefix b, + bindir = bindir a `combine` bindir b, + libdir = libdir a `combine` libdir b, + libsubdir = libsubdir a `combine` libsubdir b, + dynlibdir = dynlibdir a `combine` dynlibdir b, + libexecdir = libexecdir a `combine` libexecdir b, + includedir = includedir a `combine` includedir b, + datadir = datadir a `combine` datadir b, + datasubdir = datasubdir a `combine` datasubdir b, + docdir = docdir a `combine` docdir b, + mandir = mandir a `combine` mandir b, + htmldir = htmldir a `combine` htmldir b, + haddockdir = haddockdir a `combine` haddockdir b, + sysconfdir = sysconfdir a `combine` sysconfdir b + } + +appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a +appendSubdirs append dirs = dirs { + libdir = libdir dirs `append` libsubdir dirs, + datadir = datadir dirs `append` datasubdir dirs, + libsubdir = error "internal error InstallDirs.libsubdir", + datasubdir = error "internal error InstallDirs.datasubdir" + } + +-- | The installation directories in terms of 'PathTemplate's that contain +-- variables. +-- +-- The defaults for most of the directories are relative to each other, in +-- particular they are all relative to a single prefix. This makes it +-- convenient for the user to override the default installation directory +-- by only having to specify --prefix=... rather than overriding each +-- individually. This is done by allowing $-style variables in the dirs. +-- These are expanded by textual substitution (see 'substPathTemplate'). +-- +-- A few of these installation directories are split into two components, the +-- dir and subdir. The full installation path is formed by combining the two +-- together with @\/@. The reason for this is compatibility with other Unix +-- build systems which also support @--libdir@ and @--datadir@. We would like +-- users to be able to configure @--libdir=\/usr\/lib64@ for example but +-- because by default we want to support installing multiple versions of +-- packages and building the same package for multiple compilers we append the +-- libsubdir to get: @\/usr\/lib64\/$libname\/$compiler@. +-- +-- An additional complication is the need to support relocatable packages on +-- systems which support such things, like Windows. 
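The install directories described above are kept as $-variable templates precisely so that overriding --prefix alone re-anchors every other directory. Cabal's real PathTemplate machinery tokenises the templates; the following is only a minimal textual sketch of the substitution idea, with a made-up function name:

import Data.List (isPrefixOf)

-- Expand $var occurrences using the first matching binding in env.
expandTemplate :: [(String, FilePath)] -> String -> FilePath
expandTemplate env = go
  where
    go []         = []
    go ('$':rest)
      | ((var, val):_) <- [ (v, x) | (v, x) <- env, v `isPrefixOf` rest ]
      = val ++ go (drop (length var) rest)
    go (c:cs)     = c : go cs

main :: IO ()
main = putStrLn $
  expandTemplate [("prefix", "/usr/local"), ("pkgid", "foo-1.0")]
                 "$prefix/lib/$pkgid"
-- /usr/local/lib/foo-1.0
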
+--
+type InstallDirTemplates = InstallDirs PathTemplate
+
+-- ---------------------------------------------------------------------------
+-- Default installation directories
+
+defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates
+defaultInstallDirs comp userInstall _hasLibs = do
+  installPrefix <-
+      if userInstall
+      then getAppUserDataDirectory "cabal"
+      else case buildOS of
+           Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir
+                         return (windowsProgramFilesDir </> "Haskell")
+           _       -> return "/usr/local"
+  installLibDir <-
+      case buildOS of
+      Windows -> return "$prefix"
+      _       -> case comp of
+                 LHC | userInstall -> getAppUserDataDirectory "lhc"
+                 _                 -> return ("$prefix" </> "lib")
+  return $ fmap toPathTemplate $ InstallDirs {
+      prefix       = installPrefix,
+      bindir       = "$prefix" </> "bin",
+      libdir       = installLibDir,
+      libsubdir    = case comp of
+           JHC    -> "$compiler"
+           LHC    -> "$compiler"
+           UHC    -> "$pkgid"
+           _other -> "$abi" </> "$libname",
+      dynlibdir    = "$libdir" </> case comp of
+           JHC    -> "$compiler"
+           LHC    -> "$compiler"
+           UHC    -> "$pkgid"
+           _other -> "$abi",
+      libexecdir   = case buildOS of
+           Windows -> "$prefix" </> "$libname"
+           _other  -> "$prefix" </> "libexec",
+      includedir   = "$libdir" </> "$libsubdir" </> "include",
+      datadir      = case buildOS of
+           Windows -> "$prefix"
+           _other  -> "$prefix" </> "share",
+      datasubdir   = "$abi" </> "$pkgid",
+      docdir       = "$datadir" </> "doc" </> "$abi" </> "$pkgid",
+      mandir       = "$datadir" </> "man",
+      htmldir      = "$docdir" </> "html",
+      haddockdir   = "$htmldir",
+      sysconfdir   = "$prefix" </> "etc"
+  }
+
+-- ---------------------------------------------------------------------------
+-- Converting directories, absolute or prefix-relative
+
+-- | Substitute the install dir templates into each other.
+--
+-- To prevent cyclic substitutions, only some variables are allowed in
+-- particular dir templates. If out of scope vars are present, they are not
+-- substituted for. Checking for any remaining unsubstituted vars can be done
+-- as a subsequent operation.
+--
+-- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we
+-- can replace 'prefix' with the 'PrefixVar' and get resulting
+-- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it
+-- easy to check which paths are relative to the $prefix.
+--
+substituteInstallDirTemplates :: PathTemplateEnv
+                              -> InstallDirTemplates -> InstallDirTemplates
+substituteInstallDirTemplates env dirs = dirs'
+  where
+    dirs' = InstallDirs {
+      -- So this specifies exactly which vars are allowed in each template
+      prefix     = subst prefix     [],
+      bindir     = subst bindir     [prefixVar],
+      libdir     = subst libdir     [prefixVar, bindirVar],
+      libsubdir  = subst libsubdir  [],
+      dynlibdir  = subst dynlibdir  [prefixVar, bindirVar, libdirVar],
+      libexecdir = subst libexecdir prefixBinLibVars,
+      includedir = subst includedir prefixBinLibVars,
+      datadir    = subst datadir    prefixBinLibVars,
+      datasubdir = subst datasubdir [],
+      docdir     = subst docdir     prefixBinLibDataVars,
+      mandir     = subst mandir     (prefixBinLibDataVars ++ [docdirVar]),
+      htmldir    = subst htmldir    (prefixBinLibDataVars ++ [docdirVar]),
+      haddockdir = subst haddockdir (prefixBinLibDataVars ++
+                                      [docdirVar, htmldirVar]),
+      sysconfdir = subst sysconfdir prefixBinLibVars
+    }
+    subst dir env' = substPathTemplate (env'++env) (dir dirs)
+
+    prefixVar     = (PrefixVar,     prefix     dirs')
+    bindirVar     = (BindirVar,     bindir     dirs')
+    libdirVar     = (LibdirVar,     libdir     dirs')
+    libsubdirVar  = (LibsubdirVar,  libsubdir  dirs')
+    datadirVar    = (DatadirVar,    datadir    dirs')
+    datasubdirVar = (DatasubdirVar, datasubdir dirs')
+    docdirVar     = (DocdirVar,     docdir     dirs')
+    htmldirVar    = (HtmldirVar,    htmldir    dirs')
+    prefixBinLibVars     = [prefixVar, bindirVar, libdirVar, libsubdirVar]
+    prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar]
+
+-- | Convert from abstract install directories to actual absolute ones by
+-- substituting for all the variables in the abstract paths, to get real
+-- absolute paths.
+absoluteInstallDirs :: PackageIdentifier
+                    -> UnitId
+                    -> CompilerInfo
+                    -> CopyDest
+                    -> Platform
+                    -> InstallDirs PathTemplate
+                    -> InstallDirs FilePath
+absoluteInstallDirs pkgId libname compilerId copydest platform dirs =
+    (case copydest of
+       CopyTo destdir -> fmap ((destdir </>) . dropDrive)
+       _              -> id)
+  . appendSubdirs (</>)
+  . fmap fromPathTemplate
+  $ substituteInstallDirTemplates env dirs
+  where
+    env = initialPathTemplateEnv pkgId libname compilerId platform
+
+
+-- |The location prefix for the /copy/ command.
+data CopyDest
+  = NoCopyDest
+  | CopyTo FilePath
+  deriving (Eq, Show)
+
+-- | Check which of the paths are relative to the installation $prefix.
+--
+-- If any of the paths are not relative, i.e. they are absolute paths, then it
+-- prevents us from making a relocatable package (also known as a \"prefix
+-- independent\" package).
+--
+prefixRelativeInstallDirs :: PackageIdentifier
+                          -> UnitId
+                          -> CompilerInfo
+                          -> Platform
+                          -> InstallDirTemplates
+                          -> InstallDirs (Maybe FilePath)
+prefixRelativeInstallDirs pkgId libname compilerId platform dirs =
+    fmap relative
+  . appendSubdirs combinePathTemplate
+  $ -- substitute the path template into each other, except that we map
+    -- \$prefix back to $prefix. We're trying to end up with templates that
+    -- mention no vars except $prefix.
+    substituteInstallDirTemplates env dirs {
+      prefix = PathTemplate [Variable PrefixVar]
+    }
+  where
+    env = initialPathTemplateEnv pkgId libname compilerId platform
+
+    -- If it starts with $prefix then it's relative, and we produce the
+    -- relative path by stripping off $prefix/ or $prefix
+    relative dir = case dir of
+      PathTemplate cs -> fmap (fromPathTemplate .
PathTemplate) (relative' cs) + relative' (Variable PrefixVar : Ordinary (s:rest) : rest') + | isPathSeparator s = Just (Ordinary rest : rest') + relative' (Variable PrefixVar : rest) = Just rest + relative' _ = Nothing + +-- --------------------------------------------------------------------------- +-- Path templates + +-- | An abstract path, possibly containing variables that need to be +-- substituted for to get a real 'FilePath'. +-- +newtype PathTemplate = PathTemplate [PathComponent] + deriving (Eq, Ord, Generic) + +instance Binary PathTemplate + +data PathComponent = + Ordinary FilePath + | Variable PathTemplateVariable + deriving (Eq, Ord, Generic) + +instance Binary PathComponent + +data PathTemplateVariable = + PrefixVar -- ^ The @$prefix@ path variable + | BindirVar -- ^ The @$bindir@ path variable + | LibdirVar -- ^ The @$libdir@ path variable + | LibsubdirVar -- ^ The @$libsubdir@ path variable + | DynlibdirVar -- ^ The @$dynlibdir@ path variable + | DatadirVar -- ^ The @$datadir@ path variable + | DatasubdirVar -- ^ The @$datasubdir@ path variable + | DocdirVar -- ^ The @$docdir@ path variable + | HtmldirVar -- ^ The @$htmldir@ path variable + | PkgNameVar -- ^ The @$pkg@ package name path variable + | PkgVerVar -- ^ The @$version@ package version path variable + | PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@ + | LibNameVar -- ^ The @$libname@ path variable + | CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@ + | OSVar -- ^ The operating system name, eg @windows@ or @linux@ + | ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@ + | AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag + | AbiTagVar -- ^ The optional ABI tag for the compiler + | ExecutableNameVar -- ^ The executable name; used in shell wrappers + | TestSuiteNameVar -- ^ The name of the test suite being run + | TestSuiteResultVar -- ^ The result of the test suite being run, eg + -- @pass@, @fail@, or @error@. + | BenchmarkNameVar -- ^ The name of the benchmark being run + deriving (Eq, Ord, Generic) + +instance Binary PathTemplateVariable + +type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)] + +-- | Convert a 'FilePath' to a 'PathTemplate' including any template vars. +-- +toPathTemplate :: FilePath -> PathTemplate +toPathTemplate = PathTemplate . 
read + +-- | Convert back to a path, any remaining vars are included +-- +fromPathTemplate :: PathTemplate -> FilePath +fromPathTemplate (PathTemplate template) = show template + +combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate +combinePathTemplate (PathTemplate t1) (PathTemplate t2) = + PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2) + +substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate +substPathTemplate environment (PathTemplate template) = + PathTemplate (concatMap subst template) + + where subst component@(Ordinary _) = [component] + subst component@(Variable variable) = + case lookup variable environment of + Just (PathTemplate components) -> components + Nothing -> [component] + +-- | The initial environment has all the static stuff but no paths +initialPathTemplateEnv :: PackageIdentifier + -> UnitId + -> CompilerInfo + -> Platform + -> PathTemplateEnv +initialPathTemplateEnv pkgId libname compiler platform = + packageTemplateEnv pkgId libname + ++ compilerTemplateEnv compiler + ++ platformTemplateEnv platform + ++ abiTemplateEnv compiler platform + +packageTemplateEnv :: PackageIdentifier -> UnitId -> PathTemplateEnv +packageTemplateEnv pkgId libname = + [(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)]) + ,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)]) + ,(LibNameVar, PathTemplate [Ordinary $ display libname]) + ,(PkgIdVar, PathTemplate [Ordinary $ display pkgId]) + ] + +compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv +compilerTemplateEnv compiler = + [(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)]) + ] + +platformTemplateEnv :: Platform -> PathTemplateEnv +platformTemplateEnv (Platform arch os) = + [(OSVar, PathTemplate [Ordinary $ display os]) + ,(ArchVar, PathTemplate [Ordinary $ display arch]) + ] + +abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv +abiTemplateEnv compiler (Platform arch os) = + [(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++ + '-':display (compilerInfoId compiler) ++ + case compilerInfoAbiTag compiler of + NoAbiTag -> "" + AbiTag tag -> '-':tag]) + ,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)]) + ] + +installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv +installDirsTemplateEnv dirs = + [(PrefixVar, prefix dirs) + ,(BindirVar, bindir dirs) + ,(LibdirVar, libdir dirs) + ,(LibsubdirVar, libsubdir dirs) + ,(DynlibdirVar, dynlibdir dirs) + ,(DatadirVar, datadir dirs) + ,(DatasubdirVar, datasubdir dirs) + ,(DocdirVar, docdir dirs) + ,(HtmldirVar, htmldir dirs) + ] + + +-- --------------------------------------------------------------------------- +-- Parsing and showing path templates: + +-- The textual format is that of an ordinary Haskell String, eg +-- "$prefix/bin" +-- and this gets parsed to the internal representation as a sequence of path +-- spans which are either strings or variables, eg: +-- PathTemplate [Variable PrefixVar, Ordinary "/bin" ] + +instance Show PathTemplateVariable where + show PrefixVar = "prefix" + show LibNameVar = "libname" + show BindirVar = "bindir" + show LibdirVar = "libdir" + show LibsubdirVar = "libsubdir" + show DynlibdirVar = "dynlibdir" + show DatadirVar = "datadir" + show DatasubdirVar = "datasubdir" + show DocdirVar = "docdir" + show HtmldirVar = "htmldir" + show PkgNameVar = "pkg" + show PkgVerVar = "version" + show PkgIdVar = "pkgid" + show CompilerVar = "compiler" + show OSVar = "os" + show ArchVar = "arch" + show 
AbiTagVar = "abitag" + show AbiVar = "abi" + show ExecutableNameVar = "executablename" + show TestSuiteNameVar = "test-suite" + show TestSuiteResultVar = "result" + show BenchmarkNameVar = "benchmark" + +instance Read PathTemplateVariable where + readsPrec _ s = + take 1 + [ (var, drop (length varStr) s) + | (varStr, var) <- vars + , varStr `isPrefixOf` s ] + -- NB: order matters! Longer strings first + where vars = [("prefix", PrefixVar) + ,("bindir", BindirVar) + ,("libdir", LibdirVar) + ,("libsubdir", LibsubdirVar) + ,("dynlibdir", DynlibdirVar) + ,("datadir", DatadirVar) + ,("datasubdir", DatasubdirVar) + ,("docdir", DocdirVar) + ,("htmldir", HtmldirVar) + ,("pkgid", PkgIdVar) + ,("libname", LibNameVar) + ,("pkgkey", LibNameVar) -- backwards compatibility + ,("pkg", PkgNameVar) + ,("version", PkgVerVar) + ,("compiler", CompilerVar) + ,("os", OSVar) + ,("arch", ArchVar) + ,("abitag", AbiTagVar) + ,("abi", AbiVar) + ,("executablename", ExecutableNameVar) + ,("test-suite", TestSuiteNameVar) + ,("result", TestSuiteResultVar) + ,("benchmark", BenchmarkNameVar)] + +instance Show PathComponent where + show (Ordinary path) = path + show (Variable var) = '$':show var + showList = foldr (\x -> (shows x .)) id + +instance Read PathComponent where + -- for some reason we collapse multiple $ symbols here + readsPrec _ = lex0 + where lex0 [] = [] + lex0 ('$':'$':s') = lex0 ('$':s') + lex0 ('$':s') = case [ (Variable var, s'') + | (var, s'') <- reads s' ] of + [] -> lex1 "$" s' + ok -> ok + lex0 s' = lex1 [] s' + lex1 "" "" = [] + lex1 acc "" = [(Ordinary (reverse acc), "")] + lex1 acc ('$':'$':s) = lex1 acc ('$':s) + lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)] + lex1 acc (c:s) = lex1 (c:acc) s + readList [] = [([],"")] + readList s = [ (component:components, s'') + | (component, s') <- reads s + , (components, s'') <- readList s' ] + +instance Show PathTemplate where + show (PathTemplate template) = show (show template) + +instance Read PathTemplate where + readsPrec p s = [ (PathTemplate template, s') + | (path, s') <- readsPrec p s + , (template, "") <- reads path ] + +-- --------------------------------------------------------------------------- +-- Internal utilities + +getWindowsProgramFilesDir :: IO FilePath +getWindowsProgramFilesDir = do +#if mingw32_HOST_OS + m <- shGetFolderPath csidl_PROGRAM_FILES +#else + let m = Nothing +#endif + return (fromMaybe "C:\\Program Files" m) + +#if mingw32_HOST_OS +shGetFolderPath :: CInt -> IO (Maybe FilePath) +shGetFolderPath n = + allocaArray long_path_size $ \pPath -> do + r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath + if (r /= 0) + then return Nothing + else do s <- peekCWString pPath; return (Just s) + where + long_path_size = 1024 -- MAX_PATH is 260, this should be plenty + +csidl_PROGRAM_FILES :: CInt +csidl_PROGRAM_FILES = 0x0026 +-- csidl_PROGRAM_FILES_COMMON :: CInt +-- csidl_PROGRAM_FILES_COMMON = 0x002b + +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW" + c_SHGetFolderPath :: Ptr () + -> CInt + -> Ptr () + -> CInt + -> CWString + -> IO CInt +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Install.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Install.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Install.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Install.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,187 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Install +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into installing a built package. Performs the +-- \"@.\/setup install@\" and \"@.\/setup copy@\" actions. It moves files into +-- place based on the prefix argument. It does the generic bits and then calls +-- compiler-specific functions to do the rest. + +module Distribution.Simple.Install ( + install, + ) where + +import Distribution.PackageDescription +import Distribution.Package (Package(..)) +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths (haddockName, haddockPref') +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose + , installDirectoryContents, installOrdinaryFile, isInSearchPath + , die, info, notice, warn, matchDirFileGlob ) +import Distribution.Simple.Compiler + ( CompilerFlavor(..), compilerFlavor ) +import Distribution.Simple.Setup (CopyFlags(..), fromFlag + ,HaddockTarget(ForDevelopment)) + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.JHC as JHC +import qualified Distribution.Simple.LHC as LHC +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite + +import Control.Monad (when, unless) +import System.Directory + ( doesDirectoryExist, doesFileExist ) +import System.FilePath + ( takeFileName, takeDirectory, (), isAbsolute ) + +import Distribution.Verbosity +import Distribution.Text + ( display ) + +-- |Perform the \"@.\/setup install@\" and \"@.\/setup copy@\" +-- actions. Move files into place based on the prefix argument. + +install :: PackageDescription -- ^information from the .cabal file + -> LocalBuildInfo -- ^information from the configure step + -> CopyFlags -- ^flags sent to copy or install + -> IO () +install pkg_descr lbi flags = do + let distPref = fromFlag (copyDistPref flags) + verbosity = fromFlag (copyVerbosity flags) + copydest = fromFlag (copyDest flags) + installDirs@(InstallDirs { + bindir = binPref, + libdir = libPref, + dynlibdir = dynlibPref, + datadir = dataPref, + docdir = docPref, + htmldir = htmlPref, + haddockdir = interfacePref, + includedir = incPref}) + -- Using the library clbi for binPref is a hack; + -- binPref should be computed per executable + = absoluteInstallDirs pkg_descr lbi copydest + + progPrefixPref = substPathTemplate (packageId pkg_descr) lbi (progPrefix lbi) + progSuffixPref = substPathTemplate (packageId pkg_descr) lbi (progSuffix lbi) + + unless (hasLibs pkg_descr || hasExes pkg_descr) $ + die "No executables and no library found. Nothing to do." 
+ docExists <- doesDirectoryExist $ haddockPref' ForDevelopment distPref pkg_descr + info verbosity ("directory " ++ haddockPref' ForDevelopment distPref pkg_descr ++ + " does exist: " ++ show docExists) + + installDataFiles verbosity pkg_descr dataPref + + when docExists $ do + createDirectoryIfMissingVerbose verbosity True htmlPref + installDirectoryContents verbosity + (haddockPref' ForDevelopment distPref pkg_descr) htmlPref + -- setPermissionsRecursive [Read] htmlPref + -- The haddock interface file actually already got installed + -- in the recursive copy, but now we install it where we actually + -- want it to be (normally the same place). We could remove the + -- copy in htmlPref first. + let haddockInterfaceFileSrc = haddockPref' ForDevelopment distPref pkg_descr + haddockName pkg_descr + haddockInterfaceFileDest = interfacePref haddockName pkg_descr + -- We only generate the haddock interface file for libs, So if the + -- package consists only of executables there will not be one: + exists <- doesFileExist haddockInterfaceFileSrc + when exists $ do + createDirectoryIfMissingVerbose verbosity True interfacePref + installOrdinaryFile verbosity haddockInterfaceFileSrc + haddockInterfaceFileDest + + let lfiles = licenseFiles pkg_descr + unless (null lfiles) $ do + createDirectoryIfMissingVerbose verbosity True docPref + sequence_ + [ installOrdinaryFile verbosity lfile (docPref takeFileName lfile) + | lfile <- lfiles ] + + let buildPref = buildDir lbi + when (hasLibs pkg_descr) $ + notice verbosity ("Installing library in " ++ libPref) + when (hasExes pkg_descr) $ do + notice verbosity ("Installing executable(s) in " ++ binPref) + inPath <- isInSearchPath binPref + when (not inPath) $ + warn verbosity ("The directory " ++ binPref + ++ " is not in the system search path.") + + -- install include files for all compilers - they may be needed to compile + -- haskell files (using the CPP extension) + -- + when (hasLibs pkg_descr) $ installIncludeFiles verbosity pkg_descr incPref + + withLibLBI pkg_descr lbi $ + case compilerFlavor (compiler lbi) of + GHC -> GHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr + GHCJS -> GHCJS.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr + LHC -> LHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr + JHC -> JHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr + UHC -> UHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr + HaskellSuite _ -> HaskellSuite.installLib + verbosity lbi libPref dynlibPref buildPref pkg_descr + _ -> \_ _ -> die $ "installing with " + ++ display (compilerFlavor (compiler lbi)) + ++ " is not implemented" + + withExe pkg_descr $ + case compilerFlavor (compiler lbi) of + GHC -> GHC.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr + GHCJS -> GHCJS.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr + LHC -> LHC.installExe verbosity lbi installDirs buildPref (progPrefixPref, progSuffixPref) pkg_descr + JHC -> JHC.installExe verbosity binPref buildPref (progPrefixPref, progSuffixPref) pkg_descr + UHC -> \_ -> return () + HaskellSuite {} -> \_ -> return () + _ -> \_ -> die $ "installing with " + ++ display (compilerFlavor (compiler lbi)) + ++ " is not implemented" + -- register step should be performed by caller. 
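The install action above computes every target directory with absoluteInstallDirs pkg_descr lbi copydest, so a "./setup copy --destdir=..." run stages all of the otherwise-absolute install paths under the destination root instead of writing to the live prefix. A minimal, self-contained sketch of that relocation step (plain filepath code with assumed names, not the Cabal implementation):

import System.FilePath (dropDrive, (</>))

-- Prepend a staging root (for example a packaging scratch directory) to an
-- already-absolute install path, mirroring the 'CopyTo destdir' case of
-- 'absoluteInstallDirs': the drive/leading separator is dropped first so the
-- result stays inside the staging root.
relocateUnderDestDir :: FilePath -> FilePath -> FilePath
relocateUnderDestDir destdir = (destdir </>) . dropDrive

main :: IO ()
main =
  -- prints "debian/tmp/opt/cabal/1.24/bin"
  putStrLn (relocateUnderDestDir "debian/tmp" "/opt/cabal/1.24/bin")

Dropping the drive (or leading separator) before re-rooting is what keeps an absolute source path from escaping the destination directory.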
+
+-- | Install the files listed in data-files
+--
+installDataFiles :: Verbosity -> PackageDescription -> FilePath -> IO ()
+installDataFiles verbosity pkg_descr destDataDir =
+  flip mapM_ (dataFiles pkg_descr) $ \ file -> do
+    let srcDataDir = dataDir pkg_descr
+    files <- matchDirFileGlob srcDataDir file
+    let dir = takeDirectory file
+    createDirectoryIfMissingVerbose verbosity True (destDataDir </> dir)
+    sequence_ [ installOrdinaryFile verbosity (srcDataDir  </> file')
+                                              (destDataDir </> file')
+              | file' <- files ]
+
+-- | Install the files listed in install-includes
+--
+installIncludeFiles :: Verbosity -> PackageDescription -> FilePath -> IO ()
+installIncludeFiles verbosity
+                    PackageDescription { library = Just lib } destIncludeDir = do
+
+  incs <- mapM (findInc relincdirs) (installIncludes lbi)
+  sequence_
+    [ do createDirectoryIfMissingVerbose verbosity True destDir
+         installOrdinaryFile verbosity srcFile destFile
+    | (relFile, srcFile) <- incs
+    , let destFile = destIncludeDir </> relFile
+          destDir  = takeDirectory destFile ]
+  where
+    relincdirs = "." : filter (not.isAbsolute) (includeDirs lbi)
+    lbi = libBuildInfo lib
+
+    findInc []         file = die ("can't find include file " ++ file)
+    findInc (dir:dirs) file = do
+      let path = dir </> file
+      exists <- doesFileExist path
+      if exists then return (file, path) else findInc dirs file
+installIncludeFiles _ _ _ = die "installIncludeFiles: Can't happen?"
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/JHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/JHC.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/JHC.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/JHC.hs 2016-12-23 10:35:21.000000000 +0000
@@ -0,0 +1,186 @@
+-----------------------------------------------------------------------------
+-- |
+-- Module      :  Distribution.Simple.JHC
+-- Copyright   :  Isaac Jones 2003-2006
+-- License     :  BSD3
+--
+-- Maintainer  :  cabal-devel@haskell.org
+-- Portability :  portable
+--
+-- This module contains most of the JHC-specific code for configuring, building
+-- and installing packages.
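installIncludeFiles above resolves each install-includes entry by probing the package's include directories in order and taking the first file that exists. A self-contained sketch of that lookup, using only the directory and filepath packages (the helper name findFirstExisting is made up here; the real findInc calls die instead of returning Nothing):

import System.Directory (doesFileExist)
import System.FilePath ((</>))

-- Try each search directory in turn and return the first location at which
-- the requested relative file actually exists.
findFirstExisting :: [FilePath] -> FilePath -> IO (Maybe FilePath)
findFirstExisting []         _    = return Nothing
findFirstExisting (dir:dirs) file = do
  let path = dir </> file
  exists <- doesFileExist path
  if exists then return (Just path) else findFirstExisting dirs file

main :: IO ()
main = findFirstExisting [".", "include", "cbits"] "HsExample.h" >>= print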
+ +module Distribution.Simple.JHC ( + configure, getInstalledPackages, + buildLib, buildExe, + installLib, installExe + ) where + +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.InstalledPackageInfo +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Language.Haskell.Extension +import Distribution.Simple.Program +import Distribution.Version +import Distribution.Package +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Text + +import System.FilePath ( () ) +import Distribution.Compat.ReadP + ( readP_to_S, string, skipSpaces ) +import Distribution.System ( Platform ) + +import Data.List ( nub ) +import Data.Char ( isSpace ) +import qualified Data.Map as M ( empty ) +import Data.Maybe ( fromMaybe ) + +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity hcPath _hcPkgPath conf = do + + (jhcProg, _, conf') <- requireProgramVersion verbosity + jhcProgram (orLaterVersion (Version [0,7,2] [])) + (userMaybeSpecifyPath "jhc" hcPath conf) + + let Just version = programVersion jhcProg + comp = Compiler { + compilerId = CompilerId JHC version, + compilerAbiTag = NoAbiTag, + compilerCompat = [], + compilerLanguages = jhcLanguages, + compilerExtensions = jhcLanguageExtensions, + compilerProperties = M.empty + } + compPlatform = Nothing + return (comp, compPlatform, conf') + +jhcLanguages :: [(Language, Flag)] +jhcLanguages = [(Haskell98, "")] + +-- | The flags for the supported extensions +jhcLanguageExtensions :: [(Extension, Flag)] +jhcLanguageExtensions = + [(EnableExtension TypeSynonymInstances , "") + ,(DisableExtension TypeSynonymInstances , "") + ,(EnableExtension ForeignFunctionInterface , "") + ,(DisableExtension ForeignFunctionInterface , "") + ,(EnableExtension ImplicitPrelude , "") -- Wrong + ,(DisableExtension ImplicitPrelude , "--noprelude") + ,(EnableExtension CPP , "-fcpp") + ,(DisableExtension CPP , "-fno-cpp") + ] + +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity _packageDBs conf = do + -- jhc --list-libraries lists all available libraries. + -- How shall I find out, whether they are global or local + -- without checking all files and locations? + str <- rawSystemProgramStdoutConf verbosity jhcProgram conf ["--list-libraries"] + let pCheck :: [(a, String)] -> [a] + pCheck rs = [ r | (r,s) <- rs, all isSpace s ] + let parseLine ln = + pCheck (readP_to_S + (skipSpaces >> string "Name:" >> skipSpaces >> parse) ln) + return $ + PackageIndex.fromList $ + map (\p -> emptyInstalledPackageInfo { + InstalledPackageInfo.installedUnitId = mkLegacyUnitId p, + InstalledPackageInfo.sourcePackageId = p + }) $ + concatMap parseLine $ + lines str + +-- ----------------------------------------------------------------------------- +-- Building + +-- | Building a package for JHC. +-- Currently C source files are not supported. 
+buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi) + let libBi = libBuildInfo lib + let args = constructJHCCmdLine lbi libBi clbi (buildDir lbi) verbosity + let pkgid = display (packageId pkg_descr) + pfile = buildDir lbi "jhc-pkg.conf" + hlfile= buildDir lbi (pkgid ++ ".hl") + writeFileAtomic pfile . BS.Char8.pack $ jhcPkgConf pkg_descr + rawSystemProgram verbosity jhcProg $ + ["--build-hl="++pfile, "-o", hlfile] ++ + args ++ map display (libModules lib) + +-- | Building an executable for JHC. +-- Currently C source files are not supported. +buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe verbosity _pkg_descr lbi exe clbi = do + let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi) + let exeBi = buildInfo exe + let out = buildDir lbi exeName exe + let args = constructJHCCmdLine lbi exeBi clbi (buildDir lbi) verbosity + rawSystemProgram verbosity jhcProg (["-o",out] ++ args ++ [modulePath exe]) + +constructJHCCmdLine :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> Verbosity -> [String] +constructJHCCmdLine lbi bi clbi _odir verbosity = + (if verbosity >= deafening then ["-v"] else []) + ++ hcOptions JHC bi + ++ languageToFlags (compiler lbi) (defaultLanguage bi) + ++ extensionsToFlags (compiler lbi) (usedExtensions bi) + ++ ["--noauto","-i-"] + ++ concat [["-i", l] | l <- nub (hsSourceDirs bi)] + ++ ["-i", autogenModulesDir lbi] + ++ ["-optc" ++ opt | opt <- PD.ccOptions bi] + -- It would be better if JHC would accept package names with versions, + -- but JHC-0.7.2 doesn't accept this. + -- Thus, we have to strip the version with 'pkgName'. + ++ (concat [ ["-p", display (pkgName pkgid)] + | (_, pkgid) <- componentPackageDeps clbi ]) + +jhcPkgConf :: PackageDescription -> String +jhcPkgConf pd = + let sline name sel = name ++ ": "++sel pd + lib = fromMaybe (error "no library available") . library + comma = intercalate "," . map display + in unlines [sline "name" (display . pkgName . packageId) + ,sline "version" (display . pkgVersion . packageId) + ,sline "exposed-modules" (comma . PD.exposedModules . lib) + ,sline "hidden-modules" (comma . otherModules . libBuildInfo . 
lib) + ] + +installLib :: Verbosity + -> LocalBuildInfo + -> FilePath + -> FilePath + -> FilePath + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verb _lbi dest _dyn_dest build_dir pkg_descr _lib _clbi = do + let p = display (packageId pkg_descr)++".hl" + createDirectoryIfMissingVerbose verb True dest + installOrdinaryFile verb (build_dir p) (dest p) + +installExe :: Verbosity -> FilePath -> FilePath -> (FilePath,FilePath) -> PackageDescription -> Executable -> IO () +installExe verb dest build_dir (progprefix,progsuffix) _ exe = do + let exe_name = exeName exe + src = exe_name exeExtension + out = (progprefix ++ exe_name ++ progsuffix) exeExtension + createDirectoryIfMissingVerbose verb True dest + installExecutableFile verb (build_dir src) (dest out) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/LHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/LHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/LHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/LHC.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,770 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.LHC +-- Copyright : Isaac Jones 2003-2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is a fairly large module. It contains most of the GHC-specific code for +-- configuring, building and installing packages. It also exports a function +-- for finding out what packages are already installed. Configuring involves +-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions +-- this version of ghc supports and returning a 'Compiler' value. +-- +-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out +-- what packages are installed. +-- +-- Building is somewhat complex as there is quite a bit of information to take +-- into account. We have to build libs and programs, possibly for profiling and +-- shared libs. We have to support building libraries that will be usable by +-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files +-- using ghc. Linking, especially for @split-objs@ is remarkably complex, +-- partly because there tend to be 1,000's of @.o@ files and this can often be +-- more than we can pass to the @ld@ or @ar@ programs in one go. +-- +-- Installing for libs and exes involves finding the right files and copying +-- them to the right places. One of the more tricky things about this module is +-- remembering the layout of files in the build directory (which is not +-- explicitly documented) and thus what search dirs are used for various kinds +-- of files. 
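The module header above notes that @split-objs@ links can involve thousands of @.o@ files, more than fits on a single @ar@ or @ld@ command line; later in this module that is handled by invoking the archiver repeatedly over argument chunks (the @xargs maxCommandLineSize@ calls in 'buildLib'). A rough, self-contained sketch of that chunking idea, with assumed names and a simplified size measure:

-- Split a long argument list into chunks whose rendered length stays under a
-- byte budget, then run an action once per chunk with the fixed arguments
-- prepended -- roughly what an xargs-style helper does.
chunkArgs :: Int -> [String] -> [[String]]
chunkArgs _     []   = []
chunkArgs limit args = chunk : chunkArgs limit rest
  where
    (chunk, rest) = go 0 args
    go _ [] = ([], [])
    go n (a:as)
      | n > 0 && n + length a + 1 > limit = ([], a:as)
      | otherwise = let (xs, ys) = go (n + length a + 1) as in (a:xs, ys)

runInChunks :: Int -> ([String] -> IO ()) -> [String] -> [String] -> IO ()
runInChunks limit run fixedArgs bulkArgs =
  mapM_ (run . (fixedArgs ++)) (chunkArgs limit bulkArgs)

main :: IO ()
main = runInChunks 40 (putStrLn . unwords)
         ["ar", "q", "libHSexample.a"]
         [ "Mod" ++ show i ++ ".o" | i <- [1 .. 10 :: Int] ]

Because the fixed arguments are repeated on every invocation, this only works with an archiver mode that appends to an existing archive, which is why the "q" (quick-append) flag is used in the ar invocations below.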
+ +module Distribution.Simple.LHC ( + configure, getInstalledPackages, + buildLib, buildExe, + installLib, installExe, + registerPackage, + hcPkgInfo, + ghcOptions, + ghcVerbosityOptions + ) where + +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.InstalledPackageInfo +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.Simple.PackageIndex +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.Simple.Program +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import Distribution.Simple.Compiler +import Distribution.Version +import Distribution.Verbosity +import Distribution.Text +import Distribution.Compat.Exception +import Distribution.System +import Language.Haskell.Extension + +import Control.Monad ( unless, when ) +import Data.Monoid as Mon +import Data.List +import qualified Data.Map as M ( empty ) +import Data.Maybe ( catMaybes ) +import System.Directory ( removeFile, renameFile, + getDirectoryContents, doesFileExist, + getTemporaryDirectory ) +import System.FilePath ( (), (<.>), takeExtension, + takeDirectory, replaceExtension ) +import System.IO (hClose, hPutStrLn) + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity hcPath hcPkgPath conf = do + + (lhcProg, lhcVersion, conf') <- + requireProgramVersion verbosity lhcProgram + (orLaterVersion (Version [0,7] [])) + (userMaybeSpecifyPath "lhc" hcPath conf) + + (lhcPkgProg, lhcPkgVersion, conf'') <- + requireProgramVersion verbosity lhcPkgProgram + (orLaterVersion (Version [0,7] [])) + (userMaybeSpecifyPath "lhc-pkg" hcPkgPath conf') + + when (lhcVersion /= lhcPkgVersion) $ die $ + "Version mismatch between lhc and lhc-pkg: " + ++ programPath lhcProg ++ " is version " ++ display lhcVersion ++ " " + ++ programPath lhcPkgProg ++ " is version " ++ display lhcPkgVersion + + languages <- getLanguages verbosity lhcProg + extensions <- getExtensions verbosity lhcProg + + let comp = Compiler { + compilerId = CompilerId LHC lhcVersion, + compilerAbiTag = NoAbiTag, + compilerCompat = [], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = M.empty + } + conf''' = configureToolchain lhcProg conf'' -- configure gcc and ld + compPlatform = Nothing + return (comp, compPlatform, conf''') + +-- | Adjust the way we find and configure gcc and ld +-- +configureToolchain :: ConfiguredProgram -> ProgramConfiguration + -> ProgramConfiguration +configureToolchain lhcProg = + addKnownProgram gccProgram { + programFindLocation = findProg gccProgram (baseDir "gcc.exe"), + programPostConf = configureGcc + } + . 
addKnownProgram ldProgram { + programFindLocation = findProg ldProgram (libDir "ld.exe"), + programPostConf = configureLd + } + where + compilerDir = takeDirectory (programPath lhcProg) + baseDir = takeDirectory compilerDir + libDir = baseDir "gcc-lib" + includeDir = baseDir "include" "mingw" + isWindows = case buildOS of Windows -> True; _ -> False + + -- on Windows finding and configuring ghc's gcc and ld is a bit special + findProg :: Program -> FilePath + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) + findProg prog location | isWindows = \verbosity searchpath -> do + exists <- doesFileExist location + if exists then return (Just (location, [])) + else do warn verbosity ("Couldn't find " ++ programName prog ++ " where I expected it. Trying the search path.") + programFindLocation prog verbosity searchpath + | otherwise = programFindLocation prog + + configureGcc :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureGcc + | isWindows = \_ gccProg -> case programLocation gccProg of + -- if it's found on system then it means we're using the result + -- of programFindLocation above rather than a user-supplied path + -- that means we should add this extra flag to tell ghc's gcc + -- where it lives and thus where gcc can find its various files: + FoundOnSystem {} -> return gccProg { + programDefaultArgs = ["-B" ++ libDir, + "-I" ++ includeDir] + } + UserSpecified {} -> return gccProg + | otherwise = \_ gccProg -> return gccProg + + -- we need to find out if ld supports the -x flag + configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureLd verbosity ldProg = do + tempDir <- getTemporaryDirectory + ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> + withTempFile tempDir ".o" $ \testofile testohnd -> do + hPutStrLn testchnd "int foo() { return 0; }" + hClose testchnd; hClose testohnd + rawSystemProgram verbosity lhcProg ["-c", testcfile, + "-o", testofile] + withTempFile tempDir ".o" $ \testofile' testohnd' -> + do + hClose testohnd' + _ <- rawSystemProgramStdout verbosity ldProg + ["-x", "-r", testofile, "-o", testofile'] + return True + `catchIO` (\_ -> return False) + `catchExit` (\_ -> return False) + if ldx + then return ldProg { programDefaultArgs = ["-x"] } + else return ldProg + +getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Flag)] +getLanguages _ _ = return [(Haskell98, "")] +--FIXME: does lhc support -XHaskell98 flag? from what version? + +getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Flag)] +getExtensions verbosity lhcProg = do + exts <- rawSystemStdout verbosity (programPath lhcProg) + ["--supported-languages"] + -- GHC has the annoying habit of inverting some of the extensions + -- so we have to try parsing ("No" ++ ghcExtensionName) first + let readExtension str = do + ext <- simpleParse ("No" ++ str) + case ext of + UnknownExtension _ -> simpleParse str + _ -> return ext + return $ [ (ext, "-X" ++ display ext) + | Just ext <- map readExtension (lines exts) ] + +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity packagedbs conf = do + checkPackageDbStack packagedbs + pkgss <- getInstalledPackages' lhcPkg verbosity packagedbs conf + let indexes = [ PackageIndex.fromList (map (substTopDir topDir) pkgs) + | (_, pkgs) <- pkgss ] + return $! (Mon.mconcat indexes) + + where + -- On Windows, various fields have $topdir/foo rather than full + -- paths. 
We need to substitute the right value in so that when + -- we, for example, call gcc, we have proper paths to give it + Just ghcProg = lookupProgram lhcProgram conf + Just lhcPkg = lookupProgram lhcPkgProgram conf + compilerDir = takeDirectory (programPath ghcProg) + topDir = takeDirectory compilerDir + +checkPackageDbStack :: PackageDBStack -> IO () +checkPackageDbStack (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStack _ = + die $ "GHC.getInstalledPackages: the global package db must be " + ++ "specified first and cannot be specified multiple times" + +-- | Get the packages from specific PackageDBs, not cumulative. +-- +getInstalledPackages' :: ConfiguredProgram -> Verbosity + -> [PackageDB] -> ProgramConfiguration + -> IO [(PackageDB, [InstalledPackageInfo])] +getInstalledPackages' lhcPkg verbosity packagedbs conf + = + sequence + [ do str <- rawSystemProgramStdoutConf verbosity lhcPkgProgram conf + ["dump", packageDbGhcPkgFlag packagedb] + `catchExit` \_ -> die $ "ghc-pkg dump failed" + case parsePackages str of + Left ok -> return (packagedb, ok) + _ -> die "failed to parse output of 'ghc-pkg dump'" + | packagedb <- packagedbs ] + + where + parsePackages str = + let parsed = map parseInstalledPackageInfo (splitPkgs str) + in case [ msg | ParseFailed msg <- parsed ] of + [] -> Left [ pkg | ParseOk _ pkg <- parsed ] + msgs -> Right msgs + + splitPkgs :: String -> [String] + splitPkgs = map unlines . splitWith ("---" ==) . lines + where + splitWith :: (a -> Bool) -> [a] -> [[a]] + splitWith p xs = ys : case zs of + [] -> [] + _:ws -> splitWith p ws + where (ys,zs) = break p xs + + packageDbGhcPkgFlag GlobalPackageDB = "--global" + packageDbGhcPkgFlag UserPackageDB = "--user" + packageDbGhcPkgFlag (SpecificPackageDB path) = "--" ++ packageDbFlag ++ "=" ++ path + + packageDbFlag + | programVersion lhcPkg < Just (Version [7,5] []) + = "package-conf" + | otherwise + = "package-db" + + +substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo +substTopDir topDir ipo + = ipo { + InstalledPackageInfo.importDirs + = map f (InstalledPackageInfo.importDirs ipo), + InstalledPackageInfo.libraryDirs + = map f (InstalledPackageInfo.libraryDirs ipo), + InstalledPackageInfo.includeDirs + = map f (InstalledPackageInfo.includeDirs ipo), + InstalledPackageInfo.frameworkDirs + = map f (InstalledPackageInfo.frameworkDirs ipo), + InstalledPackageInfo.haddockInterfaces + = map f (InstalledPackageInfo.haddockInterfaces ipo), + InstalledPackageInfo.haddockHTMLs + = map f (InstalledPackageInfo.haddockHTMLs ipo) + } + where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest + f x = x + +-- ----------------------------------------------------------------------------- +-- Building + +-- | Build a library with LHC. 
+-- +buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + let libName = componentUnitId clbi + pref = buildDir lbi + pkgid = packageId pkg_descr + runGhcProg = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) + ifVanillaLib forceVanilla = when (forceVanilla || withVanillaLib lbi) + ifProfLib = when (withProfLib lbi) + ifSharedLib = when (withSharedLib lbi) + ifGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi) + + libBi <- hackThreadedFlag verbosity + (compiler lbi) (withProfLib lbi) (libBuildInfo lib) + + let libTargetDir = pref + forceVanillaLib = EnableExtension TemplateHaskell `elem` allExtensions libBi + -- TH always needs vanilla libs, even when building for profiling + + createDirectoryIfMissingVerbose verbosity True libTargetDir + -- TODO: do we need to put hs-boot files into place for mutually recursive modules? + let ghcArgs = + ["-package-name", display pkgid ] + ++ constructGHCCmdLine lbi libBi clbi libTargetDir verbosity + ++ map display (libModules lib) + lhcWrap x = ["--build-library", "--ghc-opts=" ++ unwords x] + ghcArgsProf = ghcArgs + ++ ["-prof", + "-hisuf", "p_hi", + "-osuf", "p_o" + ] + ++ hcProfOptions GHC libBi + ghcArgsShared = ghcArgs + ++ ["-dynamic", + "-hisuf", "dyn_hi", + "-osuf", "dyn_o", "-fPIC" + ] + ++ hcSharedOptions GHC libBi + unless (null (libModules lib)) $ + do ifVanillaLib forceVanillaLib (runGhcProg $ lhcWrap ghcArgs) + ifProfLib (runGhcProg $ lhcWrap ghcArgsProf) + ifSharedLib (runGhcProg $ lhcWrap ghcArgsShared) + + -- build any C sources + unless (null (cSources libBi)) $ do + info verbosity "Building C Sources..." + sequence_ [do let (odir,args) = constructCcCmdLine lbi libBi clbi pref + filename verbosity + createDirectoryIfMissingVerbose verbosity True odir + runGhcProg args + ifSharedLib (runGhcProg (args ++ ["-fPIC", "-osuf dyn_o"])) + | filename <- cSources libBi] + + -- link: + info verbosity "Linking..." 
+ let cObjs = map (`replaceExtension` objExtension) (cSources libBi) + cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) (cSources libBi) + cid = compilerId (compiler lbi) + vanillaLibFilePath = libTargetDir mkLibName libName + profileLibFilePath = libTargetDir mkProfLibName libName + sharedLibFilePath = libTargetDir mkSharedLibName cid libName + ghciLibFilePath = libTargetDir mkGHCiLibName libName + + stubObjs <- fmap catMaybes $ sequence + [ findFileWithExtension [objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | x <- libModules lib ] + stubProfObjs <- fmap catMaybes $ sequence + [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | x <- libModules lib ] + stubSharedObjs <- fmap catMaybes $ sequence + [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | x <- libModules lib ] + + hObjs <- getHaskellObjects lib lbi + pref objExtension True + hProfObjs <- + if (withProfLib lbi) + then getHaskellObjects lib lbi + pref ("p_" ++ objExtension) True + else return [] + hSharedObjs <- + if (withSharedLib lbi) + then getHaskellObjects lib lbi + pref ("dyn_" ++ objExtension) False + else return [] + + unless (null hObjs && null cObjs && null stubObjs) $ do + -- first remove library files if they exists + sequence_ + [ removeFile libFilePath `catchIO` \_ -> return () + | libFilePath <- [vanillaLibFilePath, profileLibFilePath + ,sharedLibFilePath, ghciLibFilePath] ] + + let arVerbosity | verbosity >= deafening = "v" + | verbosity >= normal = "" + | otherwise = "c" + arArgs = ["q"++ arVerbosity] + ++ [vanillaLibFilePath] + arObjArgs = + hObjs + ++ map (pref ) cObjs + ++ stubObjs + arProfArgs = ["q"++ arVerbosity] + ++ [profileLibFilePath] + arProfObjArgs = + hProfObjs + ++ map (pref ) cObjs + ++ stubProfObjs + ldArgs = ["-r"] + ++ ["-o", ghciLibFilePath <.> "tmp"] + ldObjArgs = + hObjs + ++ map (pref ) cObjs + ++ stubObjs + ghcSharedObjArgs = + hSharedObjs + ++ map (pref ) cSharedObjs + ++ stubSharedObjs + -- After the relocation lib is created we invoke ghc -shared + -- with the dependencies spelled out as -package arguments + -- and ghc invokes the linker with the proper library paths + ghcSharedLinkArgs = + [ "-no-auto-link-packages", + "-shared", + "-dynamic", + "-o", sharedLibFilePath ] + ++ ghcSharedObjArgs + ++ ["-package-name", display pkgid ] + ++ ghcPackageFlags lbi clbi + ++ ["-l"++extraLib | extraLib <- extraLibs libBi] + ++ ["-L"++extraLibDir | extraLibDir <- extraLibDirs libBi] + + runLd ldLibName args = do + exists <- doesFileExist ldLibName + -- This method is called iteratively by xargs. The + -- output goes to .tmp, and any existing file + -- named is included when linking. The + -- output is renamed to . + rawSystemProgramConf verbosity ldProgram (withPrograms lbi) + (args ++ if exists then [ldLibName] else []) + renameFile (ldLibName <.> "tmp") ldLibName + + runAr = rawSystemProgramConf verbosity arProgram (withPrograms lbi) + + --TODO: discover this at configure time or runtime on Unix + -- The value is 32k on Windows and POSIX specifies a minimum of 4k + -- but all sensible Unixes use more than 4k. 
+ -- we could use getSysVar ArgumentLimit but that's in the Unix lib + maxCommandLineSize = 30 * 1024 + + ifVanillaLib False $ xargs maxCommandLineSize + runAr arArgs arObjArgs + + ifProfLib $ xargs maxCommandLineSize + runAr arProfArgs arProfObjArgs + + ifGHCiLib $ xargs maxCommandLineSize + (runLd ghciLibFilePath) ldArgs ldObjArgs + + ifSharedLib $ runGhcProg ghcSharedLinkArgs + + +-- | Build an executable with LHC. +-- +buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe verbosity _pkg_descr lbi + exe@Executable { exeName = exeName', modulePath = modPath } clbi = do + let pref = buildDir lbi + runGhcProg = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) + + exeBi <- hackThreadedFlag verbosity + (compiler lbi) (withProfExe lbi) (buildInfo exe) + + -- exeNameReal, the name that GHC really uses (with .exe on Windows) + let exeNameReal = exeName' <.> + (if null $ takeExtension exeName' then exeExtension else "") + + let targetDir = pref exeName' + let exeDir = targetDir (exeName' ++ "-tmp") + createDirectoryIfMissingVerbose verbosity True targetDir + createDirectoryIfMissingVerbose verbosity True exeDir + -- TODO: do we need to put hs-boot files into place for mutually recursive modules? + -- FIX: what about exeName.hi-boot? + + -- build executables + unless (null (cSources exeBi)) $ do + info verbosity "Building C Sources." + sequence_ [do let (odir,args) = constructCcCmdLine lbi exeBi clbi + exeDir filename verbosity + createDirectoryIfMissingVerbose verbosity True odir + runGhcProg args + | filename <- cSources exeBi] + + srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath + + let cObjs = map (`replaceExtension` objExtension) (cSources exeBi) + let lhcWrap x = ("--ghc-opts\"":x) ++ ["\""] + let binArgs linkExe profExe = + (if linkExe + then ["-o", targetDir exeNameReal] + else ["-c"]) + ++ constructGHCCmdLine lbi exeBi clbi exeDir verbosity + ++ [exeDir x | x <- cObjs] + ++ [srcMainFile] + ++ ["-optl" ++ opt | opt <- PD.ldOptions exeBi] + ++ ["-l"++lib | lib <- extraLibs exeBi] + ++ ["-L"++libDir | libDir <- extraLibDirs exeBi] + ++ concat [["-framework", f] | f <- PD.frameworks exeBi] + ++ if profExe + then ["-prof", + "-hisuf", "p_hi", + "-osuf", "p_o" + ] ++ hcProfOptions GHC exeBi + else [] + + -- For building exe's for profiling that use TH we actually + -- have to build twice, once without profiling and the again + -- with profiling. This is because the code that TH needs to + -- run at compile time needs to be the vanilla ABI so it can + -- be loaded up and run by the compiler. + when (withProfExe lbi && EnableExtension TemplateHaskell `elem` allExtensions exeBi) + (runGhcProg $ lhcWrap (binArgs False False)) + + runGhcProg (binArgs True (withProfExe lbi)) + +-- | Filter the "-threaded" flag when profiling as it does not +-- work with ghc-6.8 and older. +hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo +hackThreadedFlag verbosity comp prof bi + | not mustFilterThreaded = return bi + | otherwise = do + warn verbosity $ "The ghc flag '-threaded' is not compatible with " + ++ "profiling in ghc-6.8 and older. It will be disabled." 
+ return bi { options = filterHcOptions (/= "-threaded") (options bi) } + where + mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] [] + && "-threaded" `elem` hcOptions GHC bi + filterHcOptions p hcoptss = + [ (hc, if hc == GHC then filter p opts else opts) + | (hc, opts) <- hcoptss ] + +-- when using -split-objs, we need to search for object files in the +-- Module_split directory for each module. +getHaskellObjects :: Library -> LocalBuildInfo + -> FilePath -> String -> Bool -> IO [FilePath] +getHaskellObjects lib lbi pref wanted_obj_ext allow_split_objs + | splitObjs lbi && allow_split_objs = do + let dirs = [ pref (ModuleName.toFilePath x ++ "_split") + | x <- libModules lib ] + objss <- mapM getDirectoryContents dirs + let objs = [ dir obj + | (objs',dir) <- zip objss dirs, obj <- objs', + let obj_ext = takeExtension obj, + '.':wanted_obj_ext == obj_ext ] + return objs + | otherwise = + return [ pref ModuleName.toFilePath x <.> wanted_obj_ext + | x <- libModules lib ] + + +constructGHCCmdLine + :: LocalBuildInfo + -> BuildInfo + -> ComponentLocalBuildInfo + -> FilePath + -> Verbosity + -> [String] +constructGHCCmdLine lbi bi clbi odir verbosity = + ["--make"] + ++ ghcVerbosityOptions verbosity + -- Unsupported extensions have already been checked by configure + ++ ghcOptions lbi bi clbi odir + +ghcVerbosityOptions :: Verbosity -> [String] +ghcVerbosityOptions verbosity + | verbosity >= deafening = ["-v"] + | verbosity >= normal = [] + | otherwise = ["-w", "-v0"] + +ghcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> [String] +ghcOptions lbi bi clbi odir + = ["-hide-all-packages"] + ++ ghcPackageDbOptions lbi + ++ (if splitObjs lbi then ["-split-objs"] else []) + ++ ["-i"] + ++ ["-i" ++ odir] + ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)] + ++ ["-i" ++ autogenModulesDir lbi] + ++ ["-I" ++ autogenModulesDir lbi] + ++ ["-I" ++ odir] + ++ ["-I" ++ dir | dir <- PD.includeDirs bi] + ++ ["-optP" ++ opt | opt <- cppOptions bi] + ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi cppHeaderName) ] + ++ [ "-#include \"" ++ inc ++ "\"" | inc <- PD.includes bi ] + ++ [ "-odir", odir, "-hidir", odir ] + ++ (if compilerVersion c >= Version [6,8] [] + then ["-stubdir", odir] else []) + ++ ghcPackageFlags lbi clbi + ++ (case withOptimization lbi of + NoOptimisation -> [] + NormalOptimisation -> ["-O"] + MaximumOptimisation -> ["-O2"]) + ++ hcOptions GHC bi + ++ languageToFlags c (defaultLanguage bi) + ++ extensionsToFlags c (usedExtensions bi) + where c = compiler lbi + +ghcPackageFlags :: LocalBuildInfo -> ComponentLocalBuildInfo -> [String] +ghcPackageFlags lbi clbi + | ghcVer >= Version [6,11] [] + = concat [ ["-package-id", display ipkgid] + | (ipkgid, _) <- componentPackageDeps clbi ] + + | otherwise = concat [ ["-package", display pkgid] + | (_, pkgid) <- componentPackageDeps clbi ] + where + ghcVer = compilerVersion (compiler lbi) + +ghcPackageDbOptions :: LocalBuildInfo -> [String] +ghcPackageDbOptions lbi = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs + (GlobalPackageDB:dbs) -> ("-no-user-" ++ packageDbFlag) + : concatMap specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = [ '-':packageDbFlag, db ] + specific _ = ierror + ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) + + dbstack = withPackageDB lbi + packageDbFlag + | compilerVersion (compiler lbi) < Version [7,5] [] + = "package-conf" + | otherwise + = "package-db" + +constructCcCmdLine :: 
LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath -> Verbosity -> (FilePath,[String]) +constructCcCmdLine lbi bi clbi pref filename verbosity + = let odir | compilerVersion (compiler lbi) >= Version [6,4,1] [] = pref + | otherwise = pref takeDirectory filename + -- ghc 6.4.1 fixed a bug in -odir handling + -- for C compilations. + in + (odir, + ghcCcOptions lbi bi clbi odir + ++ (if verbosity >= deafening then ["-v"] else []) + ++ ["-c",filename]) + + +ghcCcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> [String] +ghcCcOptions lbi bi clbi odir + = ["-I" ++ dir | dir <- PD.includeDirs bi] + ++ ghcPackageDbOptions lbi + ++ ghcPackageFlags lbi clbi + ++ ["-optc" ++ opt | opt <- PD.ccOptions bi] + ++ (case withOptimization lbi of + NoOptimisation -> [] + _ -> ["-optc-O2"]) + ++ ["-odir", odir] + +mkGHCiLibName :: UnitId -> String +mkGHCiLibName lib = getHSLibraryName lib <.> "o" + +-- ----------------------------------------------------------------------------- +-- Installing + +-- |Install executables for GHC. +installExe :: Verbosity + -> LocalBuildInfo + -> InstallDirs FilePath -- ^Where to copy the files to + -> FilePath -- ^Build location + -> (FilePath, FilePath) -- ^Executable (prefix,suffix) + -> PackageDescription + -> Executable + -> IO () +installExe verbosity lbi installDirs buildPref (progprefix, progsuffix) _pkg exe = do + let binDir = bindir installDirs + createDirectoryIfMissingVerbose verbosity True binDir + let exeFileName = exeName exe <.> exeExtension + fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix + installBinary dest = do + installExecutableFile verbosity + (buildPref exeName exe exeFileName) + (dest <.> exeExtension) + stripExe verbosity lbi exeFileName (dest <.> exeExtension) + installBinary (binDir fixedExeBaseName) + +stripExe :: Verbosity -> LocalBuildInfo -> FilePath -> FilePath -> IO () +stripExe verbosity lbi name path = when (stripExes lbi) $ + case lookupProgram stripProgram (withPrograms lbi) of + Just strip -> rawSystemProgram verbosity strip args + Nothing -> unless (buildOS == Windows) $ + -- Don't bother warning on windows, we don't expect them to + -- have the strip program anyway. + warn verbosity $ "Unable to strip executable '" ++ name + ++ "' (missing the 'strip' program)" + where + args = path : case buildOS of + OSX -> ["-x"] -- By default, stripping the ghc binary on at least + -- some OS X installations causes: + -- HSbase-3.0.o: unknown symbol `_environ'" + -- The -x flag fixes that. 
+ _ -> [] + +-- |Install for ghc, .hi, .a and, if --with-ghci given, .o +installLib :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do + -- copy .hi files over: + let copy src dst n = do + createDirectoryIfMissingVerbose verbosity True dst + installOrdinaryFile verbosity (src n) (dst n) + copyModuleFiles ext = + findModuleFiles [builtDir] [ext] (libModules lib) + >>= installOrdinaryFiles verbosity targetDir + ifVanilla $ copyModuleFiles "hi" + ifProf $ copyModuleFiles "p_hi" + hcrFiles <- findModuleFiles (builtDir : hsSourceDirs (libBuildInfo lib)) ["hcr"] (libModules lib) + flip mapM_ hcrFiles $ \(srcBase, srcFile) -> runLhc ["--install-library", srcBase srcFile] + + -- copy the built library files over: + ifVanilla $ copy builtDir targetDir vanillaLibName + ifProf $ copy builtDir targetDir profileLibName + ifGHCi $ copy builtDir targetDir ghciLibName + ifShared $ copy builtDir dynlibTargetDir sharedLibName + + where + cid = compilerId (compiler lbi) + libName = componentUnitId clbi + vanillaLibName = mkLibName libName + profileLibName = mkProfLibName libName + ghciLibName = mkGHCiLibName libName + sharedLibName = mkSharedLibName cid libName + + hasLib = not $ null (libModules lib) + && null (cSources (libBuildInfo lib)) + ifVanilla = when (hasLib && withVanillaLib lbi) + ifProf = when (hasLib && withProfLib lbi) + ifGHCi = when (hasLib && withGHCiLib lbi) + ifShared = when (hasLib && withSharedLib lbi) + + runLhc = rawSystemProgramConf verbosity lhcProgram (withPrograms lbi) + +-- ----------------------------------------------------------------------------- +-- Registering + +registerPackage + :: Verbosity + -> ProgramConfiguration + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity progdb packageDbs installedPkgInfo = + HcPkg.reregister (hcPkgInfo progdb) verbosity packageDbs + (Right installedPkgInfo) + +hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo +hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = lhcPkgProg + , HcPkg.noPkgDbStack = False + , HcPkg.noVerboseFlag = False + , HcPkg.flagPackageConf = False + , HcPkg.supportsDirDbs = True + , HcPkg.requiresDirDbs = True + , HcPkg.nativeMultiInstance = False -- ? + , HcPkg.recacheMultiInstance = False -- ? 
+ } + where + Just lhcPkgProg = lookupProgram lhcPkgProgram conf diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/LocalBuildInfo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/LocalBuildInfo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/LocalBuildInfo.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,515 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.LocalBuildInfo +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Once a package has been configured we have resolved conditionals and +-- dependencies, configured the compiler and other needed external programs. +-- The 'LocalBuildInfo' is used to hold all this information. It holds the +-- install dirs, the compiler, the exact package dependencies, the configured +-- programs, the package database to use and a bunch of miscellaneous configure +-- flags. It gets saved and reloaded from a file (@dist\/setup-config@). It gets +-- passed in to very many subsequent build actions. + +module Distribution.Simple.LocalBuildInfo ( + LocalBuildInfo(..), + externalPackageDeps, + localComponentId, + localUnitId, + localCompatPackageKey, + + -- * Buildable package components + Component(..), + ComponentName(..), + showComponentName, + ComponentLocalBuildInfo(..), + foldComponent, + componentName, + componentBuildInfo, + componentEnabled, + componentDisabledReason, + ComponentDisabledReason(..), + pkgComponents, + pkgEnabledComponents, + lookupComponent, + getComponent, + getComponentLocalBuildInfo, + allComponentsInBuildOrder, + componentsInBuildOrder, + checkComponentsCyclic, + depLibraryPaths, + + withAllComponentsInBuildOrder, + withComponentsInBuildOrder, + withComponentsLBI, + withLibLBI, + withExeLBI, + withTestLBI, + + -- * Installation directories + module Distribution.Simple.InstallDirs, + absoluteInstallDirs, prefixRelativeInstallDirs, + substPathTemplate + ) where + + +import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, + prefixRelativeInstallDirs, + substPathTemplate, ) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.Program +import Distribution.PackageDescription +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Package +import Distribution.Simple.Compiler +import Distribution.Simple.PackageIndex +import Distribution.Simple.Setup +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.System + +import Data.Array ((!)) +import Distribution.Compat.Binary (Binary) +import Data.Graph +import Data.List (nub, find, stripPrefix) +import Data.Maybe +import Data.Tree (flatten) +import GHC.Generics (Generic) +import Data.Map (Map) + +import System.Directory (doesDirectoryExist, canonicalizePath) + +-- | Data cached after configuration step. See also +-- 'Distribution.Simple.Setup.ConfigFlags'. +data LocalBuildInfo = LocalBuildInfo { + configFlags :: ConfigFlags, + -- ^ Options passed to the configuration step. 
+ -- Needed to re-run configuration when .cabal is out of date + flagAssignment :: FlagAssignment, + -- ^ The final set of flags which were picked for this package + extraConfigArgs :: [String], + -- ^ Extra args on the command line for the configuration step. + -- Needed to re-run configuration when .cabal is out of date + installDirTemplates :: InstallDirTemplates, + -- ^ The installation directories for the various different + -- kinds of files + --TODO: inplaceDirTemplates :: InstallDirs FilePath + compiler :: Compiler, + -- ^ The compiler we're building with + hostPlatform :: Platform, + -- ^ The platform we're building for + buildDir :: FilePath, + -- ^ Where to build the package. + componentsConfigs :: [(ComponentName, ComponentLocalBuildInfo, [ComponentName])], + -- ^ All the components to build, ordered by topological sort, and with their dependencies + -- over the intrapackage dependency graph + installedPkgs :: InstalledPackageIndex, + -- ^ All the info about the installed packages that the + -- current package depends on (directly or indirectly). + pkgDescrFile :: Maybe FilePath, + -- ^ the filename containing the .cabal file, if available + localPkgDescr :: PackageDescription, + -- ^ The resolved package description, that does not contain + -- any conditionals. + withPrograms :: ProgramConfiguration, -- ^Location and args for all programs + withPackageDB :: PackageDBStack, -- ^What package database to use, global\/user + withVanillaLib:: Bool, -- ^Whether to build normal libs. + withProfLib :: Bool, -- ^Whether to build profiling versions of libs. + withSharedLib :: Bool, -- ^Whether to build shared versions of libs. + withDynExe :: Bool, -- ^Whether to link executables dynamically + withProfExe :: Bool, -- ^Whether to build executables for profiling. + withProfLibDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. + withProfExeDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. + withOptimization :: OptimisationLevel, -- ^Whether to build with optimization (if available). + withDebugInfo :: DebugInfoLevel, -- ^Whether to emit debug info (if available). + withGHCiLib :: Bool, -- ^Whether to build libs suitable for use with GHCi. + splitObjs :: Bool, -- ^Use -split-objs with GHC, if available + stripExes :: Bool, -- ^Whether to strip executables during install + stripLibs :: Bool, -- ^Whether to strip libraries during install + progPrefix :: PathTemplate, -- ^Prefix to be prepended to installed executables + progSuffix :: PathTemplate, -- ^Suffix to be appended to installed executables + relocatable :: Bool -- ^Whether to build a relocatable package + } deriving (Generic, Read, Show) + +instance Binary LocalBuildInfo + +-- | Extract the 'ComponentId' from the library component of a +-- 'LocalBuildInfo' if it exists, or make a fake component ID based +-- on the package ID. +localComponentId :: LocalBuildInfo -> ComponentId +localComponentId lbi + = case localUnitId lbi of + SimpleUnitId cid -> cid + +-- | Extract the 'UnitId' from the library component of a +-- 'LocalBuildInfo' if it exists, or make a fake unit ID based on +-- the package ID. 
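-- [Editorial note, not part of this patch: a minimal sketch of how the
--  identity accessors defined here ('localComponentId', 'localUnitId',
--  'localCompatPackageKey') might be used from a custom Setup.hs hook,
--  assuming an 'lbi :: LocalBuildInfo' is already in scope. 'reportIds' is a
--  hypothetical helper; 'display' comes from Distribution.Text, which this
--  module imports.
--
-- > reportIds :: LocalBuildInfo -> IO ()
-- > reportIds lbi = do
-- >   putStrLn ("unit id:    " ++ display (localUnitId lbi))
-- >   putStrLn ("compat key: " ++ localCompatPackageKey lbi)
--
--  As the surrounding comments explain, both fall back to values derived from
--  the package ID when no library component is configured.]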
+localUnitId :: LocalBuildInfo -> UnitId +localUnitId lbi = + foldr go (mkLegacyUnitId (package (localPkgDescr lbi))) (componentsConfigs lbi) + where go (_, clbi, _) old_uid = case clbi of + LibComponentLocalBuildInfo { componentUnitId = uid } -> uid + _ -> old_uid + +-- | Extract the compatibility 'ComponentId' from the library component of a +-- 'LocalBuildInfo' if it exists, or make a fake compatibility package +-- key based on the package ID. +localCompatPackageKey :: LocalBuildInfo -> String +localCompatPackageKey lbi = + foldr go (display (package (localPkgDescr lbi))) (componentsConfigs lbi) + where go (_, clbi, _) old_pk = case clbi of + LibComponentLocalBuildInfo { componentCompatPackageKey = pk } -> pk + _ -> old_pk + +-- | External package dependencies for the package as a whole. This is the +-- union of the individual 'componentPackageDeps', less any internal deps. +externalPackageDeps :: LocalBuildInfo -> [(UnitId, PackageId)] +externalPackageDeps lbi = + -- TODO: what about non-buildable components? + nub [ (ipkgid, pkgid) + | (_,clbi,_) <- componentsConfigs lbi + , (ipkgid, pkgid) <- componentPackageDeps clbi + , not (internal pkgid) ] + where + -- True if this dependency is an internal one (depends on the library + -- defined in the same package). + internal pkgid = pkgid == packageId (localPkgDescr lbi) + +-- ----------------------------------------------------------------------------- +-- Buildable components + +data Component = CLib Library + | CExe Executable + | CTest TestSuite + | CBench Benchmark + deriving (Show, Eq, Read) + +data ComponentName = CLibName -- currently only a single lib + | CExeName String + | CTestName String + | CBenchName String + deriving (Eq, Generic, Ord, Read, Show) + +instance Binary ComponentName + +showComponentName :: ComponentName -> String +showComponentName CLibName = "library" +showComponentName (CExeName name) = "executable '" ++ name ++ "'" +showComponentName (CTestName name) = "test suite '" ++ name ++ "'" +showComponentName (CBenchName name) = "benchmark '" ++ name ++ "'" + +data ComponentLocalBuildInfo + = LibComponentLocalBuildInfo { + -- | Resolved internal and external package dependencies for this component. + -- The 'BuildInfo' specifies a set of build dependencies that must be + -- satisfied in terms of version ranges. This field fixes those dependencies + -- to the specific versions available on this machine for this compiler. 
+ componentPackageDeps :: [(UnitId, PackageId)], + componentUnitId :: UnitId, + componentCompatPackageKey :: String, + componentExposedModules :: [Installed.ExposedModule], + componentPackageRenaming :: Map PackageName ModuleRenaming + } + | ExeComponentLocalBuildInfo { + componentPackageDeps :: [(UnitId, PackageId)], + componentPackageRenaming :: Map PackageName ModuleRenaming + } + | TestComponentLocalBuildInfo { + componentPackageDeps :: [(UnitId, PackageId)], + componentPackageRenaming :: Map PackageName ModuleRenaming + } + | BenchComponentLocalBuildInfo { + componentPackageDeps :: [(UnitId, PackageId)], + componentPackageRenaming :: Map PackageName ModuleRenaming + } + deriving (Generic, Read, Show) + +instance Binary ComponentLocalBuildInfo + +foldComponent :: (Library -> a) + -> (Executable -> a) + -> (TestSuite -> a) + -> (Benchmark -> a) + -> Component + -> a +foldComponent f _ _ _ (CLib lib) = f lib +foldComponent _ f _ _ (CExe exe) = f exe +foldComponent _ _ f _ (CTest tst) = f tst +foldComponent _ _ _ f (CBench bch) = f bch + +componentBuildInfo :: Component -> BuildInfo +componentBuildInfo = + foldComponent libBuildInfo buildInfo testBuildInfo benchmarkBuildInfo + +componentName :: Component -> ComponentName +componentName = + foldComponent (const CLibName) + (CExeName . exeName) + (CTestName . testName) + (CBenchName . benchmarkName) + +-- | All the components in the package (libs, exes, or test suites). +-- +pkgComponents :: PackageDescription -> [Component] +pkgComponents pkg = + [ CLib lib | Just lib <- [library pkg] ] + ++ [ CExe exe | exe <- executables pkg ] + ++ [ CTest tst | tst <- testSuites pkg ] + ++ [ CBench bm | bm <- benchmarks pkg ] + +-- | All the components in the package that are buildable and enabled. +-- Thus this excludes non-buildable components and test suites or benchmarks +-- that have been disabled. +-- +pkgEnabledComponents :: PackageDescription -> [Component] +pkgEnabledComponents = filter componentEnabled . pkgComponents + +componentEnabled :: Component -> Bool +componentEnabled = isNothing . componentDisabledReason + +data ComponentDisabledReason = DisabledComponent + | DisabledAllTests + | DisabledAllBenchmarks + +componentDisabledReason :: Component -> Maybe ComponentDisabledReason +componentDisabledReason (CLib lib) + | not (buildable (libBuildInfo lib)) = Just DisabledComponent +componentDisabledReason (CExe exe) + | not (buildable (buildInfo exe)) = Just DisabledComponent +componentDisabledReason (CTest tst) + | not (buildable (testBuildInfo tst)) = Just DisabledComponent + | not (testEnabled tst) = Just DisabledAllTests +componentDisabledReason (CBench bm) + | not (buildable (benchmarkBuildInfo bm)) = Just DisabledComponent + | not (benchmarkEnabled bm) = Just DisabledAllBenchmarks +componentDisabledReason _ = Nothing + +lookupComponent :: PackageDescription -> ComponentName -> Maybe Component +lookupComponent pkg CLibName = + fmap CLib $ library pkg +lookupComponent pkg (CExeName name) = + fmap CExe $ find ((name ==) . exeName) (executables pkg) +lookupComponent pkg (CTestName name) = + fmap CTest $ find ((name ==) . testName) (testSuites pkg) +lookupComponent pkg (CBenchName name) = + fmap CBench $ find ((name ==) . 
benchmarkName) (benchmarks pkg) + +getComponent :: PackageDescription -> ComponentName -> Component +getComponent pkg cname = + case lookupComponent pkg cname of + Just cpnt -> cpnt + Nothing -> missingComponent + where + missingComponent = + error $ "internal error: the package description contains no " + ++ "component corresponding to " ++ show cname + + +getComponentLocalBuildInfo :: LocalBuildInfo -> ComponentName + -> ComponentLocalBuildInfo +getComponentLocalBuildInfo lbi cname = + case [ clbi + | (cname', clbi, _) <- componentsConfigs lbi + , cname == cname' ] of + [clbi] -> clbi + _ -> missingComponent + where + missingComponent = + error $ "internal error: there is no configuration data " + ++ "for component " ++ show cname + + +-- |If the package description has a library section, call the given +-- function with the library build info as argument. Extended version of +-- 'withLib' that also gives corresponding build info. +withLibLBI :: PackageDescription -> LocalBuildInfo + -> (Library -> ComponentLocalBuildInfo -> IO ()) -> IO () +withLibLBI pkg_descr lbi f = + withLib pkg_descr $ \lib -> + f lib (getComponentLocalBuildInfo lbi CLibName) + +-- | Perform the action on each buildable 'Executable' in the package +-- description. Extended version of 'withExe' that also gives corresponding +-- build info. +withExeLBI :: PackageDescription -> LocalBuildInfo + -> (Executable -> ComponentLocalBuildInfo -> IO ()) -> IO () +withExeLBI pkg_descr lbi f = + withExe pkg_descr $ \exe -> + f exe (getComponentLocalBuildInfo lbi (CExeName (exeName exe))) + +withTestLBI :: PackageDescription -> LocalBuildInfo + -> (TestSuite -> ComponentLocalBuildInfo -> IO ()) -> IO () +withTestLBI pkg_descr lbi f = + withTest pkg_descr $ \test -> + f test (getComponentLocalBuildInfo lbi (CTestName (testName test))) + +{-# DEPRECATED withComponentsLBI "Use withAllComponentsInBuildOrder" #-} +withComponentsLBI :: PackageDescription -> LocalBuildInfo + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withComponentsLBI = withAllComponentsInBuildOrder + +-- | Perform the action on each buildable 'Library' or 'Executable' (Component) +-- in the PackageDescription, subject to the build order specified by the +-- 'compBuildOrder' field of the given 'LocalBuildInfo' +withAllComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withAllComponentsInBuildOrder pkg lbi f = + sequence_ + [ f (getComponent pkg cname) clbi + | (cname, clbi) <- allComponentsInBuildOrder lbi ] + +withComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo + -> [ComponentName] + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withComponentsInBuildOrder pkg lbi cnames f = + sequence_ + [ f (getComponent pkg cname') clbi + | (cname', clbi) <- componentsInBuildOrder lbi cnames ] + +allComponentsInBuildOrder :: LocalBuildInfo + -> [(ComponentName, ComponentLocalBuildInfo)] +allComponentsInBuildOrder lbi = + componentsInBuildOrder lbi + [ cname | (cname, _, _) <- componentsConfigs lbi ] + +componentsInBuildOrder :: LocalBuildInfo -> [ComponentName] + -> [(ComponentName, ComponentLocalBuildInfo)] +componentsInBuildOrder lbi cnames = + map ((\(clbi,cname,_) -> (cname,clbi)) . vertexToNode) + . postOrder graph + . 
map (\cname -> fromMaybe (noSuchComp cname) (keyToVertex cname)) + $ cnames + where + (graph, vertexToNode, keyToVertex) = + graphFromEdges (map (\(a,b,c) -> (b,a,c)) (componentsConfigs lbi)) + + noSuchComp cname = error $ "internal error: componentsInBuildOrder: " + ++ "no such component: " ++ show cname + + postOrder :: Graph -> [Vertex] -> [Vertex] + postOrder g vs = postorderF (dfs g vs) [] + + postorderF :: Forest a -> [a] -> [a] + postorderF ts = foldr (.) id $ map postorderT ts + + postorderT :: Tree a -> [a] -> [a] + postorderT (Node a ts) = postorderF ts . (a :) + +checkComponentsCyclic :: Ord key => [(node, key, [key])] + -> Maybe [(node, key, [key])] +checkComponentsCyclic es = + let (graph, vertexToNode, _) = graphFromEdges es + cycles = [ flatten c | c <- scc graph, isCycle c ] + isCycle (Node v []) = selfCyclic v + isCycle _ = True + selfCyclic v = v `elem` graph ! v + in case cycles of + [] -> Nothing + (c:_) -> Just (map vertexToNode c) + +-- | Determine the directories containing the dynamic libraries of the +-- transitive dependencies of the component we are building. +-- +-- When wanted, and possible, returns paths relative to the installDirs 'prefix' +depLibraryPaths :: Bool -- ^ Building for inplace? + -> Bool -- ^ Generate prefix-relative library paths + -> LocalBuildInfo + -> ComponentLocalBuildInfo -- ^ Component that is being built + -> IO [FilePath] +depLibraryPaths inplace relative lbi clbi = do + let pkgDescr = localPkgDescr lbi + installDirs = absoluteInstallDirs pkgDescr lbi NoCopyDest + executable = case clbi of + ExeComponentLocalBuildInfo {} -> True + _ -> False + relDir | executable = bindir installDirs + | otherwise = libdir installDirs + + let hasInternalDeps = not $ null + $ [ pkgid + | (_,pkgid) <- componentPackageDeps clbi + , internal pkgid + ] + + let ipkgs = allPackages (installedPkgs lbi) + -- First look for dynamic libraries in `dynamic-library-dirs`, and use + -- `library-dirs` as a fall back. + getDynDir pkg = case Installed.libraryDynDirs pkg of + [] -> Installed.libraryDirs pkg + d -> d + allDepLibDirs = concatMap getDynDir ipkgs + internalLib + | inplace = buildDir lbi + | otherwise = dynlibdir installDirs + allDepLibDirs' = if hasInternalDeps + then internalLib : allDepLibDirs + else allDepLibDirs + allDepLibDirsC <- mapM canonicalizePathNoFail allDepLibDirs' + + let p = prefix installDirs + prefixRelative l = isJust (stripPrefix p l) + libPaths + | relative && + prefixRelative relDir = map (\l -> + if prefixRelative l + then shortRelativePath relDir l + else l + ) allDepLibDirsC + | otherwise = allDepLibDirsC + + return libPaths + where + internal pkgid = pkgid == packageId (localPkgDescr lbi) + -- 'canonicalizePath' fails on UNIX when the directory does not exists. + -- So just don't canonicalize when it doesn't exist. 
+ canonicalizePathNoFail p = do + exists <- doesDirectoryExist p + if exists + then canonicalizePath p + else return p + + +-- ----------------------------------------------------------------------------- +-- Wrappers for a couple functions from InstallDirs + +-- |See 'InstallDirs.absoluteInstallDirs' +absoluteInstallDirs :: PackageDescription -> LocalBuildInfo -> CopyDest + -> InstallDirs FilePath +absoluteInstallDirs pkg lbi copydest = + InstallDirs.absoluteInstallDirs + (packageId pkg) + (localUnitId lbi) + (compilerInfo (compiler lbi)) + copydest + (hostPlatform lbi) + (installDirTemplates lbi) + +-- |See 'InstallDirs.prefixRelativeInstallDirs' +prefixRelativeInstallDirs :: PackageId -> LocalBuildInfo + -> InstallDirs (Maybe FilePath) +prefixRelativeInstallDirs pkg_descr lbi = + InstallDirs.prefixRelativeInstallDirs + (packageId pkg_descr) + (localUnitId lbi) + (compilerInfo (compiler lbi)) + (hostPlatform lbi) + (installDirTemplates lbi) + +substPathTemplate :: PackageId -> LocalBuildInfo + -> PathTemplate -> FilePath +substPathTemplate pkgid lbi = fromPathTemplate + . ( InstallDirs.substPathTemplate env ) + where env = initialPathTemplateEnv + pkgid + (localUnitId lbi) + (compilerInfo (compiler lbi)) + (hostPlatform lbi) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PackageIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PackageIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PackageIndex.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,623 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.PackageIndex +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007, +-- Duncan Coutts 2008-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- An index of packages. 
+-- +module Distribution.Simple.PackageIndex ( + -- * Package index data type + InstalledPackageIndex, + PackageIndex, + + -- * Creating an index + fromList, + + -- * Updates + merge, + + insert, + + deleteUnitId, + deleteSourcePackageId, + deletePackageName, +-- deleteDependency, + + -- * Queries + + -- ** Precise lookups + lookupUnitId, + lookupSourcePackageId, + lookupPackageId, + lookupPackageName, + lookupDependency, + + -- ** Case-insensitive searches + searchByName, + SearchResult(..), + searchByNameSubstring, + + -- ** Bulk queries + allPackages, + allPackagesByName, + allPackagesBySourcePackageId, + + -- ** Special queries + brokenPackages, + dependencyClosure, + reverseDependencyClosure, + topologicalOrder, + reverseTopologicalOrder, + dependencyInconsistencies, + dependencyCycles, + dependencyGraph, + moduleNameIndex, + + -- * Backwards compatibility + deleteInstalledPackageId, + lookupInstalledPackageId, + ) where + +import Distribution.Compat.Binary +import Distribution.Compat.Semigroup as Semi +import Distribution.Package +import Distribution.ModuleName +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.Version +import Distribution.Simple.Utils + +import Control.Exception (assert) +import Data.Array ((!)) +import qualified Data.Array as Array +import qualified Data.Graph as Graph +import Data.List as List + ( null, foldl', sort + , groupBy, sortBy, find, nubBy, deleteBy, deleteFirstsBy ) +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Maybe (isNothing, fromMaybe) +import qualified Data.Tree as Tree +import GHC.Generics (Generic) +import Prelude hiding (lookup) + +-- | The collection of information about packages from one or more 'PackageDB's. +-- These packages generally should have an instance of 'PackageInstalled' +-- +-- Packages are uniquely identified in by their 'UnitId', they can +-- also be efficiently looked up by package name or by name and version. +-- +data PackageIndex a = PackageIndex + -- The primary index. Each InstalledPackageInfo record is uniquely identified + -- by its UnitId. + -- + !(Map UnitId a) + + -- This auxiliary index maps package names (case-sensitively) to all the + -- versions and instances of that package. This allows us to find all + -- versions satisfying a dependency. + -- + -- It is a three-level index. The first level is the package name, + -- the second is the package version and the final level is instances + -- of the same package version. These are unique by UnitId + -- and are kept in preference order. + -- + -- FIXME: Clarify what "preference order" means. Check that this invariant is + -- preserved. See #1463 for discussion. + !(Map PackageName (Map Version [a])) + + deriving (Eq, Generic, Show, Read) + +instance Binary a => Binary (PackageIndex a) + +-- | The default package index which contains 'InstalledPackageInfo'. Normally +-- use this. 
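-- [Editorial sketch, not part of this patch: how the two-level index described
--  above is typically built and queried, using only functions exported from
--  this module. 'summarise' is a hypothetical helper and 'pkgs' a hypothetical
--  list of installed-package records, e.g. read from a package database.
--
-- > summarise :: [IPI.InstalledPackageInfo] -> IO ()
-- > summarise pkgs = do
-- >   let index = fromList pkgs :: InstalledPackageIndex
-- >   print (length (allPackages index))         -- one entry per UnitId
-- >   print (map fst (allPackagesByName index))  -- grouped case-sensitively
--
--  Duplicates by 'UnitId' are masked at 'fromList' time, with later entries
--  winning, as the 'fromList' documentation below states.]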
+type InstalledPackageIndex = PackageIndex IPI.InstalledPackageInfo + +instance HasUnitId a => Monoid (PackageIndex a) where + mempty = PackageIndex Map.empty Map.empty + mappend = (Semi.<>) + --save one mappend with empty in the common case: + mconcat [] = mempty + mconcat xs = foldr1 mappend xs + +instance HasUnitId a => Semigroup (PackageIndex a) where + (<>) = merge + +invariant :: HasUnitId a => PackageIndex a -> Bool +invariant (PackageIndex pids pnames) = + map installedUnitId (Map.elems pids) + == sort + [ assert pinstOk (installedUnitId pinst) + | (pname, pvers) <- Map.toList pnames + , let pversOk = not (Map.null pvers) + , (pver, pinsts) <- assert pversOk $ Map.toList pvers + , let pinsts' = sortBy (comparing installedUnitId) pinsts + pinstsOk = all (\g -> length g == 1) + (groupBy (equating installedUnitId) pinsts') + , pinst <- assert pinstsOk $ pinsts' + , let pinstOk = packageName pinst == pname + && packageVersion pinst == pver + ] + + +-- +-- * Internal helpers +-- + +mkPackageIndex :: HasUnitId a + => Map UnitId a + -> Map PackageName (Map Version [a]) + -> PackageIndex a +mkPackageIndex pids pnames = assert (invariant index) index + where index = PackageIndex pids pnames + + +-- +-- * Construction +-- + +-- | Build an index out of a bunch of packages. +-- +-- If there are duplicates by 'UnitId' then later ones mask earlier +-- ones. +-- +fromList :: HasUnitId a => [a] -> PackageIndex a +fromList pkgs = mkPackageIndex pids pnames + where + pids = Map.fromList [ (installedUnitId pkg, pkg) | pkg <- pkgs ] + pnames = + Map.fromList + [ (packageName (head pkgsN), pvers) + | pkgsN <- groupBy (equating packageName) + . sortBy (comparing packageId) + $ pkgs + , let pvers = + Map.fromList + [ (packageVersion (head pkgsNV), + nubBy (equating installedUnitId) (reverse pkgsNV)) + | pkgsNV <- groupBy (equating packageVersion) pkgsN + ] + ] + +-- +-- * Updates +-- + +-- | Merge two indexes. +-- +-- Packages from the second mask packages from the first if they have the exact +-- same 'UnitId'. +-- +-- For packages with the same source 'PackageId', packages from the second are +-- \"preferred\" over those from the first. Being preferred means they are top +-- result when we do a lookup by source 'PackageId'. This is the mechanism we +-- use to prefer user packages over global packages. +-- +merge :: HasUnitId a => PackageIndex a -> PackageIndex a + -> PackageIndex a +merge (PackageIndex pids1 pnames1) (PackageIndex pids2 pnames2) = + mkPackageIndex (Map.unionWith (\_ y -> y) pids1 pids2) + (Map.unionWith (Map.unionWith mergeBuckets) pnames1 pnames2) + where + -- Packages in the second list mask those in the first, however preferred + -- packages go first in the list. + mergeBuckets xs ys = ys ++ (xs \\ ys) + (\\) = deleteFirstsBy (equating installedUnitId) + + +-- | Inserts a single package into the index. +-- +-- This is equivalent to (but slightly quicker than) using 'mappend' or +-- 'merge' with a singleton index. 
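-- [Editorial sketch, not part of this patch: the preference rule documented
--  for 'merge' above is what lets a user package database shadow the global
--  one. Assuming two hypothetical indexes 'globalIdx' and 'userIdx' of type
--  'InstalledPackageIndex':
--
-- > combined :: InstalledPackageIndex
-- > combined = globalIdx <> userIdx   -- same as 'merge globalIdx userIdx'
--
--  For a source 'PackageId' present in both, 'lookupSourcePackageId combined'
--  returns the instance from 'userIdx' first, because packages from the second
--  argument are preferred.]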
+-- +insert :: HasUnitId a => a -> PackageIndex a -> PackageIndex a +insert pkg (PackageIndex pids pnames) = + mkPackageIndex pids' pnames' + + where + pids' = Map.insert (installedUnitId pkg) pkg pids + pnames' = insertPackageName pnames + insertPackageName = + Map.insertWith' (\_ -> insertPackageVersion) + (packageName pkg) + (Map.singleton (packageVersion pkg) [pkg]) + + insertPackageVersion = + Map.insertWith' (\_ -> insertPackageInstance) + (packageVersion pkg) [pkg] + + insertPackageInstance pkgs = + pkg : deleteBy (equating installedUnitId) pkg pkgs + + +-- | Removes a single installed package from the index. +-- +deleteUnitId :: HasUnitId a + => UnitId -> PackageIndex a + -> PackageIndex a +deleteUnitId ipkgid original@(PackageIndex pids pnames) = + case Map.updateLookupWithKey (\_ _ -> Nothing) ipkgid pids of + (Nothing, _) -> original + (Just spkgid, pids') -> mkPackageIndex pids' + (deletePkgName spkgid pnames) + + where + deletePkgName spkgid = + Map.update (deletePkgVersion spkgid) (packageName spkgid) + + deletePkgVersion spkgid = + (\m -> if Map.null m then Nothing else Just m) + . Map.update deletePkgInstance (packageVersion spkgid) + + deletePkgInstance = + (\xs -> if List.null xs then Nothing else Just xs) + . List.deleteBy (\_ pkg -> installedUnitId pkg == ipkgid) undefined + +-- | Backwards compatibility wrapper for Cabal pre-1.24. +{-# DEPRECATED deleteInstalledPackageId "Use deleteUnitId instead" #-} +deleteInstalledPackageId :: HasUnitId a + => UnitId -> PackageIndex a + -> PackageIndex a +deleteInstalledPackageId = deleteUnitId + +-- | Removes all packages with this source 'PackageId' from the index. +-- +deleteSourcePackageId :: HasUnitId a => PackageId -> PackageIndex a + -> PackageIndex a +deleteSourcePackageId pkgid original@(PackageIndex pids pnames) = + case Map.lookup (packageName pkgid) pnames of + Nothing -> original + Just pvers -> case Map.lookup (packageVersion pkgid) pvers of + Nothing -> original + Just pkgs -> mkPackageIndex + (foldl' (flip (Map.delete . installedUnitId)) pids pkgs) + (deletePkgName pnames) + where + deletePkgName = + Map.update deletePkgVersion (packageName pkgid) + + deletePkgVersion = + (\m -> if Map.null m then Nothing else Just m) + . Map.delete (packageVersion pkgid) + + +-- | Removes all packages with this (case-sensitive) name from the index. +-- +deletePackageName :: HasUnitId a => PackageName -> PackageIndex a + -> PackageIndex a +deletePackageName name original@(PackageIndex pids pnames) = + case Map.lookup name pnames of + Nothing -> original + Just pvers -> mkPackageIndex + (foldl' (flip (Map.delete . installedUnitId)) pids + (concat (Map.elems pvers))) + (Map.delete name pnames) + +{- +-- | Removes all packages satisfying this dependency from the index. +-- +deleteDependency :: Dependency -> PackageIndex -> PackageIndex +deleteDependency (Dependency name verstionRange) = + delete' name (\pkg -> packageVersion pkg `withinRange` verstionRange) +-} + +-- +-- * Bulk queries +-- + +-- | Get all the packages from the index. +-- +allPackages :: PackageIndex a -> [a] +allPackages (PackageIndex pids _) = Map.elems pids + +-- | Get all the packages from the index. +-- +-- They are grouped by package name (case-sensitively). +-- +allPackagesByName :: PackageIndex a -> [(PackageName, [a])] +allPackagesByName (PackageIndex _ pnames) = + [ (pkgname, concat (Map.elems pvers)) + | (pkgname, pvers) <- Map.toList pnames ] + +-- | Get all the packages from the index. +-- +-- They are grouped by source package id (package name and version). 
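-- [Editorial note, not part of this patch: continuing the hypothetical 'index'
--  from the earlier sketch, the bulk query below and the precise lookups that
--  follow relate like this; 'someUnitId' is a placeholder for a 'UnitId' of
--  interest.
--
-- > groups :: [(PackageId, [IPI.InstalledPackageInfo])]
-- > groups = allPackagesBySourcePackageId index   -- preferred instance first
-- >
-- > found :: Maybe IPI.InstalledPackageInfo
-- > found = lookupUnitId index someUnitId         -- at most one record
--
--  Because package databases mask each other by 'UnitId', the precise lookup
--  can never be ambiguous, whereas a source 'PackageId' may map to several
--  installed instances.]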
+-- +allPackagesBySourcePackageId :: HasUnitId a => PackageIndex a + -> [(PackageId, [a])] +allPackagesBySourcePackageId (PackageIndex _ pnames) = + [ (packageId ipkg, ipkgs) + | pvers <- Map.elems pnames + , ipkgs@(ipkg:_) <- Map.elems pvers ] + +-- +-- * Lookups +-- + +-- | Does a lookup by source package id (name & version). +-- +-- Since multiple package DBs mask each other by 'UnitId', +-- then we get back at most one package. +-- +lookupUnitId :: PackageIndex a -> UnitId + -> Maybe a +lookupUnitId (PackageIndex pids _) pid = Map.lookup pid pids + +-- | Backwards compatibility for Cabal pre-1.24. +{-# DEPRECATED lookupInstalledPackageId "Use lookupUnitId instead" #-} +lookupInstalledPackageId :: PackageIndex a -> UnitId + -> Maybe a +lookupInstalledPackageId = lookupUnitId + + +-- | Does a lookup by source package id (name & version). +-- +-- There can be multiple installed packages with the same source 'PackageId' +-- but different 'UnitId'. They are returned in order of +-- preference, with the most preferred first. +-- +lookupSourcePackageId :: PackageIndex a -> PackageId -> [a] +lookupSourcePackageId (PackageIndex _ pnames) pkgid = + case Map.lookup (packageName pkgid) pnames of + Nothing -> [] + Just pvers -> case Map.lookup (packageVersion pkgid) pvers of + Nothing -> [] + Just pkgs -> pkgs -- in preference order + +-- | Convenient alias of 'lookupSourcePackageId', but assuming only +-- one package per package ID. +lookupPackageId :: PackageIndex a -> PackageId -> Maybe a +lookupPackageId index pkgid = case lookupSourcePackageId index pkgid of + [] -> Nothing + [pkg] -> Just pkg + _ -> error "Distribution.Simple.PackageIndex: multiple matches found" + +-- | Does a lookup by source package name. +-- +lookupPackageName :: PackageIndex a -> PackageName + -> [(Version, [a])] +lookupPackageName (PackageIndex _ pnames) name = + case Map.lookup name pnames of + Nothing -> [] + Just pvers -> Map.toList pvers + + +-- | Does a lookup by source package name and a range of versions. +-- +-- We get back any number of versions of the specified package name, all +-- satisfying the version range constraint. +-- +lookupDependency :: PackageIndex a -> Dependency + -> [(Version, [a])] +lookupDependency (PackageIndex _ pnames) (Dependency name versionRange) = + case Map.lookup name pnames of + Nothing -> [] + Just pvers -> [ entry + | entry@(ver, _) <- Map.toList pvers + , ver `withinRange` versionRange ] + +-- +-- * Case insensitive name lookups +-- + +-- | Does a case-insensitive search by package name. +-- +-- If there is only one package that compares case-insensitively to this name +-- then the search is unambiguous and we get back all versions of that package. +-- If several match case-insensitively but one matches exactly then it is also +-- unambiguous. +-- +-- If however several match case-insensitively and none match exactly then we +-- have an ambiguous result, and we get back all the versions of all the +-- packages. The list of ambiguous results is split by exact package name. So +-- it is a non-empty list of non-empty lists. +-- +searchByName :: PackageIndex a -> String -> SearchResult [a] +searchByName (PackageIndex _ pnames) name = + case [ pkgs | pkgs@(PackageName name',_) <- Map.toList pnames + , lowercase name' == lname ] of + [] -> None + [(_,pvers)] -> Unambiguous (concat (Map.elems pvers)) + pkgss -> case find ((PackageName name==) . fst) pkgss of + Just (_,pvers) -> Unambiguous (concat (Map.elems pvers)) + Nothing -> Ambiguous (map (concat . Map.elems . 
snd) pkgss) + where lname = lowercase name + +data SearchResult a = None | Unambiguous a | Ambiguous [a] + +-- | Does a case-insensitive substring search by package name. +-- +-- That is, all packages that contain the given string in their name. +-- +searchByNameSubstring :: PackageIndex a -> String -> [a] +searchByNameSubstring (PackageIndex _ pnames) searchterm = + [ pkg + | (PackageName name, pvers) <- Map.toList pnames + , lsearchterm `isInfixOf` lowercase name + , pkgs <- Map.elems pvers + , pkg <- pkgs ] + where lsearchterm = lowercase searchterm + + +-- +-- * Special queries +-- + +-- None of the stuff below depends on the internal representation of the index. +-- + +-- | Find if there are any cycles in the dependency graph. If there are no +-- cycles the result is @[]@. +-- +-- This actually computes the strongly connected components. So it gives us a +-- list of groups of packages where within each group they all depend on each +-- other, directly or indirectly. +-- +dependencyCycles :: PackageInstalled a => PackageIndex a -> [[a]] +dependencyCycles index = + [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] + where + adjacencyList = [ (pkg, installedUnitId pkg, installedDepends pkg) + | pkg <- allPackages index ] + + +-- | All packages that have immediate dependencies that are not in the index. +-- +-- Returns such packages along with the dependencies that they're missing. +-- +brokenPackages :: PackageInstalled a => PackageIndex a + -> [(a, [UnitId])] +brokenPackages index = + [ (pkg, missing) + | pkg <- allPackages index + , let missing = [ pkg' | pkg' <- installedDepends pkg + , isNothing (lookupUnitId index pkg') ] + , not (null missing) ] + +-- | Tries to take the transitive closure of the package dependencies. +-- +-- If the transitive closure is complete then it returns that subset of the +-- index. Otherwise it returns the broken packages as in 'brokenPackages'. +-- +-- * Note that if the result is @Right []@ it is because at least one of +-- the original given 'PackageId's do not occur in the index. +-- +dependencyClosure :: PackageInstalled a => PackageIndex a + -> [UnitId] + -> Either (PackageIndex a) + [(a, [UnitId])] +dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of + (completed, []) -> Left completed + (completed, _) -> Right (brokenPackages completed) + where + closure completed failed [] = (completed, failed) + closure completed failed (pkgid:pkgids) = case lookupUnitId index pkgid of + Nothing -> closure completed (pkgid:failed) pkgids + Just pkg -> case lookupUnitId completed (installedUnitId pkg) of + Just _ -> closure completed failed pkgids + Nothing -> closure completed' failed pkgids' + where completed' = insert pkg completed + pkgids' = installedDepends pkg ++ pkgids + +-- | Takes the transitive closure of the packages reverse dependencies. +-- +-- * The given 'PackageId's must be in the index. +-- +reverseDependencyClosure :: PackageInstalled a => PackageIndex a + -> [UnitId] + -> [a] +reverseDependencyClosure index = + map vertexToPkg + . concatMap Tree.flatten + . Graph.dfs reverseDepGraph + . map (fromMaybe noSuchPkgId . pkgIdToVertex) + + where + (depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index + reverseDepGraph = Graph.transposeG depGraph + noSuchPkgId = error "reverseDependencyClosure: package is not in the graph" + +topologicalOrder :: PackageInstalled a => PackageIndex a -> [a] +topologicalOrder index = map toPkgId + . 
Graph.topSort + $ graph + where (graph, toPkgId, _) = dependencyGraph index + +reverseTopologicalOrder :: PackageInstalled a => PackageIndex a -> [a] +reverseTopologicalOrder index = map toPkgId + . Graph.topSort + . Graph.transposeG + $ graph + where (graph, toPkgId, _) = dependencyGraph index + +-- | Builds a graph of the package dependencies. +-- +-- Dependencies on other packages that are not in the index are discarded. +-- You can check if there are any such dependencies with 'brokenPackages'. +-- +dependencyGraph :: PackageInstalled a => PackageIndex a + -> (Graph.Graph, + Graph.Vertex -> a, + UnitId -> Maybe Graph.Vertex) +dependencyGraph index = (graph, vertex_to_pkg, id_to_vertex) + where + graph = Array.listArray bounds + [ [ v | Just v <- map id_to_vertex (installedDepends pkg) ] + | pkg <- pkgs ] + + pkgs = sortBy (comparing packageId) (allPackages index) + vertices = zip (map installedUnitId pkgs) [0..] + vertex_map = Map.fromList vertices + id_to_vertex pid = Map.lookup pid vertex_map + + vertex_to_pkg vertex = pkgTable ! vertex + + pkgTable = Array.listArray bounds pkgs + topBound = length pkgs - 1 + bounds = (0, topBound) + +-- | Given a package index where we assume we want to use all the packages +-- (use 'dependencyClosure' if you need to get such a index subset) find out +-- if the dependencies within it use consistent versions of each package. +-- Return all cases where multiple packages depend on different versions of +-- some other package. +-- +-- Each element in the result is a package name along with the packages that +-- depend on it and the versions they require. These are guaranteed to be +-- distinct. +-- +dependencyInconsistencies :: PackageInstalled a => PackageIndex a + -> [(PackageName, [(PackageId, Version)])] +dependencyInconsistencies index = + [ (name, [ (pid,packageVersion dep) | (dep,pids) <- uses, pid <- pids]) + | (name, ipid_map) <- Map.toList inverseIndex + , let uses = Map.elems ipid_map + , reallyIsInconsistent (map fst uses) ] + + where -- for each PackageName, + -- for each package with that name, + -- the InstalledPackageInfo and the package Ids of packages + -- that depend on it. + inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) + [ (packageName dep, + Map.fromList [(ipid,(dep,[packageId pkg]))]) + | pkg <- allPackages index + , ipid <- installedDepends pkg + , Just dep <- [lookupUnitId index ipid] + ] + + reallyIsInconsistent :: PackageInstalled a => [a] -> Bool + reallyIsInconsistent [] = False + reallyIsInconsistent [_p] = False + reallyIsInconsistent [p1, p2] = + let pid1 = installedUnitId p1 + pid2 = installedUnitId p2 + in pid1 `notElem` installedDepends p2 + && pid2 `notElem` installedDepends p1 + reallyIsInconsistent _ = True + +-- | A rough approximation of GHC's module finder, takes a +-- 'InstalledPackageIndex' and turns it into a map from module names to their +-- source packages. It's used to initialize the @build-deps@ field in @cabal +-- init@. +moduleNameIndex :: InstalledPackageIndex -> Map ModuleName [IPI.InstalledPackageInfo] +moduleNameIndex index = + Map.fromListWith (++) $ do + pkg <- allPackages index + IPI.ExposedModule m reexport <- IPI.exposedModules pkg + case reexport of + Nothing -> return (m, [pkg]) + Just (IPI.OriginalModule _ m') | m == m' -> [] + | otherwise -> return (m', [pkg]) + -- The heuristic is this: we want to prefer the original package + -- which originally exported a module. 
However, if a reexport + -- also *renamed* the module (m /= m'), then we have to use the + -- downstream package, since the upstream package has the wrong + -- module name! diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess/Unlit.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess/Unlit.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess/Unlit.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess/Unlit.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,165 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.PreProcess.Unlit +-- Copyright : ... +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Remove the \"literal\" markups from a Haskell source file, including +-- \"@>@\", \"@\\begin{code}@\", \"@\\end{code}@\", and \"@#@\" + +-- This version is interesting because instead of striping comment lines, it +-- turns them into "-- " style comments. This allows using haddock markup +-- in literate scripts without having to use "> --" prefix. + +module Distribution.Simple.PreProcess.Unlit (unlit,plain) where + +import Data.Char +import Data.List + +data Classified = BirdTrack String | Blank String | Ordinary String + | Line !Int String | CPP String + | BeginCode | EndCode + -- output only: + | Error String | Comment String + +-- | No unliteration. +plain :: String -> String -> String +plain _ hs = hs + +classify :: String -> Classified +classify ('>':s) = BirdTrack s +classify ('#':s) = case tokens s of + (line:file:_) | all isDigit line + && length file >= 2 + && head file == '"' + && last file == '"' + -> Line (read line) (tail (init file)) + _ -> CPP s + where tokens = unfoldr $ \str -> case lex str of + (t@(_:_), str'):_ -> Just (t, str') + _ -> Nothing +classify ('\\':s) + | "begin{code}" `isPrefixOf` s = BeginCode + | "end{code}" `isPrefixOf` s = EndCode +classify s | all isSpace s = Blank s +classify s = Ordinary s + +-- So the weird exception for comment indenting is to make things work with +-- haddock, see classifyAndCheckForBirdTracks below. +unclassify :: Bool -> Classified -> String +unclassify _ (BirdTrack s) = ' ':s +unclassify _ (Blank s) = s +unclassify _ (Ordinary s) = s +unclassify _ (Line n file) = "# " ++ show n ++ " " ++ show file +unclassify _ (CPP s) = '#':s +unclassify True (Comment "") = " --" +unclassify True (Comment s) = " -- " ++ s +unclassify False (Comment "") = "--" +unclassify False (Comment s) = "-- " ++ s +unclassify _ _ = internalError + +-- | 'unlit' takes a filename (for error reports), and transforms the +-- given string, to eliminate the literate comments from the program text. +unlit :: FilePath -> String -> Either String String +unlit file input = + let (usesBirdTracks, classified) = classifyAndCheckForBirdTracks + . inlines + $ input + in either (Left . unlines . map (unclassify usesBirdTracks)) + Right + . checkErrors + . reclassify + $ classified + + where + -- So haddock requires comments and code to align, since it treats comments + -- as following the layout rule. This is a pain for us since bird track + -- style literate code typically gets indented by two since ">" is replaced + -- by " " and people usually use one additional space of indent ie + -- "> then the code". 
On the other hand we cannot just go and indent all + -- the comments by two since that does not work for latex style literate + -- code. So the hacky solution we use here is that if we see any bird track + -- style code then we'll indent all comments by two, otherwise by none. + -- Of course this will not work for mixed latex/bird track .lhs files but + -- nobody does that, it's silly and specifically recommended against in the + -- H98 unlit spec. + -- + classifyAndCheckForBirdTracks = + flip mapAccumL False $ \seenBirdTrack line -> + let classification = classify line + in (seenBirdTrack || isBirdTrack classification, classification) + + isBirdTrack (BirdTrack _) = True + isBirdTrack _ = False + + checkErrors ls = case [ e | Error e <- ls ] of + [] -> Left ls + (message:_) -> Right (f ++ ":" ++ show n ++ ": " ++ message) + where (f, n) = errorPos file 1 ls + errorPos f n [] = (f, n) + errorPos f n (Error _:_) = (f, n) + errorPos _ _ (Line n' f':ls) = errorPos f' n' ls + errorPos f n (_ :ls) = errorPos f (n+1) ls + +-- Here we model a state machine, with each state represented by +-- a local function. We only have four states (well, five, +-- if you count the error state), but the rules +-- to transition between then are not so simple. +-- Would it be simpler to have more states? +-- +-- Each state represents the type of line that was last read +-- i.e. are we in a comment section, or a latex-code section, +-- or a bird-code section, etc? +reclassify :: [Classified] -> [Classified] +reclassify = blank -- begin in blank state + where + latex [] = [] + latex (EndCode :ls) = Blank "" : comment ls + latex (BeginCode :_ ) = [Error "\\begin{code} in code section"] + latex (BirdTrack l:ls) = Ordinary ('>':l) : latex ls + latex ( l:ls) = l : latex ls + + blank [] = [] + blank (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + blank (BeginCode :ls) = Blank "" : latex ls + blank (BirdTrack l:ls) = BirdTrack l : bird ls + blank (Ordinary l:ls) = Comment l : comment ls + blank ( l:ls) = l : blank ls + + bird [] = [] + bird (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + bird (BeginCode :ls) = Blank "" : latex ls + bird (Blank l :ls) = Blank l : blank ls + bird (Ordinary _:_ ) = [Error "program line before comment line"] + bird ( l:ls) = l : bird ls + + comment [] = [] + comment (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + comment (BeginCode :ls) = Blank "" : latex ls + comment (CPP l :ls) = CPP l : comment ls + comment (BirdTrack _:_ ) = [Error "comment line before program line"] + -- a blank line and another ordinary line following a comment + -- will be treated as continuing the comment. Otherwise it's + -- then end of the comment, with a blank line. + comment (Blank l:ls@(Ordinary _:_)) = Comment l : comment ls + comment (Blank l:ls) = Blank l : blank ls + comment (Line n f :ls) = Line n f : comment ls + comment (Ordinary l:ls) = Comment l : comment ls + comment (Comment _: _) = internalError + comment (Error _: _) = internalError + +-- Re-implementation of 'lines', for better efficiency (but decreased laziness). +-- Also, importantly, accepts non-standard DOS and Mac line ending characters. +inlines :: String -> [String] +inlines xs = lines' xs id + where + lines' [] acc = [acc []] + lines' ('\^M':'\n':s) acc = acc [] : lines' s id -- DOS + lines' ('\^M':s) acc = acc [] : lines' s id -- MacOS + lines' ('\n':s) acc = acc [] : lines' s id -- Unix + lines' (c:s) acc = lines' s (acc . 
(c:))
+
+internalError :: a
+internalError = error "unlit: internal error"
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/PreProcess.hs 2016-12-23 10:35:21.000000000 +0000
@@ -0,0 +1,652 @@
+-----------------------------------------------------------------------------
+-- |
+-- Module      :  Distribution.Simple.PreProcess
+-- Copyright   :  (c) 2003-2005, Isaac Jones, Malcolm Wallace
+-- License     :  BSD3
+--
+-- Maintainer  :  cabal-devel@haskell.org
+-- Portability :  portable
+--
+-- This defines a 'PreProcessor' abstraction which represents a pre-processor
+-- that can transform one kind of file into another. There is also a
+-- 'PPSuffixHandler' which is a combination of a file extension and a function
+-- for configuring a 'PreProcessor'. It defines a bunch of known built-in
+-- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and
+-- lists them in 'knownSuffixHandlers'. On top of this it provides a function
+-- for actually preprocessing some sources given a bunch of known suffix
+-- handlers. This module is not as good as it could be, it could really do with
+-- a rewrite to address some of the problems we have with pre-processors.
+
+module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras,
+                                knownSuffixHandlers, ppSuffixes,
+                                PPSuffixHandler, PreProcessor(..),
+                                mkSimplePreProcessor, runSimplePreProcessor,
+                                ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs,
+                                ppHappy, ppAlex, ppUnlit, platformDefines
+                               )
+    where
+
+
+import Distribution.Simple.PreProcess.Unlit
+import Distribution.Package
+import qualified Distribution.ModuleName as ModuleName
+import Distribution.PackageDescription as PD
+import qualified Distribution.InstalledPackageInfo as Installed
+import qualified Distribution.Simple.PackageIndex as PackageIndex
+import Distribution.Simple.CCompiler
+import Distribution.Simple.Compiler
+import Distribution.Simple.LocalBuildInfo
+import Distribution.Simple.BuildPaths
+import Distribution.Simple.Utils
+import Distribution.Simple.Program
+import Distribution.Simple.Test.LibV09
+import Distribution.System
+import Distribution.Text
+import Distribution.Version
+import Distribution.Verbosity
+
+import Control.Monad
+import Data.Maybe (fromMaybe)
+import Data.List (nub, isSuffixOf)
+import System.Directory (doesFileExist)
+import System.Info (os, arch)
+import System.FilePath (splitExtension, dropExtensions, (</>), (<.>),
+                        takeDirectory, normalise, replaceExtension,
+                        takeExtensions)
+
+-- |The interface to a preprocessor, which may be implemented using an
+-- external program, but need not be.  The arguments are the name of
+-- the input file, the name of the output file and a verbosity level.
+-- Here is a simple example that merely prepends a comment to the given
+-- source file:
+--
+-- > ppTestHandler :: PreProcessor
+-- > ppTestHandler =
+-- >   PreProcessor {
+-- >     platformIndependent = True,
+-- >     runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
+-- >       do info verbosity (inFile++" has been preprocessed to "++outFile)
+-- >          stuff <- readFile inFile
+-- >          writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff)
+-- >          return ExitSuccess
+--
+-- We split the input and output file names into a base directory and the
+-- rest of the file name. The input base dir is the path in the list of search
+-- dirs that this file was found in. The output base dir is the build dir where
+-- all the generated source files are put.
+--
+-- The reason for splitting it up this way is that some pre-processors don't
+-- simply generate one output .hs file from one input file but have
+-- dependencies on other generated files (notably c2hs, where building one
+-- .hs file may require reading other .chi files, and then compiling the .hs
+-- file may require reading a generated .h file). In these cases the generated
+-- files need to embed relative path names to each other (eg the generated .hs
+-- file mentions the .h file in the FFI imports). This path must be relative to
+-- the base directory where the generated files are located, it cannot be
+-- relative to the top level of the build tree because the compilers do not
+-- look for .h files relative to there, ie we do not use \"-I .\", instead we
+-- use \"-I dist\/build\" (or whatever dist dir has been set by the user)
+--
+-- Most pre-processors do not care of course, so mkSimplePreProcessor and
+-- runSimplePreProcessor functions handle the simple case.
+--
+data PreProcessor = PreProcessor {
+
+  -- Is the output of the pre-processor platform independent? eg happy output
+  -- is portable haskell but c2hs's output is platform dependent.
+  -- This matters since only platform independent generated code can be
+  -- included into a source tarball.
+  platformIndependent :: Bool,
+
+  -- TODO: deal with pre-processors that have implementation dependent output
+  --       eg alex and happy have --ghc flags. However we can't really include
+  --       ghc-specific code into supposedly portable source tarballs.
+
+  runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir
+                  -> (FilePath, FilePath) -- Output file name, relative to an output base dir
+                  -> Verbosity -- verbosity
+                  -> IO ()     -- Should exit if the preprocessor fails
+  }
+
+-- | Function to determine paths to possible extra C sources for a
+-- preprocessor: just takes the path to the build directory and uses
+-- this to search for C sources with names that match the
+-- preprocessor's output name format.
+type PreProcessorExtras = FilePath -> IO [FilePath]
+
+
+mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
+                      -> (FilePath, FilePath)
+                      -> (FilePath, FilePath) -> Verbosity -> IO ()
+mkSimplePreProcessor simplePP
+  (inBaseDir, inRelativeFile)
+  (outBaseDir, outRelativeFile) verbosity = simplePP inFile outFile verbosity
+  where inFile  = normalise (inBaseDir  </> inRelativeFile)
+        outFile = normalise (outBaseDir </> outRelativeFile)
+
+runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity
+                      -> IO ()
+runSimplePreProcessor pp inFile outFile verbosity =
+  runPreProcessor pp (".", inFile) (".", outFile) verbosity
+
+-- |A preprocessor for turning non-Haskell files with the given extension
+-- into plain Haskell source files.
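-- [Editorial sketch, not part of this patch: 'runSimplePreProcessor' above is
--  the easiest way to exercise one of these preprocessors on its own. For
--  example, unliterating a single file with 'ppUnlit' (defined later in this
--  module) could look like the following; the file names are hypothetical and
--  'normal' is the standard verbosity from Distribution.Verbosity, which this
--  module already imports.
--
-- > unlitOne :: IO ()
-- > unlitOne = runSimplePreProcessor ppUnlit "Setup.lhs" "Setup.hs" normal
--
--  Because 'ppUnlit' is platform independent, its output is also safe to ship
--  in an sdist tarball, per the note on 'platformIndependent' above.]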
+type PPSuffixHandler
+    = (String, BuildInfo -> LocalBuildInfo -> PreProcessor)
+
+-- | Apply preprocessors to the sources from 'hsSourceDirs' for a given
+-- component (lib, exe, or test suite).
+preprocessComponent :: PackageDescription
+                    -> Component
+                    -> LocalBuildInfo
+                    -> Bool
+                    -> Verbosity
+                    -> [PPSuffixHandler]
+                    -> IO ()
+preprocessComponent pd comp lbi isSrcDist verbosity handlers = case comp of
+  (CLib lib@Library{ libBuildInfo = bi }) -> do
+    let dirs = hsSourceDirs bi ++ [autogenModulesDir lbi]
+    setupMessage verbosity "Preprocessing library" (packageId pd)
+    forM_ (map ModuleName.toFilePath $ libModules lib) $
+      pre dirs (buildDir lbi) (localHandlers bi)
+  (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do
+    let exeDir = buildDir lbi </> nm </> nm ++ "-tmp"
+        dirs   = hsSourceDirs bi ++ [autogenModulesDir lbi]
+    setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd)
+    forM_ (map ModuleName.toFilePath $ otherModules bi) $
+      pre dirs exeDir (localHandlers bi)
+    pre (hsSourceDirs bi) exeDir (localHandlers bi) $
+      dropExtensions (modulePath exe)
+  CTest test@TestSuite{ testName = nm } -> do
+    setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd)
+    case testInterface test of
+      TestSuiteExeV10 _ f ->
+          preProcessTest test f $ buildDir lbi </> testName test
+              </> testName test ++ "-tmp"
+      TestSuiteLibV09 _ _ -> do
+          let testDir = buildDir lbi </> stubName test
+                  </> stubName test ++ "-tmp"
+          writeSimpleTestStub test testDir
+          preProcessTest test (stubFilePath test) testDir
+      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
+                                    ++ "suite type " ++ display tt
+  CBench bm@Benchmark{ benchmarkName = nm } -> do
+    setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd)
+    case benchmarkInterface bm of
+      BenchmarkExeV10 _ f ->
+          preProcessBench bm f $ buildDir lbi </> benchmarkName bm
+              </> benchmarkName bm ++ "-tmp"
+      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
+                                    ++ "type " ++ display tt
+  where
+    builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"]
+    builtinCSuffixes       = cSourceExtensions
+    builtinSuffixes        = builtinHaskellSuffixes ++ builtinCSuffixes
+    localHandlers bi = [(ext, h bi lbi) | (ext, h) <- handlers]
+    pre dirs dir lhndlrs fp =
+      preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs
+    preProcessTest test = preProcessComponent (testBuildInfo test)
+                          (testModules test)
+    preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm)
+                         (benchmarkModules bm)
+    preProcessComponent bi modules exePath dir = do
+        let biHandlers = localHandlers bi
+            sourceDirs = hsSourceDirs bi ++ [ autogenModulesDir lbi ]
+        sequence_ [ preprocessFile sourceDirs dir isSrcDist
+                      (ModuleName.toFilePath modu) verbosity builtinSuffixes
+                      biHandlers
+                  | modu <- modules ]
+        preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist
+            (dropExtensions $ exePath) verbosity
+            builtinSuffixes biHandlers
+
+--TODO: try to list all the modules that could not be found
+--      not just the first one. It's annoying and slow due to the need
+--      to reconfigure after editing the .cabal file each time.
+
+-- |Find the first extension of the file that exists, and preprocess it
+-- if required.
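-- [Editorial sketch, not part of this patch: a 'PPSuffixHandler' pairs a file
--  extension with a way to construct the corresponding 'PreProcessor' once the
--  'BuildInfo' and 'LocalBuildInfo' are known. Reusing the hypothetical
--  'ppTestHandler' from the module documentation above, a handler for a
--  made-up ".testsuffix" extension would be
--
-- > testSuffixHandler :: PPSuffixHandler
-- > testSuffixHandler = ("testsuffix", \_bi _lbi -> ppTestHandler)
--
--  and would typically be appended to 'knownSuffixHandlers' before being
--  handed to 'preprocessComponent' above. Concretely, for a module Foo.Bar the
--  function below first searches the source directories for Foo/Bar.<ext>,
--  where <ext> is one of the handler suffixes, and a call might look like
--
-- > preprocessFile ["src"] "dist/build" False "Foo/Bar" verbosity
-- >                ["hs", "lhs"] [testSuffixHandler']
--
--  where "src", "dist/build" and 'testSuffixHandler'' (the handler already
--  applied to its BuildInfo and LocalBuildInfo) are illustrative placeholders.]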
+preprocessFile + :: [FilePath] -- ^source directories + -> FilePath -- ^build directory + -> Bool -- ^preprocess for sdist + -> FilePath -- ^module file name + -> Verbosity -- ^verbosity + -> [String] -- ^builtin suffixes + -> [(String, PreProcessor)] -- ^possible preprocessors + -> IO () +preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do + -- look for files in the various source dirs with this module name + -- and a file extension of a known preprocessor + psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile + case psrcFiles of + -- no preprocessor file exists, look for an ordinary source file + -- just to make sure one actually exists at all for this module. + -- Note: by looking in the target/output build dir too, we allow + -- source files to appear magically in the target build dir without + -- any corresponding "real" source file. This lets custom Setup.hs + -- files generate source modules directly into the build dir without + -- the rest of the build system being aware of it (somewhat dodgy) + Nothing -> do + bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile + case bsrcFiles of + Nothing -> die $ "can't find source for " ++ baseFile + ++ " in " ++ intercalate ", " searchLoc + _ -> return () + -- found a pre-processable file in one of the source dirs + Just (psrcLoc, psrcRelFile) -> do + let (srcStem, ext) = splitExtension psrcRelFile + psrcFile = psrcLoc psrcRelFile + pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected") + (lookup (tailNotNull ext) handlers) + -- Preprocessing files for 'sdist' is different from preprocessing + -- for 'build'. When preprocessing for sdist we preprocess to + -- avoid that the user has to have the preprocessors available. + -- ATM, we don't have a way to specify which files are to be + -- preprocessed and which not, so for sdist we only process + -- platform independent files and put them into the 'buildLoc' + -- (which we assume is set to the temp. directory that will become + -- the tarball). + --TODO: eliminate sdist variant, just supply different handlers + when (not forSDist || forSDist && platformIndependent pp) $ do + -- look for existing pre-processed source file in the dest dir to + -- see if we really have to re-run the preprocessor. + ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile + recomp <- case ppsrcFiles of + Nothing -> return True + Just ppsrcFile -> + psrcFile `moreRecentFile` ppsrcFile + when recomp $ do + let destDir = buildLoc dirName srcStem + createDirectoryIfMissingVerbose verbosity True destDir + runPreProcessorWithHsBootHack pp + (psrcLoc, psrcRelFile) + (buildLoc, srcStem <.> "hs") + + where + dirName = takeDirectory + tailNotNull [] = [] + tailNotNull x = tail x + + -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files + -- be in the same place as the hs files, so if we put the hs file in dist/ + -- then we need to copy the hs-boot file there too. This should probably be + -- done another way. Possibly we should also be looking for .lhs-boot + -- files, but I think that preprocessors only produce .hs files. 
+ runPreProcessorWithHsBootHack pp + (inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) = do + runPreProcessor pp + (inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) verbosity + + exists <- doesFileExist inBoot + when exists $ copyFileVerbose verbosity inBoot outBoot + + where + inBoot = replaceExtension inFile "hs-boot" + outBoot = replaceExtension outFile "hs-boot" + + inFile = normalise (inBaseDir inRelativeFile) + outFile = normalise (outBaseDir outRelativeFile) + +-- ------------------------------------------------------------ +-- * known preprocessors +-- ------------------------------------------------------------ + +ppGreenCard :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppGreenCard _ lbi + = PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> + rawSystemProgramConf verbosity greencardProgram (withPrograms lbi) + (["-tffi", "-o" ++ outFile, inFile]) + } + +-- This one is useful for preprocessors that can't handle literate source. +-- We also need a way to chain preprocessors. +ppUnlit :: PreProcessor +ppUnlit = + PreProcessor { + platformIndependent = True, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity -> + withUTF8FileContents inFile $ \contents -> + either (writeUTF8File outFile) die (unlit inFile contents) + } + +ppCpp :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppCpp = ppCpp' [] + +ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor +ppCpp' extraArgs bi lbi = + case compilerFlavor (compiler lbi) of + GHC -> ppGhcCpp ghcProgram (>= Version [6,6] []) args bi lbi + GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi + _ -> ppCpphs args bi lbi + where cppArgs = getCppOptions bi lbi + args = cppArgs ++ extraArgs + +ppGhcCpp :: Program -> (Version -> Bool) + -> [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor +ppGhcCpp program xHs extraArgs _bi lbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (prog, version, _) <- requireProgramVersion verbosity + program anyVersion (withPrograms lbi) + rawSystemProgram verbosity prog $ + ["-E", "-cpp"] + -- This is a bit of an ugly hack. We're going to + -- unlit the file ourselves later on if appropriate, + -- so we need GHC not to unlit it now or it'll get + -- double-unlitted. In the future we might switch to + -- using cpphs --unlit instead. 
+ ++ (if xHs version then ["-x", "hs"] else []) + ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi cppHeaderName) ] + ++ ["-o", outFile, inFile] + ++ extraArgs + } + +ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor +ppCpphs extraArgs _bi lbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity + cpphsProgram anyVersion (withPrograms lbi) + rawSystemProgram verbosity cpphsProg $ + ("-O" ++ outFile) : inFile + : "--noline" : "--strip" + : (if cpphsVersion >= Version [1,6] [] + then ["--include="++ (autogenModulesDir lbi cppHeaderName)] + else []) + ++ extraArgs + } + +ppHsc2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppHsc2hs bi lbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) + rawSystemProgramConf verbosity hsc2hsProgram (withPrograms lbi) $ + [ "--cc=" ++ programPath gccProg + , "--ld=" ++ programPath gccProg ] + + -- Additional gcc options + ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg + ++ programOverrideArgs gccProg ] + ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg + ++ programOverrideArgs gccProg ] + + -- OSX frameworks: + ++ [ what ++ "=-F" ++ opt + | isOSX + , opt <- nub (concatMap Installed.frameworkDirs pkgs) + , what <- ["--cflag", "--lflag"] ] + ++ [ "--lflag=" ++ arg + | isOSX + , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs + , arg <- ["-framework", opt] ] + + -- Note that on ELF systems, wherever we use -L, we must also use -R + -- because presumably that -L dir is not on the normal path for the + -- system's dynamic linker. This is needed because hsc2hs works by + -- compiling a C program and then running it. + + ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ] + + -- Options from the current package: + ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ] + ++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi + ++ PD.cppOptions bi ] + ++ [ "--cflag=" ++ opt | opt <- + [ "-I" ++ autogenModulesDir lbi, + "-include", autogenModulesDir lbi cppHeaderName ] ] + ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ] + ++ [ "--lflag=-Wl,-R," ++ opt | isELF + , opt <- PD.extraLibDirs bi ] + ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ] + ++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ] + + -- Options from dependent packages + ++ [ "--cflag=" ++ opt + | pkg <- pkgs + , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] + ++ [ opt | opt <- Installed.ccOptions pkg ] ] + ++ [ "--lflag=" ++ opt + | pkg <- pkgs + , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ] + ++ [ "-Wl,-R," ++ opt | isELF + , opt <- Installed.libraryDirs pkg ] + ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ] + ++ [ opt | opt <- Installed.ldOptions pkg ] ] + ++ ["-o", outFile, inFile] + } + where + -- TODO: installedPkgs contains ALL dependencies associated with + -- the package, but we really only want to look at packages for the + -- *current* dependency. We should use PackageIndex.dependencyClosure + -- on the direct depends of the component. Can't easily do that, + -- because the signature of this function is wrong. Tracked with + -- #2971 (which has a test case.) 
+ pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi)) + isOSX = case buildOS of OSX -> True; _ -> False + isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True; + packageHacks = case compilerFlavor (compiler lbi) of + GHC -> hackRtsPackage + GHCJS -> hackRtsPackage + _ -> id + -- We don't link in the actual Haskell libraries of our dependencies, so + -- the -u flags in the ldOptions of the rts package mean linking fails on + -- OS X (it's ld is a tad stricter than gnu ld). Thus we remove the + -- ldOptions for GHC's rts package: + hackRtsPackage index = + case PackageIndex.lookupPackageName index (PackageName "rts") of + [(_, [rts])] + -> PackageIndex.insert rts { Installed.ldOptions = [] } index + _ -> error "No (or multiple) ghc rts package is registered!!" + +ppHsc2hsExtras :: PreProcessorExtras +ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap` + getDirectoryContentsRecursive buildBaseDir + +ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppC2hs bi lbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = \(inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) verbosity -> do + (c2hsProg, _, _) <- requireProgramVersion verbosity + c2hsProgram (orLaterVersion (Version [0,15] [])) + (withPrograms lbi) + (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) + rawSystemProgram verbosity c2hsProg $ + + -- Options from the current package: + [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ] + ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ] + ++ [ "--cppopts=-include" ++ (autogenModulesDir lbi cppHeaderName) ] + ++ [ "--include=" ++ outBaseDir ] + + -- Options from dependent packages + ++ [ "--cppopts=" ++ opt + | pkg <- pkgs + , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] + ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg + , c `elem` "DIU" ] ] + --TODO: install .chi files for packages, so we can --include + -- those dirs here, for the dependencies + + -- input and output files + ++ [ "--output-dir=" ++ outBaseDir + , "--output=" ++ outRelativeFile + , inBaseDir inRelativeFile ] + } + where + pkgs = PackageIndex.topologicalOrder (installedPkgs lbi) + +ppC2hsExtras :: PreProcessorExtras +ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap` + getDirectoryContentsRecursive d + +--TODO: perhaps use this with hsc2hs too +--TODO: remove cc-options from cpphs for cabal-version: >= 1.10 +getCppOptions :: BuildInfo -> LocalBuildInfo -> [String] +getCppOptions bi lbi + = platformDefines lbi + ++ cppOptions bi + ++ ["-I" ++ dir | dir <- PD.includeDirs bi] + ++ [opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU"] + +platformDefines :: LocalBuildInfo -> [String] +platformDefines lbi = + case compilerFlavor comp of + GHC -> + ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++ + ["-D" ++ os ++ "_BUILD_OS=1"] ++ + ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + GHCJS -> + compatGlasgowHaskell ++ + ["-D__GHCJS__=" ++ versionInt version] ++ + ["-D" ++ os ++ "_BUILD_OS=1"] ++ + ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + JHC -> ["-D__JHC__=" ++ versionInt version] + HaskellSuite {} -> + ["-D__HASKELL_SUITE__"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + _ -> [] + where + comp = 
compiler lbi + Platform hostArch hostOS = hostPlatform lbi + version = compilerVersion comp + compatGlasgowHaskell = + maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v]) + (compilerCompatVersion GHC comp) + -- TODO: move this into the compiler abstraction + -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all + -- the other compilers. Check if that's really what they want. + versionInt :: Version -> String + versionInt (Version { versionBranch = [] }) = "1" + versionInt (Version { versionBranch = [n] }) = show n + versionInt (Version { versionBranch = n1:n2:_ }) + = -- 6.8.x -> 608 + -- 6.10.x -> 610 + let s1 = show n1 + s2 = show n2 + middle = case s2 of + _ : _ : _ -> "" + _ -> "0" + in s1 ++ middle ++ s2 + osStr = case hostOS of + Linux -> ["linux"] + Windows -> ["mingw32"] + OSX -> ["darwin"] + FreeBSD -> ["freebsd"] + OpenBSD -> ["openbsd"] + NetBSD -> ["netbsd"] + DragonFly -> ["dragonfly"] + Solaris -> ["solaris2"] + AIX -> ["aix"] + HPUX -> ["hpux"] + IRIX -> ["irix"] + HaLVM -> [] + IOS -> ["ios"] + Android -> ["android"] + Ghcjs -> ["ghcjs"] + Hurd -> ["hurd"] + OtherOS _ -> [] + archStr = case hostArch of + I386 -> ["i386"] + X86_64 -> ["x86_64"] + PPC -> ["powerpc"] + PPC64 -> ["powerpc64"] + Sparc -> ["sparc"] + Arm -> ["arm"] + Mips -> ["mips"] + SH -> [] + IA64 -> ["ia64"] + S390 -> ["s390"] + Alpha -> ["alpha"] + Hppa -> ["hppa"] + Rs6000 -> ["rs6000"] + M68k -> ["m68k"] + Vax -> ["vax"] + JavaScript -> ["javascript"] + OtherArch _ -> [] + +ppHappy :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppHappy _ lbi = pp { platformIndependent = True } + where pp = standardPP lbi happyProgram (hcFlags hc) + hc = compilerFlavor (compiler lbi) + hcFlags GHC = ["-agc"] + hcFlags GHCJS = ["-agc"] + hcFlags _ = [] + +ppAlex :: BuildInfo -> LocalBuildInfo -> PreProcessor +ppAlex _ lbi = pp { platformIndependent = True } + where pp = standardPP lbi alexProgram (hcFlags hc) + hc = compilerFlavor (compiler lbi) + hcFlags GHC = ["-g"] + hcFlags GHCJS = ["-g"] + hcFlags _ = [] + +standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor +standardPP lbi prog args = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> + rawSystemProgramConf verbosity prog (withPrograms lbi) + (args ++ ["-o", outFile, inFile]) + } + +-- |Convenience function; get the suffixes of these preprocessors. +ppSuffixes :: [ PPSuffixHandler ] -> [String] +ppSuffixes = map fst + +-- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs. +knownSuffixHandlers :: [ PPSuffixHandler ] +knownSuffixHandlers = + [ ("gc", ppGreenCard) + , ("chs", ppC2hs) + , ("hsc", ppHsc2hs) + , ("x", ppAlex) + , ("y", ppHappy) + , ("ly", ppHappy) + , ("cpphs", ppCpp) + ] + +-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs. +knownExtrasHandlers :: [ PreProcessorExtras ] +knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ] + +-- | Find any extra C sources generated by preprocessing that need to +-- be added to the component (addresses issue #238). 
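+--
+-- (Editorial example, not part of the upstream module.)  A typical caller
+-- runs this after preprocessing and appends the result to the component's
+-- C sources, e.g. to pick up hsc2hs- or c2hs-generated @*_hsc.c@ and
+-- @*.chs.c@ files:
+--
+-- > cSrcs <- preprocessExtras comp lbi
+-- > -- add cSrcs to the component's cSources before compiling
+--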
+preprocessExtras :: Component
+                 -> LocalBuildInfo
+                 -> IO [FilePath]
+preprocessExtras comp lbi = case comp of
+  CLib _ -> pp $ buildDir lbi
+  (CExe Executable { exeName = nm }) ->
+    pp $ buildDir lbi </> nm </> nm ++ "-tmp"
+  CTest test -> do
+    case testInterface test of
+      TestSuiteExeV10 _ _ ->
+          pp $ buildDir lbi </> testName test </> testName test ++ "-tmp"
+      TestSuiteLibV09 _ _ ->
+          pp $ buildDir lbi </> stubName test </> stubName test ++ "-tmp"
+      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
+                                    ++ "suite type " ++ display tt
+  CBench bm -> do
+    case benchmarkInterface bm of
+      BenchmarkExeV10 _ _ ->
+          pp $ buildDir lbi </> benchmarkName bm </> benchmarkName bm ++ "-tmp"
+      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
+                                    ++ "type " ++ display tt
+  where
+    pp dir = (map (dir </>) . concat) `fmap` forM knownExtrasHandlers ($ dir)
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ar.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ar.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ar.hs 2016-12-23 10:35:21.000000000 +0000
@@ -0,0 +1,166 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+-----------------------------------------------------------------------------
+-- |
+-- Module      :  Distribution.Simple.Program.Ar
+-- Copyright   :  Duncan Coutts 2009
+--
+-- Maintainer  :  cabal-devel@haskell.org
+-- Portability :  portable
+--
+-- This module provides a library interface to the @ar@ program.
+
+module Distribution.Simple.Program.Ar (
+    createArLibArchive,
+    multiStageProgramInvocation
+  ) where
+
+import Control.Monad (unless)
+import qualified Data.ByteString as BS
+import qualified Data.ByteString.Char8 as BS8
+import Data.Char (isSpace)
+import Distribution.Compat.CopyFile (filesEqual)
+import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..))
+import Distribution.Simple.Program
+         ( arProgram, requireProgram )
+import Distribution.Simple.Program.Run
+         ( programInvocation, multiStageProgramInvocation
+         , runProgramInvocation )
+import Distribution.Simple.Utils
+         ( dieWithLocation, withTempDirectory )
+import Distribution.System
+         ( Arch(..), OS(..), Platform(..) )
+import Distribution.Verbosity
+         ( Verbosity, deafening, verbose )
+import System.Directory (doesFileExist, renameFile)
+import System.FilePath ((</>), splitFileName)
+import System.IO
+         ( Handle, IOMode(ReadWriteMode), SeekMode(AbsoluteSeek)
+         , hFileSize, hSeek, withBinaryFile )
+
+-- | Call @ar@ to create a library archive from a bunch of object files.
+--
+createArLibArchive :: Verbosity -> LocalBuildInfo
+                   -> FilePath -> [FilePath] -> IO ()
+createArLibArchive verbosity lbi targetPath files = do
+  (ar, _) <- requireProgram verbosity arProgram progConf
+
+  let (targetDir, targetName) = splitFileName targetPath
+  withTempDirectory verbosity targetDir "objs" $ \ tmpDir -> do
+    let tmpPath = tmpDir </> targetName
+
+    -- The args to use with "ar" are actually rather subtle and system-dependent.
+    -- In particular we have the following issues:
+    --
+    --  -- On OS X, "ar q" does not make an archive index. Archives with no
+    --     index cannot be used.
+    --
+    --  -- GNU "ar r" will not let us add duplicate objects, only "ar q" lets us
+    --     do that.
We have duplicates because of modules like "A.M" and "B.M" + -- both make an object file "M.o" and ar does not consider the directory. + -- + -- Our solution is to use "ar r" in the simple case when one call is enough. + -- When we need to call ar multiple times we use "ar q" and for the last + -- call on OSX we use "ar qs" so that it'll make the index. + + let simpleArgs = case hostOS of + OSX -> ["-r", "-s"] + _ -> ["-r"] + + initialArgs = ["-q"] + finalArgs = case hostOS of + OSX -> ["-q", "-s"] + _ -> ["-q"] + + extraArgs = verbosityOpts verbosity ++ [tmpPath] + + simple = programInvocation ar (simpleArgs ++ extraArgs) + initial = programInvocation ar (initialArgs ++ extraArgs) + middle = initial + final = programInvocation ar (finalArgs ++ extraArgs) + + sequence_ + [ runProgramInvocation verbosity inv + | inv <- multiStageProgramInvocation + simple (initial, middle, final) files ] + + unless (hostArch == Arm -- See #1537 + || hostOS == AIX) $ -- AIX uses its own "ar" format variant + wipeMetadata tmpPath + equal <- filesEqual tmpPath targetPath + unless equal $ renameFile tmpPath targetPath + + where + progConf = withPrograms lbi + Platform hostArch hostOS = hostPlatform lbi + verbosityOpts v | v >= deafening = ["-v"] + | v >= verbose = [] + | otherwise = ["-c"] + +-- | @ar@ by default includes various metadata for each object file in their +-- respective headers, so the output can differ for the same inputs, making +-- it difficult to avoid re-linking. GNU @ar@(1) has a deterministic mode +-- (@-D@) flag that always writes zero for the mtime, UID and GID, and 0644 +-- for the file mode. However detecting whether @-D@ is supported seems +-- rather harder than just re-implementing this feature. +wipeMetadata :: FilePath -> IO () +wipeMetadata path = do + -- Check for existence first (ReadWriteMode would create one otherwise) + exists <- doesFileExist path + unless exists $ wipeError "Temporary file disappeared" + withBinaryFile path ReadWriteMode $ \ h -> hFileSize h >>= wipeArchive h + + where + wipeError msg = dieWithLocation path Nothing $ + "Distribution.Simple.Program.Ar.wipeMetadata: " ++ msg + archLF = "!\x0a" -- global magic, 8 bytes + x60LF = "\x60\x0a" -- header magic, 2 bytes + metadata = BS.concat + [ "0 " -- mtime, 12 bytes + , "0 " -- UID, 6 bytes + , "0 " -- GID, 6 bytes + , "0644 " -- mode, 8 bytes + ] + headerSize :: Int + headerSize = 60 + + -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details + wipeArchive :: Handle -> Integer -> IO () + wipeArchive h archiveSize = do + global <- BS.hGet h (BS.length archLF) + unless (global == archLF) $ wipeError "Bad global header" + wipeHeader (toInteger $ BS.length archLF) + + where + wipeHeader :: Integer -> IO () + wipeHeader offset = case compare offset archiveSize of + EQ -> return () + GT -> wipeError (atOffset "Archive truncated") + LT -> do + header <- BS.hGet h headerSize + unless (BS.length header == headerSize) $ + wipeError (atOffset "Short header") + let magic = BS.drop 58 header + unless (magic == x60LF) . wipeError . 
atOffset $ + "Bad magic " ++ show magic ++ " in header" + + let name = BS.take 16 header + let size = BS.take 10 $ BS.drop 48 header + objSize <- case reads (BS8.unpack size) of + [(n, s)] | all isSpace s -> return n + _ -> wipeError (atOffset "Bad file size in header") + + let replacement = BS.concat [ name, metadata, size, magic ] + unless (BS.length replacement == headerSize) $ + wipeError (atOffset "Something has gone terribly wrong") + hSeek h AbsoluteSeek offset + BS.hPut h replacement + + let nextHeader = offset + toInteger headerSize + + -- Odd objects are padded with an extra '\x0a' + if odd objSize then objSize + 1 else objSize + hSeek h AbsoluteSeek nextHeader + wipeHeader nextHeader + + where + atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Builtin.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Builtin.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Builtin.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Builtin.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,337 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Builtin +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- The module defines all the known built-in 'Program's. +-- +-- Where possible we try to find their version numbers. +-- +module Distribution.Simple.Program.Builtin ( + + -- * The collection of unconfigured and configured programs + builtinPrograms, + + -- * Programs that Cabal knows about + ghcProgram, + ghcPkgProgram, + ghcjsProgram, + ghcjsPkgProgram, + lhcProgram, + lhcPkgProgram, + hmakeProgram, + jhcProgram, + haskellSuiteProgram, + haskellSuitePkgProgram, + uhcProgram, + gccProgram, + arProgram, + stripProgram, + happyProgram, + alexProgram, + hsc2hsProgram, + c2hsProgram, + cpphsProgram, + hscolourProgram, + haddockProgram, + greencardProgram, + ldProgram, + tarProgram, + cppProgram, + pkgConfigProgram, + hpcProgram, + ) where + +import Distribution.Simple.Program.Find +import Distribution.Simple.Program.Internal +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Types +import Distribution.Simple.Utils +import Distribution.Compat.Exception +import Distribution.Verbosity +import Distribution.Version + +import Data.Char + ( isDigit ) +import qualified Data.Map as Map + +-- ------------------------------------------------------------ +-- * Known programs +-- ------------------------------------------------------------ + +-- | The default list of programs. +-- These programs are typically used internally to Cabal. 
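+--
+-- (Editorial example, not part of the upstream list; @mytool@ is a
+-- hypothetical tool.)  Each entry below is built the same way, with
+-- 'simpleProgram' plus a version probe, for instance:
+--
+-- > myToolProgram :: Program
+-- > myToolProgram = (simpleProgram "mytool") {
+-- >     programFindVersion = findProgramVersion "--version" $ \str ->
+-- >       -- assuming "mytool --version" prints something like "mytool 1.2"
+-- >       case words str of
+-- >         (_:ver:_) -> ver
+-- >         _         -> ""
+-- >   }
+--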
+builtinPrograms :: [Program] +builtinPrograms = + [ + -- compilers and related progs + ghcProgram + , ghcPkgProgram + , ghcjsProgram + , ghcjsPkgProgram + , haskellSuiteProgram + , haskellSuitePkgProgram + , hmakeProgram + , jhcProgram + , lhcProgram + , lhcPkgProgram + , uhcProgram + , hpcProgram + -- preprocessors + , hscolourProgram + , haddockProgram + , happyProgram + , alexProgram + , hsc2hsProgram + , c2hsProgram + , cpphsProgram + , greencardProgram + -- platform toolchain + , gccProgram + , arProgram + , stripProgram + , ldProgram + , tarProgram + -- configuration tools + , pkgConfigProgram + ] + +ghcProgram :: Program +ghcProgram = (simpleProgram "ghc") { + programFindVersion = findProgramVersion "--numeric-version" id, + + -- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825 + -- (spurious warning on non-english locales) + programPostConf = \_verbosity ghcProg -> + do let ghcProg' = ghcProg { + programOverrideEnv = ("LANGUAGE", Just "en") + : programOverrideEnv ghcProg + } + -- Only the 7.8 branch seems to be affected. Fixed in 7.8.4. + affectedVersionRange = intersectVersionRanges + (laterVersion $ Version [7,8,0] []) + (earlierVersion $ Version [7,8,4] []) + return $ maybe ghcProg + (\v -> if withinRange v affectedVersionRange + then ghcProg' else ghcProg) + (programVersion ghcProg) + } + +ghcPkgProgram :: Program +ghcPkgProgram = (simpleProgram "ghc-pkg") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "ghc-pkg --version" gives a string like + -- "GHC package manager version 6.4.1" + case words str of + (_:_:_:_:ver:_) -> ver + _ -> "" + } + +ghcjsProgram :: Program +ghcjsProgram = (simpleProgram "ghcjs") { + programFindVersion = findProgramVersion "--numeric-ghcjs-version" id + } + +-- note: version is the version number of the GHC version that ghcjs-pkg was built with +ghcjsPkgProgram :: Program +ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "ghcjs-pkg --version" gives a string like + -- "GHCJS package manager version 6.4.1" + case words str of + (_:_:_:_:ver:_) -> ver + _ -> "" + } + +lhcProgram :: Program +lhcProgram = (simpleProgram "lhc") { + programFindVersion = findProgramVersion "--numeric-version" id + } + +lhcPkgProgram :: Program +lhcPkgProgram = (simpleProgram "lhc-pkg") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "lhc-pkg --version" gives a string like + -- "LHC package manager version 0.7" + case words str of + (_:_:_:_:ver:_) -> ver + _ -> "" + } + +hmakeProgram :: Program +hmakeProgram = (simpleProgram "hmake") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "hmake --version" gives a string line + -- "/usr/local/bin/hmake: 3.13 (2006-11-01)" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +jhcProgram :: Program +jhcProgram = (simpleProgram "jhc") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- invoking "jhc --version" gives a string like + -- "jhc 0.3.20080208 (wubgipkamcep-2) + -- compiled by ghc-6.8 on a x86_64 running linux" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +uhcProgram :: Program +uhcProgram = (simpleProgram "uhc") { + programFindVersion = findProgramVersion "--version-dotted" id + } + +hpcProgram :: Program +hpcProgram = (simpleProgram "hpc") + { + programFindVersion = findProgramVersion "version" $ \str -> + case words str of + (_ : _ : _ : ver : _) -> ver + _ -> "" + } + +-- This represents a haskell-suite 
compiler. Of course, the compiler +-- itself probably is not called "haskell-suite", so this is not a real +-- program. (But we don't know statically the name of the actual compiler, +-- so this is the best we can do.) +-- +-- Having this Program value serves two purposes: +-- +-- 1. We can accept options for the compiler in the form of +-- +-- --haskell-suite-option(s)=... +-- +-- 2. We can find a program later using this static id (with +-- requireProgram). +-- +-- The path to the real compiler is found and recorded in the ProgramDb +-- during the configure phase. +haskellSuiteProgram :: Program +haskellSuiteProgram = (simpleProgram "haskell-suite") { + -- pretend that the program exists, otherwise it won't be in the + -- "configured" state + programFindLocation = \_verbosity _searchPath -> + return $ Just ("haskell-suite-dummy-location", []) + } + +-- This represent a haskell-suite package manager. See the comments for +-- haskellSuiteProgram. +haskellSuitePkgProgram :: Program +haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") { + programFindLocation = \_verbosity _searchPath -> + return $ Just ("haskell-suite-pkg-dummy-location", []) + } + + +happyProgram :: Program +happyProgram = (simpleProgram "happy") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "happy --version" gives a string like + -- "Happy Version 1.16 Copyright (c) ...." + case words str of + (_:_:ver:_) -> ver + _ -> "" + } + +alexProgram :: Program +alexProgram = (simpleProgram "alex") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "alex --version" gives a string like + -- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow" + case words str of + (_:_:ver:_) -> takeWhile (\x -> isDigit x || x == '.') ver + _ -> "" + } + +gccProgram :: Program +gccProgram = (simpleProgram "gcc") { + programFindVersion = findProgramVersion "-dumpversion" id + } + +arProgram :: Program +arProgram = simpleProgram "ar" + +stripProgram :: Program +stripProgram = (simpleProgram "strip") { + programFindVersion = \verbosity -> + findProgramVersion "--version" stripExtractVersion (lessVerbose verbosity) + } + +hsc2hsProgram :: Program +hsc2hsProgram = (simpleProgram "hsc2hs") { + programFindVersion = + findProgramVersion "--version" $ \str -> + -- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66" + case words str of + (_:_:ver:_) -> ver + _ -> "" + } + +c2hsProgram :: Program +c2hsProgram = (simpleProgram "c2hs") { + programFindVersion = findProgramVersion "--numeric-version" id + } + +cpphsProgram :: Program +cpphsProgram = (simpleProgram "cpphs") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "cpphs --version" gives a string like "cpphs 1.3" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +hscolourProgram :: Program +hscolourProgram = (simpleProgram "hscolour") { + programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour", + programFindVersion = findProgramVersion "-version" $ \str -> + -- Invoking "HsColour -version" gives a string like "HsColour 1.7" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +haddockProgram :: Program +haddockProgram = (simpleProgram "haddock") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "haddock --version" gives a string like + -- "Haddock version 0.8, (c) Simon Marlow 2006" + case words str of + (_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver + _ -> "" + } + +greencardProgram :: Program +greencardProgram = 
simpleProgram "greencard" + +ldProgram :: Program +ldProgram = simpleProgram "ld" + +tarProgram :: Program +tarProgram = (simpleProgram "tar") { + -- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the + -- '--format' option. + programPostConf = \verbosity tarProg -> do + tarHelpOutput <- getProgramInvocationOutput + verbosity (programInvocation tarProg ["--help"]) + -- Some versions of tar don't support '--help'. + `catchIO` (\_ -> return "") + let k = "Supports --format" + v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO" + m = Map.insert k v (programProperties tarProg) + return $ tarProg { programProperties = m } + } + +cppProgram :: Program +cppProgram = simpleProgram "cpp" + +pkgConfigProgram :: Program +pkgConfigProgram = (simpleProgram "pkg-config") { + programFindVersion = findProgramVersion "--version" id + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Db.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Db.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Db.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Db.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,475 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Db +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides a 'ProgramDb' type which holds configured and not-yet +-- configured programs. It is the parameter to lots of actions elsewhere in +-- Cabal that need to look up and run programs. If we had a Cabal monad, +-- the 'ProgramDb' would probably be a reader or state component of it. +-- +-- One nice thing about using it is that any program that is +-- registered with Cabal will get some \"configure\" and \".cabal\" +-- helpers like --with-foo-args --foo-path= and extra-foo-args. +-- +-- There's also a hook for adding programs in a Setup.lhs script. See +-- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a +-- hook user the ability to get the above flags and such so that they +-- don't have to write all the PATH logic inside Setup.lhs. 
+ +module Distribution.Simple.Program.Db ( + -- * The collection of configured programs we can run + ProgramDb, + emptyProgramDb, + defaultProgramDb, + restoreProgramDb, + + -- ** Query and manipulate the program db + addKnownProgram, + addKnownPrograms, + lookupKnownProgram, + knownPrograms, + getProgramSearchPath, + setProgramSearchPath, + modifyProgramSearchPath, + userSpecifyPath, + userSpecifyPaths, + userMaybeSpecifyPath, + userSpecifyArgs, + userSpecifyArgss, + userSpecifiedArgs, + lookupProgram, + updateProgram, + configuredPrograms, + + -- ** Query and manipulate the program db + configureProgram, + configureAllKnownPrograms, + lookupProgramVersion, + reconfigurePrograms, + requireProgram, + requireProgramVersion, + + ) where + +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Find +import Distribution.Simple.Program.Builtin +import Distribution.Simple.Utils +import Distribution.Version +import Distribution.Text +import Distribution.Verbosity +import Distribution.Compat.Binary + +import Data.List + ( foldl' ) +import Data.Maybe + ( catMaybes ) +import Data.Tuple (swap) +import qualified Data.Map as Map +import Control.Monad + ( join, foldM ) + +-- ------------------------------------------------------------ +-- * Programs database +-- ------------------------------------------------------------ + +-- | The configuration is a collection of information about programs. It +-- contains information both about configured programs and also about programs +-- that we are yet to configure. +-- +-- The idea is that we start from a collection of unconfigured programs and one +-- by one we try to configure them at which point we move them into the +-- configured collection. For unconfigured programs we record not just the +-- 'Program' but also any user-provided arguments and location for the program. +data ProgramDb = ProgramDb { + unconfiguredProgs :: UnconfiguredProgs, + progSearchPath :: ProgramSearchPath, + configuredProgs :: ConfiguredProgs + } + +type UnconfiguredProgram = (Program, Maybe FilePath, [ProgArg]) +type UnconfiguredProgs = Map.Map String UnconfiguredProgram +type ConfiguredProgs = Map.Map String ConfiguredProgram + + +emptyProgramDb :: ProgramDb +emptyProgramDb = ProgramDb Map.empty defaultProgramSearchPath Map.empty + +defaultProgramDb :: ProgramDb +defaultProgramDb = restoreProgramDb builtinPrograms emptyProgramDb + + +-- internal helpers: +updateUnconfiguredProgs :: (UnconfiguredProgs -> UnconfiguredProgs) + -> ProgramDb -> ProgramDb +updateUnconfiguredProgs update conf = + conf { unconfiguredProgs = update (unconfiguredProgs conf) } + +updateConfiguredProgs :: (ConfiguredProgs -> ConfiguredProgs) + -> ProgramDb -> ProgramDb +updateConfiguredProgs update conf = + conf { configuredProgs = update (configuredProgs conf) } + + +-- Read & Show instances are based on listToFM + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. +-- +instance Show ProgramDb where + show = show . Map.toAscList . configuredProgs + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. +-- +instance Read ProgramDb where + readsPrec p s = + [ (emptyProgramDb { configuredProgs = Map.fromList s' }, r) + | (s', r) <- readsPrec p s ] + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. 
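+--
+-- (Editorial example, not part of the upstream module; assumes a 'decode'
+-- from the binary package is in scope.)  After deserialising, the known
+-- programs have to be added back explicitly:
+--
+-- > let db = restoreProgramDb builtinPrograms (decode bytes)
+--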
+-- +instance Binary ProgramDb where + put db = do + put (progSearchPath db) + put (configuredProgs db) + + get = do + searchpath <- get + progs <- get + return $! emptyProgramDb { + progSearchPath = searchpath, + configuredProgs = progs + } + + +-- | The 'Read'\/'Show' and 'Binary' instances do not preserve all the +-- unconfigured 'Programs' because 'Program' is not in 'Read'\/'Show' because +-- it contains functions. So to fully restore a deserialised 'ProgramDb' use +-- this function to add back all the known 'Program's. +-- +-- * It does not add the default programs, but you probably want them, use +-- 'builtinPrograms' in addition to any extra you might need. +-- +restoreProgramDb :: [Program] -> ProgramDb -> ProgramDb +restoreProgramDb = addKnownPrograms + + +-- ------------------------------- +-- Managing unconfigured programs + +-- | Add a known program that we may configure later +-- +addKnownProgram :: Program -> ProgramDb -> ProgramDb +addKnownProgram prog = updateUnconfiguredProgs $ + Map.insertWith combine (programName prog) (prog, Nothing, []) + where combine _ (_, path, args) = (prog, path, args) + + +addKnownPrograms :: [Program] -> ProgramDb -> ProgramDb +addKnownPrograms progs conf = foldl' (flip addKnownProgram) conf progs + + +lookupKnownProgram :: String -> ProgramDb -> Maybe Program +lookupKnownProgram name = + fmap (\(p,_,_)->p) . Map.lookup name . unconfiguredProgs + + +knownPrograms :: ProgramDb -> [(Program, Maybe ConfiguredProgram)] +knownPrograms conf = + [ (p,p') | (p,_,_) <- Map.elems (unconfiguredProgs conf) + , let p' = Map.lookup (programName p) (configuredProgs conf) ] + +-- | Get the current 'ProgramSearchPath' used by the 'ProgramDb'. +-- This is the default list of locations where programs are looked for when +-- configuring them. This can be overridden for specific programs (with +-- 'userSpecifyPath'), and specific known programs can modify or ignore this +-- search path in their own configuration code. +-- +getProgramSearchPath :: ProgramDb -> ProgramSearchPath +getProgramSearchPath = progSearchPath + +-- | Change the current 'ProgramSearchPath' used by the 'ProgramDb'. +-- This will affect programs that are configured from here on, so you +-- should usually set it before configuring any programs. +-- +setProgramSearchPath :: ProgramSearchPath -> ProgramDb -> ProgramDb +setProgramSearchPath searchpath db = db { progSearchPath = searchpath } + +-- | Modify the current 'ProgramSearchPath' used by the 'ProgramDb'. +-- This will affect programs that are configured from here on, so you +-- should usually modify it before configuring any programs. +-- +modifyProgramSearchPath :: (ProgramSearchPath -> ProgramSearchPath) + -> ProgramDb + -> ProgramDb +modifyProgramSearchPath f db = + setProgramSearchPath (f $ getProgramSearchPath db) db + +-- |User-specify this path. Basically override any path information +-- for this program in the configuration. If it's not a known +-- program ignore it. +-- +userSpecifyPath :: String -- ^Program name + -> FilePath -- ^user-specified path to the program + -> ProgramDb -> ProgramDb +userSpecifyPath name path = updateUnconfiguredProgs $ + flip Map.update name $ \(prog, _, args) -> Just (prog, Just path, args) + + +userMaybeSpecifyPath :: String -> Maybe FilePath + -> ProgramDb -> ProgramDb +userMaybeSpecifyPath _ Nothing conf = conf +userMaybeSpecifyPath name (Just path) conf = userSpecifyPath name path conf + + +-- |User-specify the arguments for this program. 
Basically override +-- any args information for this program in the configuration. If it's +-- not a known program, ignore it.. +userSpecifyArgs :: String -- ^Program name + -> [ProgArg] -- ^user-specified args + -> ProgramDb + -> ProgramDb +userSpecifyArgs name args' = + updateUnconfiguredProgs + (flip Map.update name $ + \(prog, path, args) -> Just (prog, path, args ++ args')) + . updateConfiguredProgs + (flip Map.update name $ + \prog -> Just prog { programOverrideArgs = programOverrideArgs prog + ++ args' }) + + +-- | Like 'userSpecifyPath' but for a list of progs and their paths. +-- +userSpecifyPaths :: [(String, FilePath)] + -> ProgramDb + -> ProgramDb +userSpecifyPaths paths conf = + foldl' (\conf' (prog, path) -> userSpecifyPath prog path conf') conf paths + + +-- | Like 'userSpecifyPath' but for a list of progs and their args. +-- +userSpecifyArgss :: [(String, [ProgArg])] + -> ProgramDb + -> ProgramDb +userSpecifyArgss argss conf = + foldl' (\conf' (prog, args) -> userSpecifyArgs prog args conf') conf argss + + +-- | Get the path that has been previously specified for a program, if any. +-- +userSpecifiedPath :: Program -> ProgramDb -> Maybe FilePath +userSpecifiedPath prog = + join . fmap (\(_,p,_)->p) . Map.lookup (programName prog) . unconfiguredProgs + + +-- | Get any extra args that have been previously specified for a program. +-- +userSpecifiedArgs :: Program -> ProgramDb -> [ProgArg] +userSpecifiedArgs prog = + maybe [] (\(_,_,as)->as) . Map.lookup (programName prog) . unconfiguredProgs + + +-- ----------------------------- +-- Managing configured programs + +-- | Try to find a configured program +lookupProgram :: Program -> ProgramDb -> Maybe ConfiguredProgram +lookupProgram prog = Map.lookup (programName prog) . configuredProgs + + +-- | Update a configured program in the database. +updateProgram :: ConfiguredProgram -> ProgramDb + -> ProgramDb +updateProgram prog = updateConfiguredProgs $ + Map.insert (programId prog) prog + + +-- | List all configured programs. +configuredPrograms :: ProgramDb -> [ConfiguredProgram] +configuredPrograms = Map.elems . configuredProgs + +-- --------------------------- +-- Configuring known programs + +-- | Try to configure a specific program. If the program is already included in +-- the collection of unconfigured programs then we use any user-supplied +-- location and arguments. If the program gets configured successfully it gets +-- added to the configured collection. +-- +-- Note that it is not a failure if the program cannot be configured. It's only +-- a failure if the user supplied a location and the program could not be found +-- at that location. +-- +-- The reason for it not being a failure at this stage is that we don't know up +-- front all the programs we will need, so we try to configure them all. +-- To verify that a program was actually successfully configured use +-- 'requireProgram'. +-- +configureProgram :: Verbosity + -> Program + -> ProgramDb + -> IO ProgramDb +configureProgram verbosity prog conf = do + let name = programName prog + maybeLocation <- case userSpecifiedPath prog conf of + Nothing -> + programFindLocation prog verbosity (progSearchPath conf) + >>= return . fmap (swap . fmap FoundOnSystem . swap) + Just path -> do + absolute <- doesExecutableExist path + if absolute + then return (Just (UserSpecified path, [])) + else findProgramOnSearchPath verbosity (progSearchPath conf) path + >>= maybe (die notFound) + (return . Just . swap . fmap UserSpecified . 
swap) + where notFound = "Cannot find the program '" ++ name + ++ "'. User-specified path '" + ++ path ++ "' does not refer to an executable and " + ++ "the program is not on the system path." + case maybeLocation of + Nothing -> return conf + Just (location, triedLocations) -> do + version <- programFindVersion prog verbosity (locationPath location) + newPath <- programSearchPathAsPATHVar (progSearchPath conf) + let configuredProg = ConfiguredProgram { + programId = name, + programVersion = version, + programDefaultArgs = [], + programOverrideArgs = userSpecifiedArgs prog conf, + programOverrideEnv = [("PATH", Just newPath)], + programProperties = Map.empty, + programLocation = location, + programMonitorFiles = triedLocations + } + configuredProg' <- programPostConf prog verbosity configuredProg + return (updateConfiguredProgs (Map.insert name configuredProg') conf) + + +-- | Configure a bunch of programs using 'configureProgram'. Just a 'foldM'. +-- +configurePrograms :: Verbosity + -> [Program] + -> ProgramDb + -> IO ProgramDb +configurePrograms verbosity progs conf = + foldM (flip (configureProgram verbosity)) conf progs + + +-- | Try to configure all the known programs that have not yet been configured. +-- +configureAllKnownPrograms :: Verbosity + -> ProgramDb + -> IO ProgramDb +configureAllKnownPrograms verbosity conf = + configurePrograms verbosity + [ prog | (prog,_,_) <- Map.elems notYetConfigured ] conf + where + notYetConfigured = unconfiguredProgs conf + `Map.difference` configuredProgs conf + + +-- | reconfigure a bunch of programs given new user-specified args. It takes +-- the same inputs as 'userSpecifyPath' and 'userSpecifyArgs' and for all progs +-- with a new path it calls 'configureProgram'. +-- +reconfigurePrograms :: Verbosity + -> [(String, FilePath)] + -> [(String, [ProgArg])] + -> ProgramDb + -> IO ProgramDb +reconfigurePrograms verbosity paths argss conf = do + configurePrograms verbosity progs + . userSpecifyPaths paths + . userSpecifyArgss argss + $ conf + + where + progs = catMaybes [ lookupKnownProgram name conf | (name,_) <- paths ] + + +-- | Check that a program is configured and available to be run. +-- +-- It raises an exception if the program could not be configured, otherwise +-- it returns the configured program. +-- +requireProgram :: Verbosity -> Program -> ProgramDb + -> IO (ConfiguredProgram, ProgramDb) +requireProgram verbosity prog conf = do + + -- If it's not already been configured, try to configure it now + conf' <- case lookupProgram prog conf of + Nothing -> configureProgram verbosity prog conf + Just _ -> return conf + + case lookupProgram prog conf' of + Nothing -> die notFound + Just configuredProg -> return (configuredProg, conf') + + where notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found." + + +-- | Check that a program is configured and available to be run. +-- +-- Additionally check that the program version number is suitable and return +-- it. For example you could require 'AnyVersion' or @'orLaterVersion' +-- ('Version' [1,0] [])@ +-- +-- It returns the configured program, its version number and a possibly updated +-- 'ProgramDb'. If the program could not be configured or the version is +-- unsuitable, it returns an error value. 
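+--
+-- (Editorial example, not part of the upstream module.)  A caller that wants
+-- to warn rather than die on failure might do:
+--
+-- > res <- lookupProgramVersion verbosity hscolourProgram
+-- >          (orLaterVersion (Version [1,8] [])) programDb
+-- > case res of
+-- >   Left err           -> warn verbosity err
+-- >   Right (prog, v, _) -> info verbosity (programId prog ++ " " ++ display v)
+--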
+-- +lookupProgramVersion + :: Verbosity -> Program -> VersionRange -> ProgramDb + -> IO (Either String (ConfiguredProgram, Version, ProgramDb)) +lookupProgramVersion verbosity prog range programDb = do + + -- If it's not already been configured, try to configure it now + programDb' <- case lookupProgram prog programDb of + Nothing -> configureProgram verbosity prog programDb + Just _ -> return programDb + + case lookupProgram prog programDb' of + Nothing -> return $! Left notFound + Just configuredProg@ConfiguredProgram { programLocation = location } -> + case programVersion configuredProg of + Just version + | withinRange version range -> + return $! Right (configuredProg, version ,programDb') + | otherwise -> + return $! Left (badVersion version location) + Nothing -> + return $! Left (unknownVersion location) + + where notFound = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but it could not be found." + badVersion v l = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but the version found at " + ++ locationPath l ++ " is version " ++ display v + unknownVersion l = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but the version of " + ++ locationPath l ++ " could not be determined." + versionRequirement + | isAnyVersion range = "" + | otherwise = " version " ++ display range + +-- | Like 'lookupProgramVersion', but raises an exception in case of error +-- instead of returning 'Left errMsg'. +-- +requireProgramVersion :: Verbosity -> Program -> VersionRange + -> ProgramDb + -> IO (ConfiguredProgram, Version, ProgramDb) +requireProgramVersion verbosity prog range programDb = + join $ either die return `fmap` + lookupProgramVersion verbosity prog range programDb diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Find.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Find.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Find.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Find.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,185 @@ +{-# LANGUAGE CPP, DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Find +-- Copyright : Duncan Coutts 2013 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A somewhat extended notion of the normal program search path concept. +-- +-- Usually when finding executables we just want to look in the usual places +-- using the OS's usual method for doing so. In Haskell the normal OS-specific +-- method is captured by 'findExecutable'. On all common OSs that makes use of +-- a @PATH@ environment variable, (though on Windows it is not just the @PATH@). +-- +-- However it is sometimes useful to be able to look in additional locations +-- without having to change the process-global @PATH@ environment variable. +-- So we need an extension of the usual 'findExecutable' that can look in +-- additional locations, either before, after or instead of the normal OS +-- locations. 
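+--
+-- (Editorial example, not part of the upstream module; the directory is made
+-- up.)  To look in an extra tools directory before the usual OS locations:
+--
+-- > let searchpath = ProgramSearchPathDir "/opt/mytools/bin"
+-- >                    : defaultProgramSearchPath
+-- > mbFound <- findProgramOnSearchPath verbosity searchpath "happy"
+--
+-- 'findProgramOnSearchPath' returns the location it found, if any, together
+-- with the locations it tried before that.
+--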
+-- +module Distribution.Simple.Program.Find ( + -- * Program search path + ProgramSearchPath, + ProgramSearchPathEntry(..), + defaultProgramSearchPath, + findProgramOnSearchPath, + programSearchPathAsPATHVar, + getSystemSearchPath, + ) where + +import Distribution.Verbosity +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Compat.Environment +import Distribution.Compat.Binary + +import qualified System.Directory as Directory + ( findExecutable ) +import System.FilePath as FilePath + ( (), (<.>), splitSearchPath, searchPathSeparator, getSearchPath + , takeDirectory ) +import Data.List + ( nub ) +import GHC.Generics +#if defined(mingw32_HOST_OS) +import qualified System.Win32 as Win32 +#endif + +-- | A search path to use when locating executables. This is analogous +-- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use +-- the system default method for finding executables ('findExecutable' which +-- on unix is simply looking on the @$PATH@ but on win32 is a bit more +-- complicated). +-- +-- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs +-- either before, after or instead of the default, e.g. here we add an extra +-- dir to search after the usual ones. +-- +-- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir] +-- +type ProgramSearchPath = [ProgramSearchPathEntry] +data ProgramSearchPathEntry = + ProgramSearchPathDir FilePath -- ^ A specific dir + | ProgramSearchPathDefault -- ^ The system default + deriving (Eq, Generic) + +instance Binary ProgramSearchPathEntry + +defaultProgramSearchPath :: ProgramSearchPath +defaultProgramSearchPath = [ProgramSearchPathDefault] + +findProgramOnSearchPath :: Verbosity -> ProgramSearchPath + -> FilePath -> IO (Maybe (FilePath, [FilePath])) +findProgramOnSearchPath verbosity searchpath prog = do + debug verbosity $ "Searching for " ++ prog ++ " in path." + res <- tryPathElems [] searchpath + case res of + Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") + Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path) + return res + where + tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry] + -> IO (Maybe (FilePath, [FilePath])) + tryPathElems _ [] = return Nothing + tryPathElems tried (pe:pes) = do + res <- tryPathElem pe + case res of + (Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes + (Just foundat, notfoundat) -> return (Just (foundat, alltried)) + where + alltried = concat (reverse (notfoundat : tried)) + + tryPathElem :: ProgramSearchPathEntry -> IO (Maybe FilePath, [FilePath]) + tryPathElem (ProgramSearchPathDir dir) = + findFirstExe [ dir prog <.> ext | ext <- exeExtensions ] + + -- On windows, getSystemSearchPath is not guaranteed 100% correct so we + -- use findExecutable and then approximate the not-found-at locations. 
+ tryPathElem ProgramSearchPathDefault | buildOS == Windows = do + mExe <- findExecutable prog + syspath <- getSystemSearchPath + case mExe of + Nothing -> + let notfoundat = [ dir prog | dir <- syspath ] in + return (Nothing, notfoundat) + + Just foundat -> do + let founddir = takeDirectory foundat + notfoundat = [ dir prog + | dir <- takeWhile (/= founddir) syspath ] + return (Just foundat, notfoundat) + + -- On other OSs we can just do the simple thing + tryPathElem ProgramSearchPathDefault = do + dirs <- getSystemSearchPath + findFirstExe [ dir prog <.> ext | dir <- dirs, ext <- exeExtensions ] + + findFirstExe :: [FilePath] -> IO (Maybe FilePath, [FilePath]) + findFirstExe = go [] + where + go fs' [] = return (Nothing, reverse fs') + go fs' (f:fs) = do + isExe <- doesExecutableExist f + if isExe + then return (Just f, reverse fs') + else go (f:fs') fs + +-- | Interpret a 'ProgramSearchPath' to construct a new @$PATH@ env var. +-- Note that this is close but not perfect because on Windows the search +-- algorithm looks at more than just the @%PATH%@. +programSearchPathAsPATHVar :: ProgramSearchPath -> IO String +programSearchPathAsPATHVar searchpath = do + ess <- mapM getEntries searchpath + return (intercalate [searchPathSeparator] (concat ess)) + where + getEntries (ProgramSearchPathDir dir) = return [dir] + getEntries ProgramSearchPathDefault = do + env <- getEnvironment + return (maybe [] splitSearchPath (lookup "PATH" env)) + +-- | Get the system search path. On Unix systems this is just the @$PATH@ env +-- var, but on windows it's a bit more complicated. +-- +getSystemSearchPath :: IO [FilePath] +getSystemSearchPath = fmap nub $ do +#if defined(mingw32_HOST_OS) + processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE + currentdir <- Win32.getCurrentDirectory + systemdir <- Win32.getSystemDirectory + windowsdir <- Win32.getWindowsDirectory + pathdirs <- FilePath.getSearchPath + let path = processdir : currentdir + : systemdir : windowsdir + : pathdirs + return path +#else + FilePath.getSearchPath +#endif + +#ifdef MIN_VERSION_directory +#if MIN_VERSION_directory(1,2,1) +#define HAVE_directory_121 +#endif +#endif + +findExecutable :: FilePath -> IO (Maybe FilePath) +#ifdef HAVE_directory_121 +findExecutable = Directory.findExecutable +#else +findExecutable prog = do + -- With directory < 1.2.1 'findExecutable' doesn't check that the path + -- really refers to an executable. 
+ mExe <- Directory.findExecutable prog + case mExe of + Just exe -> do + exeExists <- doesExecutableExist exe + if exeExists + then return mExe + else return Nothing + _ -> return mExe +#endif + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/GHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/GHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/GHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/GHC.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,506 @@ +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Simple.Program.GHC ( + GhcOptions(..), + GhcMode(..), + GhcOptimisation(..), + GhcDynLinkMode(..), + GhcProfAuto(..), + + ghcInvocation, + renderGhcOptions, + + runGHC, + + ) where + +import Distribution.Compat.Semigroup as Semi +import Distribution.Simple.GHC.ImplInfo +import Distribution.Package +import Distribution.PackageDescription hiding (Flag) +import Distribution.ModuleName +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Setup +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.System +import Distribution.Text +import Distribution.Verbosity +import Distribution.Utils.NubList +import Language.Haskell.Extension + +import GHC.Generics (Generic) +import qualified Data.Map as M + +-- | A structured set of GHC options/flags +-- +data GhcOptions = GhcOptions { + + -- | The major mode for the ghc invocation. + ghcOptMode :: Flag GhcMode, + + -- | Any extra options to pass directly to ghc. These go at the end and hence + -- override other stuff. + ghcOptExtra :: NubListR String, + + -- | Extra default flags to pass directly to ghc. These go at the beginning + -- and so can be overridden by other stuff. + ghcOptExtraDefault :: NubListR String, + + ----------------------- + -- Inputs and outputs + + -- | The main input files; could be .hs, .hi, .c, .o, depending on mode. + ghcOptInputFiles :: NubListR FilePath, + + -- | The names of input Haskell modules, mainly for @--make@ mode. + ghcOptInputModules :: NubListR ModuleName, + + -- | Location for output file; the @ghc -o@ flag. + ghcOptOutputFile :: Flag FilePath, + + -- | Location for dynamic output file in 'GhcStaticAndDynamic' mode; + -- the @ghc -dyno@ flag. + ghcOptOutputDynFile :: Flag FilePath, + + -- | Start with an empty search path for Haskell source files; + -- the @ghc -i@ flag (@-i@ on it's own with no path argument). + ghcOptSourcePathClear :: Flag Bool, + + -- | Search path for Haskell source files; the @ghc -i@ flag. + ghcOptSourcePath :: NubListR FilePath, + + ------------- + -- Packages + + -- | The unit ID the modules will belong to; the @ghc -this-unit-id@ + -- flag (or @-this-package-key@ or @-package-name@ on older + -- versions of GHC). This is a 'String' because we assume you've + -- already figured out what the correct format for this string is + -- (we need to handle backwards compatibility.) + ghcOptThisUnitId :: Flag String, + + -- | GHC package databases to use, the @ghc -package-conf@ flag. + ghcOptPackageDBs :: PackageDBStack, + + -- | The GHC packages to use. For compatability with old and new ghc, this + -- requires both the short and long form of the package id; + -- the @ghc -package@ or @ghc -package-id@ flags. 
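+  -- Roughly speaking, each entry is rendered as @-package-id <unit-id>@ on
+  -- compilers that support package ids (with any module renaming appended),
+  -- and as @-package <pkg-id>@ on older ones.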
+ ghcOptPackages :: + NubListR (UnitId, PackageId, ModuleRenaming), + + -- | Start with a clean package set; the @ghc -hide-all-packages@ flag + ghcOptHideAllPackages :: Flag Bool, + + -- | Don't automatically link in Haskell98 etc; the @ghc + -- -no-auto-link-packages@ flag. + ghcOptNoAutoLinkPackages :: Flag Bool, + + ----------------- + -- Linker stuff + + -- | Names of libraries to link in; the @ghc -l@ flag. + ghcOptLinkLibs :: NubListR FilePath, + + -- | Search path for libraries to link in; the @ghc -L@ flag. + ghcOptLinkLibPath :: NubListR FilePath, + + -- | Options to pass through to the linker; the @ghc -optl@ flag. + ghcOptLinkOptions :: NubListR String, + + -- | OSX only: frameworks to link in; the @ghc -framework@ flag. + ghcOptLinkFrameworks :: NubListR String, + + -- | OSX only: Search path for frameworks to link in; the + -- @ghc -framework-path@ flag. + ghcOptLinkFrameworkDirs :: NubListR String, + + -- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag. + ghcOptNoLink :: Flag Bool, + + -- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@ + -- flag. + ghcOptLinkNoHsMain :: Flag Bool, + + -------------------- + -- C and CPP stuff + + -- | Options to pass through to the C compiler; the @ghc -optc@ flag. + ghcOptCcOptions :: NubListR String, + + -- | Options to pass through to CPP; the @ghc -optP@ flag. + ghcOptCppOptions :: NubListR String, + + -- | Search path for CPP includes like header files; the @ghc -I@ flag. + ghcOptCppIncludePath :: NubListR FilePath, + + -- | Extra header files to include at CPP stage; the @ghc -optP-include@ flag. + ghcOptCppIncludes :: NubListR FilePath, + + -- | Extra header files to include for old-style FFI; the @ghc -#include@ flag. + ghcOptFfiIncludes :: NubListR FilePath, + + ---------------------------- + -- Language and extensions + + -- | The base language; the @ghc -XHaskell98@ or @-XHaskell2010@ flag. + ghcOptLanguage :: Flag Language, + + -- | The language extensions; the @ghc -X@ flag. + ghcOptExtensions :: NubListR Extension, + + -- | A GHC version-dependent mapping of extensions to flags. This must be + -- set to be able to make use of the 'ghcOptExtensions'. + ghcOptExtensionMap :: M.Map Extension String, + + ---------------- + -- Compilation + + -- | What optimisation level to use; the @ghc -O@ flag. + ghcOptOptimisation :: Flag GhcOptimisation, + + -- | Emit debug info; the @ghc -g@ flag. + ghcOptDebugInfo :: Flag Bool, + + -- | Compile in profiling mode; the @ghc -prof@ flag. + ghcOptProfilingMode :: Flag Bool, + + -- | Automatically add profiling cost centers; the @ghc -fprof-auto*@ flags. + ghcOptProfilingAuto :: Flag GhcProfAuto, + + -- | Use the \"split object files\" feature; the @ghc -split-objs@ flag. + ghcOptSplitObjs :: Flag Bool, + + -- | Run N jobs simultaneously (if possible). + ghcOptNumJobs :: Flag (Maybe Int), + + -- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags. 
+ ghcOptHPCDir :: Flag FilePath, + + ---------------- + -- GHCi + + -- | Extra GHCi startup scripts; the @-ghci-script@ flag + ghcOptGHCiScripts :: NubListR FilePath, + + ------------------------ + -- Redirecting outputs + + ghcOptHiSuffix :: Flag String, + ghcOptObjSuffix :: Flag String, + ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode + ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode + ghcOptHiDir :: Flag FilePath, + ghcOptObjDir :: Flag FilePath, + ghcOptOutputDir :: Flag FilePath, + ghcOptStubDir :: Flag FilePath, + + -------------------- + -- Dynamic linking + + ghcOptDynLinkMode :: Flag GhcDynLinkMode, + ghcOptShared :: Flag Bool, + ghcOptFPic :: Flag Bool, + ghcOptDylibName :: Flag String, + ghcOptRPaths :: NubListR FilePath, + + --------------- + -- Misc flags + + -- | Get GHC to be quiet or verbose with what it's doing; the @ghc -v@ flag. + ghcOptVerbosity :: Flag Verbosity, + + -- | Let GHC know that it is Cabal that's calling it. + -- Modifies some of the GHC error messages. + ghcOptCabal :: Flag Bool + +} deriving (Show, Generic) + + +data GhcMode = GhcModeCompile -- ^ @ghc -c@ + | GhcModeLink -- ^ @ghc@ + | GhcModeMake -- ^ @ghc --make@ + | GhcModeInteractive -- ^ @ghci@ \/ @ghc --interactive@ + | GhcModeAbiHash -- ^ @ghc --abi-hash@ +-- | GhcModeDepAnalysis -- ^ @ghc -M@ +-- | GhcModeEvaluate -- ^ @ghc -e@ + deriving (Show, Eq) + +data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@ + | GhcNormalOptimisation -- ^ @-O@ + | GhcMaximumOptimisation -- ^ @-O2@ + | GhcSpecialOptimisation String -- ^ e.g. @-Odph@ + deriving (Show, Eq) + +data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@ + | GhcDynamicOnly -- ^ @-dynamic@ + | GhcStaticAndDynamic -- ^ @-static -dynamic-too@ + deriving (Show, Eq) + +data GhcProfAuto = GhcProfAutoAll -- ^ @-fprof-auto@ + | GhcProfAutoToplevel -- ^ @-fprof-auto-top@ + | GhcProfAutoExported -- ^ @-fprof-auto-exported@ + deriving (Show, Eq) + +runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> Platform -> GhcOptions + -> IO () +runGHC verbosity ghcProg comp platform opts = do + runProgramInvocation verbosity (ghcInvocation ghcProg comp platform opts) + + +ghcInvocation :: ConfiguredProgram -> Compiler -> Platform -> GhcOptions + -> ProgramInvocation +ghcInvocation prog comp platform opts = + programInvocation prog (renderGhcOptions comp platform opts) + +renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String] +renderGhcOptions comp _platform@(Platform _arch os) opts + | compilerFlavor comp `notElem` [GHC, GHCJS] = + error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " + ++ "compiler flavor must be 'GHC' or 'GHCJS'!" 
+ | otherwise = + concat + [ case flagToMaybe (ghcOptMode opts) of + Nothing -> [] + Just GhcModeCompile -> ["-c"] + Just GhcModeLink -> [] + Just GhcModeMake -> ["--make"] + Just GhcModeInteractive -> ["--interactive"] + Just GhcModeAbiHash -> ["--abi-hash"] +-- Just GhcModeDepAnalysis -> ["-M"] +-- Just GhcModeEvaluate -> ["-e", expr] + + , flags ghcOptExtraDefault + + , [ "-no-link" | flagBool ghcOptNoLink ] + + --------------- + -- Misc flags + + , maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts)) + + , [ "-fbuilding-cabal-package" | flagBool ghcOptCabal + , flagBuildingCabalPkg implInfo ] + + ---------------- + -- Compilation + + , case flagToMaybe (ghcOptOptimisation opts) of + Nothing -> [] + Just GhcNoOptimisation -> ["-O0"] + Just GhcNormalOptimisation -> ["-O"] + Just GhcMaximumOptimisation -> ["-O2"] + Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph + + , [ "-g" | flagDebugInfo implInfo && flagBool ghcOptDebugInfo ] + + , [ "-prof" | flagBool ghcOptProfilingMode ] + + , case flagToMaybe (ghcOptProfilingAuto opts) of + _ | not (flagBool ghcOptProfilingMode) + -> [] + Nothing -> [] + Just GhcProfAutoAll + | flagProfAuto implInfo -> ["-fprof-auto"] + | otherwise -> ["-auto-all"] -- not the same, but close + Just GhcProfAutoToplevel + | flagProfAuto implInfo -> ["-fprof-auto-top"] + | otherwise -> ["-auto-all"] + Just GhcProfAutoExported + | flagProfAuto implInfo -> ["-fprof-auto-exported"] + | otherwise -> ["-auto"] + + , [ "-split-objs" | flagBool ghcOptSplitObjs ] + + , case flagToMaybe (ghcOptHPCDir opts) of + Nothing -> [] + Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir] + + , if parmakeSupported comp + then case ghcOptNumJobs opts of + NoFlag -> [] + Flag n -> ["-j" ++ maybe "" show n] + else [] + + -------------------- + -- Dynamic linking + + , [ "-shared" | flagBool ghcOptShared ] + , case flagToMaybe (ghcOptDynLinkMode opts) of + Nothing -> [] + Just GhcStaticOnly -> ["-static"] + Just GhcDynamicOnly -> ["-dynamic"] + Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"] + , [ "-fPIC" | flagBool ghcOptFPic ] + + , concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ] + + ------------------------ + -- Redirecting outputs + + , concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ] + , concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ] + , concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ] + , concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ] + , concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir + , flagOutputDir implInfo ] + , concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ] + , concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ] + , concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir + , flagStubdir implInfo ] + + ----------------------- + -- Source search path + + , [ "-i" | flagBool ghcOptSourcePathClear ] + , [ "-i" ++ dir | dir <- flags ghcOptSourcePath ] + + -------------------- + -- C and CPP stuff + + , [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ] + , [ "-optP" ++ opt | opt <- flags ghcOptCppOptions ] + , concat [ [ "-optP-include", "-optP" ++ inc] + | inc <- flags ghcOptCppIncludes ] + , [ "-#include \"" ++ inc ++ "\"" + | inc <- flags ghcOptFfiIncludes, flagFfiIncludes implInfo ] + , [ "-optc" ++ opt | opt <- flags ghcOptCcOptions ] + + ----------------- + -- Linker stuff + + , [ "-optl" ++ opt | opt <- flags ghcOptLinkOptions ] + , ["-l" ++ lib | lib <- flags ghcOptLinkLibs ] + , ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ] + , if isOSX + then concat [ 
["-framework", fmwk] + | fmwk <- flags ghcOptLinkFrameworks ] + else [] + , if isOSX + then concat [ ["-framework-path", path] + | path <- flags ghcOptLinkFrameworkDirs ] + else [] + , [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ] + , [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ] + , concat [ [ "-optl-Wl,-rpath," ++ dir] + | dir <- flags ghcOptRPaths ] + + ------------- + -- Packages + + , concat [ [ case () of + _ | unitIdSupported comp -> "-this-unit-id" + | packageKeySupported comp -> "-this-package-key" + | otherwise -> "-package-name" + , this_arg ] + | this_arg <- flag ghcOptThisUnitId ] + + , [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ] + , [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ] + + , packageDbArgs implInfo (ghcOptPackageDBs opts) + + , concat $ if flagPackageId implInfo + then let space "" = "" + space xs = ' ' : xs + in [ ["-package-id", display ipkgid ++ space (display rns)] + | (ipkgid,_,rns) <- flags ghcOptPackages ] + else [ ["-package", display pkgid] + | (_,pkgid,_) <- flags ghcOptPackages ] + + ---------------------------- + -- Language and extensions + + , if supportsHaskell2010 implInfo + then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ] + else [] + + , [ case M.lookup ext (ghcOptExtensionMap opts) of + Just arg -> arg + Nothing -> error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " + ++ display ext ++ " not present in ghcOptExtensionMap." + | ext <- flags ghcOptExtensions ] + + ---------------- + -- GHCi + + , concat [ [ "-ghci-script", script ] | script <- flags ghcOptGHCiScripts + , flagGhciScript implInfo ] + + --------------- + -- Inputs + + , [ display modu | modu <- flags ghcOptInputModules ] + , flags ghcOptInputFiles + + , concat [ [ "-o", out] | out <- flag ghcOptOutputFile ] + , concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ] + + --------------- + -- Extra + + , flags ghcOptExtra + + ] + + + where + implInfo = getImplInfo comp + isOSX = os == OSX + flag flg = flagToList (flg opts) + flags flg = fromNubListR . flg $ opts + flagBool flg = fromFlagOrDefault False (flg opts) + +verbosityOpts :: Verbosity -> [String] +verbosityOpts verbosity + | verbosity >= deafening = ["-v"] + | verbosity >= normal = [] + | otherwise = ["-w", "-v0"] + + +-- | GHC <7.6 uses '-package-conf' instead of '-package-db'. +packageDbArgsConf :: PackageDBStack -> [String] +packageDbArgsConf dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs + (GlobalPackageDB:dbs) -> ("-no-user-package-conf") + : concatMap specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = [ "-package-conf", db ] + specific _ = ierror + ierror = error $ "internal error: unexpected package db stack: " + ++ show dbstack + +-- | GHC >= 7.6 uses the '-package-db' flag. See +-- https://ghc.haskell.org/trac/ghc/ticket/5977. 
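+--
+-- For example (database paths illustrative):
+--
+-- > packageDbArgsDb [GlobalPackageDB, UserPackageDB, SpecificPackageDB "pkg.db"]
+-- >   == ["-package-db", "pkg.db"]
+-- > packageDbArgsDb [GlobalPackageDB, SpecificPackageDB "pkg.db"]
+-- >   == ["-no-user-package-db", "-package-db", "pkg.db"]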
+packageDbArgsDb :: PackageDBStack -> [String] +-- special cases to make arguments prettier in common scenarios +packageDbArgsDb dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) + | all isSpecific dbs -> concatMap single dbs + (GlobalPackageDB:dbs) + | all isSpecific dbs -> "-no-user-package-db" + : concatMap single dbs + dbs -> "-clear-package-db" + : concatMap single dbs + where + single (SpecificPackageDB db) = [ "-package-db", db ] + single GlobalPackageDB = [ "-global-package-db" ] + single UserPackageDB = [ "-user-package-db" ] + isSpecific (SpecificPackageDB _) = True + isSpecific _ = False + +packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String] +packageDbArgs implInfo + | flagPackageConf implInfo = packageDbArgsConf + | otherwise = packageDbArgsDb + +-- ----------------------------------------------------------------------------- +-- Boilerplate Monoid instance for GhcOptions + +instance Monoid GhcOptions where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup GhcOptions where + (<>) = gmappend diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/HcPkg.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/HcPkg.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/HcPkg.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/HcPkg.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,481 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.HcPkg +-- Copyright : Duncan Coutts 2009, 2013 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hc-pkg@ program. +-- Currently only GHC, GHCJS and LHC have hc-pkg programs. + +module Distribution.Simple.Program.HcPkg ( + HcPkgInfo(..), + + init, + invoke, + register, + reregister, + registerMultiInstance, + unregister, + recache, + expose, + hide, + dump, + describe, + list, + + -- * Program invocations + initInvocation, + registerInvocation, + reregisterInvocation, + registerMultiInstanceInvocation, + unregisterInvocation, + recacheInvocation, + exposeInvocation, + hideInvocation, + dumpInvocation, + describeInvocation, + listInvocation, + ) where + +import Distribution.Package hiding (installedUnitId) +import Distribution.InstalledPackageInfo +import Distribution.ParseUtils +import Distribution.Simple.Compiler +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Compat.Exception + +import Prelude hiding (init) +import Data.Char + ( isSpace ) +import Data.List + ( stripPrefix ) +import System.FilePath as FilePath + ( (), (<.>) + , splitPath, splitDirectories, joinPath, isPathSeparator ) +import qualified System.FilePath.Posix as FilePath.Posix + +-- | Information about the features and capabilities of an @hc-pkg@ +-- program. 
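+--
+-- A hypothetical entry for a reasonably recent @ghc-pkg@ might look like
+-- this (every field value here is illustrative rather than probed):
+--
+-- > HcPkgInfo { hcPkgProgram         = ghcPkgProg
+-- >           , noPkgDbStack         = False
+-- >           , noVerboseFlag        = False
+-- >           , flagPackageConf      = False
+-- >           , supportsDirDbs       = True
+-- >           , requiresDirDbs       = True
+-- >           , nativeMultiInstance  = True
+-- >           , recacheMultiInstance = True }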
+-- +data HcPkgInfo = HcPkgInfo + { hcPkgProgram :: ConfiguredProgram + , noPkgDbStack :: Bool -- ^ no package DB stack supported + , noVerboseFlag :: Bool -- ^ hc-pkg does not support verbosity flags + , flagPackageConf :: Bool -- ^ use package-conf option instead of package-db + , supportsDirDbs :: Bool -- ^ supports directory style package databases + , requiresDirDbs :: Bool -- ^ requires directory style package databases + , nativeMultiInstance :: Bool -- ^ supports --enable-multi-instance flag + , recacheMultiInstance :: Bool -- ^ supports multi-instance via recache + } + +-- | Call @hc-pkg@ to initialise a package database at the location {path}. +-- +-- > hc-pkg init {path} +-- +init :: HcPkgInfo -> Verbosity -> Bool -> FilePath -> IO () +init hpi verbosity preferCompat path + | not (supportsDirDbs hpi) + || (not (requiresDirDbs hpi) && preferCompat) + = writeFile path "[]" + + | otherwise + = runProgramInvocation verbosity (initInvocation hpi verbosity path) + +-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the +-- provided command-line arguments to it. +invoke :: HcPkgInfo -> Verbosity -> PackageDBStack -> [String] -> IO () +invoke hpi verbosity dbStack extraArgs = + runProgramInvocation verbosity invocation + where + args = packageDbStackOpts hpi dbStack ++ extraArgs + invocation = programInvocation (hcPkgProgram hpi) args + +-- | Call @hc-pkg@ to register a package. +-- +-- > hc-pkg register {filename | -} [--user | --global | --package-db] +-- +register :: HcPkgInfo -> Verbosity -> PackageDBStack + -> Either FilePath + InstalledPackageInfo + -> IO () +register hpi verbosity packagedb pkgFile = + runProgramInvocation verbosity + (registerInvocation hpi verbosity packagedb pkgFile) + + +-- | Call @hc-pkg@ to re-register a package. +-- +-- > hc-pkg register {filename | -} [--user | --global | --package-db] +-- +reregister :: HcPkgInfo -> Verbosity -> PackageDBStack + -> Either FilePath + InstalledPackageInfo + -> IO () +reregister hpi verbosity packagedb pkgFile = + runProgramInvocation verbosity + (reregisterInvocation hpi verbosity packagedb pkgFile) + +registerMultiInstance :: HcPkgInfo -> Verbosity + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerMultiInstance hpi verbosity packagedbs pkgInfo + | nativeMultiInstance hpi + = runProgramInvocation verbosity + (registerMultiInstanceInvocation hpi verbosity packagedbs (Right pkgInfo)) + + -- This is a trick. Older versions of GHC do not support the + -- --enable-multi-instance flag for ghc-pkg register but it turns out that + -- the same ability is available by using ghc-pkg recache. The recache + -- command is there to support distro package managers that like to work + -- by just installing files and running update commands, rather than + -- special add/remove commands. So the way to register by this method is + -- to write the package registration file directly into the package db and + -- then call hc-pkg recache. + -- + | recacheMultiInstance hpi + = do let pkgdb = last packagedbs + writeRegistrationFileDirectly hpi pkgdb pkgInfo + recache hpi verbosity pkgdb + + | otherwise + = die $ "HcPkg.registerMultiInstance: the compiler does not support " + ++ "registering multiple instances of packages." 
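+
+-- For example (a sketch: @hpi@, @verbosity@ and @ipi@ stand for the obvious
+-- arguments, and the @package-db@ flag spelling is assumed), on a compiler
+-- that only offers the recache route
+--
+-- > registerMultiInstance hpi verbosity
+-- >   [GlobalPackageDB, SpecificPackageDB "dist/package.conf.d"] ipi
+--
+-- writes @<unit-id>.conf@ into @dist/package.conf.d@ and then runs the
+-- equivalent of @hc-pkg recache --package-db=dist/package.conf.d@.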
+ +writeRegistrationFileDirectly :: HcPkgInfo + -> PackageDB + -> InstalledPackageInfo + -> IO () +writeRegistrationFileDirectly hpi (SpecificPackageDB dir) pkgInfo + | supportsDirDbs hpi + = do let pkgfile = dir display (installedUnitId pkgInfo) <.> "conf" + writeUTF8File pkgfile (showInstalledPackageInfo pkgInfo) + + | otherwise + = die $ "HcPkg.writeRegistrationFileDirectly: compiler does not support dir style package dbs" + +writeRegistrationFileDirectly _ _ _ = + -- We don't know here what the dir for the global or user dbs are, + -- if that's needed it'll require a bit more plumbing to support. + die $ "HcPkg.writeRegistrationFileDirectly: only supports SpecificPackageDB for now" + + +-- | Call @hc-pkg@ to unregister a package +-- +-- > hc-pkg unregister [pkgid] [--user | --global | --package-db] +-- +unregister :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +unregister hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (unregisterInvocation hpi verbosity packagedb pkgid) + + +-- | Call @hc-pkg@ to recache the registered packages. +-- +-- > hc-pkg recache [--user | --global | --package-db] +-- +recache :: HcPkgInfo -> Verbosity -> PackageDB -> IO () +recache hpi verbosity packagedb = + runProgramInvocation verbosity + (recacheInvocation hpi verbosity packagedb) + + +-- | Call @hc-pkg@ to expose a package. +-- +-- > hc-pkg expose [pkgid] [--user | --global | --package-db] +-- +expose :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +expose hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (exposeInvocation hpi verbosity packagedb pkgid) + +-- | Call @hc-pkg@ to retrieve a specific package +-- +-- > hc-pkg describe [pkgid] [--user | --global | --package-db] +-- +describe :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId -> IO [InstalledPackageInfo] +describe hpi verbosity packagedb pid = do + + output <- getProgramInvocationOutput verbosity + (describeInvocation hpi verbosity packagedb pid) + `catchIO` \_ -> return "" + + case parsePackages output of + Left ok -> return ok + _ -> die $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " describe " ++ display pid ++ "'" + +-- | Call @hc-pkg@ to hide a package. +-- +-- > hc-pkg hide [pkgid] [--user | --global | --package-db] +-- +hide :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +hide hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (hideInvocation hpi verbosity packagedb pkgid) + + +-- | Call @hc-pkg@ to get all the details of all the packages in the given +-- package database. +-- +dump :: HcPkgInfo -> Verbosity -> PackageDB -> IO [InstalledPackageInfo] +dump hpi verbosity packagedb = do + + output <- getProgramInvocationOutput verbosity + (dumpInvocation hpi verbosity packagedb) + `catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " dump failed" + + case parsePackages output of + Left ok -> return ok + _ -> die $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " dump'" + +parsePackages :: String -> Either [InstalledPackageInfo] [PError] +parsePackages str = + let parsed = map parseInstalledPackageInfo' (splitPkgs str) + in case [ msg | ParseFailed msg <- parsed ] of + [] -> Left [ setUnitId + . maybe id mungePackagePaths (pkgRoot pkg) + $ pkg + | ParseOk _ pkg <- parsed ] + msgs -> Right msgs + where + parseInstalledPackageInfo' = + parseFieldsFlat fieldsInstalledPackageInfo emptyInstalledPackageInfo + +--TODO: this could be a lot faster. 
We're doing normaliseLineEndings twice +-- and converting back and forth with lines/unlines. +splitPkgs :: String -> [String] +splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines + where + -- Handle the case of there being no packages at all. + checkEmpty [s] | all isSpace s = [] + checkEmpty ss = ss + + splitWith :: (a -> Bool) -> [a] -> [[a]] + splitWith p xs = ys : case zs of + [] -> [] + _:ws -> splitWith p ws + where (ys,zs) = break p xs + +mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo +-- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec +-- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html) +-- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}. +-- The "pkgroot" is the directory containing the package database. +mungePackagePaths pkgroot pkginfo = + pkginfo { + importDirs = mungePaths (importDirs pkginfo), + includeDirs = mungePaths (includeDirs pkginfo), + libraryDirs = mungePaths (libraryDirs pkginfo), + frameworkDirs = mungePaths (frameworkDirs pkginfo), + haddockInterfaces = mungePaths (haddockInterfaces pkginfo), + haddockHTMLs = mungeUrls (haddockHTMLs pkginfo) + } + where + mungePaths = map mungePath + mungeUrls = map mungeUrl + + mungePath p = case stripVarPrefix "${pkgroot}" p of + Just p' -> pkgroot p' + Nothing -> p + + mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of + Just p' -> toUrlPath pkgroot p' + Nothing -> p + + toUrlPath r p = "file:///" + -- URLs always use posix style '/' separators: + ++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p) + + stripVarPrefix var p = + case splitPath p of + (root:path') -> case stripPrefix var root of + Just [sep] | isPathSeparator sep -> Just (joinPath path') + _ -> Nothing + _ -> Nothing + + +-- Older installed package info files did not have the installedUnitId +-- field, so if it is missing then we fill it as the source package ID. +setUnitId :: InstalledPackageInfo -> InstalledPackageInfo +setUnitId pkginfo@InstalledPackageInfo { + installedUnitId = SimpleUnitId (ComponentId ""), + sourcePackageId = pkgid + } + = pkginfo { + installedUnitId = mkLegacyUnitId pkgid + } +setUnitId pkginfo = pkginfo + + +-- | Call @hc-pkg@ to get the source package Id of all the packages in the +-- given package database. +-- +-- This is much less information than with 'dump', but also rather quicker. +-- Note in particular that it does not include the 'UnitId', just +-- the source 'PackageId' which is not necessarily unique in any package db. +-- +list :: HcPkgInfo -> Verbosity -> PackageDB + -> IO [PackageId] +list hpi verbosity packagedb = do + + output <- getProgramInvocationOutput verbosity + (listInvocation hpi verbosity packagedb) + `catchIO` \_ -> die $ programId (hcPkgProgram hpi) ++ " list failed" + + case parsePackageIds output of + Just ok -> return ok + _ -> die $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " list'" + + where + parsePackageIds = sequence . map simpleParse . 
words + +-------------------------- +-- The program invocations +-- + +initInvocation :: HcPkgInfo -> Verbosity -> FilePath -> ProgramInvocation +initInvocation hpi verbosity path = + programInvocation (hcPkgProgram hpi) args + where + args = ["init", path] + ++ verbosityOpts hpi verbosity + +registerInvocation, reregisterInvocation, registerMultiInstanceInvocation + :: HcPkgInfo -> Verbosity -> PackageDBStack + -> Either FilePath InstalledPackageInfo + -> ProgramInvocation +registerInvocation = registerInvocation' "register" False +reregisterInvocation = registerInvocation' "update" False +registerMultiInstanceInvocation = registerInvocation' "update" True + +registerInvocation' :: String -> Bool + -> HcPkgInfo -> Verbosity -> PackageDBStack + -> Either FilePath InstalledPackageInfo + -> ProgramInvocation +registerInvocation' cmdname multiInstance hpi + verbosity packagedbs pkgFileOrInfo = + case pkgFileOrInfo of + Left pkgFile -> + programInvocation (hcPkgProgram hpi) (args pkgFile) + + Right pkgInfo -> + (programInvocation (hcPkgProgram hpi) (args "-")) { + progInvokeInput = Just (showInstalledPackageInfo pkgInfo), + progInvokeInputEncoding = IOEncodingUTF8 + } + where + args file = [cmdname, file] + ++ (if noPkgDbStack hpi + then [packageDbOpts hpi (last packagedbs)] + else packageDbStackOpts hpi packagedbs) + ++ [ "--enable-multi-instance" | multiInstance ] + ++ verbosityOpts hpi verbosity + +unregisterInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +unregisterInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["unregister", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + + +recacheInvocation :: HcPkgInfo -> Verbosity -> PackageDB + -> ProgramInvocation +recacheInvocation hpi verbosity packagedb = + programInvocation (hcPkgProgram hpi) $ + ["recache", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi verbosity + + +exposeInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +exposeInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["expose", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + +describeInvocation :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId + -> ProgramInvocation +describeInvocation hpi verbosity packagedbs pkgid = + programInvocation (hcPkgProgram hpi) $ + ["describe", display pkgid] + ++ (if noPkgDbStack hpi + then [packageDbOpts hpi (last packagedbs)] + else packageDbStackOpts hpi packagedbs) + ++ verbosityOpts hpi verbosity + +hideInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +hideInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["hide", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + + +dumpInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation +dumpInvocation hpi _verbosity packagedb = + (programInvocation (hcPkgProgram hpi) args) { + progInvokeOutputEncoding = IOEncodingUTF8 + } + where + args = ["dump", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi silent + -- We use verbosity level 'silent' because it is important that we + -- do not contaminate the output with info/debug messages. 
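+
+-- As a concrete illustration of how these invocations assemble their flags
+-- (assuming hc-pkg supports a db stack, a verbosity flag and the
+-- @package-db@ spelling; the file path is illustrative): registering over
+-- the stack of global, user plus one specific db, at verbosity 'deafening',
+-- produces roughly
+--
+-- > ["register", "/tmp/pkg.conf", "--global", "--user",
+-- >  "--package-db=dist/package.conf.d", "-v2"]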
+ +listInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation +listInvocation hpi _verbosity packagedb = + (programInvocation (hcPkgProgram hpi) args) { + progInvokeOutputEncoding = IOEncodingUTF8 + } + where + args = ["list", "--simple-output", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi silent + -- We use verbosity level 'silent' because it is important that we + -- do not contaminate the output with info/debug messages. + + +packageDbStackOpts :: HcPkgInfo -> PackageDBStack -> [String] +packageDbStackOpts hpi dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> "--global" + : "--user" + : map specific dbs + (GlobalPackageDB:dbs) -> "--global" + : ("--no-user-" ++ packageDbFlag hpi) + : map specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db + specific _ = ierror + ierror :: a + ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) + +packageDbFlag :: HcPkgInfo -> String +packageDbFlag hpi + | flagPackageConf hpi + = "package-conf" + | otherwise + = "package-db" + +packageDbOpts :: HcPkgInfo -> PackageDB -> String +packageDbOpts _ GlobalPackageDB = "--global" +packageDbOpts _ UserPackageDB = "--user" +packageDbOpts hpi (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db + +verbosityOpts :: HcPkgInfo -> Verbosity -> [String] +verbosityOpts hpi v + | noVerboseFlag hpi + = [] + | v >= deafening = ["-v2"] + | v == silent = ["-v0"] + | otherwise = [] + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Hpc.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Hpc.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Hpc.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,99 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Hpc +-- Copyright : Thomas Tuegel 2011 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hpc@ program. + +module Distribution.Simple.Program.Hpc + ( markup + , union + ) where + +import Distribution.ModuleName +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Types +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Version + +-- | Invoke hpc with the given parameters. +-- +-- Prior to HPC version 0.7 (packaged with GHC 7.8), hpc did not handle +-- multiple .mix paths correctly, so we print a warning, and only pass it the +-- first path in the list. This means that e.g. test suites that import their +-- library as a dependency can still work, but those that include the library +-- modules directly (in other-modules) don't. 
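+--
+-- A typical call looks like (paths and names illustrative):
+--
+-- > markup hpcProg hpcVer verbosity "dist/hpc/tix/spec/spec.tix"
+-- >        ["dist/hpc/mix/foo-0.1"] "dist/hpc/html/spec" excludedModules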
+markup :: ConfiguredProgram + -> Version + -> Verbosity + -> FilePath -- ^ Path to .tix file + -> [FilePath] -- ^ Paths to .mix file directories + -> FilePath -- ^ Path where html output should be located + -> [ModuleName] -- ^ List of modules to exclude from report + -> IO () +markup hpc hpcVer verbosity tixFile hpcDirs destDir excluded = do + hpcDirs' <- if withinRange hpcVer (orLaterVersion version07) + then return hpcDirs + else do + warn verbosity $ "Your version of HPC (" ++ display hpcVer + ++ ") does not properly handle multiple search paths. " + ++ "Coverage report generation may fail unexpectedly. These " + ++ "issues are addressed in version 0.7 or later (GHC 7.8 or " + ++ "later)." + ++ if null droppedDirs + then "" + else " The following search paths have been abandoned: " + ++ show droppedDirs + return passedDirs + + runProgramInvocation verbosity + (markupInvocation hpc tixFile hpcDirs' destDir excluded) + where + version07 = Version [0, 7] [] + (passedDirs, droppedDirs) = splitAt 1 hpcDirs + +markupInvocation :: ConfiguredProgram + -> FilePath -- ^ Path to .tix file + -> [FilePath] -- ^ Paths to .mix file directories + -> FilePath -- ^ Path where html output should be + -- located + -> [ModuleName] -- ^ List of modules to exclude from + -- report + -> ProgramInvocation +markupInvocation hpc tixFile hpcDirs destDir excluded = + let args = [ "markup", tixFile + , "--destdir=" ++ destDir + ] + ++ map ("--hpcdir=" ++) hpcDirs + ++ ["--exclude=" ++ display moduleName + | moduleName <- excluded ] + in programInvocation hpc args + +union :: ConfiguredProgram + -> Verbosity + -> [FilePath] -- ^ Paths to .tix files + -> FilePath -- ^ Path to resultant .tix file + -> [ModuleName] -- ^ List of modules to exclude from union + -> IO () +union hpc verbosity tixFiles outFile excluded = + runProgramInvocation verbosity + (unionInvocation hpc tixFiles outFile excluded) + +unionInvocation :: ConfiguredProgram + -> [FilePath] -- ^ Paths to .tix files + -> FilePath -- ^ Path to resultant .tix file + -> [ModuleName] -- ^ List of modules to exclude from union + -> ProgramInvocation +unionInvocation hpc tixFiles outFile excluded = + programInvocation hpc $ concat + [ ["sum", "--union"] + , tixFiles + , ["--output=" ++ outFile] + , ["--exclude=" ++ display moduleName + | moduleName <- excluded ] + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Internal.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,46 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Internal +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Internal utilities used by Distribution.Simple.Program.*. + +module Distribution.Simple.Program.Internal ( + stripExtractVersion, + ) where + +import Data.Char (isDigit) +import Data.List (isPrefixOf, isSuffixOf) + +-- | Extract the version number from the output of 'strip --version'. +-- +-- Invoking "strip --version" gives very inconsistent results. 
We ignore +-- everything in parentheses (see #2497), look for the first word that starts +-- with a number, and try parsing out the first two components of it. Non-GNU +-- 'strip' doesn't appear to have a version flag. +stripExtractVersion :: String -> String +stripExtractVersion str = + let numeric "" = False + numeric (x:_) = isDigit x + + -- Filter out everything in parentheses. + filterPar' :: Int -> [String] -> [String] + filterPar' _ [] = [] + filterPar' n (x:xs) + | n >= 0 && "(" `isPrefixOf` x = filterPar' (n+1) ((tail x):xs) + | n > 0 && ")" `isSuffixOf` x = filterPar' (n-1) xs + | n > 0 = filterPar' n xs + | otherwise = x:filterPar' n xs + + filterPar = filterPar' 0 + + in case dropWhile (not . numeric) (filterPar . words $ str) of + (ver:_) -> + -- take the first two version components + let isDot = (== '.') + (major, rest) = break isDot ver + minor = takeWhile isDigit (dropWhile isDot rest) + in major ++ "." ++ minor + _ -> "" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ld.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ld.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ld.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Ld.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,62 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Ld +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @ld@ linker program. + +module Distribution.Simple.Program.Ld ( + combineObjectFiles, + ) where + +import Distribution.Simple.Program.Types + ( ConfiguredProgram(..) ) +import Distribution.Simple.Program.Run + ( programInvocation, multiStageProgramInvocation + , runProgramInvocation ) +import Distribution.Verbosity + ( Verbosity ) + +import System.Directory + ( renameFile ) +import System.FilePath + ( (<.>) ) + +-- | Call @ld -r@ to link a bunch of object files together. +-- +combineObjectFiles :: Verbosity -> ConfiguredProgram + -> FilePath -> [FilePath] -> IO () +combineObjectFiles verbosity ld target files = + + -- Unlike "ar", the "ld" tool is not designed to be used with xargs. That is, + -- if we have more object files than fit on a single command line then we + -- have a slight problem. What we have to do is link files in batches into + -- a temp object file and then include that one in the next batch. 
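+  --
+  -- For instance, with three batches the effect is roughly:
+  --
+  -- >   ld -r -o target            <batch 1>
+  -- >   mv target target.tmp
+  -- >   ld -r -o target target.tmp <batch 2>
+  -- >   mv target target.tmp
+  -- >   ld -r -o target target.tmp <batch 3>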
+ + let simpleArgs = ["-r", "-o", target] + + initialArgs = ["-r", "-o", target] + middleArgs = ["-r", "-o", target, tmpfile] + finalArgs = middleArgs + + simple = programInvocation ld simpleArgs + initial = programInvocation ld initialArgs + middle = programInvocation ld middleArgs + final = programInvocation ld finalArgs + + invocations = multiStageProgramInvocation + simple (initial, middle, final) files + + in run invocations + + where + tmpfile = target <.> "tmp" -- perhaps should use a proper temp file + + run [] = return () + run [inv] = runProgramInvocation verbosity inv + run (inv:invs) = do runProgramInvocation verbosity inv + renameFile target tmpfile + run invs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Run.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Run.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Run.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Run.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,252 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Run +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides a data type for program invocations and functions to +-- run them. + +module Distribution.Simple.Program.Run ( + ProgramInvocation(..), + IOEncoding(..), + emptyProgramInvocation, + simpleProgramInvocation, + programInvocation, + multiStageProgramInvocation, + + runProgramInvocation, + getProgramInvocationOutput, + + getEffectiveEnvironment, + ) where + +import Distribution.Simple.Program.Types +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Compat.Environment + +import Data.List + ( foldl', unfoldr ) +import qualified Data.Map as Map +import Control.Monad + ( when ) +import System.Exit + ( ExitCode(..), exitWith ) + +-- | Represents a specific invocation of a specific program. +-- +-- This is used as an intermediate type between deciding how to call a program +-- and actually doing it. This provides the opportunity to the caller to +-- adjust how the program will be called. These invocations can either be run +-- directly or turned into shell or batch scripts. 
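+--
+-- For example (a sketch; @verbosity@ is assumed to be in scope):
+--
+-- > runProgramInvocation verbosity
+-- >   (simpleProgramInvocation "ghc" ["--numeric-version"])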
+-- +data ProgramInvocation = ProgramInvocation { + progInvokePath :: FilePath, + progInvokeArgs :: [String], + progInvokeEnv :: [(String, Maybe String)], + progInvokeCwd :: Maybe FilePath, + progInvokeInput :: Maybe String, + progInvokeInputEncoding :: IOEncoding, + progInvokeOutputEncoding :: IOEncoding + } + +data IOEncoding = IOEncodingText -- locale mode text + | IOEncodingUTF8 -- always utf8 + +emptyProgramInvocation :: ProgramInvocation +emptyProgramInvocation = + ProgramInvocation { + progInvokePath = "", + progInvokeArgs = [], + progInvokeEnv = [], + progInvokeCwd = Nothing, + progInvokeInput = Nothing, + progInvokeInputEncoding = IOEncodingText, + progInvokeOutputEncoding = IOEncodingText + } + +simpleProgramInvocation :: FilePath -> [String] -> ProgramInvocation +simpleProgramInvocation path args = + emptyProgramInvocation { + progInvokePath = path, + progInvokeArgs = args + } + +programInvocation :: ConfiguredProgram -> [String] -> ProgramInvocation +programInvocation prog args = + emptyProgramInvocation { + progInvokePath = programPath prog, + progInvokeArgs = programDefaultArgs prog + ++ args + ++ programOverrideArgs prog, + progInvokeEnv = programOverrideEnv prog + } + + +runProgramInvocation :: Verbosity -> ProgramInvocation -> IO () +runProgramInvocation verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = [], + progInvokeCwd = Nothing, + progInvokeInput = Nothing + } = + rawSystemExit verbosity path args + +runProgramInvocation verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokeCwd = mcwd, + progInvokeInput = Nothing + } = do + menv <- getEffectiveEnvironment envOverrides + exitCode <- rawSystemIOWithEnv verbosity + path args + mcwd menv + Nothing Nothing Nothing + when (exitCode /= ExitSuccess) $ + exitWith exitCode + +runProgramInvocation verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokeCwd = mcwd, + progInvokeInput = Just inputStr, + progInvokeInputEncoding = encoding + } = do + menv <- getEffectiveEnvironment envOverrides + (_, errors, exitCode) <- rawSystemStdInOut verbosity + path args + mcwd menv + (Just input) True + when (exitCode /= ExitSuccess) $ + die $ "'" ++ path ++ "' exited with an error:\n" ++ errors + where + input = case encoding of + IOEncodingText -> (inputStr, False) + IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for + -- utf8 + + +getProgramInvocationOutput :: Verbosity -> ProgramInvocation -> IO String +getProgramInvocationOutput verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokeCwd = mcwd, + progInvokeInput = minputStr, + progInvokeOutputEncoding = encoding + } = do + let utf8 = case encoding of IOEncodingUTF8 -> True; _ -> False + decode | utf8 = fromUTF8 . normaliseLineEndings + | otherwise = id + menv <- getEffectiveEnvironment envOverrides + (output, errors, exitCode) <- rawSystemStdInOut verbosity + path args + mcwd menv + input utf8 + when (exitCode /= ExitSuccess) $ + die $ "'" ++ path ++ "' exited with an error:\n" ++ errors + return (decode output) + where + input = + case minputStr of + Nothing -> Nothing + Just inputStr -> Just $ + case encoding of + IOEncodingText -> (inputStr, False) + IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for utf8 + + +-- | Return the current environment extended with the given overrides. 
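+--
+-- For example (variable names illustrative):
+--
+-- > getEffectiveEnvironment [("GHC_PACKAGE_PATH", Nothing), ("LANG", Just "C")]
+--
+-- removes @GHC_PACKAGE_PATH@ from, and sets @LANG@ in, a copy of the current
+-- environment; an empty list of overrides yields 'Nothing', i.e. the child
+-- process simply inherits the environment unchanged.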
+-- +getEffectiveEnvironment :: [(String, Maybe String)] + -> IO (Maybe [(String, String)]) +getEffectiveEnvironment [] = return Nothing +getEffectiveEnvironment overrides = + fmap (Just . Map.toList . apply overrides . Map.fromList) getEnvironment + where + apply os env = foldl' (flip update) env os + update (var, Nothing) = Map.delete var + update (var, Just val) = Map.insert var val + +-- | Like the unix xargs program. Useful for when we've got very long command +-- lines that might overflow an OS limit on command line length and so you +-- need to invoke a command multiple times to get all the args in. +-- +-- It takes four template invocations corresponding to the simple, initial, +-- middle and last invocations. If the number of args given is small enough +-- that we can get away with just a single invocation then the simple one is +-- used: +-- +-- > $ simple args +-- +-- If the number of args given means that we need to use multiple invocations +-- then the templates for the initial, middle and last invocations are used: +-- +-- > $ initial args_0 +-- > $ middle args_1 +-- > $ middle args_2 +-- > ... +-- > $ final args_n +-- +multiStageProgramInvocation + :: ProgramInvocation + -> (ProgramInvocation, ProgramInvocation, ProgramInvocation) + -> [String] + -> [ProgramInvocation] +multiStageProgramInvocation simple (initial, middle, final) args = + + let argSize inv = length (progInvokePath inv) + + foldl' (\s a -> length a + 1 + s) 1 (progInvokeArgs inv) + fixedArgSize = maximum (map argSize [simple, initial, middle, final]) + chunkSize = maxCommandLineSize - fixedArgSize + + in case splitChunks chunkSize args of + [] -> [ simple ] + + [c] -> [ simple `appendArgs` c ] + + [c,c'] -> [ initial `appendArgs` c ] + ++ [ final `appendArgs` c'] + + (c:cs) -> [ initial `appendArgs` c ] + ++ [ middle `appendArgs` c'| c' <- init cs ] + ++ [ final `appendArgs` c'| let c' = last cs ] + + where + inv `appendArgs` as = inv { progInvokeArgs = progInvokeArgs inv ++ as } + + splitChunks len = unfoldr $ \s -> + if null s then Nothing + else Just (chunk len s) + + chunk len (s:_) | length s >= len = error toolong + chunk len ss = chunk' [] len ss + + chunk' acc _ [] = (reverse acc,[]) + chunk' acc len (s:ss) + | len' < len = chunk' (s:acc) (len-len'-1) ss + | otherwise = (reverse acc, s:ss) + where len' = length s + + toolong = "multiStageProgramInvocation: a single program arg is larger " + ++ "than the maximum command line length!" + + +--FIXME: discover this at configure time or runtime on unix +-- The value is 32k on Windows and posix specifies a minimum of 4k +-- but all sensible unixes use more than 4k. 
+-- we could use getSysVar ArgumentLimit but that's in the unix lib +-- +maxCommandLineSize :: Int +maxCommandLineSize = 30 * 1024 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Script.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Script.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Script.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Script.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,108 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Script +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hc-pkg@ program. +-- Currently only GHC and LHC have hc-pkg programs. + +module Distribution.Simple.Program.Script ( + + invocationAsSystemScript, + invocationAsShellScript, + invocationAsBatchFile, + ) where + +import Distribution.Simple.Program.Run +import Distribution.System + +import Data.Maybe + ( maybeToList ) + +-- | Generate a system script, either POSIX shell script or Windows batch file +-- as appropriate for the given system. +-- +invocationAsSystemScript :: OS -> ProgramInvocation -> String +invocationAsSystemScript Windows = invocationAsBatchFile +invocationAsSystemScript _ = invocationAsShellScript + + +-- | Generate a POSIX shell script that invokes a program. +-- +invocationAsShellScript :: ProgramInvocation -> String +invocationAsShellScript + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envExtra, + progInvokeCwd = mcwd, + progInvokeInput = minput + } = unlines $ + [ "#!/bin/sh" ] + ++ concatMap setEnv envExtra + ++ [ "cd " ++ quote cwd | cwd <- maybeToList mcwd ] + ++ [ (case minput of + Nothing -> "" + Just input -> "echo " ++ quote input ++ " | ") + ++ unwords (map quote $ path : args) ++ " \"$@\""] + + where + setEnv (var, Nothing) = ["unset " ++ var, "export " ++ var] + setEnv (var, Just val) = ["export " ++ var ++ "=" ++ quote val] + + quote :: String -> String + quote s = "'" ++ escape s ++ "'" + + escape [] = [] + escape ('\'':cs) = "'\\''" ++ escape cs + escape (c :cs) = c : escape cs + + +-- | Generate a Windows batch file that invokes a program. 
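+--
+-- For a simple invocation (no environment overrides, working directory or
+-- input) the generated file consists of an "@echo off" line followed by the
+-- command line, e.g. (path illustrative):
+--
+-- > @echo off
+-- > C:\ghc\bin\ghc.exe --numeric-version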
+-- +invocationAsBatchFile :: ProgramInvocation -> String +invocationAsBatchFile + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envExtra, + progInvokeCwd = mcwd, + progInvokeInput = minput + } = unlines $ + [ "@echo off" ] + ++ map setEnv envExtra + ++ [ "cd \"" ++ cwd ++ "\"" | cwd <- maybeToList mcwd ] + ++ case minput of + Nothing -> + [ path ++ concatMap (' ':) args ] + + Just input -> + [ "(" ] + ++ [ "echo " ++ escape line | line <- lines input ] + ++ [ ") | " + ++ "\"" ++ path ++ "\"" + ++ concatMap (\arg -> ' ':quote arg) args ] + + where + setEnv (var, Nothing) = "set " ++ var ++ "=" + setEnv (var, Just val) = "set " ++ var ++ "=" ++ escape val + + quote :: String -> String + quote s = "\"" ++ escapeQ s ++ "\"" + + escapeQ [] = [] + escapeQ ('"':cs) = "\"\"\"" ++ escapeQ cs + escapeQ (c :cs) = c : escapeQ cs + + escape [] = [] + escape ('|':cs) = "^|" ++ escape cs + escape ('<':cs) = "^<" ++ escape cs + escape ('>':cs) = "^>" ++ escape cs + escape ('&':cs) = "^&" ++ escape cs + escape ('(':cs) = "^(" ++ escape cs + escape (')':cs) = "^)" ++ escape cs + escape ('^':cs) = "^^" ++ escape cs + escape (c :cs) = c : escape cs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Strip.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Strip.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Strip.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Strip.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,70 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Strip +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @strip@ program. + +module Distribution.Simple.Program.Strip (stripLib, stripExe) + where + +import Distribution.Simple.Program +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Verbosity +import Distribution.Version + +import Control.Monad (unless) +import System.FilePath (takeBaseName) + +runStrip :: Verbosity -> ProgramConfiguration -> FilePath -> [String] -> IO () +runStrip verbosity progConf path args = + case lookupProgram stripProgram progConf of + Just strip -> rawSystemProgram verbosity strip (path:args) + Nothing -> unless (buildOS == Windows) $ + -- Don't bother warning on windows, we don't expect them to + -- have the strip program anyway. + warn verbosity $ "Unable to strip executable or library '" + ++ (takeBaseName path) + ++ "' (missing the 'strip' program)" + +stripExe :: Verbosity -> Platform -> ProgramConfiguration -> FilePath -> IO () +stripExe verbosity (Platform _arch os) conf path = + runStrip verbosity conf path args + where + args = case os of + OSX -> ["-x"] -- By default, stripping the ghc binary on at least + -- some OS X installations causes: + -- HSbase-3.0.o: unknown symbol `_environ'" + -- The -x flag fixes that. + _ -> [] + +stripLib :: Verbosity -> Platform -> ProgramConfiguration -> FilePath -> IO () +stripLib verbosity (Platform arch os) conf path = do + case os of + OSX -> -- '--strip-unneeded' is not supported on OS X, iOS, AIX, or + -- Solaris. See #1630. 
+ return () + IOS -> return () + AIX -> return () + Solaris -> return () + Windows -> -- Stripping triggers a bug in 'strip.exe' for + -- libraries with lots identically named modules. See + -- #1784. + return() + Linux | arch == I386 -> + -- Versions of 'strip' on 32-bit Linux older than 2.18 are + -- broken. See #2339. + let okVersion = orLaterVersion (Version [2,18] []) + in case programVersion =<< lookupProgram stripProgram conf of + Just v | withinRange v okVersion -> + runStrip verbosity conf path args + _ -> warn verbosity $ "Unable to strip library '" + ++ (takeBaseName path) + ++ "' (version of 'strip' too old; " + ++ "requires >= 2.18 on 32-bit Linux)" + _ -> runStrip verbosity conf path args + where + args = ["--strip-unneeded"] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program/Types.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,175 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Types +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides an abstraction which deals with configuring and running +-- programs. A 'Program' is a static notion of a known program. A +-- 'ConfiguredProgram' is a 'Program' that has been found on the current +-- machine and is ready to be run (possibly with some user-supplied default +-- args). Configuring a program involves finding its location and if necessary +-- finding its version. There's reasonable default behavior for trying to find +-- \"foo\" in PATH, being able to override its location, etc. +-- +module Distribution.Simple.Program.Types ( + -- * Program and functions for constructing them + Program(..), + ProgramSearchPath, + ProgramSearchPathEntry(..), + simpleProgram, + + -- * Configured program and related functions + ConfiguredProgram(..), + programPath, + suppressOverrideArgs, + ProgArg, + ProgramLocation(..), + simpleConfiguredProgram, + ) where + +import Distribution.Simple.Program.Find +import Distribution.Version +import Distribution.Verbosity +import Distribution.Compat.Binary + +import qualified Data.Map as Map +import GHC.Generics (Generic) + +-- | Represents a program which can be configured. +-- +-- Note: rather than constructing this directly, start with 'simpleProgram' and +-- override any extra fields. +-- +data Program = Program { + -- | The simple name of the program, eg. ghc + programName :: String, + + -- | A function to search for the program if its location was not + -- specified by the user. Usually this will just be a call to + -- 'findProgramOnSearchPath'. + -- + -- It is supplied with the prevailing search path which will typically + -- just be used as-is, but can be extended or ignored as needed. + -- + -- For the purpose of change monitoring, in addition to the location + -- where the program was found, it returns all the other places that + -- were tried. 
+ -- + programFindLocation :: Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])), + + -- | Try to find the version of the program. For many programs this is + -- not possible or is not necessary so it's OK to return Nothing. + programFindVersion :: Verbosity -> FilePath -> IO (Maybe Version), + + -- | A function to do any additional configuration after we have + -- located the program (and perhaps identified its version). For example + -- it could add args, or environment vars. + programPostConf :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + } + +type ProgArg = String + +-- | Represents a program which has been configured and is thus ready to be run. +-- +-- These are usually made by configuring a 'Program', but if you have to +-- construct one directly then start with 'simpleConfiguredProgram' and +-- override any extra fields. +-- +data ConfiguredProgram = ConfiguredProgram { + -- | Just the name again + programId :: String, + + -- | The version of this program, if it is known. + programVersion :: Maybe Version, + + -- | Default command-line args for this program. + -- These flags will appear first on the command line, so they can be + -- overridden by subsequent flags. + programDefaultArgs :: [String], + + -- | Override command-line args for this program. + -- These flags will appear last on the command line, so they override + -- all earlier flags. + programOverrideArgs :: [String], + + -- | Override environment variables for this program. + -- These env vars will extend\/override the prevailing environment of + -- the current to form the environment for the new process. + programOverrideEnv :: [(String, Maybe String)], + + -- | A key-value map listing various properties of the program, useful + -- for feature detection. Populated during the configuration step, key + -- names depend on the specific program. + programProperties :: Map.Map String String, + + -- | Location of the program. eg. @\/usr\/bin\/ghc-6.4@ + programLocation :: ProgramLocation, + + -- | In addition to the 'programLocation' where the program was found, + -- these are additional locations that were looked at. The combination + -- of ths found location and these not-found locations can be used to + -- monitor to detect when the re-configuring the program might give a + -- different result (e.g. found in a different location). + -- + programMonitorFiles :: [FilePath] + } + deriving (Eq, Generic, Read, Show) + +instance Binary ConfiguredProgram + +-- | Where a program was found. Also tells us whether it's specified by user or +-- not. This includes not just the path, but the program as well. +data ProgramLocation + = UserSpecified { locationPath :: FilePath } + -- ^The user gave the path to this program, + -- eg. --ghc-path=\/usr\/bin\/ghc-6.6 + | FoundOnSystem { locationPath :: FilePath } + -- ^The program was found automatically. + deriving (Eq, Generic, Read, Show) + +instance Binary ProgramLocation + +-- | The full path of a configured program. +programPath :: ConfiguredProgram -> FilePath +programPath = locationPath . programLocation + +-- | Suppress any extra arguments added by the user. +suppressOverrideArgs :: ConfiguredProgram -> ConfiguredProgram +suppressOverrideArgs prog = prog { programOverrideArgs = [] } + +-- | Make a simple named program. +-- +-- By default we'll just search for it in the path and not try to find the +-- version name. You can override these behaviours if necessary, eg: +-- +-- > simpleProgram "foo" { programFindLocation = ... , programFindVersion ... 
} +-- +simpleProgram :: String -> Program +simpleProgram name = Program { + programName = name, + programFindLocation = \v p -> findProgramOnSearchPath v p name, + programFindVersion = \_ _ -> return Nothing, + programPostConf = \_ p -> return p + } + +-- | Make a simple 'ConfiguredProgram'. +-- +-- > simpleConfiguredProgram "foo" (FoundOnSystem path) +-- +simpleConfiguredProgram :: String -> ProgramLocation -> ConfiguredProgram +simpleConfiguredProgram name loc = ConfiguredProgram { + programId = name, + programVersion = Nothing, + programDefaultArgs = [], + programOverrideArgs = [], + programOverrideEnv = [], + programProperties = Map.empty, + programLocation = loc, + programMonitorFiles = [] -- did not look in any other locations + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Program.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Program.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,224 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides an abstraction which deals with configuring and running +-- programs. A 'Program' is a static notion of a known program. A +-- 'ConfiguredProgram' is a 'Program' that has been found on the current +-- machine and is ready to be run (possibly with some user-supplied default +-- args). Configuring a program involves finding its location and if necessary +-- finding its version. There is also a 'ProgramConfiguration' type which holds +-- configured and not-yet configured programs. It is the parameter to lots of +-- actions elsewhere in Cabal that need to look up and run programs. If we had +-- a Cabal monad, the 'ProgramConfiguration' would probably be a reader or +-- state component of it. +-- +-- The module also defines all the known built-in 'Program's and the +-- 'defaultProgramConfiguration' which contains them all. +-- +-- One nice thing about using it is that any program that is +-- registered with Cabal will get some \"configure\" and \".cabal\" +-- helpers like --with-foo-args --foo-path= and extra-foo-args. +-- +-- There's also good default behavior for trying to find \"foo\" in +-- PATH, being able to override its location, etc. +-- +-- There's also a hook for adding programs in a Setup.lhs script. See +-- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a +-- hook user the ability to get the above flags and such so that they +-- don't have to write all the PATH logic inside Setup.lhs. + +module Distribution.Simple.Program ( + -- * Program and functions for constructing them + Program(..) + , ProgramSearchPath + , ProgramSearchPathEntry(..) + , simpleProgram + , findProgramOnSearchPath + , defaultProgramSearchPath + , findProgramVersion + + -- * Configured program and related functions + , ConfiguredProgram(..) + , programPath + , ProgArg + , ProgramLocation(..) + , runProgram + , getProgramOutput + , suppressOverrideArgs + + -- * Program invocations + , ProgramInvocation(..) 
+ , emptyProgramInvocation + , simpleProgramInvocation + , programInvocation + , runProgramInvocation + , getProgramInvocationOutput + + -- * The collection of unconfigured and configured programs + , builtinPrograms + + -- * The collection of configured programs we can run + , ProgramConfiguration + , emptyProgramConfiguration + , defaultProgramConfiguration + , restoreProgramConfiguration + , addKnownProgram + , addKnownPrograms + , lookupKnownProgram + , knownPrograms + , getProgramSearchPath + , setProgramSearchPath + , userSpecifyPath + , userSpecifyPaths + , userMaybeSpecifyPath + , userSpecifyArgs + , userSpecifyArgss + , userSpecifiedArgs + , lookupProgram + , lookupProgramVersion + , updateProgram + , configureProgram + , configureAllKnownPrograms + , reconfigurePrograms + , requireProgram + , requireProgramVersion + , runDbProgram + , getDbProgramOutput + + -- * Programs that Cabal knows about + , ghcProgram + , ghcPkgProgram + , ghcjsProgram + , ghcjsPkgProgram + , lhcProgram + , lhcPkgProgram + , hmakeProgram + , jhcProgram + , uhcProgram + , gccProgram + , arProgram + , stripProgram + , happyProgram + , alexProgram + , hsc2hsProgram + , c2hsProgram + , cpphsProgram + , hscolourProgram + , haddockProgram + , greencardProgram + , ldProgram + , tarProgram + , cppProgram + , pkgConfigProgram + , hpcProgram + + -- * deprecated + , rawSystemProgram + , rawSystemProgramStdout + , rawSystemProgramConf + , rawSystemProgramStdoutConf + , findProgramOnPath + , findProgramLocation + + ) where + +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Db +import Distribution.Simple.Program.Builtin +import Distribution.Simple.Program.Find +import Distribution.Simple.Utils +import Distribution.Verbosity + + +-- | Runs the given configured program. +-- +runProgram :: Verbosity -- ^Verbosity + -> ConfiguredProgram -- ^The program to run + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO () +runProgram verbosity prog args = + runProgramInvocation verbosity (programInvocation prog args) + + +-- | Runs the given configured program and gets the output. +-- +getProgramOutput :: Verbosity -- ^Verbosity + -> ConfiguredProgram -- ^The program to run + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO String +getProgramOutput verbosity prog args = + getProgramInvocationOutput verbosity (programInvocation prog args) + + +-- | Looks up the given program in the program database and runs it. +-- +runDbProgram :: Verbosity -- ^verbosity + -> Program -- ^The program to run + -> ProgramDb -- ^look up the program here + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO () +runDbProgram verbosity prog programDb args = + case lookupProgram prog programDb of + Nothing -> die notFound + Just configuredProg -> runProgram verbosity configuredProg args + where + notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found" + +-- | Looks up the given program in the program database and runs it. 
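+--
+-- (Editorial sketch, not part of the upstream source: assuming a 'Verbosity'
+-- @verbosity@ and a configured 'ProgramDb' @progDb@ are in scope, the
+-- database-driven helpers are used like
+--
+-- > runDbProgram verbosity ghcPkgProgram progDb ["list"]
+-- > out <- getDbProgramOutput verbosity ghcPkgProgram progDb ["list"]
+--
+-- the first simply runs @ghc-pkg list@, the second captures its output.)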
+-- +getDbProgramOutput :: Verbosity -- ^verbosity + -> Program -- ^The program to run + -> ProgramDb -- ^look up the program here + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO String +getDbProgramOutput verbosity prog programDb args = + case lookupProgram prog programDb of + Nothing -> die notFound + Just configuredProg -> getProgramOutput verbosity configuredProg args + where + notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found" + + +--------------------- +-- Deprecated aliases +-- + +rawSystemProgram :: Verbosity -> ConfiguredProgram + -> [ProgArg] -> IO () +rawSystemProgram = runProgram + +rawSystemProgramStdout :: Verbosity -> ConfiguredProgram + -> [ProgArg] -> IO String +rawSystemProgramStdout = getProgramOutput + +rawSystemProgramConf :: Verbosity -> Program -> ProgramConfiguration + -> [ProgArg] -> IO () +rawSystemProgramConf = runDbProgram + +rawSystemProgramStdoutConf :: Verbosity -> Program -> ProgramConfiguration + -> [ProgArg] -> IO String +rawSystemProgramStdoutConf = getDbProgramOutput + +type ProgramConfiguration = ProgramDb + +emptyProgramConfiguration, defaultProgramConfiguration :: ProgramConfiguration +emptyProgramConfiguration = emptyProgramDb +defaultProgramConfiguration = defaultProgramDb + +restoreProgramConfiguration :: [Program] -> ProgramConfiguration + -> ProgramConfiguration +restoreProgramConfiguration = restoreProgramDb + +{-# DEPRECATED findProgramOnPath "use findProgramOnSearchPath instead" #-} +findProgramOnPath :: String -> Verbosity -> IO (Maybe FilePath) +findProgramOnPath name verbosity = + fmap (fmap fst) $ + findProgramOnSearchPath verbosity defaultProgramSearchPath name diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Register.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Register.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Register.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Register.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,473 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Register +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module deals with registering and unregistering packages. There are a +-- couple ways it can do this, one is to do it directly. Another is to generate +-- a script that can be run later to do it. The idea here being that the user +-- is shielded from the details of what command to use for package registration +-- for a particular compiler. In practice this aspect was not especially +-- popular so we also provide a way to simply generate the package registration +-- file which then must be manually passed to @ghc-pkg@. It is possible to +-- generate registration information for where the package is to be installed, +-- or alternatively to register the package in place in the build tree. The +-- latter is occasionally handy, and will become more important when we try to +-- build multi-package systems. +-- +-- This module does not delegate anything to the per-compiler modules but just +-- mixes it all in in this module, which is rather unsatisfactory. The script +-- generation and the unregister feature are not well used or tested. 
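+--
+-- (Editorial sketch, not part of the upstream source: these entry points are
+-- normally reached through the Setup command-line interface rather than
+-- called directly, e.g.
+--
+-- > runghc Setup.hs configure
+-- > runghc Setup.hs build
+-- > runghc Setup.hs register --gen-pkg-config=mypkg.conf
+--
+-- where the last step writes the registration file that, as described above,
+-- must then be passed manually to @ghc-pkg register@. The file name
+-- @mypkg.conf@ is only an illustration.)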
+ +module Distribution.Simple.Register ( + register, + unregister, + + initPackageDB, + doesPackageDBExist, + createPackageDB, + deletePackageDB, + + invokeHcPkg, + registerPackage, + generateRegistrationInfo, + inplaceInstalledPackageInfo, + absoluteInstalledPackageInfo, + generalInstalledPackageInfo, + ) where + +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.LHC as LHC +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite + +import Distribution.Simple.Compiler +import Distribution.Simple.Program +import Distribution.Simple.Program.Script +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import Distribution.Simple.Setup +import Distribution.PackageDescription +import Distribution.Package +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Text +import Distribution.Verbosity as Verbosity + +import System.FilePath ((), (<.>), isAbsolute) +import System.Directory + ( getCurrentDirectory, removeDirectoryRecursive, removeFile + , doesDirectoryExist, doesFileExist ) + +import Data.Version +import Control.Monad (when) +import Data.Maybe + ( isJust, fromMaybe, maybeToList ) +import Data.List + ( partition, nub ) +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + +-- ----------------------------------------------------------------------------- +-- Registration + +register :: PackageDescription -> LocalBuildInfo + -> RegisterFlags -- ^Install in the user's database?; verbose + -> IO () +register pkg@PackageDescription { library = Just lib } lbi regFlags + = do + let clbi = getComponentLocalBuildInfo lbi CLibName + + absPackageDBs <- absolutePackageDBPaths packageDbs + installedPkgInfo <- generateRegistrationInfo + verbosity pkg lib lbi clbi inplace reloc distPref + (registrationPackageDB absPackageDBs) + + when (fromFlag (regPrintId regFlags)) $ do + putStrLn (display (IPI.installedUnitId installedPkgInfo)) + + -- Three different modes: + case () of + _ | modeGenerateRegFile -> writeRegistrationFile installedPkgInfo + | modeGenerateRegScript -> writeRegisterScript installedPkgInfo + | otherwise -> do + setupMessage verbosity "Registering" (packageId pkg) + registerPackage verbosity (compiler lbi) (withPrograms lbi) False + packageDbs installedPkgInfo + + where + modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags)) + regFile = fromMaybe (display (packageId pkg) <.> "conf") + (fromFlag (regGenPkgConf regFlags)) + + modeGenerateRegScript = fromFlag (regGenScript regFlags) + + inplace = fromFlag (regInPlace regFlags) + reloc = relocatable lbi + -- FIXME: there's really no guarantee this will work. + -- registering into a totally different db stack can + -- fail if dependencies cannot be satisfied. 
+ packageDbs = nub $ withPackageDB lbi + ++ maybeToList (flagToMaybe (regPackageDB regFlags)) + distPref = fromFlag (regDistPref regFlags) + verbosity = fromFlag (regVerbosity regFlags) + + writeRegistrationFile installedPkgInfo = do + notice verbosity ("Creating package registration file: " ++ regFile) + writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo) + + writeRegisterScript installedPkgInfo = + case compilerFlavor (compiler lbi) of + JHC -> notice verbosity "Registration scripts not needed for jhc" + UHC -> notice verbosity "Registration scripts not needed for uhc" + _ -> withHcPkg + "Registration scripts are not implemented for this compiler" + (compiler lbi) (withPrograms lbi) + (writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs) + +register _ _ regFlags = notice verbosity "No package to register" + where + verbosity = fromFlag (regVerbosity regFlags) + + +generateRegistrationInfo :: Verbosity + -> PackageDescription + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Bool + -> Bool + -> FilePath + -> PackageDB + -> IO InstalledPackageInfo +generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do + --TODO: eliminate pwd! + pwd <- getCurrentDirectory + + --TODO: the method of setting the UnitId is compiler specific + -- this aspect should be delegated to a per-compiler helper. + let comp = compiler lbi + abi_hash <- + case compilerFlavor comp of + GHC | compilerVersion comp >= Version [6,11] [] -> do + fmap AbiHash $ GHC.libAbiHash verbosity pkg lbi lib clbi + GHCJS -> do + fmap AbiHash $ GHCJS.libAbiHash verbosity pkg lbi lib clbi + _ -> return (AbiHash "") + + installedPkgInfo <- + if inplace + then return (inplaceInstalledPackageInfo pwd distPref + pkg abi_hash lib lbi clbi) + else if reloc + then relocRegistrationInfo verbosity + pkg lib lbi clbi abi_hash packageDb + else return (absoluteInstalledPackageInfo + pkg abi_hash lib lbi clbi) + + + return installedPkgInfo{ IPI.abiHash = abi_hash } + +relocRegistrationInfo :: Verbosity + -> PackageDescription + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> AbiHash + -> PackageDB + -> IO InstalledPackageInfo +relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb = + case (compilerFlavor (compiler lbi)) of + GHC -> do fs <- GHC.pkgRoot verbosity lbi packageDb + return (relocatableInstalledPackageInfo + pkg abi_hash lib lbi clbi fs) + _ -> die "Distribution.Simple.Register.relocRegistrationInfo: \ + \not implemented for this compiler" + +initPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> FilePath -> IO () +initPackageDB verbosity comp progdb dbPath = + createPackageDB verbosity comp progdb False dbPath + +-- | Create an empty package DB at the specified location. 
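+--
+-- (Editorial usage sketch, not part of the upstream source: 'initPackageDB'
+-- above simply delegates here, so a typical call looks like
+--
+-- > initPackageDB verbosity (compiler lbi) (withPrograms lbi) "dist/package.conf.d"
+--
+-- where @verbosity@, @lbi@ and the database path are assumed to be supplied
+-- by the caller.)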
+createPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> Bool + -> FilePath -> IO () +createPackageDB verbosity comp progdb preferCompat dbPath = + case compilerFlavor comp of + GHC -> HcPkg.init (GHC.hcPkgInfo progdb) verbosity preferCompat dbPath + GHCJS -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath + LHC -> HcPkg.init (LHC.hcPkgInfo progdb) verbosity False dbPath + UHC -> return () + HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath + _ -> die $ "Distribution.Simple.Register.createPackageDB: " + ++ "not implemented for this compiler" + +doesPackageDBExist :: FilePath -> IO Bool +doesPackageDBExist dbPath = do + -- currently one impl for all compiler flavours, but could change if needed + dir_exists <- doesDirectoryExist dbPath + if dir_exists + then return True + else doesFileExist dbPath + +deletePackageDB :: FilePath -> IO () +deletePackageDB dbPath = do + -- currently one impl for all compiler flavours, but could change if needed + dir_exists <- doesDirectoryExist dbPath + if dir_exists + then removeDirectoryRecursive dbPath + else do file_exists <- doesFileExist dbPath + when file_exists $ removeFile dbPath + +-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the +-- provided command-line arguments to it. +invokeHcPkg :: Verbosity -> Compiler -> ProgramConfiguration -> PackageDBStack + -> [String] -> IO () +invokeHcPkg verbosity comp conf dbStack extraArgs = + withHcPkg "invokeHcPkg" comp conf + (\hpi -> HcPkg.invoke hpi verbosity dbStack extraArgs) + +withHcPkg :: String -> Compiler -> ProgramConfiguration + -> (HcPkg.HcPkgInfo -> IO a) -> IO a +withHcPkg name comp conf f = + case compilerFlavor comp of + GHC -> f (GHC.hcPkgInfo conf) + GHCJS -> f (GHCJS.hcPkgInfo conf) + LHC -> f (LHC.hcPkgInfo conf) + _ -> die ("Distribution.Simple.Register." 
++ name ++ ":\ + \not implemented for this compiler") + +registerPackage :: Verbosity + -> Compiler + -> ProgramConfiguration + -> Bool + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity comp progdb multiInstance packageDbs installedPkgInfo = + case compilerFlavor comp of + GHC -> GHC.registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo + GHCJS -> GHCJS.registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo + _ | multiInstance + -> die "Registering multiple package instances is not yet supported for this compiler" + LHC -> LHC.registerPackage verbosity progdb packageDbs installedPkgInfo + UHC -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo + JHC -> notice verbosity "Registering for jhc (nothing to do)" + HaskellSuite {} -> + HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo + _ -> die "Registering is not implemented for this compiler" + +writeHcPkgRegisterScript :: Verbosity + -> InstalledPackageInfo + -> PackageDBStack + -> HcPkg.HcPkgInfo + -> IO () +writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs hpi = do + let invocation = HcPkg.reregisterInvocation hpi Verbosity.normal + packageDbs (Right installedPkgInfo) + regScript = invocationAsSystemScript buildOS invocation + + notice verbosity ("Creating package registration script: " ++ regScriptFileName) + writeUTF8File regScriptFileName regScript + setFileExecutable regScriptFileName + +regScriptFileName :: FilePath +regScriptFileName = case buildOS of + Windows -> "register.bat" + _ -> "register.sh" + + +-- ----------------------------------------------------------------------------- +-- Making the InstalledPackageInfo + +-- | Construct 'InstalledPackageInfo' for a library in a package, given a set +-- of installation directories. +-- +generalInstalledPackageInfo + :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to + -- absolute paths. + -> PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstallDirs FilePath + -> InstalledPackageInfo +generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs = + IPI.InstalledPackageInfo { + IPI.sourcePackageId = packageId pkg, + IPI.installedUnitId = componentUnitId clbi, + IPI.compatPackageKey = componentCompatPackageKey clbi, + IPI.license = license pkg, + IPI.copyright = copyright pkg, + IPI.maintainer = maintainer pkg, + IPI.author = author pkg, + IPI.stability = stability pkg, + IPI.homepage = homepage pkg, + IPI.pkgUrl = pkgUrl pkg, + IPI.synopsis = synopsis pkg, + IPI.description = description pkg, + IPI.category = category pkg, + IPI.abiHash = abi_hash, + IPI.exposed = libExposed lib, + IPI.exposedModules = componentExposedModules clbi, + IPI.hiddenModules = otherModules bi, + IPI.trusted = IPI.trusted IPI.emptyInstalledPackageInfo, + IPI.importDirs = [ libdir installDirs | hasModules ], + -- Note. the libsubdir and datasubdir templates have already been expanded + -- into libdir and datadir. + IPI.libraryDirs = libdirs, + IPI.libraryDynDirs = dynlibdirs, + IPI.dataDir = datadir installDirs, + IPI.hsLibraries = if hasLibrary + then [getHSLibraryName (componentUnitId clbi)] + else [], + IPI.extraLibraries = extraLibs bi, + IPI.extraGHCiLibraries = extraGHCiLibs bi, + IPI.includeDirs = absinc ++ adjustRelIncDirs relinc, + IPI.includes = includes bi, + IPI.depends = map fst (componentPackageDeps clbi), + IPI.ccOptions = [], -- Note. NOT ccOptions bi! 
+ -- We don't want cc-options to be propagated + -- to C compilations in other packages. + IPI.ldOptions = ldOptions bi, + IPI.frameworks = frameworks bi, + IPI.frameworkDirs = extraFrameworkDirs bi, + IPI.haddockInterfaces = [haddockdir installDirs haddockName pkg], + IPI.haddockHTMLs = [htmldir installDirs], + IPI.pkgRoot = Nothing + } + where + bi = libBuildInfo lib + (absinc, relinc) = partition isAbsolute (includeDirs bi) + hasModules = not $ null (libModules lib) + comp = compiler lbi + hasLibrary = hasModules || not (null (cSources bi)) + || (not (null (jsSources bi)) && + compilerFlavor comp == GHCJS) + (libdirs, dynlibdirs) + | not hasLibrary + = (extraLibDirs bi, []) + -- the dynamic-library-dirs defaults to the library-dirs if not specified, + -- so this works whether the dynamic-library-dirs field is supported or not + + | libraryDynDirSupported comp + = (libdir installDirs : extraLibDirs bi, + dynlibdir installDirs : extraLibDirs bi) + + | otherwise + = (libdir installDirs : dynlibdir installDirs : extraLibDirs bi, []) + -- the compiler doesn't understand the dynamic-library-dirs field so we + -- add the dyn directory to the "normal" list in the library-dirs field + +-- | Construct 'InstalledPackageInfo' for a library that is in place in the +-- build tree. +-- +-- This function knows about the layout of in place packages. +-- +inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree + -> FilePath -- ^ location of the dist tree + -> PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstalledPackageInfo +inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi = + generalInstalledPackageInfo adjustRelativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + adjustRelativeIncludeDirs = map (inplaceDir ) + libTargetDir + | componentUnitId clbi == localUnitId lbi = buildDir lbi + | otherwise = buildDir lbi display (componentUnitId clbi) + installDirs = + (absoluteInstallDirs pkg lbi NoCopyDest) { + libdir = inplaceDir libTargetDir, + dynlibdir = inplaceDir libTargetDir, + datadir = inplaceDir dataDir pkg, + docdir = inplaceDocdir, + htmldir = inplaceHtmldir, + haddockdir = inplaceHtmldir + } + inplaceDocdir = inplaceDir distPref "doc" + inplaceHtmldir = inplaceDocdir "html" display (packageName pkg) + + +-- | Construct 'InstalledPackageInfo' for the final install location of a +-- library package. +-- +-- This function knows about the layout of installed packages. +-- +absoluteInstalledPackageInfo :: PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstalledPackageInfo +absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi = + generalInstalledPackageInfo adjustReativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + -- For installed packages we install all include files into one dir, + -- whereas in the build tree they may live in multiple local dirs. 
+ adjustReativeIncludeDirs _ + | null (installIncludes bi) = [] + | otherwise = [includedir installDirs] + bi = libBuildInfo lib + installDirs = absoluteInstallDirs pkg lbi NoCopyDest + + +relocatableInstalledPackageInfo :: PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> FilePath + -> InstalledPackageInfo +relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot = + generalInstalledPackageInfo adjustReativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + -- For installed packages we install all include files into one dir, + -- whereas in the build tree they may live in multiple local dirs. + adjustReativeIncludeDirs _ + | null (installIncludes bi) = [] + | otherwise = [includedir installDirs] + bi = libBuildInfo lib + + installDirs = fmap (("${pkgroot}" ) . shortRelativePath pkgroot) + $ absoluteInstallDirs pkg lbi NoCopyDest + +-- ----------------------------------------------------------------------------- +-- Unregistration + +unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO () +unregister pkg lbi regFlags = do + let pkgid = packageId pkg + genScript = fromFlag (regGenScript regFlags) + verbosity = fromFlag (regVerbosity regFlags) + packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi)) + (regPackageDB regFlags) + unreg hpi = + let invocation = HcPkg.unregisterInvocation + hpi Verbosity.normal packageDb pkgid + in if genScript + then writeFileAtomic unregScriptFileName + (BS.Char8.pack $ invocationAsSystemScript buildOS invocation) + else runProgramInvocation verbosity invocation + setupMessage verbosity "Unregistering" pkgid + withHcPkg "unregistering is only implemented for GHC and GHCJS" + (compiler lbi) (withPrograms lbi) unreg + +unregScriptFileName :: FilePath +unregScriptFileName = case buildOS of + Windows -> "unregister.bat" + _ -> "unregister.sh" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Setup.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,2130 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Setup +-- Copyright : Isaac Jones 2003-2004 +-- Duncan Coutts 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is a big module, but not very complicated. The code is very regular +-- and repetitive. It defines the command line interface for all the Cabal +-- commands. For each command (like @configure@, @build@ etc) it defines a type +-- that holds all the flags, the default set of flags and a 'CommandUI' that +-- maps command line flags to and from the corresponding flags type. +-- +-- All the flags types are instances of 'Monoid', see +-- +-- for an explanation. +-- +-- The types defined here get used in the front end and especially in +-- @cabal-install@ which has to do quite a bit of manipulating sets of command +-- line flags. +-- +-- This is actually relatively nice, it works quite well. 
The main change it +-- needs is to unify it with the code for managing sets of fields that can be +-- read and written from files. This would allow us to save configure flags in +-- config files. + +module Distribution.Simple.Setup ( + + GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand, + ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand, + configPrograms, + AllowNewer(..), AllowNewerDep(..), isAllowNewer, + configAbsolutePaths, readPackageDbList, showPackageDbList, + CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand, + InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand, + HaddockTarget(..), haddockTargetFromFlag, + HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand, + HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand, + BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand, + buildVerbose, + ReplFlags(..), defaultReplFlags, replCommand, + CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand, + RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand, + unregisterCommand, + SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand, + TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand, + TestShowDetails(..), + BenchmarkFlags(..), emptyBenchmarkFlags, + defaultBenchmarkFlags, benchmarkCommand, + CopyDest(..), + configureArgs, configureOptions, configureCCompiler, configureLinker, + buildOptions, haddockOptions, installDirsOptions, + programConfigurationOptions, programConfigurationPaths', + splitArgs, + + defaultDistPref, optionDistPref, + + Flag(..), + toFlag, + fromFlag, + fromFlagOrDefault, + flagToMaybe, + flagToList, + boolOpt, boolOpt', trueArg, falseArg, + optionVerbosity, optionNumJobs, readPToMaybe ) where + +import Distribution.Compiler +import Distribution.ReadE +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import Distribution.Package +import Distribution.PackageDescription hiding (Flag) +import Distribution.Simple.Command hiding (boolOpt, boolOpt') +import qualified Distribution.Simple.Command as Command +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Utils +import Distribution.Simple.Program +import Distribution.Simple.InstallDirs +import Distribution.Verbosity +import Distribution.Utils.NubList +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup as Semi + +import Control.Applicative as A ( Applicative(..), (<*) ) +import Control.Monad ( liftM ) +import Data.List ( sort ) +import Data.Maybe ( listToMaybe ) +import Data.Char ( isSpace, isAlpha ) +import GHC.Generics ( Generic ) + +-- FIXME Not sure where this should live +defaultDistPref :: FilePath +defaultDistPref = "dist" + +-- ------------------------------------------------------------ +-- * Flag type +-- ------------------------------------------------------------ + +-- | All flags are monoids, they come in two flavours: +-- +-- 1. list flags eg +-- +-- > --ghc-option=foo --ghc-option=bar +-- +-- gives us all the values ["foo", "bar"] +-- +-- 2. singular value flags, eg: +-- +-- > --enable-foo --disable-foo +-- +-- gives us Just False +-- So this Flag type is for the latter singular kind of flag. +-- Its monoid instance gives us the behaviour where it starts out as +-- 'NoFlag' and later flags override earlier ones. 
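+--
+-- As a small editorial illustration (not part of the upstream source) of the
+-- "later flags override earlier ones" behaviour, using the instances defined
+-- just below:
+--
+-- > mconcat [NoFlag, Flag False, Flag True]   -- evaluates to Flag True
+-- > fromFlagOrDefault True NoFlag             -- evaluates to True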
+-- +data Flag a = Flag a | NoFlag deriving (Eq, Generic, Show, Read) + +instance Binary a => Binary (Flag a) + +instance Functor Flag where + fmap f (Flag x) = Flag (f x) + fmap _ NoFlag = NoFlag + +instance Monoid (Flag a) where + mempty = NoFlag + mappend = (Semi.<>) + +instance Semigroup (Flag a) where + _ <> f@(Flag _) = f + f <> NoFlag = f + +instance Bounded a => Bounded (Flag a) where + minBound = toFlag minBound + maxBound = toFlag maxBound + +instance Enum a => Enum (Flag a) where + fromEnum = fromEnum . fromFlag + toEnum = toFlag . toEnum + enumFrom (Flag a) = map toFlag . enumFrom $ a + enumFrom _ = [] + enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b + enumFromThen _ _ = [] + enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b + enumFromTo _ _ = [] + enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c + enumFromThenTo _ _ _ = [] + +toFlag :: a -> Flag a +toFlag = Flag + +fromFlag :: Flag a -> a +fromFlag (Flag x) = x +fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault" + +fromFlagOrDefault :: a -> Flag a -> a +fromFlagOrDefault _ (Flag x) = x +fromFlagOrDefault def NoFlag = def + +flagToMaybe :: Flag a -> Maybe a +flagToMaybe (Flag x) = Just x +flagToMaybe NoFlag = Nothing + +flagToList :: Flag a -> [a] +flagToList (Flag x) = [x] +flagToList NoFlag = [] + +allFlags :: [Flag Bool] -> Flag Bool +allFlags flags = if all (\f -> fromFlagOrDefault False f) flags + then Flag True + else NoFlag + +-- ------------------------------------------------------------ +-- * Global flags +-- ------------------------------------------------------------ + +-- In fact since individual flags types are monoids and these are just sets of +-- flags then they are also monoids pointwise. This turns out to be really +-- useful. The mempty is the set of empty flags and mappend allows us to +-- override specific flags. For example we can start with default flags and +-- override with the ones we get from a file or the command line, or both. + +-- | Flags that apply at the top level, not to any sub-command. 
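+--
+-- Editorial sketch (not part of the upstream source) of the pointwise
+-- override behaviour described above, using the 'Monoid' and 'Semigroup'
+-- instances defined further down:
+--
+-- > -- keeps globalNumericVersion = Flag False, overrides globalVersion
+-- > defaultGlobalFlags <> mempty { globalVersion = Flag True }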
+data GlobalFlags = GlobalFlags { + globalVersion :: Flag Bool, + globalNumericVersion :: Flag Bool + } deriving (Generic) + +defaultGlobalFlags :: GlobalFlags +defaultGlobalFlags = GlobalFlags { + globalVersion = Flag False, + globalNumericVersion = Flag False + } + +globalCommand :: [Command action] -> CommandUI GlobalFlags +globalCommand commands = CommandUI + { commandName = "" + , commandSynopsis = "" + , commandUsage = \pname -> + "This Setup program uses the Haskell Cabal Infrastructure.\n" + ++ "See http://www.haskell.org/cabal/ for more information.\n" + ++ "\n" + ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n" + , commandDescription = Just $ \pname -> + let + commands' = commands ++ [commandAddAction helpCommandUI undefined] + cmdDescs = getNormalCommandDescriptions commands' + maxlen = maximum $ [length name | (name, _) <- cmdDescs] + align str = str ++ replicate (maxlen - length str) ' ' + in + "Commands:\n" + ++ unlines [ " " ++ align name ++ " " ++ descr + | (name, descr) <- cmdDescs ] + ++ "\n" + ++ "For more information about a command use\n" + ++ " " ++ pname ++ " COMMAND --help\n\n" + ++ "Typical steps for installing Cabal packages:\n" + ++ concat [ " " ++ pname ++ " " ++ x ++ "\n" + | x <- ["configure", "build", "install"]] + , commandNotes = Nothing + , commandDefaultFlags = defaultGlobalFlags + , commandOptions = \_ -> + [option ['V'] ["version"] + "Print version information" + globalVersion (\v flags -> flags { globalVersion = v }) + trueArg + ,option [] ["numeric-version"] + "Print just the version number" + globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) + trueArg + ] + } + +emptyGlobalFlags :: GlobalFlags +emptyGlobalFlags = mempty + +instance Monoid GlobalFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup GlobalFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Config flags +-- ------------------------------------------------------------ + +-- | Policy for relaxing upper bounds in dependencies. For example, given +-- 'build-depends: array >= 0.3 && < 0.5', are we allowed to relax the upper +-- bound and choose a version of 'array' that is greater or equal to 0.5? By +-- default the upper bounds are always strictly honored. +data AllowNewer = + + -- | Default: honor the upper bounds in all dependencies, never choose + -- versions newer than allowed. + AllowNewerNone + + -- | Ignore upper bounds in dependencies on the given packages. + | AllowNewerSome [AllowNewerDep] + + -- | Ignore upper bounds in dependencies on all packages. + | AllowNewerAll + deriving (Eq, Read, Show, Generic) + +-- | Dependencies can be relaxed either for all packages in the install plan, or +-- only for some packages. 
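+--
+-- Editorial illustration (not part of the upstream source) of how
+-- 'AllowNewer' values behave, per the instances defined below:
+--
+-- > isAllowNewer AllowNewerNone                     -- False
+-- > AllowNewerNone <> AllowNewerAll                 -- AllowNewerAll
+-- > display (AllowNewerDep (PackageName "array"))   -- "array"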
+data AllowNewerDep = AllowNewerDep PackageName + | AllowNewerDepScoped PackageName PackageName + deriving (Eq, Read, Show, Generic) + +instance Text AllowNewerDep where + disp (AllowNewerDep p0) = disp p0 + disp (AllowNewerDepScoped p0 p1) = disp p0 Disp.<> Disp.colon Disp.<> disp p1 + + parse = scopedP Parse.<++ normalP + where + scopedP = AllowNewerDepScoped `fmap` parse A.<* Parse.char ':' A.<*> parse + normalP = AllowNewerDep `fmap` parse + +instance Binary AllowNewer +instance Binary AllowNewerDep + +instance Semigroup AllowNewer where + AllowNewerNone <> r = r + l@AllowNewerAll <> _ = l + l@(AllowNewerSome _) <> AllowNewerNone = l + (AllowNewerSome _) <> r@AllowNewerAll = r + (AllowNewerSome a) <> (AllowNewerSome b) = AllowNewerSome (a ++ b) + +instance Monoid AllowNewer where + mempty = AllowNewerNone + mappend = (Semi.<>) + +-- | Convert 'AllowNewer' to a boolean. +isAllowNewer :: AllowNewer -> Bool +isAllowNewer AllowNewerNone = False +isAllowNewer (AllowNewerSome _) = True +isAllowNewer AllowNewerAll = True + +allowNewerParser :: Parse.ReadP r (Maybe AllowNewer) +allowNewerParser = + (Just . AllowNewerSome) `fmap` Parse.sepBy1 parse (Parse.char ',') + +allowNewerPrinter :: (Maybe AllowNewer) -> [Maybe String] +allowNewerPrinter Nothing = [] +allowNewerPrinter (Just AllowNewerNone) = [] +allowNewerPrinter (Just AllowNewerAll) = [Nothing] +allowNewerPrinter (Just (AllowNewerSome pkgs)) = map (Just . display) $ pkgs + +-- | Flags to @configure@ command. +-- +-- IMPORTANT: every time a new flag is added, 'D.C.Setup.filterConfigureFlags' +-- should be updated. +data ConfigFlags = ConfigFlags { + --FIXME: the configPrograms is only here to pass info through to configure + -- because the type of configure is constrained by the UserHooks. + -- when we change UserHooks next we should pass the initial + -- ProgramConfiguration directly and not via ConfigFlags + configPrograms_ :: Last' ProgramConfiguration, -- ^All programs that + -- @cabal@ may run + + configProgramPaths :: [(String, FilePath)], -- ^user specified programs paths + configProgramArgs :: [(String, [String])], -- ^user specified programs args + configProgramPathExtra :: NubList FilePath, -- ^Extend the $PATH + configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the + -- compiler, such as GHC or + -- JHC. + configHcPath :: Flag FilePath, -- ^given compiler location + configHcPkg :: Flag FilePath, -- ^given hc-pkg location + configVanillaLib :: Flag Bool, -- ^Enable vanilla library + configProfLib :: Flag Bool, -- ^Enable profiling in the library + configSharedLib :: Flag Bool, -- ^Build shared library + configDynExe :: Flag Bool, -- ^Enable dynamic linking of the + -- executables. + configProfExe :: Flag Bool, -- ^Enable profiling in the + -- executables. + configProf :: Flag Bool, -- ^Enable profiling in the library + -- and executables. + configProfDetail :: Flag ProfDetailLevel, -- ^Profiling detail level + -- in the library and executables. + configProfLibDetail :: Flag ProfDetailLevel, -- ^Profiling detail level + -- in the library + configConfigureArgs :: [String], -- ^Extra arguments to @configure@ + configOptimization :: Flag OptimisationLevel, -- ^Enable optimization. + configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix. + configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix. 
+ configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation + -- paths + configScratchDir :: Flag FilePath, + configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries + configExtraFrameworkDirs :: [FilePath], -- ^ path to search for extra + -- frameworks (OS X only) + configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files + configIPID :: Flag String, -- ^ explicit IPID to be used + + configDistPref :: Flag FilePath, -- ^"dist" prefix + configVerbosity :: Flag Verbosity, -- ^verbosity level + configUserInstall :: Flag Bool, -- ^The --user\/--global flag + configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use + configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi + configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC + configStripExes :: Flag Bool, -- ^Enable executable stripping + configStripLibs :: Flag Bool, -- ^Enable library stripping + configConstraints :: [Dependency], -- ^Additional constraints for + -- dependencies. + configDependencies :: [(PackageName, UnitId)], + -- ^The packages depended on. + configConfigurationsFlags :: FlagAssignment, + configTests :: Flag Bool, -- ^Enable test suite compilation + configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation + configCoverage :: Flag Bool, -- ^Enable program coverage + configLibCoverage :: Flag Bool, -- ^Enable program coverage (deprecated) + configExactConfiguration :: Flag Bool, + -- ^All direct dependencies and flags are provided on the command line by + -- the user via the '--dependency' and '--flags' options. + configFlagError :: Flag String, + -- ^Halt and show an error message indicating an error in flag assignment + configRelocatable :: Flag Bool, -- ^ Enable relocatable package built + configDebugInfo :: Flag DebugInfoLevel, -- ^ Emit debug info. + configAllowNewer :: Maybe AllowNewer + -- ^ Ignore upper bounds on all or some dependencies. Wrapped in 'Maybe' to + -- distinguish between "default" and "explicitly disabled". + } + deriving (Generic, Read, Show) + +instance Binary ConfigFlags + +-- | More convenient version of 'configPrograms'. Results in an +-- 'error' if internal invariant is violated. +configPrograms :: ConfigFlags -> ProgramConfiguration +configPrograms = maybe (error "FIXME: remove configPrograms") id . getLast' . configPrograms_ + +configAbsolutePaths :: ConfigFlags -> IO ConfigFlags +configAbsolutePaths f = + (\v -> f { configPackageDBs = v }) + `liftM` mapM (maybe (return Nothing) (liftM Just . absolutePackageDBPath)) + (configPackageDBs f) + +defaultConfigFlags :: ProgramConfiguration -> ConfigFlags +defaultConfigFlags progConf = emptyConfigFlags { + configPrograms_ = pure progConf, + configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor, + configVanillaLib = Flag True, + configProfLib = NoFlag, + configSharedLib = NoFlag, + configDynExe = Flag False, + configProfExe = NoFlag, + configProf = NoFlag, + configProfDetail = NoFlag, + configProfLibDetail= NoFlag, + configOptimization = Flag NormalOptimisation, + configProgPrefix = Flag (toPathTemplate ""), + configProgSuffix = Flag (toPathTemplate ""), + configDistPref = NoFlag, + configVerbosity = Flag normal, + configUserInstall = Flag False, --TODO: reverse this +#if defined(mingw32_HOST_OS) + -- See #1589. 
+ configGHCiLib = Flag True, +#else + configGHCiLib = NoFlag, +#endif + configSplitObjs = Flag False, -- takes longer, so turn off by default + configStripExes = Flag True, + configStripLibs = Flag True, + configTests = Flag False, + configBenchmarks = Flag False, + configCoverage = Flag False, + configLibCoverage = NoFlag, + configExactConfiguration = Flag False, + configFlagError = NoFlag, + configRelocatable = Flag False, + configDebugInfo = Flag NoDebugInfo, + configAllowNewer = Nothing + } + +configureCommand :: ProgramConfiguration -> CommandUI ConfigFlags +configureCommand progConf = CommandUI + { commandName = "configure" + , commandSynopsis = "Prepare to build the package." + , commandDescription = Just $ \_ -> wrapText $ + "Configure how the package is built by setting " + ++ "package (and other) flags.\n" + ++ "\n" + ++ "The configuration affects several other commands, " + ++ "including build, test, bench, run, repl.\n" + , commandNotes = Just $ \_pname -> programFlagsDescription progConf + , commandUsage = \pname -> + "Usage: " ++ pname ++ " configure [FLAGS]\n" + , commandDefaultFlags = defaultConfigFlags progConf + , commandOptions = \showOrParseArgs -> + configureOptions showOrParseArgs + ++ programConfigurationPaths progConf showOrParseArgs + configProgramPaths (\v fs -> fs { configProgramPaths = v }) + ++ programConfigurationOption progConf showOrParseArgs + configProgramArgs (\v fs -> fs { configProgramArgs = v }) + ++ programConfigurationOptions progConf showOrParseArgs + configProgramArgs (\v fs -> fs { configProgramArgs = v }) + } + +configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] +configureOptions showOrParseArgs = + [optionVerbosity configVerbosity + (\v flags -> flags { configVerbosity = v }) + ,optionDistPref + configDistPref (\d flags -> flags { configDistPref = d }) + showOrParseArgs + + ,option [] ["compiler"] "compiler" + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + (choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC") + , (Flag GHCJS, ([] , ["ghcjs"]), "compile with GHCJS") + , (Flag JHC, ([] , ["jhc"]), "compile with JHC") + , (Flag LHC, ([] , ["lhc"]), "compile with LHC") + , (Flag UHC, ([] , ["uhc"]), "compile with UHC") + -- "haskell-suite" compiler id string will be replaced + -- by a more specific one during the configure stage + , (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]), + "compile with a haskell-suite compiler")]) + + ,option "w" ["with-compiler"] + "give the path to a particular compiler" + configHcPath (\v flags -> flags { configHcPath = v }) + (reqArgFlag "PATH") + + ,option "" ["with-hc-pkg"] + "give the path to the package tool" + configHcPkg (\v flags -> flags { configHcPkg = v }) + (reqArgFlag "PATH") + ] + ++ map liftInstallDirs installDirsOptions + ++ [option "" ["program-prefix"] + "prefix to be applied to installed executables" + configProgPrefix + (\v flags -> flags { configProgPrefix = v }) + (reqPathTemplateArgFlag "PREFIX") + + ,option "" ["program-suffix"] + "suffix to be applied to installed executables" + configProgSuffix (\v flags -> flags { configProgSuffix = v } ) + (reqPathTemplateArgFlag "SUFFIX") + + ,option "" ["library-vanilla"] + "Vanilla libraries" + configVanillaLib (\v flags -> flags { configVanillaLib = v }) + (boolOpt [] []) + + ,option "p" ["library-profiling"] + "Library profiling" + configProfLib (\v flags -> flags { configProfLib = v }) + (boolOpt "p" []) + + ,option "" ["shared"] + "Shared library" + configSharedLib (\v flags -> flags { configSharedLib = 
v }) + (boolOpt [] []) + + ,option "" ["executable-dynamic"] + "Executable dynamic linking" + configDynExe (\v flags -> flags { configDynExe = v }) + (boolOpt [] []) + + ,option "" ["profiling"] + "Executable and library profiling" + configProf (\v flags -> flags { configProf = v }) + (boolOpt [] []) + + ,option "" ["executable-profiling"] + "Executable profiling (DEPRECATED)" + configProfExe (\v flags -> flags { configProfExe = v }) + (boolOpt [] []) + + ,option "" ["profiling-detail"] + ("Profiling detail level for executable and library (default, " ++ + "none, exported-functions, toplevel-functions, all-functions).") + configProfDetail (\v flags -> flags { configProfDetail = v }) + (reqArg' "level" (Flag . flagToProfDetailLevel) + showProfDetailLevelFlag) + + ,option "" ["library-profiling-detail"] + "Profiling detail level for libraries only." + configProfLibDetail (\v flags -> flags { configProfLibDetail = v }) + (reqArg' "level" (Flag . flagToProfDetailLevel) + showProfDetailLevelFlag) + + ,multiOption "optimization" + configOptimization (\v flags -> flags { configOptimization = v }) + [optArg' "n" (Flag . flagToOptimisationLevel) + (\f -> case f of + Flag NoOptimisation -> [] + Flag NormalOptimisation -> [Nothing] + Flag MaximumOptimisation -> [Just "2"] + _ -> []) + "O" ["enable-optimization","enable-optimisation"] + "Build with optimization (n is 0--2, default is 1)", + noArg (Flag NoOptimisation) [] + ["disable-optimization","disable-optimisation"] + "Build without optimization" + ] + + ,multiOption "debug-info" + configDebugInfo (\v flags -> flags { configDebugInfo = v }) + [optArg' "n" (Flag . flagToDebugInfoLevel) + (\f -> case f of + Flag NoDebugInfo -> [] + Flag MinimalDebugInfo -> [Just "1"] + Flag NormalDebugInfo -> [Nothing] + Flag MaximalDebugInfo -> [Just "3"] + _ -> []) + "" ["enable-debug-info"] + "Emit debug info (n is 0--3, default is 0)", + noArg (Flag NoDebugInfo) [] + ["disable-debug-info"] + "Don't emit debug info" + ] + + ,option "" ["library-for-ghci"] + "compile library for use with GHCi" + configGHCiLib (\v flags -> flags { configGHCiLib = v }) + (boolOpt [] []) + + ,option "" ["split-objs"] + "split library into smaller objects to reduce binary sizes (GHC 6.6+)" + configSplitObjs (\v flags -> flags { configSplitObjs = v }) + (boolOpt [] []) + + ,option "" ["executable-stripping"] + "strip executables upon installation to reduce binary sizes" + configStripExes (\v flags -> flags { configStripExes = v }) + (boolOpt [] []) + + ,option "" ["library-stripping"] + "strip libraries upon installation to reduce binary sizes" + configStripLibs (\v flags -> flags { configStripLibs = v }) + (boolOpt [] []) + + ,option "" ["configure-option"] + "Extra option for configure" + configConfigureArgs (\v flags -> flags { configConfigureArgs = v }) + (reqArg' "OPT" (\x -> [x]) id) + + ,option "" ["user-install"] + "doing a per-user installation" + configUserInstall (\v flags -> flags { configUserInstall = v }) + (boolOpt' ([],["user"]) ([], ["global"])) + + ,option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." 
+ ++ " See the user guide for details.") + configPackageDBs (\v flags -> flags { configPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ,option "f" ["flags"] + "Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false." + configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v }) + (reqArg' "FLAGS" readFlagList showFlagList) + + ,option "" ["extra-include-dirs"] + "A list of directories to search for header files" + configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["ipid"] + "Installed package ID to compile this package as" + configIPID (\v flags -> flags {configIPID = v}) + (reqArgFlag "IPID") + + ,option "" ["extra-lib-dirs"] + "A list of directories to search for external libraries" + configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["extra-framework-dirs"] + "A list of directories to search for external frameworks (OS X only)" + configExtraFrameworkDirs + (\v flags -> flags {configExtraFrameworkDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["extra-prog-path"] + "A list of directories to search for required programs (in addition to the normal search locations)" + configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v}) + (reqArg' "PATH" (\x -> toNubList [x]) fromNubList) + + ,option "" ["constraint"] + "A list of additional constraints on the dependencies." + configConstraints (\v flags -> flags { configConstraints = v}) + (reqArg "DEPENDENCY" + (readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parse)) + (map (\x -> display x))) + + ,option "" ["dependency"] + "A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\"" + configDependencies (\v flags -> flags { configDependencies = v}) + (reqArg "NAME=ID" + (readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parseDependency)) + (map (\x -> display (fst x) ++ "=" ++ display (snd x)))) + + ,option "" ["tests"] + "dependency checking and compilation for test suites listed in the package description file." + configTests (\v flags -> flags { configTests = v }) + (boolOpt [] []) + + ,option "" ["coverage"] + "build package with Haskell Program Coverage. (GHC only)" + configCoverage (\v flags -> flags { configCoverage = v }) + (boolOpt [] []) + + ,option "" ["library-coverage"] + "build package with Haskell Program Coverage. (GHC only) (DEPRECATED)" + configLibCoverage (\v flags -> flags { configLibCoverage = v }) + (boolOpt [] []) + + ,option [] ["allow-newer"] + ("Ignore upper bounds in all dependencies or DEPS") + configAllowNewer (\v flags -> flags { configAllowNewer = v}) + (optArg "DEPS" + (readP_to_E ("Cannot parse the list of packages: " ++) allowNewerParser) + (Just AllowNewerAll) allowNewerPrinter) + + ,option "" ["exact-configuration"] + "All direct dependencies and flags are provided on the command line." + configExactConfiguration + (\v flags -> flags { configExactConfiguration = v }) + trueArg + + ,option "" ["benchmarks"] + "dependency checking and compilation for benchmarks listed in the package description file." + configBenchmarks (\v flags -> flags { configBenchmarks = v }) + (boolOpt [] []) + + ,option "" ["relocatable"] + "building a package that is relocatable. 
(GHC only)" + configRelocatable (\v flags -> flags { configRelocatable = v}) + (boolOpt [] []) + ] + where + readFlagList :: String -> FlagAssignment + readFlagList = map tagWithValue . words + where tagWithValue ('-':fname) = (FlagName (lowercase fname), False) + tagWithValue fname = (FlagName (lowercase fname), True) + + showFlagList :: FlagAssignment -> [String] + showFlagList fs = [ if not set then '-':fname else fname + | (FlagName fname, set) <- fs] + + liftInstallDirs = + liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v }) + + reqPathTemplateArgFlag title _sf _lf d get set = + reqArgFlag title _sf _lf d + (fmap fromPathTemplate . get) (set . fmap toPathTemplate) + +readPackageDbList :: String -> [Maybe PackageDB] +readPackageDbList "clear" = [Nothing] +readPackageDbList "global" = [Just GlobalPackageDB] +readPackageDbList "user" = [Just UserPackageDB] +readPackageDbList other = [Just (SpecificPackageDB other)] + +showPackageDbList :: [Maybe PackageDB] -> [String] +showPackageDbList = map showPackageDb + where + showPackageDb Nothing = "clear" + showPackageDb (Just GlobalPackageDB) = "global" + showPackageDb (Just UserPackageDB) = "user" + showPackageDb (Just (SpecificPackageDB db)) = db + +showProfDetailLevelFlag :: Flag ProfDetailLevel -> [String] +showProfDetailLevelFlag NoFlag = [] +showProfDetailLevelFlag (Flag dl) = [showProfDetailLevel dl] + +parseDependency :: Parse.ReadP r (PackageName, UnitId) +parseDependency = do + x <- parse + _ <- Parse.char '=' + y <- parse + return (x, y) + +installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))] +installDirsOptions = + [ option "" ["prefix"] + "bake this prefix in preparation of installation" + prefix (\v flags -> flags { prefix = v }) + installDirArg + + , option "" ["bindir"] + "installation directory for executables" + bindir (\v flags -> flags { bindir = v }) + installDirArg + + , option "" ["libdir"] + "installation directory for libraries" + libdir (\v flags -> flags { libdir = v }) + installDirArg + + , option "" ["libsubdir"] + "subdirectory of libdir in which libs are installed" + libsubdir (\v flags -> flags { libsubdir = v }) + installDirArg + + , option "" ["dynlibdir"] + "installation directory for dynamic libraries" + dynlibdir (\v flags -> flags { dynlibdir = v }) + installDirArg + + , option "" ["libexecdir"] + "installation directory for program executables" + libexecdir (\v flags -> flags { libexecdir = v }) + installDirArg + + , option "" ["datadir"] + "installation directory for read-only data" + datadir (\v flags -> flags { datadir = v }) + installDirArg + + , option "" ["datasubdir"] + "subdirectory of datadir in which data files are installed" + datasubdir (\v flags -> flags { datasubdir = v }) + installDirArg + + , option "" ["docdir"] + "installation directory for documentation" + docdir (\v flags -> flags { docdir = v }) + installDirArg + + , option "" ["htmldir"] + "installation directory for HTML documentation" + htmldir (\v flags -> flags { htmldir = v }) + installDirArg + + , option "" ["haddockdir"] + "installation directory for haddock interfaces" + haddockdir (\v flags -> flags { haddockdir = v }) + installDirArg + + , option "" ["sysconfdir"] + "installation directory for configuration files" + sysconfdir (\v flags -> flags { sysconfdir = v }) + installDirArg + ] + where + installDirArg _sf _lf d get set = + reqArgFlag "DIR" _sf _lf d + (fmap fromPathTemplate . get) (set . 
fmap toPathTemplate) + +emptyConfigFlags :: ConfigFlags +emptyConfigFlags = mempty + +instance Monoid ConfigFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup ConfigFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Copy flags +-- ------------------------------------------------------------ + +-- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity) +data CopyFlags = CopyFlags { + copyDest :: Flag CopyDest, + copyDistPref :: Flag FilePath, + copyVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultCopyFlags :: CopyFlags +defaultCopyFlags = CopyFlags { + copyDest = Flag NoCopyDest, + copyDistPref = NoFlag, + copyVerbosity = Flag normal + } + +copyCommand :: CommandUI CopyFlags +copyCommand = CommandUI + { commandName = "copy" + , commandSynopsis = "Copy the files into the install locations." + , commandDescription = Just $ \_ -> wrapText $ + "Does not call register, and allows a prefix at install time. " + ++ "Without the --destdir flag, configure determines location.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " copy [FLAGS]\n" + , commandDefaultFlags = defaultCopyFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v }) + + ,optionDistPref + copyDistPref (\d flags -> flags { copyDistPref = d }) + showOrParseArgs + + ,option "" ["destdir"] + "directory to copy files to, prepended to installation directories" + copyDest (\v flags -> flags { copyDest = v }) + (reqArg "DIR" (succeedReadE (Flag . CopyTo)) + (\f -> case f of Flag (CopyTo p) -> [p]; _ -> [])) + ] + } + +emptyCopyFlags :: CopyFlags +emptyCopyFlags = mempty + +instance Monoid CopyFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup CopyFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Install flags +-- ------------------------------------------------------------ + +-- | Flags to @install@: (package db, verbosity) +data InstallFlags = InstallFlags { + installPackageDB :: Flag PackageDB, + installDistPref :: Flag FilePath, + installUseWrapper :: Flag Bool, + installInPlace :: Flag Bool, + installVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultInstallFlags :: InstallFlags +defaultInstallFlags = InstallFlags { + installPackageDB = NoFlag, + installDistPref = NoFlag, + installUseWrapper = Flag False, + installInPlace = Flag False, + installVerbosity = Flag normal + } + +installCommand :: CommandUI InstallFlags +installCommand = CommandUI + { commandName = "install" + , commandSynopsis = + "Copy the files into the install locations. Run register." + , commandDescription = Just $ \_ -> wrapText $ + "Unlike the copy command, install calls the register command." 
+ ++ "If you want to install into a location that is not what was" + ++ "specified in the configure step, use the copy command.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " install [FLAGS]\n" + , commandDefaultFlags = defaultInstallFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v }) + ,optionDistPref + installDistPref (\d flags -> flags { installDistPref = d }) + showOrParseArgs + + ,option "" ["inplace"] + "install the package in the install subdirectory of the dist prefix, so it can be used without being installed" + installInPlace (\v flags -> flags { installInPlace = v }) + trueArg + + ,option "" ["shell-wrappers"] + "using shell script wrappers around executables" + installUseWrapper (\v flags -> flags { installUseWrapper = v }) + (boolOpt [] []) + + ,option "" ["package-db"] "" + installPackageDB (\v flags -> flags { installPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "upon configuration register this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default) upon configuration register this package in the system-wide package database")]) + ] + } + +emptyInstallFlags :: InstallFlags +emptyInstallFlags = mempty + +instance Monoid InstallFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup InstallFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * SDist flags +-- ------------------------------------------------------------ + +-- | Flags to @sdist@: (snapshot, verbosity) +data SDistFlags = SDistFlags { + sDistSnapshot :: Flag Bool, + sDistDirectory :: Flag FilePath, + sDistDistPref :: Flag FilePath, + sDistListSources :: Flag FilePath, + sDistVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultSDistFlags :: SDistFlags +defaultSDistFlags = SDistFlags { + sDistSnapshot = Flag False, + sDistDirectory = mempty, + sDistDistPref = NoFlag, + sDistListSources = mempty, + sDistVerbosity = Flag normal + } + +sdistCommand :: CommandUI SDistFlags +sdistCommand = CommandUI + { commandName = "sdist" + , commandSynopsis = + "Generate a source distribution file (.tar.gz)." 
+ , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " sdist [FLAGS]\n" + , commandDefaultFlags = defaultSDistFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v }) + ,optionDistPref + sDistDistPref (\d flags -> flags { sDistDistPref = d }) + showOrParseArgs + + ,option "" ["list-sources"] + "Just write a list of the package's sources to a file" + sDistListSources (\v flags -> flags { sDistListSources = v }) + (reqArgFlag "FILE") + + ,option "" ["snapshot"] + "Produce a snapshot source distribution" + sDistSnapshot (\v flags -> flags { sDistSnapshot = v }) + trueArg + + ,option "" ["output-directory"] + ("Generate a source distribution in the given directory, " + ++ "without creating a tarball") + sDistDirectory (\v flags -> flags { sDistDirectory = v }) + (reqArgFlag "DIR") + ] + } + +emptySDistFlags :: SDistFlags +emptySDistFlags = mempty + +instance Monoid SDistFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup SDistFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Register flags +-- ------------------------------------------------------------ + +-- | Flags to @register@ and @unregister@: (user package, gen-script, +-- in-place, verbosity) +data RegisterFlags = RegisterFlags { + regPackageDB :: Flag PackageDB, + regGenScript :: Flag Bool, + regGenPkgConf :: Flag (Maybe FilePath), + regInPlace :: Flag Bool, + regDistPref :: Flag FilePath, + regPrintId :: Flag Bool, + regVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultRegisterFlags :: RegisterFlags +defaultRegisterFlags = RegisterFlags { + regPackageDB = NoFlag, + regGenScript = Flag False, + regGenPkgConf = NoFlag, + regInPlace = Flag False, + regDistPref = NoFlag, + regPrintId = Flag False, + regVerbosity = Flag normal + } + +registerCommand :: CommandUI RegisterFlags +registerCommand = CommandUI + { commandName = "register" + , commandSynopsis = + "Register this package with the compiler." 
+ , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " register [FLAGS]\n" + , commandDefaultFlags = defaultRegisterFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) + ,optionDistPref + regDistPref (\d flags -> flags { regDistPref = d }) + showOrParseArgs + + ,option "" ["packageDB"] "" + regPackageDB (\v flags -> flags { regPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "upon registration, register this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default)upon registration, register this package in the system-wide package database")]) + + ,option "" ["inplace"] + "register the package in the build location, so it can be used without being installed" + regInPlace (\v flags -> flags { regInPlace = v }) + trueArg + + ,option "" ["gen-script"] + "instead of registering, generate a script to register later" + regGenScript (\v flags -> flags { regGenScript = v }) + trueArg + + ,option "" ["gen-pkg-config"] + "instead of registering, generate a package registration file" + regGenPkgConf (\v flags -> flags { regGenPkgConf = v }) + (optArg' "PKG" Flag flagToList) + + ,option "" ["print-ipid"] + "print the installed package ID calculated for this package" + regPrintId (\v flags -> flags { regPrintId = v }) + trueArg + ] + } + +unregisterCommand :: CommandUI RegisterFlags +unregisterCommand = CommandUI + { commandName = "unregister" + , commandSynopsis = + "Unregister this package with the compiler." + , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " unregister [FLAGS]\n" + , commandDefaultFlags = defaultRegisterFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) + ,optionDistPref + regDistPref (\d flags -> flags { regDistPref = d }) + showOrParseArgs + + ,option "" ["user"] "" + regPackageDB (\v flags -> flags { regPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "unregister this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default) unregister this package in the system-wide package database")]) + + ,option "" ["gen-script"] + "Instead of performing the unregister command, generate a script to unregister later" + regGenScript (\v flags -> flags { regGenScript = v }) + trueArg + ] + } + +emptyRegisterFlags :: RegisterFlags +emptyRegisterFlags = mempty + +instance Monoid RegisterFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup RegisterFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * HsColour flags +-- ------------------------------------------------------------ + +data HscolourFlags = HscolourFlags { + hscolourCSS :: Flag FilePath, + hscolourExecutables :: Flag Bool, + hscolourTestSuites :: Flag Bool, + hscolourBenchmarks :: Flag Bool, + hscolourDistPref :: Flag FilePath, + hscolourVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +emptyHscolourFlags :: HscolourFlags +emptyHscolourFlags = mempty + +defaultHscolourFlags :: HscolourFlags +defaultHscolourFlags = HscolourFlags { + hscolourCSS = NoFlag, + hscolourExecutables = Flag False, + hscolourTestSuites = Flag False, + hscolourBenchmarks = Flag False, + hscolourDistPref = NoFlag, + hscolourVerbosity = Flag normal + } + +instance Monoid HscolourFlags where + 
mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup HscolourFlags where + (<>) = gmappend + +hscolourCommand :: CommandUI HscolourFlags +hscolourCommand = CommandUI + { commandName = "hscolour" + , commandSynopsis = + "Generate HsColour colourised code, in HTML format." + , commandDescription = Just (\_ -> "Requires the hscolour program.\n") + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " hscolour [FLAGS]\n" + , commandDefaultFlags = defaultHscolourFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity hscolourVerbosity + (\v flags -> flags { hscolourVerbosity = v }) + ,optionDistPref + hscolourDistPref (\d flags -> flags { hscolourDistPref = d }) + showOrParseArgs + + ,option "" ["executables"] + "Run hscolour for Executables targets" + hscolourExecutables (\v flags -> flags { hscolourExecutables = v }) + trueArg + + ,option "" ["tests"] + "Run hscolour for Test Suite targets" + hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v }) + trueArg + + ,option "" ["benchmarks"] + "Run hscolour for Benchmark targets" + hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v }) + trueArg + + ,option "" ["all"] + "Run hscolour for all targets" + (\f -> allFlags [ hscolourExecutables f + , hscolourTestSuites f + , hscolourBenchmarks f]) + (\v flags -> flags { hscolourExecutables = v + , hscolourTestSuites = v + , hscolourBenchmarks = v }) + trueArg + + ,option "" ["css"] + "Use a cascading style sheet" + hscolourCSS (\v flags -> flags { hscolourCSS = v }) + (reqArgFlag "PATH") + ] + } + +-- ------------------------------------------------------------ +-- * Haddock flags +-- ------------------------------------------------------------ + + +-- | When we build haddock documentation, there are two cases: +-- +-- 1. We build haddocks only for the current development version, +-- intended for local use and not for distribution. In this case, +-- we store the generated documentation in @/doc/html/@. +-- +-- 2. We build haddocks for intended for uploading them to hackage. +-- In this case, we need to follow the layout that hackage expects +-- from documentation tarballs, and we might also want to use different +-- flags than for development builds, so in this case we store the generated +-- documentation in @/doc/html/-docs@. +data HaddockTarget = ForHackage | ForDevelopment deriving (Eq, Show, Generic) + +-- | Convert '--for-hackage' to 'HaddockTarget'. 
+haddockTargetFromFlag :: Flag Bool -> HaddockTarget +haddockTargetFromFlag NoFlag = ForDevelopment +haddockTargetFromFlag (Flag False) = ForDevelopment +haddockTargetFromFlag (Flag True) = ForHackage + +data HaddockFlags = HaddockFlags { + haddockProgramPaths :: [(String, FilePath)], + haddockProgramArgs :: [(String, [String])], + haddockHoogle :: Flag Bool, + haddockHtml :: Flag Bool, + haddockHtmlLocation :: Flag String, + haddockForHackage :: Flag Bool, + haddockExecutables :: Flag Bool, + haddockTestSuites :: Flag Bool, + haddockBenchmarks :: Flag Bool, + haddockInternal :: Flag Bool, + haddockCss :: Flag FilePath, + haddockHscolour :: Flag Bool, + haddockHscolourCss :: Flag FilePath, + haddockContents :: Flag PathTemplate, + haddockDistPref :: Flag FilePath, + haddockKeepTempFiles:: Flag Bool, + haddockVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultHaddockFlags :: HaddockFlags +defaultHaddockFlags = HaddockFlags { + haddockProgramPaths = mempty, + haddockProgramArgs = [], + haddockHoogle = Flag False, + haddockHtml = Flag False, + haddockHtmlLocation = NoFlag, + haddockForHackage = Flag False, + haddockExecutables = Flag False, + haddockTestSuites = Flag False, + haddockBenchmarks = Flag False, + haddockInternal = Flag False, + haddockCss = NoFlag, + haddockHscolour = Flag False, + haddockHscolourCss = NoFlag, + haddockContents = NoFlag, + haddockDistPref = NoFlag, + haddockKeepTempFiles= Flag False, + haddockVerbosity = Flag normal + } + +haddockCommand :: CommandUI HaddockFlags +haddockCommand = CommandUI + { commandName = "haddock" + , commandSynopsis = "Generate Haddock HTML documentation." + , commandDescription = Just $ \_ -> + "Requires the program haddock, version 2.x.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " haddock [FLAGS]\n" + , commandDefaultFlags = defaultHaddockFlags + , commandOptions = \showOrParseArgs -> + haddockOptions showOrParseArgs + ++ programConfigurationPaths progConf ParseArgs + haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v}) + ++ programConfigurationOption progConf showOrParseArgs + haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v }) + ++ programConfigurationOptions progConf ParseArgs + haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v}) + } + where + progConf = addKnownProgram haddockProgram + $ addKnownProgram ghcProgram + $ emptyProgramConfiguration + +haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] +haddockOptions showOrParseArgs = + [optionVerbosity haddockVerbosity + (\v flags -> flags { haddockVerbosity = v }) + ,optionDistPref + haddockDistPref (\d flags -> flags { haddockDistPref = d }) + showOrParseArgs + + ,option "" ["keep-temp-files"] + "Keep temporary files" + haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b }) + trueArg + + ,option "" ["hoogle"] + "Generate a hoogle database" + haddockHoogle (\v flags -> flags { haddockHoogle = v }) + trueArg + + ,option "" ["html"] + "Generate HTML documentation (the default)" + haddockHtml (\v flags -> flags { haddockHtml = v }) + trueArg + + ,option "" ["html-location"] + "Location of HTML documentation for pre-requisite packages" + haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v }) + (reqArgFlag "URL") + + ,option "" ["for-hackage"] + "Collection of flags to generate documentation suitable for upload to hackage" + haddockForHackage (\v flags -> flags { haddockForHackage = v }) + trueArg + + ,option "" ["executables"] + "Run haddock for Executables 
targets" + haddockExecutables (\v flags -> flags { haddockExecutables = v }) + trueArg + + ,option "" ["tests"] + "Run haddock for Test Suite targets" + haddockTestSuites (\v flags -> flags { haddockTestSuites = v }) + trueArg + + ,option "" ["benchmarks"] + "Run haddock for Benchmark targets" + haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v }) + trueArg + + ,option "" ["all"] + "Run haddock for all targets" + (\f -> allFlags [ haddockExecutables f + , haddockTestSuites f + , haddockBenchmarks f]) + (\v flags -> flags { haddockExecutables = v + , haddockTestSuites = v + , haddockBenchmarks = v }) + trueArg + + ,option "" ["internal"] + "Run haddock for internal modules and include all symbols" + haddockInternal (\v flags -> flags { haddockInternal = v }) + trueArg + + ,option "" ["css"] + "Use PATH as the haddock stylesheet" + haddockCss (\v flags -> flags { haddockCss = v }) + (reqArgFlag "PATH") + + ,option "" ["hyperlink-source","hyperlink-sources"] + "Hyperlink the documentation to the source code (using HsColour)" + haddockHscolour (\v flags -> flags { haddockHscolour = v }) + trueArg + + ,option "" ["hscolour-css"] + "Use PATH as the HsColour stylesheet" + haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v }) + (reqArgFlag "PATH") + + ,option "" ["contents-location"] + "Bake URL in as the location for the contents page" + haddockContents (\v flags -> flags { haddockContents = v }) + (reqArg' "URL" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + ] + +emptyHaddockFlags :: HaddockFlags +emptyHaddockFlags = mempty + +instance Monoid HaddockFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup HaddockFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Clean flags +-- ------------------------------------------------------------ + +data CleanFlags = CleanFlags { + cleanSaveConf :: Flag Bool, + cleanDistPref :: Flag FilePath, + cleanVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultCleanFlags :: CleanFlags +defaultCleanFlags = CleanFlags { + cleanSaveConf = Flag False, + cleanDistPref = NoFlag, + cleanVerbosity = Flag normal + } + +cleanCommand :: CommandUI CleanFlags +cleanCommand = CommandUI + { commandName = "clean" + , commandSynopsis = "Clean up after a build." + , commandDescription = Just $ \_ -> + "Removes .hi, .o, preprocessed sources, etc.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " clean [FLAGS]\n" + , commandDefaultFlags = defaultCleanFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) + ,optionDistPref + cleanDistPref (\d flags -> flags { cleanDistPref = d }) + showOrParseArgs + + ,option "s" ["save-configure"] + "Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure." 
+ cleanSaveConf (\v flags -> flags { cleanSaveConf = v }) + trueArg + ] + } + +emptyCleanFlags :: CleanFlags +emptyCleanFlags = mempty + +instance Monoid CleanFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup CleanFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Build flags +-- ------------------------------------------------------------ + +data BuildFlags = BuildFlags { + buildProgramPaths :: [(String, FilePath)], + buildProgramArgs :: [(String, [String])], + buildDistPref :: Flag FilePath, + buildVerbosity :: Flag Verbosity, + buildNumJobs :: Flag (Maybe Int), + -- TODO: this one should not be here, it's just that the silly + -- UserHooks stop us from passing extra info in other ways + buildArgs :: [String] + } + deriving (Show, Generic) + +{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-} +buildVerbose :: BuildFlags -> Verbosity +buildVerbose = fromFlagOrDefault normal . buildVerbosity + +defaultBuildFlags :: BuildFlags +defaultBuildFlags = BuildFlags { + buildProgramPaths = mempty, + buildProgramArgs = [], + buildDistPref = mempty, + buildVerbosity = Flag normal, + buildNumJobs = mempty, + buildArgs = [] + } + +buildCommand :: ProgramConfiguration -> CommandUI BuildFlags +buildCommand progConf = CommandUI + { commandName = "build" + , commandSynopsis = "Compile all/specific components." + , commandDescription = Just $ \_ -> wrapText $ + "Components encompass executables, tests, and benchmarks.\n" + ++ "\n" + ++ "Affected by configuration options, see `configure`.\n" + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " build " + ++ " All the components in the package\n" + ++ " " ++ pname ++ " build foo " + ++ " A component (i.e. lib, exe, test suite)\n\n" + ++ programFlagsDescription progConf +--TODO: re-enable once we have support for module/file targets +-- ++ " " ++ pname ++ " build Foo.Bar " +-- ++ " A module\n" +-- ++ " " ++ pname ++ " build Foo/Bar.hs" +-- ++ " A file\n\n" +-- ++ "If a target is ambiguous it can be qualified with the component " +-- ++ "name, e.g.\n" +-- ++ " " ++ pname ++ " build foo:Foo.Bar\n" +-- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n" + , commandUsage = usageAlternatives "build" $ + [ "[FLAGS]" + , "COMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultBuildFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity + buildVerbosity (\v flags -> flags { buildVerbosity = v }) + + , optionDistPref + buildDistPref (\d flags -> flags { buildDistPref = d }) showOrParseArgs + ] + ++ buildOptions progConf showOrParseArgs + } + +buildOptions :: ProgramConfiguration -> ShowOrParseArgs + -> [OptionField BuildFlags] +buildOptions progConf showOrParseArgs = + [ optionNumJobs + buildNumJobs (\v flags -> flags { buildNumJobs = v }) + ] + + ++ programConfigurationPaths progConf showOrParseArgs + buildProgramPaths (\v flags -> flags { buildProgramPaths = v}) + + ++ programConfigurationOption progConf showOrParseArgs + buildProgramArgs (\v fs -> fs { buildProgramArgs = v }) + + ++ programConfigurationOptions progConf showOrParseArgs + buildProgramArgs (\v flags -> flags { buildProgramArgs = v}) + +emptyBuildFlags :: BuildFlags +emptyBuildFlags = mempty + +instance Monoid BuildFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup BuildFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * REPL Flags +-- ------------------------------------------------------------ + +data 
ReplFlags = ReplFlags { + replProgramPaths :: [(String, FilePath)], + replProgramArgs :: [(String, [String])], + replDistPref :: Flag FilePath, + replVerbosity :: Flag Verbosity, + replReload :: Flag Bool + } + deriving (Show, Generic) + +defaultReplFlags :: ReplFlags +defaultReplFlags = ReplFlags { + replProgramPaths = mempty, + replProgramArgs = [], + replDistPref = NoFlag, + replVerbosity = Flag normal, + replReload = Flag False + } + +instance Monoid ReplFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup ReplFlags where + (<>) = gmappend + +replCommand :: ProgramConfiguration -> CommandUI ReplFlags +replCommand progConf = CommandUI + { commandName = "repl" + , commandSynopsis = + "Open an interpreter session for the given component." + , commandDescription = Just $ \pname -> wrapText $ + "If the current directory contains no package, ignores COMPONENT " + ++ "parameters and opens an interactive interpreter session; if a " + ++ "sandbox is present, its package database will be used.\n" + ++ "\n" + ++ "Otherwise, (re)configures with the given or default flags, and " + ++ "loads the interpreter with the relevant modules. For executables, " + ++ "tests and benchmarks, loads the main module (and its " + ++ "dependencies); for libraries all exposed/other modules.\n" + ++ "\n" + ++ "The default component is the library itself, or the executable " + ++ "if that is the only component.\n" + ++ "\n" + ++ "Support for loading specific modules is planned but not " + ++ "implemented yet. For certain scenarios, `" ++ pname + ++ " exec -- ghci :l Foo` may be used instead. Note that `exec` will " + ++ "not (re)configure and you will have to specify the location of " + ++ "other modules, if required.\n" + + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " repl " + ++ " The first component in the package\n" + ++ " " ++ pname ++ " repl foo " + ++ " A named component (i.e. lib, exe, test suite)\n" + ++ " " ++ pname ++ " repl --ghc-options=\"-lstdc++\"" + ++ " Specifying flags for interpreter\n" +--TODO: re-enable once we have support for module/file targets +-- ++ " " ++ pname ++ " repl Foo.Bar " +-- ++ " A module\n" +-- ++ " " ++ pname ++ " repl Foo/Bar.hs" +-- ++ " A file\n\n" +-- ++ "If a target is ambiguous it can be qualified with the component " +-- ++ "name, e.g.\n" +-- ++ " " ++ pname ++ " repl foo:Foo.Bar\n" +-- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n" + , commandUsage = \pname -> "Usage: " ++ pname ++ " repl [COMPONENT] [FLAGS]\n" + , commandDefaultFlags = defaultReplFlags + , commandOptions = \showOrParseArgs -> + optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v }) + : optionDistPref + replDistPref (\d flags -> flags { replDistPref = d }) + showOrParseArgs + + : programConfigurationPaths progConf showOrParseArgs + replProgramPaths (\v flags -> flags { replProgramPaths = v}) + + ++ programConfigurationOption progConf showOrParseArgs + replProgramArgs (\v flags -> flags { replProgramArgs = v}) + + ++ programConfigurationOptions progConf showOrParseArgs + replProgramArgs (\v flags -> flags { replProgramArgs = v}) + + ++ case showOrParseArgs of + ParseArgs -> + [ option "" ["reload"] + "Used from within an interpreter to update files." 
+ replReload (\v flags -> flags { replReload = v }) + trueArg + ] + _ -> [] + } + +-- ------------------------------------------------------------ +-- * Test flags +-- ------------------------------------------------------------ + +data TestShowDetails = Never | Failures | Always | Streaming | Direct + deriving (Eq, Ord, Enum, Bounded, Show) + +knownTestShowDetails :: [TestShowDetails] +knownTestShowDetails = [minBound..maxBound] + +instance Text TestShowDetails where + disp = Disp.text . lowercase . show + + parse = maybe Parse.pfail return . classify =<< ident + where + ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-') + classify str = lookup (lowercase str) enumMap + enumMap :: [(String, TestShowDetails)] + enumMap = [ (display x, x) + | x <- knownTestShowDetails ] + +--TODO: do we need this instance? +instance Monoid TestShowDetails where + mempty = Never + mappend = (Semi.<>) + +instance Semigroup TestShowDetails where + a <> b = if a < b then b else a + +data TestFlags = TestFlags { + testDistPref :: Flag FilePath, + testVerbosity :: Flag Verbosity, + testHumanLog :: Flag PathTemplate, + testMachineLog :: Flag PathTemplate, + testShowDetails :: Flag TestShowDetails, + testKeepTix :: Flag Bool, + -- TODO: think about if/how options are passed to test exes + testOptions :: [PathTemplate] + } deriving (Generic) + +defaultTestFlags :: TestFlags +defaultTestFlags = TestFlags { + testDistPref = NoFlag, + testVerbosity = Flag normal, + testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log", + testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log", + testShowDetails = toFlag Failures, + testKeepTix = toFlag False, + testOptions = [] + } + +testCommand :: CommandUI TestFlags +testCommand = CommandUI + { commandName = "test" + , commandSynopsis = + "Run all/specific tests in the test suite." + , commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-tests` flag and builds" + ++ " the test suite.\n" + ++ "\n" + ++ "Remember that the tests' dependencies must be installed if there" + ++ " are additional ones; e.g. with `" ++ pname + ++ " install --only-dependencies --enable-tests`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running tests.\n" + , commandNotes = Nothing + , commandUsage = usageAlternatives "test" + [ "[FLAGS]" + , "TESTCOMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultTestFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v }) + , optionDistPref + testDistPref (\d flags -> flags { testDistPref = d }) + showOrParseArgs + , option [] ["log"] + ("Log all test suite results to file (name template can use " + ++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)") + testHumanLog (\v flags -> flags { testHumanLog = v }) + (reqArg' "TEMPLATE" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + , option [] ["machine-log"] + ("Produce a machine-readable log file (name template can use " + ++ "$pkgid, $compiler, $os, $arch, $result)") + testMachineLog (\v flags -> flags { testMachineLog = v }) + (reqArg' "TEMPLATE" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + , option [] ["show-details"] + ("'always': always show results of individual test cases. " + ++ "'never': never show results of individual test cases. " + ++ "'failures': show results of failing test cases. 
" + ++ "'streaming': show results of test cases in real time." + ++ "'direct': send results of test cases in real time; no log file.") + testShowDetails (\v flags -> flags { testShowDetails = v }) + (reqArg "FILTER" + (readP_to_E (\_ -> "--show-details flag expects one of " + ++ intercalate ", " + (map display knownTestShowDetails)) + (fmap toFlag parse)) + (flagToList . fmap display)) + , option [] ["keep-tix-files"] + "keep .tix files for HPC between test runs" + testKeepTix (\v flags -> flags { testKeepTix = v}) + trueArg + , option [] ["test-options"] + ("give extra options to test executables " + ++ "(name templates can use $pkgid, $compiler, " + ++ "$os, $arch, $test-suite)") + testOptions (\v flags -> flags { testOptions = v }) + (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) + (const [])) + , option [] ["test-option"] + ("give extra option to test executables " + ++ "(no need to quote options containing spaces, " + ++ "name template can use $pkgid, $compiler, " + ++ "$os, $arch, $test-suite)") + testOptions (\v flags -> flags { testOptions = v }) + (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) + (map fromPathTemplate)) + ] + } + +emptyTestFlags :: TestFlags +emptyTestFlags = mempty + +instance Monoid TestFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup TestFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Benchmark flags +-- ------------------------------------------------------------ + +data BenchmarkFlags = BenchmarkFlags { + benchmarkDistPref :: Flag FilePath, + benchmarkVerbosity :: Flag Verbosity, + benchmarkOptions :: [PathTemplate] + } deriving (Generic) + +defaultBenchmarkFlags :: BenchmarkFlags +defaultBenchmarkFlags = BenchmarkFlags { + benchmarkDistPref = NoFlag, + benchmarkVerbosity = Flag normal, + benchmarkOptions = [] + } + +benchmarkCommand :: CommandUI BenchmarkFlags +benchmarkCommand = CommandUI + { commandName = "bench" + , commandSynopsis = + "Run all/specific benchmarks." + , commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-benchmarks` flag and" + ++ " builds the benchmarks.\n" + ++ "\n" + ++ "Remember that the benchmarks' dependencies must be installed if" + ++ " there are additional ones; e.g. with `" ++ pname + ++ " install --only-dependencies --enable-benchmarks`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running" + ++ " benchmarks.\n" + , commandNotes = Nothing + , commandUsage = usageAlternatives "bench" + [ "[FLAGS]" + , "BENCHCOMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultBenchmarkFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity benchmarkVerbosity + (\v flags -> flags { benchmarkVerbosity = v }) + , optionDistPref + benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d }) + showOrParseArgs + , option [] ["benchmark-options"] + ("give extra options to benchmark executables " + ++ "(name templates can use $pkgid, $compiler, " + ++ "$os, $arch, $benchmark)") + benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) + (reqArg' "TEMPLATES" (map toPathTemplate . 
splitArgs) + (const [])) + , option [] ["benchmark-option"] + ("give extra option to benchmark executables " + ++ "(no need to quote options containing spaces, " + ++ "name template can use $pkgid, $compiler, " + ++ "$os, $arch, $benchmark)") + benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) + (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) + (map fromPathTemplate)) + ] + } + +emptyBenchmarkFlags :: BenchmarkFlags +emptyBenchmarkFlags = mempty + +instance Monoid BenchmarkFlags where + mempty = gmempty + mappend = (Semi.<>) + +instance Semigroup BenchmarkFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Shared options utils +-- ------------------------------------------------------------ + +programFlagsDescription :: ProgramConfiguration -> String +programFlagsDescription progConf = + "The flags --with-PROG and --PROG-option(s) can be used with" + ++ " the following programs:" + ++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . sort) + [ programName prog | (prog, _) <- knownPrograms progConf ] + ++ "\n" + +-- | For each known program @PROG@ in 'progConf', produce a @with-PROG@ +-- 'OptionField'. +programConfigurationPaths + :: ProgramConfiguration + -> ShowOrParseArgs + -> (flags -> [(String, FilePath)]) + -> ([(String, FilePath)] -> (flags -> flags)) + -> [OptionField flags] +programConfigurationPaths progConf showOrParseArgs get set = + programConfigurationPaths' ("with-" ++) progConf showOrParseArgs get set + +-- | Like 'programConfigurationPaths', but allows to customise the option name. +programConfigurationPaths' + :: (String -> String) + -> ProgramConfiguration + -> ShowOrParseArgs + -> (flags -> [(String, FilePath)]) + -> ([(String, FilePath)] -> (flags -> flags)) + -> [OptionField flags] +programConfigurationPaths' mkName progConf showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [withProgramPath "PROG"] + ParseArgs -> map (withProgramPath . programName . fst) + (knownPrograms progConf) + where + withProgramPath prog = + option "" [mkName prog] + ("give the path to " ++ prog) + get set + (reqArg' "PATH" (\path -> [(prog, path)]) + (\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ])) + +-- | For each known program @PROG@ in 'progConf', produce a @PROG-option@ +-- 'OptionField'. +programConfigurationOption + :: ProgramConfiguration + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] +programConfigurationOption progConf showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOption "PROG"] + ParseArgs -> map (programOption . programName . fst) + (knownPrograms progConf) + where + programOption prog = + option "" [prog ++ "-option"] + ("give an extra option to " ++ prog ++ + " (no need to quote options containing spaces)") + get set + (reqArg' "OPT" (\arg -> [(prog, [arg])]) + (\progArgs -> concat [ args + | (prog', args) <- progArgs, prog==prog' ])) + +-- | For each known program @PROG@ in 'progConf', produce a @PROG-options@ +-- 'OptionField'. 
+programConfigurationOptions + :: ProgramConfiguration + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] +programConfigurationOptions progConf showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOptions "PROG"] + ParseArgs -> map (programOptions . programName . fst) + (knownPrograms progConf) + where + programOptions prog = + option "" [prog ++ "-options"] + ("give extra options to " ++ prog) + get set + (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const [])) + +-- ------------------------------------------------------------ +-- * GetOpt Utils +-- ------------------------------------------------------------ + +boolOpt :: SFlags -> SFlags + -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +boolOpt = Command.boolOpt flagToMaybe Flag + +boolOpt' :: OptFlags -> OptFlags + -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +boolOpt' = Command.boolOpt' flagToMaybe Flag + +trueArg, falseArg :: MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +trueArg sfT lfT = boolOpt' (sfT, lfT) ([], []) sfT lfT +falseArg sfF lfF = boolOpt' ([], []) (sfF, lfF) sfF lfF + +reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description -> + (b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b +reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList + +optionDistPref :: (flags -> Flag FilePath) + -> (Flag FilePath -> flags -> flags) + -> ShowOrParseArgs + -> OptionField flags +optionDistPref get set = \showOrParseArgs -> + option "" (distPrefFlagName showOrParseArgs) + ( "The directory where Cabal puts generated build files " + ++ "(default " ++ defaultDistPref ++ ")") + get set + (reqArgFlag "DIR") + where + distPrefFlagName ShowArgs = ["builddir"] + distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"] + +optionVerbosity :: (flags -> Flag Verbosity) + -> (Flag Verbosity -> flags -> flags) + -> OptionField flags +optionVerbosity get set = + option "v" ["verbose"] + "Control verbosity (n is 0--3, default verbosity level is 1)" + get set + (optArg "n" (fmap Flag flagToVerbosity) + (Flag verbose) -- default Value if no n is given + (fmap (Just . showForCabal) . flagToList)) + +optionNumJobs :: (flags -> Flag (Maybe Int)) + -> (Flag (Maybe Int) -> flags -> flags) + -> OptionField flags +optionNumJobs get set = + option "j" ["jobs"] + "Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)." + get set + (optArg "NUM" (fmap Flag numJobsParser) + (Flag Nothing) + (map (Just . maybe "$ncpus" show) . flagToList)) + where + numJobsParser :: ReadE (Maybe Int) + numJobsParser = ReadE $ \s -> + case s of + "$ncpus" -> Right Nothing + _ -> case reads s of + [(n, "")] + | n < 1 -> Left "The number of jobs should be 1 or more." + | otherwise -> Right (Just n) + _ -> Left "The jobs value should be a number or '$ncpus'" + +-- ------------------------------------------------------------ +-- * Other Utils +-- ------------------------------------------------------------ + +readPToMaybe :: Parse.ReadP a a -> String -> Maybe a +readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str + , all isSpace s ] + +-- | Arguments to pass to a @configure@ script, e.g. generated by +-- @autoconf@. 
+configureArgs :: Bool -> ConfigFlags -> [String] +configureArgs bcHack flags + = hc_flag + ++ optFlag "with-hc-pkg" configHcPkg + ++ optFlag' "prefix" prefix + ++ optFlag' "bindir" bindir + ++ optFlag' "libdir" libdir + ++ optFlag' "libexecdir" libexecdir + ++ optFlag' "datadir" datadir + ++ optFlag' "sysconfdir" sysconfdir + ++ configConfigureArgs flags + where + hc_flag = case (configHcFlavor flags, configHcPath flags) of + (_, Flag hc_path) -> [hc_flag_name ++ hc_path] + (Flag hc, NoFlag) -> [hc_flag_name ++ display hc] + (NoFlag,NoFlag) -> [] + hc_flag_name + --TODO kill off thic bc hack when defaultUserHooks is removed. + | bcHack = "--with-hc=" + | otherwise = "--with-compiler=" + optFlag name config_field = case config_field flags of + Flag p -> ["--" ++ name ++ "=" ++ p] + NoFlag -> [] + optFlag' name config_field = optFlag name (fmap fromPathTemplate + . config_field + . configInstallDirs) + +configureCCompiler :: Verbosity -> ProgramConfiguration + -> IO (FilePath, [String]) +configureCCompiler verbosity lbi = configureProg verbosity lbi gccProgram + +configureLinker :: Verbosity -> ProgramConfiguration -> IO (FilePath, [String]) +configureLinker verbosity lbi = configureProg verbosity lbi ldProgram + +configureProg :: Verbosity -> ProgramConfiguration -> Program + -> IO (FilePath, [String]) +configureProg verbosity programConfig prog = do + (p, _) <- requireProgram verbosity prog programConfig + let pInv = programInvocation p [] + return (progInvokePath pInv, progInvokeArgs pInv) + +-- | Helper function to split a string into a list of arguments. +-- It's supposed to handle quoted things sensibly, eg: +-- +-- > splitArgs "--foo=\"C:\Program Files\Bar\" --baz" +-- > = ["--foo=C:\Program Files\Bar", "--baz"] +-- +splitArgs :: String -> [String] +splitArgs = space [] + where + space :: String -> String -> [String] + space w [] = word w [] + space w ( c :s) + | isSpace c = word w (space [] s) + space w ('"':s) = string w s + space w s = nonstring w s + + string :: String -> String -> [String] + string w [] = word w [] + string w ('"':s) = space w s + string w ( c :s) = string (c:w) s + + nonstring :: String -> String -> [String] + nonstring w [] = word w [] + nonstring w ('"':s) = string w s + nonstring w ( c :s) = space (c:w) s + + word [] s = s + word w s = reverse w : s + +-- The test cases kinda have to be rewritten from the ground up... :/ +--hunitTests :: [Test] +--hunitTests = +-- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)] +-- (flags, commands', unkFlags, ers) +-- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"] +-- in [TestLabel "very basic option parsing" $ TestList [ +-- "getOpt flags" ~: "failed" ~: +-- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag, +-- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag] +-- ~=? flags, +-- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands', +-- "getOpt unknown opts" ~: "failed" ~: +-- ["--unknown1", "--unknown2"] ~=? unkFlags, +-- "getOpt errors" ~: "failed" ~: [] ~=? ers], +-- +-- TestLabel "test location of various compilers" $ TestList +-- ["configure parsing for prefix and compiler flag" ~: "failed" ~: +-- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), [])) +-- ~=? 
(parseArgs ["--prefix=/usr/local", "--"++name, "configure"]) +-- | (name, comp) <- m], +-- +-- TestLabel "find the package tool" $ TestList +-- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~: +-- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), [])) +-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, +-- "--with-compiler=/foo/comp", "configure"]) +-- | (name, comp) <- m], +-- +-- TestLabel "simpler commands" $ TestList +-- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag]) +-- | (flag, flagCmd) <- [("build", BuildCmd), +-- ("install", InstallCmd Nothing False), +-- ("sdist", SDistCmd), +-- ("register", RegisterCmd False)] +-- ] +-- ] + +{- Testing ideas: + * IO to look for hugs and hugs-pkg (which hugs, etc) + * quickCheck to test permutations of arguments + * what other options can we over-ride with a command-line flag? +-} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/SrcDist.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/SrcDist.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/SrcDist.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,477 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.SrcDist +-- Copyright : Simon Marlow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This handles the @sdist@ command. The module exports an 'sdist' action but +-- also some of the phases that make it up so that other tools can use just the +-- bits they need. In particular the preparation of the tree of files to go +-- into the source tarball is separated from actually building the source +-- tarball. +-- +-- The 'createArchive' action uses the external @tar@ program and assumes that +-- it accepts the @-z@ flag. Neither of these assumptions are valid on Windows. +-- The 'sdist' action now also does some distribution QA checks. + +-- NOTE: FIX: we don't have a great way of testing this module, since +-- we can't easily look inside a tarball once its created. 
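A minimal sketch of driving the separable phases this module exports (the same `prepareTree` and `createArchive` defined below), assuming a parsed `pkg_descr`, an optional configured `mb_lbi`, default verbosity, no extra preprocessors, and made-up staging directories; it is illustrative only, not part of the patched source:

    import Distribution.Package                (packageId)
    import Distribution.PackageDescription     (PackageDescription)
    import Distribution.Simple.LocalBuildInfo  (LocalBuildInfo)
    import Distribution.Simple.SrcDist         (createArchive, prepareTree)
    import Distribution.Text                   (display)
    import Distribution.Verbosity              (normal)
    import System.FilePath                     ((</>))

    -- Hypothetical driver: stage the sources under a <pkgid>-named directory,
    -- then pack that directory into dist/<pkgid>.tar.gz and return the path.
    sdistByHand :: PackageDescription -> Maybe LocalBuildInfo -> IO FilePath
    sdistByHand pkg_descr mb_lbi = do
      let stageDir = "dist" </> "sdist-stage"                  -- made-up scratch dir
          treeDir  = stageDir </> display (packageId pkg_descr)
      prepareTree normal pkg_descr mb_lbi treeDir []           -- no extra preprocessors
      createArchive normal pkg_descr mb_lbi stageDir "dist"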
+ +module Distribution.Simple.SrcDist ( + -- * The top level action + sdist, + + -- ** Parts of 'sdist' + printPackageProblems, + prepareTree, + createArchive, + + -- ** Snapshots + prepareSnapshotTree, + snapshotPackage, + snapshotVersion, + dateToSnapshotNumber, + + -- * Extracting the source files + listPackageSources + + ) where + +import Distribution.PackageDescription hiding (Flag) +import Distribution.PackageDescription.Check hiding (doesFileExist) +import Distribution.Package +import Distribution.ModuleName +import qualified Distribution.ModuleName as ModuleName +import Distribution.Version +import Distribution.Simple.Utils +import Distribution.Simple.Setup +import Distribution.Simple.PreProcess +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Program +import Distribution.Text +import Distribution.Verbosity + +import Control.Monad(when, unless, forM) +import Data.Char (toLower) +import Data.List (partition, isPrefixOf) +import qualified Data.Map as Map +import Data.Maybe (isNothing, catMaybes) +import Data.Time (UTCTime, getCurrentTime, toGregorian, utctDay) +import System.Directory ( doesFileExist ) +import System.IO (IOMode(WriteMode), hPutStrLn, withFile) +import System.FilePath + ( (), (<.>), dropExtension, isAbsolute ) + +-- |Create a source distribution. +sdist :: PackageDescription -- ^information from the tarball + -> Maybe LocalBuildInfo -- ^Information from configure + -> SDistFlags -- ^verbosity & snapshot + -> (FilePath -> FilePath) -- ^build prefix (temp dir) + -> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes) + -> IO () +sdist pkg mb_lbi flags mkTmpDir pps = + + -- When given --list-sources, just output the list of sources to a file. + case (sDistListSources flags) of + Flag path -> withFile path WriteMode $ \outHandle -> do + (ordinary, maybeExecutable) <- listPackageSources verbosity pkg pps + mapM_ (hPutStrLn outHandle) ordinary + mapM_ (hPutStrLn outHandle) maybeExecutable + notice verbosity $ "List of package sources written to file '" + ++ path ++ "'" + NoFlag -> do + -- do some QA + printPackageProblems verbosity pkg + + when (isNothing mb_lbi) $ + warn verbosity "Cannot run preprocessors. Run 'configure' command first." + + date <- getCurrentTime + let pkg' | snapshot = snapshotPackage date pkg + | otherwise = pkg + + case flagToMaybe (sDistDirectory flags) of + Just targetDir -> do + generateSourceDir targetDir pkg' + info verbosity $ "Source directory created: " ++ targetDir + + Nothing -> do + createDirectoryIfMissingVerbose verbosity True tmpTargetDir + withTempDirectory verbosity tmpTargetDir "sdist." $ \tmpDir -> do + let targetDir = tmpDir tarBallName pkg' + generateSourceDir targetDir pkg' + targzFile <- createArchive verbosity pkg' mb_lbi tmpDir targetPref + notice verbosity $ "Source tarball created: " ++ targzFile + + where + generateSourceDir targetDir pkg' = do + + setupMessage verbosity "Building source dist for" (packageId pkg') + prepareTree verbosity pkg' mb_lbi targetDir pps + when snapshot $ + overwriteSnapshotPackageDesc verbosity pkg' targetDir + + verbosity = fromFlag (sDistVerbosity flags) + snapshot = fromFlag (sDistSnapshot flags) + + distPref = fromFlag $ sDistDistPref flags + targetPref = distPref + tmpTargetDir = mkTmpDir distPref + +-- | List all source files of a package. Returns a tuple of lists: first +-- component is a list of ordinary files, second one is a list of those files +-- that may be executable. 
+listPackageSources :: Verbosity -- ^ verbosity + -> PackageDescription -- ^ info from the cabal file + -> [PPSuffixHandler] -- ^ extra preprocessors (include + -- suffixes) + -> IO ([FilePath], [FilePath]) +listPackageSources verbosity pkg_descr0 pps = do + -- Call helpers that actually do all work. + ordinary <- listPackageSourcesOrdinary verbosity pkg_descr pps + maybeExecutable <- listPackageSourcesMaybeExecutable pkg_descr + return (ordinary, maybeExecutable) + where + pkg_descr = filterAutogenModule pkg_descr0 + +-- | List those source files that may be executable (e.g. the configure script). +listPackageSourcesMaybeExecutable :: PackageDescription -> IO [FilePath] +listPackageSourcesMaybeExecutable pkg_descr = + -- Extra source files. + fmap concat . forM (extraSrcFiles pkg_descr) $ \fpath -> matchFileGlob fpath + +-- | List those source files that should be copied with ordinary permissions. +listPackageSourcesOrdinary :: Verbosity + -> PackageDescription + -> [PPSuffixHandler] + -> IO [FilePath] +listPackageSourcesOrdinary verbosity pkg_descr pps = + fmap concat . sequence $ + [ + -- Library sources. + withAllLib $ \Library { exposedModules = modules, libBuildInfo = libBi } -> + allSourcesBuildInfo libBi pps modules + + -- Executables sources. + , fmap concat + . withAllExe $ \Executable { modulePath = mainPath, buildInfo = exeBi } -> do + biSrcs <- allSourcesBuildInfo exeBi pps [] + mainSrc <- findMainExeFile exeBi pps mainPath + return (mainSrc:biSrcs) + + -- Test suites sources. + , fmap concat + . withAllTest $ \t -> do + let bi = testBuildInfo t + case testInterface t of + TestSuiteExeV10 _ mainPath -> do + biSrcs <- allSourcesBuildInfo bi pps [] + srcMainFile <- do + ppFile <- findFileWithExtension (ppSuffixes pps) + (hsSourceDirs bi) (dropExtension mainPath) + case ppFile of + Nothing -> findFile (hsSourceDirs bi) mainPath + Just pp -> return pp + return (srcMainFile:biSrcs) + TestSuiteLibV09 _ m -> + allSourcesBuildInfo bi pps [m] + TestSuiteUnsupported tp -> die $ "Unsupported test suite type: " + ++ show tp + + -- Benchmarks sources. + , fmap concat + . withAllBenchmark $ \bm -> do + let bi = benchmarkBuildInfo bm + case benchmarkInterface bm of + BenchmarkExeV10 _ mainPath -> do + biSrcs <- allSourcesBuildInfo bi pps [] + srcMainFile <- do + ppFile <- findFileWithExtension (ppSuffixes pps) + (hsSourceDirs bi) (dropExtension mainPath) + case ppFile of + Nothing -> findFile (hsSourceDirs bi) mainPath + Just pp -> return pp + return (srcMainFile:biSrcs) + BenchmarkUnsupported tp -> die $ "Unsupported benchmark type: " + ++ show tp + + -- Data files. + , fmap concat + . forM (dataFiles pkg_descr) $ \filename -> + matchFileGlob (dataDir pkg_descr filename) + + -- Extra doc files. + , fmap concat + . forM (extraDocFiles pkg_descr) $ \ filename -> + matchFileGlob filename + + -- License file(s). + , return (licenseFiles pkg_descr) + + -- Install-include files. + , withAllLib $ \ l -> do + let lbi = libBuildInfo l + relincdirs = "." : filter (not.isAbsolute) (includeDirs lbi) + mapM (fmap snd . findIncludeFile relincdirs) (installIncludes lbi) + + -- Setup script, if it exists. + , fmap (maybe [] (\f -> [f])) $ findSetupFile "" + + -- The .cabal file itself. 
+  , fmap (\d -> [d]) (defaultPackageDesc verbosity)
+
+  ]
+  where
+    -- We have to deal with all libs and executables, so we have local
+    -- versions of these functions that ignore the 'buildable' attribute:
+    withAllLib       action = maybe (return []) action (library pkg_descr)
+    withAllExe       action = mapM action (executables pkg_descr)
+    withAllTest      action = mapM action (testSuites pkg_descr)
+    withAllBenchmark action = mapM action (benchmarks pkg_descr)
+
+
+-- |Prepare a directory tree of source files.
+prepareTree :: Verbosity          -- ^verbosity
+            -> PackageDescription -- ^info from the cabal file
+            -> Maybe LocalBuildInfo
+            -> FilePath           -- ^source tree to populate
+            -> [PPSuffixHandler]  -- ^extra preprocessors (includes suffixes)
+            -> IO ()
+prepareTree verbosity pkg_descr0 mb_lbi targetDir pps = do
+  -- If the package was configured then we can run platform-independent
+  -- pre-processors and include those generated files.
+  case mb_lbi of
+    Just lbi | not (null pps) -> do
+      let lbi' = lbi{ buildDir = targetDir </> buildDir lbi }
+      withAllComponentsInBuildOrder pkg_descr lbi' $ \c _ ->
+        preprocessComponent pkg_descr c lbi' True verbosity pps
+    _ -> return ()
+
+  (ordinary, mExecutable) <- listPackageSources verbosity pkg_descr0 pps
+  installOrdinaryFiles verbosity targetDir (zip (repeat []) ordinary)
+  installMaybeExecutableFiles verbosity targetDir (zip (repeat []) mExecutable)
+  maybeCreateDefaultSetupScript targetDir
+
+  where
+    pkg_descr = filterAutogenModule pkg_descr0
+
+-- | Find the setup script file, if it exists.
+findSetupFile :: FilePath -> IO (Maybe FilePath)
+findSetupFile targetDir = do
+  hsExists <- doesFileExist setupHs
+  lhsExists <- doesFileExist setupLhs
+  if hsExists
+    then return (Just setupHs)
+    else if lhsExists
+           then return (Just setupLhs)
+           else return Nothing
+ where
+   setupHs  = targetDir </> "Setup.hs"
+   setupLhs = targetDir </> "Setup.lhs"
+
+-- | Create a default setup script in the target directory, if it doesn't exist.
+maybeCreateDefaultSetupScript :: FilePath -> IO ()
+maybeCreateDefaultSetupScript targetDir = do
+  mSetupFile <- findSetupFile targetDir
+  case mSetupFile of
+    Just _setupFile -> return ()
+    Nothing         -> do
+      writeUTF8File (targetDir </> "Setup.hs") $ unlines [
+        "import Distribution.Simple",
+        "main = defaultMain"]
+
+-- | Find the main executable file.
+findMainExeFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath
+findMainExeFile exeBi pps mainPath = do
+  ppFile <- findFileWithExtension (ppSuffixes pps) (hsSourceDirs exeBi)
+            (dropExtension mainPath)
+  case ppFile of
+    Nothing -> findFile (hsSourceDirs exeBi) mainPath
+    Just pp -> return pp
+
+-- | Given a list of include paths, try to find the include file named
+-- @f@. Return the name of the file and the full path, or exit with error if
+-- there's no such file.
+findIncludeFile :: [FilePath] -> String -> IO (String, FilePath)
+findIncludeFile [] f = die ("can't find include file " ++ f)
+findIncludeFile (d:ds) f = do
+  let path = (d </> f)
+  b <- doesFileExist path
+  if b then return (f,path) else findIncludeFile ds f
+
+-- | Remove the auto-generated module ('Paths_*') from 'exposed-modules' and
+-- 'other-modules'.
+filterAutogenModule :: PackageDescription -> PackageDescription +filterAutogenModule pkg_descr0 = mapLib filterAutogenModuleLib $ + mapAllBuildInfo filterAutogenModuleBI pkg_descr0 + where + mapLib f pkg = pkg { library = fmap f (library pkg) } + filterAutogenModuleLib lib = lib { + exposedModules = filter (/=autogenModule) (exposedModules lib) + } + filterAutogenModuleBI bi = bi { + otherModules = filter (/=autogenModule) (otherModules bi) + } + autogenModule = autogenModuleName pkg_descr0 + +-- | Prepare a directory tree of source files for a snapshot version. +-- It is expected that the appropriate snapshot version has already been set +-- in the package description, eg using 'snapshotPackage' or 'snapshotVersion'. +-- +prepareSnapshotTree :: Verbosity -- ^verbosity + -> PackageDescription -- ^info from the cabal file + -> Maybe LocalBuildInfo + -> FilePath -- ^source tree to populate + -> [PPSuffixHandler] -- ^extra preprocessors (includes + -- suffixes) + -> IO () +prepareSnapshotTree verbosity pkg mb_lbi targetDir pps = do + prepareTree verbosity pkg mb_lbi targetDir pps + overwriteSnapshotPackageDesc verbosity pkg targetDir + +overwriteSnapshotPackageDesc :: Verbosity -- ^verbosity + -> PackageDescription -- ^info from the cabal file + -> FilePath -- ^source tree + -> IO () +overwriteSnapshotPackageDesc verbosity pkg targetDir = do + -- We could just writePackageDescription targetDescFile pkg_descr, + -- but that would lose comments and formatting. + descFile <- defaultPackageDesc verbosity + withUTF8FileContents descFile $ + writeUTF8File (targetDir descFile) + . unlines . map (replaceVersion (packageVersion pkg)) . lines + + where + replaceVersion :: Version -> String -> String + replaceVersion version line + | "version:" `isPrefixOf` map toLower line + = "version: " ++ display version + | otherwise = line + +-- | Modifies a 'PackageDescription' by appending a snapshot number +-- corresponding to the given date. +-- +snapshotPackage :: UTCTime -> PackageDescription -> PackageDescription +snapshotPackage date pkg = + pkg { + package = pkgid { pkgVersion = snapshotVersion date (pkgVersion pkgid) } + } + where pkgid = packageId pkg + +-- | Modifies a 'Version' by appending a snapshot number corresponding +-- to the given date. +-- +snapshotVersion :: UTCTime -> Version -> Version +snapshotVersion date version = version { + versionBranch = versionBranch version + ++ [dateToSnapshotNumber date] + } + +-- | Given a date produce a corresponding integer representation. +-- For example given a date @18/03/2008@ produce the number @20080318@. +-- +dateToSnapshotNumber :: UTCTime -> Int +dateToSnapshotNumber date = case toGregorian (utctDay date) of + (year, month, day) -> + fromIntegral year * 10000 + + month * 100 + + day + +-- | Callback type for use by sdistWith. +type CreateArchiveFun = Verbosity -- ^verbosity + -> PackageDescription -- ^info from cabal file + -> Maybe LocalBuildInfo -- ^info from configure + -> FilePath -- ^source tree to archive + -> FilePath -- ^name of archive to create + -> IO FilePath + +-- | Create an archive from a tree of source files, and clean up the tree. 
+createArchive :: CreateArchiveFun +createArchive verbosity pkg_descr mb_lbi tmpDir targetPref = do + let tarBallFilePath = targetPref tarBallName pkg_descr <.> "tar.gz" + + (tarProg, _) <- requireProgram verbosity tarProgram + (maybe defaultProgramConfiguration withPrograms mb_lbi) + let formatOptSupported = maybe False (== "YES") $ + Map.lookup "Supports --format" + (programProperties tarProg) + runProgram verbosity tarProg $ + -- Hmm: I could well be skating on thinner ice here by using the -C option + -- (=> seems to be supported at least by GNU and *BSD tar) [The + -- prev. solution used pipes and sub-command sequences to set up the paths + -- correctly, which is problematic in a Windows setting.] + ["-czf", tarBallFilePath, "-C", tmpDir] + ++ (if formatOptSupported then ["--format", "ustar"] else []) + ++ [tarBallName pkg_descr] + return tarBallFilePath + +-- | Given a buildinfo, return the names of all source files. +allSourcesBuildInfo :: BuildInfo + -> [PPSuffixHandler] -- ^ Extra preprocessors + -> [ModuleName] -- ^ Exposed modules + -> IO [FilePath] +allSourcesBuildInfo bi pps modules = do + let searchDirs = hsSourceDirs bi + sources <- fmap concat $ sequence $ + [ let file = ModuleName.toFilePath module_ + in findAllFilesWithExtension suffixes searchDirs file + >>= nonEmpty (notFound module_) return + | module_ <- modules ++ otherModules bi ] + bootFiles <- sequence + [ let file = ModuleName.toFilePath module_ + fileExts = ["hs-boot", "lhs-boot"] + in findFileWithExtension fileExts (hsSourceDirs bi) file + | module_ <- modules ++ otherModules bi ] + + return $ sources ++ catMaybes bootFiles ++ cSources bi ++ jsSources bi + + where + nonEmpty x _ [] = x + nonEmpty _ f xs = f xs + suffixes = ppSuffixes pps ++ ["hs", "lhs"] + notFound m = die $ "Error: Could not find module: " ++ display m + ++ " with any suffix: " ++ show suffixes + + +printPackageProblems :: Verbosity -> PackageDescription -> IO () +printPackageProblems verbosity pkg_descr = do + ioChecks <- checkPackageFiles pkg_descr "." + let pureChecks = checkConfiguredPackage pkg_descr + isDistError (PackageDistSuspicious _) = False + isDistError (PackageDistSuspiciousWarn _) = False + isDistError _ = True + (errors, warnings) = partition isDistError (pureChecks ++ ioChecks) + unless (null errors) $ + notice verbosity $ "Distribution quality errors:\n" + ++ unlines (map explanation errors) + unless (null warnings) $ + notice verbosity $ "Distribution quality warnings:\n" + ++ unlines (map explanation warnings) + unless (null errors) $ + notice verbosity + "Note: the public hackage server would reject this package." + +------------------------------------------------------------ + +-- | The name of the tarball without extension +-- +tarBallName :: PackageDescription -> String +tarBallName = display . 
packageId + +mapAllBuildInfo :: (BuildInfo -> BuildInfo) + -> (PackageDescription -> PackageDescription) +mapAllBuildInfo f pkg = pkg { + library = fmap mapLibBi (library pkg), + executables = fmap mapExeBi (executables pkg), + testSuites = fmap mapTestBi (testSuites pkg), + benchmarks = fmap mapBenchBi (benchmarks pkg) + } + where + mapLibBi lib = lib { libBuildInfo = f (libBuildInfo lib) } + mapExeBi exe = exe { buildInfo = f (buildInfo exe) } + mapTestBi t = t { testBuildInfo = f (testBuildInfo t) } + mapBenchBi bm = bm { benchmarkBuildInfo = f (benchmarkBuildInfo bm) } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/ExeV10.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/ExeV10.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/ExeV10.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/ExeV10.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,167 @@ +module Distribution.Simple.Test.ExeV10 + ( runTest + ) where + +import Distribution.Compat.CreatePipe +import Distribution.Compat.Environment +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Build.PathsModule +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Text +import Distribution.Verbosity + +import Control.Concurrent (forkIO) +import Control.Monad ( unless, void, when ) +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist + , getCurrentDirectory, removeDirectoryRecursive ) +import System.Exit ( ExitCode(..) ) +import System.FilePath ( (), (<.>) ) +import System.IO ( hGetContents, hPutStr, stdout, stderr ) + +runTest :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> TestFlags + -> PD.TestSuite + -> IO TestSuiteLog +runTest pkg_descr lbi flags suite = do + let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi + way = guessWay lbi + tixDir_ = tixDir distPref way $ PD.testName suite + + pwd <- getCurrentDirectory + existingEnv <- getEnvironment + + let cmd = LBI.buildDir lbi PD.testName suite + PD.testName suite <.> exeExtension + -- Check that the test executable exists. + exists <- doesFileExist cmd + unless exists $ die $ "Error: Could not find test program \"" ++ cmd + ++ "\". Did you build the package first?" + + -- Remove old .tix files if appropriate. + unless (fromFlag $ testKeepTix flags) $ do + exists' <- doesDirectoryExist tixDir_ + when exists' $ removeDirectoryRecursive tixDir_ + + -- Create directory for HPC files. 
+ createDirectoryIfMissing True tixDir_ + + -- Write summary notices indicating start of test suite + notice verbosity $ summarizeSuiteStart $ PD.testName suite + + (wOut, wErr, logText) <- case details of + Direct -> return (stdout, stderr, "") + _ -> do + (rOut, wOut) <- createPipe + + -- Read test executable's output lazily (returns immediately) + logText <- hGetContents rOut + -- Force the IO manager to drain the test output pipe + void $ forkIO $ length logText `seq` return () + + -- '--show-details=streaming': print the log output in another thread + when (details == Streaming) $ void $ forkIO $ hPutStr stdout logText + + return (wOut, wOut, logText) + + -- Run the test executable + let opts = map (testOption pkg_descr lbi suite) + (testOptions flags) + dataDirPath = pwd </> PD.dataDir pkg_descr + tixFile = pwd </> tixFilePath distPref way (PD.testName suite) + pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) + : existingEnv + shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv + + -- Add (DY)LD_LIBRARY_PATH if needed + shellEnv' <- if LBI.withDynExe lbi + then do let (Platform _ os) = LBI.hostPlatform lbi + clbi = LBI.getComponentLocalBuildInfo lbi + (LBI.CTestName (PD.testName suite)) + paths <- LBI.depLibraryPaths True False lbi clbi + return (addLibraryPath os paths shellEnv) + else return shellEnv + + exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv') + -- these handles are automatically closed + Nothing (Just wOut) (Just wErr) + + -- Generate TestSuiteLog from executable exit code and a machine- + -- readable test log. + let suiteLog = buildLog exit + + -- Write summary notice to log file indicating start of test suite + appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite + + -- Append contents of temporary log file to the final human- + -- readable log file + appendFile (logFile suiteLog) logText + + -- Write end-of-suite summary notice to log file + appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog + + -- Show the contents of the human-readable log file on the terminal + -- if there is a failure and/or detailed output is requested + let whenPrinting = when $ + ( details == Always || + details == Failures && not (suitePassed $ testLogs suiteLog)) + -- verbosity overrides show-details + && verbosity >= normal + whenPrinting $ putStr $ unlines $ lines logText + + -- Write summary notice to terminal indicating end of test suite + notice verbosity $ summarizeSuiteFinish suiteLog + + when isCoverageEnabled $ + markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite + + return suiteLog + where + distPref = fromFlag $ testDistPref flags + verbosity = fromFlag $ testVerbosity flags + details = fromFlag $ testShowDetails flags + testLogDir = distPref </> "test" + + buildLog exit = + let r = case exit of + ExitSuccess -> Pass + ExitFailure c -> Fail $ "exit code: " ++ show c + n = PD.testName suite + l = TestLog + { testName = n + , testOptionsReturned = [] + , testResult = r + } + in TestSuiteLog + { testSuiteName = n + , testLogs = l + , logFile = + testLogDir + </> testSuiteLogPath (fromFlag $ testHumanLog flags) + pkg_descr lbi n l + } + +-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't +-- necessarily a path. 
+testOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.TestSuite + -> PathTemplate + -> String +testOption pkg_descr lbi suite template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(TestSuiteNameVar, toPathTemplate $ PD.testName suite)] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/LibV09.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/LibV09.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/LibV09.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/LibV09.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,260 @@ +module Distribution.Simple.Test.LibV09 + ( runTest + -- Test stub + , simpleTestStub + , stubFilePath, stubMain, stubName, stubWriteLog + , writeSimpleTestStub + ) where + +import Distribution.Compat.CreatePipe +import Distribution.Compat.Environment +import Distribution.Compat.Internal.TempFile +import Distribution.ModuleName +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Build.PathsModule +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Text +import Distribution.Verbosity + +import Control.Exception ( bracket ) +import Control.Monad ( when, unless ) +import Data.Maybe ( mapMaybe ) +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist + , getCurrentDirectory, removeDirectoryRecursive, removeFile + , setCurrentDirectory ) +import System.Exit ( ExitCode(..), exitWith ) +import System.FilePath ( (), (<.>) ) +import System.IO ( hClose, hGetContents, hPutStr ) +import System.Process (StdStream(..), waitForProcess) + +runTest :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> TestFlags + -> PD.TestSuite + -> IO TestSuiteLog +runTest pkg_descr lbi flags suite = do + let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi + way = guessWay lbi + + pwd <- getCurrentDirectory + existingEnv <- getEnvironment + + let cmd = LBI.buildDir lbi stubName suite + stubName suite <.> exeExtension + -- Check that the test executable exists. + exists <- doesFileExist cmd + unless exists $ die $ "Error: Could not find test program \"" ++ cmd + ++ "\". Did you build the package first?" + + -- Remove old .tix files if appropriate. + unless (fromFlag $ testKeepTix flags) $ do + let tDir = tixDir distPref way $ PD.testName suite + exists' <- doesDirectoryExist tDir + when exists' $ removeDirectoryRecursive tDir + + -- Create directory for HPC files. 
+ createDirectoryIfMissing True $ tixDir distPref way $ PD.testName suite + + -- Write summary notices indicating start of test suite + notice verbosity $ summarizeSuiteStart $ PD.testName suite + + suiteLog <- bracket openCabalTemp deleteIfExists $ \tempLog -> do + + (rOut, wOut) <- createPipe + + -- Run test executable + (Just wIn, _, _, process) <- do + let opts = map (testOption pkg_descr lbi suite) $ testOptions flags + dataDirPath = pwd PD.dataDir pkg_descr + tixFile = pwd tixFilePath distPref way (PD.testName suite) + pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) + : existingEnv + shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] + ++ pkgPathEnv + -- Add (DY)LD_LIBRARY_PATH if needed + shellEnv' <- if LBI.withDynExe lbi + then do + let (Platform _ os) = LBI.hostPlatform lbi + clbi = LBI.getComponentLocalBuildInfo + lbi + (LBI.CTestName + (PD.testName suite)) + paths <- LBI.depLibraryPaths + True False lbi clbi + return (addLibraryPath os paths shellEnv) + else return shellEnv + createProcessWithEnv verbosity cmd opts Nothing (Just shellEnv') + -- these handles are closed automatically + CreatePipe (UseHandle wOut) (UseHandle wOut) + + hPutStr wIn $ show (tempLog, PD.testName suite) + hClose wIn + + -- Append contents of temporary log file to the final human- + -- readable log file + logText <- hGetContents rOut + -- Force the IO manager to drain the test output pipe + length logText `seq` return () + + exitcode <- waitForProcess process + unless (exitcode == ExitSuccess) $ do + debug verbosity $ cmd ++ " returned " ++ show exitcode + + -- Generate final log file name + let finalLogName l = testLogDir + testSuiteLogPath + (fromFlag $ testHumanLog flags) pkg_descr lbi + (testSuiteName l) (testLogs l) + -- Generate TestSuiteLog from executable exit code and a machine- + -- readable test log + suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . read) + $ readFile tempLog + + -- Write summary notice to log file indicating start of test suite + appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite + + appendFile (logFile suiteLog) logText + + -- Write end-of-suite summary notice to log file + appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog + + -- Show the contents of the human-readable log file on the terminal + -- if there is a failure and/or detailed output is requested + let details = fromFlag $ testShowDetails flags + whenPrinting = when $ (details > Never) + && (not (suitePassed $ testLogs suiteLog) || details == Always) + && verbosity >= normal + whenPrinting $ putStr $ unlines $ lines logText + + return suiteLog + + -- Write summary notice to terminal indicating end of test suite + notice verbosity $ summarizeSuiteFinish suiteLog + + when isCoverageEnabled $ + markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite + + return suiteLog + where + deleteIfExists file = do + exists <- doesFileExist file + when exists $ removeFile file + + testLogDir = distPref "test" + openCabalTemp = do + (f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log" + hClose h >> return f + + distPref = fromFlag $ testDistPref flags + verbosity = fromFlag $ testVerbosity flags + +-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't +-- necessarily a path. 
+testOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.TestSuite + -> PathTemplate + -> String +testOption pkg_descr lbi suite template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(TestSuiteNameVar, toPathTemplate $ PD.testName suite)] + +-- Test stub ---------- + +-- | The name of the stub executable associated with a library 'TestSuite'. +stubName :: PD.TestSuite -> FilePath +stubName t = PD.testName t ++ "Stub" + +-- | The filename of the source file for the stub executable associated with a +-- library 'TestSuite'. +stubFilePath :: PD.TestSuite -> FilePath +stubFilePath t = stubName t <.> "hs" + +-- | Write the source file for a library 'TestSuite' stub executable. +writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub + -- is being created + -> FilePath -- ^ path to directory where stub source + -- should be located + -> IO () +writeSimpleTestStub t dir = do + createDirectoryIfMissing True dir + let filename = dir stubFilePath t + PD.TestSuiteLibV09 _ m = PD.testInterface t + writeFile filename $ simpleTestStub m + +-- | Source code for library test suite stub executable +simpleTestStub :: ModuleName -> String +simpleTestStub m = unlines + [ "module Main ( main ) where" + , "import Distribution.Simple.Test.LibV09 ( stubMain )" + , "import " ++ show (disp m) ++ " ( tests )" + , "main :: IO ()" + , "main = stubMain tests" + ] + +-- | Main function for test stubs. Once, it was written directly into the stub, +-- but minimizing the amount of code actually in the stub maximizes the number +-- of detectable errors when Cabal is compiled. +stubMain :: IO [Test] -> IO () +stubMain tests = do + (f, n) <- fmap read getContents + dir <- getCurrentDirectory + results <- tests >>= stubRunTests + setCurrentDirectory dir + stubWriteLog f n results + +-- | The test runner used in library "TestSuite" stub executables. Runs a list +-- of 'Test's. An executable calling this function is meant to be invoked as +-- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog', +-- provided by Cabal, is read from the standard input; it supplies the name of +-- the test suite and the location of the machine-readable test suite log file. +-- Human-readable log information is written to the standard output for capture +-- by the calling Cabal process. +stubRunTests :: [Test] -> IO TestLogs +stubRunTests tests = do + logs <- mapM stubRunTests' tests + return $ GroupLogs "Default" logs + where + stubRunTests' (Test t) = do + l <- run t >>= finish + summarizeTest normal Always l + return l + where + finish (Finished result) = + return TestLog + { testName = name t + , testOptionsReturned = defaultOptions t + , testResult = result + } + finish (Progress _ next) = next >>= finish + stubRunTests' g@(Group {}) = do + logs <- mapM stubRunTests' $ groupTests g + return $ GroupLogs (groupName g) logs + stubRunTests' (ExtraOptions _ t) = stubRunTests' t + maybeDefaultOption opt = + maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt + defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst + +-- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling +-- Cabal process to read. 
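-- Editor's note: illustrative sketch, not part of this patch.  It mirrors the
-- stdin handshake between Cabal and a library test stub implemented by
-- 'runTest' and 'stubMain' above; the temp-file name and suite name are made up.
stubHandshakeExample :: IO ()
stubHandshakeExample = do
  -- what Cabal writes to the stub's stdin (cf. 'hPutStr wIn' in 'runTest')
  let payload = show ("dist/test/cabal-test-1234.log", "my-tests")
  -- what the stub recovers with 'read' in 'stubMain'
  let (logPath, suiteName) = read payload :: (FilePath, String)
  putStrLn (suiteName ++ " will log to " ++ logPath)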
+stubWriteLog :: FilePath -> String -> TestLogs -> IO () +stubWriteLog f n logs = do + let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f } + writeFile (logFile testLog) $ show testLog + when (suiteError logs) $ exitWith $ ExitFailure 2 + when (suiteFailed logs) $ exitWith $ ExitFailure 1 + exitWith ExitSuccess diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/Log.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/Log.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test/Log.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test/Log.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,159 @@ +module Distribution.Simple.Test.Log + ( PackageLog(..) + , TestLogs(..) + , TestSuiteLog(..) + , countTestResults + , localPackageLog + , summarizePackage + , summarizeSuiteFinish, summarizeSuiteStart + , summarizeTest + , suiteError, suiteFailed, suitePassed + , testSuiteLogPath + ) where + +import Distribution.Package +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Compiler +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Verbosity + +import Control.Monad ( when ) +import Data.Char ( toUpper ) + +-- | Logs all test results for a package, broken down first by test suite and +-- then by test case. +data PackageLog = PackageLog + { package :: PackageId + , compiler :: CompilerId + , platform :: Platform + , testSuites :: [TestSuiteLog] + } + deriving (Read, Show, Eq) + +-- | A 'PackageLog' with package and platform information specified. +localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog +localPackageLog pkg_descr lbi = PackageLog + { package = PD.package pkg_descr + , compiler = compilerId $ LBI.compiler lbi + , platform = LBI.hostPlatform lbi + , testSuites = [] + } + +-- | Logs test suite results, itemized by test case. +data TestSuiteLog = TestSuiteLog + { testSuiteName :: String + , testLogs :: TestLogs + , logFile :: FilePath -- path to human-readable log file + } + deriving (Read, Show, Eq) + +data TestLogs + = TestLog + { testName :: String + , testOptionsReturned :: Options + , testResult :: Result + } + | GroupLogs String [TestLogs] + deriving (Read, Show, Eq) + +-- | Count the number of pass, fail, and error test results in a 'TestLogs' +-- tree. +countTestResults :: TestLogs + -> (Int, Int, Int) -- ^ Passes, fails, and errors, + -- respectively. +countTestResults = go (0, 0, 0) + where + go (p, f, e) (TestLog { testResult = r }) = + case r of + Pass -> (p + 1, f, e) + Fail _ -> (p, f + 1, e) + Error _ -> (p, f, e + 1) + go (p, f, e) (GroupLogs _ ts) = foldl go (p, f, e) ts + +-- | From a 'TestSuiteLog', determine if the test suite passed. +suitePassed :: TestLogs -> Bool +suitePassed l = + case countTestResults l of + (_, 0, 0) -> True + _ -> False + +-- | From a 'TestSuiteLog', determine if the test suite failed. +suiteFailed :: TestLogs -> Bool +suiteFailed l = + case countTestResults l of + (_, 0, _) -> False + _ -> True + +-- | From a 'TestSuiteLog', determine if the test suite encountered errors. 
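-- Editor's note: illustrative sketch, not part of this patch.  A tiny
-- 'TestLogs' tree showing how 'countTestResults' and the suite predicates
-- defined above behave; the test names are made up.
countExample :: (Int, Int, Int)
countExample = countTestResults logs    -- == (1, 1, 0): one pass, one fail, no errors
  where
    logs = GroupLogs "example"
             [ TestLog { testName = "t1", testOptionsReturned = [], testResult = Pass }
             , TestLog { testName = "t2", testOptionsReturned = [], testResult = Fail "oops" }
             ]
-- suitePassed logs == False, suiteFailed logs == True, suiteError logs == False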
+suiteError :: TestLogs -> Bool +suiteError l = + case countTestResults l of + (_, _, 0) -> False + _ -> True + +resultString :: TestLogs -> String +resultString l | suiteError l = "error" + | suiteFailed l = "fail" + | otherwise = "pass" + +testSuiteLogPath :: PathTemplate + -> PD.PackageDescription + -> LBI.LocalBuildInfo + -> String -- ^ test suite name + -> TestLogs -- ^ test suite results + -> FilePath +testSuiteLogPath template pkg_descr lbi test_name result = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) + ++ [ (TestSuiteNameVar, toPathTemplate test_name) + , (TestSuiteResultVar, toPathTemplate $ resultString result) + ] + +-- | Print a summary to the console after all test suites have been run +-- indicating the number of successful test suites and cases. Returns 'True' if +-- all test suites passed and 'False' otherwise. +summarizePackage :: Verbosity -> PackageLog -> IO Bool +summarizePackage verbosity packageLog = do + let counts = map (countTestResults . testLogs) $ testSuites packageLog + (passed, failed, errors) = foldl1 addTriple counts + totalCases = passed + failed + errors + passedSuites = length + $ filter (suitePassed . testLogs) + $ testSuites packageLog + totalSuites = length $ testSuites packageLog + notice verbosity $ show passedSuites ++ " of " ++ show totalSuites + ++ " test suites (" ++ show passed ++ " of " + ++ show totalCases ++ " test cases) passed." + return $! passedSuites == totalSuites + where + addTriple (p1, f1, e1) (p2, f2, e2) = (p1 + p2, f1 + f2, e1 + e2) + +-- | Print a summary of a single test case's result to the console, supressing +-- output for certain verbosity or test filter levels. +summarizeTest :: Verbosity -> TestShowDetails -> TestLogs -> IO () +summarizeTest _ _ (GroupLogs {}) = return () +summarizeTest verbosity details t = + when shouldPrint $ notice verbosity $ "Test case " ++ testName t + ++ ": " ++ show (testResult t) + where shouldPrint = (details > Never) && (notPassed || details == Always) + notPassed = testResult t /= Pass + +-- | Print a summary of the test suite's results on the console, suppressing +-- output for certain verbosity or test filter levels. +summarizeSuiteFinish :: TestSuiteLog -> String +summarizeSuiteFinish testLog = unlines + [ "Test suite " ++ testSuiteName testLog ++ ": " ++ resStr + , "Test suite logged to: " ++ logFile testLog + ] + where resStr = map toUpper (resultString $ testLogs testLog) + +summarizeSuiteStart :: String -> String +summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Test.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Test.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,130 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Test +-- Copyright : Thomas Tuegel 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into testing a built package. It performs the +-- \"@.\/setup test@\" action. 
It runs test suites designated in the package +-- description and reports on the results. + +module Distribution.Simple.Test + ( test + ) where + +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.UserHooks +import qualified Distribution.Simple.Test.ExeV10 as ExeV10 +import qualified Distribution.Simple.Test.LibV09 as LibV09 +import Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.TestSuite +import Distribution.Text + +import Control.Monad ( when, unless, filterM ) +import System.Directory + ( createDirectoryIfMissing, doesFileExist, getDirectoryContents + , removeFile ) +import System.Exit ( ExitCode(..), exitFailure, exitWith ) +import System.FilePath ( () ) + +-- |Perform the \"@.\/setup test@\" action. +test :: Args -- ^positional command-line arguments + -> PD.PackageDescription -- ^information from the .cabal file + -> LBI.LocalBuildInfo -- ^information from the configure step + -> TestFlags -- ^flags sent to test + -> IO () +test args pkg_descr lbi flags = do + let verbosity = fromFlag $ testVerbosity flags + machineTemplate = fromFlag $ testMachineLog flags + distPref = fromFlag $ testDistPref flags + testLogDir = distPref "test" + testNames = args + pkgTests = PD.testSuites pkg_descr + enabledTests = [ t | t <- pkgTests + , PD.testEnabled t + , PD.buildable (PD.testBuildInfo t) ] + + doTest :: (PD.TestSuite, Maybe TestSuiteLog) -> IO TestSuiteLog + doTest (suite, _) = + case PD.testInterface suite of + PD.TestSuiteExeV10 _ _ -> + ExeV10.runTest pkg_descr lbi flags suite + + PD.TestSuiteLibV09 _ _ -> + LibV09.runTest pkg_descr lbi flags suite + + _ -> return TestSuiteLog + { testSuiteName = PD.testName suite + , testLogs = TestLog + { testName = PD.testName suite + , testOptionsReturned = [] + , testResult = + Error $ "No support for running test suite type: " + ++ show (disp $ PD.testType suite) + } + , logFile = "" + } + + when (not $ PD.hasTests pkg_descr) $ do + notice verbosity "Package has no test suites." + exitWith ExitSuccess + + when (PD.hasTests pkg_descr && null enabledTests) $ + die $ "No test suites enabled. Did you remember to configure with " + ++ "\'--enable-tests\'?" + + testsToRun <- case testNames of + [] -> return $ zip enabledTests $ repeat Nothing + names -> flip mapM names $ \tName -> + let testMap = zip enabledNames enabledTests + enabledNames = map PD.testName enabledTests + allNames = map PD.testName pkgTests + in case lookup tName testMap of + Just t -> return (t, Nothing) + _ | tName `elem` allNames -> + die $ "Package configured with test suite " + ++ tName ++ " disabled." + | otherwise -> die $ "no such test: " ++ tName + + createDirectoryIfMissing True testLogDir + + -- Delete ordinary files from test log directory. + getDirectoryContents testLogDir + >>= filterM doesFileExist . map (testLogDir ) + >>= mapM_ removeFile + + let totalSuites = length testsToRun + notice verbosity $ "Running " ++ show totalSuites ++ " test suites..." 
+ suites <- mapM doTest testsToRun + let packageLog = (localPackageLog pkg_descr lbi) { testSuites = suites } + packageLogFile = () testLogDir + $ packageLogPath machineTemplate pkg_descr lbi + allOk <- summarizePackage verbosity packageLog + writeFile packageLogFile $ show packageLog + + let isCoverageEnabled = fromFlag $ configCoverage $ LBI.configFlags lbi + when isCoverageEnabled $ + markupPackage verbosity lbi distPref (display $ PD.package pkg_descr) $ + map fst testsToRun + + unless allOk exitFailure + +packageLogPath :: PathTemplate + -> PD.PackageDescription + -> LBI.LocalBuildInfo + -> FilePath +packageLogPath template pkg_descr lbi = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/UHC.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/UHC.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/UHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/UHC.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,287 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.UHC +-- Copyright : Andres Loeh 2009 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains most of the UHC-specific code for configuring, building +-- and installing packages. +-- +-- Thanks to the authors of the other implementation-specific files, in +-- particular to Isaac Jones, Duncan Coutts and Henning Thielemann, for +-- inspiration on how to design this module. 
+ +module Distribution.Simple.UHC ( + configure, getInstalledPackages, + buildLib, buildExe, installLib, registerPackage, inplacePackageDbPath + ) where + +import Distribution.Compat.ReadP +import Distribution.InstalledPackageInfo +import Distribution.Package hiding (installedUnitId) +import Distribution.PackageDescription +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler as C +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.PackageIndex +import Distribution.Simple.Program +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Verbosity +import Distribution.Version +import Distribution.System +import Language.Haskell.Extension + +import Control.Monad +import Data.List +import qualified Data.Map as M ( empty ) +import System.Directory +import System.FilePath + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration) +configure verbosity hcPath _hcPkgPath conf = do + + (_uhcProg, uhcVersion, conf') <- + requireProgramVersion verbosity uhcProgram + (orLaterVersion (Version [1,0,2] [])) + (userMaybeSpecifyPath "uhc" hcPath conf) + + let comp = Compiler { + compilerId = CompilerId UHC uhcVersion, + compilerAbiTag = C.NoAbiTag, + compilerCompat = [], + compilerLanguages = uhcLanguages, + compilerExtensions = uhcLanguageExtensions, + compilerProperties = M.empty + } + compPlatform = Nothing + return (comp, compPlatform, conf') + +uhcLanguages :: [(Language, C.Flag)] +uhcLanguages = [(Haskell98, "")] + +-- | The flags for the supported extensions. +uhcLanguageExtensions :: [(Extension, C.Flag)] +uhcLanguageExtensions = + let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), + (DisableExtension f, disable)] + alwaysOn = ("", ""{- wrong -}) + in concatMap doFlag + [(CPP, ("--cpp", ""{- wrong -})), + (PolymorphicComponents, alwaysOn), + (ExistentialQuantification, alwaysOn), + (ForeignFunctionInterface, alwaysOn), + (UndecidableInstances, alwaysOn), + (MultiParamTypeClasses, alwaysOn), + (Rank2Types, alwaysOn), + (PatternSignatures, alwaysOn), + (EmptyDataDecls, alwaysOn), + (ImplicitPrelude, ("", "--no-prelude"{- wrong -})), + (TypeOperators, alwaysOn), + (OverlappingInstances, alwaysOn), + (FlexibleInstances, alwaysOn)] + +getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packagedbs conf = do + let compilerid = compilerId comp + systemPkgDir <- getGlobalPackageDir verbosity conf + userPkgDir <- getUserPackageDir + let pkgDirs = nub (concatMap (packageDbPaths userPkgDir systemPkgDir) packagedbs) + -- putStrLn $ "pkgdirs: " ++ show pkgDirs + pkgs <- liftM (map addBuiltinVersions . 
concat) $ + mapM (\ d -> getDirectoryContents d >>= filterM (isPkgDir (display compilerid) d)) + pkgDirs + -- putStrLn $ "pkgs: " ++ show pkgs + let iPkgs = + map mkInstalledPackageInfo $ + concatMap parsePackage $ + pkgs + -- putStrLn $ "installed pkgs: " ++ show iPkgs + return (fromList iPkgs) + +getGlobalPackageDir :: Verbosity -> ProgramConfiguration -> IO FilePath +getGlobalPackageDir verbosity conf = do + output <- rawSystemProgramStdoutConf verbosity + uhcProgram conf ["--meta-pkgdir-system"] + -- call to "lines" necessary, because pkgdir contains an extra newline at the end + let [pkgdir] = lines output + return pkgdir + +getUserPackageDir :: IO FilePath +getUserPackageDir = do + homeDir <- getHomeDirectory + return $ homeDir </> ".cabal" </> "lib" -- TODO: determine in some other way + +packageDbPaths :: FilePath -> FilePath -> PackageDB -> [FilePath] +packageDbPaths user system db = + case db of + GlobalPackageDB -> [ system ] + UserPackageDB -> [ user ] + SpecificPackageDB path -> [ path ] + +-- | Hack to add version numbers to UHC-built-in packages. This should sooner or +-- later be fixed on the UHC side. +addBuiltinVersions :: String -> String +{- +addBuiltinVersions "uhcbase" = "uhcbase-1.0" +addBuiltinVersions "base" = "base-3.0" +addBuiltinVersions "array" = "array-0.2" +-} +addBuiltinVersions xs = xs + +-- | Name of the installed package config file. +installedPkgConfig :: String +installedPkgConfig = "installed-pkg-config" + +-- | Check if a certain dir contains a valid package. Currently, we are +-- looking only for the presence of an installed package configuration. +-- TODO: Actually make use of the information provided in the file. +isPkgDir :: String -> String -> String -> IO Bool +isPkgDir _ _ ('.' : _) = return False -- ignore files starting with a . +isPkgDir c dir xs = do + let candidate = dir </> uhcPackageDir xs c + -- putStrLn $ "trying: " ++ candidate + doesFileExist (candidate </> installedPkgConfig) + +parsePackage :: String -> [PackageId] +parsePackage x = map fst (filter (\ (_,y) -> null y) (readP_to_S parse x)) + +-- | Create a trivial package info from a directory name. +mkInstalledPackageInfo :: PackageId -> InstalledPackageInfo +mkInstalledPackageInfo p = emptyInstalledPackageInfo + { installedUnitId = mkLegacyUnitId p, + sourcePackageId = p } + + +-- ----------------------------------------------------------------------------- +-- Building + +buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + + systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) + userPkgDir <- getUserPackageDir + let runUhcProg = rawSystemProgramConf verbosity uhcProgram (withPrograms lbi) + let uhcArgs = -- set package name + ["--pkg-build=" ++ display (packageId pkg_descr)] + -- common flags lib/exe + ++ constructUHCCmdLine userPkgDir systemPkgDir + lbi (libBuildInfo lib) clbi + (buildDir lbi) verbosity + -- source files + -- suboptimal: UHC does not understand module names, so + -- we replace periods by path separators + ++ map (map (\ c -> if c == '.' 
then pathSeparator else c)) + (map display (libModules lib)) + + runUhcProg uhcArgs + + return () + +buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe verbosity _pkg_descr lbi exe clbi = do + systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) + userPkgDir <- getUserPackageDir + let runUhcProg = rawSystemProgramConf verbosity uhcProgram (withPrograms lbi) + let uhcArgs = -- common flags lib/exe + constructUHCCmdLine userPkgDir systemPkgDir + lbi (buildInfo exe) clbi + (buildDir lbi) verbosity + -- output file + ++ ["--output", buildDir lbi </> exeName exe] + -- main source module + ++ [modulePath exe] + runUhcProg uhcArgs + +constructUHCCmdLine :: FilePath -> FilePath + -> LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> Verbosity -> [String] +constructUHCCmdLine user system lbi bi clbi odir verbosity = + -- verbosity + (if verbosity >= deafening then ["-v4"] + else if verbosity >= normal then [] + else ["-v0"]) + ++ hcOptions UHC bi + -- flags for language extensions + ++ languageToFlags (compiler lbi) (defaultLanguage bi) + ++ extensionsToFlags (compiler lbi) (usedExtensions bi) + -- packages + ++ ["--hide-all-packages"] + ++ uhcPackageDbOptions user system (withPackageDB lbi) + ++ ["--package=uhcbase"] + ++ ["--package=" ++ display (pkgName pkgid) | (_, pkgid) <- componentPackageDeps clbi ] + -- search paths + ++ ["-i" ++ odir] + ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)] + ++ ["-i" ++ autogenModulesDir lbi] + -- cpp options + ++ ["--optP=" ++ opt | opt <- cppOptions bi] + -- output path + ++ ["--odir=" ++ odir] + -- optimization + ++ (case withOptimization lbi of + NoOptimisation -> ["-O0"] + NormalOptimisation -> ["-O1"] + MaximumOptimisation -> ["-O2"]) + +uhcPackageDbOptions :: FilePath -> FilePath -> PackageDBStack -> [String] +uhcPackageDbOptions user system db = map (\ x -> "--pkg-searchpath=" ++ x) + (concatMap (packageDbPaths user system) db) + +-- ----------------------------------------------------------------------------- +-- Installation + +installLib :: Verbosity -> LocalBuildInfo + -> FilePath -> FilePath -> FilePath + -> PackageDescription -> Library -> ComponentLocalBuildInfo -> IO () +installLib verbosity _lbi targetDir _dynlibTargetDir builtDir pkg _library _clbi = do + -- putStrLn $ "dest: " ++ targetDir + -- putStrLn $ "built: " ++ builtDir + installDirectoryContents verbosity (builtDir </> display (packageId pkg)) targetDir + +-- currently hard-coded UHC code generator and variant to use +uhcTarget, uhcTargetVariant :: String +uhcTarget = "bc" +uhcTargetVariant = "plain" + +-- root directory for a package in UHC +uhcPackageDir :: String -> String -> FilePath +uhcPackageSubDir :: String -> FilePath +uhcPackageDir pkgid compilerid = pkgid </> uhcPackageSubDir compilerid +uhcPackageSubDir compilerid = compilerid </> uhcTarget </> uhcTargetVariant + +-- ----------------------------------------------------------------------------- +-- Registering + +registerPackage + :: Verbosity + -> Compiler + -> ProgramConfiguration + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity comp progdb packageDbs installedPkgInfo = do + dbdir <- case last packageDbs of + GlobalPackageDB -> getGlobalPackageDir verbosity progdb + UserPackageDB -> getUserPackageDir + SpecificPackageDB dir -> return dir + let pkgdir = dbdir </> uhcPackageDir (display pkgid) (display compilerid) + createDirectoryIfMissingVerbose verbosity True pkgdir + writeUTF8File (pkgdir </> installedPkgConfig) + 
(showInstalledPackageInfo installedPkgInfo) + where + pkgid = sourcePackageId installedPkgInfo + compilerid = compilerId comp + +inplacePackageDbPath :: LocalBuildInfo -> FilePath +inplacePackageDbPath lbi = buildDir lbi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/UserHooks.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/UserHooks.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/UserHooks.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/UserHooks.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,206 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.UserHooks +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines the API that @Setup.hs@ scripts can use to customise the way +-- the build works. This module just defines the 'UserHooks' type. The +-- predefined sets of hooks that implement the @Simple@, @Make@ and @Configure@ +-- build systems are defined in "Distribution.Simple". The 'UserHooks' is a big +-- record of functions. There are 3 for each action, a pre, post and the action +-- itself. There are few other miscellaneous hooks, ones to extend the set of +-- programs and preprocessors and one to override the function used to read the +-- @.cabal@ file. +-- +-- This hooks type is widely agreed to not be the right solution. Partly this +-- is because changes to it usually break custom @Setup.hs@ files and yet many +-- internal code changes do require changes to the hooks. For example we cannot +-- pass any extra parameters to most of the functions that implement the +-- various phases because it would involve changing the types of the +-- corresponding hook. At some point it will have to be replaced. + +module Distribution.Simple.UserHooks ( + UserHooks(..), Args, + emptyUserHooks, + ) where + +import Distribution.PackageDescription +import Distribution.Simple.Program +import Distribution.Simple.Command +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.LocalBuildInfo + +type Args = [String] + +-- | Hooks allow authors to add specific functionality before and after a +-- command is run, and also to specify additional preprocessors. +-- +-- * WARNING: The hooks interface is under rather constant flux as we try to +-- understand users needs. Setup files that depend on this interface may +-- break in future releases. +data UserHooks = UserHooks { + + -- | Used for @.\/setup test@ + runTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO (), + -- | Read the description file + readDesc :: IO (Maybe GenericPackageDescription), + -- | Custom preprocessors in addition to and overriding 'knownSuffixHandlers'. + hookedPreProcessors :: [ PPSuffixHandler ], + -- | These programs are detected at configure time. Arguments for them are + -- added to the configure command. + hookedPrograms :: [Program], + + -- |Hook to run before configure command + preConf :: Args -> ConfigFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during configure. 
+ confHook :: (GenericPackageDescription, HookedBuildInfo) + -> ConfigFlags -> IO LocalBuildInfo, + -- |Hook to run after configure command + postConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before build command. Second arg indicates verbosity level. + preBuild :: Args -> BuildFlags -> IO HookedBuildInfo, + + -- |Over-ride this hook to get different behavior during build. + buildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO (), + -- |Hook to run after build command. Second arg indicates verbosity level. + postBuild :: Args -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before repl command. Second arg indicates verbosity level. + preRepl :: Args -> ReplFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during interpretation. + replHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> ReplFlags -> [String] -> IO (), + -- |Hook to run after repl command. Second arg indicates verbosity level. + postRepl :: Args -> ReplFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before clean command. Second arg indicates verbosity level. + preClean :: Args -> CleanFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during clean. + cleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO (), + -- |Hook to run after clean command. Second arg indicates verbosity level. + postClean :: Args -> CleanFlags -> PackageDescription -> () -> IO (), + + -- |Hook to run before copy command + preCopy :: Args -> CopyFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during copy. + copyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO (), + -- |Hook to run after copy command + postCopy :: Args -> CopyFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before install command + preInst :: Args -> InstallFlags -> IO HookedBuildInfo, + + -- |Over-ride this hook to get different behavior during install. + instHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO (), + -- |Hook to run after install command. postInst should be run + -- on the target, not on the build machine. + postInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before sdist command. Second arg indicates verbosity level. + preSDist :: Args -> SDistFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during sdist. + sDistHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> SDistFlags -> IO (), + -- |Hook to run after sdist command. Second arg indicates verbosity level. + postSDist :: Args -> SDistFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (), + + -- |Hook to run before register command + preReg :: Args -> RegisterFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during registration. + regHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), + -- |Hook to run after register command + postReg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before unregister command + preUnreg :: Args -> RegisterFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during unregistration. 
+ unregHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), + -- |Hook to run after unregister command + postUnreg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before hscolour command. Second arg indicates verbosity level. + preHscolour :: Args -> HscolourFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during hscolour. + hscolourHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HscolourFlags -> IO (), + -- |Hook to run after hscolour command. Second arg indicates verbosity level. + postHscolour :: Args -> HscolourFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before haddock command. Second arg indicates verbosity level. + preHaddock :: Args -> HaddockFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during haddock. + haddockHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HaddockFlags -> IO (), + -- |Hook to run after haddock command. Second arg indicates verbosity level. + postHaddock :: Args -> HaddockFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before test command. + preTest :: Args -> TestFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during test. + testHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> TestFlags -> IO (), + -- |Hook to run after test command. + postTest :: Args -> TestFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before bench command. + preBench :: Args -> BenchmarkFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during bench. + benchHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> BenchmarkFlags -> IO (), + -- |Hook to run after bench command. + postBench :: Args -> BenchmarkFlags -> PackageDescription -> LocalBuildInfo -> IO () + } + +{-# DEPRECATED runTests "Please use the new testing interface instead!" #-} + +-- |Empty 'UserHooks' which do nothing. +emptyUserHooks :: UserHooks +emptyUserHooks + = UserHooks { + runTests = ru, + readDesc = return Nothing, + hookedPreProcessors = [], + hookedPrograms = [], + preConf = rn, + confHook = (\_ _ -> return (error "No local build info generated during configure. 
Over-ride empty configure hook.")), + postConf = ru, + preBuild = rn', + buildHook = ru, + postBuild = ru, + preRepl = \_ _ -> return emptyHookedBuildInfo, + replHook = \_ _ _ _ _ -> return (), + postRepl = ru, + preClean = rn, + cleanHook = ru, + postClean = ru, + preCopy = rn, + copyHook = ru, + postCopy = ru, + preInst = rn, + instHook = ru, + postInst = ru, + preSDist = rn, + sDistHook = ru, + postSDist = ru, + preReg = rn, + regHook = ru, + postReg = ru, + preUnreg = rn, + unregHook = ru, + postUnreg = ru, + preHscolour = rn, + hscolourHook = ru, + postHscolour = ru, + preHaddock = rn, + haddockHook = ru, + postHaddock = ru, + preTest = rn', + testHook = \_ -> ru, + postTest = ru, + preBench = rn', + benchHook = \_ -> ru, + postBench = ru + } + where rn args _ = noExtraFlags args >> return emptyHookedBuildInfo + rn' _ _ = return emptyHookedBuildInfo + ru _ _ _ _ = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple/Utils.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,1486 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface, ScopedTypeVariables #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Utils +-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- License : BSD3 +-- portions Copyright (c) 2007, Galois Inc. +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A large and somewhat miscellaneous collection of utility functions used +-- throughout the rest of the Cabal lib and in other tools that use the Cabal +-- lib like @cabal-install@. It has a very simple set of logging actions. It +-- has low level functions for running programs, a bunch of wrappers for +-- various directory and file functions that do extra logging. 
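-- Editor's note: illustrative sketch, not part of this patch.  A minimal
-- custom Setup.hs built on the 'UserHooks' record defined in
-- Distribution.Simple.UserHooks above; 'defaultMainWithHooks' and
-- 'simpleUserHooks' are the predefined hooks that module's header points to
-- in "Distribution.Simple".
import Distribution.Simple

main :: IO ()
main = defaultMainWithHooks simpleUserHooks
         { postBuild = \_args _flags _pkgDescr _lbi ->
             putStrLn "build finished"
         }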
+ +module Distribution.Simple.Utils ( + cabalVersion, + + -- * logging and errors + die, + dieWithLocation, + topHandler, topHandlerWith, + warn, notice, setupMessage, info, debug, + debugNoWrap, chattyTry, + printRawCommandAndArgs, printRawCommandAndArgsAndEnv, + + -- * exceptions + handleDoesNotExist, + + -- * running programs + rawSystemExit, + rawSystemExitCode, + rawSystemExitWithEnv, + rawSystemStdout, + rawSystemStdInOut, + rawSystemIOWithEnv, + createProcessWithEnv, + maybeExit, + xargs, + findProgramLocation, + findProgramVersion, + + -- * copying files + smartCopySources, + createDirectoryIfMissingVerbose, + copyFileVerbose, + copyDirectoryRecursiveVerbose, + copyFiles, + copyFileTo, + + -- * installing files + installOrdinaryFile, + installExecutableFile, + installMaybeExecutableFile, + installOrdinaryFiles, + installExecutableFiles, + installMaybeExecutableFiles, + installDirectoryContents, + copyDirectoryRecursive, + + -- * File permissions + doesExecutableExist, + setFileOrdinary, + setFileExecutable, + + -- * file names + currentDir, + shortRelativePath, + dropExeExtension, + exeExtensions, + + -- * finding files + findFile, + findFirstFile, + findFileWithExtension, + findFileWithExtension', + findAllFilesWithExtension, + findModuleFile, + findModuleFiles, + getDirectoryContentsRecursive, + + -- * environment variables + isInSearchPath, + addLibraryPath, + + -- * simple file globbing + matchFileGlob, + matchDirFileGlob, + parseFileGlob, + FileGlob(..), + + -- * modification time + moreRecentFile, + existsAndIsMoreRecentThan, + + -- * temp files and dirs + TempFileOptions(..), defaultTempFileOptions, + withTempFile, withTempFileEx, + withTempDirectory, withTempDirectoryEx, + + -- * .cabal and .buildinfo files + defaultPackageDesc, + findPackageDesc, + tryFindPackageDesc, + defaultHookedPackageDesc, + findHookedPackageDesc, + + -- * reading and writing files safely + withFileContents, + writeFileAtomic, + rewriteFile, + + -- * Unicode + fromUTF8, + toUTF8, + readUTF8File, + withUTF8FileContents, + writeUTF8File, + normaliseLineEndings, + + -- * BOM + startsWithBOM, + fileHasBOM, + ignoreBOM, + + -- * generic utils + dropWhileEndLE, + takeWhileEndLE, + equating, + comparing, + isInfixOf, + intercalate, + lowercase, + listUnion, + listUnionRight, + ordNub, + ordNubRight, + safeTail, + wrapText, + wrapLine, + ) where + +import Distribution.Text +import Distribution.Package +import Distribution.ModuleName as ModuleName +import Distribution.System +import Distribution.Version +import Distribution.Compat.CopyFile +import Distribution.Compat.Internal.TempFile +import Distribution.Compat.Exception +import Distribution.Verbosity + +#if __GLASGOW_HASKELL__ < 711 +#ifdef VERSION_base +#define BOOTSTRAPPED_CABAL 1 +#endif +#else +#ifdef CURRENT_PACKAGE_KEY +#define BOOTSTRAPPED_CABAL 1 +#endif +#endif + +#ifdef BOOTSTRAPPED_CABAL +import qualified Paths_Cabal (version) +#endif + +import Control.Monad + ( when, unless, filterM ) +import Control.Concurrent.MVar + ( newEmptyMVar, putMVar, takeMVar ) +import Data.Bits + ( Bits((.|.), (.&.), shiftL, shiftR) ) +import Data.Char as Char + ( isDigit, toLower, chr, ord ) +import Data.Foldable + ( traverse_ ) +import Data.List + ( nub, unfoldr, intercalate, isInfixOf ) +import Data.Typeable + ( cast ) +import Data.Ord + ( comparing ) +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +import qualified Data.Set as Set + +import System.Directory + ( Permissions(executable), getDirectoryContents, 
getPermissions + , doesDirectoryExist, doesFileExist, removeFile, findExecutable + , getModificationTime ) +import System.Environment + ( getProgName ) +import System.Exit + ( exitWith, ExitCode(..) ) +import System.FilePath + ( normalise, (), (<.>) + , getSearchPath, joinPath, takeDirectory, splitFileName + , splitExtension, splitExtensions, splitDirectories + , searchPathSeparator ) +import System.Directory + ( createDirectory, renameFile, removeDirectoryRecursive ) +import System.IO + ( Handle, openFile, openBinaryFile, openBinaryTempFileWithDefaultPermissions + , IOMode(ReadMode), hSetBinaryMode + , hGetContents, stderr, stdout, hPutStr, hFlush, hClose ) +import System.IO.Error as IO.Error + ( isDoesNotExistError, isAlreadyExistsError, isUserError + , ioeSetFileName, ioeGetFileName, ioeGetErrorString ) +import System.IO.Error + ( ioeSetLocation, ioeGetLocation ) +import System.IO.Unsafe + ( unsafeInterleaveIO ) +import qualified Control.Exception as Exception + +import Control.Exception (IOException, evaluate, throwIO) +import Control.Concurrent (forkIO) +import qualified System.Process as Process + ( CreateProcess(..), StdStream(..), proc) +import System.Process + ( ProcessHandle, createProcess, rawSystem, runInteractiveProcess + , showCommandForUser, waitForProcess) + +-- We only get our own version number when we're building with ourselves +cabalVersion :: Version +#if defined(BOOTSTRAPPED_CABAL) +cabalVersion = Paths_Cabal.version +#elif defined(CABAL_VERSION) +cabalVersion = Version [CABAL_VERSION] [] +#else +cabalVersion = Version [1,9999] [] --used when bootstrapping +#endif + +-- ---------------------------------------------------------------------------- +-- Exception and logging utils + +dieWithLocation :: FilePath -> Maybe Int -> String -> IO a +dieWithLocation filename lineno msg = + ioError . setLocation lineno + . flip ioeSetFileName (normalise filename) + $ userError msg + where + setLocation Nothing err = err + setLocation (Just n) err = ioeSetLocation err (show n) + +die :: String -> IO a +die msg = ioError (userError msg) + +topHandlerWith :: forall a. 
(Exception.SomeException -> IO a) -> IO a -> IO a +topHandlerWith cont prog = + Exception.catches prog [ + Exception.Handler rethrowAsyncExceptions + , Exception.Handler rethrowExitStatus + , Exception.Handler handle + ] + where + -- Let async exceptions rise to the top for the default top-handler + rethrowAsyncExceptions :: Exception.AsyncException -> IO a + rethrowAsyncExceptions = throwIO + + -- ExitCode gets thrown asynchronously too, and we don't want to print it + rethrowExitStatus :: ExitCode -> IO a + rethrowExitStatus = throwIO + + -- Print all other exceptions + handle :: Exception.SomeException -> IO a + handle se = do + hFlush stdout + pname <- getProgName + hPutStr stderr (wrapText (message pname se)) + cont se + + message :: String -> Exception.SomeException -> String + message pname (Exception.SomeException se) = + case cast se :: Maybe Exception.IOException of + Just ioe | isUserError ioe -> + let file = case ioeGetFileName ioe of + Nothing -> "" + Just path -> path ++ location ++ ": " + location = case ioeGetLocation ioe of + l@(n:_) | Char.isDigit n -> ':' : l + _ -> "" + detail = ioeGetErrorString ioe + in pname ++ ": " ++ file ++ detail + _ -> +#if __GLASGOW_HASKELL__ < 710 + show se +#else + Exception.displayException se +#endif + +topHandler :: IO a -> IO a +topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog + +-- | Non fatal conditions that may be indicative of an error or problem. +-- +-- We display these at the 'normal' verbosity level. +-- +warn :: Verbosity -> String -> IO () +warn verbosity msg = + when (verbosity >= normal) $ do + hFlush stdout + hPutStr stderr (wrapText ("Warning: " ++ msg)) + +-- | Useful status messages. +-- +-- We display these at the 'normal' verbosity level. +-- +-- This is for the ordinary helpful status messages that users see. Just +-- enough information to know that things are working but not floods of detail. +-- +notice :: Verbosity -> String -> IO () +notice verbosity msg = + when (verbosity >= normal) $ + putStr (wrapText msg) + +setupMessage :: Verbosity -> String -> PackageIdentifier -> IO () +setupMessage verbosity msg pkgid = + notice verbosity (msg ++ ' ': display pkgid ++ "...") + +-- | More detail on the operation of some action. +-- +-- We display these messages when the verbosity level is 'verbose' +-- +info :: Verbosity -> String -> IO () +info verbosity msg = + when (verbosity >= verbose) $ + putStr (wrapText msg) + +-- | Detailed internal debugging information +-- +-- We display these messages when the verbosity level is 'deafening' +-- +debug :: Verbosity -> String -> IO () +debug verbosity msg = + when (verbosity >= deafening) $ do + putStr (wrapText msg) + hFlush stdout + +-- | A variant of 'debug' that doesn't perform the automatic line +-- wrapping. Produces better output in some cases. +debugNoWrap :: Verbosity -> String -> IO () +debugNoWrap verbosity msg = + when (verbosity >= deafening) $ do + putStrLn msg + hFlush stdout + +-- | Perform an IO action, catching any IO exceptions and printing an error +-- if one occurs. +chattyTry :: String -- ^ a description of the action we were attempting + -> IO () -- ^ the action itself + -> IO () +chattyTry desc action = + catchIO action $ \exception -> + putStrLn $ "Error while " ++ desc ++ ": " ++ show exception + +-- | Run an IO computation, returning @e@ if it raises a "file +-- does not exist" error. 
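+--
+-- A usage sketch (editor's illustration; the file name is hypothetical):
+--
+-- > contents <- handleDoesNotExist "" (readFile "maybe-missing.txt")
+--
+-- returns the empty string instead of raising an exception when the file is
+-- absent.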
+handleDoesNotExist :: a -> IO a -> IO a +handleDoesNotExist e = + Exception.handleJust + (\ioe -> if isDoesNotExistError ioe then Just ioe else Nothing) + (\_ -> return e) + +-- ----------------------------------------------------------------------------- +-- Helper functions + +-- | Wraps text to the default line width. Existing newlines are preserved. +wrapText :: String -> String +wrapText = unlines + . map (intercalate "\n" + . map unwords + . wrapLine 79 + . words) + . lines + +-- | Wraps a list of words to a list of lines of words of a particular width. +wrapLine :: Int -> [String] -> [[String]] +wrapLine width = wrap 0 [] + where wrap :: Int -> [String] -> [String] -> [[String]] + wrap 0 [] (w:ws) + | length w + 1 > width + = wrap (length w) [w] ws + wrap col line (w:ws) + | col + length w + 1 > width + = reverse line : wrap 0 [] (w:ws) + wrap col line (w:ws) + = let col' = col + length w + 1 + in wrap col' (w:line) ws + wrap _ [] [] = [] + wrap _ line [] = [reverse line] + +-- ----------------------------------------------------------------------------- +-- rawSystem variants +maybeExit :: IO ExitCode -> IO () +maybeExit cmd = do + res <- cmd + unless (res == ExitSuccess) $ exitWith res + +printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO () +printRawCommandAndArgs verbosity path args = + printRawCommandAndArgsAndEnv verbosity path args Nothing + +printRawCommandAndArgsAndEnv :: Verbosity + -> FilePath + -> [String] + -> Maybe [(String, String)] + -> IO () +printRawCommandAndArgsAndEnv verbosity path args menv + | verbosity >= deafening = do + traverse_ (putStrLn . ("Environment: " ++) . show) menv + print (path, args) + | verbosity >= verbose = putStrLn $ showCommandForUser path args + | otherwise = return () + + +-- Exit with the same exit code if the subcommand fails +rawSystemExit :: Verbosity -> FilePath -> [String] -> IO () +rawSystemExit verbosity path args = do + printRawCommandAndArgs verbosity path args + hFlush stdout + exitcode <- rawSystem path args + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + exitWith exitcode + +rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode +rawSystemExitCode verbosity path args = do + printRawCommandAndArgs verbosity path args + hFlush stdout + exitcode <- rawSystem path args + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + return exitcode + +rawSystemExitWithEnv :: Verbosity + -> FilePath + -> [String] + -> [(String, String)] + -> IO () +rawSystemExitWithEnv verbosity path args env = do + printRawCommandAndArgsAndEnv verbosity path args (Just env) + hFlush stdout + (_,_,_,ph) <- createProcess $ + (Process.proc path args) { Process.env = (Just env) +#ifdef MIN_VERSION_process +#if MIN_VERSION_process(1,2,0) +-- delegate_ctlc has been added in process 1.2, and we still want to be able to +-- bootstrap GHC on systems not having that version + , Process.delegate_ctlc = True +#endif +#endif + } + exitcode <- waitForProcess ph + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + exitWith exitcode + +-- Closes the passed in handles before returning. 
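+--
+-- A call sketch (editor's illustration; the program name and arguments are
+-- hypothetical). Passing 'Nothing' for the working directory, the environment
+-- and the three handles inherits them from the current process:
+--
+-- > exitcode <- rawSystemIOWithEnv verbosity "tar" ["-czf", "dist.tar.gz", "dist"]
+-- >                                Nothing Nothing Nothing Nothing Nothing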
+rawSystemIOWithEnv :: Verbosity + -> FilePath + -> [String] + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Maybe Handle -- ^ stdin + -> Maybe Handle -- ^ stdout + -> Maybe Handle -- ^ stderr + -> IO ExitCode +rawSystemIOWithEnv verbosity path args mcwd menv inp out err = do + (_,_,_,ph) <- createProcessWithEnv verbosity path args mcwd menv + (mbToStd inp) (mbToStd out) (mbToStd err) + exitcode <- waitForProcess ph + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + return exitcode + where + mbToStd :: Maybe Handle -> Process.StdStream + mbToStd = maybe Process.Inherit Process.UseHandle + +createProcessWithEnv :: + Verbosity + -> FilePath + -> [String] + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Process.StdStream -- ^ stdin + -> Process.StdStream -- ^ stdout + -> Process.StdStream -- ^ stderr + -> IO (Maybe Handle, Maybe Handle, Maybe Handle,ProcessHandle) + -- ^ Any handles created for stdin, stdout, or stderr + -- with 'CreateProcess', and a handle to the process. +createProcessWithEnv verbosity path args mcwd menv inp out err = do + printRawCommandAndArgsAndEnv verbosity path args menv + hFlush stdout + (inp', out', err', ph) <- createProcess $ + (Process.proc path args) { + Process.cwd = mcwd + , Process.env = menv + , Process.std_in = inp + , Process.std_out = out + , Process.std_err = err +#ifdef MIN_VERSION_process +#if MIN_VERSION_process(1,2,0) +-- delegate_ctlc has been added in process 1.2, and we still want to be able to +-- bootstrap GHC on systems not having that version + , Process.delegate_ctlc = True +#endif +#endif + } + return (inp', out', err', ph) + +-- | Run a command and return its output. +-- +-- The output is assumed to be text in the locale encoding. +-- +rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String +rawSystemStdout verbosity path args = do + (output, errors, exitCode) <- rawSystemStdInOut verbosity path args + Nothing Nothing + Nothing False + when (exitCode /= ExitSuccess) $ + die errors + return output + +-- | Run a command and return its output, errors and exit status. Optionally +-- also supply some input. Also provides control over whether the binary/text +-- mode of the input and output. +-- +rawSystemStdInOut :: Verbosity + -> FilePath -- ^ Program location + -> [String] -- ^ Arguments + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Maybe (String, Bool) -- ^ input text and binary mode + -> Bool -- ^ output in binary mode + -> IO (String, String, ExitCode) -- ^ output, errors, exit +rawSystemStdInOut verbosity path args mcwd menv input outputBinary = do + printRawCommandAndArgs verbosity path args + + Exception.bracket + (runInteractiveProcess path args mcwd menv) + (\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh) + $ \(inh,outh,errh,pid) -> do + + -- output mode depends on what the caller wants + hSetBinaryMode outh outputBinary + -- but the errors are always assumed to be text (in the current locale) + hSetBinaryMode errh False + + -- fork off a couple threads to pull on the stderr and stdout + -- so if the process writes to stderr we do not block. 
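+    -- (Editor's note) 'hGetContents' reads lazily, so the two forked threads
+    -- below force each stream to the end; otherwise a child that fills one
+    -- pipe's OS buffer while we are still reading the other could deadlock.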
+ + err <- hGetContents errh + out <- hGetContents outh + + mv <- newEmptyMVar + let force str = (evaluate (length str) >> return ()) + `Exception.finally` putMVar mv () + --TODO: handle exceptions like text decoding. + _ <- forkIO $ force out + _ <- forkIO $ force err + + -- push all the input, if any + case input of + Nothing -> return () + Just (inputStr, inputBinary) -> do + -- input mode depends on what the caller wants + hSetBinaryMode inh inputBinary + hPutStr inh inputStr + hClose inh + --TODO: this probably fails if the process refuses to consume + -- or if it closes stdin (eg if it exits) + + -- wait for both to finish, in either order + takeMVar mv + takeMVar mv + + -- wait for the program to terminate + exitcode <- waitForProcess pid + unless (exitcode == ExitSuccess) $ + debug verbosity $ path ++ " returned " ++ show exitcode + ++ if null err then "" else + " with error message:\n" ++ err + ++ case input of + Nothing -> "" + Just ("", _) -> "" + Just (inp, _) -> "\nstdin input:\n" ++ inp + + return (out, err, exitcode) + + +{-# DEPRECATED findProgramLocation + "No longer used within Cabal, try findProgramOnSearchPath" #-} +-- | Look for a program on the path. +findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath) +findProgramLocation verbosity prog = do + debug verbosity $ "searching for " ++ prog ++ " in path." + res <- findExecutable prog + case res of + Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") + Just path -> debug verbosity ("found " ++ prog ++ " at "++ path) + return res + + +-- | Look for a program and try to find it's version number. It can accept +-- either an absolute path or the name of a program binary, in which case we +-- will look for the program on the path. +-- +findProgramVersion :: String -- ^ version args + -> (String -> String) -- ^ function to select version + -- number from program output + -> Verbosity + -> FilePath -- ^ location + -> IO (Maybe Version) +findProgramVersion versionArg selectVersion verbosity path = do + str <- rawSystemStdout verbosity path [versionArg] + `catchIO` (\_ -> return "") + `catchExit` (\_ -> return "") + let version :: Maybe Version + version = simpleParse (selectVersion str) + case version of + Nothing -> warn verbosity $ "cannot determine version of " ++ path + ++ " :\n" ++ show str + Just v -> debug verbosity $ path ++ " is version " ++ display v + return version + + +-- | Like the Unix xargs program. Useful for when we've got very long command +-- lines that might overflow an OS limit on command line length and so you +-- need to invoke a command multiple times to get all the args in. +-- +-- Use it with either of the rawSystem variants above. For example: +-- +-- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs +-- +xargs :: Int -> ([String] -> IO ()) + -> [String] -> [String] -> IO () +xargs maxSize rawSystemFun fixedArgs bigArgs = + let fixedArgSize = sum (map length fixedArgs) + length fixedArgs + chunkSize = maxSize - fixedArgSize + in mapM_ (rawSystemFun . 
(fixedArgs ++)) (chunks chunkSize bigArgs) + + where chunks len = unfoldr $ \s -> + if null s then Nothing + else Just (chunk [] len s) + + chunk acc _ [] = (reverse acc,[]) + chunk acc len (s:ss) + | len' < len = chunk (s:acc) (len-len'-1) ss + | otherwise = (reverse acc, s:ss) + where len' = length s + +-- ------------------------------------------------------------ +-- * File Utilities +-- ------------------------------------------------------------ + +---------------- +-- Finding files + +-- | Find a file by looking in a search path. The file path must match exactly. +-- +findFile :: [FilePath] -- ^search locations + -> FilePath -- ^File Name + -> IO FilePath +findFile searchPath fileName = + findFirstFile id + [ path fileName + | path <- nub searchPath] + >>= maybe (die $ fileName ++ " doesn't exist") return + +-- | Find a file by looking in a search path with one of a list of possible +-- file extensions. The file base name should be given and it will be tried +-- with each of the extensions in each element of the search path. +-- +findFileWithExtension :: [String] + -> [FilePath] + -> FilePath + -> IO (Maybe FilePath) +findFileWithExtension extensions searchPath baseName = + findFirstFile id + [ path baseName <.> ext + | path <- nub searchPath + , ext <- nub extensions ] + +findAllFilesWithExtension :: [String] + -> [FilePath] + -> FilePath + -> IO [FilePath] +findAllFilesWithExtension extensions searchPath basename = + findAllFiles id + [ path basename <.> ext + | path <- nub searchPath + , ext <- nub extensions ] + +-- | Like 'findFileWithExtension' but returns which element of the search path +-- the file was found in, and the file path relative to that base directory. +-- +findFileWithExtension' :: [String] + -> [FilePath] + -> FilePath + -> IO (Maybe (FilePath, FilePath)) +findFileWithExtension' extensions searchPath baseName = + findFirstFile (uncurry ()) + [ (path, baseName <.> ext) + | path <- nub searchPath + , ext <- nub extensions ] + +findFirstFile :: (a -> FilePath) -> [a] -> IO (Maybe a) +findFirstFile file = findFirst + where findFirst [] = return Nothing + findFirst (x:xs) = do exists <- doesFileExist (file x) + if exists + then return (Just x) + else findFirst xs + +findAllFiles :: (a -> FilePath) -> [a] -> IO [a] +findAllFiles file = filterM (doesFileExist . file) + +-- | Finds the files corresponding to a list of Haskell module names. +-- +-- As 'findModuleFile' but for a list of module names. +-- +findModuleFiles :: [FilePath] -- ^ build prefix (location of objects) + -> [String] -- ^ search suffixes + -> [ModuleName] -- ^ modules + -> IO [(FilePath, FilePath)] +findModuleFiles searchPath extensions moduleNames = + mapM (findModuleFile searchPath extensions) moduleNames + +-- | Find the file corresponding to a Haskell module name. +-- +-- This is similar to 'findFileWithExtension'' but specialised to a module +-- name. The function fails if the file corresponding to the module is missing. +-- +findModuleFile :: [FilePath] -- ^ build prefix (location of objects) + -> [String] -- ^ search suffixes + -> ModuleName -- ^ module + -> IO (FilePath, FilePath) +findModuleFile searchPath extensions moduleName = + maybe notFound return + =<< findFileWithExtension' extensions searchPath + (ModuleName.toFilePath moduleName) + where + notFound = die $ "Error: Could not find module: " ++ display moduleName + ++ " with any suffix: " ++ show extensions + ++ " in the search path: " ++ show searchPath + +-- | List all the files in a directory and all subdirectories. 
+-- +-- The order places files in sub-directories after all the files in their +-- parent directories. The list is generated lazily so is not well defined if +-- the source directory structure changes before the list is used. +-- +getDirectoryContentsRecursive :: FilePath -> IO [FilePath] +getDirectoryContentsRecursive topdir = recurseDirectories [""] + where + recurseDirectories :: [FilePath] -> IO [FilePath] + recurseDirectories [] = return [] + recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do + (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir dir) + files' <- recurseDirectories (dirs' ++ dirs) + return (files ++ files') + + where + collect files dirs' [] = return (reverse files + ,reverse dirs') + collect files dirs' (entry:entries) | ignore entry + = collect files dirs' entries + collect files dirs' (entry:entries) = do + let dirEntry = dir entry + isDirectory <- doesDirectoryExist (topdir dirEntry) + if isDirectory + then collect files (dirEntry:dirs') entries + else collect (dirEntry:files) dirs' entries + + ignore ['.'] = True + ignore ['.', '.'] = True + ignore _ = False + +------------------------ +-- Environment variables + +-- | Is this directory in the system search path? +isInSearchPath :: FilePath -> IO Bool +isInSearchPath path = fmap (elem path) getSearchPath + +addLibraryPath :: OS + -> [FilePath] + -> [(String,String)] + -> [(String,String)] +addLibraryPath os paths = addEnv + where + pathsString = intercalate [searchPathSeparator] paths + ldPath = case os of + OSX -> "DYLD_LIBRARY_PATH" + _ -> "LD_LIBRARY_PATH" + + addEnv [] = [(ldPath,pathsString)] + addEnv ((key,value):xs) + | key == ldPath = + if null value + then (key,pathsString):xs + else (key,value ++ (searchPathSeparator:pathsString)):xs + | otherwise = (key,value):addEnv xs + +---------------- +-- File globbing + +data FileGlob + -- | No glob at all, just an ordinary file + = NoGlob FilePath + + -- | dir prefix and extension, like @\"foo\/bar\/\*.baz\"@ corresponds to + -- @FileGlob \"foo\/bar\" \".baz\"@ + | FileGlob FilePath String + +parseFileGlob :: FilePath -> Maybe FileGlob +parseFileGlob filepath = case splitExtensions filepath of + (filepath', ext) -> case splitFileName filepath' of + (dir, "*") | '*' `elem` dir + || '*' `elem` ext + || null ext -> Nothing + | null dir -> Just (FileGlob "." ext) + | otherwise -> Just (FileGlob dir ext) + _ | '*' `elem` filepath -> Nothing + | otherwise -> Just (NoGlob filepath) + +matchFileGlob :: FilePath -> IO [FilePath] +matchFileGlob = matchDirFileGlob "." + +matchDirFileGlob :: FilePath -> FilePath -> IO [FilePath] +matchDirFileGlob dir filepath = case parseFileGlob filepath of + Nothing -> die $ "invalid file glob '" ++ filepath + ++ "'. Wildcards '*' are only allowed in place of the file" + ++ " name, not in the directory name or file extension." + ++ " If a wildcard is used it must be with an file extension." + Just (NoGlob filepath') -> return [filepath'] + Just (FileGlob dir' ext) -> do + files <- getDirectoryContents (dir dir') + case [ dir' file + | file <- files + , let (name, ext') = splitExtensions file + , not (null name) && ext' == ext ] of + [] -> die $ "filepath wildcard '" ++ filepath + ++ "' does not match any files." + matches -> return matches + +-------------------- +-- Modification time + +-- | Compare the modification times of two files to see if the first is newer +-- than the second. The first file must exist but the second need not. +-- The expected use case is when the second file is generated using the first. 
+-- In this use case, if the result is True then the second file is out of date. +-- +moreRecentFile :: FilePath -> FilePath -> IO Bool +moreRecentFile a b = do + exists <- doesFileExist b + if not exists + then return True + else do tb <- getModificationTime b + ta <- getModificationTime a + return (ta > tb) + +-- | Like 'moreRecentFile', but also checks that the first file exists. +existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool +existsAndIsMoreRecentThan a b = do + exists <- doesFileExist a + if not exists + then return False + else a `moreRecentFile` b + +---------------------------------------- +-- Copying and installing files and dirs + +-- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels. +-- +createDirectoryIfMissingVerbose :: Verbosity + -> Bool -- ^ Create its parents too? + -> FilePath + -> IO () +createDirectoryIfMissingVerbose verbosity create_parents path0 + | create_parents = createDirs (parents path0) + | otherwise = createDirs (take 1 (parents path0)) + where + parents = reverse . scanl1 () . splitDirectories . normalise + + createDirs [] = return () + createDirs (dir:[]) = createDir dir throwIO + createDirs (dir:dirs) = + createDir dir $ \_ -> do + createDirs dirs + createDir dir throwIO + + createDir :: FilePath -> (IOException -> IO ()) -> IO () + createDir dir notExistHandler = do + r <- tryIO $ createDirectoryVerbose verbosity dir + case (r :: Either IOException ()) of + Right () -> return () + Left e + | isDoesNotExistError e -> notExistHandler e + -- createDirectory (and indeed POSIX mkdir) does not distinguish + -- between a dir already existing and a file already existing. So we + -- check for it here. Unfortunately there is a slight race condition + -- here, but we think it is benign. It could report an exception in + -- the case that the dir did exist but another process deletes the + -- directory and creates a file in its place before we can check + -- that the directory did indeed exist. + | isAlreadyExistsError e -> (do + isDir <- doesDirectoryExist dir + if isDir then return () + else throwIO e + ) `catchIO` ((\_ -> return ()) :: IOException -> IO ()) + | otherwise -> throwIO e + +createDirectoryVerbose :: Verbosity -> FilePath -> IO () +createDirectoryVerbose verbosity dir = do + info verbosity $ "creating " ++ dir + createDirectory dir + setDirOrdinary dir + +-- | Copies a file without copying file permissions. The target file is created +-- with default permissions. Any existing target file is replaced. +-- +-- At higher verbosity levels it logs an info message. +-- +copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO () +copyFileVerbose verbosity src dest = do + info verbosity ("copy " ++ src ++ " to " ++ dest) + copyFile src dest + +-- | Install an ordinary file. This is like a file copy but the permissions +-- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\" +-- while on Windows it uses the default permissions for the target directory. +-- +installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO () +installOrdinaryFile verbosity src dest = do + info verbosity ("Installing " ++ src ++ " to " ++ dest) + copyOrdinaryFile src dest + +-- | Install an executable file. This is like a file copy but the permissions +-- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\" +-- while on Windows it uses the default permissions for the target directory. 
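+--
+-- A sketch of a typical call (editor's illustration; the paths are
+-- hypothetical):
+--
+-- > installExecutableFile verbosity "dist/build/hello/hello" "/usr/local/bin/hello"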
+-- +installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () +installExecutableFile verbosity src dest = do + info verbosity ("Installing executable " ++ src ++ " to " ++ dest) + copyExecutableFile src dest + +-- | Install a file that may or not be executable, preserving permissions. +installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () +installMaybeExecutableFile verbosity src dest = do + perms <- getPermissions src + if (executable perms) --only checks user x bit + then installExecutableFile verbosity src dest + else installOrdinaryFile verbosity src dest + +-- | Given a relative path to a file, copy it to the given directory, preserving +-- the relative path and creating the parent directories if needed. +copyFileTo :: Verbosity -> FilePath -> FilePath -> IO () +copyFileTo verbosity dir file = do + let targetFile = dir file + createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile) + installOrdinaryFile verbosity file targetFile + +-- | Common implementation of 'copyFiles', 'installOrdinaryFiles', +-- 'installExecutableFiles' and 'installMaybeExecutableFiles'. +copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ()) + -> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +copyFilesWith doCopy verbosity targetDir srcFiles = do + + -- Create parent directories for everything + let dirs = map (targetDir ) . nub . map (takeDirectory . snd) $ srcFiles + mapM_ (createDirectoryIfMissingVerbose verbosity True) dirs + + -- Copy all the files + sequence_ [ let src = srcBase srcFile + dest = targetDir srcFile + in doCopy verbosity src dest + | (srcBase, srcFile) <- srcFiles ] + +-- | Copies a bunch of files to a target directory, preserving the directory +-- structure in the target location. The target directories are created if they +-- do not exist. +-- +-- The files are identified by a pair of base directory and a path relative to +-- that base. It is only the relative part that is preserved in the +-- destination. +-- +-- For example: +-- +-- > copyFiles normal "dist/src" +-- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")] +-- +-- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and +-- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\". +-- +-- This operation is not atomic. Any IO failure during the copy (including any +-- missing source files) leaves the target in an unknown state so it is best to +-- use it with a freshly created directory so that it can be simply deleted if +-- anything goes wrong. +-- +copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +copyFiles = copyFilesWith copyFileVerbose + +-- | This is like 'copyFiles' but uses 'installOrdinaryFile'. +-- +installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +installOrdinaryFiles = copyFilesWith installOrdinaryFile + +-- | This is like 'copyFiles' but uses 'installExecutableFile'. +-- +installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] + -> IO () +installExecutableFiles = copyFilesWith installExecutableFile + +-- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'. +-- +installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] + -> IO () +installMaybeExecutableFiles = copyFilesWith installMaybeExecutableFile + +-- | This installs all the files in a directory to a target location, +-- preserving the directory layout. All the files are assumed to be ordinary +-- rather than executable files. 
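+--
+-- For example (editor's sketch; the directory names are hypothetical):
+--
+-- > installDirectoryContents verbosity "data" "/usr/share/mypackage/data"
+--
+-- copies every file found under @data/@ to the same relative path under the
+-- destination directory.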
+-- +installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO () +installDirectoryContents verbosity srcDir destDir = do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] + +-- | Recursively copy the contents of one directory to another path. +copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO () +copyDirectoryRecursive verbosity srcDir destDir = do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f) + | f <- srcFiles ] + +------------------- +-- File permissions + +-- | Like 'doesFileExist', but also checks that the file is executable. +doesExecutableExist :: FilePath -> IO Bool +doesExecutableExist f = do + exists <- doesFileExist f + if exists + then do perms <- getPermissions f + return (executable perms) + else return False + +--------------------------------- +-- Deprecated file copy functions + +{-# DEPRECATED smartCopySources + "Use findModuleFiles and copyFiles or installOrdinaryFiles" #-} +smartCopySources :: Verbosity -> [FilePath] -> FilePath + -> [ModuleName] -> [String] -> IO () +smartCopySources verbosity searchPath targetDir moduleNames extensions = + findModuleFiles searchPath extensions moduleNames + >>= copyFiles verbosity targetDir + +{-# DEPRECATED copyDirectoryRecursiveVerbose + "You probably want installDirectoryContents instead" #-} +copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO () +copyDirectoryRecursiveVerbose verbosity srcDir destDir = do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + copyFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] + +--------------------------- +-- Temporary files and dirs + +-- | Advanced options for 'withTempFile' and 'withTempDirectory'. +data TempFileOptions = TempFileOptions { + optKeepTempFiles :: Bool -- ^ Keep temporary files? + } + +defaultTempFileOptions :: TempFileOptions +defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False } + +-- | Use a temporary filename that doesn't already exist. +-- +withTempFile :: FilePath -- ^ Temp dir to create the file in + -> String -- ^ File name template. See 'openTempFile'. + -> (FilePath -> Handle -> IO a) -> IO a +withTempFile tmpDir template action = + withTempFileEx defaultTempFileOptions tmpDir template action + +-- | A version of 'withTempFile' that additionally takes a 'TempFileOptions' +-- argument. +withTempFileEx :: TempFileOptions + -> FilePath -- ^ Temp dir to create the file in + -> String -- ^ File name template. See 'openTempFile'. + -> (FilePath -> Handle -> IO a) -> IO a +withTempFileEx opts tmpDir template action = + Exception.bracket + (openTempFile tmpDir template) + (\(name, handle) -> do hClose handle + unless (optKeepTempFiles opts) $ + handleDoesNotExist () . removeFile $ name) + (uncurry action) + +-- | Create and use a temporary directory. +-- +-- Creates a new temporary directory inside the given directory, making use +-- of the template. The temp directory is deleted after use. For example: +-- +-- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ... +-- +-- The @tmpDir@ will be a new subdirectory of the given directory, e.g. +-- @src/sdist.342@. 
+-- +withTempDirectory :: Verbosity + -> FilePath -> String -> (FilePath -> IO a) -> IO a +withTempDirectory verbosity targetDir template = + withTempDirectoryEx verbosity defaultTempFileOptions targetDir template + +-- | A version of 'withTempDirectory' that additionally takes a +-- 'TempFileOptions' argument. +withTempDirectoryEx :: Verbosity + -> TempFileOptions + -> FilePath -> String -> (FilePath -> IO a) -> IO a +withTempDirectoryEx _verbosity opts targetDir template = + Exception.bracket + (createTempDirectory targetDir template) + (unless (optKeepTempFiles opts) + . handleDoesNotExist () . removeDirectoryRecursive) + +----------------------------------- +-- Safely reading and writing files + +-- | Gets the contents of a file, but guarantee that it gets closed. +-- +-- The file is read lazily but if it is not fully consumed by the action then +-- the remaining input is truncated and the file is closed. +-- +withFileContents :: FilePath -> (String -> IO a) -> IO a +withFileContents name action = + Exception.bracket (openFile name ReadMode) hClose + (\hnd -> hGetContents hnd >>= action) + +-- | Writes a file atomically. +-- +-- The file is either written successfully or an IO exception is raised and +-- the original file is left unchanged. +-- +-- On windows it is not possible to delete a file that is open by a process. +-- This case will give an IO exception but the atomic property is not affected. +-- +writeFileAtomic :: FilePath -> BS.ByteString -> IO () +writeFileAtomic targetPath content = do + let (targetDir, targetFile) = splitFileName targetPath + Exception.bracketOnError + (openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp") + (\(tmpPath, handle) -> hClose handle >> removeFile tmpPath) + (\(tmpPath, handle) -> do + BS.hPut handle content + hClose handle + renameFile tmpPath targetPath) + +-- | Write a file but only if it would have new content. If we would be writing +-- the same as the existing content then leave the file as is so that we do not +-- update the file's modification time. +-- +-- NB: the file is assumed to be ASCII-encoded. +rewriteFile :: FilePath -> String -> IO () +rewriteFile path newContent = + flip catchIO mightNotExist $ do + existingContent <- readFile path + _ <- evaluate (length existingContent) + unless (existingContent == newContent) $ + writeFileAtomic path (BS.Char8.pack newContent) + where + mightNotExist e | isDoesNotExistError e = writeFileAtomic path + (BS.Char8.pack newContent) + | otherwise = ioError e + +-- | The path name that represents the current directory. +-- In Unix, it's @\".\"@, but this is system-specific. +-- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.) +currentDir :: FilePath +currentDir = "." + +shortRelativePath :: FilePath -> FilePath -> FilePath +shortRelativePath from to = + case dropCommonPrefix (splitDirectories from) (splitDirectories to) of + (stuff, path) -> joinPath (map (const "..") stuff ++ path) + where + dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a]) + dropCommonPrefix (x:xs) (y:ys) + | x == y = dropCommonPrefix xs ys + dropCommonPrefix xs ys = (xs,ys) + +-- | Drop the extension if it's one of 'exeExtensions', or return the path +-- unchanged. +dropExeExtension :: FilePath -> FilePath +dropExeExtension filepath = + case splitExtension filepath of + (filepath', extension) | extension `elem` exeExtensions -> filepath' + | otherwise -> filepath + +-- | List of possible executable file extensions on the current platform. 
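+--
+-- A common use (editor's sketch; the tool name is hypothetical) is to pair a
+-- bare program name with each candidate extension:
+--
+-- > candidates = [ "ghc" <.> ext | ext <- exeExtensions ]
+--
+-- which gives @[\"ghc\", \"ghc.exe\"]@ where @\"exe\"@ is in the list, and just
+-- @[\"ghc\"]@ where the empty extension is the only entry.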
+exeExtensions :: [String] +exeExtensions = case buildOS of + -- Possible improvement: on Windows, read the list of extensions from the + -- PATHEXT environment variable. By default PATHEXT is ".com; .exe; .bat; + -- .cmd". + Windows -> ["", "exe"] + Ghcjs -> ["", "exe"] + _ -> [""] + +-- ------------------------------------------------------------ +-- * Finding the description file +-- ------------------------------------------------------------ + +-- |Package description file (/pkgname/@.cabal@) +defaultPackageDesc :: Verbosity -> IO FilePath +defaultPackageDesc _verbosity = tryFindPackageDesc currentDir + +-- |Find a package description file in the given directory. Looks for +-- @.cabal@ files. +findPackageDesc :: FilePath -- ^Where to look + -> IO (Either String FilePath) -- ^.cabal +findPackageDesc dir + = do files <- getDirectoryContents dir + -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal + -- file we filter to exclude dirs and null base file names: + cabalFiles <- filterM doesFileExist + [ dir file + | file <- files + , let (name, ext) = splitExtension file + , not (null name) && ext == ".cabal" ] + case cabalFiles of + [] -> return (Left noDesc) + [cabalFile] -> return (Right cabalFile) + multiple -> return (Left $ multiDesc multiple) + + where + noDesc :: String + noDesc = "No cabal file found.\n" + ++ "Please create a package description file .cabal" + + multiDesc :: [String] -> String + multiDesc l = "Multiple cabal files found.\n" + ++ "Please use only one of: " + ++ intercalate ", " l + +-- |Like 'findPackageDesc', but calls 'die' in case of error. +tryFindPackageDesc :: FilePath -> IO FilePath +tryFindPackageDesc dir = either die return =<< findPackageDesc dir + +-- |Optional auxiliary package information file (/pkgname/@.buildinfo@) +defaultHookedPackageDesc :: IO (Maybe FilePath) +defaultHookedPackageDesc = findHookedPackageDesc currentDir + +-- |Find auxiliary package information in the given directory. +-- Looks for @.buildinfo@ files. +findHookedPackageDesc + :: FilePath -- ^Directory to search + -> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present +findHookedPackageDesc dir = do + files <- getDirectoryContents dir + buildInfoFiles <- filterM doesFileExist + [ dir file + | file <- files + , let (name, ext) = splitExtension file + , not (null name) && ext == buildInfoExt ] + case buildInfoFiles of + [] -> return Nothing + [f] -> return (Just f) + _ -> die ("Multiple files with extension " ++ buildInfoExt) + +buildInfoExt :: String +buildInfoExt = ".buildinfo" + +-- ------------------------------------------------------------ +-- * Unicode stuff +-- ------------------------------------------------------------ + +-- This is a modification of the UTF8 code from gtk2hs and the +-- utf8-string package. + +fromUTF8 :: String -> String +fromUTF8 [] = [] +fromUTF8 (c:cs) + | c <= '\x7F' = c : fromUTF8 cs + | c <= '\xBF' = replacementChar : fromUTF8 cs + | c <= '\xDF' = twoBytes c cs + | c <= '\xEF' = moreBytes 3 0x800 cs (ord c .&. 0xF) + | c <= '\xF7' = moreBytes 4 0x10000 cs (ord c .&. 0x7) + | c <= '\xFB' = moreBytes 5 0x200000 cs (ord c .&. 0x3) + | c <= '\xFD' = moreBytes 6 0x4000000 cs (ord c .&. 0x1) + | otherwise = replacementChar : fromUTF8 cs + where + twoBytes c0 (c1:cs') + | ord c1 .&. 0xC0 == 0x80 + = let d = ((ord c0 .&. 0x1F) `shiftL` 6) + .|. (ord c1 .&. 
0x3F) + in if d >= 0x80 + then chr d : fromUTF8 cs' + else replacementChar : fromUTF8 cs' + twoBytes _ cs' = replacementChar : fromUTF8 cs' + + moreBytes :: Int -> Int -> [Char] -> Int -> [Char] + moreBytes 1 overlong cs' acc + | overlong <= acc && acc <= 0x10FFFF + && (acc < 0xD800 || 0xDFFF < acc) + && (acc < 0xFFFE || 0xFFFF < acc) + = chr acc : fromUTF8 cs' + + | otherwise + = replacementChar : fromUTF8 cs' + + moreBytes byteCount overlong (cn:cs') acc + | ord cn .&. 0xC0 == 0x80 + = moreBytes (byteCount-1) overlong cs' + ((acc `shiftL` 6) .|. ord cn .&. 0x3F) + + moreBytes _ _ cs' _ + = replacementChar : fromUTF8 cs' + + replacementChar = '\xfffd' + +toUTF8 :: String -> String +toUTF8 [] = [] +toUTF8 (c:cs) + | c <= '\x07F' = c + : toUTF8 cs + | c <= '\x7FF' = chr (0xC0 .|. (w `shiftR` 6)) + : chr (0x80 .|. (w .&. 0x3F)) + : toUTF8 cs + | c <= '\xFFFF'= chr (0xE0 .|. (w `shiftR` 12)) + : chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F)) + : chr (0x80 .|. (w .&. 0x3F)) + : toUTF8 cs + | otherwise = chr (0xf0 .|. (w `shiftR` 18)) + : chr (0x80 .|. ((w `shiftR` 12) .&. 0x3F)) + : chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F)) + : chr (0x80 .|. (w .&. 0x3F)) + : toUTF8 cs + where w = ord c + +-- | Whether BOM is at the beginning of the input +startsWithBOM :: String -> Bool +startsWithBOM ('\xFEFF':_) = True +startsWithBOM _ = False + +-- | Check whether a file has Unicode byte order mark (BOM). +fileHasBOM :: FilePath -> IO Bool +fileHasBOM f = fmap (startsWithBOM . fromUTF8) + . hGetContents =<< openBinaryFile f ReadMode + +-- | Ignore a Unicode byte order mark (BOM) at the beginning of the input +-- +ignoreBOM :: String -> String +ignoreBOM ('\xFEFF':string) = string +ignoreBOM string = string + +-- | Reads a UTF8 encoded text file as a Unicode String +-- +-- Reads lazily using ordinary 'readFile'. +-- +readUTF8File :: FilePath -> IO String +readUTF8File f = fmap (ignoreBOM . fromUTF8) + . hGetContents =<< openBinaryFile f ReadMode + +-- | Reads a UTF8 encoded text file as a Unicode String +-- +-- Same behaviour as 'withFileContents'. +-- +withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a +withUTF8FileContents name action = + Exception.bracket + (openBinaryFile name ReadMode) + hClose + (\hnd -> hGetContents hnd >>= action . ignoreBOM . fromUTF8) + +-- | Writes a Unicode String as a UTF8 encoded text file. +-- +-- Uses 'writeFileAtomic', so provides the same guarantees. +-- +writeUTF8File :: FilePath -> String -> IO () +writeUTF8File path = writeFileAtomic path . BS.Char8.pack . toUTF8 + +-- | Fix different systems silly line ending conventions +normaliseLineEndings :: String -> String +normaliseLineEndings [] = [] +normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows +normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X +normaliseLineEndings ( c :s) = c : normaliseLineEndings s + +-- ------------------------------------------------------------ +-- * Common utils +-- ------------------------------------------------------------ + +-- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but +-- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this +-- version is that the one in "Data.List" is strict in elements, but spine-lazy, +-- while this one is spine-strict but lazy in elements. That's what @LE@ stands +-- for - "lazy in elements". 
+-- +-- Example: +-- +-- @ +-- > tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1] +-- *** Exception: Prelude.undefined +-- > tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1] +-- [5,4,3] +-- > take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined] +-- [5,4,3] +-- > take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined] +-- *** Exception: Prelude.undefined +-- @ +dropWhileEndLE :: (a -> Bool) -> [a] -> [a] +dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) [] + +-- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but +-- is usually faster (as well as being easier to read). +takeWhileEndLE :: (a -> Bool) -> [a] -> [a] +takeWhileEndLE p = fst . foldr go ([], False) + where + go x (rest, done) + | not done && p x = (x:rest, False) + | otherwise = (rest, True) + +-- | Like "Data.List.nub", but has @O(n log n)@ complexity instead of +-- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's +-- package. +ordNub :: (Ord a) => [a] -> [a] +ordNub l = go Set.empty l + where + go _ [] = [] + go s (x:xs) = if x `Set.member` s then go s xs + else x : go (Set.insert x s) xs + +-- | Like "Data.List.union", but has @O(n log n)@ complexity instead of +-- @O(n^2)@. +listUnion :: (Ord a) => [a] -> [a] -> [a] +listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b) + where + aSet = Set.fromList a + +-- | A right-biased version of 'ordNub'. +-- +-- Example: +-- +-- @ +-- > ordNub [1,2,1] +-- [1,2] +-- > ordNubRight [1,2,1] +-- [2,1] +-- @ +ordNubRight :: (Ord a) => [a] -> [a] +ordNubRight = fst . foldr go ([], Set.empty) + where + go x p@(l, s) = if x `Set.member` s then p + else (x:l, Set.insert x s) + +-- | A right-biased version of 'listUnion'. +-- +-- Example: +-- +-- @ +-- > listUnion [1,2,3,4,3] [2,1,1] +-- [1,2,3,4,3] +-- > listUnionRight [1,2,3,4,3] [2,1,1] +-- [4,3,2,1,1] +-- @ +listUnionRight :: (Ord a) => [a] -> [a] -> [a] +listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b + where + bSet = Set.fromList b + +-- | A total variant of 'tail'. +safeTail :: [a] -> [a] +safeTail [] = [] +safeTail (_:xs) = xs + +equating :: Eq a => (b -> a) -> b -> b -> Bool +equating p x y = p x == p y + +lowercase :: String -> String +lowercase = map Char.toLower diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Simple.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Simple.hs 2016-12-23 10:35:21.000000000 +0000 @@ -0,0 +1,695 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the command line front end to the Simple build system. When given +-- the parsed command-line args and package information, is able to perform +-- basic commands like configure, build, install, register, etc. +-- +-- This module exports the main functions that Setup.hs scripts use. It +-- re-exports the 'UserHooks' type, the standard entry points like +-- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of +-- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own +-- behaviour. 
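+--
+-- The simplest @Setup.hs@ (editor's sketch) is just:
+--
+-- > import Distribution.Simple
+-- > main = defaultMain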
+-- +-- This module isn't called \"Simple\" because it's simple. Far from +-- it. It's called \"Simple\" because it does complicated things to +-- simple software. +-- +-- The original idea was that there could be different build systems that all +-- presented the same compatible command line interfaces. There is still a +-- "Distribution.Make" system but in practice no packages use it. + +{- +Work around this warning: +libraries/Cabal/Distribution/Simple.hs:78:0: + Warning: In the use of `runTests' + (imported from Distribution.Simple.UserHooks): + Deprecated: "Please use the new testing interface instead!" +-} +{-# OPTIONS_GHC -fno-warn-deprecations #-} + +module Distribution.Simple ( + module Distribution.Package, + module Distribution.Version, + module Distribution.License, + module Distribution.Simple.Compiler, + module Language.Haskell.Extension, + -- * Simple interface + defaultMain, defaultMainNoRead, defaultMainArgs, + -- * Customization + UserHooks(..), Args, + defaultMainWithHooks, defaultMainWithHooksArgs, + -- ** Standard sets of hooks + simpleUserHooks, + autoconfUserHooks, + defaultUserHooks, emptyUserHooks, + -- ** Utils + defaultHookedPackageDesc + ) where + +-- local +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.UserHooks +import Distribution.Package +import Distribution.PackageDescription hiding (Flag) +import Distribution.PackageDescription.Parse +import Distribution.PackageDescription.Configuration +import Distribution.Simple.Program +import Distribution.Simple.Program.Db +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.Command + +import Distribution.Simple.Build +import Distribution.Simple.SrcDist +import Distribution.Simple.Register + +import Distribution.Simple.Configure + +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Bench +import Distribution.Simple.BuildPaths +import Distribution.Simple.Test +import Distribution.Simple.Install +import Distribution.Simple.Haddock +import Distribution.Simple.Utils +import Distribution.Utils.NubList +import Distribution.Verbosity +import Language.Haskell.Extension +import Distribution.Version +import Distribution.License +import Distribution.Text + +-- Base +import System.Environment (getArgs, getProgName) +import System.Directory (removeFile, doesFileExist + ,doesDirectoryExist, removeDirectoryRecursive) +import System.Exit (exitWith,ExitCode(..)) +import System.FilePath (searchPathSeparator) +import Distribution.Compat.Environment (getEnvironment) +import Distribution.Compat.GetShortPathName (getShortPathName) + +import Control.Monad (when) +import Data.Foldable (traverse_) +import Data.List (unionBy, nub, (\\)) + +-- | A simple implementation of @main@ for a Cabal setup script. +-- It reads the package description file using IO, and performs the +-- action specified on the command line. +defaultMain :: IO () +defaultMain = getArgs >>= defaultMainHelper simpleUserHooks + +-- | A version of 'defaultMain' that is passed the command line +-- arguments, rather than getting them from the environment. +defaultMainArgs :: [String] -> IO () +defaultMainArgs = defaultMainHelper simpleUserHooks + +-- | A customizable version of 'defaultMain'. +defaultMainWithHooks :: UserHooks -> IO () +defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks + +-- | A customizable version of 'defaultMain' that also takes the command +-- line arguments. 
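+--
+-- For example (editor's sketch; the argument list is hypothetical), a script
+-- could drive a configure step directly:
+--
+-- > defaultMainWithHooksArgs simpleUserHooks ["configure", "--user"]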
+defaultMainWithHooksArgs :: UserHooks -> [String] -> IO () +defaultMainWithHooksArgs = defaultMainHelper + +-- | Like 'defaultMain', but accepts the package description as input +-- rather than using IO to read it. +defaultMainNoRead :: GenericPackageDescription -> IO () +defaultMainNoRead pkg_descr = + getArgs >>= + defaultMainHelper simpleUserHooks { readDesc = return (Just pkg_descr) } + +defaultMainHelper :: UserHooks -> Args -> IO () +defaultMainHelper hooks args = topHandler $ + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (flags, commandParse) -> + case commandParse of + _ | fromFlag (globalVersion flags) -> printVersion + | fromFlag (globalNumericVersion flags) -> printNumericVersion + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo action -> action + + where + printHelp help = getProgName >>= putStr . help + printOptionsList = putStr . unlines + printErrors errs = do + putStr (intercalate "\n" errs) + exitWith (ExitFailure 1) + printNumericVersion = putStrLn $ display cabalVersion + printVersion = putStrLn $ "Cabal library version " + ++ display cabalVersion + + progs = addKnownPrograms (hookedPrograms hooks) defaultProgramConfiguration + commands = + [configureCommand progs `commandAddAction` \fs as -> + configureAction hooks fs as >> return () + ,buildCommand progs `commandAddAction` buildAction hooks + ,replCommand progs `commandAddAction` replAction hooks + ,installCommand `commandAddAction` installAction hooks + ,copyCommand `commandAddAction` copyAction hooks + ,haddockCommand `commandAddAction` haddockAction hooks + ,cleanCommand `commandAddAction` cleanAction hooks + ,sdistCommand `commandAddAction` sdistAction hooks + ,hscolourCommand `commandAddAction` hscolourAction hooks + ,registerCommand `commandAddAction` registerAction hooks + ,unregisterCommand `commandAddAction` unregisterAction hooks + ,testCommand `commandAddAction` testAction hooks + ,benchmarkCommand `commandAddAction` benchAction hooks + ] + +-- | Combine the preprocessors in the given hooks with the +-- preprocessors built into cabal. 
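+--
+-- (Editor's note) Handlers supplied via 'hookedPreProcessors' take precedence:
+-- a user handler for a given suffix replaces the builtin handler for that
+-- suffix, since 'unionBy' keeps the first occurrence.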
+allSuffixHandlers :: UserHooks + -> [PPSuffixHandler] +allSuffixHandlers hooks + = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers + where + overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler] + overridesPP = unionBy (\x y -> fst x == fst y) + +configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo +configureAction hooks flags args = do + distPref <- findDistPrefOrDefault (configDistPref flags) + let flags' = flags { configDistPref = toFlag distPref } + pbi <- preConf hooks args flags' + + (mb_pd_file, pkg_descr0) <- confPkgDescr + + --get_pkg_descr (configVerbosity flags') + --let pkg_descr = updatePackageDescription pbi pkg_descr0 + let epkg_descr = (pkg_descr0, pbi) + + --(warns, ers) <- sanityCheckPackage pkg_descr + --errorOut (configVerbosity flags') warns ers + + localbuildinfo0 <- confHook hooks epkg_descr flags' + + -- remember the .cabal filename if we know it + -- and all the extra command line args + let localbuildinfo = localbuildinfo0 { + pkgDescrFile = mb_pd_file, + extraConfigArgs = args + } + writePersistBuildConfig distPref localbuildinfo + + let pkg_descr = localPkgDescr localbuildinfo + postConf hooks args flags' pkg_descr localbuildinfo + return localbuildinfo + where + verbosity = fromFlag (configVerbosity flags) + confPkgDescr :: IO (Maybe FilePath, GenericPackageDescription) + confPkgDescr = do + mdescr <- readDesc hooks + case mdescr of + Just descr -> return (Nothing, descr) + Nothing -> do + pdfile <- defaultPackageDesc verbosity + descr <- readPackageDescription verbosity pdfile + return (Just pdfile, descr) + +buildAction :: UserHooks -> BuildFlags -> Args -> IO () +buildAction hooks flags args = do + distPref <- findDistPrefOrDefault (buildDistPref flags) + let verbosity = fromFlag $ buildVerbosity flags + flags' = flags { buildDistPref = toFlag distPref } + + lbi <- getBuildConfig hooks verbosity distPref + progs <- reconfigurePrograms verbosity + (buildProgramPaths flags') + (buildProgramArgs flags') + (withPrograms lbi) + + hookedAction preBuild buildHook postBuild + (return lbi { withPrograms = progs }) + hooks flags' { buildArgs = args } args + +replAction :: UserHooks -> ReplFlags -> Args -> IO () +replAction hooks flags args = do + distPref <- findDistPrefOrDefault (replDistPref flags) + let verbosity = fromFlag $ replVerbosity flags + flags' = flags { replDistPref = toFlag distPref } + + lbi <- getBuildConfig hooks verbosity distPref + progs <- reconfigurePrograms verbosity + (replProgramPaths flags') + (replProgramArgs flags') + (withPrograms lbi) + + pbi <- preRepl hooks args flags' + let lbi' = lbi { withPrograms = progs } + pkg_descr0 = localPkgDescr lbi' + pkg_descr = updatePackageDescription pbi pkg_descr0 + replHook hooks pkg_descr lbi' hooks flags' args + postRepl hooks args flags' pkg_descr lbi' + +hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO () +hscolourAction hooks flags args = do + distPref <- findDistPrefOrDefault (hscolourDistPref flags) + let verbosity = fromFlag $ hscolourVerbosity flags + flags' = flags { hscolourDistPref = toFlag distPref } + hookedAction preHscolour hscolourHook postHscolour + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +haddockAction :: UserHooks -> HaddockFlags -> Args -> IO () +haddockAction hooks flags args = do + distPref <- findDistPrefOrDefault (haddockDistPref flags) + let verbosity = fromFlag $ haddockVerbosity flags + flags' = flags { haddockDistPref = toFlag distPref } + + lbi <- getBuildConfig hooks verbosity distPref 
+ progs <- reconfigurePrograms verbosity + (haddockProgramPaths flags') + (haddockProgramArgs flags') + (withPrograms lbi) + + hookedAction preHaddock haddockHook postHaddock + (return lbi { withPrograms = progs }) + hooks flags' args + +cleanAction :: UserHooks -> CleanFlags -> Args -> IO () +cleanAction hooks flags args = do + distPref <- findDistPrefOrDefault (cleanDistPref flags) + let flags' = flags { cleanDistPref = toFlag distPref } + + pbi <- preClean hooks args flags' + + pdfile <- defaultPackageDesc verbosity + ppd <- readPackageDescription verbosity pdfile + let pkg_descr0 = flattenPackageDescription ppd + -- We don't sanity check for clean as an error + -- here would prevent cleaning: + --sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + + cleanHook hooks pkg_descr () hooks flags' + postClean hooks args flags' pkg_descr () + where + verbosity = fromFlag (cleanVerbosity flags) + +copyAction :: UserHooks -> CopyFlags -> Args -> IO () +copyAction hooks flags args = do + distPref <- findDistPrefOrDefault (copyDistPref flags) + let verbosity = fromFlag $ copyVerbosity flags + flags' = flags { copyDistPref = toFlag distPref } + hookedAction preCopy copyHook postCopy + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +installAction :: UserHooks -> InstallFlags -> Args -> IO () +installAction hooks flags args = do + distPref <- findDistPrefOrDefault (installDistPref flags) + let verbosity = fromFlag $ installVerbosity flags + flags' = flags { installDistPref = toFlag distPref } + hookedAction preInst instHook postInst + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +sdistAction :: UserHooks -> SDistFlags -> Args -> IO () +sdistAction hooks flags args = do + distPref <- findDistPrefOrDefault (sDistDistPref flags) + let flags' = flags { sDistDistPref = toFlag distPref } + pbi <- preSDist hooks args flags' + + mlbi <- maybeGetPersistBuildConfig distPref + pdfile <- defaultPackageDesc verbosity + ppd <- readPackageDescription verbosity pdfile + let pkg_descr0 = flattenPackageDescription ppd + sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + + sDistHook hooks pkg_descr mlbi hooks flags' + postSDist hooks args flags' pkg_descr mlbi + where + verbosity = fromFlag (sDistVerbosity flags) + +testAction :: UserHooks -> TestFlags -> Args -> IO () +testAction hooks flags args = do + distPref <- findDistPrefOrDefault (testDistPref flags) + let verbosity = fromFlag $ testVerbosity flags + flags' = flags { testDistPref = toFlag distPref } + + localBuildInfo <- getBuildConfig hooks verbosity distPref + let pkg_descr = localPkgDescr localBuildInfo + -- It is safe to do 'runTests' before the new test handler because the + -- default action is a no-op and if the package uses the old test interface + -- the new handler will find no tests. 
+ runTests hooks args False pkg_descr localBuildInfo + hookedActionWithArgs preTest testHook postTest + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO () +benchAction hooks flags args = do + distPref <- findDistPrefOrDefault (benchmarkDistPref flags) + let verbosity = fromFlag $ benchmarkVerbosity flags + flags' = flags { benchmarkDistPref = toFlag distPref } + hookedActionWithArgs preBench benchHook postBench + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +registerAction :: UserHooks -> RegisterFlags -> Args -> IO () +registerAction hooks flags args = do + distPref <- findDistPrefOrDefault (regDistPref flags) + let verbosity = fromFlag $ regVerbosity flags + flags' = flags { regDistPref = toFlag distPref } + hookedAction preReg regHook postReg + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO () +unregisterAction hooks flags args = do + distPref <- findDistPrefOrDefault (regDistPref flags) + let verbosity = fromFlag $ regVerbosity flags + flags' = flags { regDistPref = toFlag distPref } + hookedAction preUnreg unregHook postUnreg + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) + -> (UserHooks -> PackageDescription -> LocalBuildInfo + -> UserHooks -> flags -> IO ()) + -> (UserHooks -> Args -> flags -> PackageDescription + -> LocalBuildInfo -> IO ()) + -> IO LocalBuildInfo + -> UserHooks -> flags -> Args -> IO () +hookedAction pre_hook cmd_hook = + hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> cmd_hook h pd lbi uh flags) + +hookedActionWithArgs :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) + -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> flags -> IO ()) + -> (UserHooks -> Args -> flags -> PackageDescription + -> LocalBuildInfo -> IO ()) + -> IO LocalBuildInfo + -> UserHooks -> flags -> Args -> IO () +hookedActionWithArgs pre_hook cmd_hook post_hook get_build_config hooks flags args = do + pbi <- pre_hook hooks args flags + localbuildinfo <- get_build_config + let pkg_descr0 = localPkgDescr localbuildinfo + --pkg_descr0 <- get_pkg_descr (get_verbose flags) + sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + -- TODO: should we write the modified package descr back to the + -- localbuildinfo? + cmd_hook hooks args pkg_descr localbuildinfo hooks flags + post_hook hooks args flags pkg_descr localbuildinfo + +sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO () +sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_) + = die $ "The buildinfo contains info for a library, " + ++ "but the package does not have a library." + +sanityCheckHookedBuildInfo pkg_descr (_, hookExes) + | not (null nonExistant) + = die $ "The buildinfo contains info for an executable called '" + ++ head nonExistant ++ "' but the package does not have a " + ++ "executable with that name." 
+ where + pkgExeNames = nub (map exeName (executables pkg_descr)) + hookExeNames = nub (map fst hookExes) + nonExistant = hookExeNames \\ pkgExeNames + +sanityCheckHookedBuildInfo _ _ = return () + + +getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo +getBuildConfig hooks verbosity distPref = do + lbi_wo_programs <- getPersistBuildConfig distPref + -- Restore info about unconfigured programs, since it is not serialized + let lbi = lbi_wo_programs { + withPrograms = restoreProgramConfiguration + (builtinPrograms ++ hookedPrograms hooks) + (withPrograms lbi_wo_programs) + } + + case pkgDescrFile lbi of + Nothing -> return lbi + Just pkg_descr_file -> do + outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file + if outdated + then reconfigure pkg_descr_file lbi + else return lbi + + where + reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo + reconfigure pkg_descr_file lbi = do + notice verbosity $ pkg_descr_file ++ " has been changed. " + ++ "Re-configuring with most recently used options. " + ++ "If this fails, please run configure manually.\n" + let cFlags = configFlags lbi + let cFlags' = cFlags { + -- Since the list of unconfigured programs is not serialized, + -- restore it to the same value as normally used at the beginning + -- of a configure run: + configPrograms_ = restoreProgramConfiguration + (builtinPrograms ++ hookedPrograms hooks) + `fmap` configPrograms_ cFlags, + + -- Use the current, not saved verbosity level: + configVerbosity = Flag verbosity + } + configureAction hooks cFlags' (extraConfigArgs lbi) + + +-- -------------------------------------------------------------------------- +-- Cleaning + +clean :: PackageDescription -> CleanFlags -> IO () +clean pkg_descr flags = do + let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags + notice verbosity "cleaning..." + + maybeConfig <- if fromFlag (cleanSaveConf flags) + then maybeGetPersistBuildConfig distPref + else return Nothing + + -- remove the whole dist/ directory rather than tracking exactly what files + -- we created in there. 
+ chattyTry "removing dist/" $ do + exists <- doesDirectoryExist distPref + when exists (removeDirectoryRecursive distPref) + + -- Any extra files the user wants to remove + mapM_ removeFileOrDirectory (extraTmpFiles pkg_descr) + + -- If the user wanted to save the config, write it back + traverse_ (writePersistBuildConfig distPref) maybeConfig + + where + removeFileOrDirectory :: FilePath -> IO () + removeFileOrDirectory fname = do + isDir <- doesDirectoryExist fname + isFile <- doesFileExist fname + if isDir then removeDirectoryRecursive fname + else when isFile $ removeFile fname + verbosity = fromFlag (cleanVerbosity flags) + +-- -------------------------------------------------------------------------- +-- Default hooks + +-- | Hooks that correspond to a plain instantiation of the +-- \"simple\" build system +simpleUserHooks :: UserHooks +simpleUserHooks = + emptyUserHooks { + confHook = configure, + postConf = finalChecks, + buildHook = defaultBuildHook, + replHook = defaultReplHook, + copyHook = \desc lbi _ f -> install desc lbi f, -- has correct 'copy' behavior with params + testHook = defaultTestHook, + benchHook = defaultBenchHook, + instHook = defaultInstallHook, + sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h), + cleanHook = \p _ _ f -> clean p f, + hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f, + haddockHook = \p l h f -> haddock p l (allSuffixHandlers h) f, + regHook = defaultRegHook, + unregHook = \p l _ f -> unregister p l f + } + where + finalChecks _args flags pkg_descr lbi = + checkForeignDeps pkg_descr lbi (lessVerbose verbosity) + where + verbosity = fromFlag (configVerbosity flags) + +-- | Basic autoconf 'UserHooks': +-- +-- * 'postConf' runs @.\/configure@, if present. +-- +-- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst', +-- 'preReg' and 'preUnreg' read additional build information from +-- /package/@.buildinfo@, if present. +-- +-- Thus @configure@ can use local system information to generate +-- /package/@.buildinfo@ and possibly other files. + +{-# DEPRECATED defaultUserHooks + "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-} +defaultUserHooks :: UserHooks +defaultUserHooks = autoconfUserHooks { + confHook = \pkg flags -> do + let verbosity = fromFlag (configVerbosity flags) + warn verbosity + "defaultUserHooks in Setup script is deprecated." + confHook autoconfUserHooks pkg flags, + postConf = oldCompatPostConf + } + -- This is the annoying old version that only runs configure if it exists. + -- It's here for compatibility with existing Setup.hs scripts. 
See: + -- https://github.com/haskell/cabal/issues/158 + where oldCompatPostConf args flags pkg_descr lbi + = do let verbosity = fromFlag (configVerbosity flags) + noExtraFlags args + confExists <- doesFileExist "configure" + when confExists $ + runConfigureScript verbosity + backwardsCompatHack flags lbi + + pbi <- getHookedBuildInfo verbosity + sanityCheckHookedBuildInfo pkg_descr pbi + let pkg_descr' = updatePackageDescription pbi pkg_descr + postConf simpleUserHooks args flags pkg_descr' lbi + + backwardsCompatHack = True + +autoconfUserHooks :: UserHooks +autoconfUserHooks + = simpleUserHooks + { + postConf = defaultPostConf, + preBuild = \_ flags -> + -- not using 'readHook' here because 'build' takes + -- extra args + getHookedBuildInfo $ fromFlag $ buildVerbosity flags, + preClean = readHook cleanVerbosity, + preCopy = readHook copyVerbosity, + preInst = readHook installVerbosity, + preHscolour = readHook hscolourVerbosity, + preHaddock = readHook haddockVerbosity, + preReg = readHook regVerbosity, + preUnreg = readHook regVerbosity + } + where defaultPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO () + defaultPostConf args flags pkg_descr lbi + = do let verbosity = fromFlag (configVerbosity flags) + noExtraFlags args + confExists <- doesFileExist "configure" + if confExists + then runConfigureScript verbosity + backwardsCompatHack flags lbi + else die "configure script not found." + + pbi <- getHookedBuildInfo verbosity + sanityCheckHookedBuildInfo pkg_descr pbi + let pkg_descr' = updatePackageDescription pbi pkg_descr + postConf simpleUserHooks args flags pkg_descr' lbi + + backwardsCompatHack = False + + readHook :: (a -> Flag Verbosity) -> Args -> a -> IO HookedBuildInfo + readHook get_verbosity a flags = do + noExtraFlags a + getHookedBuildInfo verbosity + where + verbosity = fromFlag (get_verbosity flags) + +runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo + -> IO () +runConfigureScript verbosity backwardsCompatHack flags lbi = do + env <- getEnvironment + let programConfig = withPrograms lbi + (ccProg, ccFlags) <- configureCCompiler verbosity programConfig + ccProgShort <- getShortPathName ccProg + -- The C compiler's compilation and linker flags (e.g. + -- "C compiler flags" and "Gcc Linker flags" from GHC) have already + -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS + -- to ccFlags + -- We don't try and tell configure which ld to use, as we don't have + -- a way to pass its flags too + let extraPath = fromNubList $ configProgramPathExtra flags + let cflagsEnv = maybe (unwords ccFlags) (++ (" " ++ unwords ccFlags)) $ lookup "CFLAGS" env + spSep = [searchPathSeparator] + pathEnv = maybe (intercalate spSep extraPath) ((intercalate spSep extraPath ++ spSep)++) $ lookup "PATH" env + overEnv = ("CFLAGS", Just cflagsEnv) : [("PATH", Just pathEnv) | not (null extraPath)] + args' = args ++ ["CC=" ++ ccProgShort] + shProg = simpleProgram "sh" + progDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb + shConfiguredProg <- lookupProgram shProg `fmap` configureProgram verbosity shProg progDb + case shConfiguredProg of + Just sh -> runProgramInvocation verbosity (programInvocation (sh {programOverrideEnv = overEnv}) args') + Nothing -> die notFoundMsg + + where + args = "./configure" : configureArgs backwardsCompatHack flags + + notFoundMsg = "The package has a './configure' script. 
If you are on Windows, This requires a " + ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin. " + ++ "If you are not on Windows, ensure that an 'sh' command is discoverable in your path." + +getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo +getHookedBuildInfo verbosity = do + maybe_infoFile <- defaultHookedPackageDesc + case maybe_infoFile of + Nothing -> return emptyHookedBuildInfo + Just infoFile -> do + info verbosity $ "Reading parameters from " ++ infoFile + readHookedBuildInfo verbosity infoFile + +defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> TestFlags -> IO () +defaultTestHook args pkg_descr localbuildinfo _ flags = + test args pkg_descr localbuildinfo flags + +defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> BenchmarkFlags -> IO () +defaultBenchHook args pkg_descr localbuildinfo _ flags = + bench args pkg_descr localbuildinfo flags + +defaultInstallHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> InstallFlags -> IO () +defaultInstallHook pkg_descr localbuildinfo _ flags = do + let copyFlags = defaultCopyFlags { + copyDistPref = installDistPref flags, + copyDest = toFlag NoCopyDest, + copyVerbosity = installVerbosity flags + } + install pkg_descr localbuildinfo copyFlags + let registerFlags = defaultRegisterFlags { + regDistPref = installDistPref flags, + regInPlace = installInPlace flags, + regPackageDB = installPackageDB flags, + regVerbosity = installVerbosity flags + } + when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags + +defaultBuildHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> BuildFlags -> IO () +defaultBuildHook pkg_descr localbuildinfo hooks flags = + build pkg_descr localbuildinfo flags (allSuffixHandlers hooks) + +defaultReplHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> ReplFlags -> [String] -> IO () +defaultReplHook pkg_descr localbuildinfo hooks flags args = + repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args + +defaultRegHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> RegisterFlags -> IO () +defaultRegHook pkg_descr localbuildinfo _ flags = + if hasLibs pkg_descr + then register pkg_descr localbuildinfo flags + else setupMessage (fromFlag (regVerbosity flags)) + "Package contains no library to register:" (packageId pkg_descr) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/System.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/System.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/System.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,235 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.System +-- Copyright : Duncan Coutts 2007-2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Cabal often needs to do slightly different things on specific platforms. You +-- probably know about the 'System.Info.os' however using that is very +-- inconvenient because it is a string and different Haskell implementations +-- do not agree on using the same strings for the same platforms! (In +-- particular see the controversy over \"windows\" vs \"mingw32\"). 
So to make it +-- more consistent and easy to use we have an 'OS' enumeration. +-- +module Distribution.System ( + -- * Operating System + OS(..), + buildOS, + + -- * Machine Architecture + Arch(..), + buildArch, + + -- * Platform is a pair of arch and OS + Platform(..), + buildPlatform, + platformFromTriple, + + -- * Internal + knownOSs, + knownArches + ) where + +import qualified System.Info (os, arch) +import qualified Data.Char as Char (toLower, isAlphaNum, isAlpha) + +import Distribution.Compat.Binary +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse + +import Control.Monad (liftM2) +import Data.Data (Data) +import Data.Typeable (Typeable) +import Data.Maybe (fromMaybe, listToMaybe) +import GHC.Generics (Generic) +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ((<>)) + +-- | How strict to be when classifying strings into the 'OS' and 'Arch' enums. +-- +-- The reason we have multiple ways to do the classification is because there +-- are two situations where we need to do it. +-- +-- For parsing OS and arch names in .cabal files we really want everyone to be +-- referring to the same or or arch by the same name. Variety is not a virtue +-- in this case. We don't mind about case though. +-- +-- For the System.Info.os\/arch different Haskell implementations use different +-- names for the same or\/arch. Also they tend to distinguish versions of an +-- OS\/arch which we just don't care about. +-- +-- The 'Compat' classification allows us to recognise aliases that are already +-- in common use but it allows us to distinguish them from the canonical name +-- which enables us to warn about such deprecated aliases. +-- +data ClassificationStrictness = Permissive | Compat | Strict + +-- ------------------------------------------------------------ +-- * Operating System +-- ------------------------------------------------------------ + +data OS = Linux | Windows | OSX -- tier 1 desktop OSs + | FreeBSD | OpenBSD | NetBSD -- other free Unix OSs + | DragonFly + | Solaris | AIX | HPUX | IRIX -- ageing Unix OSs + | HaLVM -- bare metal / VMs / hypervisors + | Hurd -- GNU's microkernel + | IOS | Android -- mobile OSs + | Ghcjs + | OtherOS String + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary OS + +knownOSs :: [OS] +knownOSs = [Linux, Windows, OSX + ,FreeBSD, OpenBSD, NetBSD, DragonFly + ,Solaris, AIX, HPUX, IRIX + ,HaLVM + ,Hurd + ,IOS, Android + ,Ghcjs] + +osAliases :: ClassificationStrictness -> OS -> [String] +osAliases Permissive Windows = ["mingw32", "win32", "cygwin32"] +osAliases Compat Windows = ["mingw32", "win32"] +osAliases _ OSX = ["darwin"] +osAliases _ Hurd = ["gnu"] +osAliases Permissive FreeBSD = ["kfreebsdgnu"] +osAliases Compat FreeBSD = ["kfreebsdgnu"] +osAliases Permissive Solaris = ["solaris2"] +osAliases Compat Solaris = ["solaris2"] +osAliases _ _ = [] + +instance Text OS where + disp (OtherOS name) = Disp.text name + disp other = Disp.text (lowercase (show other)) + + parse = fmap (classifyOS Compat) ident + +classifyOS :: ClassificationStrictness -> String -> OS +classifyOS strictness s = + fromMaybe (OtherOS s) $ lookup (lowercase s) osMap + where + osMap = [ (name, os) + | os <- knownOSs + , name <- display os : osAliases strictness os ] + +buildOS :: OS +buildOS = classifyOS Permissive System.Info.os + +-- ------------------------------------------------------------ +-- * Machine Architecture +-- ------------------------------------------------------------ + +data Arch = I386 | X86_64 | PPC | 
PPC64 | Sparc + | Arm | Mips | SH + | IA64 | S390 + | Alpha | Hppa | Rs6000 + | M68k | Vax + | JavaScript + | OtherArch String + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary Arch + +knownArches :: [Arch] +knownArches = [I386, X86_64, PPC, PPC64, Sparc + ,Arm, Mips, SH + ,IA64, S390 + ,Alpha, Hppa, Rs6000 + ,M68k, Vax + ,JavaScript] + +archAliases :: ClassificationStrictness -> Arch -> [String] +archAliases Strict _ = [] +archAliases Compat _ = [] +archAliases _ PPC = ["powerpc"] +archAliases _ PPC64 = ["powerpc64"] +archAliases _ Sparc = ["sparc64", "sun4"] +archAliases _ Mips = ["mipsel", "mipseb"] +archAliases _ Arm = ["armeb", "armel"] +archAliases _ _ = [] + +instance Text Arch where + disp (OtherArch name) = Disp.text name + disp other = Disp.text (lowercase (show other)) + + parse = fmap (classifyArch Strict) ident + +classifyArch :: ClassificationStrictness -> String -> Arch +classifyArch strictness s = + fromMaybe (OtherArch s) $ lookup (lowercase s) archMap + where + archMap = [ (name, arch) + | arch <- knownArches + , name <- display arch : archAliases strictness arch ] + +buildArch :: Arch +buildArch = classifyArch Permissive System.Info.arch + +-- ------------------------------------------------------------ +-- * Platform +-- ------------------------------------------------------------ + +data Platform = Platform Arch OS + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary Platform + +instance Text Platform where + disp (Platform arch os) = disp arch <> Disp.char '-' <> disp os + -- TODO: there are ambigious platforms like: `arch-word-os` + -- which could be parsed as + -- * Platform "arch-word" "os" + -- * Platform "arch" "word-os" + -- We could support that preferring variants 'OtherOS' or 'OtherArch' + -- + -- For now we split into arch and os parts on the first dash. + parse = do + arch <- parseDashlessArch + _ <- Parse.char '-' + os <- parse + return (Platform arch os) + where + parseDashlessArch :: Parse.ReadP r Arch + parseDashlessArch = fmap (classifyArch Strict) dashlessIdent + +-- | The platform Cabal was compiled on. In most cases, +-- @LocalBuildInfo.hostPlatform@ should be used instead (the platform we're +-- targeting). 
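-- A few illustrative evaluations of the classification machinery above
-- ('display' and 'simpleParse' come from Distribution.Text; 'classifyOS' is
-- internal to this module, so the last two equations are specifications of
-- its behaviour rather than client code):
--
-- > display (Platform X86_64 Linux)  == "x86_64-linux"
-- > simpleParse "x86_64-linux"       == Just (Platform X86_64 Linux)
-- > classifyOS Compat "mingw32"      == Windows            -- common alias accepted
-- > classifyOS Strict "mingw32"      == OtherOS "mingw32"  -- only canonical names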
+buildPlatform :: Platform +buildPlatform = Platform buildArch buildOS + +-- Utils: + +ident :: Parse.ReadP r String +ident = liftM2 (:) first rest + where first = Parse.satisfy Char.isAlpha + rest = Parse.munch (\c -> Char.isAlphaNum c || c == '_' || c == '-') + +dashlessIdent :: Parse.ReadP r String +dashlessIdent = liftM2 (:) first rest + where first = Parse.satisfy Char.isAlpha + rest = Parse.munch (\c -> Char.isAlphaNum c || c == '_') + +lowercase :: String -> String +lowercase = map Char.toLower + +platformFromTriple :: String -> Maybe Platform +platformFromTriple triple = + fmap fst (listToMaybe $ Parse.readP_to_S parseTriple triple) + where parseWord = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_') + parseTriple = do + arch <- fmap (classifyArch Permissive) parseWord + _ <- Parse.char '-' + _ <- parseWord -- Skip vendor + _ <- Parse.char '-' + os <- fmap (classifyOS Permissive) ident -- OS may have hyphens, like + -- 'nto-qnx' + return $ Platform arch os diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/TestSuite.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/TestSuite.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/TestSuite.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,96 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.TestSuite +-- Copyright : Thomas Tuegel 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module defines the detailed test suite interface which makes it +-- possible to expose individual tests to Cabal or other test agents. + +module Distribution.TestSuite + ( TestInstance(..) + , OptionDescr(..) + , OptionType(..) + , Test(..) + , Options + , Progress(..) + , Result(..) + , testGroup + ) where + +data TestInstance = TestInstance + { run :: IO Progress -- ^ Perform the test. + , name :: String -- ^ A name for the test, unique within a + -- test suite. + , tags :: [String] -- ^ Users can select groups of tests by + -- their tags. + , options :: [OptionDescr] -- ^ Descriptions of the options recognized + -- by this test. + , setOption :: String -> String -> Either String TestInstance + -- ^ Try to set the named option to the given value. Returns an error + -- message if the option is not supported or the value could not be + -- correctly parsed; otherwise, a 'TestInstance' with the option set to + -- the given value is returned. + } + +data OptionDescr = OptionDescr + { optionName :: String + , optionDescription :: String -- ^ A human-readable description of the + -- option to guide the user setting it. + , optionType :: OptionType + , optionDefault :: Maybe String + } + deriving (Eq, Read, Show) + +data OptionType + = OptionFile + { optionFileMustExist :: Bool + , optionFileIsDir :: Bool + , optionFileExtensions :: [String] + } + | OptionString + { optionStringMultiline :: Bool + } + | OptionNumber + { optionNumberIsInt :: Bool + , optionNumberBounds :: (Maybe String, Maybe String) + } + | OptionBool + | OptionEnum [String] + | OptionSet [String] + | OptionRngSeed + deriving (Eq, Read, Show) + +data Test + = Test TestInstance + | Group + { groupName :: String + , concurrently :: Bool + -- ^ If true, then children of this group may be run in parallel. 
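-- A sketch of what a detailed-interface test suite built from these types can
-- look like (using 'testGroup', defined further below; a test-suite stanza of
-- type: detailed-0.9 typically exports something like 'tests :: IO [Test]'):
--
-- > trivialTest :: Test
-- > trivialTest = Test $ TestInstance
-- >   { run       = return (Finished Pass)
-- >   , name      = "trivial"
-- >   , tags      = []
-- >   , options   = []
-- >   , setOption = \_ _ -> Left "no options supported"
-- >   }
-- >
-- > tests :: IO [Test]
-- > tests = return [ testGroup "all" [trivialTest] ]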
+ -- Note that this setting is not inherited by children. In + -- particular, consider a group F with "concurrently = False" that + -- has some children, including a group T with "concurrently = + -- True". The children of group T may be run concurrently with each + -- other, as long as none are run at the same time as any of the + -- direct children of group F. + , groupTests :: [Test] + } + | ExtraOptions [OptionDescr] Test + +type Options = [(String, String)] + +data Progress = Finished Result + | Progress String (IO Progress) + +data Result = Pass + | Fail String + | Error String + deriving (Eq, Read, Show) + +-- | Create a named group of tests, which are assumed to be safe to run in +-- parallel. +testGroup :: String -> [Test] -> Test +testGroup n ts = Group { groupName = n, concurrently = True, groupTests = ts } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Text.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Text.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Text.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Text.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,73 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Text +-- Copyright : Duncan Coutts 2007 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines a 'Text' class which is a bit like the 'Read' and 'Show' +-- classes. The difference is that is uses a modern pretty printer and parser +-- system and the format is not expected to be Haskell concrete syntax but +-- rather the external human readable representation used by Cabal. +-- +module Distribution.Text ( + Text(..), + defaultStyle, + display, + simpleParse, + ) where + +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +import Data.Version (Version(Version)) +import qualified Data.Char as Char (isDigit, isAlphaNum, isSpace) + +class Text a where + disp :: a -> Disp.Doc + parse :: Parse.ReadP r a + +-- | The default rendering style used in Cabal for console output. +defaultStyle :: Disp.Style +defaultStyle = Disp.Style { Disp.mode = Disp.PageMode + , Disp.lineLength = 79 + , Disp.ribbonsPerLine = 1.0 + } + +display :: Text a => a -> String +display = Disp.renderStyle defaultStyle . disp + +simpleParse :: Text a => String -> Maybe a +simpleParse str = case [ p | (p, s) <- Parse.readP_to_S parse str + , all Char.isSpace s ] of + [] -> Nothing + (p:_) -> Just p + +-- ----------------------------------------------------------------------------- +-- Instances for types from the base package + +instance Text Bool where + disp = Disp.text . show + parse = Parse.choice [ (Parse.string "True" Parse.+++ + Parse.string "true") >> return True + , (Parse.string "False" Parse.+++ + Parse.string "false") >> return False ] + +instance Text Int where + disp = Disp.text . show + parse = (fmap negate $ Parse.char '-' >> parseNat) Parse.+++ parseNat + +-- | Parser for non-negative integers. +parseNat :: Parse.ReadP r Int +parseNat = read `fmap` Parse.munch1 Char.isDigit + +instance Text Version where + disp (Version branch _tags) -- Death to version tags!! 
+ = Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int branch)) + + parse = do + branch <- Parse.sepBy1 parseNat (Parse.char '.') + -- allow but ignore tags: + _tags <- Parse.many (Parse.char '-' >> Parse.munch1 Char.isAlphaNum) + return (Version branch []) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Utils/NubList.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Utils/NubList.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Utils/NubList.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,102 @@ +module Distribution.Utils.NubList + ( NubList -- opaque + , toNubList -- smart construtor + , fromNubList + , overNubList + + , NubListR + , toNubListR + , fromNubListR + , overNubListR + ) where + +import Distribution.Compat.Semigroup as Semi +import Distribution.Compat.Binary +import Distribution.Simple.Utils + +import qualified Text.Read as R + +-- | NubList : A de-duplicated list that maintains the original order. +newtype NubList a = + NubList { fromNubList :: [a] } + deriving Eq + +-- NubList assumes that nub retains the list order while removing duplicate +-- elements (keeping the first occurence). Documentation for "Data.List.nub" +-- does not specifically state that ordering is maintained so we will add a test +-- for that to the test suite. + +-- | Smart constructor for the NubList type. +toNubList :: Ord a => [a] -> NubList a +toNubList list = NubList $ ordNub list + +-- | Lift a function over lists to a function over NubLists. +overNubList :: Ord a => ([a] -> [a]) -> NubList a -> NubList a +overNubList f (NubList list) = toNubList . f $ list + +-- | Monoid operations on NubLists. +-- For a valid Monoid instance we need to satistfy the required monoid laws; +-- identity, associativity and closure. +-- +-- Identity : by inspection: +-- mempty `mappend` NubList xs == NubList xs `mappend` mempty +-- +-- Associativity : by inspection: +-- (NubList xs `mappend` NubList ys) `mappend` NubList zs +-- == NubList xs `mappend` (NubList ys `mappend` NubList zs) +-- +-- Closure : appending two lists of type a and removing duplicates obviously +-- does not change the type. + +instance Ord a => Monoid (NubList a) where + mempty = NubList [] + mappend = (Semi.<>) + +instance Ord a => Semigroup (NubList a) where + (NubList xs) <> (NubList ys) = NubList $ xs `listUnion` ys + +instance Show a => Show (NubList a) where + show (NubList list) = show list + +instance (Ord a, Read a) => Read (NubList a) where + readPrec = readNubList toNubList + +-- | Helper used by NubList/NubListR's Read instances. +readNubList :: (Read a) => ([a] -> l a) -> R.ReadPrec (l a) +readNubList toList = R.parens . R.prec 10 $ fmap toList R.readPrec + +-- | Binary instance for 'NubList a' is the same as for '[a]'. For 'put', we +-- just pull off constructor and put the list. For 'get', we get the list and +-- make a 'NubList' out of it using 'toNubList'. +instance (Ord a, Binary a) => Binary (NubList a) where + put (NubList l) = put l + get = fmap toNubList get + +-- | NubListR : A right-biased version of 'NubList'. That is @toNubListR +-- ["-XNoFoo", "-XFoo", "-XNoFoo"]@ will result in @["-XFoo", "-XNoFoo"]@, +-- unlike the normal 'NubList', which is left-biased. Built on top of +-- 'ordNubRight' and 'listUnionRight'. 
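-- To make the left- versus right-biased behaviour concrete ('fromNubList' and
-- 'fromNubListR' simply unwrap the underlying list):
--
-- > fromNubList  (toNubList  ["-XNoFoo", "-XFoo", "-XNoFoo"]) == ["-XNoFoo", "-XFoo"]
-- > fromNubListR (toNubListR ["-XNoFoo", "-XFoo", "-XNoFoo"]) == ["-XFoo", "-XNoFoo"]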
+newtype NubListR a = + NubListR { fromNubListR :: [a] } + deriving Eq + +-- | Smart constructor for the NubListR type. +toNubListR :: Ord a => [a] -> NubListR a +toNubListR list = NubListR $ ordNubRight list + +-- | Lift a function over lists to a function over NubListRs. +overNubListR :: Ord a => ([a] -> [a]) -> NubListR a -> NubListR a +overNubListR f (NubListR list) = toNubListR . f $ list + +instance Ord a => Monoid (NubListR a) where + mempty = NubListR [] + mappend = (Semi.<>) + +instance Ord a => Semigroup (NubListR a) where + (NubListR xs) <> (NubListR ys) = NubListR $ xs `listUnionRight` ys + +instance Show a => Show (NubListR a) where + show (NubListR list) = show list + +instance (Ord a, Read a) => Read (NubListR a) where + readPrec = readNubList toNubListR diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Verbosity.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Verbosity.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Verbosity.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Verbosity.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,90 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Verbosity +-- Copyright : Ian Lynagh 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A simple 'Verbosity' type with associated utilities. There are 4 standard +-- verbosity levels from 'silent', 'normal', 'verbose' up to 'deafening'. This +-- is used for deciding what logging messages to print. + +-- Verbosity for Cabal functions. + +module Distribution.Verbosity ( + -- * Verbosity + Verbosity, + silent, normal, verbose, deafening, + moreVerbose, lessVerbose, + intToVerbosity, flagToVerbosity, + showForCabal, showForGHC + ) where + +import Distribution.Compat.Binary +import Distribution.ReadE + +import Data.List (elemIndex) +import GHC.Generics + +data Verbosity = Silent | Normal | Verbose | Deafening + deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) + +instance Binary Verbosity + +-- We shouldn't print /anything/ unless an error occurs in silent mode +silent :: Verbosity +silent = Silent + +-- Print stuff we want to see by default +normal :: Verbosity +normal = Normal + +-- Be more verbose about what's going on +verbose :: Verbosity +verbose = Verbose + +-- Not only are we verbose ourselves (perhaps even noisier than when +-- being "verbose"), but we tell everything we run to be verbose too +deafening :: Verbosity +deafening = Deafening + +moreVerbose :: Verbosity -> Verbosity +moreVerbose Silent = Silent --silent should stay silent +moreVerbose Normal = Verbose +moreVerbose Verbose = Deafening +moreVerbose Deafening = Deafening + +lessVerbose :: Verbosity -> Verbosity +lessVerbose Deafening = Deafening +lessVerbose Verbose = Normal +lessVerbose Normal = Silent +lessVerbose Silent = Silent + +intToVerbosity :: Int -> Maybe Verbosity +intToVerbosity 0 = Just Silent +intToVerbosity 1 = Just Normal +intToVerbosity 2 = Just Verbose +intToVerbosity 3 = Just Deafening +intToVerbosity _ = Nothing + +flagToVerbosity :: ReadE Verbosity +flagToVerbosity = ReadE $ \s -> + case reads s of + [(i, "")] -> + case intToVerbosity i of + Just v -> Right v + Nothing -> Left ("Bad verbosity: " ++ show i ++ + ". 
Valid values are 0..3") + _ -> Left ("Can't parse verbosity " ++ s) + +showForCabal, showForGHC :: Verbosity -> String + +showForCabal v = maybe (error "unknown verbosity") show $ + elemIndex v [silent,normal,verbose,deafening] +showForGHC v = maybe (error "unknown verbosity") show $ + elemIndex v [silent,normal,__,verbose,deafening] + where __ = silent -- this will be always ignored by elemIndex diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Distribution/Version.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,882 @@ +{-# LANGUAGE CPP, DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +#if __GLASGOW_HASKELL__ < 707 +{-# LANGUAGE StandaloneDeriving #-} +#endif + +-- Hack approach to support bootstrapping. +-- When MIN_VERSION_binary macro is available, use it. But it's not available +-- during bootstrapping (or anyone else building Setup.hs directly). If the +-- builder specifies -DMIN_VERSION_binary_0_8_0=1 or =0 then we respect that. +-- Otherwise we pick a default based on GHC version: assume binary <0.8 when +-- GHC < 8.0, and binary >=0.8 when GHC >= 8.0. +#ifdef MIN_VERSION_binary +#define MIN_VERSION_binary_0_8_0 MIN_VERSION_binary(0,8,0) +#else +#ifndef MIN_VERSION_binary_0_8_0 +#if __GLASGOW_HASKELL__ >= 800 +#define MIN_VERSION_binary_0_8_0 1 +#else +#define MIN_VERSION_binary_0_8_0 0 +#endif +#endif +#endif + +#if !MIN_VERSION_binary_0_8_0 +{-# OPTIONS_GHC -fno-warn-orphans #-} +#endif + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Version +-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Exports the 'Version' type along with a parser and pretty printer. A version +-- is something like @\"1.3.3\"@. It also defines the 'VersionRange' data +-- types. Version ranges are like @\">= 1.2 && < 2\"@. + +module Distribution.Version ( + -- * Package versions + Version(..), + + -- * Version ranges + VersionRange(..), + + -- ** Constructing + anyVersion, noVersion, + thisVersion, notThisVersion, + laterVersion, earlierVersion, + orLaterVersion, orEarlierVersion, + unionVersionRanges, intersectVersionRanges, + differenceVersionRanges, + invertVersionRange, + withinVersion, + betweenVersionsInclusive, + + -- ** Inspection + withinRange, + isAnyVersion, + isNoVersion, + isSpecificVersion, + simplifyVersionRange, + foldVersionRange, + foldVersionRange', + hasUpperBound, + hasLowerBound, + + -- ** Modification + removeUpperBound, + + -- * Version intervals view + asVersionIntervals, + VersionInterval, + LowerBound(..), + UpperBound(..), + Bound(..), + + -- ** 'VersionIntervals' abstract type + -- | The 'VersionIntervals' type and the accompanying functions are exposed + -- primarily for completeness and testing purposes. In practice + -- 'asVersionIntervals' is the main function to use to + -- view a 'VersionRange' as a bunch of 'VersionInterval's. 
+ -- + VersionIntervals, + toVersionIntervals, + fromVersionIntervals, + withinIntervals, + versionIntervals, + mkVersionIntervals, + unionVersionIntervals, + intersectVersionIntervals, + invertVersionIntervals + + ) where + +import Distribution.Compat.Binary ( Binary(..) ) +import Data.Data ( Data ) +import Data.Typeable ( Typeable ) +import Data.Version ( Version(..) ) +import GHC.Generics ( Generic ) + +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP hiding (get) + +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ((<>), (<+>)) +import qualified Data.Char as Char (isDigit) +import Control.Exception (assert) + +-- ----------------------------------------------------------------------------- +-- Version ranges + +-- Todo: maybe move this to Distribution.Package.Version? +-- (package-specific versioning scheme). + +data VersionRange + = AnyVersion + | ThisVersion Version -- = version + | LaterVersion Version -- > version (NB. not >=) + | EarlierVersion Version -- < version + | WildcardVersion Version -- == ver.* (same as >= ver && < ver+1) + | UnionVersionRanges VersionRange VersionRange + | IntersectVersionRanges VersionRange VersionRange + | VersionRangeParens VersionRange -- just '(exp)' parentheses syntax + deriving (Data, Eq, Generic, Read, Show, Typeable) + +instance Binary VersionRange + +#if __GLASGOW_HASKELL__ < 707 +-- starting with ghc-7.7/base-4.7 this instance is provided in "Data.Data" +deriving instance Data Version +#endif + +#if !(MIN_VERSION_binary_0_8_0) +-- Deriving this instance from Generic gives trouble on GHC 7.2 because the +-- Generic instance has to be standalone-derived. So, we hand-roll our own. +-- We can't use a generic Binary instance on later versions because we must +-- maintain compatibility between compiler versions. +instance Binary Version where + get = do + br <- get + tags <- get + return $ Version br tags + put (Version br tags) = put br >> put tags +#endif + +{-# DeprecateD AnyVersion + "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED ThisVersion + "Use 'thisVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED LaterVersion + "Use 'laterVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED EarlierVersion + "Use 'earlierVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED WildcardVersion + "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED UnionVersionRanges + "Use 'unionVersionRanges', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED IntersectVersionRanges + "Use 'intersectVersionRanges', 'foldVersionRange' or 'asVersionIntervals'"#-} + +-- | The version range @-any@. That is, a version range containing all +-- versions. +-- +-- > withinRange v anyVersion = True +-- +anyVersion :: VersionRange +anyVersion = AnyVersion + +-- | The empty version range, that is a version range containing no versions. +-- +-- This can be constructed using any unsatisfiable version range expression, +-- for example @> 1 && < 1@. 
+-- +-- > withinRange v noVersion = False +-- +noVersion :: VersionRange +noVersion = IntersectVersionRanges (LaterVersion v) (EarlierVersion v) + where v = Version [1] [] + +-- | The version range @== v@ +-- +-- > withinRange v' (thisVersion v) = v' == v +-- +thisVersion :: Version -> VersionRange +thisVersion = ThisVersion + +-- | The version range @< v || > v@ +-- +-- > withinRange v' (notThisVersion v) = v' /= v +-- +notThisVersion :: Version -> VersionRange +notThisVersion v = UnionVersionRanges (EarlierVersion v) (LaterVersion v) + +-- | The version range @> v@ +-- +-- > withinRange v' (laterVersion v) = v' > v +-- +laterVersion :: Version -> VersionRange +laterVersion = LaterVersion + +-- | The version range @>= v@ +-- +-- > withinRange v' (orLaterVersion v) = v' >= v +-- +orLaterVersion :: Version -> VersionRange +orLaterVersion v = UnionVersionRanges (ThisVersion v) (LaterVersion v) + +-- | The version range @< v@ +-- +-- > withinRange v' (earlierVersion v) = v' < v +-- +earlierVersion :: Version -> VersionRange +earlierVersion = EarlierVersion + +-- | The version range @<= v@ +-- +-- > withinRange v' (orEarlierVersion v) = v' <= v +-- +orEarlierVersion :: Version -> VersionRange +orEarlierVersion v = UnionVersionRanges (ThisVersion v) (EarlierVersion v) + +-- | The version range @vr1 || vr2@ +-- +-- > withinRange v' (unionVersionRanges vr1 vr2) +-- > = withinRange v' vr1 || withinRange v' vr2 +-- +unionVersionRanges :: VersionRange -> VersionRange -> VersionRange +unionVersionRanges = UnionVersionRanges + +-- | The version range @vr1 && vr2@ +-- +-- > withinRange v' (intersectVersionRanges vr1 vr2) +-- > = withinRange v' vr1 && withinRange v' vr2 +-- +intersectVersionRanges :: VersionRange -> VersionRange -> VersionRange +intersectVersionRanges = IntersectVersionRanges + +-- | The difference of two version ranges +-- +-- > withinRange v' (differenceVersionRanges vr1 vr2) +-- > = withinRange v' vr1 && not (withinRange v' vr2) +-- +-- @since 1.24.1.0 +differenceVersionRanges :: VersionRange -> VersionRange -> VersionRange +differenceVersionRanges vr1 vr2 = + intersectVersionRanges vr1 (invertVersionRange vr2) + +-- | The inverse of a version range +-- +-- > withinRange v' (invertVersionRange vr) +-- > = not (withinRange v' vr) +-- +invertVersionRange :: VersionRange -> VersionRange +invertVersionRange = + fromVersionIntervals . invertVersionIntervals + . VersionIntervals . asVersionIntervals + +-- | The version range @== v.*@. +-- +-- For example, for version @1.2@, the version range @== 1.2.*@ is the same as +-- @>= 1.2 && < 1.3@ +-- +-- > withinRange v' (laterVersion v) = v' >= v && v' < upper v +-- > where +-- > upper (Version lower t) = Version (init lower ++ [last lower + 1]) t +-- +withinVersion :: Version -> VersionRange +withinVersion = WildcardVersion + +-- | The version range @>= v1 && <= v2@. +-- +-- In practice this is not very useful because we normally use inclusive lower +-- bounds and exclusive upper bounds. +-- +-- > withinRange v' (laterVersion v) = v' > v +-- +betweenVersionsInclusive :: Version -> Version -> VersionRange +betweenVersionsInclusive v1 v2 = + IntersectVersionRanges (orLaterVersion v1) (orEarlierVersion v2) + +{-# DEPRECATED betweenVersionsInclusive + "In practice this is not very useful because we normally use inclusive lower bounds and exclusive upper bounds" #-} + +-- | Given a version range, remove the highest upper bound. Example: @(>= 1 && < +-- 3) || (>= 4 && < 5)@ is converted to @(>= 1 && < 3) || (>= 4)@. 
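-- Putting the combinators above together, a dependency bound such as
-- ">= 1.2 && < 1.3" can be built and queried like this (a sketch; 'display'
-- comes from Distribution.Text and 'withinRange' is defined further below):
--
-- > vr :: VersionRange
-- > vr = intersectVersionRanges (orLaterVersion (Version [1,2] []))
-- >                             (earlierVersion (Version [1,3] []))
-- >
-- > withinRange (Version [1,2,1] []) vr == True
-- > withinRange (Version [1,3]   []) vr == False
-- > display vr                          == ">=1.2 && <1.3"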
+removeUpperBound :: VersionRange -> VersionRange +removeUpperBound = fromVersionIntervals . relaxLastInterval . toVersionIntervals + where + relaxLastInterval (VersionIntervals intervals) = + VersionIntervals (relaxLastInterval' intervals) + + relaxLastInterval' [] = [] + relaxLastInterval' [(l,_)] = [(l, NoUpperBound)] + relaxLastInterval' (i:is) = i : relaxLastInterval' is + +-- | Fold over the basic syntactic structure of a 'VersionRange'. +-- +-- This provides a syntactic view of the expression defining the version range. +-- The syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== v.*\"@ is presented +-- in terms of the other basic syntax. +-- +-- For a semantic view use 'asVersionIntervals'. +-- +foldVersionRange :: a -- ^ @\"-any\"@ version + -> (Version -> a) -- ^ @\"== v\"@ + -> (Version -> a) -- ^ @\"> v\"@ + -> (Version -> a) -- ^ @\"< v\"@ + -> (a -> a -> a) -- ^ @\"_ || _\"@ union + -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection + -> VersionRange -> a +foldVersionRange anyv this later earlier union intersect = fold + where + fold AnyVersion = anyv + fold (ThisVersion v) = this v + fold (LaterVersion v) = later v + fold (EarlierVersion v) = earlier v + fold (WildcardVersion v) = fold (wildcard v) + fold (UnionVersionRanges v1 v2) = union (fold v1) (fold v2) + fold (IntersectVersionRanges v1 v2) = intersect (fold v1) (fold v2) + fold (VersionRangeParens v) = fold v + + wildcard v = intersectVersionRanges + (orLaterVersion v) + (earlierVersion (wildcardUpperBound v)) + +-- | An extended variant of 'foldVersionRange' that also provides a view of the +-- expression in which the syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== +-- v.*\"@ is presented explicitly rather than in terms of the other basic +-- syntax. +-- +foldVersionRange' :: a -- ^ @\"-any\"@ version + -> (Version -> a) -- ^ @\"== v\"@ + -> (Version -> a) -- ^ @\"> v\"@ + -> (Version -> a) -- ^ @\"< v\"@ + -> (Version -> a) -- ^ @\">= v\"@ + -> (Version -> a) -- ^ @\"<= v\"@ + -> (Version -> Version -> a) -- ^ @\"== v.*\"@ wildcard. The + -- function is passed the + -- inclusive lower bound and the + -- exclusive upper bounds of the + -- range defined by the wildcard. + -> (a -> a -> a) -- ^ @\"_ || _\"@ union + -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection + -> (a -> a) -- ^ @\"(_)\"@ parentheses + -> VersionRange -> a +foldVersionRange' anyv this later earlier orLater orEarlier + wildcard union intersect parens = fold + where + fold AnyVersion = anyv + fold (ThisVersion v) = this v + fold (LaterVersion v) = later v + fold (EarlierVersion v) = earlier v + + fold (UnionVersionRanges (ThisVersion v) + (LaterVersion v')) | v==v' = orLater v + fold (UnionVersionRanges (LaterVersion v) + (ThisVersion v')) | v==v' = orLater v + fold (UnionVersionRanges (ThisVersion v) + (EarlierVersion v')) | v==v' = orEarlier v + fold (UnionVersionRanges (EarlierVersion v) + (ThisVersion v')) | v==v' = orEarlier v + + fold (WildcardVersion v) = wildcard v (wildcardUpperBound v) + fold (UnionVersionRanges v1 v2) = union (fold v1) (fold v2) + fold (IntersectVersionRanges v1 v2) = intersect (fold v1) (fold v2) + fold (VersionRangeParens v) = parens (fold v) + + +-- | Does this version fall within the given range? +-- +-- This is the evaluation function for the 'VersionRange' type. 
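-- As one small illustration of the fold, the version literals mentioned
-- syntactically in a range can be collected like so (purely illustrative;
-- note that a wildcard @== v.*@ contributes both of its desugared bounds):
--
-- > versionsMentioned :: VersionRange -> [Version]
-- > versionsMentioned = foldVersionRange [] (:[]) (:[]) (:[]) (++) (++)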
+-- +withinRange :: Version -> VersionRange -> Bool +withinRange v = foldVersionRange + True + (\v' -> versionBranch v == versionBranch v') + (\v' -> versionBranch v > versionBranch v') + (\v' -> versionBranch v < versionBranch v') + (||) + (&&) + +-- | View a 'VersionRange' as a union of intervals. +-- +-- This provides a canonical view of the semantics of a 'VersionRange' as +-- opposed to the syntax of the expression used to define it. For the syntactic +-- view use 'foldVersionRange'. +-- +-- Each interval is non-empty. The sequence is in increasing order and no +-- intervals overlap or touch. Therefore only the first and last can be +-- unbounded. The sequence can be empty if the range is empty +-- (e.g. a range expression like @< 1 && > 2@). +-- +-- Other checks are trivial to implement using this view. For example: +-- +-- > isNoVersion vr | [] <- asVersionIntervals vr = True +-- > | otherwise = False +-- +-- > isSpecificVersion vr +-- > | [(LowerBound v InclusiveBound +-- > ,UpperBound v' InclusiveBound)] <- asVersionIntervals vr +-- > , v == v' = Just v +-- > | otherwise = Nothing +-- +asVersionIntervals :: VersionRange -> [VersionInterval] +asVersionIntervals = versionIntervals . toVersionIntervals + +-- | Does this 'VersionRange' place any restriction on the 'Version' or is it +-- in fact equivalent to 'AnyVersion'. +-- +-- Note this is a semantic check, not simply a syntactic check. So for example +-- the following is @True@ (for all @v@). +-- +-- > isAnyVersion (EarlierVersion v `UnionVersionRanges` orLaterVersion v) +-- +isAnyVersion :: VersionRange -> Bool +isAnyVersion vr = case asVersionIntervals vr of + [(LowerBound v InclusiveBound, NoUpperBound)] | isVersion0 v -> True + _ -> False + +-- | This is the converse of 'isAnyVersion'. It check if the version range is +-- empty, if there is no possible version that satisfies the version range. +-- +-- For example this is @True@ (for all @v@): +-- +-- > isNoVersion (EarlierVersion v `IntersectVersionRanges` LaterVersion v) +-- +isNoVersion :: VersionRange -> Bool +isNoVersion vr = case asVersionIntervals vr of + [] -> True + _ -> False + +-- | Is this version range in fact just a specific version? +-- +-- For example the version range @\">= 3 && <= 3\"@ contains only the version +-- @3@. +-- +isSpecificVersion :: VersionRange -> Maybe Version +isSpecificVersion vr = case asVersionIntervals vr of + [(LowerBound v InclusiveBound + ,UpperBound v' InclusiveBound)] + | v == v' -> Just v + _ -> Nothing + +-- | Simplify a 'VersionRange' expression. For non-empty version ranges +-- this produces a canonical form. Empty or inconsistent version ranges +-- are left as-is because that provides more information. +-- +-- If you need a canonical form use +-- @fromVersionIntervals . toVersionIntervals@ +-- +-- It satisfies the following properties: +-- +-- > withinRange v (simplifyVersionRange r) = withinRange v r +-- +-- > withinRange v r = withinRange v r' +-- > ==> simplifyVersionRange r = simplifyVersionRange r' +-- > || isNoVersion r +-- > || isNoVersion r' +-- +simplifyVersionRange :: VersionRange -> VersionRange +simplifyVersionRange vr + -- If the version range is inconsistent then we just return the + -- original since that has more information than ">1 && < 1", which + -- is the canonical inconsistent version range. 
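-- A concrete instance of the canonical intervals view described above
-- (a sketch, written with the bound constructors defined just below):
--
-- > asVersionIntervals (unionVersionRanges (thisVersion  (Version [1] []))
-- >                                        (laterVersion (Version [2] [])))
-- >   == [ ( LowerBound (Version [1] []) InclusiveBound
-- >        , UpperBound (Version [1] []) InclusiveBound )
-- >      , ( LowerBound (Version [2] []) ExclusiveBound
-- >        , NoUpperBound ) ]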
+ | null (versionIntervals vi) = vr + | otherwise = fromVersionIntervals vi + where + vi = toVersionIntervals vr + +---------------------------- +-- Wildcard range utilities +-- + +wildcardUpperBound :: Version -> Version +wildcardUpperBound (Version lowerBound ts) = Version upperBound ts + where + upperBound = init lowerBound ++ [last lowerBound + 1] + +isWildcardRange :: Version -> Version -> Bool +isWildcardRange (Version branch1 _) (Version branch2 _) = check branch1 branch2 + where check (n:[]) (m:[]) | n+1 == m = True + check (n:ns) (m:ms) | n == m = check ns ms + check _ _ = False + +------------------ +-- Intervals view +-- + +-- | A complementary representation of a 'VersionRange'. Instead of a boolean +-- version predicate it uses an increasing sequence of non-overlapping, +-- non-empty intervals. +-- +-- The key point is that this representation gives a canonical representation +-- for the semantics of 'VersionRange's. This makes it easier to check things +-- like whether a version range is empty, covers all versions, or requires a +-- certain minimum or maximum version. It also makes it easy to check equality +-- or containment. It also makes it easier to identify \'simple\' version +-- predicates for translation into foreign packaging systems that do not +-- support complex version range expressions. +-- +newtype VersionIntervals = VersionIntervals [VersionInterval] + deriving (Eq, Show) + +-- | Inspect the list of version intervals. +-- +versionIntervals :: VersionIntervals -> [VersionInterval] +versionIntervals (VersionIntervals is) = is + +type VersionInterval = (LowerBound, UpperBound) +data LowerBound = LowerBound Version !Bound deriving (Eq, Show) +data UpperBound = NoUpperBound | UpperBound Version !Bound deriving (Eq, Show) +data Bound = ExclusiveBound | InclusiveBound deriving (Eq, Show) + +minLowerBound :: LowerBound +minLowerBound = LowerBound (Version [0] []) InclusiveBound + +isVersion0 :: Version -> Bool +isVersion0 (Version [0] _) = True +isVersion0 _ = False + +instance Ord LowerBound where + LowerBound ver bound <= LowerBound ver' bound' = case compare ver ver' of + LT -> True + EQ -> not (bound == ExclusiveBound && bound' == InclusiveBound) + GT -> False + +instance Ord UpperBound where + _ <= NoUpperBound = True + NoUpperBound <= UpperBound _ _ = False + UpperBound ver bound <= UpperBound ver' bound' = case compare ver ver' of + LT -> True + EQ -> not (bound == InclusiveBound && bound' == ExclusiveBound) + GT -> False + +invariant :: VersionIntervals -> Bool +invariant (VersionIntervals intervals) = all validInterval intervals + && all doesNotTouch' adjacentIntervals + where + doesNotTouch' :: (VersionInterval, VersionInterval) -> Bool + doesNotTouch' ((_,u), (l',_)) = doesNotTouch u l' + + adjacentIntervals :: [(VersionInterval, VersionInterval)] + adjacentIntervals + | null intervals = [] + | otherwise = zip intervals (tail intervals) + +checkInvariant :: VersionIntervals -> VersionIntervals +checkInvariant is = assert (invariant is) is + +-- | Directly construct a 'VersionIntervals' from a list of intervals. +-- +-- Each interval must be non-empty. The sequence must be in increasing order +-- and no intervals may overlap or touch. If any of these conditions are not +-- satisfied the function returns @Nothing@. 
+-- +mkVersionIntervals :: [VersionInterval] -> Maybe VersionIntervals +mkVersionIntervals intervals + | invariant (VersionIntervals intervals) = Just (VersionIntervals intervals) + | otherwise = Nothing + +validVersion :: Version -> Bool +validVersion (Version [] _) = False +validVersion (Version vs _) = all (>=0) vs + +validInterval :: (LowerBound, UpperBound) -> Bool +validInterval i@(l, u) = validLower l && validUpper u && nonEmpty i + where + validLower (LowerBound v _) = validVersion v + validUpper NoUpperBound = True + validUpper (UpperBound v _) = validVersion v + +-- Check an interval is non-empty +-- +nonEmpty :: VersionInterval -> Bool +nonEmpty (_, NoUpperBound ) = True +nonEmpty (LowerBound l lb, UpperBound u ub) = + (l < u) || (l == u && lb == InclusiveBound && ub == InclusiveBound) + +-- Check an upper bound does not intersect, or even touch a lower bound: +-- +-- ---| or ---) but not ---] or ---) or ---] +-- |--- (--- (--- [--- [--- +-- +doesNotTouch :: UpperBound -> LowerBound -> Bool +doesNotTouch NoUpperBound _ = False +doesNotTouch (UpperBound u ub) (LowerBound l lb) = + u < l + || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) + +-- | Check an upper bound does not intersect a lower bound: +-- +-- ---| or ---) or ---] or ---) but not ---] +-- |--- (--- (--- [--- [--- +-- +doesNotIntersect :: UpperBound -> LowerBound -> Bool +doesNotIntersect NoUpperBound _ = False +doesNotIntersect (UpperBound u ub) (LowerBound l lb) = + u < l + || (u == l && not (ub == InclusiveBound && lb == InclusiveBound)) + +-- | Test if a version falls within the version intervals. +-- +-- It exists mostly for completeness and testing. It satisfies the following +-- properties: +-- +-- > withinIntervals v (toVersionIntervals vr) = withinRange v vr +-- > withinIntervals v ivs = withinRange v (fromVersionIntervals ivs) +-- +withinIntervals :: Version -> VersionIntervals -> Bool +withinIntervals v (VersionIntervals intervals) = any withinInterval intervals + where + withinInterval (lowerBound, upperBound) = withinLower lowerBound + && withinUpper upperBound + withinLower (LowerBound v' ExclusiveBound) = v' < v + withinLower (LowerBound v' InclusiveBound) = v' <= v + + withinUpper NoUpperBound = True + withinUpper (UpperBound v' ExclusiveBound) = v' > v + withinUpper (UpperBound v' InclusiveBound) = v' >= v + +-- | Convert a 'VersionRange' to a sequence of version intervals. +-- +toVersionIntervals :: VersionRange -> VersionIntervals +toVersionIntervals = foldVersionRange + ( chkIvl (minLowerBound, NoUpperBound)) + (\v -> chkIvl (LowerBound v InclusiveBound, UpperBound v InclusiveBound)) + (\v -> chkIvl (LowerBound v ExclusiveBound, NoUpperBound)) + (\v -> if isVersion0 v then VersionIntervals [] else + chkIvl (minLowerBound, UpperBound v ExclusiveBound)) + unionVersionIntervals + intersectVersionIntervals + where + chkIvl interval = checkInvariant (VersionIntervals [interval]) + +-- | Convert a 'VersionIntervals' value back into a 'VersionRange' expression +-- representing the version intervals. 
+-- +fromVersionIntervals :: VersionIntervals -> VersionRange +fromVersionIntervals (VersionIntervals []) = noVersion +fromVersionIntervals (VersionIntervals intervals) = + foldr1 UnionVersionRanges [ interval l u | (l, u) <- intervals ] + + where + interval (LowerBound v InclusiveBound) + (UpperBound v' InclusiveBound) | v == v' + = ThisVersion v + interval (LowerBound v InclusiveBound) + (UpperBound v' ExclusiveBound) | isWildcardRange v v' + = WildcardVersion v + interval l u = lowerBound l `intersectVersionRanges'` upperBound u + + lowerBound (LowerBound v InclusiveBound) + | isVersion0 v = AnyVersion + | otherwise = orLaterVersion v + lowerBound (LowerBound v ExclusiveBound) = LaterVersion v + + upperBound NoUpperBound = AnyVersion + upperBound (UpperBound v InclusiveBound) = orEarlierVersion v + upperBound (UpperBound v ExclusiveBound) = EarlierVersion v + + intersectVersionRanges' vr AnyVersion = vr + intersectVersionRanges' AnyVersion vr = vr + intersectVersionRanges' vr vr' = IntersectVersionRanges vr vr' + +unionVersionIntervals :: VersionIntervals -> VersionIntervals + -> VersionIntervals +unionVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = + checkInvariant (VersionIntervals (union is0 is'0)) + where + union is [] = is + union [] is' = is' + union (i:is) (i':is') = case unionInterval i i' of + Left Nothing -> i : union is (i' :is') + Left (Just i'') -> union is (i'':is') + Right Nothing -> i' : union (i :is) is' + Right (Just i'') -> union (i'':is) is' + +unionInterval :: VersionInterval -> VersionInterval + -> Either (Maybe VersionInterval) (Maybe VersionInterval) +unionInterval (lower , upper ) (lower', upper') + + -- Non-intersecting intervals with the left interval ending first + | upper `doesNotTouch` lower' = Left Nothing + + -- Non-intersecting intervals with the right interval first + | upper' `doesNotTouch` lower = Right Nothing + + -- Complete or partial overlap, with the left interval ending first + | upper <= upper' = lowerBound `seq` + Left (Just (lowerBound, upper')) + + -- Complete or partial overlap, with the left interval ending first + | otherwise = lowerBound `seq` + Right (Just (lowerBound, upper)) + where + lowerBound = min lower lower' + +intersectVersionIntervals :: VersionIntervals -> VersionIntervals + -> VersionIntervals +intersectVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = + checkInvariant (VersionIntervals (intersect is0 is'0)) + where + intersect _ [] = [] + intersect [] _ = [] + intersect (i:is) (i':is') = case intersectInterval i i' of + Left Nothing -> intersect is (i':is') + Left (Just i'') -> i'' : intersect is (i':is') + Right Nothing -> intersect (i:is) is' + Right (Just i'') -> i'' : intersect (i:is) is' + +intersectInterval :: VersionInterval -> VersionInterval + -> Either (Maybe VersionInterval) (Maybe VersionInterval) +intersectInterval (lower , upper ) (lower', upper') + + -- Non-intersecting intervals with the left interval ending first + | upper `doesNotIntersect` lower' = Left Nothing + + -- Non-intersecting intervals with the right interval first + | upper' `doesNotIntersect` lower = Right Nothing + + -- Complete or partial overlap, with the left interval ending first + | upper <= upper' = lowerBound `seq` + Left (Just (lowerBound, upper)) + + -- Complete or partial overlap, with the right interval ending first + | otherwise = lowerBound `seq` + Right (Just (lowerBound, upper')) + where + lowerBound = max lower lower' + +invertVersionIntervals :: VersionIntervals + -> VersionIntervals 
+invertVersionIntervals (VersionIntervals xs) = + case xs of + -- Empty interval set + [] -> VersionIntervals [(noLowerBound, NoUpperBound)] + -- Interval with no lower bound + ((lb, ub) : more) | lb == noLowerBound -> + VersionIntervals $ invertVersionIntervals' ub more + -- Interval with a lower bound + ((lb, ub) : more) -> + VersionIntervals $ (noLowerBound, invertLowerBound lb) + : invertVersionIntervals' ub more + where + -- Invert subsequent version intervals given the upper bound of + -- the intervals already inverted. + invertVersionIntervals' :: UpperBound + -> [(LowerBound, UpperBound)] + -> [(LowerBound, UpperBound)] + invertVersionIntervals' NoUpperBound [] = [] + invertVersionIntervals' ub0 [] = [(invertUpperBound ub0, NoUpperBound)] + invertVersionIntervals' ub0 [(lb, NoUpperBound)] = + [(invertUpperBound ub0, invertLowerBound lb)] + invertVersionIntervals' ub0 ((lb, ub1) : more) = + (invertUpperBound ub0, invertLowerBound lb) + : invertVersionIntervals' ub1 more + + invertLowerBound :: LowerBound -> UpperBound + invertLowerBound (LowerBound v b) = UpperBound v (invertBound b) + + invertUpperBound :: UpperBound -> LowerBound + invertUpperBound (UpperBound v b) = LowerBound v (invertBound b) + invertUpperBound NoUpperBound = error "NoUpperBound: unexpected" + + invertBound :: Bound -> Bound + invertBound ExclusiveBound = InclusiveBound + invertBound InclusiveBound = ExclusiveBound + + noLowerBound :: LowerBound + noLowerBound = LowerBound (Version [0] []) InclusiveBound + +------------------------------- +-- Parsing and pretty printing +-- + +instance Text VersionRange where + disp = fst + . foldVersionRange' -- precedence: + ( Disp.text "-any" , 0 :: Int) + (\v -> (Disp.text "==" <> disp v , 0)) + (\v -> (Disp.char '>' <> disp v , 0)) + (\v -> (Disp.char '<' <> disp v , 0)) + (\v -> (Disp.text ">=" <> disp v , 0)) + (\v -> (Disp.text "<=" <> disp v , 0)) + (\v _ -> (Disp.text "==" <> dispWild v , 0)) + (\(r1, p1) (r2, p2) -> + (punct 2 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2)) + (\(r1, p1) (r2, p2) -> + (punct 1 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1)) + (\(r, _) -> (Disp.parens r, 0)) + + where dispWild (Version b _) = + Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) + <> Disp.text ".*" + punct p p' | p < p' = Disp.parens + | otherwise = id + + parse = expr + where + expr = do Parse.skipSpaces + t <- term + Parse.skipSpaces + (do _ <- Parse.string "||" + Parse.skipSpaces + e <- expr + return (UnionVersionRanges t e) + +++ + return t) + term = do f <- factor + Parse.skipSpaces + (do _ <- Parse.string "&&" + Parse.skipSpaces + t <- term + return (IntersectVersionRanges f t) + +++ + return f) + factor = Parse.choice $ parens expr + : parseAnyVersion + : parseNoVersion + : parseWildcardRange + : map parseRangeOp rangeOps + parseAnyVersion = Parse.string "-any" >> return AnyVersion + parseNoVersion = Parse.string "-none" >> return noVersion + + parseWildcardRange = do + _ <- Parse.string "==" + Parse.skipSpaces + branch <- Parse.sepBy1 digits (Parse.char '.') + _ <- Parse.char '.' 
+ _ <- Parse.char '*' + return (WildcardVersion (Version branch [])) + + parens p = Parse.between (Parse.char '(' >> Parse.skipSpaces) + (Parse.char ')' >> Parse.skipSpaces) + (do a <- p + Parse.skipSpaces + return (VersionRangeParens a)) + + digits = do + first <- Parse.satisfy Char.isDigit + if first == '0' + then return 0 + else do rest <- Parse.munch Char.isDigit + return (read (first : rest)) + + parseRangeOp (s,f) = Parse.string s >> Parse.skipSpaces >> fmap f parse + rangeOps = [ ("<", EarlierVersion), + ("<=", orEarlierVersion), + (">", LaterVersion), + (">=", orLaterVersion), + ("==", ThisVersion) ] + +-- | Does the version range have an upper bound? +-- +-- @since 1.24.0.0 +hasUpperBound :: VersionRange -> Bool +hasUpperBound = foldVersionRange + False + (const True) + (const False) + (const True) + (&&) (||) + +-- | Does the version range have an explicit lower bound? +-- +-- Note: this function only considers the user-specified lower bounds, but not +-- the implicit >=0 lower bound. +-- +-- @since 1.24.0.0 +hasLowerBound :: VersionRange -> Bool +hasLowerBound = foldVersionRange + False + (const True) + (const True) + (const False) + (&&) (||) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/Cabal.css cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/Cabal.css --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/Cabal.css 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/Cabal.css 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,49 @@ +body { + max-width: 18cm; +} + +div { + font-family: sans-serif; + color: black; + background: white +} + +h1, h2, h3, h4, h5, h6, p.title { color: #005A9C } + +h1 { font: 170% sans-serif } +h2 { font: 140% sans-serif } +h3 { font: 120% sans-serif } +h4 { font: bold 100% sans-serif } +h5 { font: italic 100% sans-serif } +h6 { font: small-caps 100% sans-serif } + +pre { + font-family: monospace; + border-width: 1px; + border-style: solid; + padding: 0.3em +} + +pre.screen { color: #006400 } +pre.programlisting { color: maroon } + +div.example { + margin: 1ex 0em; + border: solid #412e25 1px; + padding: 0ex 0.4em +} + +div.example, div.example-contents { + background-color: #fffcf5 +} + +a:link { color: #0000C8 } +a:hover { background: #FFFFA8 } +a:active { color: #D00000 } +a:visited { color: #680098 } + +h1 a:link, h2 a:link, h3 a:link, h4 a:link, h5 a:link, h6 a:link, +h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited { + color: #005A9C; + text-decoration: none +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/developing-packages.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/developing-packages.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/developing-packages.markdown 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/developing-packages.markdown 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,2238 @@ +% Cabal User Guide: Developing Cabal packages + + +# Quickstart # + + +Lets assume we have created a project directory and already have a +Haskell module or two. + +Every project needs a name, we'll call this example "proglet". 
+ +~~~~~~~~~~~ +$ cd proglet/ +$ ls +Proglet.hs +~~~~~~~~~~~ + +It is assumed that (apart from external dependencies) all the files that +make up a package live under a common project root directory. This +simple example has all the project files in one directory, but most +packages will use one or more subdirectories. + +To turn this into a Cabal package we need two extra files in the +project's root directory: + + * `proglet.cabal`: containing package metadata and build information. + + * `Setup.hs`: usually containing a few standardized lines of code, but + can be customized if necessary. + +We can create both files manually or we can use `cabal init` to create +them for us. + +### Using "cabal init" ### + +The `cabal init` command is interactive. It asks us a number of +questions starting with the package name and version. + +~~~~~~~~~~ +$ cabal init +Package name [default "proglet"]? +Package version [default "0.1"]? +... +~~~~~~~~~~ + +It also asks questions about various other bits of package metadata. For +a package that you never intend to distribute to others, these fields can +be left blank. + +One of the important questions is whether the package contains a library +or an executable. Libraries are collections of Haskell modules that can +be re-used by other Haskell libraries and programs, while executables +are standalone programs. + +~~~~~~~~~~ +What does the package build: + 1) Library + 2) Executable +Your choice? +~~~~~~~~~~ + +For the moment these are the only choices. For more complex packages +(e.g. a library and multiple executables or test suites) the `.cabal` +file can be edited afterwards. + +Finally, `cabal init` creates the initial `proglet.cabal` and `Setup.hs` +files, and depending on your choice of license, a `LICENSE` file as well. + +~~~~~~~~~~ +Generating LICENSE... +Generating Setup.hs... +Generating proglet.cabal... + +You may want to edit the .cabal file and add a Description field. +~~~~~~~~~~ + +As this stage the `proglet.cabal` is not quite complete and before you +are able to build the package you will need to edit the file and add +some build information about the library or executable. + +### Editing the .cabal file ### + +Load up the `.cabal` file in a text editor. The first part of the +`.cabal` file has the package metadata and towards the end of the file +you will find the `executable` or `library` section. + +You will see that the fields that have yet to be filled in are commented +out. Cabal files use "`--`" Haskell-style comment syntax. (Note that +comments are only allowed on lines on their own. Trailing comments on +other lines are not allowed because they could be confused with program +options.) + +If you selected earlier to create a library package then your `.cabal` +file will have a section that looks like this: + +~~~~~~~~~~~~~~~~~ +library + exposed-modules: Proglet + -- other-modules: + -- build-depends: +~~~~~~~~~~~~~~~~~ + +Alternatively, if you selected an executable then there will be a +section like: + +~~~~~~~~~~~~~~~~~ +executable proglet + -- main-is: + -- other-modules: + -- build-depends: +~~~~~~~~~~~~~~~~~ + +The build information fields listed (but commented out) are just the few +most important and common fields. There are many others that are covered +later in this chapter. + +Most of the build information fields are the same between libraries and +executables. 
The difference is that libraries have a number of "exposed" +modules that make up the public interface of the library, while +executables have a file containing a `Main` module. + +The name of a library always matches the name of the package, so it is +not specified in the library section. Executables often follow the name +of the package too, but this is not required and the name is given +explicitly. + +### Modules included in the package ### + +For a library, `cabal init` looks in the project directory for files +that look like Haskell modules and adds all the modules to the +`exposed-modules` field. For modules that do not form part of your +package's public interface, you can move those modules to the +`other-modules` field. Either way, all modules in the library need to be +listed. + +For an executable, `cabal init` does not try to guess which file +contains your program's `Main` module. You will need to fill in the +`main-is` field with the file name of your program's `Main` module +(including `.hs` or `.lhs` extension). Other modules included in the +executable should be listed in the `other-modules` field. + +### Modules imported from other packages ### + +While your library or executable may include a number of modules, it +almost certainly also imports a number of external modules from the +standard libraries or other pre-packaged libraries. (These other +libraries are of course just Cabal packages that contain a library.) + +You have to list all of the library packages that your library or +executable imports modules from. Or to put it another way: you have to +list all the other packages that your package depends on. + +For example, suppose the example `Proglet` module imports the module +`Data.Map`. The `Data.Map` module comes from the `containers` package, +so we must list it: + +~~~~~~~~~~~~~~~~~ +library + exposed-modules: Proglet + other-modules: + build-depends: containers, base == 4.* +~~~~~~~~~~~~~~~~~ + +In addition, almost every package also depends on the `base` library +package because it exports the standard `Prelude` module plus other +basic modules like `Data.List`. + +You will notice that we have listed `base == 4.*`. This gives a +constraint on the version of the base package that our package will work +with. The most common kinds of constraints are: + + * `pkgname >= n` + * `pkgname >= n && < m` + * `pkgname == n.*` + +The last is just shorthand, for example `base == 4.*` means exactly the +same thing as `base >= 4 && < 5`. + +### Building the package ### + +For simple packages that's it! We can now try configuring and building +the package: + +~~~~~~~~~~~~~~~~ +cabal configure +cabal build +~~~~~~~~~~~~~~~~ + +Assuming those two steps worked then you can also install the package: + +~~~~~~~~~~~~~~~~ +cabal install +~~~~~~~~~~~~~~~~ + +For libraries this makes them available for use in GHCi or to be used by +other packages. For executables it installs the program so that you can +run it (though you may first need to adjust your system's `$PATH`). + +### Next steps ### + +What we have covered so far should be enough for very simple packages +that you use on your own system. + +The next few sections cover more details needed for more complex +packages and details needed for distributing packages to other people. + +The previous chapter covers building and installing packages -- your own +packages or ones developed by other people. 
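+
+To wrap up the quickstart, the pieces above can be collected into one
+complete `proglet.cabal`. The following is only an illustrative sketch;
+the exact metadata fields and version constraints are whatever `cabal init`
+generated for you and whatever your code actually needs:
+
+~~~~~~~~~~~~~~~~
+name:          proglet
+version:       0.1
+synopsis:      A small example package
+license:       BSD3
+build-type:    Simple
+cabal-version: >= 1.10
+
+library
+  exposed-modules: Proglet
+  build-depends:   containers, base == 4.*
+~~~~~~~~~~~~~~~~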
+
+
+# Package concepts #
+
+Before diving into the details of writing packages it helps to
+understand a bit about packages in the Haskell world and the particular
+approach that Cabal takes.
+
+### The point of packages ###
+
+Packages are a mechanism for organising and distributing code. Packages
+are particularly suited for "programming in the large", that is, building
+big systems by using and re-using code written by different people at
+different times.
+
+People organise code into packages based on functionality and
+dependencies. Social factors are also important: most packages have a
+single author, or a relatively small team of authors.
+
+Packages are also used for distribution: the idea is that a package can
+be created in one place and be moved to a different computer and be
+usable in that different environment. There are a surprising number of
+details that have to be got right for this to work, and a good package
+system helps to simplify this process and make it reliable.
+
+Packages come in two main flavours: libraries of reusable code, and
+complete programs. Libraries present a code interface, an API, while
+programs can be run directly. In the Haskell world, library packages
+expose a set of Haskell modules as their public interface. Cabal
+packages can contain a library or executables or both.
+
+Some programming languages have packages as a built-in language concept.
+For example in Java, a package provides a local namespace for types and
+other definitions. In the Haskell world, packages are not a part of the
+language itself. Haskell programs consist of a number of modules, and
+packages just provide a way to partition the modules into sets of
+related functionality. Thus the choice of module names in Haskell is
+still important, even when using packages.
+
+### Package names and versions ###
+
+All packages have a name, e.g. "HUnit". Package names are assumed to be
+unique. Cabal package names can use letters, numbers and hyphens, but
+not spaces. The namespace for Cabal packages is flat, not hierarchical.
+
+Packages also have a version, e.g. "1.1". This matches the typical way in
+which packages are developed. Strictly speaking, each version of a
+package is independent, but usually they are very similar. Cabal package
+versions follow the conventional numeric style, consisting of a sequence
+of digits such as "1.0.1" or "2.0". There are a range of common
+conventions for "versioning" packages, that is, giving some meaning to
+the version number in terms of changes in the package. Section [TODO]
+has some tips on package versioning.
+
+The combination of package name and version is called the _package ID_
+and is written with a hyphen to separate the name and version, e.g.
+"HUnit-1.1".
+
+For Cabal packages, the combination of the package name and version
+_uniquely_ identifies each package. Or to put it another way: two
+packages with the same name and version are considered to _be_ the same.
+
+Strictly speaking, the package ID only identifies each Cabal _source_
+package; the same Cabal source package can be configured and built in
+different ways. There is a separate installed package ID that uniquely
+identifies each installed package instance. Most of the time however,
+users need not be aware of this detail.
+
+### Kinds of package: Cabal vs GHC vs system ###
+
+It can be slightly confusing at first because there are various
+different notions of package floating around. Fortunately the details
+are not very complicated.
+
+Cabal packages
+: Cabal packages are really source packages. That is, they contain
+ Haskell (and sometimes C) source code.
+
+ Cabal packages can be compiled to produce GHC packages. They can
+ also be translated into operating system packages.
+
+GHC packages
+: This is GHC's view on packages. GHC only cares about library
+ packages, not executables. Library packages have to be registered
+ with GHC for them to be available in GHCi or to be used when
+ compiling other programs or packages.
+
+ The low-level tool `ghc-pkg` is used to register GHC packages and to
+ get information on what packages are currently registered.
+
+ You never need to make GHC packages manually. When you build and
+ install a Cabal package containing a library then it gets registered
+ with GHC automatically.
+
+ Haskell implementations other than GHC have essentially the same
+ concept of registered packages. For the most part, Cabal hides the
+ slight differences.
+
+Operating system packages
+: On operating systems like Linux and Mac OS X, the system has a
+ specific notion of a package and there are tools for installing and
+ managing packages.
+
+ The Cabal package format is designed to allow Cabal packages to be
+ translated, mostly-automatically, into operating system packages.
+ They are usually translated 1:1, that is, a single Cabal package
+ becomes a single system package.
+
+ It is also possible to make Windows installers from Cabal packages,
+ though this is typically done for a program together with all of its
+ library dependencies, rather than packaging each library separately.
+
+
+### Unit of distribution ###
+
+The Cabal package is the unit of distribution. What this means is that
+each Cabal package can be distributed on its own in source or binary
+form. Of course there may be dependencies between packages, but there is
+usually a degree of flexibility in which versions of packages can work
+together, so distributing them independently makes sense.
+
+It is perhaps easiest to see what being "the unit of distribution"
+means by contrast to an alternative approach. Many projects are made up
+of several interdependent packages and during development these might
+all be kept under one common directory tree and be built and tested
+together. When it comes to distribution however, rather than
+distributing them all together in a single tarball, it is required that
+they each be distributed independently in their own tarballs.
+
+Cabal's approach is to say that if you can specify a dependency on a
+package then that package should be able to be distributed
+independently. Or to put it the other way round, if you want to
+distribute it as a single unit, then it should be a single package.
+
+
+### Explicit dependencies and automatic package management ###
+
+Cabal takes the approach that all package dependencies are specified
+explicitly and specified in a declarative way. The point is to enable
+automatic package management. This means tools like `cabal` can resolve
+dependencies and install a package plus all of its dependencies
+automatically. Alternatively, it is possible to mechanically (or mostly
+mechanically) translate Cabal packages into system packages and let the
+system package manager install dependencies automatically.
+
+It is important to track dependencies accurately so that packages can
+reliably be moved from one system to another and still be built there.
+Cabal is therefore relatively strict about specifying
+dependencies.
For example Cabal's default build system will not even let +code build if it tries to import a module from a package that isn't +listed in the `.cabal` file, even if that package is actually installed. +This helps to ensure that there are no "untracked dependencies" that +could cause the code to fail to build on some other system. + +The explicit dependency approach is in contrast to the traditional +"./configure" approach where instead of specifying dependencies +declaratively, the `./configure` script checks if the dependencies are +present on the system. Some manual work is required to transform a +`./configure` based package into a Linux distribution package (or +similar). This conversion work is usually done by people other than the +package author(s). The practical effect of this is that only the most +popular packages will benefit from automatic package management. Instead, +Cabal forces the original author to specify the dependencies but the +advantage is that every package can benefit from automatic package +management. + +The "./configure" approach tends to encourage packages that adapt +themselves to the environment in which they are built, for example by +disabling optional features so that they can continue to work when a +particular dependency is not available. This approach makes sense in a +world where installing additional dependencies is a tiresome manual +process and so minimising dependencies is important. The automatic +package management view is that packages should just declare what they +need and the package manager will take responsibility for ensuring that +all the dependencies are installed. + +Sometimes of course optional features and optional dependencies do make +sense. Cabal packages can have optional features and varying +dependencies. These conditional dependencies are still specified in a +declarative way however and remain compatible with automatic package +management. The need to remain compatible with automatic package +management means that Cabal's conditional dependencies system is a bit +less flexible than with the "./configure" approach. + +### Portability ### + +One of the purposes of Cabal is to make it easier to build packages on +different platforms (operating systems and CPU architectures), with +different compiler versions and indeed even with different Haskell +implementations. (Yes, there are Haskell implementations other than +GHC!) + +Cabal provides abstractions of features present in different Haskell +implementations and wherever possible it is best to take advantage of +these to increase portability. Where necessary however it is possible to +use specific features of specific implementations. + +For example a package author can list in the package's `.cabal` what +language extensions the code uses. This allows Cabal to figure out if +the language extension is supported by the Haskell implementation that +the user picks. Additionally, certain language extensions such as +Template Haskell require special handling from the build system and by +listing the extension it provides the build system with enough +information to do the right thing. + +Another similar example is linking with foreign libraries. Rather than +specifying GHC flags directly, the package author can list the libraries +that are needed and the build system will take care of using the right +flags for the compiler. 
Additionally this makes it easier for tools to +discover what system C libraries a package needs, which is useful for +tracking dependencies on system libraries (e.g. when translating into +Linux distribution packages). + +In fact both of these examples fall into the category of explicitly +specifying dependencies. Not all dependencies are other Cabal packages. +Foreign libraries are clearly another kind of dependency. It's also +possible to think of language extensions as dependencies: the package +depends on a Haskell implementation that supports all those extensions. + +Where compiler-specific options are needed however, there is an "escape +hatch" available. The developer can specify implementation-specific +options and more generally there is a configuration mechanism to +customise many aspects of how a package is built depending on the +Haskell implementation, the operating system, computer architecture and +user-specified configuration flags. + + +# Developing packages # + +The Cabal package is the unit of distribution. When installed, its +purpose is to make available: + + * One or more Haskell programs. + + * At most one library, exposing a number of Haskell modules. + +However having both a library and executables in a package does not work +very well; if the executables depend on the library, they must +explicitly list all the modules they directly or indirectly import from +that library. Fortunately, starting with Cabal 1.8.0.4, executables can +also declare the package that they are in as a dependency, and Cabal +will treat them as if they were in another package that depended on +the library. + +Internally, the package may consist of much more than a bunch of Haskell +modules: it may also have C source code and header files, source code +meant for preprocessing, documentation, test cases, auxiliary tools etc. + +A package is identified by a globally-unique _package name_, which +consists of one or more alphanumeric words separated by hyphens. To +avoid ambiguity, each of these words should contain at least one letter. +Chaos will result if two distinct packages with the same name are +installed on the same system. A particular version of the package is +distinguished by a _version number_, consisting of a sequence of one or +more integers separated by dots. These can be combined to form a single +text string called the _package ID_, using a hyphen to separate the name +from the version, e.g. "`HUnit-1.1`". + +Note: Packages are not part of the Haskell language; they simply +populate the hierarchical space of module names. In GHC 6.6 and later a +program may contain multiple modules with the same name if they come +from separate packages; in all other current Haskell systems packages +may not overlap in the modules they provide, including hidden modules. + + +## Creating a package ## + +Suppose you have a directory hierarchy containing the source files that +make up your package. You will need to add two more files to the root +directory of the package: + +_package_`.cabal` + +: a Unicode UTF-8 text file containing a package description. + For details of the syntax of this file, see the [section on package + descriptions](#package-descriptions). + +`Setup.hs` + +: a single-module Haskell program to perform various setup tasks (with + the interface described in the section on [building and installing + packages](installing-packages.html). This module should + import only modules that will be present in all Haskell + implementations, including modules of the Cabal library. 
The + content of this file is determined by the `build-type` setting in + the `.cabal` file. In most cases it will be trivial, calling on + the Cabal library to do most of the work. + +Once you have these, you can create a source bundle of this directory +for distribution. Building of the package is discussed in the section on +[building and installing packages](installing-packages.html). + +One of the purposes of Cabal is to make it easier to build a package +with different Haskell implementations. So it provides abstractions of +features present in different Haskell implementations and wherever +possible it is best to take advantage of these to increase portability. +Where necessary however it is possible to use specific features of +specific implementations. For example one of the pieces of information a +package author can put in the package's `.cabal` file is what language +extensions the code uses. This is far preferable to specifying flags for +a specific compiler as it allows Cabal to pick the right flags for the +Haskell implementation that the user picks. It also allows Cabal to +figure out if the language extension is even supported by the Haskell +implementation that the user picks. Where compiler-specific options are +needed however, there is an "escape hatch" available. The developer can +specify implementation-specific options and more generally there is a +configuration mechanism to customise many aspects of how a package is +built depending on the Haskell implementation, the Operating system, +computer architecture and user-specified configuration flags. + +~~~~~~~~~~~~~~~~ +name: Foo +version: 1.0 + +library + build-depends: base + exposed-modules: Foo + extensions: ForeignFunctionInterface + ghc-options: -Wall + if os(windows) + build-depends: Win32 +~~~~~~~~~~~~~~~~ + +#### Example: A package containing a simple library #### + +The HUnit package contains a file `HUnit.cabal` containing: + +~~~~~~~~~~~~~~~~ +name: HUnit +version: 1.1.1 +synopsis: A unit testing framework for Haskell +homepage: http://hunit.sourceforge.net/ +category: Testing +author: Dean Herington +license: BSD3 +license-file: LICENSE +cabal-version: >= 1.10 +build-type: Simple + +library + build-depends: base >= 2 && < 4 + exposed-modules: Test.HUnit.Base, Test.HUnit.Lang, + Test.HUnit.Terminal, Test.HUnit.Text, Test.HUnit + default-extensions: CPP +~~~~~~~~~~~~~~~~ + +and the following `Setup.hs`: + +~~~~~~~~~~~~~~~~ +import Distribution.Simple +main = defaultMain +~~~~~~~~~~~~~~~~ + +#### Example: A package containing executable programs #### + +~~~~~~~~~~~~~~~~ +name: TestPackage +version: 0.0 +synopsis: Small package with two programs +author: Angela Author +license: BSD3 +build-type: Simple +cabal-version: >= 1.2 + +executable program1 + build-depends: HUnit + main-is: Main.hs + hs-source-dirs: prog1 + +executable program2 + main-is: Main.hs + build-depends: HUnit + hs-source-dirs: prog2 + other-modules: Utils +~~~~~~~~~~~~~~~~ + +with `Setup.hs` the same as above. 
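+
+As a quick usage sketch, assuming the `TestPackage` example above is the
+package in the current directory, each executable can be built and started
+individually with the `run` command (described in more detail later in this
+guide). Because the package defines two executables, the executable name
+must be given explicitly:
+
+~~~~~~~~~~~~~~~~
+$ cabal configure
+$ cabal build
+$ cabal run program1
+~~~~~~~~~~~~~~~~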
+
+#### Example: A package containing a library and executable programs ####
+
+~~~~~~~~~~~~~~~~
+name: TestPackage
+version: 0.0
+synopsis: Package with library and two programs
+license: BSD3
+author: Angela Author
+build-type: Simple
+cabal-version: >= 1.2
+
+library
+ build-depends: HUnit
+ exposed-modules: A, B, C
+
+executable program1
+ main-is: Main.hs
+ hs-source-dirs: prog1
+ other-modules: A, B
+
+executable program2
+ main-is: Main.hs
+ hs-source-dirs: prog2
+ other-modules: A, C, Utils
+~~~~~~~~~~~~~~~~
+
+with `Setup.hs` the same as above. Note that any library modules
+required (directly or indirectly) by an executable must be listed again.
+
+The trivial setup script used in these examples uses the _simple build
+infrastructure_ provided by the Cabal library (see
+[Distribution.Simple][dist-simple]). The simplicity lies in its
+interface rather than its implementation. It automatically handles
+preprocessing with standard preprocessors, and builds packages for all
+the Haskell implementations.
+
+The simple build infrastructure can also handle packages where building
+is governed by system-dependent parameters, if you specify a little more
+(see the section on [system-dependent
+parameters](#system-dependent-parameters)). A few packages require [more
+elaborate solutions](#more-complex-packages).
+
+## Package descriptions ##
+
+The package description file must have a name ending in "`.cabal`". It
+must be a Unicode text file encoded using valid UTF-8. There must be
+exactly one such file in the directory. The first part of the name is
+usually the package name, and some of the tools that operate on Cabal
+packages require this.
+
+In the package description file, lines whose first non-whitespace characters
+are "`--`" are treated as comments and ignored.
+
+This file should contain a number of global property descriptions and
+several sections.
+
+* The [global properties](#package-properties) describe the package as a
+ whole, such as name, license, author, etc.
+
+* Optionally, a number of _configuration flags_ can be declared. These
+ can be used to enable or disable certain features of a package (see
+ the section on [configurations](#configurations)).
+
+* The (optional) library section specifies the [library
+ properties](#library) and relevant [build
+ information](#build-information).
+
+* Following is an arbitrary number of executable sections, each of which
+ describes an executable program and relevant [build
+ information](#build-information).
+
+Each section consists of a number of property descriptions
+in the form of field/value pairs, with a syntax roughly like mail
+message headers.
+
+* Case is not significant in field names, but is significant in field
+ values.
+
+* To continue a field value, indent the next line relative to the field
+ name.
+
+* Field names may be indented, but all field values in the same section
+ must use the same indentation.
+
+* Tabs are *not* allowed as indentation characters due to a missing
+ standard interpretation of tab width.
+
+* To get a blank line in a field value, use an indented "`.`"
+
+The syntax of the value depends on the field. Field types include:
+
+_token_, _filename_, _directory_
+: Either a sequence of one or more non-space non-comma characters, or a quoted
+ string in Haskell 98 lexical syntax. The latter can be used for escaping
+ whitespace, for example: `ghc-options: -Wall "-with-rtsopts=-T -I1"`.
+ Unless otherwise stated, relative filenames and directories are interpreted
+ from the package root directory.
+ +_freeform_, _URL_, _address_ +: An arbitrary, uninterpreted string. + +_identifier_ +: A letter followed by zero or more alphanumerics or underscores. + +_compiler_ +: A compiler flavor (one of: `GHC`, `JHC`, `UHC` or `LHC`) followed by a + version range. For example, `GHC ==6.10.3`, or `LHC >=0.6 && <0.8`. + +### Modules and preprocessors ### + +Haskell module names listed in the `exposed-modules` and `other-modules` +fields may correspond to Haskell source files, i.e. with names ending in +"`.hs`" or "`.lhs`", or to inputs for various Haskell preprocessors. The +simple build infrastructure understands the extensions: + +* `.gc` ([greencard][]) +* `.chs` ([c2hs][]) +* `.hsc` (`hsc2hs`) +* `.y` and `.ly` ([happy][]) +* `.x` ([alex][]) +* `.cpphs` ([cpphs][]) + +When building, Cabal will automatically run the appropriate +preprocessor and compile the Haskell module it produces. For the +`c2hs` and `hsc2hs` preprocessors, Cabal will also automatically add, +compile and link any C sources generated by the preprocessor (produced +by `hsc2hs`'s `#def` feature or `c2hs`'s auto-generated wrapper +functions). + +Some fields take lists of values, which are optionally separated by commas, +except for the `build-depends` field, where the commas are mandatory. + +Some fields are marked as required. All others are optional, and unless +otherwise specified have empty default values. + +### Package properties ### + +These fields may occur in the first top-level properties section and +describe the package as a whole: + +`name:` _package-name_ (required) +: The unique name of the package, without the version number. + +`version:` _numbers_ (required) +: The package version number, usually consisting of a sequence of + natural numbers separated by dots. + +`cabal-version:` _>= x.y_ +: The version of the Cabal specification that this package description uses. + The Cabal specification does slowly evolve, introducing new features and + occasionally changing the meaning of existing features. By specifying + which version of the spec you are using it enables programs which process + the package description to know what syntax to expect and what each part + means. + + For historical reasons this is always expressed using _>=_ version range + syntax. No other kinds of version range make sense, in particular upper + bounds do not make sense. In future this field will specify just a version + number, rather than a version range. + + The version number you specify will affect both compatibility and + behaviour. Most tools (including the Cabal library and cabal program) + understand a range of versions of the Cabal specification. Older tools + will of course only work with older versions of the Cabal specification. + Most of the time, tools that are too old will recognise this fact and + produce a suitable error message. + + As for behaviour, new versions of the Cabal spec can change the meaning + of existing syntax. This means if you want to take advantage of the new + meaning or behaviour then you must specify the newer Cabal version. + Tools are expected to use the meaning and behaviour appropriate to the + version given in the package description. + + In particular, the syntax of package descriptions changed significantly + with Cabal version 1.2 and the `cabal-version` field is now required. + Files written in the old syntax are still recognized, so if you require + compatibility with very old Cabal versions then you may write your package + description file using the old syntax. 
Please consult the user's guide of + an older Cabal version for a description of that syntax. + +`build-type:` _identifier_ +: The type of build used by this package. Build types are the + constructors of the [BuildType][] type, defaulting to `Custom`. + + If the build type is anything other than `Custom`, then the + `Setup.hs` file *must* be exactly the standardized content + discussed below. This is because in these cases, `cabal` will + ignore the `Setup.hs` file completely, whereas other methods of + package management, such as `runhaskell Setup.hs [CMD]`, still + rely on the `Setup.hs` file. + + For build type `Simple`, the contents of `Setup.hs` must be: + + ~~~~~~~~~~~~~~~~ + import Distribution.Simple + main = defaultMain + ~~~~~~~~~~~~~~~~ + + For build type `Configure` (see the section on [system-dependent + parameters](#system-dependent-parameters) below), the contents of + `Setup.hs` must be: + + ~~~~~~~~~~~~~~~~ + import Distribution.Simple + main = defaultMainWithHooks autoconfUserHooks + ~~~~~~~~~~~~~~~~ + + For build type `Make` (see the section on [more complex + packages](installing-packages.html#more-complex-packages) below), + the contents of `Setup.hs` must be: + + ~~~~~~~~~~~~~~~~ + import Distribution.Make + main = defaultMain + ~~~~~~~~~~~~~~~~ + + For build type `Custom`, the file `Setup.hs` can be customized, + and will be used both by `cabal` and other tools. + + For most packages, the build type `Simple` is sufficient. + +`license:` _identifier_ (default: `AllRightsReserved`) +: The type of license under which this package is distributed. + License names are the constants of the [License][dist-license] type. + +`license-file:` _filename_ or `license-files:` _filename list_ +: The name of a file(s) containing the precise copyright license for + this package. The license file(s) will be installed with the package. + + If you have multiple license files then use the `license-files` + field instead of (or in addition to) the `license-file` field. + +`copyright:` _freeform_ +: The content of a copyright notice, typically the name of the holder + of the copyright on the package and the year(s) from which copyright + is claimed. For example: `Copyright: (c) 2006-2007 Joe Bloggs` + +`author:` _freeform_ +: The original author of the package. + + Remember that `.cabal` files are Unicode, using the UTF-8 encoding. + +`maintainer:` _address_ +: The current maintainer or maintainers of the package. This is an e-mail address to which users should send bug + reports, feature requests and patches. + +`stability:` _freeform_ +: The stability level of the package, e.g. `alpha`, `experimental`, `provisional`, + `stable`. + +`homepage:` _URL_ +: The package homepage. + +`bug-reports:` _URL_ +: The URL where users should direct bug reports. This would normally be either: + + * A `mailto:` URL, e.g. for a person or a mailing list. + + * An `http:` (or `https:`) URL for an online bug tracking system. + + For example Cabal itself uses a web-based bug tracking system + + ~~~~~~~~~~~~~~~~ + bug-reports: http://hackage.haskell.org/trac/hackage/ + ~~~~~~~~~~~~~~~~ + +`package-url:` _URL_ +: The location of a source bundle for the package. The distribution + should be a Cabal package. + +`synopsis:` _freeform_ +: A very short description of the package, for use in a table of + packages. This is your headline, so keep it short (one line) but as + informative as possible. Save space by not including the package + name or saying it's written in Haskell. 
+ +`description:` _freeform_ +: Description of the package. This may be several paragraphs, and + should be aimed at a Haskell programmer who has never heard of your + package before. + + For library packages, this field is used as prologue text by [`setup + haddock`](installing-packages.html#setup-haddock), and thus may + contain the same markup as [haddock][] documentation comments. + +`category:` _freeform_ +: A classification category for future use by the package catalogue [Hackage]. These + categories have not yet been specified, but the upper levels of the + module hierarchy make a good start. + +`tested-with:` _compiler list_ +: A list of compilers and versions against which the package has been + tested (or at least built). + +`data-files:` _filename list_ +: A list of files to be installed for run-time use by the package. + This is useful for packages that use a large amount of static data, + such as tables of values or code templates. Cabal provides a way to + [find these files at + run-time](#accessing-data-files-from-package-code). + + A limited form of `*` wildcards in file names, for example + `data-files: images/*.png` matches all the `.png` files in the + `images` directory. + + The limitation is that `*` wildcards are only allowed in place of + the file name, not in the directory name or file extension. In + particular, wildcards do not include directories contents + recursively. Furthermore, if a wildcard is used it must be used with + an extension, so `data-files: data/*` is not allowed. When matching + a wildcard plus extension, a file's full extension must match + exactly, so `*.gz` matches `foo.gz` but not `foo.tar.gz`. A wildcard + that does not match any files is an error. + + The reason for providing only a very limited form of wildcard is to + concisely express the common case of a large number of related files + of the same file type without making it too easy to accidentally + include unwanted files. + +`data-dir:` _directory_ +: The directory where Cabal looks for data files to install, relative + to the source directory. By default, Cabal will look in the source + directory itself. + +`extra-source-files:` _filename list_ +: A list of additional files to be included in source distributions + built with [`setup sdist`](installing-packages.html#setup-sdist). As + with `data-files` it can use a limited form of `*` wildcards in file + names. + +`extra-doc-files:` _filename list_ +: A list of additional files to be included in source distributions, + and also copied to the html directory when Haddock documentation is + generated. As with `data-files` it can use a limited form of `*` + wildcards in file names. + +`extra-tmp-files:` _filename list_ +: A list of additional files or directories to be removed by [`setup + clean`](installing-packages.html#setup-clean). These would typically + be additional files created by additional hooks, such as the scheme + described in the section on [system-dependent + parameters](#system-dependent-parameters). + +### Library ### + +The library section should contain the following fields: + +`exposed-modules:` _identifier list_ (required if this package contains a library) +: A list of modules added by this package. + +`exposed:` _boolean_ (default: `True`) +: Some Haskell compilers (notably GHC) support the notion of packages + being "exposed" or "hidden" which means the modules they provide can + be easily imported without always having to specify which package + they come from. 
However this only works effectively if the modules + provided by all exposed packages do not overlap (otherwise a module + import would be ambiguous). + + Almost all new libraries use hierarchical module names that do not + clash, so it is very uncommon to have to use this field. However it + may be necessary to set `exposed: False` for some old libraries that + use a flat module namespace or where it is known that the exposed + modules would clash with other common modules. + +`reexported-modules:` _exportlist _ +: Supported only in GHC 7.10 and later. A list of modules to _reexport_ from + this package. The syntax of this field is `orig-pkg:Name as NewName` to + reexport module `Name` from `orig-pkg` with the new name `NewName`. We also + support abbreviated versions of the syntax: if you omit `as NewName`, + we'll reexport without renaming; if you omit `orig-pkg`, then we will + automatically figure out which package to reexport from, if it's + unambiguous. + + Reexported modules are useful for compatibility shims when a package has + been split into multiple packages, and they have the useful property that + if a package provides a module, and another package reexports it under + the same name, these are not considered a conflict (as would be the case + with a stub module.) They can also be used to resolve name conflicts. + +The library section may also contain build information fields (see the +section on [build information](#build-information)). + +#### Opening an interpreter session #### + +While developing a package, it is often useful to make its code available inside +an interpreter session. This can be done with the `repl` command: + +~~~~~~~~~~~~~~~~ +cabal repl +~~~~~~~~~~~~~~~~ + +The name comes from the acronym [REPL], which stands for +"read-eval-print-loop". By default `cabal repl` loads the first component in a +package. If the package contains several named components, the name can be given +as an argument to `repl`. The name can be also optionally prefixed with the +component's type for disambiguation purposes. Example: + +~~~~~~~~~~~~~~~~ +cabal repl foo +cabal repl exe:foo +cabal repl test:bar +cabal repl bench:baz +~~~~~~~~~~~~~~~~ + +#### Freezing dependency versions #### + +If a package is built in several different environments, such as a development +environment, a staging environment and a production environment, it may be +necessary or desirable to ensure that the same dependency versions are +selected in each environment. This can be done with the `freeze` command: + +~~~~~~~~~~~~~~~~ +cabal freeze +~~~~~~~~~~~~~~~~ + +The command writes the selected version for all dependencies to the +`cabal.config` file. All environments which share this file will use the +dependency versions specified in it. + +#### Generating dependency version bounds #### + +Cabal also has the ability to suggest dependency version bounds that conform to +[Package Versioning Policy][PVP], which is a recommended versioning system for +publicly released Cabal packages. 
This is done by running the `gen-bounds` +command: + +~~~~~~~~~~~~~~~~ +cabal gen-bounds +~~~~~~~~~~~~~~~~ + +For example, given the following dependencies specified in `build-depends`: + +~~~~~~~~~~~~~~~~ +foo == 0.5.2 +bar == 1.1 +~~~~~~~~~~~~~~~~ + +`gen-bounds` will suggest changing them to the following: + +~~~~~~~~~~~~~~~~ +foo >= 0.5.2 && < 0.6 +bar >= 1.1 && < 1.2 +~~~~~~~~~~~~~~~~ + + +### Executables ### + +Executable sections (if present) describe executable programs contained +in the package and must have an argument after the section label, which +defines the name of the executable. This is a freeform argument but may +not contain spaces. + +The executable may be described using the following fields, as well as +build information fields (see the section on [build +information](#build-information)). + +`main-is:` _filename_ (required) +: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that it is the + `.hs` filename that must be listed, even if that file is generated + using a preprocessor. The source file must be relative to one of the + directories listed in `hs-source-dirs`. + +#### Running executables #### + +You can have Cabal build and run your executables by using the `run` command: + +~~~~~~~~~~~~~~~~ +$ cabal run EXECUTABLE [-- EXECUTABLE_FLAGS] +~~~~~~~~~~~~~~~~ + +This command will configure, build and run the executable `EXECUTABLE`. The +double dash separator is required to distinguish executable flags from `run`'s +own flags. If there is only one executable defined in the whole package, the +executable's name can be omitted. See the output of `cabal help run` for a list +of options you can pass to `cabal run`. + + +### Test suites ### + +Test suite sections (if present) describe package test suites and must have an +argument after the section label, which defines the name of the test suite. +This is a freeform argument, but may not contain spaces. It should be unique +among the names of the package's other test suites, the package's executables, +and the package itself. Using test suite sections requires at least Cabal +version 1.9.2. + +The test suite may be described using the following fields, as well as build +information fields (see the section on [build +information](#build-information)). + +`type:` _interface_ (required) +: The interface type and version of the test suite. Cabal supports two test + suite interfaces, called `exitcode-stdio-1.0` and `detailed-0.9`. Each of + these types may require or disallow other fields as described below. + +Test suites using the `exitcode-stdio-1.0` interface are executables +that indicate test failure with a non-zero exit code when run; they may provide +human-readable log information through the standard output and error channels. +This interface is provided primarily for compatibility with existing test +suites; it is preferred that new test suites be written for the `detailed-0.9` +interface. The `exitcode-stdio-1.0` type requires the `main-is` field. + +`main-is:` _filename_ (required: `exitcode-stdio-1.0`, disallowed: `detailed-0.9`) +: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that it is the + `.hs` filename that must be listed, even if that file is generated + using a preprocessor. The source file must be relative to one of the + directories listed in `hs-source-dirs`. This field is analogous to the + `main-is` field of an executable section. + +Test suites using the `detailed-0.9` interface are modules exporting the symbol +`tests :: IO [Test]`. 
The `Test` type is exported by the module +`Distribution.TestSuite` provided by Cabal. For more details, see the example below. + +The `detailed-0.9` interface allows Cabal and other test agents to inspect a +test suite's results case by case, producing detailed human- and +machine-readable log files. The `detailed-0.9` interface requires the +`test-module` field. + +`test-module:` _identifier_ (required: `detailed-0.9`, disallowed: `exitcode-stdio-1.0`) +: The module exporting the `tests` symbol. + +#### Example: Package using `exitcode-stdio-1.0` interface #### + +The example package description and executable source file below demonstrate +the use of the `exitcode-stdio-1.0` interface. + +foo.cabal: + +~~~~~~~~~~~~~~~~ +Name: foo +Version: 1.0 +License: BSD3 +Cabal-Version: >= 1.9.2 +Build-Type: Simple + +Test-Suite test-foo + type: exitcode-stdio-1.0 + main-is: test-foo.hs + build-depends: base +~~~~~~~~~~~~~~~~ + +test-foo.hs: + +~~~~~~~~~~~~~~~~ +module Main where + +import System.Exit (exitFailure) + +main = do + putStrLn "This test always fails!" + exitFailure +~~~~~~~~~~~~~~~~ + +#### Example: Package using `detailed-0.9` interface #### + +The example package description and test module source file below demonstrate +the use of the `detailed-0.9` interface. The test module also develops a simple +implementation of the interface set by `Distribution.TestSuite`, but in actual +usage the implementation would be provided by the library that provides the +testing facility. + +bar.cabal: + +~~~~~~~~~~~~~~~~ +Name: bar +Version: 1.0 +License: BSD3 +Cabal-Version: >= 1.9.2 +Build-Type: Simple + +Test-Suite test-bar + type: detailed-0.9 + test-module: Bar + build-depends: base, Cabal >= 1.9.2 +~~~~~~~~~~~~~~~~ + +Bar.hs: + +~~~~~~~~~~~~~~~~ +module Bar ( tests ) where + +import Distribution.TestSuite + +tests :: IO [Test] +tests = return [ Test succeeds, Test fails ] + where + succeeds = TestInstance + { run = return $ Finished Pass + , name = "succeeds" + , tags = [] + , options = [] + , setOption = \_ _ -> Right succeeds + } + fails = TestInstance + { run = return $ Finished $ Fail "Always fails!" + , name = "fails" + , tags = [] + , options = [] + , setOption = \_ _ -> Right fails + } +~~~~~~~~~~~~~~~~ + +#### Running test suites #### + +You can have Cabal run your test suites using its built-in test +runner: + +~~~~~~~~~~~~~~~~ +$ cabal configure --enable-tests +$ cabal build +$ cabal test +~~~~~~~~~~~~~~~~ + +See the output of `cabal help test` for a list of options you can pass +to `cabal test`. + +### Benchmarks ### + +Benchmark sections (if present) describe benchmarks contained in the package and +must have an argument after the section label, which defines the name of the +benchmark. This is a freeform argument, but may not contain spaces. It should +be unique among the names of the package's other benchmarks, the package's test +suites, the package's executables, and the package itself. Using benchmark +sections requires at least Cabal version 1.9.2. + +The benchmark may be described using the following fields, as well as build +information fields (see the section on [build information](#build-information)). + +`type:` _interface_ (required) +: The interface type and version of the benchmark. At the moment Cabal only + support one benchmark interface, called `exitcode-stdio-1.0`. 
+ +Benchmarks using the `exitcode-stdio-1.0` interface are executables that +indicate failure to run the benchmark with a non-zero exit code when run; they +may provide human-readable information through the standard output and error +channels. + +`main-is:` _filename_ (required: `exitcode-stdio-1.0`) +: The name of the `.hs` or `.lhs` file containing the `Main` module. Note that + it is the `.hs` filename that must be listed, even if that file is generated + using a preprocessor. The source file must be relative to one of the + directories listed in `hs-source-dirs`. This field is analogous to the + `main-is` field of an executable section. + +#### Example: Package using `exitcode-stdio-1.0` interface #### + +The example package description and executable source file below demonstrate +the use of the `exitcode-stdio-1.0` interface. + +foo.cabal: + +~~~~~~~~~~~~~~~~ +Name: foo +Version: 1.0 +License: BSD3 +Cabal-Version: >= 1.9.2 +Build-Type: Simple + +Benchmark bench-foo + type: exitcode-stdio-1.0 + main-is: bench-foo.hs + build-depends: base, time +~~~~~~~~~~~~~~~~ + +bench-foo.hs: + +~~~~~~~~~~~~~~~~ +{-# LANGUAGE BangPatterns #-} +module Main where + +import Data.Time.Clock + +fib 0 = 1 +fib 1 = 1 +fib n = fib (n-1) + fib (n-2) + +main = do + start <- getCurrentTime + let !r = fib 20 + end <- getCurrentTime + putStrLn $ "fib 20 took " ++ show (diffUTCTime end start) +~~~~~~~~~~~~~~~~ + +#### Running benchmarks #### + +You can have Cabal run your benchmark using its built-in benchmark runner: + +~~~~~~~~~~~~~~~~ +$ cabal configure --enable-benchmarks +$ cabal build +$ cabal bench +~~~~~~~~~~~~~~~~ + +See the output of `cabal help bench` for a list of options you can +pass to `cabal bench`. + +### Build information ### + +The following fields may be optionally present in a library, executable, test +suite or benchmark section, and give information for the building of the +corresponding library or executable. See also the sections on +[system-dependent parameters](#system-dependent-parameters) and +[configurations](#configurations) for a way to supply system-dependent values +for these fields. + +`build-depends:` _package list_ +: A list of packages needed to build this one. Each package can be + annotated with a version constraint. + + Version constraints use the operators `==, >=, >, <, <=` and a + version number. Multiple constraints can be combined using `&&` or + `||`. If no version constraint is specified, any version is assumed + to be acceptable. For example: + + ~~~~~~~~~~~~~~~~ + library + build-depends: + base >= 2, + foo >= 1.2 && < 1.3, + bar + ~~~~~~~~~~~~~~~~ + + Dependencies like `foo >= 1.2 && < 1.3` turn out to be very common + because it is recommended practise for package versions to + correspond to API versions. As of Cabal 1.6, there is a special + syntax to support this use: + + ~~~~~~~~~~~~~~~~ + build-depends: foo ==1.2.* + ~~~~~~~~~~~~~~~~ + + It is only syntactic sugar. It is exactly equivalent to `foo >= 1.2 && < 1.3`. + + Note: Prior to Cabal 1.8, `build-depends` specified in each section + were global to all sections. This was unintentional, but some packages + were written to depend on it, so if you need your `build-depends` to + be local to each section, you must specify at least + `Cabal-Version: >= 1.8` in your `.cabal` file. + + Note: Cabal 1.20 experimentally supported module thinning and + renaming in `build-depends`; however, this support has since been + removed and should not be used. 
+ +`other-modules:` _identifier list_ +: A list of modules used by the component but not exposed to users. + For a library component, these would be hidden modules of the + library. For an executable, these would be auxiliary modules to be + linked with the file named in the `main-is` field. + + Note: Every module in the package *must* be listed in one of + `other-modules`, `exposed-modules` or `main-is` fields. + +`hs-source-dirs:` _directory list_ (default: "`.`") +: Root directories for the module hierarchy. + + For backwards compatibility, the old variant `hs-source-dir` is also + recognized. + +`default-extensions:` _identifier list_ +: A list of Haskell extensions used by every module. These determine + corresponding compiler options enabled for all files. Extension names are + the constructors of the [Extension][extension] type. For example, `CPP` + specifies that Haskell source files are to be preprocessed with a C + preprocessor. + +`other-extensions:` _identifier list_ +: A list of Haskell extensions used by some (but not necessarily all) modules. + From GHC version 6.6 onward, these may be specified by placing a `LANGUAGE` + pragma in the source files affected e.g. + + ~~~~~~~~~~~~~~~~ + {-# LANGUAGE CPP, MultiParamTypeClasses #-} + ~~~~~~~~~~~~~~~~ + + In Cabal-1.24 the dependency solver will use this and `default-extensions` information. + Cabal prior to 1.24 will abort compilation if the current compiler doesn't provide + the extensions. + + If you use some extensions conditionally, using CPP or conditional module lists, + it is good to replicate the condition in `other-extensions` declarations: + + ~~~~~~~~~~~~~~~~ + other-extensions: CPP + if impl(ghc >= 7.5) + other-extensions: PolyKinds + ~~~~~~~~~~~~~~~~ + + You could also omit the conditionally used extensions, as they are for information only, + but it is recommended to replicate them in `other-extensions` declarations. + +`build-tools:` _program list_ +: A list of programs, possibly annotated with versions, needed to + build this package, e.g. `c2hs >= 0.15, cpphs`.If no version + constraint is specified, any version is assumed to be acceptable. + +`buildable:` _boolean_ (default: `True`) +: Is the component buildable? Like some of the other fields below, + this field is more useful with the slightly more elaborate form of + the simple build infrastructure described in the section on + [system-dependent parameters](#system-dependent-parameters). + +`ghc-options:` _token list_ +: Additional options for GHC. You can often achieve the same effect + using the `extensions` field, which is preferred. + + Options required only by one module may be specified by placing an + `OPTIONS_GHC` pragma in the source file affected. + + As with many other fields, whitespace can be escaped by using Haskell string + syntax. Example: `ghc-options: -Wcompat "-with-rtsopts=-T -I1" -Wall`. + +`ghc-prof-options:` _token list_ +: Additional options for GHC when the package is built with profiling + enabled. + + Note that as of Cabal-1.24, the default profiling detail level defaults to + `exported-functions` for libraries and `toplevel-functions` for + executables. For GHC these correspond to the flags `-fprof-auto-exported` + and `-fprof-auto-top`. Prior to Cabal-1.24 the level defaulted to `none`. + These levels can be adjusted by the person building the package with the + `--profiling-detail` and `--library-profiling-detail` flags. 
+ + It is typically better for the person building the package to pick the + profiling detail level rather than for the package author. So unless you + have special needs it is probably better not to specify any of the GHC + `-fprof-auto*` flags here. However if you wish to override the profiling + detail level, you can do so using the `ghc-prof-options` field: use + `-fno-prof-auto` or one of the other `-fprof-auto*` flags. + + +`ghc-shared-options:` _token list_ +: Additional options for GHC when the package is built as shared library. + The options specified via this field are combined with the ones specified + via `ghc-options`, and are passed to GHC during both the compile and + link phases. + +`includes:` _filename list_ +: A list of header files to be included in any compilations via C. + This field applies to both header files that are already installed + on the system and to those coming with the package to be installed. + These files typically contain function prototypes for foreign + imports used by the package. + +`install-includes:` _filename list_ +: A list of header files from this package to be installed into + `$libdir/includes` when the package is installed. Files listed in + `install-includes:` should be found in relative to the top of the + source tree or relative to one of the directories listed in + `include-dirs`. + + `install-includes` is typically used to name header files that + contain prototypes for foreign imports used in Haskell code in this + package, for which the C implementations are also provided with the + package. Note that to include them when compiling the package + itself, they need to be listed in the `includes:` field as well. + +`include-dirs:` _directory list_ +: A list of directories to search for header files, when preprocessing + with `c2hs`, `hsc2hs`, `cpphs` or the C preprocessor, and + also when compiling via C. + +`c-sources:` _filename list_ +: A list of C source files to be compiled and linked with the Haskell files. + +`js-sources:` _filename list_ +: A list of JavaScript source files to be linked with the Haskell files + (only for JavaScript targets). + +`extra-libraries:` _token list_ +: A list of extra libraries to link with. + +`extra-ghci-libraries:` _token list_ +: A list of extra libraries to be used instead of 'extra-libraries' when + the package is loaded with GHCi. + +`extra-lib-dirs:` _directory list_ +: A list of directories to search for libraries. + +`cc-options:` _token list_ +: Command-line arguments to be passed to the C compiler. Since the + arguments are compiler-dependent, this field is more useful with the + setup described in the section on [system-dependent + parameters](#system-dependent-parameters). + +`cpp-options:` _token list_ +: Command-line arguments for pre-processing Haskell code. Applies to + haskell source and other pre-processed Haskell source like .hsc .chs. + Does not apply to C code, that's what cc-options is for. + +`ld-options:` _token list_ +: Command-line arguments to be passed to the linker. Since the + arguments are compiler-dependent, this field is more useful with the + setup described in the section on [system-dependent + parameters](#system-dependent-parameters)>. + +`pkgconfig-depends:` _package list_ +: A list of [pkg-config] packages, needed to build this package. + They can be annotated with versions, e.g. `gtk+-2.0 >= 2.10, cairo + >= 1.0`. If no version constraint is specified, any version is + assumed to be acceptable. 
Cabal uses `pkg-config` to find if the + packages are available on the system and to find the extra + compilation and linker options needed to use the packages. + + If you need to bind to a C library that supports `pkg-config` (use + `pkg-config --list-all` to find out if it is supported) then it is + much preferable to use this field rather than hard code options into + the other fields. + +`frameworks:` _token list_ +: On Darwin/MacOS X, a list of frameworks to link to. See Apple's + developer documentation for more details on frameworks. This entry + is ignored on all other platforms. + +`extra-frameworks-dirs:` _directory list_ +: On Darwin/MacOS X, a list of directories to search for frameworks. + This entry is ignored on all other platforms. + +### Configurations ### + +Library and executable sections may include conditional +blocks, which test for various system parameters and +configuration flags. The flags mechanism is rather generic, +but most of the time a flag represents certain feature, that +can be switched on or off by the package user. +Here is an example package description file using +configurations: + +#### Example: A package containing a library and executable programs #### + +~~~~~~~~~~~~~~~~ +Name: Test1 +Version: 0.0.1 +Cabal-Version: >= 1.2 +License: BSD3 +Author: Jane Doe +Synopsis: Test package to test configurations +Category: Example + +Flag Debug + Description: Enable debug support + Default: False + +Flag WebFrontend + Description: Include API for web frontend. + -- Cabal checks if the configuration is possible, first + -- with this flag set to True and if not it tries with False + +Library + Build-Depends: base + Exposed-Modules: Testing.Test1 + Extensions: CPP + + if flag(debug) + GHC-Options: -DDEBUG + if !os(windows) + CC-Options: "-DDEBUG" + else + CC-Options: "-DNDEBUG" + + if flag(webfrontend) + Build-Depends: cgi > 0.42 + Other-Modules: Testing.WebStuff + +Executable test1 + Main-is: T1.hs + Other-Modules: Testing.Test1 + Build-Depends: base + + if flag(debug) + CC-Options: "-DDEBUG" + GHC-Options: -DDEBUG +~~~~~~~~~~~~~~~~ + +#### Layout #### + +Flags, conditionals, library and executable sections use layout to +indicate structure. This is very similar to the Haskell layout rule. +Entries in a section have to all be indented to the same level which +must be more than the section header. Tabs are not allowed to be used +for indentation. + +As an alternative to using layout you can also use explicit braces `{}`. +In this case the indentation of entries in a section does not matter, +though different fields within a block must be on different lines. Here +is a bit of the above example again, using braces: + +#### Example: Using explicit braces rather than indentation for layout #### + +~~~~~~~~~~~~~~~~ +Name: Test1 +Version: 0.0.1 +Cabal-Version: >= 1.2 +License: BSD3 +Author: Jane Doe +Synopsis: Test package to test configurations +Category: Example + +Flag Debug { + Description: Enable debug support + Default: False +} + +Library { + Build-Depends: base + Exposed-Modules: Testing.Test1 + Extensions: CPP + if flag(debug) { + GHC-Options: -DDEBUG + if !os(windows) { + CC-Options: "-DDEBUG" + } else { + CC-Options: "-DNDEBUG" + } + } +} +~~~~~~~~~~~~~~~~ + +#### Configuration Flags #### + +A flag section takes the flag name as an argument and may contain the +following fields. + +`description:` _freeform_ +: The description of this flag. + +`default:` _boolean_ (default: `True`) +: The default value of this flag. 
+ + Note that this value may be [overridden in several + ways](installing-packages.html#controlling-flag-assignments"). The + rationale for having flags default to True is that users usually + want new features as soon as they are available. Flags representing + features that are not (yet) recommended for most users (such as + experimental features or debugging support) should therefore + explicitly override the default to False. + +`manual:` _boolean_ (default: `False`) +: By default, Cabal will first try to satisfy dependencies with the + default flag value and then, if that is not possible, with the + negated value. However, if the flag is manual, then the default + value (which can be overridden by commandline flags) will be used. + +#### Conditional Blocks #### + +Conditional blocks may appear anywhere inside a library or executable +section. They have to follow rather strict formatting rules. +Conditional blocks must always be of the shape + +~~~~~~~~~~~~~~~~ + `if `_condition_ + _property-descriptions-or-conditionals*_ +~~~~~~~~~~~~~~~~ + +or + +~~~~~~~~~~~~~~~~ + `if `_condition_ + _property-descriptions-or-conditionals*_ + `else` + _property-descriptions-or-conditionals*_ +~~~~~~~~~~~~~~~~ + +Note that the `if` and the condition have to be all on the same line. + +#### Conditions #### + +Conditions can be formed using boolean tests and the boolean operators +`||` (disjunction / logical "or"), `&&` (conjunction / logical "and"), +or `!` (negation / logical "not"). The unary `!` takes highest +precedence, `||` takes lowest. Precedence levels may be overridden +through the use of parentheses. For example, `os(darwin) && !arch(i386) +|| os(freebsd)` is equivalent to `(os(darwin) && !(arch(i386))) || +os(freebsd)`. + +The following tests are currently supported. + +`os(`_name_`)` +: Tests if the current operating system is _name_. The argument is + tested against `System.Info.os` on the target system. There is + unfortunately some disagreement between Haskell implementations + about the standard values of `System.Info.os`. Cabal canonicalises + it so that in particular `os(windows)` works on all implementations. + If the canonicalised os names match, this test evaluates to true, + otherwise false. The match is case-insensitive. + +`arch(`_name_`)` +: Tests if the current architecture is _name_. The argument is + matched against `System.Info.arch` on the target system. If the arch + names match, this test evaluates to true, otherwise false. The match + is case-insensitive. + +`impl(`_compiler_`)` +: Tests for the configured Haskell implementation. An optional version + constraint may be specified (for example `impl(ghc >= 6.6.1)`). If + the configured implementation is of the right type and matches the + version constraint, then this evaluates to true, otherwise false. + The match is case-insensitive. + +`flag(`_name_`)` +: Evaluates to the current assignment of the flag of the given name. + Flag names are case insensitive. Testing for flags that have not + been introduced with a flag section is an error. + +`true` +: Constant value true. + +`false` +: Constant value false. + +#### Resolution of Conditions and Flags #### + +If a package descriptions specifies configuration flags the package user can +[control these in several ways](installing-packages.html#controlling-flag-assignments). +If the user does not fix the value of a flag, Cabal will try to find a flag +assignment in the following way. 
+
+  * For each flag specified, it will assign its default value, evaluate
+    all conditions with this flag assignment, and check if all
+    dependencies can be satisfied. If this check succeeds, the package
+    will be configured with that flag assignment.
+
+  * If dependencies were missing, the last flag (in the order in which
+    the flags were introduced in the package description) is tried with
+    its alternative value, and so on. This continues until either an
+    assignment is found where all dependencies can be satisfied, or all
+    possible flag assignments have been tried.
+
+To put it another way, Cabal does a complete backtracking search to find
+a satisfiable package configuration. It is only the dependencies
+specified in the `build-depends` field in conditional blocks that
+determine if a particular flag assignment is satisfiable (`build-tools`
+are not considered). The order of declaration and the default values of
+the flags determine the search order. Flags overridden on the command
+line fix the assignment of that flag, so no backtracking will be tried
+for that flag.
+
+If no suitable flag assignment could be found, the configuration phase
+will fail and a list of missing dependencies will be printed. Note that
+this resolution process is exponential in the worst case (i.e., in the
+case where dependencies cannot be satisfied). There are some
+optimizations applied internally, but the overall complexity remains
+unchanged.
+
+### Meaning of field values when using conditionals ###
+
+During the configuration phase, a flag assignment is chosen, all
+conditionals are evaluated, and the package description is flattened
+into a single package description. If the same field is specified both
+inside a conditional and outside of it, the values are combined using
+the following rules.
+
+ * Boolean fields are combined using conjunction (logical "and").
+
+ * List fields are combined by appending the inner items to the outer
+   items, for example
+
+   ~~~~~~~~~~~~~~~~
+   other-extensions: CPP
+   if impl(ghc)
+     other-extensions: MultiParamTypeClasses
+   ~~~~~~~~~~~~~~~~
+
+   when compiled using GHC will be combined to
+
+   ~~~~~~~~~~~~~~~~
+   other-extensions: CPP, MultiParamTypeClasses
+   ~~~~~~~~~~~~~~~~
+
+   Similarly, if two conditional sections appear at the same nesting
+   level, properties specified in the latter will come after properties
+   specified in the former.
+
+ * All other fields must not be specified in ambiguous ways. For
+   example
+
+   ~~~~~~~~~~~~~~~~
+   Main-is: Main.hs
+   if flag(useothermain)
+     Main-is: OtherMain.hs
+   ~~~~~~~~~~~~~~~~
+
+   will lead to an error. Instead use
+
+   ~~~~~~~~~~~~~~~~
+   if flag(useothermain)
+     Main-is: OtherMain.hs
+   else
+     Main-is: Main.hs
+   ~~~~~~~~~~~~~~~~
+
+### Source Repositories ###
+
+It is often useful to be able to specify a source revision control
+repository for a package. Cabal lets you specify this information in a
+relatively structured form which enables other tools to interpret and
+make effective use of it. For example, the information should be
+sufficient for an automated tool to check out the sources.
+
+Cabal supports specifying different information for various common
+source control systems. Obviously not all automated tools will support
+all source control systems.
+
+Cabal supports specifying repositories for different use cases. By
+declaring which use case is meant, automated tools can make better use
+of the information. There are currently two kinds defined:
+
+ * The `head` kind refers to the latest development branch of the
+   package.
This may be used for example to track activity of a project + or as an indication to outside developers what sources to get for + making new contributions. + + * The `this` kind refers to the branch and tag of a repository that + contains the sources for this version or release of a package. For most + source control systems this involves specifying a tag, id or hash of + some form and perhaps a branch. The purpose is to be able to + reconstruct the sources corresponding to a particular package + version. This might be used to indicate what sources to get if + someone needs to fix a bug in an older branch that is no longer an + active head branch. + +You can specify one kind or the other or both. As an example here are +the repositories for the Cabal library. Note that the `this` kind of +repository specifies a tag. + +~~~~~~~~~~~~~~~~ +source-repository head + type: darcs + location: http://darcs.haskell.org/cabal/ + +source-repository this + type: darcs + location: http://darcs.haskell.org/cabal-branches/cabal-1.6/ + tag: 1.6.1 +~~~~~~~~~~~~~~~~ + +The exact fields are as follows: + +`type:` _token_ +: The name of the source control system used for this repository. The + currently recognised types are: + + * `darcs` + * `git` + * `svn` + * `cvs` + * `mercurial` (or alias `hg`) + * `bazaar` (or alias `bzr`) + * `arch` + * `monotone` + + This field is required. + +`location:` _URL_ +: The location of the repository. The exact form of this field depends + on the repository type. For example: + + * for darcs: `http://code.haskell.org/foo/` + * for git: `git://github.com/foo/bar.git` + * for CVS: `anoncvs@cvs.foo.org:/cvs` + + This field is required. + +`module:` _token_ +: CVS requires a named module, as each CVS server can host multiple + named repositories. + + This field is required for the CVS repository type and should not + be used otherwise. + +`branch:` _token_ +: Many source control systems support the notion of a branch, as a + distinct concept from having repositories in separate locations. For + example CVS, SVN and git use branches while for darcs uses different + locations for different branches. If you need to specify a branch to + identify a your repository then specify it in this field. + + This field is optional. + +`tag:` _token_ +: A tag identifies a particular state of a source repository. The tag + can be used with a `this` repository kind to identify the state of + a repository corresponding to a particular package version or + release. The exact form of the tag depends on the repository type. + + This field is required for the `this` repository kind. + +`subdir:` _directory_ +: Some projects put the sources for multiple packages under a single + source repository. This field lets you specify the relative path + from the root of the repository to the top directory for the + package, i.e. the directory containing the package's `.cabal` file. + + This field is optional. It default to empty which corresponds to the + root directory of the repository. + +### Downloading a package's source ### + +The `cabal get` command allows to access a package's source code - either by +unpacking a tarball downloaded from Hackage (the default) or by checking out a +working copy from the package's source repository. + +~~~~~~~~~~~~~~~~ +$ cabal get [FLAGS] PACKAGES +~~~~~~~~~~~~~~~~ + +The `get` command supports the following options: + +`-d --destdir` _PATH_ +: Where to place the package source, defaults to (a subdirectory of) the + current directory. 
+ +`-s --source-repository` _[head|this|...]_ +: Fork the package's source repository using the appropriate version control + system. The optional argument allows to choose a specific repository kind. + +## Custom setup scripts + +The optional `custom-setup` stanza contains information needed for the +compilation of custom `Setup.hs` scripts, + +~~~~~~~~~~~~~~~~ +custom-setup + setup-depends: + base >= 4.5 && < 4.11, + Cabal < 1.25 +~~~~~~~~~~~~~~~~ + +`setup-depends:` _package list_ +: The dependencies needed to compile `Setup.hs`. See the + [`build-depends`](#build-information) section for a description of the + syntax expected by this field. + +## Accessing data files from package code ## + +The placement on the target system of files listed in the `data-files` +field varies between systems, and in some cases one can even move +packages around after installation (see [prefix +independence](installing-packages.html#prefix-independence)). To enable +packages to find these files in a portable way, Cabal generates a module +called `Paths_`_pkgname_ (with any hyphens in _pkgname_ replaced by +underscores) during building, so that it may be imported by modules of +the package. This module defines a function + +~~~~~~~~~~~~~~~ +getDataFileName :: FilePath -> IO FilePath +~~~~~~~~~~~~~~~ + +If the argument is a filename listed in the `data-files` field, the +result is the name of the corresponding file on the system on which the +program is running. + +Note: If you decide to import the `Paths_`_pkgname_ module then it +*must* be listed in the `other-modules` field just like any other module +in your package. + +The `Paths_`_pkgname_ module is not platform independent so it does not +get included in the source tarballs generated by `sdist`. + +The `Paths_`_pkgname_ module also includes some other useful functions +and values, which record the version of the package and some other +directories which the package has been configured to be installed +into (e.g. data files live in `getDataDir`): + +~~~~~~~~~~~~~~~ +version :: Version + +getBinDir :: IO FilePath +getLibDir :: IO FilePath +getDynLibDir :: IO FilePath +getDataDir :: IO FilePath +getLibexecDir :: IO FilePath +getSysconfDir :: IO FilePath +~~~~~~~~~~~~~~~ + +### Accessing the package version ### + +The aforementioned auto generated `Paths_`_pkgname_ module also +exports the constant `version ::` [Version][data-version] which is +defined as the version of your package as specified in the `version` +field. + +## System-dependent parameters ## + +For some packages, especially those interfacing with C libraries, +implementation details and the build procedure depend on the build +environment. The `build-type` `Configure` can be used to handle many +such situations. In this case, `Setup.hs` should be: + +~~~~~~~~~~~~~~~~ +import Distribution.Simple +main = defaultMainWithHooks autoconfUserHooks +~~~~~~~~~~~~~~~~ + +Most packages, however, would probably do better using the `Simple` +build type and [configurations](#configurations). + +The `build-type` `Configure` differs from `Simple` in two ways: + +* The package root directory must contain a shell script called + `configure`. The configure step will run the script. This `configure` + script may be produced by [autoconf][] or may be hand-written. The + `configure` script typically discovers information about the system + and records it for later steps, e.g. by generating system-dependent + header files for inclusion in C source files and preprocessed Haskell + source files. 
(Clearly this won't work for Windows without MSYS or + Cygwin: other ideas are needed.) + +* If the package root directory contains a file called + _package_`.buildinfo` after the configuration step, subsequent steps + will read it to obtain additional settings for [build + information](#build-information) fields,to be merged with the ones + given in the `.cabal` file. In particular, this file may be generated + by the `configure` script mentioned above, allowing these settings to + vary depending on the build environment. + + The build information file should have the following structure: + + > _buildinfo_ + > + > `executable:` _name_ + > _buildinfo_ + > + > `executable:` _name_ + > _buildinfo_ + > ... + + where each _buildinfo_ consists of settings of fields listed in the + section on [build information](#build-information). The first one (if + present) relates to the library, while each of the others relate to + the named executable. (The names must match the package description, + but you don't have to have entries for all of them.) + +Neither of these files is required. If they are absent, this setup +script is equivalent to `defaultMain`. + +#### Example: Using autoconf #### + +This example is for people familiar with the [autoconf][] tools. + +In the X11 package, the file `configure.ac` contains: + +~~~~~~~~~~~~~~~~ +AC_INIT([Haskell X11 package], [1.1], [libraries@haskell.org], [X11]) + +# Safety check: Ensure that we are in the correct source directory. +AC_CONFIG_SRCDIR([X11.cabal]) + +# Header file to place defines in +AC_CONFIG_HEADERS([include/HsX11Config.h]) + +# Check for X11 include paths and libraries +AC_PATH_XTRA +AC_TRY_CPP([#include ],,[no_x=yes]) + +# Build the package if we found X11 stuff +if test "$no_x" = yes +then BUILD_PACKAGE_BOOL=False +else BUILD_PACKAGE_BOOL=True +fi +AC_SUBST([BUILD_PACKAGE_BOOL]) + +AC_CONFIG_FILES([X11.buildinfo]) +AC_OUTPUT +~~~~~~~~~~~~~~~~ + +Then the setup script will run the `configure` script, which checks for +the presence of the X11 libraries and substitutes for variables in the +file `X11.buildinfo.in`: + +~~~~~~~~~~~~~~~~ +buildable: @BUILD_PACKAGE_BOOL@ +cc-options: @X_CFLAGS@ +ld-options: @X_LIBS@ +~~~~~~~~~~~~~~~~ + +This generates a file `X11.buildinfo` supplying the parameters needed by +later stages: + +~~~~~~~~~~~~~~~~ +buildable: True +cc-options: -I/usr/X11R6/include +ld-options: -L/usr/X11R6/lib +~~~~~~~~~~~~~~~~ + +The `configure` script also generates a header file `include/HsX11Config.h` +containing C preprocessor defines recording the results of various tests. This +file may be included by C source files and preprocessed Haskell source files in +the package. + +Note: Packages using these features will also need to list additional files such +as `configure`, templates for `.buildinfo` files, files named only in +`.buildinfo` files, header files and so on in the `extra-source-files` field to +ensure that they are included in source distributions. They should also list +files and directories generated by `configure` in the `extra-tmp-files` field to +ensure that they are removed by `setup clean`. + +Quite often the files generated by `configure` need to be listed somewhere in +the package description (for example, in the `install-includes` field). However, +we usually don't want generated files to be included in the source tarball. The +solution is again provided by the `.buildinfo` file. 
In the above example, the +following line should be added to `X11.buildinfo`: + +~~~~~~~~~~~~~~~~ +install-includes: HsX11Config.h +~~~~~~~~~~~~~~~~ + +In this way, the generated `HsX11Config.h` file won't be included in the source +tarball in addition to `HsX11Config.h.in`, but it will be copied to the right +location during the install process. Packages that use custom `Setup.hs` scripts +can update the necessary fields programmatically instead of using the +`.buildinfo` file. + + +## Conditional compilation ## + +Sometimes you want to write code that works with more than one version +of a dependency. You can specify a range of versions for the dependency +in the `build-depends`, but how do you then write the code that can use +different versions of the API? + +Haskell lets you preprocess your code using the C preprocessor (either +the real C preprocessor, or `cpphs`). To enable this, add `extensions: +CPP` to your package description. When using CPP, Cabal provides some +pre-defined macros to let you test the version of dependent packages; +for example, suppose your package works with either version 3 or version +4 of the `base` package, you could select the available version in your +Haskell modules like this: + +~~~~~~~~~~~~~~~~ +#if MIN_VERSION_base(4,0,0) +... code that works with base-4 ... +#else +... code that works with base-3 ... +#endif +~~~~~~~~~~~~~~~~ + +In general, Cabal supplies a macro `MIN_VERSION_`_`package`_`_(A,B,C)` +for each package depended on via `build-depends`. This macro is true if +the actual version of the package in use is greater than or equal to +`A.B.C` (using the conventional ordering on version numbers, which is +lexicographic on the sequence, but numeric on each component, so for +example 1.2.0 is greater than 1.0.3). + +Since version 1.20, there is also the `MIN_TOOL_VERSION_`_`tool`_ family of +macros for conditioning on the version of build tools used to build the program +(e.g. `hsc2hs`). + +Cabal places the definitions of these macros into an +automatically-generated header file, which is included when +preprocessing Haskell source code by passing options to the C +preprocessor. + +Cabal also allows to detect when the source code is being used for generating +documentation. The `__HADDOCK_VERSION__` macro is defined only when compiling +via [haddock][] instead of a normal Haskell compiler. The value of the +`__HADDOCK_VERSION__` macro is defined as `A*1000 + B*10 + C`, where `A.B.C` is +the Haddock version. This can be useful for working around bugs in Haddock or +generating prettier documentation in some special cases. + +## More complex packages ## + +For packages that don't fit the simple schemes described above, you have +a few options: + + * By using the `build-type` `Custom`, you can supply your own + `Setup.hs` file, and customize the simple build infrastructure + using _hooks_. These allow you to perform additional actions + before and after each command is run, and also to specify + additional preprocessors. A typical `Setup.hs` may look like this: + + ~~~~~~~~~~~~~~~~ + import Distribution.Simple + main = defaultMainWithHooks simpleUserHooks { postHaddock = posthaddock } + + posthaddock args flags desc info = .... + ~~~~~~~~~~~~~~~~ + + See `UserHooks` in [Distribution.Simple][dist-simple] for the + details, but note that this interface is experimental, and likely + to change in future releases. 
+ + If you use a custom `Setup.hs` file you should strongly consider adding a + `custom-setup` stanza with a `setup-depends` field to ensure that your + setup script does not break with future dependency versions. + + * You could delegate all the work to `make`, though this is unlikely + to be very portable. Cabal supports this with the `build-type` + `Make` and a trivial setup library [Distribution.Make][dist-make], + which simply parses the command line arguments and invokes `make`. + Here `Setup.hs` should look like this: + + ~~~~~~~~~~~~~~~~ + import Distribution.Make + main = defaultMain + ~~~~~~~~~~~~~~~~ + + The root directory of the package should contain a `configure` + script, and, after that has run, a `Makefile` with a default target + that builds the package, plus targets `install`, `register`, + `unregister`, `clean`, `dist` and `docs`. Some options to commands + are passed through as follows: + + * The `--with-hc-pkg`, `--prefix`, `--bindir`, `--libdir`, `--dynlibdir`, `--datadir`, + `--libexecdir` and `--sysconfdir` options to the `configure` command are + passed on to the `configure` script. In addition the value of the + `--with-compiler` option is passed in a `--with-hc` option and all + options specified with `--configure-option=` are passed on. + + * The `--destdir` option to the `copy` command becomes a setting + of a `destdir` variable on the invocation of `make copy`. The + supplied `Makefile` should provide a `copy` target, which will + probably look like this: + + ~~~~~~~~~~~~~~~~ + copy : + $(MAKE) install prefix=$(destdir)/$(prefix) \ + bindir=$(destdir)/$(bindir) \ + libdir=$(destdir)/$(libdir) \ + dynlibdir=$(destdir)/$(dynlibdir) \ + datadir=$(destdir)/$(datadir) \ + libexecdir=$(destdir)/$(libexecdir) \ + sysconfdir=$(destdir)/$(sysconfdir) \ + ~~~~~~~~~~~~~~~~ + + * Finally, with the `build-type` `Custom`, you can also write your + own setup script from scratch. It must conform to the interface + described in the section on [building and installing + packages](installing-packages.html), and you may use the Cabal + library for all or part of the work. One option is to copy the + source of `Distribution.Simple`, and alter it for your needs. Good + luck. 
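+
+As a closing illustration, the sketch below shows how the first option above
+(`build-type` `Custom` with a hooks-based `Setup.hs`) might be paired with the
+`custom-setup` stanza described earlier. The package name, module name and
+version bounds are hypothetical and only meant to show the shape of such a
+package description:
+
+~~~~~~~~~~~~~~~~
+Name:          my-custom-pkg
+Version:       1.0
+Build-Type:    Custom
+Cabal-Version: >= 1.24
+
+custom-setup
+  setup-depends: base  >= 4.5 && < 4.11,
+                 Cabal >= 1.24 && < 1.25
+
+Library
+  build-depends:    base
+  exposed-modules:  MyCustomPkg
+  default-language: Haskell2010
+~~~~~~~~~~~~~~~~
+
+The `Setup.hs` itself is then compiled against a `Cabal` version satisfying
+`setup-depends`, rather than whatever version happens to be installed.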
+ + + +[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html +[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html +[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License +[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension +[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType +[data-version]: http://hackage.haskell.org/packages/archive/base/latest/doc/html/Data-Version.html +[alex]: http://www.haskell.org/alex/ +[autoconf]: http://www.gnu.org/software/autoconf/ +[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ +[cpphs]: http://projects.haskell.org/cpphs/ +[greencard]: http://hackage.haskell.org/package/greencard +[haddock]: http://www.haskell.org/haddock/ +[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ +[happy]: http://www.haskell.org/happy/ +[Hackage]: http://hackage.haskell.org/ +[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ +[REPL]: http://en.wikipedia.org/wiki/Read%E2%80%93eval%E2%80%93print_loop +[PVP]: https://wiki.haskell.org/Package_versioning_policy diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/index.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/index.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/index.markdown 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/index.markdown 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,201 @@ +% Cabal User Guide +**Version: 1.24.2.0** + +Cabal is the standard package system for [Haskell] software. It helps +people to configure, build and install Haskell software and to +distribute it easily to other users and developers. + +There is a command line tool called `cabal` for working with Cabal +packages. It helps with installing existing packages and also helps +people developing their own packages. It can be used to work with local +packages or to install packages from online package archives, including +automatically installing dependencies. By default it is configured to +use [Hackage] which is Haskell's central package archive that contains +thousands of libraries and applications in the Cabal package format. + +# Contents # + + * [Introduction](#introduction) + - [What's in a package](#whats-in-a-package) + - [A tool for working with packages](#a-tool-for-working-with-packages) + * [Building, installing and managing packages](installing-packages.html) + * [Creating packages](developing-packages.html) + * [Reporting bugs and deficiencies](misc.html#reporting-bugs-and-deficiencies) + * [Stability of Cabal interfaces](misc.html#stability-of-cabal-interfaces) + +# Introduction # + +Cabal is a package system for Haskell software. The point of a package +system is to enable software developers and users to easily distribute, +use and reuse software. A package system makes it easier for developers +to get their software into the hands of users. Equally importantly, it +makes it easier for software developers to be able to reuse software +components written by other developers. + +Packaging systems deal with packages and with Cabal we call them _Cabal +packages_. The Cabal package is the unit of distribution. Every Cabal +package has a name and a version number which are used to identify the +package, e.g. `filepath-1.0`. + +Cabal packages can depend on other Cabal packages. 
There are tools +to enable automated package management. This means it is possible for +developers and users to install a package plus all of the other Cabal +packages that it depends on. It also means that it is practical to make +very modular systems using lots of packages that reuse code written by +many developers. + +Cabal packages are source based and are typically (but not necessarily) +portable to many platforms and Haskell implementations. The Cabal +package format is designed to make it possible to translate into other +formats, including binary packages for various systems. + +When distributed, Cabal packages use the standard compressed tarball +format, with the file extension `.tar.gz`, e.g. `filepath-1.0.tar.gz`. + +Note that packages are not part of the Haskell language, rather they +are a feature provided by the combination of Cabal and GHC (and several +other Haskell implementations). + + +## A tool for working with packages ## + +There is a command line tool, called "`cabal`", that users and developers +can use to build and install Cabal packages. It can be used for both +local packages and for packages available remotely over the network. It +can automatically install Cabal packages plus any other Cabal packages +they depend on. + +Developers can use the tool with packages in local directories, e.g. + +~~~~~~~~~~~~~~~~ +cd foo/ +cabal install +~~~~~~~~~~~~~~~~ + +While working on a package in a local directory, developers can run the +individual steps to configure and build, and also generate documentation +and run test suites and benchmarks. + +It is also possible to install several local packages at once, e.g. + +~~~~~~~~~~~~~~~~ +cabal install foo/ bar/ +~~~~~~~~~~~~~~~~ + +Developers and users can use the tool to install packages from remote +Cabal package archives. By default, the `cabal` tool is configured to +use the central Haskell package archive called [Hackage] but it +is possible to use it with any other suitable archive. + +~~~~~~~~~~~~~~~~ +cabal install xmonad +~~~~~~~~~~~~~~~~ + +This will install the `xmonad` package plus all of its dependencies. + +In addition to packages that have been published in an archive, +developers can install packages from local or remote tarball files, +for example + +~~~~~~~~~~~~~~~~ +cabal install foo-1.0.tar.gz +cabal install http://example.com/foo-1.0.tar.gz +~~~~~~~~~~~~~~~~ + +Cabal provides a number of ways for a user to customise how and where a +package is installed. They can decide where a package will be installed, +which Haskell implementation to use and whether to build optimised code +or build with the ability to profile code. It is not expected that users +will have to modify any of the information in the `.cabal` file. + +For full details, see the section on [building and installing +packages](installing-packages.html). + +Note that `cabal` is not the only tool for working with Cabal packages. +Due to the standardised format and a library for reading `.cabal` files, +there are several other special-purpose tools. + +## What's in a package ## + +A Cabal package consists of: + + * Haskell software, including libraries, executables and tests + * metadata about the package in a standard human and machine + readable format (the "`.cabal`" file) + * a standard interface to build the package (the "`Setup.hs`" file) + +The `.cabal` file contains information about the package, supplied by +the package author. In particular it lists the other Cabal packages +that the package depends on. 
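+
+As a concrete (and purely illustrative) example, a minimal package
+`hello-1.0` might consist of just three files:
+
+~~~~~~~~~~~~~~~~
+hello-1.0/
+  hello.cabal   -- the package metadata: name, version, dependencies, ...
+  Setup.hs      -- the standard build interface
+  Hello.hs      -- the Haskell code itself
+~~~~~~~~~~~~~~~~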
+ +For full details on what goes in the `.cabal` and `Setup.hs` files, and +for all the other features provided by the build system, see the section +on [developing packages](developing-packages.html). + + +## Cabal featureset ## + +Cabal and its associated tools and websites covers: + + * a software build system + * software configuration + * packaging for distribution + * automated package management + * natively using the `cabal` command line tool; or + * by translation into native package formats such as RPM or deb + * web and local Cabal package archives + * central Hackage website with 1000's of Cabal packages + +Some parts of the system can be used without others. In particular the +built-in build system for simple packages is optional: it is possible +to use custom build systems. + +## Similar systems ## + +The Cabal system is roughly comparable with the system of Python Eggs, +Ruby Gems or Perl distributions. Each system has a notion of +distributable packages, and has tools to manage the process of +distributing and installing packages. + +Hackage is an online archive of Cabal packages. It is roughly comparable +to CPAN but with rather fewer packages (around 5,000 vs 28,000). + +Cabal is often compared with autoconf and automake and there is some +overlap in functionality. The most obvious similarity is that the +command line interface for actually configuring and building packages +follows the same steps and has many of the same configuration +parameters. + +~~~~~~~~~~ +./configure --prefix=... +make +make install +~~~~~~~~~~ + +compared to + +~~~~~~~~~~ +cabal configure --prefix=... +cabal build +cabal install +~~~~~~~~~~ + +Cabal's build system for simple packages is considerably less flexible +than make/automake, but has builtin knowledge of how to build Haskell +code and requires very little manual configuration. Cabal's simple build +system is also portable to Windows, without needing a Unix-like +environment such as cygwin/mingwin. + +Compared to autoconf, Cabal takes a somewhat different approach to +package configuration. Cabal's approach is designed for automated +package management. Instead of having a configure script that tests for +whether dependencies are available, Cabal packages specify their +dependencies. There is some scope for optional and conditional +dependencies. By having package authors specify dependencies it makes it +possible for tools to install a package and all of its dependencies +automatically. It also makes it possible to translate (in a +mostly-automatically way) into another package format like RPM or deb +which also have automatic dependency resolution. + +[Haskell]: http://www.haskell.org/ +[Hackage]: http://hackage.haskell.org/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/installing-packages.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/installing-packages.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/installing-packages.markdown 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/installing-packages.markdown 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,1303 @@ +% Cabal User Guide + +# Configuration # + +## Overview ## + +The global configuration file for `cabal-install` is `~/.cabal/config`. If you +do not have this file, `cabal` will create it for you on the first call to +`cabal update`. 
+Alternatively, you can explicitly ask `cabal` to create it for you using
+
+> `cabal user-config update`
+
+Most of the options in this configuration file are also available as command
+line arguments, and the corresponding documentation can be used to look up
+their meaning. The created configuration file only specifies values for a
+handful of options. Most options are left at their default value, which the
+file documents; for instance,
+
+~~~~~~~~~~~~~~~~
+-- executable-stripping: True
+~~~~~~~~~~~~~~~~
+
+means that the configuration file currently does not specify a value for the
+`executable-stripping` option (the line is commented out), and that the default
+is `True`; if you wanted to disable stripping of executables by default, you
+would change this line to
+
+~~~~~~~~~~~~~~~~
+executable-stripping: False
+~~~~~~~~~~~~~~~~
+
+You can also use `cabal user-config update` to migrate configuration files
+created by older versions of `cabal`.
+
+## Repository specification ##
+
+An important part of the configuration is the specification of the repository.
+When `cabal` creates a default config file, it configures the repository to
+be the central Hackage server:
+
+~~~~~~~~~~~~~~~~
+repository hackage.haskell.org
+  url: http://hackage.haskell.org/
+~~~~~~~~~~~~~~~~
+
+The name of the repository is given on the first line, and can be anything;
+packages downloaded from this repository will be cached under
+`~/.cabal/packages/hackage.haskell.org` (or whatever name you specify; you can
+change the prefix by changing the value of `remote-repo-cache`). If you want,
+you can configure multiple repositories, and `cabal` will combine them and be
+able to download packages from any of them.
+
+### Using secure repositories ###
+
+For repositories that support the TUF security infrastructure (this includes
+Hackage), you can enable secure access to the repository by specifying:
+
+~~~~~~~~~~~~~~~~
+repository hackage.haskell.org
+  url: http://hackage.haskell.org/
+  secure: True
+  root-keys: <root-key-IDs>
+  key-threshold: <key-threshold>
+~~~~~~~~~~~~~~~~
+
+The `<root-key-IDs>` and `<key-threshold>` values are used for bootstrapping.
+As part of the TUF infrastructure the repository will contain a file
+`root.json` (for instance,
+[http://hackage.haskell.org/root.json](http://hackage.haskell.org/root.json))
+which the client needs in order to do verification. However, how can `cabal`
+verify the `root.json` file _itself_? This is known as bootstrapping: if you
+specify a list of root key IDs and a corresponding threshold, `cabal` will
+verify that the downloaded `root.json` file has been signed with at least
+`<key-threshold>` keys from your set of `<root-key-IDs>`.
+
+You can omit these two fields, but this is not recommended. In that case
+`cabal` will download the `root.json` file and use it without verification.
+Although this bootstrapping step is then unsafe, all subsequent access is
+secure (provided that the downloaded `root.json` was not tampered with). Of
+course, adding `root-keys` and `key-threshold` to your repository
+specification only shifts the problem, because now you somehow need to make
+sure that the key IDs you received were the right ones. How that is done is,
+however, outside the scope of `cabal` proper.
+
+More information about the security infrastructure can be found at
+[https://github.com/well-typed/hackage-security](https://github.com/well-typed/hackage-security).
+
+### Legacy repositories ###
+
+Currently `cabal` supports two kinds of “legacy” repositories.
The +first is specified using + +~~~~~~~~~~~~~~~~ +remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive +~~~~~~~~~~~~~~~~ + +This is just syntactic sugar for + +~~~~~~~~~~~~~~~~ +repository hackage.haskell.org + url: hackage.haskell.org:http://hackage.haskell.org/packages/archive +~~~~~~~~~~~~~~~~ + +although, in (and only in) the specific case of Hackage, the URL +`http://hackage.haskell.org/packages/archive` will be silently translated to +`http://hackage.haskell.org/`. + +The second kind of legacy repositories are so-called “local” +repositories: + +~~~~~~~~~~~~~~~~ +local-repo: my-local-repo:/path/to/local/repo +~~~~~~~~~~~~~~~~ + +This can be used to access repositories on the local file system. However, the +layout of these local repositories is different from the layout of remote +repositories, and usage of these local repositories is deprecated. + +### Secure local repositories ### + +If you want to use repositories on your local file system, it is recommended +instead to use a _secure_ local repository: + +~~~~~~~~~~~~~~~~ +repository my-local-repo + url: file:/path/to/local/repo + secure: True + root-keys: + key-threshold: +~~~~~~~~~~~~~~~~ + +The layout of these secure local repos matches the layout of remote repositories +exactly; the +[hackage-repo-tool](http://hackage.haskell.org/package/hackage-repo-tool) can be +used to create and manage such repositories. + +# Building and installing packages # + +After you've unpacked a Cabal package, you can build it by moving into +the root directory of the package and running the `cabal` tool there: + +> `cabal [command] [option...]` + +The _command_ argument selects a particular step in the build/install process. + +You can also get a summary of the command syntax with + +> `cabal help` + +Alternatively, you can also use the `Setup.hs` or `Setup.lhs` script: + +> `runhaskell Setup.hs [command] [option...]` + +For the summary of the command syntax, run: + +> `cabal help` + +or + +> `runhaskell Setup.hs --help` + +## Building and installing a system package ## + +~~~~~~~~~~~~~~~~ +runhaskell Setup.hs configure --ghc +runhaskell Setup.hs build +runhaskell Setup.hs install +~~~~~~~~~~~~~~~~ + +The first line readies the system to build the tool using GHC; for +example, it checks that GHC exists on the system. The second line +performs the actual building, while the last both copies the build +results to some permanent place and registers the package with GHC. + +## Building and installing a user package ## + +~~~~~~~~~~~~~~~~ +runhaskell Setup.hs configure --user +runhaskell Setup.hs build +runhaskell Setup.hs install +~~~~~~~~~~~~~~~~ + +The package is installed under the user's home directory and is +registered in the user's package database (`--user`). + +## Installing packages from Hackage ## + +The `cabal` tool also can download, configure, build and install a [Hackage] +package and all of its dependencies in a single step. To do this, run: + +~~~~~~~~~~~~~~~~ +cabal install [PACKAGE...] +~~~~~~~~~~~~~~~~ + +To browse the list of available packages, visit the [Hackage] web site. + +## Developing with sandboxes ## + +By default, any dependencies of the package are installed into the global or +user package databases (e.g. using `cabal install --only-dependencies`). If +you're building several different packages that have incompatible dependencies, +this can cause the build to fail. 
One way to avoid this problem is to build each +package in an isolated environment ("sandbox"), with a sandbox-local package +database. Because sandboxes are per-project, inconsistent dependencies can be +simply disallowed. + +For more on sandboxes, see also +[this article](http://coldwa.st/e/blog/2013-08-20-Cabal-sandbox.html). + +### Sandboxes: basic usage ### + +To initialise a fresh sandbox in the current directory, run `cabal sandbox +init`. All subsequent commands (such as `build` and `install`) from this point +will use the sandbox. + +~~~~~~~~~~~~~~~ +$ cd /path/to/my/haskell/library +$ cabal sandbox init # Initialise the sandbox +$ cabal install --only-dependencies # Install dependencies into the sandbox +$ cabal build # Build your package inside the sandbox +~~~~~~~~~~~~~~~ + +It can be useful to make a source package available for installation in the +sandbox - for example, if your package depends on a patched or an unreleased +version of a library. This can be done with the `cabal sandbox add-source` +command - think of it as "local [Hackage]". If an add-source dependency is later +modified, it is reinstalled automatically. + +~~~~~~~~~~~~~~~ +$ cabal sandbox add-source /my/patched/library # Add a new add-source dependency +$ cabal install --dependencies-only # Install it into the sandbox +$ cabal build # Build the local package +$ $EDITOR /my/patched/library/Source.hs # Modify the add-source dependency +$ cabal build # Modified dependency is automatically reinstalled +~~~~~~~~~~~~~~~ + +Normally, the sandbox settings (such as optimisation level) are inherited from +the main Cabal config file (`$HOME/cabal/config`). Sometimes, though, you need +to change some settings specifically for a single sandbox. You can do this by +creating a `cabal.config` file in the same directory with your +`cabal.sandbox.config` (which was created by `sandbox init`). This file has the +same syntax as the main Cabal config file. + +~~~~~~~~~~~~~~~ +$ cat cabal.config +documentation: True +constraints: foo == 1.0, bar >= 2.0, baz +$ cabal build # Uses settings from the cabal.config file +~~~~~~~~~~~~~~~ + +When you have decided that you no longer want to build your package inside a +sandbox, just delete it: + +~~~~~~~~~~~~~~~ +$ cabal sandbox delete # Built-in command +$ rm -rf .cabal-sandbox cabal.sandbox.config # Alternative manual method +~~~~~~~~~~~~~~~ + +### Sandboxes: advanced usage ### + +The default behaviour of the `add-source` command is to track modifications done +to the added dependency and reinstall the sandbox copy of the package when +needed. Sometimes this is not desirable: in these cases you can use `add-source +--snapshot`, which disables the change tracking. In addition to `add-source`, +there are also `list-sources` and `delete-source` commands. + +Sometimes one wants to share a single sandbox between multiple packages. This +can be easily done with the `--sandbox` option: + +~~~~~~~~~~~~~~~ +$ mkdir -p /path/to/shared-sandbox +$ cd /path/to/shared-sandbox +$ cabal sandbox init --sandbox . +$ cd /path/to/package-a +$ cabal sandbox init --sandbox /path/to/shared-sandbox +$ cd /path/to/package-b +$ cabal sandbox init --sandbox /path/to/shared-sandbox +~~~~~~~~~~~~~~~ + +Note that `cabal sandbox init --sandbox .` puts all sandbox files into the +current directory. By default, `cabal sandbox init` initialises a new sandbox in +a newly-created subdirectory of the current working directory +(`./.cabal-sandbox`). 
+ +Using multiple different compiler versions simultaneously is also supported, via +the `-w` option: + +~~~~~~~~~~~~~~~ +$ cabal sandbox init +$ cabal install --only-dependencies -w /path/to/ghc-1 # Install dependencies for both compilers +$ cabal install --only-dependencies -w /path/to/ghc-2 +$ cabal configure -w /path/to/ghc-1 # Build with the first compiler +$ cabal build +$ cabal configure -w /path/to/ghc-2 # Build with the second compiler +$ cabal build +~~~~~~~~~~~~~~~ + +It can be occasionally useful to run the compiler-specific package manager tool +(e.g. `ghc-pkg`) tool on the sandbox package DB directly (for example, you may +need to unregister some packages). The `cabal sandbox hc-pkg` command is a +convenient wrapper that runs the compiler-specific package manager tool with the +arguments: + +~~~~~~~~~~~~~~~ +$ cabal -v sandbox hc-pkg list +Using a sandbox located at /path/to/.cabal-sandbox +'ghc-pkg' '--global' '--no-user-package-conf' + '--package-conf=/path/to/.cabal-sandbox/i386-linux-ghc-7.4.2-packages.conf.d' + 'list' +[...] +~~~~~~~~~~~~~~~ + +The `--require-sandbox` option makes all sandbox-aware commands +(`install`/`build`/etc.) exit with error if there is no sandbox present. This +makes it harder to accidentally modify the user package database. The option can +be also turned on via the per-user configuration file (`~/.cabal/config`) or the +per-project one (`$PROJECT_DIR/cabal.config`). The error can be squelched with +`--no-require-sandbox`. + +The option `--sandbox-config-file` allows to specify the location of the +`cabal.sandbox.config` file (by default, `cabal` searches for it in the current +directory). This provides the same functionality as shared sandboxes, but +sometimes can be more convenient. Example: + +~~~~~~~~~~~~~~~ +$ mkdir my/sandbox +$ cd my/sandbox +$ cabal sandbox init +$ cd /path/to/my/project +$ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install +# Uses the sandbox located at /path/to/my/sandbox/.cabal-sandbox +$ cd ~ +$ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install +# Still uses the same sandbox +~~~~~~~~~~~~~~~ + +The sandbox config file can be also specified via the `CABAL_SANDBOX_CONFIG` +environment variable. + +Finally, the flag `--ignore-sandbox` lets you temporarily ignore an existing +sandbox: + +~~~~~~~~~~~~~~~ +$ mkdir my/sandbox +$ cd my/sandbox +$ cabal sandbox init +$ cabal --ignore-sandbox install text +# Installs 'text' in the user package database ('~/.cabal'). +~~~~~~~~~~~~~~~ + +## Creating a binary package ## + +When creating binary packages (e.g. for Red Hat or Debian) one needs to +create a tarball that can be sent to another system for unpacking in the +root directory: + +~~~~~~~~~~~~~~~~ +runhaskell Setup.hs configure --prefix=/usr +runhaskell Setup.hs build +runhaskell Setup.hs copy --destdir=/tmp/mypkg +tar -czf mypkg.tar.gz /tmp/mypkg/ +~~~~~~~~~~~~~~~~ + +If the package contains a library, you need two additional steps: + +~~~~~~~~~~~~~~~~ +runhaskell Setup.hs register --gen-script +runhaskell Setup.hs unregister --gen-script +~~~~~~~~~~~~~~~~ + +This creates shell scripts `register.sh` and `unregister.sh`, which must +also be sent to the target system. After unpacking there, the package +must be registered by running the `register.sh` script. The +`unregister.sh` script would be used in the uninstall procedure of the +package. Similar steps may be used for creating binary packages for +Windows. 
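+
+On the target system, the corresponding steps might look roughly like this
+(a sketch only: the exact paths, and whether `register.sh` is needed at all,
+depend on how the tarball was created and whether the package contains a
+library):
+
+~~~~~~~~~~~~~~~~
+cd /
+tar -xzf /path/to/mypkg.tar.gz
+sh /path/to/register.sh
+~~~~~~~~~~~~~~~~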
+
+
+The following options are understood by all commands:
+
+`--help`, `-h` or `-?`
+: List the available options for the command.
+
+`--verbose=`_n_ or `-v`_n_
+: Set the verbosity level (0-3). The normal level is 1; a missing _n_
+ defaults to 2.
+
+The various commands and the additional options they support are
+described below. In the simple build infrastructure, any other options
+will be reported as errors.
+
+## setup configure ##
+
+Prepare to build the package. Typically, this step checks that the
+target platform is capable of building the package, and discovers
+platform-specific features that are needed during the build.
+
+The user may also adjust the behaviour of later stages using the options
+listed in the following subsections. In the simple build
+infrastructure, the values supplied via these options are recorded in a
+private file read by later stages.
+
+If a user-supplied `configure` script is run (see the section on
+[system-dependent
+parameters](developing-packages.html#system-dependent-parameters) or on
+[complex packages](developing-packages.html#more-complex-packages)), it
+is passed the `--with-hc-pkg`, `--prefix`, `--bindir`, `--libdir`, `--dynlibdir`,
+`--datadir`, `--libexecdir` and `--sysconfdir` options. In addition, the
+value of the `--with-compiler` option is passed in a `--with-hc` option
+and all options specified with `--configure-option=` are passed on.
+
+### Programs used for building ###
+
+The following options govern the programs used to process the source
+files of a package:
+
+`--ghc` or `-g`, `--jhc`, `--lhc`, `--uhc`
+: Specify which Haskell implementation to use to build the package.
+ At most one of these flags may be given. If none is given, the
+ implementation under which the setup script was compiled or
+ interpreted is used.
+
+`--with-compiler=`_path_ or `-w`_path_
+: Specify the path to a particular compiler. If given, this must match
+ the implementation selected above. The default is to search for the
+ usual name of the selected implementation.
+
+ This flag also sets the default value of the `--with-hc-pkg` option
+ to the package tool for this compiler. Check the output of `setup
+ configure -v` to ensure that it finds the right package tool (or use
+ `--with-hc-pkg` explicitly).
+
+
+`--with-hc-pkg=`_path_
+: Specify the path to the package tool, e.g. `ghc-pkg`. The package
+ tool must be compatible with the compiler specified by
+ `--with-compiler`. If this option is omitted, the default value is
+ determined from the compiler selected.
+
+`--with-`_`prog`_`=`_path_
+: Specify the path to the program _prog_. Any program known to Cabal
+ can be used in place of _prog_. It can either be a full path or the
+ name of a program that can be found on the program search path. For
+ example: `--with-ghc=ghc-6.6.1` or
+ `--with-cpphs=/usr/local/bin/cpphs`.
+ The full list of accepted programs is not enumerated in this user guide.
+ Rather, run `cabal install --help` to view the list.
+
+`--`_`prog`_`-options=`_options_
+: Specify additional options to the program _prog_. Any program known
+ to Cabal can be used in place of _prog_. For example:
+ `--alex-options="--template=mytemplatedir/"`. The _options_ argument is split
+ into program options based on spaces. Any options containing embedded
+ spaces need to be quoted, for example
+ `--foo-options='--bar="C:\Program File\Bar"'`. As an alternative
+ that takes only one option at a time but avoids the need to quote,
+ use `--`_`prog`_`-option` instead.
+ +`--`_`prog`_`-option=`_option_ +: Specify a single additional option to the program _prog_. For + passing an option that contain embedded spaces, such as a file name + with embedded spaces, using this rather than `--`_`prog`_`-options` + means you do not need an additional level of quoting. Of course if + you are using a command shell you may still need to quote, for + example `--foo-options="--bar=C:\Program File\Bar"`. + +All of the options passed with either `--`_`prog`_`-options` or +`--`_`prog`_`-option` are passed in the order they were specified on the +configure command line. + +### Installation paths ### + +The following options govern the location of installed files from a +package: + +`--prefix=`_dir_ +: The root of the installation. For example for a global install you + might use `/usr/local` on a Unix system, or `C:\Program Files` on a + Windows system. The other installation paths are usually + subdirectories of _prefix_, but they don't have to be. + + In the simple build system, _dir_ may contain the following path + variables: `$pkgid`, `$pkg`, `$version`, `$compiler`, `$os`, + `$arch`, `$abi`, `$abitag` + +`--bindir=`_dir_ +: Executables that the user might invoke are installed here. + + In the simple build system, _dir_ may contain the following path + variables: `$prefix`, `$pkgid`, `$pkg`, `$version`, `$compiler`, + `$os`, `$arch`, `$abi`, `$abitag + +`--libdir=`_dir_ +: Object-code libraries are installed here. + + In the simple build system, _dir_ may contain the following path + variables: `$prefix`, `$bindir`, `$pkgid`, `$pkg`, `$version`, + `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--dynlibdir=`_dir_ +: Dynamic libraries are installed here. + + By default, this is set to `$libdir/$abi`, which is usually not equal to + `$libdir/$libsubdir`. + + In the simple build system, _dir_ may contain the following path + variables: `$prefix`, `$bindir`, `$libdir`, `$pkgid`, `$pkg`, `$version`, + `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + + +`--libexecdir=`_dir_ +: Executables that are not expected to be invoked directly by the user + are installed here. + + In the simple build system, _dir_ may contain the following path + variables: `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, + `$pkg`, `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--datadir`=_dir_ +: Architecture-independent data files are installed here. + + In the simple build system, _dir_ may contain the following path + variables: `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--sysconfdir=`_dir_ +: Installation directory for the configuration files. + + In the simple build system, _dir_ may contain the following path variables: + `$prefix`, `$bindir`, `$libdir`, `$libsubdir`, `$pkgid`, `$pkg`, `$version`, + `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +In addition the simple build system supports the following installation path options: + +`--libsubdir=`_dir_ +: A subdirectory of _libdir_ in which libraries are actually + installed. For example, in the simple build system on Unix, the + default _libdir_ is `/usr/local/lib`, and _libsubdir_ contains the + package identifier and compiler, e.g. `mypkg-0.2/ghc-6.4`, so + libraries would be installed in `/usr/local/lib/mypkg-0.2/ghc-6.4`. 
+ + _dir_ may contain the following path variables: `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--datasubdir=`_dir_ +: A subdirectory of _datadir_ in which data files are actually + installed. + + _dir_ may contain the following path variables: `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--docdir=`_dir_ +: Documentation files are installed relative to this directory. + + _dir_ may contain the following path variables: `$prefix`, `$bindir`, + `$libdir`, `$libsubdir`, `$datadir`, `$datasubdir`, `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--htmldir=`_dir_ +: HTML documentation files are installed relative to this directory. + + _dir_ may contain the following path variables: `$prefix`, `$bindir`, + `$libdir`, `$libsubdir`, `$datadir`, `$datasubdir`, `$docdir`, `$pkgid`, + `$pkg`, `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--program-prefix=`_prefix_ +: Prepend _prefix_ to installed program names. + + _prefix_ may contain the following path variables: `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +`--program-suffix=`_suffix_ +: Append _suffix_ to installed program names. The most obvious use for + this is to append the program's version number to make it possible + to install several versions of a program at once: + `--program-suffix='$version'`. + + _suffix_ may contain the following path variables: `$pkgid`, `$pkg`, + `$version`, `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` + +#### Path variables in the simple build system #### + +For the simple build system, there are a number of variables that can be +used when specifying installation paths. The defaults are also specified +in terms of these variables. A number of the variables are actually for +other paths, like `$prefix`. This allows paths to be specified relative +to each other rather than as absolute paths, which is important for +building relocatable packages (see [prefix +independence](#prefix-independence)). + +`$prefix` +: The path variable that stands for the root of the installation. For + an installation to be relocatable, all other installation paths must + be relative to the `$prefix` variable. + +`$bindir` +: The path variable that expands to the path given by the `--bindir` + configure option (or the default). + +`$libdir` +: As above but for `--libdir` + +`$dynlibdir` +: As above but for `--dynlibdir` + +`$libsubdir` +: As above but for `--libsubdir` + +`$datadir` +: As above but for `--datadir` + +`$datasubdir` +: As above but for `--datasubdir` + +`$docdir` +: As above but for `--docdir` + +`$pkgid` +: The name and version of the package, e.g. `mypkg-0.2` + +`$pkg` +: The name of the package, e.g. `mypkg` + +`$version` +: The version of the package, e.g. `0.2` + +`$compiler` +: The compiler being used to build the package, e.g. `ghc-6.6.1` + +`$os` +: The operating system of the computer being used to build the + package, e.g. `linux`, `windows`, `osx`, `freebsd` or `solaris` + +`$arch` +: The architecture of the computer being used to build the package, e.g. + `i386`, `x86_64`, `ppc` or `sparc` + +`$abitag` +: An optional tag that a compiler can use for telling incompatible ABI's + on the same architecture apart. GHCJS encodes the underlying GHC version + in the ABI tag. + +`$abi` +: A shortcut for getting a path that completely identifies the platform in terms + of binary compatibility. 
Expands to the same value as `$arch-$os-compiler-$abitag` + if the compiler uses an abi tag, `$arch-$os-$compiler` if it doesn't. + +#### Paths in the simple build system #### + +For the simple build system, the following defaults apply: + +Option Windows Default Unix Default +------- ---------------- ------------- +`--prefix` (global) `C:\Program Files\Haskell` `/usr/local` +`--prefix` (per-user) `C:\Documents And Settings\user\Application Data\cabal` `$HOME/.cabal` +`--bindir` `$prefix\bin` `$prefix/bin` +`--libdir` `$prefix` `$prefix/lib` +`--libsubdir` (others) `$pkgid\$compiler` `$pkgid/$compiler` +`--dynlibdir` `$libdir\$abi` `$libdir/$abi` +`--libexecdir` `$prefix\$pkgid` `$prefix/libexec` +`--datadir` (executable) `$prefix` `$prefix/share` +`--datadir` (library) `C:\Program Files\Haskell` `$prefix/share` +`--datasubdir` `$pkgid` `$pkgid` +`--docdir` `$prefix\doc\$pkgid` `$datadir/doc/$pkgid` +`--sysconfdir` `$prefix\etc` `$prefix/etc` +`--htmldir` `$docdir\html` `$docdir/html` +`--program-prefix` (empty) (empty) +`--program-suffix` (empty) (empty) + + +#### Prefix-independence #### + +On Windows it is possible to obtain the pathname of the running program. This +means that we can construct an installable executable package that is +independent of its absolute install location. The executable can find its +auxiliary files by finding its own path and knowing the location of the other +files relative to `$bindir`. Prefix-independence is particularly useful: it +means the user can choose the install location (i.e. the value of `$prefix`) at +install-time, rather than having to bake the path into the binary when it is +built. + +In order to achieve this, we require that for an executable on Windows, +all of `$bindir`, `$libdir`, `$dynlibdir`, `$datadir` and `$libexecdir` begin with +`$prefix`. If this is not the case then the compiled executable will +have baked-in all absolute paths. + +The application need do nothing special to achieve prefix-independence. +If it finds any files using `getDataFileName` and the [other functions +provided for the +purpose](developing-packages.html#accessing-data-files-from-package-code), +the files will be accessed relative to the location of the current +executable. + +A library cannot (currently) be prefix-independent, because it will be +linked into an executable whose file system location bears no relation +to the library package. + +### Controlling Flag Assignments ### + +Flag assignments (see the [resolution of conditions and +flags](developing-packages.html#resolution-of-conditions-and-flags)) can +be controlled with the following command line options. + +`-f` _flagname_ or `-f` `-`_flagname_ +: Force the specified flag to `true` or `false` (if preceded with a `-`). Later + specifications for the same flags will override earlier, i.e., + specifying `-fdebug -f-debug` is equivalent to `-f-debug` + +`--flags=`_flagspecs_ +: Same as `-f`, but allows specifying multiple flag assignments at + once. The parameter is a space-separated list of flag names (to + force a flag to `true`), optionally preceded by a `-` (to force a + flag to `false`). For example, `--flags="debug -feature1 feature2"` is + equivalent to `-fdebug -f-feature1 -ffeature2`. + +### Building Test Suites ### + +`--enable-tests` +: Build the test suites defined in the package description file during the + `build` stage. Check for dependencies required by the test suites. 
If the
+ package is configured with this option, it will be possible to run the test
+ suites with the `test` command after the package is built.
+
+`--disable-tests`
+: (default) Do not build any test suites during the `build` stage.
+ Do not check for dependencies required only by the test suites. It will not
+ be possible to invoke the `test` command without reconfiguring the package.
+
+`--enable-coverage`
+: Build libraries and executables (including test suites) with Haskell
+ Program Coverage enabled. Running the test suites will automatically
+ generate coverage reports with HPC.
+
+`--disable-coverage`
+: (default) Do not enable Haskell Program Coverage.
+
+### Miscellaneous options ###
+
+`--user`
+: Does a per-user installation. This changes the [default installation
+ prefix](#paths-in-the-simple-build-system). It also allows
+ dependencies to be satisfied by the user's package database, in
+ addition to the global database. This also implies a default of
+ `--user` for any subsequent `install` command, as packages
+ registered in the global database should not depend on packages
+ registered in a user's database.
+
+`--global`
+: (default) Does a global installation. In this case package
+ dependencies must be satisfied by the global package database. All
+ packages in the user's package database will be ignored. Typically
+ the final installation step will require administrative privileges.
+
+`--package-db=`_db_
+: Allows package dependencies to be satisfied from this additional
+ package database _db_ in addition to the global package database.
+ All packages in the user's package database will be ignored. The
+ interpretation of _db_ is implementation-specific. Typically it will
+ be a file or directory. Not all implementations support arbitrary
+ package databases.
+
+`--default-user-config=`_file_
+: Allows a "default" `cabal.config` freeze file to be passed in
+ manually. This file will only be used if one does not exist in the
+ project directory already. Typically, this can be set from the global
+ cabal `config` file so as to provide a default set of partial
+ constraints to be used by projects, providing a way for users to peg
+ themselves to stable package collections.
+
+`--enable-optimization`[=_n_] or `-O`[_n_]
+: (default) Build with optimization flags (if available). This is
+ appropriate for production use, taking more time to build faster
+ libraries and programs.
+
+ The optional _n_ value is the optimisation level. Some compilers
+ support multiple optimisation levels. The range is 0 to 2. Level 0
+ is equivalent to `--disable-optimization`; level 1 is the default if
+ no _n_ parameter is given. Level 2 is higher optimisation if the
+ compiler supports it. Level 2 is likely to lead to longer compile
+ times and bigger generated code.
+
+`--disable-optimization`
+: Build without optimization. This is suited for development: building
+ will be quicker, but the resulting library or programs will be slower.
+
+`--enable-profiling`
+: Build libraries and executables with profiling enabled (for compilers
+ that support profiling as a separate mode). For this to work, all
+ libraries used by this package must also have been built with profiling
+ support. For libraries this involves building an additional instance of
+ the library in addition to the normal non-profiling instance. For
+ executables it changes the single executable to be built in profiling mode.
+ + This flag covers both libraries and executables, but can be overridden + by the `--enable-library-profiling` flag. + + See also the `--profiling-detail` flag below. + +`--disable-profiling` +: (default) Do not enable profiling in generated libraries and executables. + +`--enable-library-profiling` or `-p` +: As with `--enable-profiling` above, but it applies only for libraries. So + this generates an additional profiling instance of the library in addition + to the normal non-profiling instance. + + The `--enable-profiling` flag controls the profiling mode for both + libraries and executables, but if different modes are desired for + libraries versus executables then use `--enable-library-profiling` as well. + +`--disable-library-profiling` +: (default) Do not generate an additional profiling version of the + library. + +`--profiling-detail`[=_level_] +: Some compilers that support profiling, notably GHC, can allocate costs to + different parts of the program and there are different levels of + granularity or detail with which this can be done. In particular for GHC + this concept is called "cost centers", and GHC can automatically add cost + centers, and can do so in different ways. + + This flag covers both libraries and executables, but can be overridden + by the `--library-profiling-detail` flag. + + Currently this setting is ignored for compilers other than GHC. The levels + that cabal currently supports are: + + `default` + : For GHC this uses `exported-functions` for libraries and + `toplevel-functions` for executables. + + `none` + : No costs will be assigned to any code within this component. + + `exported-functions` + : Costs will be assigned at the granularity of all top level functions + exported from each module. In GHC specifically, this is for non-inline + functions. + + `toplevel-functions` + : Costs will be assigned at the granularity of all top level functions + in each module, whether they are exported from the module or not. + In GHC specifically, this is for non-inline functions. + + `all-functions` + : Costs will be assigned at the granularity of all functions in each + module, whether top level or local. In GHC specifically, this is for + non-inline toplevel or where-bound functions or values. + + This flag is new in Cabal-1.24. Prior versions used the equivalent of + `none` above. + +`--library-profiling-detail`[=_level_] +: As with `--profiling-detail` above, but it applies only for libraries. + + The level for both libraries and executables is set by the + `--profiling-detail` flag, but if different levels are desired for + libraries versus executables then use `--library-profiling-detail` as well. + + +`--enable-library-vanilla` +: (default) Build ordinary libraries (as opposed to profiling + libraries). This is independent of the `--enable-library-profiling` + option. If you enable both, you get both. + +`--disable-library-vanilla` +: Do not build ordinary libraries. This is useful in conjunction with + `--enable-library-profiling` to build only profiling libraries, + rather than profiling and ordinary libraries. + +`--enable-library-for-ghci` +: (default) Build libraries suitable for use with GHCi. + +`--disable-library-for-ghci` +: Not all platforms support GHCi and indeed on some platforms, trying + to build GHCi libs fails. In such cases this flag can be used as a + workaround. + +`--enable-split-objs` +: Use the GHC `-split-objs` feature when building the library. 
This
+ reduces the final size of the executables that use the library by
+ allowing them to link with only the bits that they use rather than
+ the entire library. The downside is that building the library takes
+ longer and uses considerably more memory.
+
+`--disable-split-objs`
+: (default) Do not use the GHC `-split-objs` feature. This makes
+ building the library quicker, but the final executables that use the
+ library will be larger.
+
+`--enable-executable-stripping`
+: (default) When installing binary executable programs, run the
+ `strip` program on the binary. This can considerably reduce the size
+ of the executable binary file. It does this by removing debugging
+ information and symbols. While such extra information is useful for
+ debugging C programs with traditional debuggers, it is rarely helpful
+ for debugging binaries produced by Haskell compilers.
+
+ Not all Haskell implementations generate native binaries. For such
+ implementations this option has no effect.
+
+`--disable-executable-stripping`
+: Do not strip binary executables during installation. You might want
+ to use this option if you need to debug a program using gdb, for
+ example if you want to debug the C parts of a program containing
+ both Haskell and C code. Another reason is if you are building a
+ package for a system which has a policy of managing the stripping
+ itself (such as some Linux distributions).
+
+`--enable-shared`
+: Build a shared library. This implies a separate compiler run to
+ generate position-independent code as required on most platforms.
+
+`--disable-shared`
+: (default) Do not build a shared library.
+
+`--enable-executable-dynamic`
+: Link executables dynamically. The executable's library dependencies should
+ be built as shared objects. This implies `--enable-shared` unless
+ `--disable-shared` is explicitly specified.
+
+`--disable-executable-dynamic`
+: (default) Link executables statically.
+
+`--configure-option=`_str_
+: An extra option to an external `configure` script, if one is used
+ (see the section on [system-dependent
+ parameters](developing-packages.html#system-dependent-parameters)).
+ There can be several of these options.
+
+`--extra-include-dirs`[=_dir_]
+: An extra directory to search for C header files. You can use this
+ flag multiple times to get a list of directories.
+
+ You might need to use this flag if you have standard system header
+ files in a non-standard location that is not mentioned in the
+ package's `.cabal` file. Using this option has the same effect as
+ appending the directory _dir_ to the `include-dirs` field in each
+ library and executable in the package's `.cabal` file. The advantage
+ of course is that you do not have to modify the package at all.
+ These extra directories will be used while building the package and
+ for libraries it is also saved in the package registration
+ information and used when compiling modules that use the library.
+
+`--extra-lib-dirs`[=_dir_]
+: An extra directory to search for system library files. You can use
+ this flag multiple times to get a list of directories.
+
+`--extra-framework-dirs`[=_dir_]
+: An extra directory to search for frameworks (OS X only). You can use this
+ flag multiple times to get a list of directories.
+
+ You might need to use this flag if you have standard system
+ libraries in a non-standard location that is not mentioned in the
+ package's `.cabal` file.
Using this option has the same affect as + appending the directory _dir_ to the `extra-lib-dirs` field in each + library and executable in the package's `.cabal` file. The advantage + of course is that you do not have to modify the package at all. + These extra directories will be used while building the package and + for libraries it is also saved in the package registration + information and used when compiling modules that use the library. + +`--allow-newer`[=_pkgs_] +: Selectively relax upper bounds in dependencies without editing the + package description. + + If you want to install a package A that depends on B >= 1.0 && < 2.0, but + you have the version 2.0 of B installed, you can compile A against B 2.0 by + using `cabal install --allow-newer=B A`. This works for the whole package + index: if A also depends on C that in turn depends on B < 2.0, C's + dependency on B will be also relaxed. + + Example: + + ~~~~~~~~~~~~~~~~ + $ cd foo + $ cabal configure + Resolving dependencies... + cabal: Could not resolve dependencies: + [...] + $ cabal configure --allow-newer + Resolving dependencies... + Configuring foo... + ~~~~~~~~~~~~~~~~ + + Additional examples: + + ~~~~~~~~~~~~~~~~ + # Relax upper bounds in all dependencies. + $ cabal install --allow-newer foo + + # Relax upper bounds only in dependencies on bar, baz and quux. + $ cabal install --allow-newer=bar,baz,quux foo + + # Relax the upper bound on bar and force bar==2.1. + $ cabal install --allow-newer=bar --constraint="bar==2.1" foo + ~~~~~~~~~~~~~~~~ + + It's also possible to limit the scope of `--allow-newer` to single + packages with the `--allow-newer=scope:dep` syntax. This means that the + dependency on `dep` will be relaxed only for the package `scope`. + + Example: + + ~~~~~~~~~~~~~~~~ + # Relax upper bound in foo's dependency on base; also relax upper bound in + # every package's dependency on lens. + $ cabal install --allow-newer=foo:base,lens + + # Relax upper bounds in foo's dependency on base and bar's dependency + # on time; also relax the upper bound in the dependency on lens specified by + # any package. + $ cabal install --allow-newer=foo:base,lens --allow-newer=bar:time + ~~~~~~~~~~~~~~~~ + + Finally, one can enable `--allow-newer` permanently by setting `allow-newer: + True` in the `~/.cabal/config` file. Enabling 'allow-newer' selectively is + also supported in the config file (`allow-newer: foo, bar, baz:base`). + +`--constraint=`_constraint_ +: Restrict solutions involving a package to a given version range. + For example, `cabal install --constraint="bar==2.1"` will only consider + install plans that do not use `bar` at all, or `bar` of version 2.1. + + As a special case, `cabal install --constraint="bar -none"` prevents `bar` + from being used at all (`-none` abbreviates `> 1 && < 1`); `cabal install + --constraint="bar installed"` prevents reinstallation of the `bar` package; + `cabal install --constraint="bar +foo -baz"` specifies that the flag `foo` + should be turned on and the `baz` flag should be turned off. + +## setup build ## + +Perform any preprocessing or compilation needed to make this package ready for installation. + +This command takes the following options: + +--_prog_-options=_options_, --_prog_-option=_option_ +: These are mostly the same as the [options configure + step](#setup-configure). Unlike the options specified at the + configure step, any program options specified at the build step are + not persistent but are used for that invocation only. 
The options
+ specified at the build step are in addition to, not a replacement of, any
+ options specified at the configure step.
+
+## setup haddock ##
+
+Build the documentation for the package using [haddock][]. By default,
+only the documentation for the exposed modules is generated (but see the
+`--executables` and `--internal` flags below).
+
+This command takes the following options:
+
+`--hoogle`
+: Generate a file `dist/doc/html/`_pkgid_`.txt`, which can be
+ converted by [Hoogle](http://www.haskell.org/hoogle/) into a
+ database for searching. This is equivalent to running [haddock][]
+ with the `--hoogle` flag.
+
+`--html-location=`_url_
+: Specify a template for the location of HTML documentation for
+ prerequisite packages. The substitutions ([see
+ listing](#paths-in-the-simple-build-system)) are applied to the
+ template to obtain a location for each package, which will be used
+ by hyperlinks in the generated documentation. For example, the
+ following command generates links pointing at [Hackage] pages:
+
+ > setup haddock --html-location='http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html'
+
+ Here the argument is quoted to prevent substitution by the shell. If
+ this option is omitted, the location for each package is obtained
+ using the package tool (e.g. `ghc-pkg`).
+
+`--executables`
+: Also run [haddock][] for the modules of all the executable programs.
+ By default [haddock][] is run only on the exported modules.
+
+`--internal`
+: Run [haddock][] for all modules, including unexposed ones, and
+ make [haddock][] generate documentation for unexported symbols as
+ well.
+
+`--css=`_path_
+: The argument _path_ denotes a CSS file, which is passed to
+ [haddock][] and used to set the style of the generated
+ documentation. This is only needed to override the default style
+ that [haddock][] uses.
+
+`--hyperlink-source`
+: Generate [haddock][] documentation integrated with [HsColour][].
+ First, [HsColour][] is run to generate colourised code. Then
+ [haddock][] is run to generate HTML documentation. Each entity
+ shown in the documentation is linked to its definition in the
+ colourised code.
+
+`--hscolour-css=`_path_
+: The argument _path_ denotes a CSS file, which is passed to [HsColour][] as in
+
+ > runhaskell Setup.hs hscolour --css=_path_
+
+## setup hscolour ##
+
+Produce colourised code in HTML format using [HsColour][]. Colourised
+code for exported modules is put in `dist/doc/html/`_pkgid_`/src`.
+
+This command takes the following options:
+
+`--executables`
+: Also run [HsColour][] on the sources of all executable programs.
+ Colourised code is put in `dist/doc/html/`_pkgid_/_executable_`/src`.
+
+`--css=`_path_
+: Use the given CSS file for the generated HTML files. The CSS file
+ defines the colours used to colourise code. Note that this copies
+ the given CSS file to the directory with the generated HTML files
+ (renamed to `hscolour.css`) rather than linking to it.
+
+## setup install ##
+
+Copy the files into the install locations and (for library packages)
+register the package with the compiler, i.e. make the modules it
+contains available to programs.
+
+The [install locations](#installation-paths) are determined by options
+to `setup configure`.
+
+This command takes the following options:
+
+`--global`
+: Register this package in the system-wide database. (This is the
+ default, unless the `--user` option was supplied to the `configure`
+ command.)
+
+`--user`
+: Register this package in the user's local package database.
(This is + the default if the `--user` option was supplied to the `configure` + command.) + +## setup copy ## + +Copy the files without registering them. This command is mainly of use +to those creating binary packages. + +This command takes the following option: + +`--destdir=`_path_ + +Specify the directory under which to place installed files. If this is +not given, then the root directory is assumed. + +## setup register ## + +Register this package with the compiler, i.e. make the modules it +contains available to programs. This only makes sense for library +packages. Note that the `install` command incorporates this action. The +main use of this separate command is in the post-installation step for a +binary package. + +This command takes the following options: + +`--global` +: Register this package in the system-wide database. (This is the default.) + + +`--user` +: Register this package in the user's local package database. + + +`--gen-script` +: Instead of registering the package, generate a script containing + commands to perform the registration. On Unix, this file is called + `register.sh`, on Windows, `register.bat`. This script might be + included in a binary bundle, to be run after the bundle is unpacked + on the target system. + +`--gen-pkg-config`[=_path_] +: Instead of registering the package, generate a package registration + file. This only applies to compilers that support package + registration files which at the moment is only GHC. The file should + be used with the compiler's mechanism for registering packages. This + option is mainly intended for packaging systems. If possible use the + `--gen-script` option instead since it is more portable across + Haskell implementations. The _path_ is + optional and can be used to specify a particular output file to + generate. Otherwise, by default the file is the package name and + version with a `.conf` extension. + +`--inplace` +: Registers the package for use directly from the build tree, without + needing to install it. This can be useful for testing: there's no + need to install the package after modifying it, just recompile and + test. + + This flag does not create a build-tree-local package database. It + still registers the package in one of the user or global databases. + + However, there are some caveats. It only works with GHC + (currently). It only works if your package doesn't depend on having + any supplemental files installed --- plain Haskell libraries should + be fine. + +## setup unregister ## + +Deregister this package with the compiler. + +This command takes the following options: + +`--global` +: Deregister this package in the system-wide database. (This is the default.) + +`--user` +: Deregister this package in the user's local package database. + +`--gen-script` +: Instead of deregistering the package, generate a script containing + commands to perform the deregistration. On Unix, this file is + called `unregister.sh`, on Windows, `unregister.bat`. This script + might be included in a binary bundle, to be run on the target + system. + +## setup clean ## + +Remove any local files created during the `configure`, `build`, +`haddock`, `register` or `unregister` steps, and also any files and +directories listed in the `extra-tmp-files` field. + +This command takes the following options: + +`--save-configure` or `-s` +: Keeps the configuration information so it is not necessary to run + the configure step again before building. 
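+
+As a brief illustration of the `--gen-pkg-config` option described under `setup
+register` above, a packaging script might generate a registration file at build
+time and register it on the target machine with the compiler's own tool (the
+file name here is illustrative; with GHC the file is fed to `ghc-pkg`):
+
+~~~~~~~~~~~~~~~~
+runhaskell Setup.hs register --gen-pkg-config=mypkg-1.0.conf
+# later, on the target system:
+ghc-pkg register mypkg-1.0.conf
+~~~~~~~~~~~~~~~~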
+ +## setup test ## + +Run the test suites specified in the package description file. Aside from +the following flags, Cabal accepts the name of one or more test suites on the +command line after `test`. When supplied, Cabal will run only the named test +suites, otherwise, Cabal will run all test suites in the package. + +`--builddir=`_dir_ +: The directory where Cabal puts generated build files (default: `dist`). + Test logs will be located in the `test` subdirectory. + +`--human-log=`_path_ +: The template used to name human-readable test logs; the path is relative + to `dist/test`. By default, logs are named according to the template + `$pkgid-$test-suite.log`, so that each test suite will be logged to its own + human-readable log file. Template variables allowed are: `$pkgid`, + `$compiler`, `$os`, `$arch`, `$abi`, `$abitag`, `$test-suite`, and `$result`. + +`--machine-log=`_path_ +: The path to the machine-readable log, relative to `dist/test`. The default + template is `$pkgid.log`. Template variables allowed are: `$pkgid`, + `$compiler`, `$os`, `$arch`, `$abi`, `$abitag` and `$result`. + +`--show-details=`_filter_ +: Determines if the results of individual test cases are shown on the + terminal. May be `always` (always show), `never` (never show), `failures` + (show only failed results), or `streaming` (show all results in real time). + +`--test-options=`_options_ +: Give extra options to the test executables. + +`--test-option=`_option_ +: give an extra option to the test executables. There is no need to quote + options containing spaces because a single option is assumed, so options + will not be split on spaces. + +## setup sdist ## + +Create a system- and compiler-independent source distribution in a file +_package_-_version_`.tar.gz` in the `dist` subdirectory, for +distribution to package builders. When unpacked, the commands listed in +this section will be available. + +The files placed in this distribution are the package description file, +the setup script, the sources of the modules named in the package +description file, and files named in the `license-file`, `main-is`, +`c-sources`, `js-sources`, `data-files`, `extra-source-files` and +`extra-doc-files` fields. + +This command takes the following option: + +`--snapshot` +: Append today's date (in "YYYYMMDD" format) to the version number for + the generated source package. The original package is unaffected. 
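+
+For example, for a hypothetical package `foo` with version `1.0`, a dated
+snapshot source distribution could be produced as follows (the resulting file
+name depends on the current date):
+
+~~~~~~~~~~~~~~~~
+runhaskell Setup.hs sdist --snapshot
+# produces e.g. dist/foo-1.0.20161223.tar.gz instead of dist/foo-1.0.tar.gz
+~~~~~~~~~~~~~~~~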
+ + +[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html +[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html +[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License +[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension +[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType +[alex]: http://www.haskell.org/alex/ +[autoconf]: http://www.gnu.org/software/autoconf/ +[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ +[cpphs]: http://projects.haskell.org/cpphs/ +[greencard]: http://hackage.haskell.org/package/greencard +[haddock]: http://www.haskell.org/haddock/ +[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ +[happy]: http://www.haskell.org/happy/ +[Hackage]: http://hackage.haskell.org/ +[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/misc.markdown cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/misc.markdown --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/doc/misc.markdown 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/doc/misc.markdown 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,109 @@ +% Cabal User Guide + +# Reporting bugs and deficiencies # + +Please report any flaws or feature requests in the [bug tracker][]. + +For general discussion or queries email the libraries mailing list +. There is also a development mailing list +. + +[bug tracker]: https://github.com/haskell/cabal/issues + +# Stability of Cabal interfaces # + +The Cabal library and related infrastructure is still under active +development. New features are being added and limitations and bugs are +being fixed. This requires internal changes and often user visible +changes as well. We therefore cannot promise complete future-proof +stability, at least not without halting all development work. + +This section documents the aspects of the Cabal interface that we can +promise to keep stable and which bits are subject to change. + +## Cabal file format ## + +This is backwards compatible and mostly forwards compatible. New fields +can be added without breaking older versions of Cabal. Fields can be +deprecated without breaking older packages. + +## Command-line interface ## + +### Very Stable Command-line interfaces ### + +* `./setup configure` + * `--prefix` + * `--user` + * `--ghc`, `--uhc` + * `--verbose` + * `--prefix` + +* `./setup build` +* `./setup install` +* `./setup register` +* `./setup copy` + +### Stable Command-line interfaces ### + +### Unstable command-line ### + +## Functions and Types ## + +The Cabal library follows the [Package Versioning Policy][PVP]. This +means that within a stable major release, for example 1.2.x, there will +be no incompatible API changes. But minor versions increments, for +example 1.2.3, indicate compatible API additions. + +The Package Versioning Policy does not require any API guarantees +between major releases, for example between 1.2.x and 1.4.x. In practise +of course not everything changes between major releases. Some parts of +the API are more prone to change than others. The rest of this section +gives some informal advice on what level of API stability you can expect +between major releases. 
+ +[PVP]: http://www.haskell.org/haskellwiki/Package_versioning_policy + +### Very Stable API ### + +* `defaultMain` + +* `defaultMainWithHooks defaultUserHooks` + + But regular `defaultMainWithHooks` isn't stable since `UserHooks` + changes. + +### Semi-stable API ### + +* `UserHooks` The hooks API will change in the future + +* `Distribution.*` is mostly declarative information about packages and + is somewhat stable. + +### Unstable API ### + +Everything under `Distribution.Simple.*` has no stability guarantee. + +## Hackage ## + +The index format is a partly stable interface. It consists of a tar.gz +file that contains directories with `.cabal` files in. In future it may +contain more kinds of files so do not assume every file is a `.cabal` +file. Incompatible revisions to the format would involve bumping the +name of the index file, i.e., `00-index.tar.gz`, `01-index.tar.gz` etc. + + +[dist-simple]: ../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html +[dist-make]: ../release/cabal-latest/doc/API/Cabal/Distribution-Make.html +[dist-license]: ../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License +[extension]: ../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension +[BuildType]: ../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType +[alex]: http://www.haskell.org/alex/ +[autoconf]: http://www.gnu.org/software/autoconf/ +[c2hs]: http://www.cse.unsw.edu.au/~chak/haskell/c2hs/ +[cpphs]: http://projects.haskell.org/cpphs/ +[greencard]: http://hackage.haskell.org/package/greencard +[haddock]: http://www.haskell.org/haddock/ +[HsColour]: http://www.cs.york.ac.uk/fp/darcs/hscolour/ +[happy]: http://www.haskell.org/happy/ +[Hackage]: http://hackage.haskell.org/ +[pkg-config]: http://www.freedesktop.org/wiki/Software/pkg-config/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Language/Haskell/Extension.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Language/Haskell/Extension.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Language/Haskell/Extension.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Language/Haskell/Extension.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,847 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Language.Haskell.Extension +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- Haskell language dialects and extensions + +module Language.Haskell.Extension ( + Language(..), + knownLanguages, + + Extension(..), + KnownExtension(..), + knownExtensions, + deprecatedExtensions + ) where + +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.Binary + +import qualified Text.PrettyPrint as Disp +import qualified Data.Char as Char (isAlphaNum) +import Data.Array (Array, accumArray, bounds, Ix(inRange), (!)) +import Data.Data (Data) +import Data.Typeable (Typeable) +import GHC.Generics (Generic) + +-- ------------------------------------------------------------ +-- * Language +-- ------------------------------------------------------------ + +-- | This represents a Haskell language dialect. +-- +-- Language 'Extension's are interpreted relative to one of these base +-- languages. 
+-- +data Language = + + -- | The Haskell 98 language as defined by the Haskell 98 report. + -- + Haskell98 + + -- | The Haskell 2010 language as defined by the Haskell 2010 report. + -- + | Haskell2010 + + -- | An unknown language, identified by its name. + | UnknownLanguage String + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary Language + +knownLanguages :: [Language] +knownLanguages = [Haskell98, Haskell2010] + +instance Text Language where + disp (UnknownLanguage other) = Disp.text other + disp other = Disp.text (show other) + + parse = do + lang <- Parse.munch1 Char.isAlphaNum + return (classifyLanguage lang) + +classifyLanguage :: String -> Language +classifyLanguage = \str -> case lookup str langTable of + Just lang -> lang + Nothing -> UnknownLanguage str + where + langTable = [ (show lang, lang) + | lang <- knownLanguages ] + +-- ------------------------------------------------------------ +-- * Extension +-- ------------------------------------------------------------ + +-- Note: if you add a new 'KnownExtension': +-- +-- * also add it to the Distribution.Simple.X.languageExtensions lists +-- (where X is each compiler: GHC, JHC, LHC, UHC, HaskellSuite) +-- +-- | This represents language extensions beyond a base 'Language' definition +-- (such as 'Haskell98') that are supported by some implementations, usually +-- in some special mode. +-- +-- Where applicable, references are given to an implementation's +-- official documentation. + +data Extension = + -- | Enable a known extension + EnableExtension KnownExtension + + -- | Disable a known extension + | DisableExtension KnownExtension + + -- | An unknown extension, identified by the name of its @LANGUAGE@ + -- pragma. + | UnknownExtension String + + deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) + +instance Binary Extension + +data KnownExtension = + + -- | Allow overlapping class instances, provided there is a unique + -- most specific instance for each use. + -- + -- * + OverlappingInstances + + -- | Ignore structural rules guaranteeing the termination of class + -- instance resolution. Termination is guaranteed by a fixed-depth + -- recursion stack, and compilation may fail if this depth is + -- exceeded. + -- + -- * + | UndecidableInstances + + -- | Implies 'OverlappingInstances'. Allow the implementation to + -- choose an instance even when it is possible that further + -- instantiation of types will lead to a more specific instance + -- being applicable. + -- + -- * + | IncoherentInstances + + -- | /(deprecated)/ Allow recursive bindings in @do@ blocks, using the @rec@ + -- keyword. See also 'RecursiveDo'. + | DoRec + + -- | Allow recursive bindings using @mdo@, a variant of @do@. + -- @DoRec@ provides a different, preferred syntax. + -- + -- * + | RecursiveDo + + -- | Provide syntax for writing list comprehensions which iterate + -- over several lists together, like the 'zipWith' family of + -- functions. + -- + -- * + | ParallelListComp + + -- | Allow multiple parameters in a type class. + -- + -- * + | MultiParamTypeClasses + + -- | Enable the dreaded monomorphism restriction. + -- + -- * + | MonomorphismRestriction + + -- | Allow a specification attached to a multi-parameter type class + -- which indicates that some parameters are entirely determined by + -- others. The implementation will check that this property holds + -- for the declared instances, and will use this property to reduce + -- ambiguity in instance resolution. 
+ -- + -- * + | FunctionalDependencies + + -- | Like 'RankNTypes' but does not allow a higher-rank type to + -- itself appear on the left of a function arrow. + -- + -- * + | Rank2Types + + -- | Allow a universally-quantified type to occur on the left of a + -- function arrow. + -- + -- * + | RankNTypes + + -- | Allow data constructors to have polymorphic arguments. Unlike + -- 'RankNTypes', does not allow this for ordinary functions. + -- + -- * + | PolymorphicComponents + + -- | Allow existentially-quantified data constructors. + -- + -- * + | ExistentialQuantification + + -- | Cause a type variable in a signature, which has an explicit + -- @forall@ quantifier, to scope over the definition of the + -- accompanying value declaration. + -- + -- * + | ScopedTypeVariables + + -- | Deprecated, use 'ScopedTypeVariables' instead. + | PatternSignatures + + -- | Enable implicit function parameters with dynamic scope. + -- + -- * + | ImplicitParams + + -- | Relax some restrictions on the form of the context of a type + -- signature. + -- + -- * + | FlexibleContexts + + -- | Relax some restrictions on the form of the context of an + -- instance declaration. + -- + -- * + | FlexibleInstances + + -- | Allow data type declarations with no constructors. + -- + -- * + | EmptyDataDecls + + -- | Run the C preprocessor on Haskell source code. + -- + -- * + | CPP + + -- | Allow an explicit kind signature giving the kind of types over + -- which a type variable ranges. + -- + -- * + | KindSignatures + + -- | Enable a form of pattern which forces evaluation before an + -- attempted match, and a form of strict @let@/@where@ binding. + -- + -- * + | BangPatterns + + -- | Allow type synonyms in instance heads. + -- + -- * + | TypeSynonymInstances + + -- | Enable Template Haskell, a system for compile-time + -- metaprogramming. + -- + -- * + | TemplateHaskell + + -- | Enable the Foreign Function Interface. In GHC, implements the + -- standard Haskell 98 Foreign Function Interface Addendum, plus + -- some GHC-specific extensions. + -- + -- * + | ForeignFunctionInterface + + -- | Enable arrow notation. + -- + -- * + | Arrows + + -- | /(deprecated)/ Enable generic type classes, with default instances defined in + -- terms of the algebraic structure of a type. + -- + -- * + | Generics + + -- | Enable the implicit importing of the module "Prelude". When + -- disabled, when desugaring certain built-in syntax into ordinary + -- identifiers, use whatever is in scope rather than the "Prelude" + -- -- version. + -- + -- * + | ImplicitPrelude + + -- | Enable syntax for implicitly binding local names corresponding + -- to the field names of a record. Puns bind specific names, unlike + -- 'RecordWildCards'. + -- + -- * + | NamedFieldPuns + + -- | Enable a form of guard which matches a pattern and binds + -- variables. + -- + -- * + | PatternGuards + + -- | Allow a type declared with @newtype@ to use @deriving@ for any + -- class with an instance for the underlying type. + -- + -- * + | GeneralizedNewtypeDeriving + + -- | Enable the \"Trex\" extensible records system. + -- + -- * + | ExtensibleRecords + + -- | Enable type synonyms which are transparent in some definitions + -- and opaque elsewhere, as a way of implementing abstract + -- datatypes. + -- + -- * + | RestrictedTypeSynonyms + + -- | Enable an alternate syntax for string literals, + -- with string templating. + -- + -- * + | HereDocuments + + -- | Allow the character @#@ as a postfix modifier on identifiers. + -- Also enables literal syntax for unboxed values. 
+ -- + -- * + | MagicHash + + -- | Allow data types and type synonyms which are indexed by types, + -- i.e. ad-hoc polymorphism for types. + -- + -- * + | TypeFamilies + + -- | Allow a standalone declaration which invokes the type class + -- @deriving@ mechanism. + -- + -- * + | StandaloneDeriving + + -- | Allow certain Unicode characters to stand for certain ASCII + -- character sequences, e.g. keywords and punctuation. + -- + -- * + | UnicodeSyntax + + -- | Allow the use of unboxed types as foreign types, e.g. in + -- @foreign import@ and @foreign export@. + -- + -- * + | UnliftedFFITypes + + -- | Enable interruptible FFI. + -- + -- * + | InterruptibleFFI + + -- | Allow use of CAPI FFI calling convention (@foreign import capi@). + -- + -- * + | CApiFFI + + -- | Defer validity checking of types until after expanding type + -- synonyms, relaxing the constraints on how synonyms may be used. + -- + -- * + | LiberalTypeSynonyms + + -- | Allow the name of a type constructor, type class, or type + -- variable to be an infix operator. + | TypeOperators + + -- | Enable syntax for implicitly binding local names corresponding + -- to the field names of a record. A wildcard binds all unmentioned + -- names, unlike 'NamedFieldPuns'. + -- + -- * + | RecordWildCards + + -- | Deprecated, use 'NamedFieldPuns' instead. + | RecordPuns + + -- | Allow a record field name to be disambiguated by the type of + -- the record it's in. + -- + -- * + | DisambiguateRecordFields + + -- | Enable traditional record syntax (as supported by Haskell 98) + -- + -- * + | TraditionalRecordSyntax + + -- | Enable overloading of string literals using a type class, much + -- like integer literals. + -- + -- * + | OverloadedStrings + + -- | Enable generalized algebraic data types, in which type + -- variables may be instantiated on a per-constructor basis. Implies + -- 'GADTSyntax'. + -- + -- * + | GADTs + + -- | Enable GADT syntax for declaring ordinary algebraic datatypes. + -- + -- * + | GADTSyntax + + -- | Make pattern bindings monomorphic. + -- + -- * + | MonoPatBinds + + -- | Relax the requirements on mutually-recursive polymorphic + -- functions. + -- + -- * + | RelaxedPolyRec + + -- | Allow default instantiation of polymorphic types in more + -- situations. + -- + -- * + | ExtendedDefaultRules + + -- | Enable unboxed tuples. + -- + -- * + | UnboxedTuples + + -- | Enable @deriving@ for classes 'Data.Typeable.Typeable' and + -- 'Data.Generics.Data'. + -- + -- * + | DeriveDataTypeable + + -- | Enable @deriving@ for 'GHC.Generics.Generic' and 'GHC.Generics.Generic1'. + -- + -- * + | DeriveGeneric + + -- | Enable support for default signatures. + -- + -- * + | DefaultSignatures + + -- | Allow type signatures to be specified in instance declarations. + -- + -- * + | InstanceSigs + + -- | Allow a class method's type to place additional constraints on + -- a class type variable. + -- + -- * + | ConstrainedClassMethods + + -- | Allow imports to be qualified by the package name the module is + -- intended to be imported from, e.g. + -- + -- > import "network" Network.Socket + -- + -- * + | PackageImports + + -- | /(deprecated)/ Allow a type variable to be instantiated at a + -- polymorphic type. + -- + -- * + | ImpredicativeTypes + + -- | /(deprecated)/ Change the syntax for qualified infix operators. + -- + -- * + | NewQualifiedOperators + + -- | Relax the interpretation of left operator sections to allow + -- unary postfix operators. 
+ -- + -- * + | PostfixOperators + + -- | Enable quasi-quotation, a mechanism for defining new concrete + -- syntax for expressions and patterns. + -- + -- * + | QuasiQuotes + + -- | Enable generalized list comprehensions, supporting operations + -- such as sorting and grouping. + -- + -- * + | TransformListComp + + -- | Enable monad comprehensions, which generalise the list + -- comprehension syntax to work for any monad. + -- + -- * + | MonadComprehensions + + -- | Enable view patterns, which match a value by applying a + -- function and matching on the result. + -- + -- * + | ViewPatterns + + -- | Allow concrete XML syntax to be used in expressions and patterns, + -- as per the Haskell Server Pages extension language: + -- . The ideas behind it are + -- discussed in the paper \"Haskell Server Pages through Dynamic Loading\" + -- by Niklas Broberg, from Haskell Workshop '05. + | XmlSyntax + + -- | Allow regular pattern matching over lists, as discussed in the + -- paper \"Regular Expression Patterns\" by Niklas Broberg, Andreas Farre + -- and Josef Svenningsson, from ICFP '04. + | RegularPatterns + + -- | Enable the use of tuple sections, e.g. @(, True)@ desugars into + -- @\x -> (x, True)@. + -- + -- * + | TupleSections + + -- | Allow GHC primops, written in C--, to be imported into a Haskell + -- file. + | GHCForeignImportPrim + + -- | Support for patterns of the form @n + k@, where @k@ is an + -- integer literal. + -- + -- * + | NPlusKPatterns + + -- | Improve the layout rule when @if@ expressions are used in a @do@ + -- block. + | DoAndIfThenElse + + -- | Enable support for multi-way @if@-expressions. + -- + -- * + | MultiWayIf + + -- | Enable support lambda-@case@ expressions. + -- + -- * + | LambdaCase + + -- | Makes much of the Haskell sugar be desugared into calls to the + -- function with a particular name that is in scope. + -- + -- * + | RebindableSyntax + + -- | Make @forall@ a keyword in types, which can be used to give the + -- generalisation explicitly. + -- + -- * + | ExplicitForAll + + -- | Allow contexts to be put on datatypes, e.g. the @Eq a@ in + -- @data Eq a => Set a = NilSet | ConsSet a (Set a)@. + -- + -- * + | DatatypeContexts + + -- | Local (@let@ and @where@) bindings are monomorphic. + -- + -- * + | MonoLocalBinds + + -- | Enable @deriving@ for the 'Data.Functor.Functor' class. + -- + -- * + | DeriveFunctor + + -- | Enable @deriving@ for the 'Data.Traversable.Traversable' class. + -- + -- * + | DeriveTraversable + + -- | Enable @deriving@ for the 'Data.Foldable.Foldable' class. + -- + -- * + | DeriveFoldable + + -- | Enable non-decreasing indentation for @do@ blocks. + -- + -- * + | NondecreasingIndentation + + -- | Allow imports to be qualified with a safe keyword that requires + -- the imported module be trusted as according to the Safe Haskell + -- definition of trust. + -- + -- > import safe Network.Socket + -- + -- * + | SafeImports + + -- | Compile a module in the Safe, Safe Haskell mode -- a restricted + -- form of the Haskell language to ensure type safety. + -- + -- * + | Safe + + -- | Compile a module in the Trustworthy, Safe Haskell mode -- no + -- restrictions apply but the module is marked as trusted as long as + -- the package the module resides in is trusted. + -- + -- * + | Trustworthy + + -- | Compile a module in the Unsafe, Safe Haskell mode so that + -- modules compiled using Safe, Safe Haskell mode can't import it. 
+ -- + -- * + | Unsafe + + -- | Allow type class/implicit parameter/equality constraints to be + -- used as types with the special kind constraint. Also generalise + -- the @(ctxt => ty)@ syntax so that any type of kind constraint can + -- occur before the arrow. + -- + -- * + | ConstraintKinds + + -- | Enable kind polymorphism. + -- + -- * + | PolyKinds + + -- | Enable datatype promotion. + -- + -- * + | DataKinds + + -- | Enable parallel arrays syntax (@[:@, @:]@) for /Data Parallel Haskell/. + -- + -- * + | ParallelArrays + + -- | Enable explicit role annotations, like in (@type role Foo representational representational@). + -- + -- * + | RoleAnnotations + + -- | Enable overloading of list literals, arithmetic sequences and + -- list patterns using the 'IsList' type class. + -- + -- * + | OverloadedLists + + -- | Enable case expressions that have no alternatives. Also applies to lambda-case expressions if they are enabled. + -- + -- * + | EmptyCase + + -- | Triggers the generation of derived 'Typeable' instances for every + -- datatype and type class declaration. + -- + -- * + | AutoDeriveTypeable + + -- | Desugars negative literals directly (without using negate). + -- + -- * + | NegativeLiterals + + -- | Allow the use of binary integer literal syntax (e.g. @0b11001001@ to denote @201@). + -- + -- * + | BinaryLiterals + + -- | Allow the use of floating literal syntax for all instances of 'Num', including 'Int' and 'Integer'. + -- + -- * + | NumDecimals + + -- | Enable support for type classes with no type parameter. + -- + -- * + | NullaryTypeClasses + + -- | Enable explicit namespaces in module import/export lists. + -- + -- * + | ExplicitNamespaces + + -- | Allow the user to write ambiguous types, and the type inference engine to infer them. + -- + -- * + | AllowAmbiguousTypes + + -- | Enable @foreign import javascript@. + | JavaScriptFFI + + -- | Allow giving names to and abstracting over patterns. + -- + -- * + | PatternSynonyms + + -- | Allow anonymous placeholders (underscore) inside type signatures. The + -- type inference engine will generate a message describing the type inferred + -- at the hole's location. + -- + -- * + | PartialTypeSignatures + + -- | Allow named placeholders written with a leading underscore inside type + -- signatures. Wildcards with the same name unify to the same type. + -- + -- * + | NamedWildCards + + -- | Enable @deriving@ for any class. + -- + -- * + | DeriveAnyClass + + -- | Enable @deriving@ for the 'Language.Haskell.TH.Syntax.Lift' class. + -- + -- * + | DeriveLift + + -- | Enable support for 'static pointers' (and the @static@ + -- keyword) to refer to globally stable names, even across + -- different programs. + -- + -- * + | StaticPointers + + -- | Switches data type declarations to be strict by default (as if + -- they had a bang using @BangPatterns@), and allow opt-in field + -- laziness using @~@. + | StrictData + + -- | Switches all pattern bindings to be strict by default (as if + -- they had a bang using @BangPatterns@), ordinary patterns are + -- recovered using @~@. Implies @StrictData@. + | Strict + + -- | Allows @do@-notation for types that are @'Applicative'@ as well + -- as @'Monad'@. When enabled, desugaring @do@ notation tries to use + -- @(<*>)@ and @'fmap'@ and @'join'@ as far as possible. + | ApplicativeDo + + -- | Allow records to use duplicated field labels for accessors. + | DuplicateRecordFields + + -- | Enable explicit type applications with the syntax @id \@Int@. 
+ | TypeApplications + + -- | Dissolve the distinction between types and kinds, allowing the compiler + -- to reason about kind equality and therefore enabling GADTs to be promoted + -- to the type-level. + | TypeInType + + -- | Allow recursive (and therefore undecidable) super-class relationships. + | UndecidableSuperClasses + + -- | A temporary extension to help library authors check if their + -- code will compile with the new planned desugaring of fail. + | MonadFailDesugaring + + -- | A subset of @TemplateHaskell@ including only quasi-quoting. + | TemplateHaskellQuotes + + -- | Allows use of the @#label@ syntax. + | OverloadedLabels + + deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded, Typeable, Data) + +instance Binary KnownExtension + +{-# DEPRECATED knownExtensions + "KnownExtension is an instance of Enum and Bounded, use those instead." #-} +knownExtensions :: [KnownExtension] +knownExtensions = [minBound..maxBound] + +-- | Extensions that have been deprecated, possibly paired with another +-- extension that replaces it. +-- +deprecatedExtensions :: [(Extension, Maybe Extension)] +deprecatedExtensions = + [ (EnableExtension RecordPuns, Just (EnableExtension NamedFieldPuns)) + , (EnableExtension PatternSignatures, Just (EnableExtension ScopedTypeVariables)) + ] +-- NOTE: when adding deprecated extensions that have new alternatives +-- we must be careful to make sure that the deprecation messages are +-- valid. We must not recommend aliases that cannot be used with older +-- compilers, perhaps by adding support in Cabal to translate the new +-- name to the old one for older compilers. Otherwise we are in danger +-- of the scenario in ticket #689. + +instance Text Extension where + disp (UnknownExtension other) = Disp.text other + disp (EnableExtension ke) = Disp.text (show ke) + disp (DisableExtension ke) = Disp.text ("No" ++ show ke) + + parse = do + extension <- Parse.munch1 Char.isAlphaNum + return (classifyExtension extension) + +instance Text KnownExtension where + disp ke = Disp.text (show ke) + + parse = do + extension <- Parse.munch1 Char.isAlphaNum + case classifyKnownExtension extension of + Just ke -> + return ke + Nothing -> + fail ("Can't parse " ++ show extension ++ " as KnownExtension") + +classifyExtension :: String -> Extension +classifyExtension string + = case classifyKnownExtension string of + Just ext -> EnableExtension ext + Nothing -> + case string of + 'N':'o':string' -> + case classifyKnownExtension string' of + Just ext -> DisableExtension ext + Nothing -> UnknownExtension string + _ -> UnknownExtension string + +-- | 'read' for 'KnownExtension's is really really slow so for the Text +-- instance +-- what we do is make a simple table indexed off the first letter in the +-- extension name. The extension names actually cover the range @'A'-'Z'@ +-- pretty densely and the biggest bucket is 7 so it's not too bad. We just do +-- a linear search within each bucket. +-- +-- This gives an order of magnitude improvement in parsing speed, and it'll +-- also allow us to do case insensitive matches in future if we prefer. +-- +classifyKnownExtension :: String -> Maybe KnownExtension +classifyKnownExtension "" = Nothing +classifyKnownExtension string@(c : _) + | inRange (bounds knownExtensionTable) c + = lookup string (knownExtensionTable ! c) + | otherwise = Nothing + +knownExtensionTable :: Array Char [(String, KnownExtension)] +knownExtensionTable = + accumArray (flip (:)) [] ('A', 'Z') + [ (head str, (str, extension)) + | extension <- [toEnum 0 ..]
+ , let str = show extension ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/LICENSE 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,34 @@ +Copyright (c) 2003-2014, Isaac Jones, Simon Marlow, Martin Sjögren, + Bjorn Bringert, Krasimir Angelov, + Malcolm Wallace, Ross Patterson, Ian Lynagh, + Duncan Coutts, Thomas Schilling, + Johan Tibell, Mikhail Glushenkov +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/README.md 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,182 @@ +The Cabal library package +========================= + +See the [Cabal web site] for more information. + +If you also want the `cabal` command-line program, you need the +[cabal-install] package in addition to this library. + +[cabal-install]: ../cabal-install/README.md + +Installing the Cabal library +============================ + +If you already have the `cabal` program +--------------------------------------- + +In this case run: + + $ cabal install + +However, if you do not have an existing version of the `cabal` program, +you first must install the Cabal library. To avoid this bootstrapping +problem, you can install the Cabal library directly as described below. + + +Installing as a user (no root or administrator access) +------------------------------------------------------ + + ghc -threaded --make Setup + ./Setup configure --user + ./Setup build + ./Setup install + +Note the use of the `--user` flag at the configure step. 
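+
+For reference, the `Setup` program compiled in the steps above is the
+standard driver shipped at the top of this package; a minimal sketch of it,
+mirroring the `Setup.hs` that appears later in this patch and using only
+`Distribution.Simple` from the Cabal library, looks like:
+
+    -- Minimal Setup.hs: delegate every command to the Simple build type.
+    import Distribution.Simple
+
+    main :: IO ()
+    main = defaultMain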
+ +Compiling 'Setup' rather than using `runghc Setup` is much faster and +works on Windows. For all packages other than Cabal itself, it is fine +to use `runghc`. + +This will install into `$HOME/.cabal/` on Unix and into +`Documents and Settings\$User\Application Data\cabal\` on Windows. +If you want to install elsewhere, use the `--prefix=` flag at the +configure step. + + +Installing as root or Administrator +----------------------------------- + + ghc -threaded --make Setup + ./Setup configure + ./Setup build + sudo ./Setup install + +Compiling Setup rather than using `runghc Setup` is much faster and +works on Windows. For all packages other than Cabal itself, it is fine +to use `runghc`. + +This will install into `/usr/local` on Unix, and on Windows it will +install into `$ProgramFiles/Haskell`. If you want to install elsewhere, +use the `--prefix=` flag at the configure step. + + +Using older versions of GHC and Cabal +====================================== + +It is recommended that you leave any pre-existing version of Cabal +installed. In particular, it is *essential* you keep the version that +came with GHC itself, since other installed packages require it (for +instance, the "ghc" API package). + +Prior to GHC 6.4.2, however, GHC did not deal particularly well with +having multiple versions of packages installed at once. So if you are +using GHC 6.4.1 or older and you have an older version of Cabal +installed, you should probably remove it by running: + + $ ghc-pkg unregister Cabal + +or, if you had Cabal installed only for your user account, run: + + $ ghc-pkg unregister Cabal --user + +The `filepath` dependency +========================= + +Cabal uses the [filepath] package, so it must be installed first. +GHC version 6.6.1 and later come with `filepath`, however, earlier +versions do not by default. If you do not already have `filepath`, +you need to install it. You can use any existing version of Cabal to do +that. If you have neither Cabal nor `filepath`, it is slightly +harder but still possible. + +Unpack Cabal and `filepath` into separate directories. For example: + + tar -xzf filepath-1.1.0.0.tar.gz + tar -xzf Cabal-1.6.0.0.tar.gz + + # rename to make the following instructions simpler: + mv filepath-1.1.0.0/ filepath/ + mv Cabal-1.6.0.0/ Cabal/ + + cd Cabal + ghc -i../filepath -cpp --make Setup.hs -o ../filepath/setup + cd ../filepath/ + ./setup configure --user + ./setup build + ./setup install + +This installs `filepath` so that you can install Cabal with the normal +method. + +[filepath]: http://hackage.haskell.org/package/filepath + +More information +================ + +Please see the [Cabal web site] for the [user guide] and [API +documentation]. There is additional information available on the +[development wiki]. + +[user guide]: http://www.haskell.org/cabal/users-guide +[API documentation]: http://www.haskell.org/cabal/release/cabal-latest/doc/API/Cabal/Distribution-Simple.html +[development wiki]: https://github.com/haskell/cabal/wiki + + +Bugs +==== + +Please report bugs and feature requests to Cabal's [bug tracker]. + + +Your help +--------- + +To help Cabal's development, it is enormously helpful to know from +Cabal's users what their most pressing problems are with Cabal and +[Hackage]. You may have a favourite Cabal bug or limitation. Look at +Cabal's [bug tracker]. Ensure that the problem is reported there and +adequately described. Comment on the issue to report how much of a +problem the bug is for you. 
Subscribe to the issue's notifications to +discuss requirements and keep informed on progress. For feature +requests, it is helpful if there is a description of how you would +expect to interact with the new feature. + +[Hackage]: http://hackage.haskell.org + + +Source code +=========== + +You can get the master development branch using: + + $ git clone https://github.com/haskell/cabal.git + + +Credits +======= + +Cabal developers (in alphabetical order): + +- Krasimir Angelov +- Bjorn Bringert +- Duncan Coutts +- Isaac Jones +- David Himmelstrup ("Lemmih") +- Simon Marlow +- Ross Patterson +- Thomas Schilling +- Martin Sjögren +- Malcolm Wallace +- and nearly 30 other people have contributed occasional patches + +Cabal specification authors: + +- Isaac Jones +- Simon Marlow +- Ross Patterson +- Simon Peyton Jones +- Malcolm Wallace + + +[bug tracker]: https://github.com/haskell/cabal/issues +[Cabal web site]: http://www.haskell.org/cabal/ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/Setup.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,13 @@ +import Distribution.Simple +main :: IO () +main = defaultMain + +-- Although this looks like the Simple build type, it is in fact vital that +-- we use this Setup.hs because it'll get compiled against the local copy +-- of the Cabal lib, thus enabling Cabal to bootstrap itself without relying +-- on any previous installation. This also means we can use any new features +-- immediately because we never have to worry about building Cabal with an +-- older version of itself. +-- +-- NOTE 25/01/2015: Bootstrapping is disabled for now, see +-- https://github.com/haskell/cabal/issues/3003. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/check.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/check.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/check.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/check.sh 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,25 @@ +#!/bin/sh + +base_version=1.4.0.2 +test_version=1.5.6 + +for setup in archive/*/*/Setup.hs archive/*/*/Setup.lhs; do + + pkgname=$(basename ${setup}) + + if test $(wc -w < ${setup}) -gt 21; then + if ghc -package Cabal-${base_version} -S ${setup} -o /dev/null 2> /dev/null; then + + if ghc -package Cabal-${test_version} -S ${setup} -o /dev/null 2> /dev/null; then + echo "OK ${setup}" + else + echo "FAIL ${setup} does not compile with Cabal-${test_version}" + fi + else + echo "OK ${setup} (does not compile with Cabal-${base_version})" + fi + else + echo "trivial ${setup}" + fi + +done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/download.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/download.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/download.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/download.sh 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +#!/bin/sh + +if test !
-f archive/archive.tar; then + + wget http://hackage.haskell.org/cgi-bin/hackage-scripts/archive.tar + mkdir -p archive + mv archive.tar archive/ + tar -C archive -xf archive/archive.tar + +fi + +if test ! -f archive/00-index.tar.gz; then + + wget http://hackage.haskell.org/packages/archive/00-index.tar.gz + mkdir -p archive + mv 00-index.tar.gz archive/ + tar -C archive -xzf archive/00-index.tar.gz + +fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/unpack.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/unpack.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/hackage/unpack.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/hackage/unpack.sh 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,16 @@ +#!/bin/sh + +for tarball in archive/*/*/*.tar.gz; do + + pkgdir=$(dirname ${tarball}) + pkgname=$(basename ${tarball} .tar.gz) + + if tar -tzf ${tarball} ${pkgname}/Setup.hs 2> /dev/null; then + tar -xzf ${tarball} ${pkgname}/Setup.hs -O > ${pkgdir}/Setup.hs + elif tar -tzf ${tarball} ${pkgname}/Setup.lhs 2> /dev/null; then + tar -xzf ${tarball} ${pkgname}/Setup.lhs -O > ${pkgdir}/Setup.lhs + else + echo "${pkgname} has no Setup.hs or .lhs at all!!?!" + fi + +done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/misc/ghc-supported-languages.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/misc/ghc-supported-languages.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/misc/ghc-supported-languages.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/misc/ghc-supported-languages.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,97 @@ +-- | A test program to check that ghc has got all of its extensions registered +-- +module Main where + +import Language.Haskell.Extension +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity + +import Data.List ((\\)) +import Data.Maybe +import Control.Applicative +import Control.Monad +import System.Environment +import System.Exit + +-- | A list of GHC extensions that are deliberately not registered, +-- e.g. due to being experimental and not ready for public consumption +-- +exceptions = map readExtension [] + +checkProblems :: [Extension] -> [String] +checkProblems implemented = + + let unregistered = + [ ext | ext <- implemented -- extensions that ghc knows about + , not (registered ext) -- but that are not registered + , ext `notElem` exceptions ] -- except for the exceptions + + -- check if someone has forgotten to update the exceptions list... + + -- exceptions that are not implemented + badExceptions = exceptions \\ implemented + + -- exceptions that are now registered + badExceptions' = filter registered exceptions + + in catMaybes + [ check unregistered $ unlines + [ "The following extensions are known to GHC but are not in the " + , "extension registry in Language.Haskell.Extension." + , " " ++ intercalate "\n " (map display unregistered) + , "If these extensions are ready for public consumption then they " + , "should be registered. If they are still experimental and you " + , "think they are not ready to be registered then please add them " + , "to the exceptions list in this test program along with an " + , "explanation." 
+ ] + , check badExceptions $ unlines + [ "Error in the extension exception list. The following extensions" + , "are listed as exceptions but are not even implemented by GHC:" + , " " ++ intercalate "\n " (map display badExceptions) + , "Please fix this test program by correcting the list of" + , "exceptions." + ] + , check badExceptions' $ unlines + [ "Error in the extension exception list. The following extensions" + , "are listed as exceptions to registration but they are in fact" + , "now registered in Language.Haskell.Extension:" + , " " ++ intercalate "\n " (map display badExceptions') + , "Please fix this test program by correcting the list of" + , "exceptions." + ] + ] + where + registered (UnknownExtension _) = False + registered _ = True + + check [] _ = Nothing + check _ i = Just i + + +main = topHandler $ do + [ghcPath] <- getArgs + exts <- getExtensions ghcPath + let problems = checkProblems exts + putStrLn (intercalate "\n" problems) + if null problems + then exitSuccess + else exitFailure + +getExtensions :: FilePath -> IO [Extension] +getExtensions ghcPath = + map readExtension . lines + <$> rawSystemStdout normal ghcPath ["--supported-languages"] + +readExtension :: String -> Extension +readExtension str = handleNoParse $ do + -- GHC defines extensions in a positive way, Cabal defines them + -- relative to H98 so we try parsing ("No" ++ extName) first + ext <- simpleParse ("No" ++ str) + case ext of + UnknownExtension _ -> simpleParse str + _ -> return ext + where + handleNoParse :: Maybe Extension -> Extension + handleNoParse = fromMaybe (error $ "unparsable extension " ++ show str) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/AllowNewer.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/AllowNewer.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/AllowNewer.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/AllowNewer.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,25 @@ +name: AllowNewer +version: 0.1.0.0 +license: BSD3 +author: Foo Bar +maintainer: cabal-dev@haskell.org +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: Foo + hs-source-dirs: src + build-depends: base < 1 + default-language: Haskell2010 + +test-suite foo-test + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: tests + build-depends: base < 1 + +benchmark foo-bench + type: exitcode-stdio-1.0 + main-is: Bench.hs + hs-source-dirs: benchmarks + build-depends: base < 1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/benchmarks/Bench.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/src/Foo.hs 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/src/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/src/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/src/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/tests/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/tests/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/tests/Test.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/AllowNewer/tests/Test.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +!module Main where + +main :: IO () +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/benchmarks/bench-Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module Main where + +import Foo +import System.Exit + +main :: IO () +main | fooTest [] = exitSuccess + | otherwise = exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +module Foo where + +fooTest :: [String] -> Bool +fooTest _ = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkExeV10/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +name: my +version: 0.1 +license: BSD3 +cabal-version: >= 1.9.2 +build-type: Simple + +library + exposed-modules: Foo + build-depends: base + +benchmark bench-Foo + type: exitcode-stdio-1.0 + hs-source-dirs: benchmarks + main-is: bench-Foo.hs + build-depends: base, my diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/BenchmarkOptions.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: BenchmarkOptions +version: 0.1 +license: BSD3 +author: Johan Tibell +stability: stable +category: PackageTests +build-type: Simple +cabal-version: >= 1.9.2 + +description: + Check that Cabal passes the correct test options to test suites. + +executable dummy + main-is: test-BenchmarkOptions.hs + build-depends: base + +benchmark test-BenchmarkOptions + main-is: test-BenchmarkOptions.hs + type: exitcode-stdio-1.0 + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkOptions/test-BenchmarkOptions.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +module Main where + +import System.Environment ( getArgs ) +import System.Exit ( exitFailure, exitSuccess ) + +main :: IO () +main = do + args <- getArgs + if args == ["1", "2", "3"] + then exitSuccess + else putStrLn ("Got: " ++ show args) >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/Check.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,30 @@ +module PackageTests.BenchmarkStanza.Check where + +import PackageTests.PackageTester + +import Distribution.Version +import Distribution.Simple.LocalBuildInfo +import Distribution.Package +import Distribution.PackageDescription + +suite :: TestM () +suite = do + assertOutputDoesNotContain "unknown section type" + =<< cabal' "configure" [] + dist_dir <- distDir + lbi <- liftIO $ getPersistBuildConfig dist_dir + let anticipatedBenchmark = emptyBenchmark + { benchmarkName = "dummy" + , benchmarkInterface = BenchmarkExeV10 (Version [1,0] []) + "dummy.hs" + , benchmarkBuildInfo = emptyBuildInfo + { targetBuildDepends = + [ Dependency (PackageName "base") anyVersion ] + , hsSourceDirs = ["."] + } + , benchmarkEnabled = False + } + gotBenchmark = head $ benchmarks (localPkgDescr lbi) + assertEqual "parsed benchmark stanza does not match anticipated" + anticipatedBenchmark gotBenchmark + return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BenchmarkStanza/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +name: BenchmarkStanza +version: 0.1 +license: BSD3 +author: Johan Tibell +stability: stable +category: PackageTests +build-type: Simple + +description: + Check that Cabal recognizes the benchmark stanza defined below. + +Library + exposed-modules: MyLibrary + build-depends: base + +benchmark dummy + main-is: dummy.hs + type: exitcode-stdio-1.0 + build-depends: base \ No newline at end of file diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/BuildableField.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/BuildableField.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/BuildableField.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/BuildableField.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,16 @@ +name: BuildableField +version: 0.1.0.0 +cabal-version: >=1.2 +build-type: Simple +license: BSD3 + +flag build-exe + default: True + +library + +executable my-executable + build-depends: base, unavailable-package + main-is: Main.hs + if !flag(build-exe) + buildable: False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildableField/Main.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +import UnavailableModule + +main :: IO () +main = putStrLn "Hello" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/GlobalBuildDepsNotAdditive1.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: GlobalBuildDepsNotAdditive1 +version: 0.1 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + If you specify 'base' in the global build dependencies, then define + a library without base, it fails to find 'base' for the library. 
+ +--------------------------------------- + +build-depends: base + +Library + exposed-modules: MyLibrary + build-depends: bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive1/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/GlobalBuildDepsNotAdditive2.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: GlobalBuildDepsNotAdditive1 +version: 0.1 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + If you specify 'base' in the global build dependencies, then define + an executable without base, it fails to find 'base' for the executable + +--------------------------------------- + +build-depends: base + +Executable lemon + main-is: lemon.hs + build-depends: bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/GlobalBuildDepsNotAdditive2/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +main = do + putStrLn (render (text "foo")) + let text = "lemon" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,24 @@ +name: InternalLibrary0 +version: 0.1 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + Check that with 'cabal-version:' containing versions less than 1.7, we do *not* + have the new behaviour to allow executables to refer to the library defined + in the same module. + +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + hs-source-dirs: programs + build-depends: base, bytestring, pretty, InternalLibrary0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary0/programs/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +import Text.PrettyPrint +import MyLibrary + +main = do + putStrLn (render (text "foo")) + myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,23 @@ +name: InternalLibrary1 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + Check for the new (in >= 1.7.1) ability to allow executables to refer to + the library defined in the same module. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + hs-source-dirs: programs + build-depends: base, bytestring, pretty, InternalLibrary1 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary1/programs/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +import Text.PrettyPrint +import MyLibrary + +main = do + putStrLn (render (text "foo")) + myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,23 @@ +name: InternalLibrary2 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that the internal library is preferred by ghc to + an installed one of the same name and version. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + hs-source-dirs: programs + build-depends: base, bytestring, pretty, InternalLibrary2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc internal" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/programs/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +import Text.PrettyPrint +import MyLibrary + +main = do + putStrLn (render (text "foo")) + myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,18 @@ +name: InternalLibrary2 +version: 0.1 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that the internal library is preferred by ghc to + an installed one of the same name and version. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary2/to-install/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc installed" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,23 @@ +name: InternalLibrary3 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that the internal library is preferred by ghc to + an installed one of the same name, but a *newer* version. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + hs-source-dirs: programs + build-depends: base, bytestring, pretty, InternalLibrary3 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc internal" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/programs/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +import Text.PrettyPrint +import MyLibrary + +main = do + putStrLn (render (text "foo")) + myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,18 @@ +name: InternalLibrary3 +version: 0.2 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that the internal library is preferred by ghc to + an installed one of the same name but a *newer* version. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary3/to-install/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc installed" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,23 @@ +name: InternalLibrary4 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that we can explicitly say we want InternalLibrary4-0.2 + and it will give us the *installed* version 0.2 instead of the internal 0.1. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + hs-source-dirs: programs + build-depends: base, bytestring, pretty, InternalLibrary4 >= 0.2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc internal" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/programs/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +import Text.PrettyPrint +import MyLibrary + +main = do + putStrLn (render (text "foo")) + myLibFunc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,18 @@ +name: InternalLibrary4 +version: 0.2 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + This test is to make sure that the internal library is preferred by ghc to + an installed one of the same name but a *newer* version. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/InternalLibrary4/to-install/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc installed" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +main = do + putStrLn (render (text "foo")) + let text = "lemon" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/pineapple.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +main = do + putStrLn (render (text "foo")) + let text = "pineapple" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/SameDepsAllRound/SameDepsAllRound.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,31 @@ +name: SameDepsAllRound +version: 0.1 +license: BSD3 +cabal-version: >= 1.6 +author: Stephen Blackheath +stability: stable +synopsis: Same dependencies all round +category: PackageTests +build-type: Simple + +description: + Check for the "old build-dep behaviour" namely that we get the same + package dependencies on all build targets, even if different ones + were specified for different targets + . + Here all .hs files use the three packages mentioned, so this shows + that build-depends is not target-specific. This is the behaviour + we want when cabal-version contains versions less than 1.7. + +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring + +Executable lemon + main-is: lemon.hs + build-depends: pretty + +Executable pineapple + main-is: pineapple.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +main = do + putStrLn (render (text "foo")) + let text = "lemon" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,22 @@ +name: TargetSpecificDeps1 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + Check for the new build-dep behaviour, where build-depends are + handled specifically for each target + +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring + +Executable lemon + main-is: lemon.hs + build-depends: base, bytestring, pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps1/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,5 @@ +import qualified Data.ByteString.Char8 as C + +main = do + let text = "lemon" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,24 @@ +name: TargetSpecificDeps1 +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + Check for the new build-dep behaviour, where build-depends are + handled specifically for each target + This one is a control against TargetSpecificDeps1 - it is correct and should + succeed. 
+ +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + build-depends: base, bytestring diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps2/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/lemon.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +main = do + putStrLn (render (text "foo")) + let text = "lemon" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,22 @@ +name: test +version: 0.1 +license: BSD3 +cabal-version: >= 1.7.1 +author: Stephen Blackheath +stability: stable +category: PackageTests +build-type: Simple + +description: + Check for the new build-dep behaviour, where build-depends are + handled specifically for each target + +--------------------------------------- + +Library + exposed-modules: MyLibrary + build-depends: base, bytestring, pretty + +Executable lemon + main-is: lemon.hs + build-depends: base, bytestring diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildDeps/TargetSpecificDeps3/MyLibrary.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ 
+module MyLibrary where + +import qualified Data.ByteString.Char8 as C +import Text.PrettyPrint + +myLibFunc :: IO () +myLibFunc = do + putStrLn (render (text "foo")) + let text = "myLibFunc" + C.putStrLn $ C.pack text diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/BuildTargetErrors.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/BuildTargetErrors.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/BuildTargetErrors.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/BuildTargetErrors.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +name: BuildTargetErrors +version: 1.0 +build-type: Simple +cabal-version: >= 1.9 + +library + +executable not-buildable-exe + main-is: Main.hs + buildable: False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTargetErrors/Main.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1 @@ +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/BuildTestSuiteDetailedV09/Dummy2.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +module Dummy2 where + +import Distribution.TestSuite (Test) + +tests :: IO [Test] +tests = return [] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/Bar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/Bar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/Bar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/Bar.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,7 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +module Bar where + +bar :: IO () +bar = return () + +foreign export ccall bar :: IO () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/foo.c cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/foo.c --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/foo.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/foo.c 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,13 @@ +#include +#include "HsFFI.h" + +#ifdef __GLASGOW_HASKELL__ +#include "Bar_stub.h" +#endif + +int main(int argc, 
char **argv) { + hs_init(&argc, &argv); + bar(); + printf("Hello world!"); + return 0; +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/CMain/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/CMain/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,10 @@ +name: my +version: 0.1 +license: BSD3 +cabal-version: >= 1.9.2 +build-type: Simple + +executable foo + main-is: foo.c + other-modules: Bar + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Check.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,85 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} + +module PackageTests.DeterministicAr.Check where + +import Control.Monad +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BS8 +import Data.Char (isSpace) +import PackageTests.PackageTester +import System.IO + +import Distribution.Compiler (CompilerFlavor(..), CompilerId(..)) +import Distribution.Package (getHSLibraryName) +import Distribution.Version (Version(..)) +import Distribution.Simple.Compiler (compilerId) +import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, compiler, localUnitId) + +suite :: TestM () +suite = do + cabal_build [] + dist_dir <- distDir + lbi <- liftIO $ getPersistBuildConfig dist_dir + liftIO $ checkMetadata lbi (dist_dir </> "build") + +-- Almost a copypasta of Distribution.Simple.Program.Ar.wipeMetadata +checkMetadata :: LocalBuildInfo -> FilePath -> IO () +checkMetadata lbi dir = withBinaryFile path ReadMode $ \ h -> do + hFileSize h >>= checkArchive h + where + path = dir </> "lib" ++ getHSLibraryName (localUnitId lbi) ++ ".a" + + _ghc_7_10 = case compilerId (compiler lbi) of + CompilerId GHC version | version >= Version [7, 10] [] -> True + _ -> False + + checkError msg = assertFailure ( + "PackageTests.DeterministicAr.checkMetadata: " ++ msg ++ + " in " ++ path) >> undefined + archLF = "!<arch>\x0a" -- global magic, 8 bytes + x60LF = "\x60\x0a" -- header magic, 2 bytes + metadata = BS.concat + [ "0 " -- mtime, 12 bytes + , "0 " -- UID, 6 bytes + , "0 " -- GID, 6 bytes + , "0644 " -- mode, 8 bytes + ] + headerSize = 60 + + -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details + checkArchive :: Handle -> Integer -> IO () + checkArchive h archiveSize = do + global <- BS.hGet h (BS.length archLF) + unless (global == archLF) $ checkError "Bad global header" + checkHeader (toInteger $ BS.length archLF) + + where + checkHeader :: Integer -> IO () + checkHeader offset = case compare offset archiveSize of + EQ -> return () + GT -> checkError (atOffset "Archive truncated") + LT -> do + header <- BS.hGet h headerSize + unless (BS.length header == headerSize) $ + checkError (atOffset "Short header") + let magic = BS.drop 58 header + unless (magic == x60LF) . checkError . 
atOffset $ + "Bad magic " ++ show magic ++ " in header" + + unless (metadata == BS.take 32 (BS.drop 16 header)) + . checkError . atOffset $ "Metadata has changed" + + let size = BS.take 10 $ BS.drop 48 header + objSize <- case reads (BS8.unpack size) of + [(n, s)] | all isSpace s -> return n + _ -> checkError (atOffset "Bad file size in header") + + let nextHeader = offset + toInteger headerSize + + -- Odd objects are padded with an extra '\x0a' + if odd objSize then objSize + 1 else objSize + hSeek h AbsoluteSeek nextHeader + checkHeader nextHeader + + where + atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Lib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/Lib.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,5 @@ +module Lib where + +dummy :: IO () +dummy = return () + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DeterministicAr/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,17 @@ +name: DeterministicAr +version: 0 +license: BSD3 +cabal-version: >= 1.9.1 +author: Liyang HU +stability: stable +category: PackageTests +build-type: Simple + +description: + Ensure our GNU ar -D emulation (#1537) works as advertised: check that + all metadata in the resulting .a archive match the default. + +Library + exposed-modules: Lib + build-depends: base + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/DuplicateModuleName.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,25 @@ +name: DuplicateModuleName +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: Foo + hs-source-dirs: src + build-depends: base, Cabal + default-language: Haskell2010 + +test-suite foo + type: detailed-0.9 + test-module: Foo + hs-source-dirs: tests + build-depends: base, Cabal, DuplicateModuleName + +test-suite foo2 + type: detailed-0.9 + test-module: Foo + hs-source-dirs: tests2 + build-depends: base, Cabal, DuplicateModuleName diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/src/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,12 @@ +module Foo where + +import Distribution.TestSuite + +tests :: IO [Test] +tests = return [Test $ TestInstance + { run = return (Finished (Fail "A")) + , name = "test A" + , tags = [] + , options = [] + , setOption = \_ _-> Left "No Options" + }] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,18 @@ +{-# LANGUAGE PackageImports #-} +module Foo where + +import Distribution.TestSuite +import qualified "DuplicateModuleName" Foo as T + +tests :: IO [Test] +tests = do + r <- T.tests + return $ [Test $ TestInstance + { run = return (Finished (Fail "B")) + , name = "test B" + , tags = [] + , options = [] + , setOption = \_ _-> Left "No Options" + }] ++ r + +this_is_test = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/DuplicateModuleName/tests2/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,18 @@ +{-# LANGUAGE PackageImports #-} +module Foo where + +import Distribution.TestSuite +import qualified "DuplicateModuleName" Foo as T + +tests :: IO [Test] +tests = do + r <- T.tests + return $ [Test $ TestInstance + { run = return (Finished (Fail "C")) + , name = "test C" + , tags = [] + , options = [] + , setOption = \_ _-> Left "No Options" + }] ++ r + +this_is_test2 = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/EmptyLib/empty/empty.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/EmptyLib/empty/empty.cabal --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/EmptyLib/empty/empty.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/EmptyLib/empty/empty.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +name: emptyLib +Cabal-version: >= 1.2 +version: 1.0 +build-type: Simple + +Library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/ghc-pkg 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectory/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/ghc-pkg-ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryGhcVersion/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/ghc-pkg-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SameDirectoryVersion/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/bin/ghc-pkg 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/Symlink/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/bin/ghc-pkg-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$WITH_GHC" ]; then + echo "Need to set WITH_GHC" + exit 1 +fi +exec "$WITH_GHC" "$@" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/bin/ghc-pkg-ghc-7.10 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +#!/bin/sh +echo "GHC package manager version 9999999" +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/GhcPkgGuess/SymlinkVersion/SameDirectory.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: SameDirectory +version: 0.1.0.0 +license: BSD3 +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/CPP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/CPP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/CPP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/CPP.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,9 @@ +{-# LANGUAGE CPP #-} + +module CPP where + +#define HIDING hiding +#define NEEDLES needles + +-- | For HIDING NEEDLES. 
+data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Literate.lhs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Literate.lhs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Literate.lhs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Literate.lhs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +> module Literate where + +> -- | For hiding needles. +> data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,16 @@ +name: Haddock +version: 0.1 +license: BSD3 +author: Iain Nicol +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that Cabal successfully invokes Haddock. + +Library + exposed-modules: CPP, Literate, NoCPP, Simple + other-extensions: CPP + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/NoCPP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/NoCPP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/NoCPP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/NoCPP.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module NoCPP (Haystack) where + +-- | For hiding needles. +data Haystack = Haystack + +-- | Causes a build failure if the CPP language extension is enabled. +stringGap = "Foo\ +\Bar" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Simple.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Simple.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Simple.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Haddock/Simple.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +module Simple where + +-- | For hiding needles. 
+data Haystack = Haystack diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/A.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/A.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/A.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/A.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1 @@ +module A where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/HaddockNewline.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: HaddockNewline +version: 0.1.0.0 +synopsis: This has a + newline yo. +-- description: +license: BSD3 +license-file: LICENSE +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +extra-source-files: ChangeLog.md +cabal-version: >=1.10 + +library + exposed-modules: A + -- other-modules: + -- other-extensions: + build-depends: base + -- hs-source-dirs: + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/HaddockNewline/Setup.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/multInst/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/multInst/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/multInst/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/multInst/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,16 @@ +name: Haddock +version: 0.1 +license: BSD3 +author: Iain Nicol +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that Cabal successfully invokes Haddock. 
+ +Library + exposed-modules: CPP, Literate, NoCPP, Simple + other-extensions: CPP + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Options.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Options.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Options.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Options.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +{-# LANGUAGE DeriveDataTypeable #-} + +module PackageTests.Options + ( OptionEnableAllTests(..) + ) where + +import Data.Typeable (Typeable) + +import Test.Tasty.Options (IsOption(..), flagCLParser, safeRead) + +newtype OptionEnableAllTests = OptionEnableAllTests Bool + deriving Typeable + +instance IsOption OptionEnableAllTests where + defaultValue = OptionEnableAllTests False + parseValue = fmap OptionEnableAllTests . safeRead + optionName = return "enable-all-tests" + optionHelp = return "Enable all tests" + optionCLParser = flagCLParser Nothing (OptionEnableAllTests True) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module Foo where + +x :: IO Int +x = return 5 + +f :: IO Int +f = do x + return 3 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/OrderFlags/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: OrderFlags +version: 0.1 +license: BSD3 +author: Oleksandr Manzyuk +stability: stable +category: PackageTests +build-type: Simple +cabal-version: >=1.9.2 + +description: + Check that Cabal correctly orders flags that are passed to GHC. + +library + exposed-modules: Foo + build-depends: base + + ghc-options: -Wall -Werror + + if impl(ghc >= 6.12.1) + ghc-options: -fno-warn-unused-do-bind diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PackageTester.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PackageTester.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PackageTester.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PackageTester.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,693 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NondecreasingIndentation #-} +{-# LANGUAGE CPP #-} + +module PackageTests.PackageTester + ( PackageSpec + , SuiteConfig(..) + , TestConfig(..) + , Result(..) 
+ , TestM + , runTestM + + -- * Paths + , packageDir + , distDir + , relativeDistDir + , sharedDBPath + , getWithGhcPath + + -- * Running cabal commands + , cabal + , cabal' + , cabal_build + , cabal_install + , ghcPkg + , ghcPkg' + , compileSetup + , run + , runExe + , runExe' + , rawRun + , rawCompileSetup + , withPackage + , withEnv + , withPackageDb + + -- * Polymorphic versions of HUnit functions + , assertFailure + , assertEqual + , assertBool + , shouldExist + , shouldNotExist + + -- * Test helpers + , shouldFail + , whenGhcVersion + , assertOutputContains + , assertOutputDoesNotContain + , assertFindInFile + , concatOutput + , withSymlink + + -- * Test trees + , TestTreeM + , runTestTree + , testTree + , testTree' + , groupTests + , mapTestTrees + , testWhen + , testUnless + , unlessWindows + , hasSharedLibraries + + , getPersistBuildConfig + + -- Common utilities + , module System.FilePath + , module Data.List + , module Control.Monad.IO.Class + , module Text.Regex.Posix + ) where + +import PackageTests.Options + +import Distribution.Compat.CreatePipe (createPipe) +import Distribution.Simple.Compiler (PackageDBStack, PackageDB(..)) +import Distribution.Simple.Program.Run (getEffectiveEnvironment) +import Distribution.System (OS(Windows), buildOS) +import Distribution.Simple.Utils + ( printRawCommandAndArgsAndEnv, withFileContents ) +import Distribution.Simple.Configure + ( getPersistBuildConfig ) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.BuildPaths (exeExtension) + +#ifndef CURRENT_COMPONENT_ID +import Distribution.Simple.Utils (cabalVersion) +import Distribution.Text (display) +#endif + +import qualified Test.Tasty.HUnit as HUnit +import Text.Regex.Posix + +import qualified Control.Exception as E +import Control.Monad +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Reader +import Control.Monad.Trans.Writer +import Control.Monad.IO.Class +import qualified Data.ByteString.Char8 as C +import Data.List +import Data.Version +import System.Directory + ( doesFileExist, canonicalizePath, createDirectoryIfMissing + , removeDirectoryRecursive, getPermissions, setPermissions + , setOwnerExecutable ) +import System.Exit +import System.FilePath +import System.IO +import System.IO.Error (isDoesNotExistError) +import System.Process (runProcess, waitForProcess, showCommandForUser) +import Test.Tasty (TestTree, askOption, testGroup) + +#ifndef mingw32_HOST_OS +import Control.Monad.Catch ( bracket_ ) +import System.Directory ( removeFile ) +import System.Posix.Files ( createSymbolicLink ) +#endif + +-- | Our test monad maintains an environment recording the global test +-- suite configuration 'SuiteConfig', and the local per-test +-- configuration 'TestConfig'. +type TestM = ReaderT (SuiteConfig, TestConfig) IO + +-- | Run a test in the test monad. +runTestM :: SuiteConfig -> FilePath -> Maybe String -> TestM a -> IO () +runTestM suite name subname m = do + let test = TestConfig { + testMainName = name, + testSubName = subname, + testShouldFail = False, + testCurrentPackage = ".", + testPackageDb = False, + testEnvironment = [] + } + void (runReaderT (cleanup >> m) (suite, test)) + where + -- TODO: option not to clean up dist dirs; this should be + -- harmless! + cleanup = do + onlyIfExists . removeDirectoryRecursive =<< topDir + +-- | Run an IO action, and suppress a "does not exist" error. 
+onlyIfExists :: MonadIO m => IO () -> m () +onlyIfExists m = liftIO $ + E.catch m $ \(e :: IOError) -> + if isDoesNotExistError e + then return () + else E.throwIO e + +-- cleaning up: +-- cabal clean will clean up dist directory, but we also need to zap +-- Setup etc. +-- +-- Suggestion: just copy the files somewhere else! + +-- | Global configuration for the entire test suite. +data SuiteConfig = SuiteConfig + -- | Path to GHC that was used to compile Cabal library under test. + { ghcPath :: FilePath + -- | Version of GHC that compiled Cabal. + , ghcVersion :: Version + -- | Path to ghc-pkg corresponding to 'ghcPath'. + , ghcPkgPath :: FilePath + -- | Path to GHC that we should use to "./Setup --with-ghc" + , withGhcPath :: FilePath + -- | Version of GHC at 'withGhcPath'. + , withGhcVersion :: Version + -- | The build directory that was used to build Cabal (used + -- to compile Setup scripts.) + , cabalDistPref :: FilePath + -- | Configuration options you can use to make the Cabal + -- being tested visible (e.g. if you're using the test runner). + -- We don't add these by default because then you have to + -- link against Cabal which makes the build go longer. + , packageDBStack :: PackageDBStack + -- | How verbose should we be + , suiteVerbosity :: Verbosity + -- | The absolute current working directory + , absoluteCWD :: FilePath + } + +data TestConfig = TestConfig + -- | Test name, MUST be the directory the test packages live in + -- relative to tests/PackageTests + { testMainName :: FilePath + -- | Test sub-name, used to qualify dist/database directory to avoid + -- conflicts. + , testSubName :: Maybe String + -- | This gets modified sometimes + , testShouldFail :: Bool + -- | The "current" package, ala current directory + , testCurrentPackage :: PackageSpec + -- | Says if we've initialized the per-test package DB + , testPackageDb :: Bool + -- | Environment override + , testEnvironment :: [(String, Maybe String)] + } + +-- | A package that can be built. +type PackageSpec = FilePath + +------------------------------------------------------------------------ +-- * Directories + +simpleSetupPath :: TestM FilePath +simpleSetupPath = do + (suite, _) <- ask + return (absoluteCWD suite "tests/Setup") + +-- | The absolute path to the directory containing the files for +-- this tests; usually @Check.hs@ and any test packages. +testDir :: TestM FilePath +testDir = do + (suite, test) <- ask + return $ absoluteCWD suite "tests/PackageTests" testMainName test + +-- | The absolute path to the root of the package directory; it's +-- where the Cabal file lives. This is what you want the CWD of cabal +-- calls to be. +packageDir :: TestM FilePath +packageDir = do + (_, test) <- ask + test_dir <- testDir + return $ test_dir testCurrentPackage test + +-- | The absolute path to the directory containing all the +-- files for ALL tests associated with a test (respecting +-- subtests.) To clean, you ONLY need to delete this directory. +topDir :: TestM FilePath +topDir = do + test_dir <- testDir + (_, test) <- ask + return $ test_dir + case testSubName test of + Nothing -> "dist-test" + Just n -> "dist-test." ++ n + +prefixDir :: TestM FilePath +prefixDir = do + top_dir <- topDir + return $ top_dir "usr" + +-- | The absolute path to the build directory that should be used +-- for the current package in a test. 
+distDir :: TestM FilePath +distDir = do + top_dir <- topDir + (_, test) <- ask + return $ top_dir testCurrentPackage test "dist" + +definitelyMakeRelative :: FilePath -> FilePath -> FilePath +definitelyMakeRelative base0 path0 = + let go [] path = joinPath path + go base [] = joinPath (replicate (length base) "..") + go (".":xs) ys = go xs ys + go xs (".":ys) = go xs ys + go (x:xs) (y:ys) + | x == y = go xs ys + | otherwise = go (x:xs) [] go [] (y:ys) + in go (splitPath base0) (splitPath path0) + +-- hpc is stupid and doesn't understand absolute paths. +relativeDistDir :: TestM FilePath +relativeDistDir = do + dist_dir0 <- distDir + pkg_dir <- packageDir + return $ definitelyMakeRelative pkg_dir dist_dir0 + +-- | The absolute path to the shared package database that should +-- be used by all packages in this test. +sharedDBPath :: TestM FilePath +sharedDBPath = do + top_dir <- topDir + return $ top_dir "packagedb" + +getWithGhcPath :: TestM FilePath +getWithGhcPath = do + (suite, _) <- ask + return $ withGhcPath suite + +------------------------------------------------------------------------ +-- * Running cabal + +cabal :: String -> [String] -> TestM () +cabal cmd extraArgs0 = void (cabal' cmd extraArgs0) + +cabal' :: String -> [String] -> TestM Result +cabal' cmd extraArgs0 = do + (suite, test) <- ask + prefix_dir <- prefixDir + when ((cmd == "register" || cmd == "copy") && not (testPackageDb test)) $ + error "Cannot register/copy without using 'withPackageDb'" + let extraArgs1 = case cmd of + "configure" -> + -- If the package database is empty, setting --global + -- here will make us error loudly if we try to install + -- into a bad place. + [ "--global" + , "--with-ghc", withGhcPath suite + -- This improves precision but it increases the number + -- of flags one has to specify and I don't like that; + -- Cabal is going to configure it and usually figure + -- out the right location in any case. + -- , "--with-ghc-pkg", withGhcPkgPath suite + -- Would really like to do this, but we're not always + -- going to be building against sufficiently recent + -- Cabal which provides this macro. + -- , "--dependency=Cabal=" ++ THIS_PACKAGE_KEY + -- These flags make the test suite run faster + -- Can't do this unless we LD_LIBRARY_PATH correctly + -- , "--enable-executable-dynamic" + , "--disable-optimization" + -- Specify where we want our installed packages to go + , "--prefix=" ++ prefix_dir + ] -- Only add the LBI package stack if the GHC version + -- matches. + ++ (if withGhcPath suite == ghcPath suite + then packageDBParams (packageDBStack suite) + else []) + ++ extraArgs0 + -- This gives us MUCH better error messages + "build" -> "-v" : extraArgs0 + _ -> extraArgs0 + -- This is a horrible hack to make hpc work correctly + dist_dir <- relativeDistDir + let extraArgs = ["--distdir", dist_dir] ++ extraArgs1 + doCabal (cmd:extraArgs) + +-- | This abstracts the common pattern of configuring and then building. +cabal_build :: [String] -> TestM () +cabal_build args = do + cabal "configure" args + cabal "build" [] + return () + +-- | This abstracts the common pattern of "installing" a package. 
+cabal_install :: [String] -> TestM () +cabal_install args = do + cabal "configure" args + cabal "build" [] + cabal "copy" [] + cabal "register" [] + return () + +-- | Determines what Setup executable to run and runs it +doCabal :: [String] -- ^ extra arguments + -> TestM Result +doCabal cabalArgs = do + pkg_dir <- packageDir + customSetup <- liftIO $ doesFileExist (pkg_dir "Setup.hs") + if customSetup + then do + compileSetup + -- TODO make this less racey + let path = pkg_dir "Setup" + run (Just pkg_dir) path cabalArgs + else do + -- Use shared Setup executable (only for Simple build types). + path <- simpleSetupPath + run (Just pkg_dir) path cabalArgs + +packageDBParams :: PackageDBStack -> [String] +packageDBParams dbs = "--package-db=clear" + : map (("--package-db=" ++) . convert) dbs + where + convert :: PackageDB -> String + convert GlobalPackageDB = "global" + convert UserPackageDB = "user" + convert (SpecificPackageDB path) = path + +------------------------------------------------------------------------ +-- * Compiling setup scripts + +compileSetup :: TestM () +compileSetup = do + (suite, test) <- ask + pkg_path <- packageDir + liftIO $ rawCompileSetup (suiteVerbosity suite) suite (testEnvironment test) pkg_path + +rawCompileSetup :: Verbosity -> SuiteConfig -> [(String, Maybe String)] -> FilePath -> IO () +rawCompileSetup verbosity suite e path = do + -- NB: Use 'ghcPath', not 'withGhcPath', since we need to be able to + -- link against the Cabal library which was built with 'ghcPath'. + r <- rawRun verbosity (Just path) (ghcPath suite) e $ + [ "--make"] ++ + ghcPackageDBParams (ghcVersion suite) (packageDBStack suite) ++ + [ "-hide-all-packages" + , "-package base" +#ifdef CURRENT_COMPONENT_ID + -- This is best, but we don't necessarily have it + -- if we're bootstrapping with old Cabal. + , "-package-id " ++ CURRENT_COMPONENT_ID +#else + -- This mostly works, UNLESS you've installed a + -- version of Cabal with the SAME version number. + -- Then old GHCs will incorrectly select the installed + -- version (because it prefers the FIRST package it finds.) + -- It also semi-works to not specify "-hide-all-packages" + -- at all, except if there's a later version of Cabal + -- installed GHC will prefer that. 
+ , "-package Cabal-" ++ display cabalVersion +#endif + , "-O0" + , "Setup.hs" ] + unless (resultExitCode r == ExitSuccess) $ + error $ + "could not build shared Setup executable\n" ++ + " ran: " ++ resultCommand r ++ "\n" ++ + " output:\n" ++ resultOutput r ++ "\n\n" + +ghcPackageDBParams :: Version -> PackageDBStack -> [String] +ghcPackageDBParams ghc_version dbs + | ghc_version >= Version [7,6] [] + = "-clear-package-db" : map convert dbs + | otherwise + = concatMap convertLegacy dbs + where + convert :: PackageDB -> String + convert GlobalPackageDB = "-global-package-db" + convert UserPackageDB = "-user-package-db" + convert (SpecificPackageDB path) = "-package-db=" ++ path + + convertLegacy :: PackageDB -> [String] + convertLegacy (SpecificPackageDB path) = ["-package-conf=" ++ path] + convertLegacy _ = [] + +------------------------------------------------------------------------ +-- * Running ghc-pkg + +ghcPkg :: String -> [String] -> TestM () +ghcPkg cmd args = void (ghcPkg' cmd args) + +ghcPkg' :: String -> [String] -> TestM Result +ghcPkg' cmd args = do + db_path <- sharedDBPath + (config, test) <- ask + unless (testPackageDb test) $ + error "Must initialize package database using withPackageDb" + let db_stack = packageDBStack config ++ [SpecificPackageDB db_path] + extraArgs = ghcPkgPackageDBParams (ghcVersion config) db_stack + run Nothing (ghcPkgPath config) (cmd : extraArgs ++ args) + +ghcPkgPackageDBParams :: Version -> PackageDBStack -> [String] +ghcPkgPackageDBParams version dbs = concatMap convert dbs where + convert :: PackageDB -> [String] + -- Ignoring global/user is dodgy but there's no way good + -- way to give ghc-pkg the correct flags in this case. + convert GlobalPackageDB = [] + convert UserPackageDB = [] + convert (SpecificPackageDB path) + | version >= Version [7,6] [] + = ["--package-db=" ++ path] + | otherwise + = ["--package-conf=" ++ path] + +------------------------------------------------------------------------ +-- * Running other things + +-- | Run an executable that was produced by cabal. The @exe_name@ +-- is precisely the name of the executable section in the file. +runExe :: String -> [String] -> TestM () +runExe exe_name args = void (runExe' exe_name args) + +runExe' :: String -> [String] -> TestM Result +runExe' exe_name args = do + dist_dir <- distDir + let exe = dist_dir "build" exe_name exe_name + run Nothing exe args + +run :: Maybe FilePath -> String -> [String] -> TestM Result +run mb_cwd path args = do + verbosity <- getVerbosity + (_, test) <- ask + r <- liftIO $ rawRun verbosity mb_cwd path (testEnvironment test) args + record r + requireSuccess r + +rawRun :: Verbosity -> Maybe FilePath -> String -> [(String, Maybe String)] -> [String] -> IO Result +rawRun verbosity mb_cwd path envOverrides args = do + -- path is relative to the current directory; canonicalizePath makes it + -- absolute, so that runProcess will find it even when changing directory. 
+rawRun :: Verbosity -> Maybe FilePath -> String -> [(String, Maybe String)] -> [String] -> IO Result
+rawRun verbosity mb_cwd path envOverrides args = do
+    -- path is relative to the current directory; canonicalizePath makes it
+    -- absolute, so that runProcess will find it even when changing directory.
+    path' <- do pathExists <- doesFileExist path
+                canonicalizePath (if pathExists then path
+                                                else path <.> exeExtension)
+    menv <- getEffectiveEnvironment envOverrides
+
+    printRawCommandAndArgsAndEnv verbosity path' args menv
+    (readh, writeh) <- createPipe
+    pid <- runProcess path' args mb_cwd menv Nothing (Just writeh) (Just writeh)
+
+    out <- hGetContents readh
+    void $ E.evaluate (length out) -- force the output
+    hClose readh
+
+    -- wait for the program to terminate
+    exitcode <- waitForProcess pid
+    return Result {
+            resultExitCode = exitcode,
+            resultDirectory = mb_cwd,
+            resultCommand = showCommandForUser path' args,
+            resultOutput = out
+        }
+
+------------------------------------------------------------------------
+-- * Subprocess run results
+
+data Result = Result
+    { resultExitCode :: ExitCode
+    , resultDirectory :: Maybe FilePath
+    , resultCommand :: String
+    , resultOutput :: String
+    } deriving Show
+
+requireSuccess :: Result -> TestM Result
+requireSuccess r@Result { resultCommand = cmd
+                        , resultExitCode = exitCode
+                        , resultOutput = output } = do
+    (_, test) <- ask
+    when (exitCode /= ExitSuccess && not (testShouldFail test)) $
+        assertFailure $ "Command " ++ cmd ++ " failed.\n" ++
+        "Output:\n" ++ output ++ "\n"
+    when (exitCode == ExitSuccess && testShouldFail test) $
+        assertFailure $ "Command " ++ cmd ++ " succeeded.\n" ++
+        "Output:\n" ++ output ++ "\n"
+    return r
+
+record :: Result -> TestM ()
+record res = do
+    build_dir <- distDir
+    (suite, _) <- ask
+    liftIO $ createDirectoryIfMissing True build_dir
+    liftIO $ C.appendFile (build_dir </> "test.log")
+             (C.pack $ "+ " ++ resultCommand res ++ "\n"
+                        ++ resultOutput res ++ "\n\n")
+    let test_sh = build_dir </> "test.sh"
+    b <- liftIO $ doesFileExist test_sh
+    when (not b) . liftIO $ do
+        -- This is racey, but the file is not security sensitive.
+        C.appendFile test_sh
+            (C.pack $ "#!/bin/sh\nset -ev\n" ++
+                        "cd "++ show (absoluteCWD suite) ++"\n")
+        perms <- getPermissions test_sh
+        setPermissions test_sh (setOwnerExecutable True perms)
+
+    liftIO $ C.appendFile test_sh
+             (C.pack
+                (case resultDirectory res of
+                    Nothing -> resultCommand res
+                    Just d -> "(cd " ++ show d ++ " && " ++ resultCommand res ++ ")\n"))
+
+------------------------------------------------------------------------
+-- * Test helpers
+
+assertFailure :: MonadIO m => String -> m ()
+assertFailure = liftIO . HUnit.assertFailure
+
+assertEqual :: (Eq a, Show a, MonadIO m) => String -> a -> a -> m ()
+assertEqual s x y = liftIO $ HUnit.assertEqual s x y
+
+assertBool :: MonadIO m => String -> Bool -> m ()
+assertBool s x = liftIO $ HUnit.assertBool s x
+
+shouldExist :: MonadIO m => FilePath -> m ()
+shouldExist path = liftIO $ doesFileExist path >>= assertBool (path ++ " should exist")
+
+shouldNotExist :: MonadIO m => FilePath -> m ()
+shouldNotExist path =
+    liftIO $ doesFileExist path >>= assertBool (path ++ " should not exist") . not
+
+shouldFail :: TestM a -> TestM a
+shouldFail = withReaderT (\(suite, test) -> (suite, test { testShouldFail = not (testShouldFail test) }))
+
+whenGhcVersion :: (Version -> Bool) -> TestM () -> TestM ()
+whenGhcVersion p m = do
+    (suite, _) <- ask
+    when (p (ghcVersion suite)) m
+
+withPackage :: FilePath -> TestM a -> TestM a
+withPackage f = withReaderT (\(suite, test) -> (suite, test { testCurrentPackage = f }))
+
+-- TODO: Really should accumulate...
but I think to do this +-- properly we can't just append +withEnv :: [(String, Maybe String)] -> TestM a -> TestM a +withEnv e m = do + (_, test0) <- ask + when (not (null (testEnvironment test0))) + $ error "nested withEnv (not yet) supported" + withReaderT (\(suite, test) -> (suite, test { testEnvironment = e })) m + +withPackageDb :: TestM a -> TestM a +withPackageDb m = do + (_, test0) <- ask + db_path <- sharedDBPath + if testPackageDb test0 + then m + else withReaderT (\(suite, test) -> + (suite { packageDBStack + = packageDBStack suite + ++ [SpecificPackageDB db_path] }, + test { testPackageDb = True })) + $ do ghcPkg "init" [db_path] + m + +assertOutputContains :: MonadIO m => String -> Result -> m () +assertOutputContains needle result = + unless (needle `isInfixOf` (concatOutput output)) $ + assertFailure $ + " expected: " ++ needle ++ "\n" ++ + " in output: " ++ output ++ "" + where output = resultOutput result + +assertOutputDoesNotContain :: MonadIO m => String -> Result -> m () +assertOutputDoesNotContain needle result = + when (needle `isInfixOf` (concatOutput output)) $ + assertFailure $ + "unexpected: " ++ needle ++ + " in output: " ++ output + where output = resultOutput result + +assertFindInFile :: MonadIO m => String -> FilePath -> m () +assertFindInFile needle path = + liftIO $ withFileContents path + (\contents -> + unless (needle `isInfixOf` contents) + (assertFailure ("expected: " ++ needle ++ "\n" ++ + " in file: " ++ path))) + +-- | Replace line breaks with spaces, correctly handling "\r\n". +concatOutput :: String -> String +concatOutput = unwords . lines . filter ((/=) '\r') + +-- | Create a symlink for the duration of the provided action. If the symlink +-- already exists, it is deleted. Does not work on Windows. +withSymlink :: FilePath -> FilePath -> TestM a -> TestM a +#ifdef mingw32_HOST_OS +withSymlink _oldpath _newpath _act = + error "PackageTests.PackageTester.withSymlink: does not work on Windows!" +#else +withSymlink oldpath newpath act = do + symlinkExists <- liftIO $ doesFileExist newpath + when symlinkExists $ liftIO $ removeFile newpath + bracket_ (liftIO $ createSymbolicLink oldpath newpath) + (liftIO $ removeFile newpath) act +#endif + +------------------------------------------------------------------------ +-- * Test trees + +-- | Monad for creating test trees. The option --enable-all-tests determines +-- whether to filter tests with 'testWhen' and 'testUnless'. +type TestTreeM = WriterT [TestTree] (Reader OptionEnableAllTests) + +runTestTree :: String -> TestTreeM () -> TestTree +runTestTree name ts = askOption $ + testGroup name . runReader (execWriterT ts) + +testTree :: SuiteConfig -> String -> Maybe String -> TestM a -> TestTreeM () +testTree config name subname m = + testTree' $ HUnit.testCase name $ runTestM config name subname m + +testTree' :: TestTree -> TestTreeM () +testTree' tc = tell [tc] + +-- | Create a test group from the output of the given action. +groupTests :: String -> TestTreeM () -> TestTreeM () +groupTests name = censor (\ts -> [testGroup name ts]) + +-- | Apply a function to each top-level test tree. +mapTestTrees :: (TestTree -> TestTree) -> TestTreeM a -> TestTreeM a +mapTestTrees = censor . map + +testWhen :: Bool -> TestTreeM () -> TestTreeM () +testWhen c test = do + OptionEnableAllTests enableAll <- lift ask + when (enableAll || c) test + +testUnless :: Bool -> TestTreeM () -> TestTreeM () +testUnless = testWhen . 
not + +unlessWindows :: TestTreeM () -> TestTreeM () +unlessWindows = testUnless (buildOS == Windows) + +hasSharedLibraries :: SuiteConfig -> Bool +hasSharedLibraries config = + buildOS /= Windows || withGhcVersion config < Version [7,8] [] + +------------------------------------------------------------------------ +-- Verbosity + +getVerbosity :: TestM Verbosity +getVerbosity = fmap (suiteVerbosity . fst) ask diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/Main.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module Main where + +import Paths_PathsModule (getBinDir) + +main :: IO () +main = do + _ <- getBinDir + return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Executable/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,16 @@ +name: PathsModule +version: 0.1 +license: BSD3 +author: Johan Tibell +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that the generated paths module compiles. + +Executable TestPathsModule + main-is: Main.hs + other-modules: Paths_PathsModule + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Library/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Library/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Library/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PathsModule/Library/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +name: PathsModule +version: 0.1 +license: BSD3 +author: Johan Tibell +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that the generated paths module compiles. 
+ +Library + exposed-modules: Paths_PathsModule + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Foo.hsc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Foo.hsc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Foo.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Foo.hsc 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1 @@ +module Foo where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/Main.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +module Main where + +import Foo + +main :: IO () +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcess/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,32 @@ +name: PreProcess +version: 0.1 +license: BSD3 +author: Johan Tibell +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that preprocessors are run. 
+ +Library + exposed-modules: Foo + build-depends: base + +Executable my-executable + main-is: Main.hs + other-modules: Foo + build-depends: base + +Test-Suite my-test-suite + main-is: Main.hs + type: exitcode-stdio-1.0 + other-modules: Foo + build-depends: base + +Benchmark my-benchmark + main-is: Main.hs + type: exitcode-stdio-1.0 + other-modules: Foo + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Foo.hsc 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,9 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +module Foo where + +import Foreign.C.Types + +#def int incr(int x) { return x + 1; } + +foreign import ccall unsafe "Foo_hsc.h incr" + incr :: CInt -> CInt diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/Main.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module Main where + +import Foo + +main :: IO () +main = do + let x = incr 4 + return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/PreProcessExtraSources/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,32 @@ +name: PreProcessExtraSources +version: 0.1 +license: BSD3 +author: Ian Ross +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that preprocessors that generate extra C sources are handled. 
+ +Library + exposed-modules: Foo + build-depends: base + +Executable my-executable + main-is: Main.hs + other-modules: Foo + build-depends: base + +Test-Suite my-test-suite + main-is: Main.hs + type: exitcode-stdio-1.0 + other-modules: Foo + build-depends: base + +Benchmark my-benchmark + main-is: Main.hs + type: exitcode-stdio-1.0 + other-modules: Foo + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/ReexportedModules/ReexportedModules.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +name: ReexportedModules +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.21 + +library + build-depends: base, containers + reexported-modules: containers:Data.Map as DataMap diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Exe.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Main where + +import TH + +main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/Lib.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Lib where + +import TH + +val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +Name: templateHaskell +Version: 0.1 +Build-Type: Simple +Cabal-Version: >= 1.2 + +Library + Exposed-Modules: Lib + Other-Modules: TH + Build-Depends: base, template-haskell + Extensions: TemplateHaskell + +Executable main + Main-is: Exe.hs + Build-Depends: base, template-haskell + Extensions: TemplateHaskell diff -Nru 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/dynamic/TH.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +{-# LANGUAGE TemplateHaskell #-} +module TH where + +splice = [| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Exe.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Main where + +import TH + +main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/Lib.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Lib where + +import TH + +val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +Name: templateHaskell +Version: 0.1 +Build-Type: Simple +Cabal-Version: >= 1.2 + +Library + Exposed-Modules: Lib + Other-Modules: TH + Build-Depends: base, template-haskell + Extensions: TemplateHaskell + +Executable main + Main-is: Exe.hs + Build-Depends: base, template-haskell + Extensions: TemplateHaskell diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/profiling/TH.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +{-# LANGUAGE TemplateHaskell #-} +module TH where + +splice = 
[| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Exe.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Main where + +import TH + +main = print $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module Lib where + +import TH + +val = $(splice) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +Name: templateHaskell +Version: 0.1 +Build-Type: Simple +Cabal-Version: >= 1.2 + +Library + Exposed-Modules: Lib + Other-Modules: TH + Build-Depends: base, template-haskell + Extensions: TemplateHaskell + +Executable main + Main-is: Exe.hs + Build-Depends: base, template-haskell + Extensions: TemplateHaskell diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TemplateHaskell/vanilla/TH.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +{-# LANGUAGE TemplateHaskell #-} +module TH where + +splice = [| () |] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/child.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/child.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/child.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/child.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +name: child +version: 0.1 +description: This 
defines the colliding detailed-0.9 test suite +license: BSD3 +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: Child + build-depends: base, parent + default-language: Haskell2010 + +test-suite parent + type: detailed-0.9 + test-module: Test + hs-source-dirs: tests + build-depends: base, Cabal, child diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/Child.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/Child.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/Child.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/Child.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,2 @@ +module Child where +import Parent diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/child/tests/Test.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,13 @@ +module Test where + +import Distribution.TestSuite +import Child + +tests :: IO [Test] +tests = return $ [Test $ TestInstance + { run = return (Finished Pass) + , name = "test" + , tags = [] + , options = [] + , setOption = \_ _-> Left "No Options" + }] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/parent.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/parent.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/parent.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/parent.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,13 @@ +name: parent +version: 0.1 +description: This package is what the test suite is going to collide with +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: Parent + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/Parent.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/Parent.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/Parent.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestNameCollision/parent/Parent.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1 @@ +module Parent where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/TestOptions.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/TestOptions.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/TestOptions.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/TestOptions.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,20 @@ +name: TestOptions +version: 0.1 +license: BSD3 +author: Thomas Tuegel +stability: stable +category: PackageTests +build-type: Simple +cabal-version: >= 1.9.2 + +description: + Check that Cabal passes the correct test options to test suites. + +executable dummy + main-is: test-TestOptions.hs + build-depends: base + +test-suite test-TestOptions + main-is: test-TestOptions.hs + type: exitcode-stdio-1.0 + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/test-TestOptions.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/test-TestOptions.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/test-TestOptions.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestOptions/test-TestOptions.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +module Main where + +import System.Environment ( getArgs ) +import System.Exit ( exitFailure, exitSuccess ) + +main :: IO () +main = do + args <- getArgs + if args == ["1", "2", "3"] + then exitSuccess + else putStrLn ("Got: " ++ show args) >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Tests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Tests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/Tests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/Tests.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,314 @@ +module PackageTests.Tests(tests) where + +import PackageTests.PackageTester + +import qualified PackageTests.BenchmarkStanza.Check +import qualified PackageTests.TestStanza.Check +import qualified PackageTests.DeterministicAr.Check +import qualified PackageTests.TestSuiteTests.ExeV10.Check + +import Control.Monad + +import Data.Version +import Test.Tasty (mkTimeout, localOption) +import Test.Tasty.HUnit 
(testCase)
+
+tests :: SuiteConfig -> TestTreeM ()
+tests config = do
+
+  ---------------------------------------------------------------------
+  -- * External tests
+
+  -- Test that Cabal parses 'benchmark' sections correctly
+  tc "BenchmarkStanza" PackageTests.BenchmarkStanza.Check.suite
+
+  -- Test that Cabal parses 'test' sections correctly
+  tc "TestStanza" PackageTests.TestStanza.Check.suite
+
+  -- Test that Cabal deterministically generates object archives
+  tc "DeterministicAr" PackageTests.DeterministicAr.Check.suite
+
+  ---------------------------------------------------------------------
+  -- * Test suite tests
+
+  groupTests "TestSuiteTests" $ do
+
+    -- Test exitcode-stdio-1.0 test suites (and HPC)
+    groupTests "ExeV10"
+      (PackageTests.TestSuiteTests.ExeV10.Check.tests config)
+
+    -- Test detailed-0.9 test suites
+    groupTests "LibV09" $
+      let
+        tcs :: FilePath -> TestM a -> TestTreeM ()
+        tcs name m
+          = testTree' $ testCase name (runTestM config
+              "TestSuiteTests/LibV09" (Just name) m)
+      in do
+        -- Test if detailed-0.9 builds correctly
+        tcs "Build" $ cabal_build ["--enable-tests"]
+
+        -- Tests for #2489, stdio deadlock
+        mapTestTrees (localOption (mkTimeout $ 10 ^ (8 :: Int)))
+          . tcs "Deadlock" $ do
+            cabal_build ["--enable-tests"]
+            shouldFail $ cabal "test" []
+
+  ---------------------------------------------------------------------
+  -- * Inline tests
+
+  -- Test if exitcode-stdio-1.0 benchmark builds correctly
+  tc "BenchmarkExeV10" $ cabal_build ["--enable-benchmarks"]
+
+  -- Test --benchmark-option(s) flags on ./Setup bench
+  tc "BenchmarkOptions" $ do
+    cabal_build ["--enable-benchmarks"]
+    cabal "bench" [ "--benchmark-options=1 2 3" ]
+    cabal "bench" [ "--benchmark-option=1"
+                  , "--benchmark-option=2"
+                  , "--benchmark-option=3"
+                  ]
+
+  -- Test --test-option(s) flags on ./Setup test
+  tc "TestOptions" $ do
+    cabal_build ["--enable-tests"]
+    cabal "test" ["--test-options=1 2 3"]
+    cabal "test" [ "--test-option=1"
+                 , "--test-option=2"
+                 , "--test-option=3"
+                 ]
+
+  -- Test attempt to have executable depend on internal
+  -- library, but cabal-version is too old.
+  tc "BuildDeps/InternalLibrary0" $ do
+    r <- shouldFail $ cabal' "configure" []
+    -- Should tell you how to enable the desired behavior
+    let sb = "library which is defined within the same package."
+    assertOutputContains sb r
+
+  -- Test executable depends on internal library.
+  tc "BuildDeps/InternalLibrary1" $ cabal_build []
+
+  -- Test that an internal library is preferred to an installed one
+  -- with the same name and version
+  tc "BuildDeps/InternalLibrary2" $ internal_lib_test "internal"
+
+  -- Test that an internal library is preferred to an installed one
+  -- with the same name and a LATER version
+  tc "BuildDeps/InternalLibrary3" $ internal_lib_test "internal"
+
+  -- Test that an explicit dependency constraint which doesn't
+  -- match the internal library causes us to use the external library
+  tc "BuildDeps/InternalLibrary4" $ internal_lib_test "installed"
+
+  -- Test "old build-dep behavior", where we should get the
+  -- same package dependencies on all targets if cabal-version
+  -- is sufficiently old.
+  tc "BuildDeps/SameDepsAllRound" $ cabal_build []
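An illustrative aside, not part of the upstream patch: every inline test above follows the same pattern, so adding one is mostly boilerplate. A sketch with made-up names (the test name, executable name, and expected output are hypothetical):

    tc "MyNewFeature" $ do
        cabal_build ["--enable-tests"]
        r <- runExe' "my-feature-exe" []
        assertOutputContains "expected marker" r

The matching sources would presumably live in a tests/PackageTests/MyNewFeature/ directory alongside the test packages added elsewhere in this patch.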
+ tc "BuildDeps/TargetSpecificDeps1" $ do + cabal "configure" [] + r <- shouldFail $ cabal' "build" [] + assertRegex "error should be in MyLibrary.hs" "^MyLibrary.hs:" r + assertRegex + "error should be \"Could not find module `Text\\.PrettyPrint\"" + "(Could not find module|Failed to load interface for).*Text\\.PrettyPrint" r + + -- This is a control on TargetSpecificDeps1; it should + -- succeed. + tc "BuildDeps/TargetSpecificDeps2" $ cabal_build [] + + -- Test "new build-dep behavior", where each target gets + -- separate dependencies. This tests that an library + -- dep does not leak into the executable. + tc "BuildDeps/TargetSpecificDeps3" $ do + cabal "configure" [] + r <- shouldFail $ cabal' "build" [] + assertRegex "error should be in lemon.hs" "^lemon.hs:" r + assertRegex + "error should be \"Could not find module `Text\\.PrettyPrint\"" + "(Could not find module|Failed to load interface for).*Text\\.PrettyPrint" r + + -- Test that Paths module is generated and available for executables. + tc "PathsModule/Executable" $ cabal_build [] + + -- Test that Paths module is generated and available for libraries. + tc "PathsModule/Library" $ cabal_build [] + + -- Check that preprocessors (hsc2hs) are run + tc "PreProcess" $ cabal_build ["--enable-tests", "--enable-benchmarks"] + + -- Check that preprocessors that generate extra C sources are handled + tc "PreProcessExtraSources" $ cabal_build ["--enable-tests", + "--enable-benchmarks"] + + -- Test building a vanilla library/executable which uses Template Haskell + tc "TemplateHaskell/vanilla" $ cabal_build [] + + -- Test building a profiled library/executable which uses Template Haskell + -- (Cabal has to build the non-profiled version first) + tc "TemplateHaskell/profiling" $ cabal_build ["--enable-library-profiling", + "--enable-profiling"] + + -- Test building a dynamic library/executable which uses Template + -- Haskell + testWhen (hasSharedLibraries config) $ + tc "TemplateHaskell/dynamic" $ cabal_build ["--enable-shared", + "--enable-executable-dynamic"] + + -- Test building an executable whose main() function is defined in a C + -- file + tc "CMain" $ cabal_build [] + + -- Test build when the library is empty, for #1241 + tc "EmptyLib" $ + withPackage "empty" $ cabal_build [] + + -- Test that "./Setup haddock" works correctly + tc "Haddock" $ do + dist_dir <- distDir + let haddocksDir = dist_dir "doc" "html" "Haddock" + cabal "configure" [] + cabal "haddock" [] + let docFiles + = map (haddocksDir ) + ["CPP.html", "Literate.html", "NoCPP.html", "Simple.html"] + mapM_ (assertFindInFile "For hiding needles.") docFiles + + -- Test that Haddock with a newline in synopsis works correctly, #3004 + tc "HaddockNewline" $ do + cabal "configure" [] + cabal "haddock" [] + + -- Test that Cabal properly orders GHC flags passed to GHC (when + -- there are multiple ghc-options fields.) + tc "OrderFlags" $ cabal_build [] + + -- Test that reexported modules build correctly + -- TODO: should also test that they import OK! + tc "ReexportedModules" $ do + whenGhcVersion (>= Version [7,9] []) $ cabal_build [] + + -- Test that Cabal computes different IPIDs when the source changes. + tc "UniqueIPID" . 
withPackageDb $ do + withPackage "P1" $ cabal "configure" [] + withPackage "P2" $ cabal "configure" [] + withPackage "P1" $ cabal "build" [] + withPackage "P1" $ cabal "build" [] -- rebuild should work + r1 <- withPackage "P1" $ cabal' "register" ["--print-ipid", "--inplace"] + withPackage "P2" $ cabal "build" [] + r2 <- withPackage "P2" $ cabal' "register" ["--print-ipid", "--inplace"] + let exIPID s = takeWhile (/= '\n') $ + head . filter (isPrefixOf $ "UniqueIPID-0.1-") $ (tails s) + when ((exIPID $ resultOutput r1) == (exIPID $ resultOutput r2)) $ + assertFailure $ "cabal has not calculated different Installed " ++ + "package ID when source is changed." + + tc "DuplicateModuleName" $ do + cabal_build ["--enable-tests"] + r1 <- shouldFail $ cabal' "test" ["foo"] + assertOutputContains "test B" r1 + assertOutputContains "test A" r1 + r2 <- shouldFail $ cabal' "test" ["foo2"] + assertOutputContains "test C" r2 + assertOutputContains "test A" r2 + + tc "TestNameCollision" $ do + withPackageDb $ do + withPackage "parent" $ cabal_install [] + withPackage "child" $ do + cabal_build ["--enable-tests"] + cabal "test" [] + + -- Test that '--allow-newer' works via the 'Setup.hs configure' interface. + tc "AllowNewer" $ do + shouldFail $ cabal "configure" [] + cabal "configure" ["--allow-newer"] + shouldFail $ cabal "configure" ["--allow-newer=baz,quux"] + cabal "configure" ["--allow-newer=base", "--allow-newer=baz,quux"] + cabal "configure" ["--allow-newer=bar", "--allow-newer=base,baz" + ,"--allow-newer=quux"] + shouldFail $ cabal "configure" ["--enable-tests"] + cabal "configure" ["--enable-tests", "--allow-newer"] + shouldFail $ cabal "configure" ["--enable-benchmarks"] + cabal "configure" ["--enable-benchmarks", "--allow-newer"] + shouldFail $ cabal "configure" ["--enable-benchmarks", "--enable-tests"] + cabal "configure" ["--enable-benchmarks", "--enable-tests" + ,"--allow-newer"] + shouldFail $ cabal "configure" ["--allow-newer=Foo:base"] + shouldFail $ cabal "configure" ["--allow-newer=Foo:base" + ,"--enable-tests", "--enable-benchmarks"] + cabal "configure" ["--allow-newer=AllowNewer:base"] + cabal "configure" ["--allow-newer=AllowNewer:base" + ,"--allow-newer=Foo:base"] + cabal "configure" ["--allow-newer=AllowNewer:base" + ,"--allow-newer=Foo:base" + ,"--enable-tests", "--enable-benchmarks"] + + -- Test that Cabal can choose flags to disable building a component when that + -- component's dependencies are unavailable. The build should succeed without + -- requiring the component's dependencies or imports. + tc "BuildableField" $ do + r <- cabal' "configure" ["-v"] + assertOutputContains "Flags chosen: build-exe=False" r + cabal "build" [] + + -- TODO: Enable these tests on Windows + unlessWindows $ do + tc "GhcPkgGuess/SameDirectory" $ ghc_pkg_guess "ghc" + tc "GhcPkgGuess/SameDirectoryVersion" $ ghc_pkg_guess "ghc-7.10" + tc "GhcPkgGuess/SameDirectoryGhcVersion" $ ghc_pkg_guess "ghc-7.10" + + unlessWindows $ do + tc "GhcPkgGuess/Symlink" $ do + -- We don't want to distribute a tarball with symlinks. See #3190. 
+ withSymlink "bin/ghc" + "tests/PackageTests/GhcPkgGuess/Symlink/ghc" $ + ghc_pkg_guess "ghc" + + tc "GhcPkgGuess/SymlinkVersion" $ do + withSymlink "bin/ghc-7.10" + "tests/PackageTests/GhcPkgGuess/SymlinkVersion/ghc" $ + ghc_pkg_guess "ghc" + + tc "GhcPkgGuess/SymlinkGhcVersion" $ do + withSymlink "bin/ghc-7.10" + "tests/PackageTests/GhcPkgGuess/SymlinkGhcVersion/ghc" $ + ghc_pkg_guess "ghc" + + -- Test error message we report when a non-buildable target is + -- requested to be built + tc "BuildTargetErrors" $ do + cabal "configure" [] + assertOutputContains "the component is marked as disabled" + =<< shouldFail (cabal' "build" ["not-buildable-exe"]) + + where + ghc_pkg_guess bin_name = do + cwd <- packageDir + with_ghc <- getWithGhcPath + r <- withEnv [("WITH_GHC", Just with_ghc)] + . shouldFail $ cabal' "configure" ["-w", cwd bin_name] + assertOutputContains "is version 9999999" r + return () + + -- Shared test function for BuildDeps/InternalLibrary* tests. + internal_lib_test expect = withPackageDb $ do + withPackage "to-install" $ cabal_install [] + cabal_build [] + r <- runExe' "lemon" [] + assertEqual + ("executable should have linked with the " ++ expect ++ " library") + ("foo foo myLibFunc " ++ expect) + (concatOutput (resultOutput r)) + + assertRegex :: String -> String -> Result -> TestM () + assertRegex msg regex r = let out = resultOutput r + in assertBool (msg ++ ",\nactual output:\n" ++ out) + (out =~ regex) + + tc :: FilePath -> TestM a -> TestTreeM () + tc name = testTree config name Nothing diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/Check.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,29 @@ +module PackageTests.TestStanza.Check where + +import PackageTests.PackageTester + +import Distribution.Version +import Distribution.Simple.LocalBuildInfo +import Distribution.Package +import Distribution.PackageDescription + +suite :: TestM () +suite = do + assertOutputDoesNotContain "unknown section type" + =<< cabal' "configure" [] + dist_dir <- distDir + lbi <- liftIO $ getPersistBuildConfig dist_dir + let anticipatedTestSuite = emptyTestSuite + { testName = "dummy" + , testInterface = TestSuiteExeV10 (Version [1,0] []) "dummy.hs" + , testBuildInfo = emptyBuildInfo + { targetBuildDepends = + [ Dependency (PackageName "base") anyVersion ] + , hsSourceDirs = ["."] + } + , testEnabled = False + } + gotTestSuite = head $ testSuites (localPkgDescr lbi) + assertEqual "parsed test-suite stanza does not match anticipated" + anticipatedTestSuite gotTestSuite + return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestStanza/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +name: TestStanza +version: 0.1 +license: BSD3 +author: Thomas 
Tuegel +stability: stable +category: PackageTests +build-type: Simple + +description: + Check that Cabal recognizes the Test stanza defined below. + +Library + exposed-modules: MyLibrary + build-depends: base + +test-suite dummy + main-is: dummy.hs + type: exitcode-stdio-1.0 + build-depends: base \ No newline at end of file diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Check.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,129 @@ +module PackageTests.TestSuiteTests.ExeV10.Check (tests) where + +import qualified Control.Exception as E (IOException, catch) +import Control.Monad (forM_, liftM4, when) +import Data.Maybe (catMaybes) +import System.FilePath +import Test.Tasty.HUnit (testCase) + +import Distribution.Compiler (CompilerFlavor(..), CompilerId(..)) +import Distribution.PackageDescription (package) +import Distribution.Simple.Compiler (compilerId) +import Distribution.Simple.LocalBuildInfo (compiler, localPkgDescr, localCompatPackageKey) +import Distribution.Simple.Hpc +import Distribution.Simple.Program.Builtin (hpcProgram) +import Distribution.Simple.Program.Db + ( emptyProgramDb, configureProgram, requireProgramVersion ) +import Distribution.Text (display) +import qualified Distribution.Verbosity as Verbosity +import Distribution.Version (Version(..), orLaterVersion) + +import PackageTests.PackageTester + +tests :: SuiteConfig -> TestTreeM () +tests config = do + -- TODO: hierarchy and subnaming is a little unfortunate + tc "Test" "Default" $ do + cabal_build ["--enable-tests"] + -- This one runs both tests, including the very LONG Foo + -- test which prints a lot of output + cabal "test" ["--show-details=direct"] + groupTests "WithHpc" $ hpcTestMatrix config + groupTests "WithoutHpc" $ do + -- Ensures that even if -fhpc is manually provided no .tix file is output. + tc "NoTix" "NoHpcNoTix" $ do + dist_dir <- distDir + cabal_build + [ "--enable-tests" + , "--ghc-option=-fhpc" + , "--ghc-option=-hpcdir" + , "--ghc-option=" ++ dist_dir ++ "/hpc/vanilla" ] + cabal "test" ["test-Short", "--show-details=direct"] + lbi <- liftIO $ getPersistBuildConfig dist_dir + let way = guessWay lbi + shouldNotExist $ tixFilePath dist_dir way "test-Short" + -- Ensures that even if a .tix file happens to be left around + -- markup isn't generated. 
+ tc "NoMarkup" "NoHpcNoMarkup" $ do + dist_dir <- distDir + let tixFile = tixFilePath dist_dir Vanilla "test-Short" + withEnv [("HPCTIXFILE", Just tixFile)] $ do + cabal_build + [ "--enable-tests" + , "--ghc-option=-fhpc" + , "--ghc-option=-hpcdir" + , "--ghc-option=" ++ dist_dir ++ "/hpc/vanilla" ] + cabal "test" ["test-Short", "--show-details=direct"] + shouldNotExist $ htmlDir dist_dir Vanilla "test-Short" "hpc_index.html" + where + tc :: String -> String -> TestM a -> TestTreeM () + tc name subname m + = testTree' $ testCase name + (runTestM config "TestSuiteTests/ExeV10" (Just subname) m) + +hpcTestMatrix :: SuiteConfig -> TestTreeM () +hpcTestMatrix config = forM_ (choose4 [True, False]) $ + \(libProf, exeProf, exeDyn, shared) -> do + let name | null suffixes = "Vanilla" + | otherwise = intercalate "-" suffixes + where + suffixes = catMaybes + [ if libProf then Just "LibProf" else Nothing + , if exeProf then Just "ExeProf" else Nothing + , if exeDyn then Just "ExeDyn" else Nothing + , if shared then Just "Shared" else Nothing + ] + opts = catMaybes + [ enable libProf "library-profiling" + , enable exeProf "profiling" + , enable exeDyn "executable-dynamic" + , enable shared "shared" + ] + where + enable cond flag + | cond = Just $ "--enable-" ++ flag + | otherwise = Nothing + -- Ensure that both .tix file and markup are generated if coverage + -- is enabled. + testUnless ((exeDyn || shared) && not (hasSharedLibraries config)) $ + tc name ("WithHpc-" ++ name) $ do + isCorrectVersion <- liftIO $ correctHpcVersion + when isCorrectVersion $ do + dist_dir <- distDir + cabal_build ("--enable-tests" : "--enable-coverage" : opts) + cabal "test" ["test-Short", "--show-details=direct"] + lbi <- liftIO $ getPersistBuildConfig dist_dir + let way = guessWay lbi + CompilerId comp version = compilerId (compiler lbi) + subdir + | comp == GHC && version >= Version [7, 10] [] = + localCompatPackageKey lbi + | otherwise = display (package $ localPkgDescr lbi) + mapM_ shouldExist + [ mixDir dist_dir way "my-0.1" subdir "Foo.mix" + , mixDir dist_dir way "test-Short" "Main.mix" + , tixFilePath dist_dir way "test-Short" + , htmlDir dist_dir way "test-Short" "hpc_index.html" + ] + where + tc :: String -> String -> TestM a -> TestTreeM () + tc name subname m + = testTree' $ testCase name + (runTestM config "TestSuiteTests/ExeV10" (Just subname) m) + + choose4 :: [a] -> [(a, a, a, a)] + choose4 xs = liftM4 (,,,) xs xs xs xs + +-- | Checks for a suitable HPC version for testing. +correctHpcVersion :: IO Bool +correctHpcVersion = do + let programDb' = emptyProgramDb + let verbosity = Verbosity.normal + let verRange = orLaterVersion (Version [0,7] []) + programDb <- configureProgram verbosity hpcProgram programDb' + (requireProgramVersion verbosity hpcProgram verRange programDb + >> return True) `catchIO` (\_ -> return False) + where + -- Distribution.Compat.Exception is hidden. 
+ catchIO :: IO a -> (E.IOException -> IO a) -> IO a + catchIO = E.catch diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,4 @@ +module Foo where + +fooTest :: [String] -> Bool +fooTest _ = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,21 @@ +name: my +version: 0.1 +license: BSD3 +cabal-version: >= 1.9.2 +build-type: Simple + +library + exposed-modules: Foo + build-depends: base + +test-suite test-Foo + type: exitcode-stdio-1.0 + hs-source-dirs: tests + main-is: test-Foo.hs + build-depends: base, my + +test-suite test-Short + type: exitcode-stdio-1.0 + hs-source-dirs: tests + main-is: test-Short.hs + build-depends: base, my diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Foo.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,12 @@ +module Main where + +import Foo +import System.Exit +import Control.Monad + +main :: IO () +main | fooTest [] = do + -- Make sure that the output buffer is drained + replicateM 10000 $ putStrLn "The quick brown fox jumps over the lazy dog" + exitSuccess + | otherwise = exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/ExeV10/tests/test-Short.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +module Main where + +import Foo +import System.Exit +import Control.Monad + +main :: IO () +main | fooTest [] = do + replicateM 5 $ putStrLn "The quick brown fox jumps over the lazy dog" + exitSuccess + | otherwise = exitFailure diff -Nru 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/Lib.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,11 @@ +module Lib where + +import Distribution.TestSuite + +nullt x = Test $ TestInstance + { run = return $ Finished (Fail "no reason") + , name = "test " ++ show x + , tags = [] + , options = [] + , setOption = \_ _-> Left "No Options" + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/LibV09.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,21 @@ +name: LibV09 +version: 0.1 +cabal-version: >= 1.2 +license: BSD3 +author: Thomas Tuegel +stability: stable +category: PackageTests +build-type: Simple +cabal-version: >= 1.9.2 + +description: Check type detailed-0.9 test suites. + +library + exposed-modules: Lib + build-depends: base, Cabal + +test-suite LibV09-Deadlock + type: detailed-0.9 + hs-source-dirs: tests + test-module: Deadlock + build-depends: base, Cabal, LibV09 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/TestSuiteTests/LibV09/tests/Deadlock.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,8 @@ +module Deadlock where + +import Distribution.TestSuite + +import Lib + +tests :: IO [Test] +tests = return [nullt x | x <- [1 .. 
1000]] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/M.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/M.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/M.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/M.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +module M(m) where + +m = print "1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P1/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +name: UniqueIPID +version: 0.1 +license: BSD3 +author: Vishal Agrawal +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that Cabal generates unique IPID based on source. + +Library + exposed-modules: M + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/M.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/M.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/M.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/M.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +module M(m) where + +m = print "2" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests/UniqueIPID/P2/my.cabal 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,15 @@ +name: UniqueIPID +version: 0.1 +license: BSD3 +author: Vishal Agrawal +stability: stable +category: PackageTests +build-type: Simple +Cabal-version: >= 1.2 + +description: + Check that Cabal generates unique IPID based on source. + +Library + exposed-modules: M + build-depends: base, containers diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/PackageTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/PackageTests.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,283 @@ +-- The intention is that this will be the new unit test framework. +-- Please add any working tests here. This file should do nothing +-- but import tests from other modules. 
+-- +-- Stephen Blackheath, 2009 + +module Main where + +import PackageTests.Options +import PackageTests.PackageTester +import PackageTests.Tests + +import Distribution.Simple.Configure + ( ConfigStateFileError(..), findDistPrefOrDefault, getConfigStateFile + , interpretPackageDbFlags ) +import Distribution.Simple.Compiler (PackageDB(..), PackageDBStack) +import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) +import Distribution.Simple.Program.Types (Program(..), programPath, programVersion) +import Distribution.Simple.Program.Builtin + ( ghcProgram, ghcPkgProgram, haddockProgram ) +import Distribution.Simple.Program.Db (requireProgram) +import Distribution.Simple.Setup (Flag(..), readPackageDbList, showPackageDbList) +import Distribution.Simple.Utils (cabalVersion) +import Distribution.Text (display, simpleParse) +import Distribution.Verbosity (normal, flagToVerbosity) +import Distribution.ReadE (readEOrFail) + +import Control.Exception +import Data.Proxy ( Proxy(..) ) +import Distribution.Compat.Environment ( lookupEnv ) +import System.Directory +import Test.Tasty +import Test.Tasty.Options +import Test.Tasty.Ingredients +import Data.Maybe + +#if MIN_VERSION_base(4,6,0) +import System.Environment ( getExecutablePath ) +#endif + +main :: IO () +main = do + -- In abstract, the Cabal test suite makes calls to the "Setup" + -- executable and tests the output of Cabal. However, we have to be + -- responsible for building this executable in the first place, + -- since (1) Cabal doesn't support a test-suite depending on an + -- executable, so we can't put a "Setup" executable in the Cabal + -- file and then depend on it, (2) we don't want to call the Cabal + -- functions *directly* because we need to capture and save the + -- stdout and stderr, and (3) even if we could do all that, we will + -- want to test some Custom setup scripts, which will be specific to + -- the test at hand and need to be compiled against Cabal. + -- + -- To be able to build the executable, there is some information + -- we need: + -- + -- 1. We need to know what ghc to use, + -- + -- 2. We need to know what package databases (plural!) contain + -- all of the necessary dependencies to make our Cabal package + -- well-formed. + -- + -- We could have the user pass these all in as arguments (TODO: this + -- should be an option), but there's a more convenient way to get + -- this information: the *build configuration* that was used to + -- build the Cabal library (and this test suite) in the first place. + -- To do this, we need to find the 'dist' directory that was set as + -- the build directory for Cabal. + + dist_dir <- guessDistDir + -- Might be bottom, if we can't figure it out. If you override + -- all the relevant parameters you might still succeed. + lbi <- getPersistBuildConfig_ (dist_dir </> "setup-config") + + -- You need to run the test suite in the right directory, sorry. + test_dir <- getCurrentDirectory + + -- Pull out the information we need from the LBI + -- TODO: The paths to GHC are configurable by command line, but you + -- have to be careful: some tests might depend on the Cabal library, + -- in which case you REALLY need to have built and installed Cabal + -- for the version that the test suite is being built against. The + -- easiest thing to do is make sure you built Cabal the same way as + -- you will run the tests. 
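-- [Editorial sketch, not part of the recorded diff.] The hunk continues below
-- by reading tool locations from CABAL_PACKAGETESTS_* environment variables,
-- falling back to the LocalBuildInfo when a variable is unset. A minimal
-- standalone version of that lookup pattern, with an assumed hard-coded
-- fallback in place of the LBI query; pathFromEnvOr and the "/usr/bin/ghc"
-- default are illustrative names only, not Cabal API.
import System.Environment (lookupEnv)
import Data.Maybe (fromMaybe)

-- Return the environment override if set, otherwise the supplied fallback.
pathFromEnvOr :: String -> FilePath -> IO FilePath
pathFromEnvOr var fallback = fromMaybe fallback `fmap` lookupEnv var

main :: IO ()
main = do
  ghc <- pathFromEnvOr "CABAL_PACKAGETESTS_GHC" "/usr/bin/ghc"  -- assumed default
  putStrLn ("Using GHC at " ++ ghc)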
+ let getExePathFromEnvOrLBI env_name prog = do + r <- lookupEnv env_name + case r of + Nothing -> do + (conf, _) <- requireProgram normal prog (withPrograms lbi) + return (programPath conf) + Just x -> return x + -- It is too much damn work to actually properly configure it + -- (Cabal will go off and probe GHC and we really aren't keen + -- on doing this every time we run the test suite.) + ghc_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_GHC" ghcProgram + ghc_pkg_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_GHC_PKG" + ghcPkgProgram + haddock_path <- getExePathFromEnvOrLBI "CABAL_PACKAGETESTS_HADDOCK" + haddockProgram + + with_ghc_path <- fromMaybe ghc_path + `fmap` lookupEnv "CABAL_PACKAGETESTS_WITH_GHC" + + ghc_version_env <- lookupEnv "CABAL_PACKAGETESTS_GHC_VERSION" + ghc_version <- case ghc_version_env of + Nothing -> do + (ghcConf, _) <- requireProgram normal ghcProgram (withPrograms lbi) + return (fromJust (programVersion ghcConf)) + Just str -> + return (fromJust (simpleParse str)) + + with_ghc_version <- do + version <- programFindVersion ghcProgram normal with_ghc_path + case version of + Nothing -> error "Cannot determine version of GHC used for --with-ghc" + Just v -> return v + + -- Package DBs are not guaranteed to be absolute, so make them so in + -- case a subprocess using the package DB needs a different CWD. + db_stack_env <- lookupEnv "CABAL_PACKAGETESTS_DB_STACK" + let packageDBStack0 = case db_stack_env of + Nothing -> withPackageDB lbi + Just str -> interpretPackageDbFlags True -- user install? why not. + (concatMap readPackageDbList + (splitSearchPath str)) + packageDBStack1 <- mapM canonicalizePackageDB packageDBStack0 + + -- The packageDBStack is worth some commentary. The database + -- stack we extract from the LBI will contain enough package + -- databases to make the Cabal package well-formed. However, + -- it does not *contain* the inplace installed Cabal package. + -- So we need to add that to the stack. + let packageDBStack2 + = packageDBStack1 ++ + [SpecificPackageDB + (dist_dir </> "package.conf.inplace")] + + -- THIS ISN'T EVEN MY FINAL FORM. The package database stack + -- controls where we install a package; specifically, the package is + -- installed to the top-most package on the stack (this makes the + -- most sense, since it could depend on any of the packages below + -- it.) If the test wants to register anything (as opposed to just + -- working in place), then we need to have another temporary + -- database we can install into (and not accidentally clobber any of + -- the other stacks.) This is done on a per-test basis. + -- + -- ONE MORE THING. On the subject of installing the package (with + -- copy/register) it is EXTREMELY important that we also overload + -- the install directories, so we don't clobber anything in the + -- default install paths. VERY IMPORTANT. + + -- TODO: make this controllable by a flag + verbosity <- maybe normal (readEOrFail flagToVerbosity) + `fmap` lookupEnv "VERBOSE" + -- The inplaceDB is where the Cabal library was registered + -- in place (and is usable.) inplaceConfig is a convenient + -- set of flags to make sure we make it visible. 
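-- [Editorial sketch, not part of the recorded diff.] How a value of the
-- CABAL_PACKAGETESTS_DB_STACK variable (documented in the README hunk further
-- below) is turned into a package database stack, mirroring the
-- interpretPackageDbFlags call in the hunk above. The example value
-- "global:dist/package.conf.inplace" is made up; the Cabal imports are the
-- same ones the hunk itself uses.
import Distribution.Simple.Compiler (PackageDB(..))
import Distribution.Simple.Configure (interpretPackageDbFlags)
import Distribution.Simple.Setup (readPackageDbList)
import System.FilePath (splitSearchPath)

-- Render a PackageDB without relying on a Show instance.
describe :: PackageDB -> String
describe GlobalPackageDB          = "global package db"
describe UserPackageDB            = "user package db"
describe (SpecificPackageDB path) = "specific package db: " ++ path

main :: IO ()
main = do
  let raw   = "global:dist/package.conf.inplace"  -- assumed example value
      stack = interpretPackageDbFlags True        -- True = user install, as in the hunk above
                (concatMap readPackageDbList (splitSearchPath raw))
  mapM_ (putStrLn . describe) stack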
+ let suite = SuiteConfig + { cabalDistPref = dist_dir + , ghcPath = ghc_path + , ghcVersion = ghc_version + , ghcPkgPath = ghc_pkg_path + , withGhcPath = with_ghc_path + , withGhcVersion = with_ghc_version + , packageDBStack = packageDBStack2 + , suiteVerbosity = verbosity + , absoluteCWD = test_dir + } + + putStrLn $ "Cabal test suite - testing cabal version " + ++ display cabalVersion + putStrLn $ "Cabal build directory: " ++ dist_dir + putStrLn $ "Test directory: " ++ test_dir + -- TODO: it might be useful to factor this out so that ./Setup + -- configure dumps this file, so we can read it in a version + -- stable way. + putStrLn $ "Environment:" + putStrLn $ "CABAL_PACKAGETESTS_GHC=" ++ show ghc_path ++ " \\" + putStrLn $ "CABAL_PACKAGETESTS_GHC_VERSION=" + ++ show (display ghc_version) ++ " \\" + putStrLn $ "CABAL_PACKAGETESTS_GHC_PKG=" ++ show ghc_pkg_path ++ " \\" + putStrLn $ "CABAL_PACKAGETESTS_WITH_GHC=" ++ show with_ghc_path ++ " \\" + putStrLn $ "CABAL_PACKAGETESTS_HADDOCK=" ++ show haddock_path ++ " \\" + -- For brevity, do pre-canonicalization + putStrLn $ "CABAL_PACKAGETESTS_DB_STACK=" ++ + show (intercalate [searchPathSeparator] + (showPackageDbList (uninterpretPackageDBFlags + packageDBStack0))) + + -- Create a shared Setup executable to speed up Simple tests + putStrLn $ "Building shared ./Setup executable" + rawCompileSetup verbosity suite [] "tests" + + defaultMainWithIngredients options $ + runTestTree "Package Tests" (tests suite) + +-- Reverse of 'interpretPackageDbFlags'. +-- prop_idem stk b +-- = interpretPackageDbFlags b (uninterpretPackageDBFlags stk) == stk +uninterpretPackageDBFlags :: PackageDBStack -> [Maybe PackageDB] +uninterpretPackageDBFlags stk = Nothing : map (\x -> Just x) stk + +-- | Guess what the 'dist' directory Cabal was installed in is. There's +-- no 100% reliable way to find this, but there are a few good shots: +-- +-- 1. Test programs are ~always built in-place, in a directory +-- that looks like dist/build/package-tests/package-tests; +-- thus the directory can be determined by looking at $0. +-- This method is robust against sandboxes, Nix local +-- builds, and Stack, but doesn't work if you're running +-- in an interpreter. +-- +-- 2. We can use the normal input methods (as per Cabal), +-- checking for the CABAL_BUILDDIR environment variable as +-- well as the default location in the current working directory. +-- +-- NB: If you update this, also update its copy in cabal-install's +-- IntegrationTests +guessDistDir :: IO FilePath +guessDistDir = do +#if MIN_VERSION_base(4,6,0) + -- Method (1) + -- TODO: this needs to be BC'ified, probably. + exe_path <- canonicalizePath =<< getExecutablePath + -- exe_path is something like /path/to/dist/build/package-tests/package-tests + let dist0 = dropFileName exe_path </> ".." </> ".." + b <- doesFileExist (dist0 </> "setup-config") +#else + let dist0 = error "no path" + b = False +#endif + -- Method (2) + if b then canonicalizePath dist0 + else findDistPrefOrDefault NoFlag >>= canonicalizePath + +canonicalizePackageDB :: PackageDB -> IO PackageDB +canonicalizePackageDB (SpecificPackageDB path) + = SpecificPackageDB `fmap` canonicalizePath path +canonicalizePackageDB x = return x + +-- | Like Distribution.Simple.Configure.getPersistBuildConfig but +-- doesn't check that the Cabal version matches, which it doesn't when +-- we run Cabal's own test suite, due to bootstrapping issues. +-- Here's the situation: +-- +-- 1. There's some system Cabal-1.0 installed. We use this +-- to build Setup.hs +-- 2. 
We run ./Setup configure, which uses Cabal-1.0 to +-- write out the LocalBuildInfo +-- 3. We build the Cabal library, whose version is Cabal-2.0 +-- 4. We build the package-tests executable, which LINKS AGAINST +-- Cabal-2.0 +-- 5. We try to read the LocalBuildInfo that ./Setup configure +-- wrote out, but it's Cabal-1.0 format! +-- +-- It's a bit skeevy that we're trying to read Cabal-1.0 LocalBuildInfo +-- using Cabal-2.0's parser, but this seems to work OK in practice +-- because LocalBuildInfo is a slow-moving data structure. If +-- we ever make a major change, this won't work, and we'll have to +-- take a different approach (either setting "build-type: Custom" +-- so we bootstrap with the most recent Cabal, or by writing the +-- information we need in another format.) +getPersistBuildConfig_ :: FilePath -> IO LocalBuildInfo +getPersistBuildConfig_ filename = do + eLBI <- try $ getConfigStateFile filename + case eLBI of + Left (ConfigStateFileBadVersion _ _ (Right lbi)) -> return lbi + -- These errors are lazy! We might not need these parameters. + Left (ConfigStateFileBadVersion _ _ (Left err)) + -> return . error $ + "We couldn't understand the build configuration. Try " ++ + "editing Cabal.cabal to have 'build-type: Custom' " ++ + "and then rebuilding, or manually specifying CABAL_PACKAGETESTS_* " ++ + "environment variables (see README.md for more details)." ++ + "\n\nOriginal error: " ++ + show err + Left err -> return (throw err) + Right lbi -> return lbi + +options :: [Ingredient] +options = includingOptions + [Option (Proxy :: Proxy OptionEnableAllTests)] : + defaultIngredients diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/README.md 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,38 @@ +Writing package tests +===================== + +The tests under the [PackageTests] directory define and build packages +that exercise various components of Cabal. Each test case is an [HUnit] +test. The entry point for the test suite, where all the test cases are +listed, is [PackageTests.hs]. There are utilities for calling the stages +of Cabal's build process in [PackageTests/PackageTester.hs]; have a look +at an existing test case to see how they are used. + +In order to run the tests, `PackageTests` needs to know where the inplace +copy of Cabal being tested is, as well as some information which was +used to configure it. By default, `PackageTests` tries to look at the +`LocalBuildInfo`, but if the format of `LocalBuildInfo` has changed +between the version of Cabal which ran the configure step, and the +version of Cabal we are testing against, this may fail. In that +case, you can manually specify the information we need using +the following environment variables: + +* `CABAL_PACKAGETESTS_GHC` is the path to the GHC you compiled Cabal with +* `CABAL_PACKAGETESTS_GHC_PKG` is the path to the ghc-pkg associated with this GHC +* `CABAL_PACKAGETESTS_HADDOCK` is the path to the haddock associated with this GHC +* `CABAL_PACKAGETESTS_GHC_VERSION` is the version of your GHC +* `CABAL_PACKAGETESTS_DB_STACK` is a PATH-style list of package database paths, + `clear`, `global` and `user`. 
Each component of the list is + interpreted the same way as Cabal's `-package-db` flag. This list + must contain the copy of Cabal you are planning to test against + (as well as its transitive dependencies). + +If you can successfully run the test suite, we'll print out examples +of all of these values for you under "Environment". + +[PackageTests]: PackageTests +[HUnit]: http://hackage.haskell.org/package/HUnit +[PackageTests.hs]: PackageTests.hs +[PackageTests/PackageTester.hs]: PackageTests/PackageTester.hs +[detailed]: ../Distribution/TestSuite.hs +[PackageTests/BuildTestSuiteDetailedV09/Check.hs]: PackageTests/BuildTestSuiteDetailedV09/Check.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Setup.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,3 @@ +import Distribution.Simple +main = defaultMain + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Test/Laws.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Test/Laws.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Test/Laws.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Test/Laws.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,79 @@ +{-# OPTIONS_GHC -fno-warn-missing-signatures #-} +module Test.Laws where + +import Prelude hiding (Num((+), (*))) +import Data.Monoid (Monoid(..), Endo(..)) +import qualified Data.Foldable as Foldable + +idempotent_unary f x = f fx == fx where fx = f x + +-- Basic laws on binary operators + +idempotent_binary (+) x = x + x == x + +commutative (+) x y = x + y == y + x + +associative (+) x y z = (x + y) + z == x + (y + z) + +distributive_left (*) (+) x y z = x * (y + z) == (x * y) + (x * z) + +distributive_right (*) (+) x y z = (y + z) * x == (y * x) + (z * x) + + +-- | The first 'fmap' law +-- +-- > fmap id == id +-- +fmap_1 :: (Eq (f a), Functor f) => f a -> Bool +fmap_1 x = fmap id x == x + +-- | The second 'fmap' law +-- +-- > fmap (f . g) == fmap f . fmap g +-- +fmap_2 :: (Eq (f c), Functor f) => (b -> c) -> (a -> b) -> f a -> Bool +fmap_2 f g x = fmap (f . g) x == (fmap f . fmap g) x + + +-- | The monoid identity law, 'mempty' is a left and right identity of +-- 'mappend': +-- +-- > mempty `mappend` x = x +-- > x `mappend` mempty = x +-- +monoid_1 :: (Eq a, Data.Monoid.Monoid a) => a -> Bool +monoid_1 x = mempty `mappend` x == x + && x `mappend` mempty == x + +-- | The monoid associativity law, 'mappend' must be associative. +-- +-- > (x `mappend` y) `mappend` z = x `mappend` (y `mappend` z) +-- +monoid_2 :: (Eq a, Data.Monoid.Monoid a) => a -> a -> a -> Bool +monoid_2 x y z = (x `mappend` y) `mappend` z + == x `mappend` (y `mappend` z) + +-- | The 'mconcat' definition. 
It can be overidden for the sake of effeciency +-- but it must still satisfy the property given by the default definition: +-- +-- > mconcat = foldr mappend mempty +-- +monoid_3 :: (Eq a, Data.Monoid.Monoid a) => [a] -> Bool +monoid_3 xs = mconcat xs == foldr mappend mempty xs + + +-- | First 'Foldable' law +-- +-- > Foldable.fold = Foldable.foldr mappend mempty +-- +foldable_1 :: (Foldable.Foldable t, Monoid m, Eq m) => t m -> Bool +foldable_1 x = Foldable.fold x == Foldable.foldr mappend mempty x + +-- | Second 'Foldable' law +-- +-- > foldr f z t = appEndo (foldMap (Endo . f) t) z +-- +foldable_2 :: (Foldable.Foldable t, Eq b) + => (a -> b -> b) -> b -> t a -> Bool +foldable_2 f z t = Foldable.foldr f z t + == appEndo (Foldable.foldMap (Endo . f) t) z diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Test/QuickCheck/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Test/QuickCheck/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/Test/QuickCheck/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/Test/QuickCheck/Utils.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,29 @@ +module Test.QuickCheck.Utils where + +import Test.QuickCheck.Gen + + +-- | Adjust the size of the generated value. +-- +-- In general the size gets bigger and bigger linearly. For some types +-- it is not appropriate to generate ever bigger values but instead +-- to generate lots of intermediate sized values. You could do that using: +-- +-- > adjustSize (\n -> min n 5) +-- +-- Similarly, for some types the linear size growth may mean getting too big +-- too quickly relative to other values. So you may want to adjust how +-- quickly the size grows. For example dividing by a constant, or even +-- something like the integer square root or log. +-- +-- > adjustSize (\n -> n `div` 2) +-- +-- Putting this together we can make for example a relatively short list: +-- +-- > adjustSize (\n -> min 5 (n `div` 3)) (listOf1 arbitrary) +-- +-- Not only do we put a limit on the length but we also scale the growth to +-- prevent it from hitting the maximum size quite so early. +-- +adjustSize :: (Int -> Int) -> Gen a -> Gen a +adjustSize adjust gen = sized (\n -> resize (adjust n) gen) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,19 @@ +module UnitTests.Distribution.Compat.CreatePipe (tests) where + +import Distribution.Compat.CreatePipe +import System.IO (hClose, hGetContents, hPutStr, hSetEncoding, localeEncoding) +import Test.Tasty +import Test.Tasty.HUnit + +tests :: [TestTree] +tests = [testCase "Locale Encoding" case_Locale_Encoding] + +case_Locale_Encoding :: Assertion +case_Locale_Encoding = assert $ do + let str = "\0252" + (r, w) <- createPipe + hSetEncoding w localeEncoding + out <- hGetContents r + hPutStr w str + hClose w + return $! 
out == str diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/ReadP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/ReadP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Compat/ReadP.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,153 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.ReadP +-- Copyright : (c) The University of Glasgow 2002 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This code was originally in Distribution.Compat.ReadP. Please see that file +-- for provenance. The tests have been integrated into the test framework. +-- Some properties cannot be tested, as they hold over arbitrary ReadP values, +-- and we don't have a good Arbitrary instance (nor Show instance) for ReadP. +-- +module UnitTests.Distribution.Compat.ReadP + ( tests + -- * Properties + -- $properties + ) where + +import Data.List +import Distribution.Compat.ReadP +import Test.Tasty +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ testProperty "Get Nil" prop_Get_Nil + , testProperty "Get Cons" prop_Get_Cons + , testProperty "Look" prop_Look + , testProperty "Fail" prop_Fail + , testProperty "Return" prop_Return + --, testProperty "Bind" prop_Bind + --, testProperty "Plus" prop_Plus + --, testProperty "LeftPlus" prop_LeftPlus + --, testProperty "Gather" prop_Gather + , testProperty "String Yes" prop_String_Yes + , testProperty "String Maybe" prop_String_Maybe + , testProperty "Munch" (prop_Munch evenChar) + , testProperty "Munch1" (prop_Munch1 evenChar) + --, testProperty "Choice" prop_Choice + --, testProperty "ReadS" prop_ReadS + ] + +-- --------------------------------------------------------------------------- +-- QuickCheck properties that hold for the combinators + +{- $properties +The following are QuickCheck specifications of what the combinators do. +These can be seen as formal specifications of the behavior of the +combinators. + +We use bags to give semantics to the combinators. +-} + +type Bag a = [a] + +-- Equality on bags does not care about the order of elements. + +(=~) :: Ord a => Bag a -> Bag a -> Bool +xs =~ ys = sort xs == sort ys + +-- A special equality operator to avoid unresolved overloading +-- when testing the properties. + +(=~.) :: Bag (Int,String) -> Bag (Int,String) -> Bool +(=~.) = (=~) + +-- Here follow the properties: + +prop_Get_Nil :: Bool +prop_Get_Nil = + readP_to_S get [] =~ [] + +prop_Get_Cons :: Char -> [Char] -> Bool +prop_Get_Cons c s = + readP_to_S get (c:s) =~ [(c,s)] + +prop_Look :: String -> Bool +prop_Look s = + readP_to_S look s =~ [(s,s)] + +prop_Fail :: String -> Bool +prop_Fail s = + readP_to_S pfail s =~. [] + +prop_Return :: Int -> String -> Bool +prop_Return x s = + readP_to_S (return x) s =~. [(x,s)] + +{- +prop_Bind p k s = + readP_to_S (p >>= k) s =~. + [ ys'' + | (x,s') <- readP_to_S p s + , ys'' <- readP_to_S (k (x::Int)) s' + ] + +prop_Plus :: ReadP Int Int -> ReadP Int Int -> String -> Bool +prop_Plus p q s = + readP_to_S (p +++ q) s =~. 
+ (readP_to_S p s ++ readP_to_S q s) + +prop_LeftPlus :: ReadP Int Int -> ReadP Int Int -> String -> Bool +prop_LeftPlus p q s = + readP_to_S (p <++ q) s =~. + (readP_to_S p s +<+ readP_to_S q s) + where + [] +<+ ys = ys + xs +<+ _ = xs + +prop_Gather s = + forAll readPWithoutReadS $ \p -> + readP_to_S (gather p) s =~ + [ ((pre,x::Int),s') + | (x,s') <- readP_to_S p s + , let pre = take (length s - length s') s + ] +-} + +prop_String_Yes :: String -> [Char] -> Bool +prop_String_Yes this s = + readP_to_S (string this) (this ++ s) =~ + [(this,s)] + +prop_String_Maybe :: String -> String -> Bool +prop_String_Maybe this s = + readP_to_S (string this) s =~ + [(this, drop (length this) s) | this `isPrefixOf` s] + +prop_Munch :: (Char -> Bool) -> String -> Bool +prop_Munch p s = + readP_to_S (munch p) s =~ + [(takeWhile p s, dropWhile p s)] + +prop_Munch1 :: (Char -> Bool) -> String -> Bool +prop_Munch1 p s = + readP_to_S (munch1 p) s =~ + [(res,s') | let (res,s') = (takeWhile p s, dropWhile p s), not (null res)] + +{- +prop_Choice :: [ReadP Int Int] -> String -> Bool +prop_Choice ps s = + readP_to_S (choice ps) s =~. + readP_to_S (foldr (+++) pfail ps) s + +prop_ReadS :: ReadS Int -> String -> Bool +prop_ReadS r s = + readP_to_S (readS_to_P r) s =~. r s +-} + +evenChar :: Char -> Bool +evenChar = even . fromEnum diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,36 @@ +module UnitTests.Distribution.Simple.Program.Internal + ( tests + ) where + +import Distribution.Simple.Program.Internal ( stripExtractVersion ) + +import Test.Tasty +import Test.Tasty.HUnit + +v :: String +v = "GNU strip (GNU Binutils; openSUSE 13.2) 2.24.0.20140403-6.1\nCopyright 2013\ + \ Free Software Foundation, Inc.\nThis program is free software; you may\ + \ redistribute it under the terms of\nthe GNU General Public License version 3\ + \ or (at your option) any later version.\nThis program has absolutely no\ + \ warranty.\n" + +v' :: String +v' = "GNU strip 2.17.50.0.6-26.el5 20061020" + +v'' :: String +v'' = "GNU strip (openSUSE-13.2) 2.23.50.0.6-26.el5 20061020" + +v''' :: String +v''' = "GNU strip (GNU (Binutils for) Ubuntu 12.04 ) 2.22" + +tests :: [TestTree] +tests = + [ testCase "Handles parentheses" $ + (stripExtractVersion v) @=? "2.24" + , testCase "Handles dashes and alphabetic characters" $ + (stripExtractVersion v') @=? "2.17" + , testCase "Handles single-word parenthetical expressions" $ + (stripExtractVersion v'') @=? "2.23" + , testCase "Handles nested parentheses" $ + (stripExtractVersion v''') @=? 
"2.22" + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Simple/Utils.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,59 @@ +module UnitTests.Distribution.Simple.Utils + ( tests + ) where + +import Distribution.Simple.Utils +import Distribution.Verbosity + +import Data.IORef +import System.Directory ( doesDirectoryExist, doesFileExist + , getTemporaryDirectory + , removeDirectoryRecursive, removeFile ) +import System.IO (hClose) + +import Test.Tasty +import Test.Tasty.HUnit + +withTempFileTest :: Assertion +withTempFileTest = do + fileName <- newIORef "" + tempDir <- getTemporaryDirectory + withTempFile tempDir ".foo" $ \fileName' _handle -> do + writeIORef fileName fileName' + fileExists <- readIORef fileName >>= doesFileExist + assertBool "Temporary file not deleted by 'withTempFile'!" (not fileExists) + +withTempFileRemovedTest :: Assertion +withTempFileRemovedTest = do + tempDir <- getTemporaryDirectory + withTempFile tempDir ".foo" $ \fileName handle -> do + hClose handle + removeFile fileName + +withTempDirTest :: Assertion +withTempDirTest = do + dirName <- newIORef "" + tempDir <- getTemporaryDirectory + withTempDirectory normal tempDir "foo" $ \dirName' -> do + writeIORef dirName dirName' + dirExists <- readIORef dirName >>= doesDirectoryExist + assertBool "Temporary directory not deleted by 'withTempDirectory'!" 
+ (not dirExists) + +withTempDirRemovedTest :: Assertion +withTempDirRemovedTest = do + tempDir <- getTemporaryDirectory + withTempDirectory normal tempDir "foo" $ \dirPath -> do + removeDirectoryRecursive dirPath + +tests :: [TestTree] +tests = + [ testCase "withTempFile works as expected" $ + withTempFileTest + , testCase "withTempFile can handle removed files" $ + withTempFileRemovedTest + , testCase "withTempDirectory works as expected" $ + withTempDirTest + , testCase "withTempDirectory can handle removed directories" $ + withTempDirRemovedTest + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/System.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/System.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/System.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,29 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} +module UnitTests.Distribution.System + ( tests + ) where + +import Control.Monad (liftM2) +import Distribution.Text (Text(..), display, simpleParse) +import Distribution.System +import Test.Tasty +import Test.Tasty.QuickCheck + +textRoundtrip :: (Show a, Eq a, Text a) => a -> Property +textRoundtrip x = simpleParse (display x) === Just x + +tests :: [TestTree] +tests = + [ testProperty "Text OS round trip" (textRoundtrip :: OS -> Property) + , testProperty "Text Arch round trip" (textRoundtrip :: Arch -> Property) + , testProperty "Text Platform round trip" (textRoundtrip :: Platform -> Property) + ] + +instance Arbitrary OS where + arbitrary = elements knownOSs + +instance Arbitrary Arch where + arbitrary = elements knownArches + +instance Arbitrary Platform where + arbitrary = liftM2 Platform arbitrary arbitrary diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Utils/NubList.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Utils/NubList.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Utils/NubList.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,46 @@ +{-# LANGUAGE CPP #-} +module UnitTests.Distribution.Utils.NubList + ( tests + ) where + +#if __GLASGOW_HASKELL__ < 710 +import Data.Monoid +#endif +import Distribution.Utils.NubList +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ testCase "Numlist retains ordering" testOrdering + , testCase "Numlist removes duplicates" testDeDupe + , testProperty "Monoid Numlist Identity" prop_Identity + , testProperty "Monoid Numlist Associativity" prop_Associativity + ] + +someIntList :: [Int] +-- This list must not have duplicate entries. 
+someIntList = [ 1, 3, 4, 2, 0, 7, 6, 5, 9, -1 ] + +testOrdering :: Assertion +testOrdering = + assertBool "Maintains element ordering:" $ + fromNubList (toNubList someIntList) == someIntList + +testDeDupe :: Assertion +testDeDupe = + assertBool "De-duplicates a list:" $ + fromNubList (toNubList (someIntList ++ someIntList)) == someIntList + +-- --------------------------------------------------------------------------- +-- QuickCheck properties for NubList + +prop_Identity :: [Int] -> Bool +prop_Identity xs = + mempty `mappend` toNubList xs == toNubList xs `mappend` mempty + +prop_Associativity :: [Int] -> [Int] -> [Int] -> Bool +prop_Associativity xs ys zs = + (toNubList xs `mappend` toNubList ys) `mappend` toNubList zs + == toNubList xs `mappend` (toNubList ys `mappend` toNubList zs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests/Distribution/Version.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,723 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-orphans + -fno-warn-incomplete-patterns + -fno-warn-deprecations + -fno-warn-unused-binds #-} --FIXME +module UnitTests.Distribution.Version (versionTests) where + +import Distribution.Version +import Distribution.Text + +import Text.PrettyPrint as Disp (text, render, parens, hcat + ,punctuate, int, char, (<>), (<+>)) + +import Test.Tasty +import Test.Tasty.QuickCheck +import qualified Test.Laws as Laws + +#if !MIN_VERSION_QuickCheck(2,9,0) +import Test.QuickCheck.Utils +#endif + +import Control.Monad (liftM, liftM2) +import Data.Maybe (isJust, fromJust) +import Data.List (sort, sortBy, nub) +import Data.Ord (comparing) + +versionTests :: [TestTree] +versionTests = + zipWith (\n p -> testProperty ("Range Property " ++ show n) p) [1::Int ..] 
+ -- properties to validate the test framework + [ property prop_nonNull + , property prop_gen_intervals1 + , property prop_gen_intervals2 +--, property prop_equivalentVersionRange --FIXME: runs out of test cases + , property prop_intermediateVersion + + -- the basic syntactic version range functions + , property prop_anyVersion + , property prop_noVersion + , property prop_thisVersion + , property prop_notThisVersion + , property prop_laterVersion + , property prop_orLaterVersion + , property prop_earlierVersion + , property prop_orEarlierVersion + , property prop_unionVersionRanges + , property prop_intersectVersionRanges + , property prop_differenceVersionRanges + , property prop_invertVersionRange + , property prop_withinVersion + , property prop_foldVersionRange + , property prop_foldVersionRange' + + -- the semantic query functions +--, property prop_isAnyVersion1 --FIXME: runs out of test cases +--, property prop_isAnyVersion2 --FIXME: runs out of test cases +--, property prop_isNoVersion --FIXME: runs out of test cases +--, property prop_isSpecificVersion1 --FIXME: runs out of test cases +--, property prop_isSpecificVersion2 --FIXME: runs out of test cases + , property prop_simplifyVersionRange1 + , property prop_simplifyVersionRange1' +--, property prop_simplifyVersionRange2 --FIXME: runs out of test cases +--, property prop_simplifyVersionRange2' --FIXME: runs out of test cases +--, property prop_simplifyVersionRange2'' --FIXME: actually wrong + + -- converting between version ranges and version intervals + , property prop_to_intervals +--, property prop_to_intervals_canonical --FIXME: runs out of test cases +--, property prop_to_intervals_canonical' --FIXME: runs out of test cases + , property prop_from_intervals + , property prop_to_from_intervals + , property prop_from_to_intervals + , property prop_from_to_intervals' + + -- union and intersection of version intervals + , property prop_unionVersionIntervals + , property prop_unionVersionIntervals_idempotent + , property prop_unionVersionIntervals_commutative + , property prop_unionVersionIntervals_associative + , property prop_intersectVersionIntervals + , property prop_intersectVersionIntervals_idempotent + , property prop_intersectVersionIntervals_commutative + , property prop_intersectVersionIntervals_associative + , property prop_union_intersect_distributive + , property prop_intersect_union_distributive + + -- inversion of version intervals + , property prop_invertVersionIntervals + , property prop_invertVersionIntervalsTwice + ] + +-- parseTests :: [TestTree] +-- parseTests = +-- zipWith (\n p -> testProperty ("Parse Property " ++ show n) p) [1::Int ..] +-- -- parsing and pretty printing +-- [ -- property prop_parse_disp1 --FIXME: actually wrong + +-- -- These are also wrong, see +-- -- https://github.com/haskell/cabal/issues/3037#issuecomment-177671011 + +-- -- property prop_parse_disp2 +-- -- , property prop_parse_disp3 +-- -- , property prop_parse_disp4 +-- -- , property prop_parse_disp5 +-- ] + +#if !MIN_VERSION_QuickCheck(2,9,0) +instance Arbitrary Version where + arbitrary = do + branch <- smallListOf1 $ + frequency [(3, return 0) + ,(3, return 1) + ,(2, return 2) + ,(1, return 3)] + return (Version branch []) -- deliberate [] + where + smallListOf1 = adjustSize (\n -> min 5 (n `div` 3)) . 
listOf1 + + shrink (Version branch []) = + [ Version branch' [] | branch' <- shrink branch, not (null branch') ] + shrink (Version branch _tags) = + [ Version branch [] ] +#endif + +instance Arbitrary VersionRange where + arbitrary = sized verRangeExp + where + verRangeExp n = frequency $ + [ (2, return anyVersion) + , (1, liftM thisVersion arbitrary) + , (1, liftM laterVersion arbitrary) + , (1, liftM orLaterVersion arbitrary) + , (1, liftM orLaterVersion' arbitrary) + , (1, liftM earlierVersion arbitrary) + , (1, liftM orEarlierVersion arbitrary) + , (1, liftM orEarlierVersion' arbitrary) + , (1, liftM withinVersion arbitrary) + , (2, liftM VersionRangeParens arbitrary) + ] ++ if n == 0 then [] else + [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) + , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) + ] + where + verRangeExp2 = verRangeExp (n `div` 2) + + orLaterVersion' v = + unionVersionRanges (LaterVersion v) (ThisVersion v) + orEarlierVersion' v = + unionVersionRanges (EarlierVersion v) (ThisVersion v) + +--------------------------- +-- VersionRange properties +-- + +prop_nonNull :: Version -> Bool +prop_nonNull = not . null . versionBranch + +prop_anyVersion :: Version -> Bool +prop_anyVersion v' = + withinRange v' anyVersion == True + +prop_noVersion :: Version -> Bool +prop_noVersion v' = + withinRange v' noVersion == False + +prop_thisVersion :: Version -> Version -> Bool +prop_thisVersion v v' = + withinRange v' (thisVersion v) + == (v' == v) + +prop_notThisVersion :: Version -> Version -> Bool +prop_notThisVersion v v' = + withinRange v' (notThisVersion v) + == (v' /= v) + +prop_laterVersion :: Version -> Version -> Bool +prop_laterVersion v v' = + withinRange v' (laterVersion v) + == (v' > v) + +prop_orLaterVersion :: Version -> Version -> Bool +prop_orLaterVersion v v' = + withinRange v' (orLaterVersion v) + == (v' >= v) + +prop_earlierVersion :: Version -> Version -> Bool +prop_earlierVersion v v' = + withinRange v' (earlierVersion v) + == (v' < v) + +prop_orEarlierVersion :: Version -> Version -> Bool +prop_orEarlierVersion v v' = + withinRange v' (orEarlierVersion v) + == (v' <= v) + +prop_unionVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_unionVersionRanges vr1 vr2 v' = + withinRange v' (unionVersionRanges vr1 vr2) + == (withinRange v' vr1 || withinRange v' vr2) + +prop_intersectVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_intersectVersionRanges vr1 vr2 v' = + withinRange v' (intersectVersionRanges vr1 vr2) + == (withinRange v' vr1 && withinRange v' vr2) + +prop_differenceVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_differenceVersionRanges vr1 vr2 v' = + withinRange v' (differenceVersionRanges vr1 vr2) + == (withinRange v' vr1 && not (withinRange v' vr2)) + +prop_invertVersionRange :: VersionRange -> Version -> Bool +prop_invertVersionRange vr v' = + withinRange v' (invertVersionRange vr) + == not (withinRange v' vr) + +prop_withinVersion :: Version -> Version -> Bool +prop_withinVersion v v' = + withinRange v' (withinVersion v) + == (v' >= v && v' < upper v) + where + upper (Version lower t) = Version (init lower ++ [last lower + 1]) t + +prop_foldVersionRange :: VersionRange -> Bool +prop_foldVersionRange range = + expandWildcard range + == foldVersionRange anyVersion thisVersion + laterVersion earlierVersion + unionVersionRanges intersectVersionRanges + range + where + expandWildcard (WildcardVersion v) = + intersectVersionRanges (orLaterVersion v) (earlierVersion 
(upper v)) + where + upper (Version lower t) = Version (init lower ++ [last lower + 1]) t + + expandWildcard (UnionVersionRanges v1 v2) = + UnionVersionRanges (expandWildcard v1) (expandWildcard v2) + expandWildcard (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (expandWildcard v1) (expandWildcard v2) + expandWildcard (VersionRangeParens v) = expandWildcard v + expandWildcard v = v + + +prop_foldVersionRange' :: VersionRange -> Bool +prop_foldVersionRange' range = + canonicalise range + == foldVersionRange' anyVersion thisVersion + laterVersion earlierVersion + orLaterVersion orEarlierVersion + (\v _ -> withinVersion v) + unionVersionRanges intersectVersionRanges id + range + where + canonicalise (UnionVersionRanges (LaterVersion v) + (ThisVersion v')) | v == v' + = UnionVersionRanges (ThisVersion v') + (LaterVersion v) + canonicalise (UnionVersionRanges (EarlierVersion v) + (ThisVersion v')) | v == v' + = UnionVersionRanges (ThisVersion v') + (EarlierVersion v) + canonicalise (UnionVersionRanges v1 v2) = + UnionVersionRanges (canonicalise v1) (canonicalise v2) + canonicalise (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (canonicalise v1) (canonicalise v2) + canonicalise (VersionRangeParens v) = canonicalise v + canonicalise v = v + + +prop_isAnyVersion1 :: VersionRange -> Version -> Property +prop_isAnyVersion1 range version = + isAnyVersion range ==> withinRange version range + +prop_isAnyVersion2 :: VersionRange -> Property +prop_isAnyVersion2 range = + isAnyVersion range ==> + foldVersionRange True (\_ -> False) (\_ -> False) (\_ -> False) + (\_ _ -> False) (\_ _ -> False) + (simplifyVersionRange range) + +prop_isNoVersion :: VersionRange -> Version -> Property +prop_isNoVersion range version = + isNoVersion range ==> not (withinRange version range) + +prop_isSpecificVersion1 :: VersionRange -> NonEmptyList Version -> Property +prop_isSpecificVersion1 range (NonEmpty versions) = + isJust version && not (null versions') ==> + allEqual (fromJust version : versions') + where + version = isSpecificVersion range + versions' = filter (`withinRange` range) versions + allEqual xs = and (zipWith (==) xs (tail xs)) + +prop_isSpecificVersion2 :: VersionRange -> Property +prop_isSpecificVersion2 range = + isJust version ==> + foldVersionRange Nothing Just (\_ -> Nothing) (\_ -> Nothing) + (\_ _ -> Nothing) (\_ _ -> Nothing) + (simplifyVersionRange range) + == version + + where + version = isSpecificVersion range + +-- | 'simplifyVersionRange' is a semantic identity on 'VersionRange'. +-- +prop_simplifyVersionRange1 :: VersionRange -> Version -> Bool +prop_simplifyVersionRange1 range version = + withinRange version range == withinRange version (simplifyVersionRange range) + +prop_simplifyVersionRange1' :: VersionRange -> Bool +prop_simplifyVersionRange1' range = + range `equivalentVersionRange` (simplifyVersionRange range) + +-- | 'simplifyVersionRange' produces a canonical form for ranges with +-- equivalent semantics. 
+-- +prop_simplifyVersionRange2 :: VersionRange -> VersionRange -> Version -> Property +prop_simplifyVersionRange2 r r' v = + r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> + withinRange v r == withinRange v r' + +prop_simplifyVersionRange2' :: VersionRange -> VersionRange -> Property +prop_simplifyVersionRange2' r r' = + r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> + r `equivalentVersionRange` r' + +--FIXME: see equivalentVersionRange for details +prop_simplifyVersionRange2'' :: VersionRange -> VersionRange -> Property +prop_simplifyVersionRange2'' r r' = + r /= r' && r `equivalentVersionRange` r' ==> + simplifyVersionRange r == simplifyVersionRange r' + || isNoVersion r + || isNoVersion r' + +-------------------- +-- VersionIntervals +-- + +-- | Generating VersionIntervals +-- +-- This is a tad tricky as VersionIntervals is an abstract type, so we first +-- make a local type for generating the internal representation. Then we check +-- that this lets us construct valid 'VersionIntervals'. +-- +newtype VersionIntervals' = VersionIntervals' [VersionInterval] + deriving (Eq, Show) + +instance Arbitrary VersionIntervals' where + arbitrary = do + ubound <- arbitrary + bounds <- arbitrary + let intervals = mergeTouching + . map fixEmpty + . replaceUpper ubound + . pairs + . sortBy (comparing fst) + $ bounds + return (VersionIntervals' intervals) + + where + pairs ((l, lb):(u, ub):bs) = (LowerBound l lb, UpperBound u ub) + : pairs bs + pairs _ = [] + + replaceUpper NoUpperBound [(l,_)] = [(l, NoUpperBound)] + replaceUpper NoUpperBound (i:is) = i : replaceUpper NoUpperBound is + replaceUpper _ is = is + + -- merge adjacent intervals that touch + mergeTouching (i1@(l,u):i2@(l',u'):is) + | doesNotTouch u l' = i1 : mergeTouching (i2:is) + | otherwise = mergeTouching ((l,u'):is) + mergeTouching is = is + + doesNotTouch :: UpperBound -> LowerBound -> Bool + doesNotTouch NoUpperBound _ = False + doesNotTouch (UpperBound u ub) (LowerBound l lb) = + u < l + || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) + + fixEmpty (LowerBound l _, UpperBound u _) + | l == u = (LowerBound l InclusiveBound, UpperBound u InclusiveBound) + fixEmpty i = i + + shrink (VersionIntervals' intervals) = + [ VersionIntervals' intervals' | intervals' <- shrink intervals ] + +instance Arbitrary Bound where + arbitrary = elements [ExclusiveBound, InclusiveBound] + +instance Arbitrary LowerBound where + arbitrary = liftM2 LowerBound arbitrary arbitrary + +instance Arbitrary UpperBound where + arbitrary = oneof [return NoUpperBound + ,liftM2 UpperBound arbitrary arbitrary] + +-- | Check that our VersionIntervals' arbitrary instance generates intervals +-- that satisfies the invariant. +-- +prop_gen_intervals1 :: VersionIntervals' -> Bool +prop_gen_intervals1 (VersionIntervals' intervals) = + isJust (mkVersionIntervals intervals) + +instance Arbitrary VersionIntervals where + arbitrary = do + VersionIntervals' intervals <- arbitrary + case mkVersionIntervals intervals of + Just xs -> return xs + +-- | Check that constructing our intervals type and converting it to a +-- 'VersionRange' and then into the true intervals type gives us back +-- the exact same sequence of intervals. This tells us that our arbitrary +-- instance for 'VersionIntervals'' is ok. 
+-- +prop_gen_intervals2 :: VersionIntervals' -> Bool +prop_gen_intervals2 (VersionIntervals' intervals') = + asVersionIntervals (fromVersionIntervals intervals) == intervals' + where + Just intervals = mkVersionIntervals intervals' + +-- | Check that 'VersionIntervals' models 'VersionRange' via +-- 'toVersionIntervals'. +-- +prop_to_intervals :: VersionRange -> Version -> Bool +prop_to_intervals range version = + withinRange version range == withinIntervals version intervals + where + intervals = toVersionIntervals range + +-- | Check that semantic equality on 'VersionRange's is the same as converting +-- to 'VersionIntervals' and doing syntactic equality. +-- +prop_to_intervals_canonical :: VersionRange -> VersionRange -> Property +prop_to_intervals_canonical r r' = + r /= r' && r `equivalentVersionRange` r' ==> + toVersionIntervals r == toVersionIntervals r' + +prop_to_intervals_canonical' :: VersionRange -> VersionRange -> Property +prop_to_intervals_canonical' r r' = + r /= r' && toVersionIntervals r == toVersionIntervals r' ==> + r `equivalentVersionRange` r' + +-- | Check that 'VersionIntervals' models 'VersionRange' via +-- 'fromVersionIntervals'. +-- +prop_from_intervals :: VersionIntervals -> Version -> Bool +prop_from_intervals intervals version = + withinRange version range == withinIntervals version intervals + where + range = fromVersionIntervals intervals + +-- | @'toVersionIntervals' . 'fromVersionIntervals'@ is an exact identity on +-- 'VersionIntervals'. +-- +prop_to_from_intervals :: VersionIntervals -> Bool +prop_to_from_intervals intervals = + toVersionIntervals (fromVersionIntervals intervals) == intervals + +-- | @'fromVersionIntervals' . 'toVersionIntervals'@ is a semantic identity on +-- 'VersionRange', though not necessarily a syntactic identity. +-- +prop_from_to_intervals :: VersionRange -> Bool +prop_from_to_intervals range = + range' `equivalentVersionRange` range + where + range' = fromVersionIntervals (toVersionIntervals range) + +-- | Equivalent of 'prop_from_to_intervals' +-- +prop_from_to_intervals' :: VersionRange -> Version -> Bool +prop_from_to_intervals' range version = + withinRange version range' == withinRange version range + where + range' = fromVersionIntervals (toVersionIntervals range) + +-- | The semantics of 'unionVersionIntervals' is (||). +-- +prop_unionVersionIntervals :: VersionIntervals -> VersionIntervals + -> Version -> Bool +prop_unionVersionIntervals is1 is2 v = + withinIntervals v (unionVersionIntervals is1 is2) + == (withinIntervals v is1 || withinIntervals v is2) + +-- | 'unionVersionIntervals' is idempotent +-- +prop_unionVersionIntervals_idempotent :: VersionIntervals -> Bool +prop_unionVersionIntervals_idempotent = + Laws.idempotent_binary unionVersionIntervals + +-- | 'unionVersionIntervals' is commutative +-- +prop_unionVersionIntervals_commutative :: VersionIntervals + -> VersionIntervals -> Bool +prop_unionVersionIntervals_commutative = + Laws.commutative unionVersionIntervals + +-- | 'unionVersionIntervals' is associative +-- +prop_unionVersionIntervals_associative :: VersionIntervals + -> VersionIntervals + -> VersionIntervals -> Bool +prop_unionVersionIntervals_associative = + Laws.associative unionVersionIntervals + +-- | The semantics of 'intersectVersionIntervals' is (&&). 
+-- +prop_intersectVersionIntervals :: VersionIntervals -> VersionIntervals + -> Version -> Bool +prop_intersectVersionIntervals is1 is2 v = + withinIntervals v (intersectVersionIntervals is1 is2) + == (withinIntervals v is1 && withinIntervals v is2) + +-- | 'intersectVersionIntervals' is idempotent +-- +prop_intersectVersionIntervals_idempotent :: VersionIntervals -> Bool +prop_intersectVersionIntervals_idempotent = + Laws.idempotent_binary intersectVersionIntervals + +-- | 'intersectVersionIntervals' is commutative +-- +prop_intersectVersionIntervals_commutative :: VersionIntervals + -> VersionIntervals -> Bool +prop_intersectVersionIntervals_commutative = + Laws.commutative intersectVersionIntervals + +-- | 'intersectVersionIntervals' is associative +-- +prop_intersectVersionIntervals_associative :: VersionIntervals + -> VersionIntervals + -> VersionIntervals -> Bool +prop_intersectVersionIntervals_associative = + Laws.associative intersectVersionIntervals + +-- | 'unionVersionIntervals' distributes over 'intersectVersionIntervals' +-- +prop_union_intersect_distributive :: Property +prop_union_intersect_distributive = + Laws.distributive_left unionVersionIntervals intersectVersionIntervals + .&. Laws.distributive_right unionVersionIntervals intersectVersionIntervals + +-- | 'intersectVersionIntervals' distributes over 'unionVersionIntervals' +-- +prop_intersect_union_distributive :: Property +prop_intersect_union_distributive = + Laws.distributive_left intersectVersionIntervals unionVersionIntervals + .&. Laws.distributive_right intersectVersionIntervals unionVersionIntervals + +-- | The semantics of 'invertVersionIntervals' is 'not'. +-- +prop_invertVersionIntervals :: VersionIntervals + -> Version -> Bool +prop_invertVersionIntervals vi v = + withinIntervals v (invertVersionIntervals vi) + == not (withinIntervals v vi) + +-- | Double application of 'invertVersionIntervals' is the identity function +prop_invertVersionIntervalsTwice :: VersionIntervals -> Bool +prop_invertVersionIntervalsTwice vi = + invertVersionIntervals (invertVersionIntervals vi) == vi + + + +-------------------------------- +-- equivalentVersionRange helper + +prop_equivalentVersionRange :: VersionRange -> VersionRange + -> Version -> Property +prop_equivalentVersionRange range range' version = + equivalentVersionRange range range' && range /= range' ==> + withinRange version range == withinRange version range' + +--FIXME: this is wrong. consider version ranges "<=1" and "<1.0" +-- this algorithm cannot distinguish them because there is no version +-- that is included by one that is excluded by the other. +-- Alternatively we must reconsider the semantics of '<' and '<=' +-- in version ranges / version intervals. Perhaps the canonical +-- representation should use just < v and interpret "<= v" as "< v.0". 
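-- [Editorial sketch, not part of the recorded diff.] A concrete instance of
-- the FIXME above: no version lies strictly between [1] and [1,0], so "<=1"
-- and "<1.0" admit exactly the same versions and the probe-based check that
-- follows cannot separate them, even though their canonical interval forms
-- (inclusive vs. exclusive upper bound) stay syntactically distinct. Uses
-- only the Distribution.Version API already imported by the hunk above.
import Distribution.Version

main :: IO ()
main = do
  let le1  = orEarlierVersion (Version [1] [])    -- the range "<=1"
      lt10 = earlierVersion (Version [1,0] [])    -- the range "<1.0"
  -- Both ranges contain version 1 (and every other probe point agrees)...
  print (withinRange (Version [1] []) le1, withinRange (Version [1] []) lt10)  -- (True,True)
  -- ...yet their interval representations are not syntactically equal.
  print (toVersionIntervals le1 == toVersionIntervals lt10)                    -- False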
+equivalentVersionRange :: VersionRange -> VersionRange -> Bool +equivalentVersionRange vr1 vr2 = + let allVersionsUsed = nub (sort (versionsUsed vr1 ++ versionsUsed vr2)) + minPoint = Version [0] [] + maxPoint | null allVersionsUsed = minPoint + | otherwise = case maximum allVersionsUsed of + Version vs _ -> Version (vs ++ [1]) [] + probeVersions = minPoint : maxPoint + : intermediateVersions allVersionsUsed + + in all (\v -> withinRange v vr1 == withinRange v vr2) probeVersions + + where + versionsUsed = foldVersionRange [] (\x->[x]) (\x->[x]) (\x->[x]) (++) (++) + intermediateVersions (v1:v2:vs) = v1 : intermediateVersion v1 v2 + : intermediateVersions (v2:vs) + intermediateVersions vs = vs + +intermediateVersion :: Version -> Version -> Version +intermediateVersion v1 v2 | v1 >= v2 = error "intermediateVersion: v1 >= v2" +intermediateVersion (Version v1 _) (Version v2 _) = + Version (intermediateList v1 v2) [] + where + intermediateList :: [Int] -> [Int] -> [Int] + intermediateList [] (_:_) = [0] + intermediateList (x:xs) (y:ys) + | x < y = x : xs ++ [0] + | otherwise = x : intermediateList xs ys + +prop_intermediateVersion :: Version -> Version -> Property +prop_intermediateVersion v1 v2 = + (v1 /= v2) && not (adjacentVersions v1 v2) ==> + if v1 < v2 + then let v = intermediateVersion v1 v2 + in (v1 < v && v < v2) + else let v = intermediateVersion v2 v1 + in v1 > v && v > v2 + +adjacentVersions :: Version -> Version -> Bool +adjacentVersions (Version v1 _) (Version v2 _) = v1 ++ [0] == v2 + || v2 ++ [0] == v1 + +-------------------------------- +-- Parsing and pretty printing +-- + +prop_parse_disp1 :: VersionRange -> Bool +prop_parse_disp1 vr = + fmap stripParens (simpleParse (display vr)) == Just (canonicalise vr) + + where + canonicalise = swizzle . 
swap + + swizzle (UnionVersionRanges (UnionVersionRanges v1 v2) v3) + | not (isOrLaterVersion v1 v2) && not (isOrEarlierVersion v1 v2) + = swizzle (UnionVersionRanges v1 (UnionVersionRanges v2 v3)) + + swizzle (IntersectVersionRanges (IntersectVersionRanges v1 v2) v3) + = swizzle (IntersectVersionRanges v1 (IntersectVersionRanges v2 v3)) + + swizzle (UnionVersionRanges v1 v2) = + UnionVersionRanges (swizzle v1) (swizzle v2) + swizzle (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (swizzle v1) (swizzle v2) + swizzle (VersionRangeParens v) = swizzle v + swizzle v = v + + isOrLaterVersion (ThisVersion v) (LaterVersion v') = v == v' + isOrLaterVersion _ _ = False + + isOrEarlierVersion (ThisVersion v) (EarlierVersion v') = v == v' + isOrEarlierVersion _ _ = False + + swap = + foldVersionRange' anyVersion thisVersion + laterVersion earlierVersion + orLaterVersion orEarlierVersion + (\v _ -> withinVersion v) + unionVersionRanges intersectVersionRanges id + + stripParens :: VersionRange -> VersionRange + stripParens (VersionRangeParens v) = stripParens v + stripParens (UnionVersionRanges v1 v2) = + UnionVersionRanges (stripParens v1) (stripParens v2) + stripParens (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (stripParens v1) (stripParens v2) + stripParens v = v + +prop_parse_disp2 :: VersionRange -> Property +prop_parse_disp2 vr = + let b = fmap (display :: VersionRange -> String) (simpleParse (display vr)) + a = Just (display vr) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp3 :: VersionRange -> Property +prop_parse_disp3 vr = + let a = Just (display vr) + b = fmap displayRaw (simpleParse (display vr)) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp4 :: VersionRange -> Property +prop_parse_disp4 vr = + let a = Just vr + b = (simpleParse (display vr)) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp5 :: VersionRange -> Property +prop_parse_disp5 vr = + let a = Just vr + b = simpleParse (displayRaw vr) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +displayRaw :: VersionRange -> String +displayRaw = + Disp.render + . 
foldVersionRange' -- precedence: + -- All the same as the usual pretty printer, except for the parens + ( Disp.text "-any") + (\v -> Disp.text "==" <> disp v) + (\v -> Disp.char '>' <> disp v) + (\v -> Disp.char '<' <> disp v) + (\v -> Disp.text ">=" <> disp v) + (\v -> Disp.text "<=" <> disp v) + (\v _ -> Disp.text "==" <> dispWild v) + (\r1 r2 -> r1 <+> Disp.text "||" <+> r2) + (\r1 r2 -> r1 <+> Disp.text "&&" <+> r2) + (\r -> Disp.parens r) -- parens + + where + dispWild (Version b _) = + Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int b)) + <> Disp.text ".*" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/Cabal-1.24.2.0/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/Cabal-1.24.2.0/tests/UnitTests.hs 2016-12-23 10:35:22.000000000 +0000 @@ -0,0 +1,34 @@ +module Main + ( main + ) where + +import Test.Tasty + +import qualified UnitTests.Distribution.Compat.CreatePipe +import qualified UnitTests.Distribution.Compat.ReadP +import qualified UnitTests.Distribution.Simple.Program.Internal +import qualified UnitTests.Distribution.Simple.Utils +import qualified UnitTests.Distribution.System +import qualified UnitTests.Distribution.Utils.NubList +import qualified UnitTests.Distribution.Version (versionTests) + +tests :: TestTree +tests = testGroup "Unit Tests" $ + [ testGroup "Distribution.Compat.CreatePipe" + UnitTests.Distribution.Compat.CreatePipe.tests + , testGroup "Distribution.Compat.ReadP" + UnitTests.Distribution.Compat.ReadP.tests + , testGroup "Distribution.Simple.Program.Internal" + UnitTests.Distribution.Simple.Program.Internal.tests + , testGroup "Distribution.Simple.Utils" + UnitTests.Distribution.Simple.Utils.tests + , testGroup "Distribution.Utils.NubList" + UnitTests.Distribution.Utils.NubList.tests + , testGroup "Distribution.System" + UnitTests.Distribution.System.tests + , testGroup "Distribution.Version" + UnitTests.Distribution.Version.versionTests + ] + +main :: IO () +main = defaultMain tests diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/bash-completion/cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/bash-completion/cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/bash-completion/cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/bash-completion/cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -# cabal command line completion -# Copyright 2007-2008 "Lennart Kolmodin" -# "Duncan Coutts" -# - -# List cabal targets by type, pass: -# - test-suite for test suites -# - benchmark for benchmarks -# - executable for executables -# - executable|test-suite|benchmark for the three -_cabal_list() -{ - cat *.cabal | - grep -Ei "^[[:space:]]*($1)[[:space:]]" | - sed -e "s/.* \([^ ]*\).*/\1/" -} - -# List possible targets depending on the command supplied as parameter. The -# ideal option would be to implement this via --list-options on cabal directly. -# This is a temporary workaround. -_cabal_targets() -{ - # If command ($*) contains build, repl, test or bench completes with - # targets of according type. 
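# For example, completing "cabal build <TAB>" offers the executable,
# test-suite and benchmark names listed in the package's .cabal file,
# while "cabal test <TAB>" offers only the test-suite names.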
- [ -f *.cabal ] || return 0 - local comp - for comp in $*; do - [ $comp == build ] && _cabal_list "executable|test-suite|benchmark" && break - [ $comp == repl ] && _cabal_list "executable|test-suite|benchmark" && break - [ $comp == run ] && _cabal_list "executable" && break - [ $comp == test ] && _cabal_list "test-suite" && break - [ $comp == bench ] && _cabal_list "benchmark" && break - done -} - -# List possible subcommands of a cabal subcommand. -# -# In example "sandbox" is a cabal subcommand that itself has subcommands. Since -# "cabal --list-options" doesn't work in such cases we have to get the list -# using other means. -_cabal_subcommands() -{ - local word - for word in "$@"; do - case "$word" in - sandbox) - # Get list of "cabal sandbox" subcommands from its help message. - "$1" help sandbox | - sed -n '1,/^Subcommands:$/d;/^Flags for sandbox:$/,$d;/^ /d;s/^\(.*\):/\1/p' - break # Terminate for loop. - ;; - esac - done -} - -__cabal_has_doubledash () -{ - local c=1 - # Ignore the last word, because it is replaced anyways. - # This allows expansion for flags on "cabal foo --", - # but does not try to complete after "cabal foo -- ". - local n=$((${#COMP_WORDS[@]} - 1)) - while [ $c -lt $n ]; do - if [ "--" = "${COMP_WORDS[c]}" ]; then - return 0 - fi - ((c++)) - done - return 1 -} - -_cabal() -{ - # no completion past cabal arguments. - __cabal_has_doubledash && return - - # get the word currently being completed - local cur - cur=${COMP_WORDS[$COMP_CWORD]} - - # create a command line to run - local cmd - # copy all words the user has entered - cmd=( ${COMP_WORDS[@]} ) - - # replace the current word with --list-options - cmd[${COMP_CWORD}]="--list-options" - - # the resulting completions should be put into this array - COMPREPLY=( $( compgen -W "$( ${cmd[@]} ) $( _cabal_targets ${cmd[@]} ) $( _cabal_subcommands ${COMP_WORDS[@]} )" -- $cur ) ) -} - -complete -F _cabal -o default cabal diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/bootstrap.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/bootstrap.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/bootstrap.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/bootstrap.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,564 +0,0 @@ -#!/bin/sh - -# A script to bootstrap cabal-install. - -# It works by downloading and installing the Cabal, zlib and -# HTTP packages. It then installs cabal-install itself. -# It expects to be run inside the cabal-install directory. - -# Install settings, you can override these by setting environment vars. E.g. 
if -# you don't want profiling and dynamic versions of libraries to be installed in -# addition to vanilla, run 'EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh' - -#VERBOSE -DEFAULT_CONFIGURE_OPTS="--enable-library-profiling --enable-shared" -EXTRA_CONFIGURE_OPTS=${EXTRA_CONFIGURE_OPTS-$DEFAULT_CONFIGURE_OPTS} -#EXTRA_BUILD_OPTS -#EXTRA_INSTALL_OPTS - -die () { printf "\nError during cabal-install bootstrap:\n$1\n" >&2 && exit 2 ;} - -# programs, you can override these by setting environment vars -GHC="${GHC:-ghc}" -GHC_PKG="${GHC_PKG:-ghc-pkg}" -GHC_VER="$(${GHC} --numeric-version)" -HADDOCK=${HADDOCK:-haddock} -WGET="${WGET:-wget}" -CURL="${CURL:-curl}" -FETCH="${FETCH:-fetch}" -TAR="${TAR:-tar}" -GZIP_PROGRAM="${GZIP_PROGRAM:-gzip}" - -# The variable SCOPE_OF_INSTALLATION can be set on the command line to -# use/install the libaries needed to build cabal-install to a custom package -# database instead of the user or global package database. -# -# Example: -# -# $ ghc-pkg init /my/package/database -# $ SCOPE_OF_INSTALLATION='--package-db=/my/package/database' ./bootstrap.sh -# -# You can also combine SCOPE_OF_INSTALLATION with PREFIX: -# -# $ ghc-pkg init /my/prefix/packages.conf.d -# $ SCOPE_OF_INSTALLATION='--package-db=/my/prefix/packages.conf.d' \ -# PREFIX=/my/prefix ./bootstrap.sh -# -# If you use the --global,--user or --sandbox arguments, this will -# override the SCOPE_OF_INSTALLATION setting and not use the package -# database you pass in the SCOPE_OF_INSTALLATION variable. - -SCOPE_OF_INSTALLATION="${SCOPE_OF_INSTALLATION:---user}" -DEFAULT_PREFIX="${HOME}/.cabal" - -# Try to respect $TMPDIR. -[ -"$TMPDIR"- = -""- ] && - export TMPDIR=/tmp/cabal-$(echo $(od -XN4 -An /dev/random)) && mkdir $TMPDIR - -# Check for a C compiler, using user-set $CC, if any, first. -for c in $CC gcc clang cc icc; do - $c --version 2>&1 >/dev/null && CC=$c && - echo "Using $c for C compiler. If this is not what you want, set CC." >&2 && - break -done - -# None found. -[ -"$CC"- = -""- ] && die 'C compiler not found (or could not be run). - If a C compiler is installed make sure it is on your PATH, or set $CC.' - -# Find the correct linker/linker-wrapper. -LINK="$(for link in collect2 ld; do - [ $($CC -print-prog-name=$link) = $link ] && continue || - $CC -print-prog-name=$link - done)" - -# Fall back to "ld"... might work. -[ -$LINK- = -""- ] && LINK=ld - -# And finally, see if we can compile and link something. - echo 'int main(){}' | $CC -xc - -o /dev/null || - die "C compiler and linker could not compile a simple test program. - Please check your toolchain." - -# Warn that were's overriding $LD if set (if you want). -[ -"$LD"- != -""- ] && [ -"$LD"- != -"$LINK"- ] && - echo "Warning: value set in $LD is not the same as C compiler's $LINK." >&2 - echo "Using $LINK instead." >&2 - -# Set LD, overriding environment if necessary. -export LD=$LINK - -# Check we're in the right directory, etc. -grep "cabal-install" ./cabal-install.cabal > /dev/null 2>&1 || - die "The bootstrap.sh script must be run in the cabal-install directory" - -${GHC} --numeric-version > /dev/null 2>&1 || - die "${GHC} not found (or could not be run). - If ghc is installed, make sure it is on your PATH, - or set the GHC and GHC_PKG vars." - -${GHC_PKG} --version > /dev/null 2>&1 || die "${GHC_PKG} not found." - -GHC_PKG_VER="$(${GHC_PKG} --version | cut -d' ' -f 5)" - -[ ${GHC_VER} = ${GHC_PKG_VER} ] || - die "Version mismatch between ${GHC} and ${GHC_PKG}. - If you set the GHC variable then set GHC_PKG too." 
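# Parse the command-line options. These may override the defaults chosen
# above: installation scope (--user/--global/--sandbox), prefix, whether
# documentation is generated (--no-doc), and build parallelism (-j/--jobs).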
- -JOBS="-j1" -while [ "$#" -gt 0 ]; do - case "${1}" in - "--user") - SCOPE_OF_INSTALLATION="${1}" - shift;; - "--global") - SCOPE_OF_INSTALLATION="${1}" - DEFAULT_PREFIX="/usr/local" - shift;; - "--sandbox") - shift - # check if there is another argument which doesn't start with -- - if [ "$#" -le 0 ] || [ ! -z $(echo "${1}" | grep "^--") ] - then - SANDBOX=".cabal-sandbox" - else - SANDBOX="${1}" - shift - fi;; - "--no-doc") - NO_DOCUMENTATION=1 - shift;; - "-j"|"--jobs") - shift - # check if there is another argument which doesn't start with - or -- - if [ "$#" -le 0 ] \ - || [ ! -z $(echo "${1}" | grep "^-") ] \ - || [ ! -z $(echo "${1}" | grep "^--") ] - then - JOBS="-j" - else - JOBS="-j${1}" - shift - fi;; - *) - echo "Unknown argument or option, quitting: ${1}" - echo "usage: bootstrap.sh [OPTION]" - echo - echo "options:" - echo " -j/--jobs Number of concurrent workers to use (Default: 1)" - echo " -j without an argument will use all available cores" - echo " --user Install for the local user (default)" - echo " --global Install systemwide (must be run as root)" - echo " --no-doc Do not generate documentation for installed"\ - "packages" - echo " --sandbox Install to a sandbox in the default location"\ - "(.cabal-sandbox)" - echo " --sandbox path Install to a sandbox located at path" - exit;; - esac -done - -# Do not try to use -j with GHC older than 7.8 -case $GHC_VER in - 7.4*|7.6*) - JOBS="" - ;; - *) - ;; -esac - -abspath () { case "$1" in /*)printf "%s\n" "$1";; *)printf "%s\n" "$PWD/$1";; - esac; } - -if [ ! -z "$SANDBOX" ] -then # set up variables for sandbox bootstrap - # Make the sandbox path absolute since it will be used from - # different working directories when the dependency packages are - # installed. - SANDBOX=$(abspath "$SANDBOX") - # Get the name of the package database which cabal sandbox would use. - GHC_ARCH=$(ghc --info | - sed -n 's/.*"Target platform".*"\([^-]\+\)-[^-]\+-\([^"]\+\)".*/\1-\2/p') - PACKAGEDB="$SANDBOX/${GHC_ARCH}-ghc-${GHC_VER}-packages.conf.d" - # Assume that if the directory is already there, it is already a - # package database. We will get an error immediately below if it - # isn't. Uses -r to try to be compatible with Solaris, and allow - # symlinks as well as a normal dir/file. - [ ! -r "$PACKAGEDB" ] && ghc-pkg init "$PACKAGEDB" - PREFIX="$SANDBOX" - SCOPE_OF_INSTALLATION="--package-db=$PACKAGEDB" - echo Bootstrapping in sandbox at \'$SANDBOX\'. -fi - -# Check for haddock unless no documentation should be generated. -if [ ! ${NO_DOCUMENTATION} ] -then - ${HADDOCK} --version > /dev/null 2>&1 || die "${HADDOCK} not found." -fi - -PREFIX=${PREFIX:-${DEFAULT_PREFIX}} - -# Versions of the packages to install. -# The version regex says what existing installed versions are ok. -PARSEC_VER="3.1.9"; PARSEC_VER_REGEXP="[3]\.[01]\." - # >= 3.0 && < 3.2 -DEEPSEQ_VER="1.4.2.0"; DEEPSEQ_VER_REGEXP="1\.[1-9]\." - # >= 1.1 && < 2 - -case "$GHC_VER" in - 7.4*|7.6*) - # GHC 7.4 or 7.6 - BINARY_VER="0.8.2.1" - BINARY_VER_REGEXP="[0]\.[78]\.[0-2]\." # >= 0.7 && < 0.8.3 - ;; - *) - # GHC >= 7.8 - BINARY_VER="0.8.3.0" - BINARY_VER_REGEXP="[0]\.[78]\." # >= 0.7 && < 0.9 - ;; -esac - - -TEXT_VER="1.2.2.1"; TEXT_VER_REGEXP="((1\.[012]\.)|(0\.([2-9]|(1[0-1]))\.))" - # >= 0.2 && < 1.3 -NETWORK_VER="2.6.2.1"; NETWORK_VER_REGEXP="2\.[0-6]\." - # >= 2.0 && < 2.7 -NETWORK_URI_VER="2.6.1.0"; NETWORK_URI_VER_REGEXP="2\.6\." 
- # >= 2.6 && < 2.7 -CABAL_VER="1.24.1.0"; CABAL_VER_REGEXP="1\.24\.[0-9]" - # >= 1.24 && < 1.25 -TRANS_VER="0.5.2.0"; TRANS_VER_REGEXP="0\.[45]\." - # >= 0.2.* && < 0.6 -MTL_VER="2.2.1"; MTL_VER_REGEXP="[2]\." - # >= 2.0 && < 3 -HTTP_VER="4000.3.3"; HTTP_VER_REGEXP="4000\.(2\.([5-9]|1[0-9]|2[0-9])|3\.?)" - # >= 4000.2.5 < 4000.4 -ZLIB_VER="0.6.1.1"; ZLIB_VER_REGEXP="(0\.5\.([3-9]|1[0-9])|0\.6)" - # >= 0.5.3 && <= 0.7 -TIME_VER="1.6" TIME_VER_REGEXP="1\.[1-6]\.?" - # >= 1.1 && < 1.7 -RANDOM_VER="1.1" RANDOM_VER_REGEXP="1\.[01]\.?" - # >= 1 && < 1.2 -STM_VER="2.4.4.1"; STM_VER_REGEXP="2\." - # == 2.* -ASYNC_VER="2.1.0"; ASYNC_VER_REGEXP="2\." - # 2.* -OLD_TIME_VER="1.1.0.3"; OLD_TIME_VER_REGEXP="1\.[01]\.?" - # >=1.0.0.0 && <1.2 -OLD_LOCALE_VER="1.0.0.7"; OLD_LOCALE_VER_REGEXP="1\.0\.?" - # >=1.0.0.0 && <1.1 -BASE16_BYTESTRING_VER="0.1.1.6"; BASE16_BYTESTRING_VER_REGEXP="0\.1" - # 0.1.* -BASE64_BYTESTRING_VER="1.0.0.1"; BASE64_BYTESTRING_REGEXP="1\." - # >=1.0 -CRYPTOHASH_SHA256_VER="0.11.7.1"; CRYPTOHASH_SHA256_VER_REGEXP="0\.11\.?" - # 0.11.* -ED25519_VER="0.0.5.0"; ED25519_VER_REGEXP="0\.0\.?" - # 0.0.* -HACKAGE_SECURITY_VER="0.5.2.2"; HACKAGE_SECURITY_VER_REGEXP="0\.5\.(2\.[2-9]|[3-9])" - # >= 0.5.2 && < 0.6 -BYTESTRING_BUILDER_VER="0.10.8.1.0"; BYTESTRING_BUILDER_VER_REGEXP="0\.10\.?" -TAR_VER="0.5.0.3"; TAR_VER_REGEXP="0\.5\.([1-9]|1[0-9]|0\.[3-9]|0\.1[0-9])\.?" - # >= 0.5.0.3 && < 0.6 -HASHABLE_VER="1.2.4.0"; HASHABLE_VER_REGEXP="1\." - # 1.* - -HACKAGE_URL="https://hackage.haskell.org/package" - -# Haddock fails for network-2.5.0.0, and for hackage-security for -# GHC <8, c.f. https://github.com/well-typed/hackage-security/issues/149 -NO_DOCS_PACKAGES_VER_REGEXP="network-uri-2\.5\.[0-9]+\.[0-9]+|hackage-security-0\.5\.[0-9]+\.[0-9]+" - -# Cache the list of packages: -echo "Checking installed packages for ghc-${GHC_VER}..." -${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg.list || - die "running '${GHC_PKG} list' failed" - -# Will we need to install this package, or is a suitable version installed? -need_pkg () { - PKG=$1 - VER_MATCH=$2 - if egrep " ${PKG}-${VER_MATCH}" ghc-pkg.list > /dev/null 2>&1 - then - return 1; - else - return 0; - fi - #Note: we cannot use "! grep" here as Solaris 9 /bin/sh doesn't like it. -} - -info_pkg () { - PKG=$1 - VER=$2 - VER_MATCH=$3 - - if need_pkg ${PKG} ${VER_MATCH} - then - if [ -r "${PKG}-${VER}.tar.gz" ] - then - echo "${PKG}-${VER} will be installed from local tarball." - else - echo "${PKG}-${VER} will be downloaded and installed." - fi - else - echo "${PKG} is already installed and the version is ok." - fi -} - -fetch_pkg () { - PKG=$1 - VER=$2 - - URL_PKG=${HACKAGE_URL}/${PKG}-${VER}/${PKG}-${VER}.tar.gz - URL_PKGDESC=${HACKAGE_URL}/${PKG}-${VER}/${PKG}.cabal - if which ${CURL} > /dev/null - then - # TODO: switch back to resuming curl command once - # https://github.com/haskell/hackage-server/issues/111 is resolved - #${CURL} -L --fail -C - -O ${URL_PKG} || die "Failed to download ${PKG}." - ${CURL} -L --fail -O ${URL_PKG} || die "Failed to download ${PKG}." - ${CURL} -L --fail -O ${URL_PKGDESC} \ - || die "Failed to download '${PKG}.cabal'." - elif which ${WGET} > /dev/null - then - ${WGET} -c ${URL_PKG} || die "Failed to download ${PKG}." - ${WGET} -c ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." - elif which ${FETCH} > /dev/null - then - ${FETCH} ${URL_PKG} || die "Failed to download ${PKG}." - ${FETCH} ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." - else - die "Failed to find a downloader. 
'curl', 'wget' or 'fetch' is required." - fi - [ -f "${PKG}-${VER}.tar.gz" ] || - die "Downloading ${URL_PKG} did not create ${PKG}-${VER}.tar.gz" - [ -f "${PKG}.cabal" ] || - die "Downloading ${URL_PKGDESC} did not create ${PKG}.cabal" - mv "${PKG}.cabal" "${PKG}.cabal.hackage" -} - -unpack_pkg () { - PKG=$1 - VER=$2 - - rm -rf "${PKG}-${VER}.tar" "${PKG}-${VER}" - ${GZIP_PROGRAM} -d < "${PKG}-${VER}.tar.gz" | ${TAR} -xf - - [ -d "${PKG}-${VER}" ] || die "Failed to unpack ${PKG}-${VER}.tar.gz" - cp "${PKG}.cabal.hackage" "${PKG}-${VER}/${PKG}.cabal" -} - -install_pkg () { - PKG=$1 - VER=$2 - - [ -x Setup ] && ./Setup clean - [ -f Setup ] && rm Setup - - ${GHC} --make ${JOBS} Setup -o Setup || - die "Compiling the Setup script failed." - - [ -x Setup ] || die "The Setup script does not exist or cannot be run" - - args="${SCOPE_OF_INSTALLATION} --prefix=${PREFIX} --with-compiler=${GHC}" - args="$args --with-hc-pkg=${GHC_PKG} --with-gcc=${CC} --with-ld=${LD}" - args="$args ${EXTRA_CONFIGURE_OPTS} ${VERBOSE}" - - ./Setup configure $args || die "Configuring the ${PKG} package failed." - - ./Setup build ${JOBS} ${EXTRA_BUILD_OPTS} ${VERBOSE} || - die "Building the ${PKG} package failed." - - if [ ! ${NO_DOCUMENTATION} ] - then - if echo "${PKG}-${VER}" | egrep ${NO_DOCS_PACKAGES_VER_REGEXP} \ - > /dev/null 2>&1 - then - echo "Skipping documentation for the ${PKG} package." - else - ./Setup haddock --with-ghc=${GHC} --with-haddock=${HADDOCK} ${VERBOSE} || - die "Documenting the ${PKG} package failed." - fi - fi - - ./Setup install ${EXTRA_INSTALL_OPTS} ${VERBOSE} || - die "Installing the ${PKG} package failed." -} - -do_pkg () { - PKG=$1 - VER=$2 - VER_MATCH=$3 - - if need_pkg ${PKG} ${VER_MATCH} - then - echo - if [ -r "${PKG}-${VER}.tar.gz" ] - then - echo "Using local tarball for ${PKG}-${VER}." - else - echo "Downloading ${PKG}-${VER}..." - fetch_pkg ${PKG} ${VER} - fi - unpack_pkg ${PKG} ${VER} - cd "${PKG}-${VER}" - install_pkg ${PKG} ${VER} - cd .. - fi -} - -# If we're bootstrapping from a Git clone, install the local version of Cabal -# instead of downloading one from Hackage. -do_Cabal_pkg () { - if [ -d "../.git" ] - then - if need_pkg "Cabal" ${CABAL_VER_REGEXP} - then - echo "Cabal-${CABAL_VER} will be installed from the local Git clone." - cd ../Cabal - install_pkg ${CABAL_VER} ${CABAL_VER_REGEXP} - cd ../cabal-install - else - echo "Cabal-${CABAL_VER} is already installed and the version is ok." - fi - else - info_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} - do_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} - fi -} - -# Replicate the flag selection logic for network-uri in the .cabal file. -do_network_uri_pkg () { - # Refresh installed package list. - ${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg-stage2.list \ - || die "running '${GHC_PKG} list' failed" - - NETWORK_URI_DUMMY_VER="2.5.0.0"; NETWORK_URI_DUMMY_VER_REGEXP="2\.5\." # < 2.6 - if egrep " network-2\.[6-9]\." ghc-pkg-stage2.list > /dev/null 2>&1 - then - # Use network >= 2.6 && network-uri >= 2.6 - info_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} - do_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} - else - # Use network < 2.6 && network-uri < 2.6 - info_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} \ - ${NETWORK_URI_DUMMY_VER_REGEXP} - do_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} \ - ${NETWORK_URI_DUMMY_VER_REGEXP} - fi -} - -# Conditionally install bytestring-builder if bytestring is < 0.10.2. -do_bytestring_builder_pkg () { - if egrep "bytestring-0\.(9|10\.[0,1])\.?" 
ghc-pkg-stage2.list > /dev/null 2>&1 - then - info_pkg "bytestring-builder" ${BYTESTRING_BUILDER_VER} \ - ${BYTESTRING_BUILDER_VER_REGEXP} - do_pkg "bytestring-builder" ${BYTESTRING_BUILDER_VER} \ - ${BYTESTRING_BUILDER_VER_REGEXP} - fi -} - -# Actually do something! - -info_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} -info_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} -info_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} -info_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} -info_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} -info_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} -info_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} -info_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} -info_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} -info_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} -info_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} -info_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} -info_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} -info_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} -info_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} -info_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ - ${BASE16_BYTESTRING_VER_REGEXP} -info_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ - ${BASE64_BYTESTRING_VER_REGEXP} -info_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ - ${CRYPTOHASH_SHA256_VER_REGEXP} -info_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} -info_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} -info_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} -info_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ - ${HACKAGE_SECURITY_VER_REGEXP} - -do_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} -do_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} -do_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} - -# Install the Cabal library from the local Git clone if possible. -do_Cabal_pkg - -do_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} -do_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} -do_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} -do_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} -do_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} - -# We conditionally install network-uri, depending on the network version. -do_network_uri_pkg - -do_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} -do_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} -do_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} -do_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} -do_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} -do_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} -do_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} -do_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ - ${BASE16_BYTESTRING_VER_REGEXP} -do_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ - ${BASE64_BYTESTRING_VER_REGEXP} -do_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ - ${CRYPTOHASH_SHA256_VER_REGEXP} -do_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} - -# We conditionally install bytestring-builder, depending on the bytestring -# version. -do_bytestring_builder_pkg - -do_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} -do_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} -do_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ - ${HACKAGE_SECURITY_VER_REGEXP} - - -install_pkg "cabal-install" - -# Use the newly built cabal to turn the prefix/package database into a -# legit cabal sandbox. This works because 'cabal sandbox init' will -# reuse the already existing package database and other files if they -# are in the expected locations. -[ ! 
-z "$SANDBOX" ] && $SANDBOX/bin/cabal sandbox init --sandbox $SANDBOX - -echo -echo "===========================================" -CABAL_BIN="$PREFIX/bin" -if [ -x "$CABAL_BIN/cabal" ] -then - echo "The 'cabal' program has been installed in $CABAL_BIN/" - echo "You should either add $CABAL_BIN to your PATH" - echo "or copy the cabal program to a directory that is on your PATH." - echo - echo "The first thing to do is to get the latest list of packages with:" - echo " cabal update" - echo "This will also create a default config file (if it does not already" - echo "exist) at $HOME/.cabal/config" - echo - echo "By default cabal will install programs to $HOME/.cabal/bin" - echo "If you do not want to add this directory to your PATH then you can" - echo "change the setting in the config file, for example you could use:" - echo "symlink-bindir: $HOME/bin" -else - echo "Sorry, something went wrong." - echo "The 'cabal' executable was not successfully installed into" - echo "$CABAL_BIN/" -fi -echo - -rm ghc-pkg.list diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/cabal-install.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/cabal-install.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/cabal-install.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/cabal-install.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,418 +0,0 @@ -Name: cabal-install -Version: 1.24.0.1 -Synopsis: The command-line interface for Cabal and Hackage. -Description: - The \'cabal\' command-line program simplifies the process of managing - Haskell software by automating the fetching, configuration, compilation - and installation of Haskell libraries and programs. -homepage: http://www.haskell.org/cabal/ -bug-reports: https://github.com/haskell/cabal/issues -License: BSD3 -License-File: LICENSE -Author: Lemmih - Paolo Martini - Bjorn Bringert - Isaac Potoczny-Jones - Duncan Coutts -Maintainer: cabal-devel@haskell.org -Copyright: 2005 Lemmih - 2006 Paolo Martini - 2007 Bjorn Bringert - 2007 Isaac Potoczny-Jones - 2007-2012 Duncan Coutts -Category: Distribution -Build-type: Custom -Cabal-Version: >= 1.10 -Extra-Source-Files: - README.md bash-completion/cabal bootstrap.sh changelog - tests/README.md - - -- Generated with '../Cabal/misc/gen-extra-source-files.sh' - -- Do NOT edit this section manually; instead, run the script. 
- -- BEGIN gen-extra-source-files - tests/IntegrationTests/custom-setup/common.sh - tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal - tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs - tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal - tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs - tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs - tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal - tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs - tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal - tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs - tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal - tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh - tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh - tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh - tests/IntegrationTests/custom/common.sh - tests/IntegrationTests/custom/should_run/plain.err - tests/IntegrationTests/custom/should_run/plain.sh - tests/IntegrationTests/custom/should_run/plain/A.hs - tests/IntegrationTests/custom/should_run/plain/Setup.hs - tests/IntegrationTests/custom/should_run/plain/plain.cabal - tests/IntegrationTests/exec/common.sh - tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err - tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh - tests/IntegrationTests/exec/should_run/Foo.hs - tests/IntegrationTests/exec/should_run/My.hs - tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out - tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh - tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out - tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh - tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out - tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh - tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh - tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh - tests/IntegrationTests/exec/should_run/my.cabal - tests/IntegrationTests/exec/should_run/runs_given_command.out - tests/IntegrationTests/exec/should_run/runs_given_command.sh - tests/IntegrationTests/freeze/common.sh - tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh - tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh - tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh - tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh - tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh - tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh - tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh - tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh - tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh - tests/IntegrationTests/freeze/should_run/my.cabal - tests/IntegrationTests/freeze/should_run/runs_without_error.sh - tests/IntegrationTests/manpage/common.sh - 
tests/IntegrationTests/manpage/should_run/outputs_manpage.sh - tests/IntegrationTests/multiple-source/common.sh - tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh - tests/IntegrationTests/multiple-source/should_run/p/Setup.hs - tests/IntegrationTests/multiple-source/should_run/p/p.cabal - tests/IntegrationTests/multiple-source/should_run/q/Setup.hs - tests/IntegrationTests/multiple-source/should_run/q/q.cabal - tests/IntegrationTests/new-build/monitor_cabal_files.sh - tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs - tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs - tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal - tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs - tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs - tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in - tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in - tests/IntegrationTests/regression/common.sh - tests/IntegrationTests/regression/t3199.sh - tests/IntegrationTests/regression/t3199/Main.hs - tests/IntegrationTests/regression/t3199/Setup.hs - tests/IntegrationTests/regression/t3199/test-3199.cabal - tests/IntegrationTests/sandbox-sources/common.sh - tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err - tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh - tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs - tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal - tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs - tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal - tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs - tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal - tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs - tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal - tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh - tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out - tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh - tests/IntegrationTests/user-config/common.sh - tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err - tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh - tests/IntegrationTests/user-config/should_run/overwrites_with_f.out - tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh - tests/IntegrationTests/user-config/should_run/runs_without_error.out - tests/IntegrationTests/user-config/should_run/runs_without_error.sh - tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out - tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh - -- END gen-extra-source-files - -source-repository head - type: git - location: https://github.com/haskell/cabal/ - subdir: cabal-install - -Flag old-bytestring - description: Use bytestring < 0.10.2 and bytestring-builder - default: False - -Flag old-directory - description: Use directory < 1.2 and old-time - default: False - -Flag network-uri - description: Get Network.URI from the network-uri package - default: True - -executable cabal - main-is: Main.hs - ghc-options: -Wall -fwarn-tabs - if impl(ghc >= 8.0) - ghc-options: -Wcompat - -Wnoncanonical-monad-instances - -Wnoncanonical-monadfail-instances - - other-modules: - Distribution.Client.BuildTarget - 
Distribution.Client.BuildReports.Anonymous - Distribution.Client.BuildReports.Storage - Distribution.Client.BuildReports.Types - Distribution.Client.BuildReports.Upload - Distribution.Client.Check - Distribution.Client.CmdBuild - Distribution.Client.CmdConfigure - Distribution.Client.CmdRepl - Distribution.Client.ComponentDeps - Distribution.Client.Config - Distribution.Client.Configure - Distribution.Client.Dependency - Distribution.Client.Dependency.TopDown - Distribution.Client.Dependency.TopDown.Constraints - Distribution.Client.Dependency.TopDown.Types - Distribution.Client.Dependency.Types - Distribution.Client.Dependency.Modular - Distribution.Client.Dependency.Modular.Assignment - Distribution.Client.Dependency.Modular.Builder - Distribution.Client.Dependency.Modular.Configured - Distribution.Client.Dependency.Modular.ConfiguredConversion - Distribution.Client.Dependency.Modular.ConflictSet - Distribution.Client.Dependency.Modular.Cycles - Distribution.Client.Dependency.Modular.Dependency - Distribution.Client.Dependency.Modular.Explore - Distribution.Client.Dependency.Modular.Flag - Distribution.Client.Dependency.Modular.Index - Distribution.Client.Dependency.Modular.IndexConversion - Distribution.Client.Dependency.Modular.Linking - Distribution.Client.Dependency.Modular.Log - Distribution.Client.Dependency.Modular.Message - Distribution.Client.Dependency.Modular.Package - Distribution.Client.Dependency.Modular.Preference - Distribution.Client.Dependency.Modular.PSQ - Distribution.Client.Dependency.Modular.Solver - Distribution.Client.Dependency.Modular.Tree - Distribution.Client.Dependency.Modular.Validate - Distribution.Client.Dependency.Modular.Var - Distribution.Client.Dependency.Modular.Version - Distribution.Client.DistDirLayout - Distribution.Client.Exec - Distribution.Client.Fetch - Distribution.Client.FetchUtils - Distribution.Client.FileMonitor - Distribution.Client.Freeze - Distribution.Client.GenBounds - Distribution.Client.Get - Distribution.Client.Glob - Distribution.Client.GlobalFlags - Distribution.Client.GZipUtils - Distribution.Client.Haddock - Distribution.Client.HttpUtils - Distribution.Client.IndexUtils - Distribution.Client.Init - Distribution.Client.Init.Heuristics - Distribution.Client.Init.Licenses - Distribution.Client.Init.Types - Distribution.Client.Install - Distribution.Client.InstallPlan - Distribution.Client.InstallSymlink - Distribution.Client.JobControl - Distribution.Client.List - Distribution.Client.Manpage - Distribution.Client.PackageHash - Distribution.Client.PackageIndex - Distribution.Client.PackageUtils - Distribution.Client.ParseUtils - Distribution.Client.PkgConfigDb - Distribution.Client.PlanIndex - Distribution.Client.ProjectBuilding - Distribution.Client.ProjectConfig - Distribution.Client.ProjectConfig.Types - Distribution.Client.ProjectConfig.Legacy - Distribution.Client.ProjectOrchestration - Distribution.Client.ProjectPlanning - Distribution.Client.ProjectPlanning.Types - Distribution.Client.ProjectPlanOutput - Distribution.Client.Run - Distribution.Client.RebuildMonad - Distribution.Client.Sandbox - Distribution.Client.Sandbox.Index - Distribution.Client.Sandbox.PackageEnvironment - Distribution.Client.Sandbox.Timestamp - Distribution.Client.Sandbox.Types - Distribution.Client.Security.HTTP - Distribution.Client.Setup - Distribution.Client.SetupWrapper - Distribution.Client.SrcDist - Distribution.Client.Tar - Distribution.Client.Targets - Distribution.Client.Types - Distribution.Client.Update - Distribution.Client.Upload - 
Distribution.Client.Utils - Distribution.Client.Utils.LabeledGraph - Distribution.Client.Utils.Json - Distribution.Client.World - Distribution.Client.Win32SelfUpgrade - Distribution.Client.Compat.ExecutablePath - Distribution.Client.Compat.FilePerms - Distribution.Client.Compat.Process - Distribution.Client.Compat.Semaphore - Distribution.Client.Compat.Time - Paths_cabal_install - - -- NOTE: when updating build-depends, don't forget to update version regexps - -- in bootstrap.sh. - build-depends: - async >= 2.0 && < 3, - array >= 0.4 && < 0.6, - base >= 4.5 && < 5, - base16-bytestring >= 0.1.1 && < 0.2, - binary >= 0.5 && < 0.9, - bytestring >= 0.9 && < 1, - Cabal >= 1.24.1 && < 1.25, - containers >= 0.4 && < 0.6, - cryptohash-sha256 >= 0.11 && < 0.12, - filepath >= 1.3 && < 1.5, - hashable >= 1.0 && < 2, - HTTP >= 4000.1.5 && < 4000.4, - mtl >= 2.0 && < 3, - pretty >= 1.1 && < 1.2, - random >= 1 && < 1.2, - stm >= 2.0 && < 3, - tar >= 0.5.0.3 && < 0.6, - time >= 1.4 && < 1.7, - zlib >= 0.5.3 && < 0.7, - hackage-security >= 0.5.2.2 && < 0.6 - - if flag(old-bytestring) - build-depends: bytestring < 0.10.2, bytestring-builder >= 0.10 && < 1 - else - build-depends: bytestring >= 0.10.2 - - if flag(old-directory) - build-depends: directory >= 1.1 && < 1.2, old-time >= 1 && < 1.2, - process >= 1.0.1.1 && < 1.1.0.2 - else - build-depends: directory >= 1.2 && < 1.3, - process >= 1.1.0.2 && < 1.5 - - -- NOTE: you MUST include the network dependency even when network-uri - -- is pulled in, otherwise the constraint solver doesn't have enough - -- information - if flag(network-uri) - build-depends: network-uri >= 2.6 && < 2.7, network >= 2.6 && < 2.7 - else - build-depends: network >= 2.4 && < 2.6 - - -- Needed for GHC.Generics before GHC 7.6 - if impl(ghc < 7.6) - build-depends: ghc-prim >= 0.2 && < 0.3 - - if os(windows) - build-depends: Win32 >= 2 && < 3 - else - build-depends: unix >= 2.5 && < 2.8 - - if arch(arm) && impl(ghc < 7.6) - -- older ghc on arm does not support -threaded - cc-options: -DCABAL_NO_THREADED - else - ghc-options: -threaded - - c-sources: cbits/getnumcores.c - default-language: Haskell2010 - --- Small, fast running tests. -Test-Suite unit-tests - type: exitcode-stdio-1.0 - main-is: UnitTests.hs - hs-source-dirs: tests, . 
- ghc-options: -Wall -fwarn-tabs - other-modules: - UnitTests.Distribution.Client.ArbitraryInstances - UnitTests.Distribution.Client.Targets - UnitTests.Distribution.Client.Compat.Time - UnitTests.Distribution.Client.Dependency.Modular.PSQ - UnitTests.Distribution.Client.Dependency.Modular.Solver - UnitTests.Distribution.Client.Dependency.Modular.DSL - UnitTests.Distribution.Client.FileMonitor - UnitTests.Distribution.Client.Glob - UnitTests.Distribution.Client.GZipUtils - UnitTests.Distribution.Client.Sandbox - UnitTests.Distribution.Client.Sandbox.Timestamp - UnitTests.Distribution.Client.Tar - UnitTests.Distribution.Client.UserConfig - UnitTests.Distribution.Client.ProjectConfig - UnitTests.Options - build-depends: - base, - array, - bytestring, - Cabal, - containers, - mtl, - pretty, - process, - directory, - filepath, - hashable, - stm, - tar, - time, - HTTP, - zlib, - binary, - random, - hackage-security, - tasty, - tasty-hunit, - tasty-quickcheck, - tagged, - QuickCheck >= 2.8.2 - - if flag(old-directory) - build-depends: old-time - - if flag(network-uri) - build-depends: network-uri >= 2.6, network >= 2.6 - else - build-depends: network-uri < 2.6, network < 2.6 - - if impl(ghc < 7.6) - build-depends: ghc-prim >= 0.2 && < 0.3 - - if os(windows) - build-depends: Win32 - else - build-depends: unix - - if arch(arm) - cc-options: -DCABAL_NO_THREADED - else - ghc-options: -threaded - default-language: Haskell2010 - -test-suite integration-tests - type: exitcode-stdio-1.0 - hs-source-dirs: tests - main-is: IntegrationTests.hs - build-depends: - Cabal, - async, - base, - bytestring, - directory, - filepath, - process, - regex-posix, - tasty, - tasty-hunit - - if os(windows) - build-depends: Win32 >= 2 && < 3 - else - build-depends: unix >= 2.5 && < 2.8 - - if arch(arm) - cc-options: -DCABAL_NO_THREADED - else - ghc-options: -threaded - - ghc-options: -Wall -fwarn-tabs -fno-ignore-asserts - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/cbits/getnumcores.c cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/cbits/getnumcores.c --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/cbits/getnumcores.c 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/cbits/getnumcores.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 612) && !defined(CABAL_NO_THREADED) -/* Since version 6.12, GHC's threaded RTS includes a getNumberOfProcessors - function, so we try to use that if available. cabal-install is always built - with -threaded nowadays. 
*/ -#define HAS_GET_NUMBER_OF_PROCESSORS -#endif - - -#ifndef HAS_GET_NUMBER_OF_PROCESSORS - -#if defined(_WIN32) && !defined(__CYGWIN__) -#include -#elif MACOS -#include -#include -#elif __linux__ -#include -#endif - -int getNumberOfProcessors() { -#if defined(_WIN32) && !defined(__CYGWIN__) - SYSTEM_INFO sysinfo; - GetSystemInfo(&sysinfo); - return sysinfo.dwNumberOfProcessors; -#elif MACOS - int nm[2]; - size_t len = 4; - uint32_t count; - - nm[0] = CTL_HW; nm[1] = HW_AVAILCPU; - sysctl(nm, 2, &count, &len, NULL, 0); - - if(count < 1) { - nm[1] = HW_NCPU; - sysctl(nm, 2, &count, &len, NULL, 0); - if(count < 1) { count = 1; } - } - return count; -#elif __linux__ - return sysconf(_SC_NPROCESSORS_ONLN); -#else - return 1; -#endif -} - -#endif /* HAS_GET_NUMBER_OF_PROCESSORS */ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/changelog cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/changelog --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/changelog 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/changelog 1970-01-01 00:00:00.000000000 +0000 @@ -1,254 +0,0 @@ --*-change-log-*- -1.24.0.1 Ryan Thomas October 2016 - * Fixed issue with passing '--enable-profiling' when invoking - Setup scripts built with older versions of Cabal (#3873). - * Fixed various behaviour differences between network transports - (#3429). - * Updated to depend on the latest hackage-security that fixes - various issues on Windows. - * Fixed 'new-build' to exit with a non-zero exit code on failure - (#3506). - * Store secure repo index data as 01-index.* (#3862). - * Added new hackage-security root keys for distribution with - cabal-install. - * Fix an issue where 'cabal install' sometimes had to be run twice - for packages with build-type: Custom and a custom-setup stanza - (#3723). - * 'cabal sdist' no longer ignores '--builddir' when the package's - build-type is Custom (#3794). - -1.24.0.0 Ryan Thomas May 2016 - * If there are multiple remote repos, 'cabal update' now updates - them in parallel (#2503). - * New 'cabal upload' option '-P'/'--password-command' for reading - Hackage password from arbitrary program output (#2506). - * Better warning for 'cabal run' (#2510). - * 'cabal init' now warns if the chosen package name is already - registered in the source package index (#2436). - * New 'cabal install' option: '--offline' (#2578). - * Accept 'builddir' field in cabal.config (#2484) - * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable. - * Remote repos may now be configured to use https URLs. This uses - either curl or wget or, on Windows, PowerShell, under the hood (#2687). - * Install target URLs can now use https e.g. 'cabal install - https://example.com/foo-1.0.tar.gz'. - * Automatically use https for cabal upload for the main - hackage.haskell.org (other repos will use whatever they are - configured to use). - * Support for dependencies of custom Setup.hs scripts - (see http://www.well-typed.com/blog/2015/07/cabal-setup-deps/). - * 'cabal' program itself now can be used as an external setup - method. This fixes an issue when Cabal version mismatch caused - unnecessary reconfigures (#2633). - * Improved error message for unsatisfiable package constraints - (#2727). - * Fixed a space leak in 'cabal update' (#2826). - * 'cabal exec' and 'sandbox hc-pkg' now use the configured - compiler (#2859). 
- * New 'cabal haddock' option: '--for-hackage' (#2852). - * Added a warning when the solver cannot find a dependency (#2853). - * New 'cabal upload' option: '--doc': upload documentation to - hackage (#2890). - * Improved error handling for 'sandbox delete-source' (#2943). - * Solver support for extension and language flavours (#2873). - * Support for secure repos using hackage-security (#2983). - * Added a log file message similar to one printed by 'make' when - building in another directory (#2642). - * Added new subcommand 'init' to 'cabal user-config'. This - subcommand creates a cabal configuration file in either the - default location or as specified by --config-file (#2553). - * The man page for 'cabal-install' is now automatically generated - (#2877). - * The '--allow-newer' option now works as expected when specified - multiple times (#2588). - * New config file field: 'extra-framework-dirs' (extra locations - to find OS X frameworks in). Can be also specified as an argument - for 'install' and 'configure' commands (#3158). - * It's now possible to limit the scope of '--allow-newer' to - single packages in the install plan (#2756). - * Full '--allow-newer' syntax is now supported in the config file - (that is, 'allow-newer: base, ghc-prim, some-package:vector') - (#3171). - * Improved performance of '--reorder-goals' (#3208). - * Fixed space leaks in modular solver (#2916, #2914). - * Made the solver aware of pkg-config constraints (#3023). - * Added a new command: 'gen-bounds' (#3223). See - http://softwaresimply.blogspot.se/2015/08/cabal-gen-bounds-easy-generation-of.html. - * Tech preview of new nix-style isolated project-based builds. - Currently provides the commands (new-)build/repl/configure. - -1.22.0.0 Johan Tibell January 2015 - * New command: user-config (#2159). - * Implement 'cabal repl --only' (#2016). - * Fix an issue when 'cabal repl' was doing unnecessary compilation - (#1715). - * Prompt the user to specify source directory in 'cabal init' - (#1989). - * Remove the self-upgrade check (#2090). - * Don't redownload already downloaded packages when bootstrapping - (#2133). - * Support sandboxes in 'bootstrap.sh' (#2137). - * Install profiling and shared libs by default in 'bootstrap.sh' - (#2009). - -1.20.0.3 Johan Tibell June 2014 - * Don't attempt to rename dist if it is already named correctly - * Treat all flags of a package as interdependent. - * Allow template-haskell to be upgradable again - -1.20.0.2 Johan Tibell May 2014 - * Increase max-backjumps to 2000. - * Fix solver bug which led to missed install plans. - * Fix streaming test output. - * Tweak solver heuristics to avoid reinstalls. 
- -1.20.0.1 Johan Tibell May 2014 - * Fix cabal repl search path bug on Windows - * Include OS and arch in cabal-install user agent - * Revert --constraint flag behavior in configure to 1.18 behavior - -1.20.0.0 Johan Tibell April 2014 - * Build only selected executables - * Add -j flag to build/test/bench/run - * Improve install log file - * Don't symlink executables when in a sandbox - * Add --package-db flag to 'list' and 'info' - * Make upload more efficient - * Add --require-sandbox option - * Add experimental Cabal file format command - * Add haddock section to config file - * Add --main-is flag to init - -0.14.0 Andres Loeh April 2012 - * Works with ghc-7.4 - * Completely new modular dependency solver (default in most cases) - * Some tweaks to old topdown dependency solver - * Install plans are now checked for reinstalls that break packages - * Flags --constraint and --preference work for nonexisting packages - * New constraint forms for source and installed packages - * New constraint form for package-specific use flags - * New constraint form for package-specific stanza flags - * Test suite dependencies are pulled in on demand - * No longer install packages on --enable-tests when tests fail - * New "cabal bench" command - * Various "cabal init" tweaks - -0.10.0 Duncan Coutts February 2011 - * New package targets: local dirs, local and remote tarballs - * Initial support for a "world" package target - * Partial fix for situation where user packages mask global ones - * Removed cabal upgrade, new --upgrade-dependencies flag - * New cabal install --only-dependencies flag - * New cabal fetch --no-dependencies and --dry-run flags - * Improved output for cabal info - * Simpler and faster bash command line completion - * Fix for broken proxies that decompress wrongly - * Fix for cabal unpack to preserve executable permissions - * Adjusted the output for the -v verbosity level in a few places - -0.8.2 Duncan Coutts March 2010 - * Fix for cabal update on Windows - * On windows switch to per-user installs (rather than global) - * Handle intra-package dependencies in dependency planning - * Minor tweaks to cabal init feature - * Fix various -Wall warnings - * Fix for cabal sdist --snapshot - -0.8.0 Duncan Coutts Dec 2009 - * Works with ghc-6.12 - * New "cabal init" command for making initial project .cabal file - * New feature to maintain an index of haddock documentation - -0.6.4 Duncan Coutts Nov 2009 - * Improve the algorithm for selecting the base package version - * Hackage errors now reported by "cabal upload [--check]" - * Improved format of messages from "cabal check" - * Config file can now be selected by an env var - * Updated tar reading/writing code - * Improve instructions in the README and bootstrap output - * Fix bootstrap.sh on Solaris 9 - * Fix bootstrap for systems where network uses parsec 3 - * Fix building with ghc-6.6 - -0.6.2 Duncan Coutts Feb 2009 - * The upgrade command has been disabled in this release - * The configure and install commands now have consistent behaviour - * Reduce the tendancy to re-install already existing packages - * The --constraint= flag now works for the install command - * New --preference= flag for soft constraints / version preferences - * Improved bootstrap.sh script, smarter and better error checking - * New cabal info command to display detailed info on packages - * New cabal unpack command to download and untar a package - * HTTP-4000 package required, should fix bugs with http proxies - * Now works with authenticated proxies. 
- * On Windows can now override the proxy setting using an env var - * Fix compatability with config files generated by older versions - * Warn if the hackage package list is very old - * More helpful --help output, mention config file and examples - * Better documentation in ~/.cabal/config file - * Improved command line interface for logging and build reporting - * Minor improvements to some messages - -0.6.0 Duncan Coutts Oct 2008 - * Constraint solver can now cope with base 3 and base 4 - * Allow use of package version preferences from hackage index - * More detailed output from cabal install --dry-run -v - * Improved bootstrap.sh - -0.5.2 Duncan Coutts Aug 2008 - * Suport building haddock documentaion - * Self-reinstall now works on Windows - * Allow adding symlinks to excutables into a separate bindir - * New self-documenting config file - * New install --reinstall flag - * More helpful status messages in a couple places - * Upload failures now report full text error message from the server - * Support for local package repositories - * New build logging and reporting - * New command to upload build reports to (a compatible) server - * Allow tilde in hackage server URIs - * Internal code improvements - * Many other minor improvements and bug fixes - -0.5.1 Duncan Coutts June 2008 - * Restore minimal hugs support in dependency resolver - * Fix for disabled http proxies on Windows - * Revert to global installs on Windows by default - -0.5.0 Duncan Coutts June 2008 - * New package dependency resolver, solving diamond dep problem - * Integrate cabal-setup functionality - * Integrate cabal-upload functionality - * New cabal update and check commands - * Improved behavior for install and upgrade commands - * Full Windows support - * New command line handling - * Bash command line completion - * Allow case insensitive package names on command line - * New --dry-run flag for install, upgrade and fetch commands - * New --root-cmd flag to allow installing as root - * New --cabal-lib-version flag to select different Cabal lib versions - * Support for HTTP proxies - * Improved cabal list output - * Build other non-dependent packages even when some fail - * Report a summary of all build failures at the end - * Partial support for hugs - * Partial implementation of build reporting and logging - * More consistent logging and verbosity - * Significant internal code restructuring - -0.4 Duncan Coutts Oct 2007 - * Renamed executable from 'cabal-install' to 'cabal' - * Partial Windows compatability - * Do per-user installs by default - * cabal install now installs the package in the current directory - * Allow multiple remote servers - * Use zlib lib and internal tar code and rather than external tar - * Reorganised configuration files - * Significant code restructuring - * Cope with packages with conditional dependencies - -0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 - * Switch from smart-server, dumb-client model to the reverse - * New .tar.gz based index format - * New remote and local package archive format diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Anonymous.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Anonymous.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Anonymous.hs 2016-11-07 10:02:40.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Anonymous.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,315 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Reporting --- Copyright : (c) David Waern 2008 --- License : BSD-like --- --- Maintainer : david.waern@gmail.com --- Stability : experimental --- Portability : portable --- --- Anonymous build report data structure, printing and parsing --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Anonymous ( - BuildReport(..), - InstallOutcome(..), - Outcome(..), - - -- * Constructing and writing reports - new, - - -- * parsing and pretty printing - parse, - parseList, - show, --- showList, - ) where - -import qualified Distribution.Client.Types as BR - ( BuildResult, BuildFailure(..), BuildSuccess(..) - , DocsResult(..), TestsResult(..) ) -import Distribution.Client.Utils - ( mergeBy, MergeResult(..) ) -import qualified Paths_cabal_install (version) - -import Distribution.Package - ( PackageIdentifier(..), PackageName(..) ) -import Distribution.PackageDescription - ( FlagName(..), FlagAssignment ) ---import Distribution.Version --- ( Version ) -import Distribution.System - ( OS, Arch ) -import Distribution.Compiler - ( CompilerId(..) ) -import qualified Distribution.Text as Text - ( Text(disp, parse) ) -import Distribution.ParseUtils - ( FieldDescr(..), ParseResult(..), Field(..) - , simpleField, listField, ppFields, readFields - , syntaxError, locatedErrorMsg ) -import Distribution.Simple.Utils - ( comparing ) - -import qualified Distribution.Compat.ReadP as Parse - ( ReadP, pfail, munch1, skipSpaces ) -import qualified Text.PrettyPrint as Disp - ( Doc, render, char, text ) -import Text.PrettyPrint - ( (<+>), (<>) ) - -import Data.List - ( unfoldr, sortBy ) -import Data.Char as Char - ( isAlpha, isAlphaNum ) - -import Prelude hiding (show) - -data BuildReport - = BuildReport { - -- | The package this build report is about - package :: PackageIdentifier, - - -- | The OS and Arch the package was built on - os :: OS, - arch :: Arch, - - -- | The Haskell compiler (and hopefully version) used - compiler :: CompilerId, - - -- | The uploading client, ie cabal-install-x.y.z - client :: PackageIdentifier, - - -- | Which configurations flags we used - flagAssignment :: FlagAssignment, - - -- | Which dependent packages we were using exactly - dependencies :: [PackageIdentifier], - - -- | Did installing work ok? - installOutcome :: InstallOutcome, - - -- Which version of the Cabal library was used to compile the Setup.hs --- cabalVersion :: Version, - - -- Which build tools we were using (with versions) --- tools :: [PackageIdentifier], - - -- | Configure outcome, did configure work ok? - docsOutcome :: Outcome, - - -- | Configure outcome, did configure work ok? 
- testsOutcome :: Outcome - } - -data InstallOutcome - = PlanningFailed - | DependencyFailed PackageIdentifier - | DownloadFailed - | UnpackFailed - | SetupFailed - | ConfigureFailed - | BuildFailed - | TestsFailed - | InstallFailed - | InstallOk - deriving Eq - -data Outcome = NotTried | Failed | Ok - deriving Eq - -new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment - -> [PackageIdentifier] -> BR.BuildResult -> BuildReport -new os' arch' comp pkgid flags deps result = - BuildReport { - package = pkgid, - os = os', - arch = arch', - compiler = comp, - client = cabalInstallID, - flagAssignment = flags, - dependencies = deps, - installOutcome = convertInstallOutcome, --- cabalVersion = undefined - docsOutcome = convertDocsOutcome, - testsOutcome = convertTestsOutcome - } - where - convertInstallOutcome = case result of - Left BR.PlanningFailed -> PlanningFailed - Left (BR.DependentFailed p) -> DependencyFailed p - Left (BR.DownloadFailed _) -> DownloadFailed - Left (BR.UnpackFailed _) -> UnpackFailed - Left (BR.ConfigureFailed _) -> ConfigureFailed - Left (BR.BuildFailed _) -> BuildFailed - Left (BR.TestsFailed _) -> TestsFailed - Left (BR.InstallFailed _) -> InstallFailed - Right (BR.BuildOk _ _ _) -> InstallOk - convertDocsOutcome = case result of - Left _ -> NotTried - Right (BR.BuildOk BR.DocsNotTried _ _) -> NotTried - Right (BR.BuildOk BR.DocsFailed _ _) -> Failed - Right (BR.BuildOk BR.DocsOk _ _) -> Ok - convertTestsOutcome = case result of - Left (BR.TestsFailed _) -> Failed - Left _ -> NotTried - Right (BR.BuildOk _ BR.TestsNotTried _) -> NotTried - Right (BR.BuildOk _ BR.TestsOk _) -> Ok - -cabalInstallID :: PackageIdentifier -cabalInstallID = - PackageIdentifier (PackageName "cabal-install") Paths_cabal_install.version - --- ------------------------------------------------------------ --- * External format --- ------------------------------------------------------------ - -initialBuildReport :: BuildReport -initialBuildReport = BuildReport { - package = requiredField "package", - os = requiredField "os", - arch = requiredField "arch", - compiler = requiredField "compiler", - client = requiredField "client", - flagAssignment = [], - dependencies = [], - installOutcome = requiredField "install-outcome", --- cabalVersion = Nothing, --- tools = [], - docsOutcome = NotTried, - testsOutcome = NotTried - } - where - requiredField fname = error ("required field: " ++ fname) - --- ----------------------------------------------------------------------------- --- Parsing - -parse :: String -> Either String BuildReport -parse s = case parseFields s of - ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror - ParseOk _ report -> Right report - -parseFields :: String -> ParseResult BuildReport -parseFields input = do - fields <- mapM extractField =<< readFields input - let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name) - sortedFieldDescrs - (sortBy (comparing (\(_,name,_) -> name)) fields) - checkMerged initialBuildReport merged - - where - extractField :: Field -> ParseResult (Int, String, String) - extractField (F line name value) = return (line, name, value) - extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza" - extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza" - - checkMerged report [] = return report - checkMerged report (merged:remaining) = case merged of - InBoth fieldDescr (line, _name, value) -> do - report' <- fieldSet fieldDescr line value report - checkMerged report' 
remaining - OnlyInRight (line, name, _) -> - syntaxError line ("Unrecognized field " ++ name) - OnlyInLeft fieldDescr -> - fail ("Missing field " ++ fieldName fieldDescr) - -parseList :: String -> [BuildReport] -parseList str = - [ report | Right report <- map parse (split str) ] - - where - split :: String -> [String] - split = filter (not . null) . unfoldr chunk . lines - chunk [] = Nothing - chunk ls = case break null ls of - (r, rs) -> Just (unlines r, dropWhile null rs) - --- ----------------------------------------------------------------------------- --- Pretty-printing - -show :: BuildReport -> String -show = Disp.render . ppFields fieldDescrs - --- ----------------------------------------------------------------------------- --- Description of the fields, for parsing/printing - -fieldDescrs :: [FieldDescr BuildReport] -fieldDescrs = - [ simpleField "package" Text.disp Text.parse - package (\v r -> r { package = v }) - , simpleField "os" Text.disp Text.parse - os (\v r -> r { os = v }) - , simpleField "arch" Text.disp Text.parse - arch (\v r -> r { arch = v }) - , simpleField "compiler" Text.disp Text.parse - compiler (\v r -> r { compiler = v }) - , simpleField "client" Text.disp Text.parse - client (\v r -> r { client = v }) - , listField "flags" dispFlag parseFlag - flagAssignment (\v r -> r { flagAssignment = v }) - , listField "dependencies" Text.disp Text.parse - dependencies (\v r -> r { dependencies = v }) - , simpleField "install-outcome" Text.disp Text.parse - installOutcome (\v r -> r { installOutcome = v }) - , simpleField "docs-outcome" Text.disp Text.parse - docsOutcome (\v r -> r { docsOutcome = v }) - , simpleField "tests-outcome" Text.disp Text.parse - testsOutcome (\v r -> r { testsOutcome = v }) - ] - -sortedFieldDescrs :: [FieldDescr BuildReport] -sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs - -dispFlag :: (FlagName, Bool) -> Disp.Doc -dispFlag (FlagName name, True) = Disp.text name -dispFlag (FlagName name, False) = Disp.char '-' <> Disp.text name - -parseFlag :: Parse.ReadP r (FlagName, Bool) -parseFlag = do - name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') - case name of - ('-':flag) -> return (FlagName flag, False) - flag -> return (FlagName flag, True) - -instance Text.Text InstallOutcome where - disp PlanningFailed = Disp.text "PlanningFailed" - disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid - disp DownloadFailed = Disp.text "DownloadFailed" - disp UnpackFailed = Disp.text "UnpackFailed" - disp SetupFailed = Disp.text "SetupFailed" - disp ConfigureFailed = Disp.text "ConfigureFailed" - disp BuildFailed = Disp.text "BuildFailed" - disp TestsFailed = Disp.text "TestsFailed" - disp InstallFailed = Disp.text "InstallFailed" - disp InstallOk = Disp.text "InstallOk" - - parse = do - name <- Parse.munch1 Char.isAlphaNum - case name of - "PlanningFailed" -> return PlanningFailed - "DependencyFailed" -> do Parse.skipSpaces - pkgid <- Text.parse - return (DependencyFailed pkgid) - "DownloadFailed" -> return DownloadFailed - "UnpackFailed" -> return UnpackFailed - "SetupFailed" -> return SetupFailed - "ConfigureFailed" -> return ConfigureFailed - "BuildFailed" -> return BuildFailed - "TestsFailed" -> return TestsFailed - "InstallFailed" -> return InstallFailed - "InstallOk" -> return InstallOk - _ -> Parse.pfail - -instance Text.Text Outcome where - disp NotTried = Disp.text "NotTried" - disp Failed = Disp.text "Failed" - disp Ok = Disp.text "Ok" - parse = do - name <- Parse.munch1 Char.isAlpha 
- case name of - "NotTried" -> return NotTried - "Failed" -> return Failed - "Ok" -> return Ok - _ -> Parse.pfail diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Storage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Storage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Storage.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Storage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,161 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Reporting --- Copyright : (c) David Waern 2008 --- License : BSD-like --- --- Maintainer : david.waern@gmail.com --- Stability : experimental --- Portability : portable --- --- Anonymous build report data structure, printing and parsing --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Storage ( - - -- * Storing and retrieving build reports - storeAnonymous, - storeLocal, --- retrieve, - - -- * 'InstallPlan' support - fromInstallPlan, - fromPlanningFailure, - ) where - -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import Distribution.Client.BuildReports.Anonymous (BuildReport) - -import Distribution.Client.Types -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Client.InstallPlan - ( InstallPlan ) - -import Distribution.Package - ( PackageId, packageId ) -import Distribution.PackageDescription - ( FlagAssignment ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate - , initialPathTemplateEnv, substPathTemplate ) -import Distribution.System - ( Platform(Platform) ) -import Distribution.Compiler - ( CompilerId(..), CompilerInfo(..) ) -import Distribution.Simple.Utils - ( comparing, equating ) - -import Data.List - ( groupBy, sortBy ) -import Data.Maybe - ( catMaybes ) -import System.FilePath - ( (), takeDirectory ) -import System.Directory - ( createDirectoryIfMissing ) - -storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () -storeAnonymous reports = sequence_ - [ appendFile file (concatMap format reports') - | (repo, reports') <- separate reports - , let file = repoLocalDir repo "build-reports.log" ] - --TODO: make this concurrency safe, either lock the report file or make sure - -- the writes for each report are atomic (under 4k and flush at boundaries) - - where - format r = '\n' : BuildReport.show r ++ "\n" - separate :: [(BuildReport, Maybe Repo)] - -> [(Repo, [BuildReport])] - separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) - . map concat - . groupBy (equating (repoName . head)) - . sortBy (comparing (repoName . head)) - . groupBy (equating repoName) - . 
onlyRemote - repoName (_,_,rrepo) = remoteRepoName rrepo - - onlyRemote :: [(BuildReport, Maybe Repo)] - -> [(BuildReport, Repo, RemoteRepo)] - onlyRemote rs = - [ (report, repo, remoteRepo) - | (report, Just repo) <- rs - , Just remoteRepo <- [maybeRepoRemote repo] - ] - -storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] - -> Platform -> IO () -storeLocal cinfo templates reports platform = sequence_ - [ do createDirectoryIfMissing True (takeDirectory file) - appendFile file output - --TODO: make this concurrency safe, either lock the report file or make - -- sure the writes for each report are atomic - | (file, reports') <- groupByFileName - [ (reportFileName template report, report) - | template <- templates - , (report, _repo) <- reports ] - , let output = concatMap format reports' - ] - where - format r = '\n' : BuildReport.show r ++ "\n" - - reportFileName template report = - fromPathTemplate (substPathTemplate env template) - where env = initialPathTemplateEnv - (BuildReport.package report) - -- ToDo: In principle, we can support $pkgkey, but only - -- if the configure step succeeds. So add a Maybe field - -- to the build report, and either use that or make up - -- a fake identifier if it's not available. - (error "storeLocal: package key not available") - cinfo - platform - - groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) - . groupBy (equating fst) - . sortBy (comparing fst) - --- ------------------------------------------------------------ --- * InstallPlan support --- ------------------------------------------------------------ - -fromInstallPlan :: Platform -> CompilerId - -> InstallPlan - -> [(BuildReport, Maybe Repo)] -fromInstallPlan platform comp plan = - catMaybes - . map (fromPlanPackage platform comp) - . 
InstallPlan.toList - $ plan - -fromPlanPackage :: Platform -> CompilerId - -> InstallPlan.PlanPackage - -> Maybe (BuildReport, Maybe Repo) -fromPlanPackage (Platform arch os) comp planPackage = case planPackage of - InstallPlan.Installed (ReadyPackage (ConfiguredPackage srcPkg flags _ _) deps) - _ result - -> Just $ ( BuildReport.new os arch comp - (packageId srcPkg) flags - (map packageId (CD.nonSetupDeps deps)) - (Right result) - , extractRepo srcPkg) - - InstallPlan.Failed (ConfiguredPackage srcPkg flags _ deps) result - -> Just $ ( BuildReport.new os arch comp - (packageId srcPkg) flags - (map confSrcId (CD.nonSetupDeps deps)) - (Left result) - , extractRepo srcPkg ) - - _ -> Nothing - - where - extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) - = Just repo - extractRepo _ = Nothing - -fromPlanningFailure :: Platform -> CompilerId - -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] -fromPlanningFailure (Platform arch os) comp pkgids flags = - [ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing) - | pkgid <- pkgids ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Types.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.BuildReports.Types --- Copyright : (c) Duncan Coutts 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Types related to build reporting --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Types ( - ReportLevel(..), - ) where - -import qualified Distribution.Text as Text - ( Text(..) 
) - -import qualified Distribution.Compat.ReadP as Parse - ( pfail, munch1 ) -import qualified Text.PrettyPrint as Disp - ( text ) - -import Data.Char as Char - ( isAlpha, toLower ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary) - - -data ReportLevel = NoReports | AnonymousReports | DetailedReports - deriving (Eq, Ord, Enum, Show, Generic) - -instance Binary ReportLevel - -instance Text.Text ReportLevel where - disp NoReports = Disp.text "none" - disp AnonymousReports = Disp.text "anonymous" - disp DetailedReports = Disp.text "detailed" - parse = do - name <- Parse.munch1 Char.isAlpha - case lowercase name of - "none" -> return NoReports - "anonymous" -> return AnonymousReports - "detailed" -> return DetailedReports - _ -> Parse.pfail - -lowercase :: String -> String -lowercase = map Char.toLower diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Upload.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Upload.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Upload.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildReports/Upload.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,92 +0,0 @@ -{-# LANGUAGE CPP, PatternGuards #-} --- This is a quick hack for uploading build reports to Hackage. - -module Distribution.Client.BuildReports.Upload - ( BuildLog - , BuildReportId - , uploadReports - ) where - -{- -import Network.Browser - ( BrowserAction, request, setAllowRedirects ) -import Network.HTTP - ( Header(..), HeaderName(..) - , Request(..), RequestMethod(..), Response(..) ) -import Network.TCP (HandleStream) --} -import Network.URI (URI, uriPath) --parseRelativeReference, relativeTo) - -import Control.Monad - ( forM_ ) -import System.FilePath.Posix - ( () ) -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import Distribution.Client.BuildReports.Anonymous (BuildReport) -import Distribution.Text (display) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils (die) -import Distribution.Client.HttpUtils -import Distribution.Client.Setup - ( RepoContext(..) 
) - -type BuildReportId = URI -type BuildLog = String - -uploadReports :: Verbosity -> RepoContext -> (String, String) -> URI -> [(BuildReport, Maybe BuildLog)] -> IO () -uploadReports verbosity repoCtxt auth uri reports = do - forM_ reports $ \(report, mbBuildLog) -> do - buildId <- postBuildReport verbosity repoCtxt auth uri report - case mbBuildLog of - Just buildLog -> putBuildLog verbosity repoCtxt auth buildId buildLog - Nothing -> return () - -postBuildReport :: Verbosity -> RepoContext -> (String, String) -> URI -> BuildReport -> IO BuildReportId -postBuildReport verbosity repoCtxt auth uri buildReport = do - let fullURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" } - transport <- repoContextGetTransport repoCtxt - res <- postHttp transport verbosity fullURI (BuildReport.show buildReport) (Just auth) - case res of - (303, redir) -> return $ undefined redir --TODO parse redir - _ -> die "unrecognized response" -- give response - -{- - setAllowRedirects False - (_, response) <- request Request { - rqURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" }, - rqMethod = POST, - rqHeaders = [Header HdrContentType ("text/plain"), - Header HdrContentLength (show (length body)), - Header HdrAccept ("text/plain")], - rqBody = body - } - case rspCode response of - (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location -#if defined(VERSION_network_uri) - return $ relativeTo rel uri -#elif defined(VERSION_network) -#if MIN_VERSION_network(2,4,0) - return $ relativeTo rel uri -#else - relativeTo rel uri -#endif -#endif - | Header HdrLocation location <- rspHeaders response ] - -> return $ buildId - _ -> error "Unrecognised response from server." - where body = BuildReport.show buildReport --} - - --- TODO force this to be a PUT? 
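For context, the payload that postBuildReport sends is the flat field/value text produced by BuildReport.show, and BuildReport.parse reads the same format back. A minimal sketch of that round trip, using only the exported API of Distribution.Client.BuildReports.Anonymous shown earlier in this diff; the helper name and the sample field values below are invented for illustration and are not part of cabal-install:

import qualified Distribution.Client.BuildReports.Anonymous as BuildReport

-- Parse a report from its external text form and render it back;
-- malformed input is reported as Left with the parser's error message.
roundTrip :: String -> Either String String
roundTrip = fmap BuildReport.show . BuildReport.parse

-- A made-up report in that external form, using the field names from
-- fieldDescrs (all values are illustrative only):
--
--   package: foo-1.0
--   os: linux
--   arch: x86_64
--   compiler: ghc-8.0.1
--   client: cabal-install-1.24.0.2
--   flags: -debug
--   dependencies: base-4.9.0.0
--   install-outcome: InstallOk
--   docs-outcome: NotTried
--   tests-outcome: NotTried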
- -putBuildLog :: Verbosity -> RepoContext -> (String, String) - -> BuildReportId -> BuildLog - -> IO () -putBuildLog verbosity repoCtxt auth reportId buildLog = do - let fullURI = reportId {uriPath = uriPath reportId "log"} - transport <- repoContextGetTransport repoCtxt - res <- postHttp transport verbosity fullURI buildLog (Just auth) - case res of - (200, _) -> return () - _ -> die "unrecognized response" -- give response diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildTarget.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildTarget.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/BuildTarget.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1623 +0,0 @@ -{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.BuildTargets --- Copyright : (c) Duncan Coutts 2012, 2015 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified build targets ------------------------------------------------------------------------------ -module Distribution.Client.BuildTarget ( - - -- * Build targets - BuildTarget(..), - --showBuildTarget, - QualLevel(..), - buildTargetPackage, - buildTargetComponentName, - - -- * Top level convenience - readUserBuildTargets, - resolveUserBuildTargets, - - -- * Parsing user build targets - UserBuildTarget, - parseUserBuildTargets, - showUserBuildTarget, - UserBuildTargetProblem(..), - reportUserBuildTargetProblems, - - -- * Resolving build targets - resolveBuildTargets, - BuildTargetProblem(..), - reportBuildTargetProblems, - ) where - -import Distribution.Package - ( Package(..), PackageId, PackageName, packageName ) -import Distribution.Client.Types - ( PackageLocation(..) ) - -import Distribution.PackageDescription - ( PackageDescription - , Executable(..) - , TestSuite(..), TestSuiteInterface(..), testModules - , Benchmark(..), BenchmarkInterface(..), benchmarkModules - , BuildInfo(..), libModules, exeModules ) -import Distribution.ModuleName - ( ModuleName, toFilePath ) -import Distribution.Simple.LocalBuildInfo - ( Component(..), ComponentName(..) 
- , pkgComponents, componentName, componentBuildInfo ) - -import Distribution.Text - ( display, simpleParse ) -import Distribution.Simple.Utils - ( die, lowercase ) -import Distribution.Client.Utils - ( makeRelativeToCwd ) - -import Data.List - ( nub, nubBy, stripPrefix, partition, intercalate, sortBy, groupBy ) -import Data.Maybe - ( listToMaybe, maybeToList ) -import Data.Either - ( partitionEithers ) -import Data.Function - ( on ) -import GHC.Generics (Generic) -#if MIN_VERSION_containers(0,5,0) -import qualified Data.Map.Lazy as Map.Lazy -import qualified Data.Map.Strict as Map -import Data.Map.Strict (Map) -#else -import qualified Data.Map as Map.Lazy -import qualified Data.Map as Map -import Data.Map (Map) -#endif -import Control.Monad -#if __GLASGOW_HASKELL__ < 710 -import Control.Applicative (Applicative(..), (<$>)) -#endif -import Control.Applicative (Alternative(..)) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( (+++), (<++) ) -import Data.Char - ( isSpace, isAlphaNum ) -import System.FilePath as FilePath - ( takeExtension, dropExtension, addTrailingPathSeparator - , splitDirectories, joinPath, splitPath ) -import System.Directory - ( doesFileExist, doesDirectoryExist, canonicalizePath - , getCurrentDirectory ) -import System.FilePath - ( (), (<.>), normalise ) - - --- ------------------------------------------------------------ --- * User build targets --- ------------------------------------------------------------ - --- | Various ways that a user may specify a build target. --- --- The main general form has lots of optional parts: --- --- > [ package name | package dir | package .cabal file ] --- > [ [lib:|exe:] component name ] --- > [ module name | source file ] --- --- There's also a special case of a package tarball. It doesn't take part in --- the main general form since we always build a tarball package as a whole. --- --- > [package tar.gz file] --- -data UserBuildTarget = - - -- | A simple target specified by a single part. This is any of the - -- general forms that can be expressed using one part, which are: - -- - -- > cabal build foo -- package name - -- > cabal build ../bar ../bar/bar.cabal -- package dir or package file - -- > cabal build foo -- component name - -- > cabal build Data.Foo -- module name - -- > cabal build Data/Foo.hs bar/Main.hsc -- file name - -- - -- It can also be a package tarball. - -- - -- > cabal build bar.tar.gz - -- - UserBuildTarget1 String - - -- | A qualified target with two parts. This is any of the general - -- forms that can be expressed using two parts, which are: - -- - -- > cabal build foo:foo -- package : component - -- > cabal build foo:Data.Foo -- package : module - -- > cabal build foo:Data/Foo.hs -- package : filename - -- - -- > cabal build ./foo:foo -- package dir : component - -- > cabal build ./foo:Data.Foo -- package dir : module - -- - -- > cabal build ./foo.cabal:foo -- package file : component - -- > cabal build ./foo.cabal:Data.Foo -- package file : module - -- > cabal build ./foo.cabal:Main.hs -- package file : filename - -- - -- > cabal build lib:foo exe:foo -- namespace : component - -- > cabal build foo:Data.Foo -- component : module - -- > cabal build foo:Data/Foo.hs -- component : filename - -- - | UserBuildTarget2 String String - - -- A (very) qualified target with three parts. 
This is any of the general - -- forms that can be expressed using three parts, which are: - -- - -- > cabal build foo:lib:foo -- package : namespace : component - -- > cabal build foo:foo:Data.Foo -- package : component : module - -- > cabal build foo:foo:Data/Foo.hs -- package : component : filename - -- - -- > cabal build foo/:lib:foo -- pkg dir : namespace : component - -- > cabal build foo/:foo:Data.Foo -- pkg dir : component : module - -- > cabal build foo/:foo:Data/Foo.hs -- pkg dir : component : filename - -- - -- > cabal build foo.cabal:lib:foo -- pkg file : namespace : component - -- > cabal build foo.cabal:foo:Data.Foo -- pkg file : component : module - -- > cabal build foo.cabal:foo:Data/Foo.hs -- pkg file : component : filename - -- - -- > cabal build lib:foo:Data.Foo -- namespace : component : module - -- > cabal build lib:foo:Data/Foo.hs -- namespace : component : filename - -- - | UserBuildTarget3 String String String - - -- A (rediculously) qualified target with four parts. This is any of the - -- general forms that can be expressed using all four parts, which are: - -- - -- > cabal build foo:lib:foo:Data.Foo -- package : namespace : component : module - -- > cabal build foo:lib:foo:Data/Foo.hs -- package : namespace : component : filename - -- - -- > cabal build foo/:lib:foo:Data.Foo -- pkg dir : namespace : component : module - -- > cabal build foo/:lib:foo:Data/Foo.hs -- pkg dir : namespace : component : filename - -- - -- > cabal build foo.cabal:lib:foo:Data.Foo -- pkg file : namespace : component : module - -- > cabal build foo.cabal:lib:foo:Data/Foo.hs -- pkg file : namespace : component : filename - -- - | UserBuildTarget4 String String String String - deriving (Show, Eq, Ord) - - --- ------------------------------------------------------------ --- * Resolved build targets --- ------------------------------------------------------------ - --- | A fully resolved build target. --- -data BuildTarget pkg = - - -- | A package as a whole - -- - BuildTargetPackage pkg - - -- | A specific component - -- - | BuildTargetComponent pkg ComponentName - - -- | A specific module within a specific component. - -- - | BuildTargetModule pkg ComponentName ModuleName - - -- | A specific file within a specific component. - -- - | BuildTargetFile pkg ComponentName FilePath - deriving (Eq, Ord, Functor, Show, Generic) - - --- | Get the package that the 'BuildTarget' is referring to. --- -buildTargetPackage :: BuildTarget pkg -> pkg -buildTargetPackage (BuildTargetPackage p) = p -buildTargetPackage (BuildTargetComponent p _cn) = p -buildTargetPackage (BuildTargetModule p _cn _mn) = p -buildTargetPackage (BuildTargetFile p _cn _fn) = p - - --- | Get the 'ComponentName' that the 'BuildTarget' is referring to, if any. --- The 'BuildTargetPackage' target kind doesn't refer to any individual --- component, while the component, module and file kinds do. --- -buildTargetComponentName :: BuildTarget pkg -> Maybe ComponentName -buildTargetComponentName (BuildTargetPackage _p) = Nothing -buildTargetComponentName (BuildTargetComponent _p cn) = Just cn -buildTargetComponentName (BuildTargetModule _p cn _mn) = Just cn -buildTargetComponentName (BuildTargetFile _p cn _fn) = Just cn - - --- ------------------------------------------------------------ --- * Top level, do everything --- ------------------------------------------------------------ - - --- | Parse a bunch of command line args as user build targets, failing with an --- error if any targets are unrecognised. 
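Taken together, the two top-level convenience functions cover the whole flow from raw command-line arguments to resolved targets: readUserBuildTargets parses the strings (dying on bad syntax) and resolveUserBuildTargets matches the results against the project's local packages (dying if anything is unknown or ambiguous). A minimal sketch of how a caller might chain them, assuming only the exported signatures in this module; the wrapper name is invented:

import Distribution.Client.BuildTarget
         ( BuildTarget, readUserBuildTargets, resolveUserBuildTargets )
import Distribution.Client.Types (PackageLocation)
import Distribution.Package (PackageName)
import Distribution.PackageDescription (PackageDescription)

-- Turn raw command-line arguments into resolved build targets for the
-- given local packages; both steps report problems via 'die'.
resolveArgs :: [(PackageDescription, PackageLocation ())]
            -> [String] -> IO [BuildTarget PackageName]
resolveArgs pkgs args = do
  utargets <- readUserBuildTargets args
  resolveUserBuildTargets pkgs utargets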
--- -readUserBuildTargets :: [String] -> IO [UserBuildTarget] -readUserBuildTargets targetStrs = do - let (uproblems, utargets) = parseUserBuildTargets targetStrs - reportUserBuildTargetProblems uproblems - return utargets - - --- | A 'UserBuildTarget's is just a semi-structured string. We sill have quite --- a bit of work to do to figure out which targets they refer to (ie packages, --- components, file locations etc). --- --- The possible targets are based on the available packages (and their --- locations). It fails with an error if any user string cannot be matched to --- a valid target. --- -resolveUserBuildTargets :: [(PackageDescription, PackageLocation a)] - -> [UserBuildTarget] -> IO [BuildTarget PackageName] -resolveUserBuildTargets pkgs utargets = do - utargets' <- mapM getUserTargetFileStatus utargets - pkgs' <- mapM (uncurry selectPackageInfo) pkgs - pwd <- getCurrentDirectory - let (primaryPkg, otherPkgs) = selectPrimaryLocalPackage pwd pkgs' - (bproblems, btargets) = resolveBuildTargets - primaryPkg otherPkgs utargets'' - -- default local dir target if there's no given target - utargets'' - | not (null primaryPkg) - , null utargets = [UserBuildTargetFileStatus1 "./" - (FileStatusExistsDir pwd)] - | otherwise = utargets' - - reportBuildTargetProblems bproblems - return (map (fmap packageName) btargets) - where - selectPrimaryLocalPackage :: FilePath - -> [PackageInfo] - -> ([PackageInfo], [PackageInfo]) - selectPrimaryLocalPackage pwd pkgs' = - let (primary, others) = partition isPrimary pkgs' - in (primary, others) - where - isPrimary PackageInfo { pinfoDirectory = Just (dir,_) } - | dir == pwd = True - isPrimary _ = False - - --- ------------------------------------------------------------ --- * Checking if targets exist as files --- ------------------------------------------------------------ - -data UserBuildTargetFileStatus = - UserBuildTargetFileStatus1 String FileStatus - | UserBuildTargetFileStatus2 String FileStatus String - | UserBuildTargetFileStatus3 String FileStatus String String - | UserBuildTargetFileStatus4 String FileStatus String String String - deriving (Eq, Ord, Show) - -data FileStatus = FileStatusExistsFile FilePath -- the canonicalised filepath - | FileStatusExistsDir FilePath -- the canonicalised filepath - | FileStatusNotExists Bool -- does the parent dir exist even? 
- deriving (Eq, Ord, Show) - -getUserTargetFileStatus :: UserBuildTarget -> IO UserBuildTargetFileStatus -getUserTargetFileStatus t = - case t of - UserBuildTarget1 s1 -> - (\f1 -> UserBuildTargetFileStatus1 s1 f1) <$> fileStatus s1 - UserBuildTarget2 s1 s2 -> - (\f1 -> UserBuildTargetFileStatus2 s1 f1 s2) <$> fileStatus s1 - UserBuildTarget3 s1 s2 s3 -> - (\f1 -> UserBuildTargetFileStatus3 s1 f1 s2 s3) <$> fileStatus s1 - UserBuildTarget4 s1 s2 s3 s4 -> - (\f1 -> UserBuildTargetFileStatus4 s1 f1 s2 s3 s4) <$> fileStatus s1 - where - fileStatus f = do - fexists <- doesFileExist f - dexists <- doesDirectoryExist f - case splitPath f of - _ | fexists -> FileStatusExistsFile <$> canonicalizePath f - | dexists -> FileStatusExistsDir <$> canonicalizePath f - (d:_) -> FileStatusNotExists <$> doesDirectoryExist d - _ -> error "getUserTargetFileStatus: empty path" - -forgetFileStatus :: UserBuildTargetFileStatus -> UserBuildTarget -forgetFileStatus t = case t of - UserBuildTargetFileStatus1 s1 _ -> UserBuildTarget1 s1 - UserBuildTargetFileStatus2 s1 _ s2 -> UserBuildTarget2 s1 s2 - UserBuildTargetFileStatus3 s1 _ s2 s3 -> UserBuildTarget3 s1 s2 s3 - UserBuildTargetFileStatus4 s1 _ s2 s3 s4 -> UserBuildTarget4 s1 s2 s3 s4 - - --- ------------------------------------------------------------ --- * Parsing user targets --- ------------------------------------------------------------ - - --- | Parse a bunch of 'UserBuildTarget's (purely without throwing exceptions). --- -parseUserBuildTargets :: [String] -> ([UserBuildTargetProblem] - ,[UserBuildTarget]) -parseUserBuildTargets = partitionEithers . map parseUserBuildTarget - -parseUserBuildTarget :: String -> Either UserBuildTargetProblem - UserBuildTarget -parseUserBuildTarget targetstr = - case readPToMaybe parseTargetApprox targetstr of - Nothing -> Left (UserBuildTargetUnrecognised targetstr) - Just tgt -> Right tgt - - where - parseTargetApprox :: Parse.ReadP r UserBuildTarget - parseTargetApprox = - (do a <- tokenQ - return (UserBuildTarget1 a)) - +++ (do a <- tokenQ - _ <- Parse.char ':' - b <- tokenQ - return (UserBuildTarget2 a b)) - +++ (do a <- tokenQ - _ <- Parse.char ':' - b <- tokenQ - _ <- Parse.char ':' - c <- tokenQ - return (UserBuildTarget3 a b c)) - +++ (do a <- tokenQ - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - _ <- Parse.char ':' - d <- tokenQ - return (UserBuildTarget4 a b c d)) - - token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') - tokenQ = parseHaskellString <++ token - parseHaskellString :: Parse.ReadP r String - parseHaskellString = Parse.readS_to_P reads - - readPToMaybe :: Parse.ReadP a a -> String -> Maybe a - readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str - , all isSpace s ] - --- | Syntax error when trying to parse a 'UserBuildTarget'. -data UserBuildTargetProblem - = UserBuildTargetUnrecognised String - deriving Show - --- | Throw an exception with a formatted message if there are any problems. --- -reportUserBuildTargetProblems :: [UserBuildTargetProblem] -> IO () -reportUserBuildTargetProblems problems = do - case [ target | UserBuildTargetUnrecognised target <- problems ] of - [] -> return () - target -> - die $ unlines - [ "Unrecognised build target syntax for '" ++ name ++ "'." 
- | name <- target ] - ++ "Syntax:\n" - ++ " - build [package]\n" - ++ " - build [package:]component\n" - ++ " - build [package:][component:]module\n" - ++ " - build [package:][component:]file\n" - ++ " where\n" - ++ " package is a package name, package dir or .cabal file\n\n" - ++ "Examples:\n" - ++ " - build foo -- package name\n" - ++ " - build tests -- component name\n" - ++ " (name of library, executable, test-suite or benchmark)\n" - ++ " - build Data.Foo -- module name\n" - ++ " - build Data/Foo.hsc -- file name\n\n" - ++ "An ambigious target can be qualified by package, component\n" - ++ "and/or component kind (lib|exe|test|bench)\n" - ++ " - build foo:tests -- component qualified by package\n" - ++ " - build tests:Data.Foo -- module qualified by component\n" - ++ " - build lib:foo -- component qualified by kind" - - --- | Render a 'UserBuildTarget' back as the external syntax. This is mainly for --- error messages. --- -showUserBuildTarget :: UserBuildTarget -> String -showUserBuildTarget = intercalate ":" . components - where - components (UserBuildTarget1 s1) = [s1] - components (UserBuildTarget2 s1 s2) = [s1,s2] - components (UserBuildTarget3 s1 s2 s3) = [s1,s2,s3] - components (UserBuildTarget4 s1 s2 s3 s4) = [s1,s2,s3,s4] - -showBuildTarget :: QualLevel -> BuildTarget PackageInfo -> String -showBuildTarget ql = showUserBuildTarget . forgetFileStatus - . head . renderBuildTarget ql - - --- ------------------------------------------------------------ --- * Resolving user targets to build targets --- ------------------------------------------------------------ - - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. --- -resolveBuildTargets :: [PackageInfo] -- any primary pkg, e.g. cur dir - -> [PackageInfo] -- all the other local packages - -> [UserBuildTargetFileStatus] - -> ([BuildTargetProblem], [BuildTarget PackageInfo]) -resolveBuildTargets ppinfo opinfo = - partitionEithers - . map (resolveBuildTarget ppinfo opinfo) - -resolveBuildTarget :: [PackageInfo] -> [PackageInfo] - -> UserBuildTargetFileStatus - -> Either BuildTargetProblem (BuildTarget PackageInfo) -resolveBuildTarget ppinfo opinfo userTarget = - case findMatch (matcher userTarget) of - Unambiguous target -> Right target - None errs -> Left (classifyMatchErrors errs) - Ambiguous exactMatch targets -> - case disambiguateBuildTargets - matcher userTarget exactMatch - targets of - Right targets' -> Left (BuildTargetAmbiguous userTarget' targets') - Left ((m, ms):_) -> Left (MatchingInternalError userTarget' m ms) - Left [] -> internalError "resolveBuildTarget" - where - matcher = matchBuildTarget ppinfo opinfo - - userTarget' = forgetFileStatus userTarget - - classifyMatchErrors errs - | not (null expected) - = let (things, got:_) = unzip expected in - BuildTargetExpected userTarget' things got - - | not (null nosuch) - = BuildTargetNoSuch userTarget' nosuch - - | otherwise - = internalError $ "classifyMatchErrors: " ++ show errs - where - expected = [ (thing, got) - | (_, MatchErrorExpected thing got) - <- map (innerErr Nothing) errs ] - nosuch = [ (inside, thing, got, alts) - | (inside, MatchErrorNoSuch thing got alts) - <- map (innerErr Nothing) errs ] - - innerErr _ (MatchErrorIn kind thing m) - = innerErr (Just (kind,thing)) m - innerErr c m = (c,m) - - --- | The various ways that trying to resolve a 'UserBuildTarget' to a --- 'BuildTarget' can fail. 
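Since the parser and renderer above are pure, the user-target syntax can be exercised without any project context: parseUserBuildTargets partitions its inputs into unrecognised strings and parsed UserBuildTarget values, and showUserBuildTarget renders a value back to the colon-separated form. A small sketch using only exported names; the example argument strings are arbitrary:

import Distribution.Client.BuildTarget
         ( UserBuildTargetProblem(..)
         , parseUserBuildTargets, showUserBuildTarget )

-- Partition some example arguments into syntax errors and recognised
-- targets, then render the recognised ones back to their string form.
demo :: ([String], [String])
demo = (bad, map showUserBuildTarget ok)
  where
    (problems, ok) = parseUserBuildTargets ["foo", "lib:foo", "foo:lib:foo:Data.Foo"]
    bad            = [ s | UserBuildTargetUnrecognised s <- problems ]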
--- -data BuildTargetProblem - = BuildTargetExpected UserBuildTarget [String] String - -- ^ [expected thing] (actually got) - | BuildTargetNoSuch UserBuildTarget - [(Maybe (String, String), String, String, [String])] - -- ^ [([in thing], no such thing, actually got, alternatives)] - | BuildTargetAmbiguous UserBuildTarget - [(UserBuildTarget, BuildTarget PackageInfo)] - - | MatchingInternalError UserBuildTarget (BuildTarget PackageInfo) - [(UserBuildTarget, [BuildTarget PackageInfo])] - - -disambiguateBuildTargets - :: (UserBuildTargetFileStatus -> Match (BuildTarget PackageInfo)) - -> UserBuildTargetFileStatus -> Bool - -> [BuildTarget PackageInfo] - -> Either [(BuildTarget PackageInfo, - [(UserBuildTarget, [BuildTarget PackageInfo])])] - [(UserBuildTarget, BuildTarget PackageInfo)] -disambiguateBuildTargets matcher matchInput exactMatch matchResults = - case partitionEithers results of - (errs@(_:_), _) -> Left errs - ([], ok) -> Right ok - where - -- So, here's the strategy. We take the original match results, and make a - -- table of all their renderings at all qualification levels. - -- Note there can be multiple renderings at each qualification level. - matchResultsRenderings :: [(BuildTarget PackageInfo, [UserBuildTargetFileStatus])] - matchResultsRenderings = - [ (matchResult, matchRenderings) - | matchResult <- matchResults - , let matchRenderings = - [ rendering - | ql <- [QL1 .. QL4] - , rendering <- renderBuildTarget ql matchResult ] - ] - - -- Of course the point is that we're looking for renderings that are - -- unambiguous matches. So we build another memo table of all the matches - -- for all of those renderings. So by looking up in this table we can see - -- if we've got an unambiguous match. - - memoisedMatches :: Map UserBuildTargetFileStatus - (Match (BuildTarget PackageInfo)) - memoisedMatches = - -- avoid recomputing the main one if it was an exact match - (if exactMatch then Map.insert matchInput (ExactMatch 0 matchResults) - else id) - $ Map.Lazy.fromList - [ (rendering, matcher rendering) - | rendering <- concatMap snd matchResultsRenderings ] - - -- Finally, for each of the match results, we go through all their - -- possible renderings (in order of qualification level, though remember - -- there can be multiple renderings per level), and find the first one - -- that has an unambiguous match. - results :: [Either (BuildTarget PackageInfo, - [(UserBuildTarget, [BuildTarget PackageInfo])]) - (UserBuildTarget, BuildTarget PackageInfo)] - results = - [ case findUnambiguous originalMatch matchRenderings of - Just unambiguousRendering -> - Right ( forgetFileStatus unambiguousRendering - , originalMatch) - - -- This case is an internal error, but we bubble it up and report it - Nothing -> - Left ( originalMatch - , [ (forgetFileStatus rendering, matches) - | rendering <- matchRenderings - , let (ExactMatch _ matches) = - memoisedMatches Map.! rendering - ] ) - - | (originalMatch, matchRenderings) <- matchResultsRenderings ] - - findUnambiguous :: BuildTarget PackageInfo -> [UserBuildTargetFileStatus] - -> Maybe UserBuildTargetFileStatus - findUnambiguous _ [] = Nothing - findUnambiguous t (r:rs) = - case memoisedMatches Map.! 
r of - ExactMatch _ [t'] | fmap packageName t == fmap packageName t' - -> Just r - ExactMatch _ _ -> findUnambiguous t rs - InexactMatch _ _ -> internalError "InexactMatch" - NoMatch _ _ -> internalError "NoMatch" - -internalError :: String -> a -internalError msg = - error $ "BuildTargets: internal error: " ++ msg - - -data QualLevel = QL1 | QL2 | QL3 | QL4 - deriving (Enum, Show) - -renderBuildTarget :: QualLevel -> BuildTarget PackageInfo - -> [UserBuildTargetFileStatus] -renderBuildTarget ql t = - case t of - BuildTargetPackage p -> - case ql of - QL1 -> [t1 (dispP p)] - QL2 -> [t1' pf fs | (pf, fs) <- dispPF p] - QL3 -> [] - QL4 -> [] - - BuildTargetComponent p c -> - case ql of - QL1 -> [t1 (dispC p c)] - QL2 -> [t2 (dispP p) (dispC p c), - t2 (dispK c) (dispC p c)] - QL3 -> [t3 (dispP p) (dispK c) (dispC p c)] - QL4 -> [] - - BuildTargetModule p c m -> - case ql of - QL1 -> [t1 (dispM m)] - QL2 -> [t2 (dispP p) (dispM m), - t2 (dispC p c) (dispM m)] - QL3 -> [t3 (dispP p) (dispC p c) (dispM m), - t3 (dispK c) (dispC p c) (dispM m)] - QL4 -> [t4 (dispP p) (dispK c) (dispC p c) (dispM m)] - - BuildTargetFile p c f -> - case ql of - QL1 -> [t1 f] - QL2 -> [t2 (dispP p) f, - t2 (dispC p c) f] - QL3 -> [t3 (dispP p) (dispC p c) f, - t3 (dispK c) (dispC p c) f] - QL4 -> [t4 (dispP p) (dispK c) (dispC p c) f] - where - t1 s1 = UserBuildTargetFileStatus1 s1 none - t1' s1 = UserBuildTargetFileStatus1 s1 - t2 s1 = UserBuildTargetFileStatus2 s1 none - t3 s1 = UserBuildTargetFileStatus3 s1 none - t4 s1 = UserBuildTargetFileStatus4 s1 none - none = FileStatusNotExists False - - dispP = display . packageName - dispC = componentStringName . packageName - dispK = showComponentKindShort . componentKind - dispM = display - - dispPF p = [ (addTrailingPathSeparator drel, FileStatusExistsDir dabs) - | PackageInfo { pinfoDirectory = Just (dabs,drel) } <- [p] ] - ++ [ (frel, FileStatusExistsFile fabs) - | PackageInfo { pinfoPackageFile = Just (fabs,frel) } <- [p] ] - - --- | Throw an exception with a formatted message if there are any problems. --- -reportBuildTargetProblems :: [BuildTargetProblem] -> IO () -reportBuildTargetProblems problems = do - - case [ (t, m, ms) | MatchingInternalError t m ms <- problems ] of - [] -> return () - ((target, originalMatch, renderingsAndMatches):_) -> - die $ "Internal error in build target matching. It should always be " - ++ "possible to find a syntax that's sufficiently qualified to " - ++ "give an unambigious match. However when matching '" - ++ showUserBuildTarget target ++ "' we found " - ++ showBuildTarget QL1 originalMatch - ++ " (" ++ showBuildTargetKind originalMatch ++ ") which does not " - ++ "have an unambigious syntax. The possible syntax and the " - ++ "targets they match are as follows:\n" - ++ unlines - [ "'" ++ showUserBuildTarget rendering ++ "' which matches " - ++ intercalate ", " - [ showBuildTarget QL1 match ++ - " (" ++ showBuildTargetKind match ++ ")" - | match <- matches ] - | (rendering, matches) <- renderingsAndMatches ] - - case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Unrecognised build target '" ++ showUserBuildTarget target - ++ "'.\n" - ++ "Expected a " ++ intercalate " or " expected - ++ ", rather than '" ++ got ++ "'." 
- | (target, expected, got) <- targets ] - - case [ (t, e) | BuildTargetNoSuch t e <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Unknown build target '" ++ showUserBuildTarget target ++ - "'.\n" ++ unlines - [ (case inside of - Just (kind, thing) - -> "The " ++ kind ++ " " ++ thing ++ " has no " - Nothing -> "There is no ") - ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" - | (thing, got, _alts) <- nosuch' ] ++ "." - ++ if null alternatives then "" else - "\nPerhaps you meant " ++ intercalate ";\nor " - [ "the " ++ thing ++ " " ++ intercalate " or " alts - | (thing, alts) <- alternatives ] - | (inside, nosuch') <- groupByContainer nosuch - , let alternatives = - [ (thing, take 10 alts) --TODO: select best ones - | (thing,_got,alts@(_:_)) <- nosuch' ] - ] - | (target, nosuch) <- targets - , let groupByContainer = - map (\g@((inside,_,_,_):_) -> - (inside, [ (thing,got,alts) - | (_,thing,got,alts) <- g ])) - . groupBy ((==) `on` (\(x,_,_,_) -> x)) - . sortBy (compare `on` (\(x,_,_,_) -> x)) - ] - where - mungeThing "file" = "file target" - mungeThing thing = thing - - case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of - [] -> return () - targets -> - die $ unlines - [ "Ambiguous build target '" ++ showUserBuildTarget target - ++ "'. It could be:\n " - ++ unlines [ " "++ showUserBuildTarget ut ++ - " (" ++ showBuildTargetKind bt ++ ")" - | (ut, bt) <- amb ] - | (target, amb) <- targets ] - - where - showBuildTargetKind (BuildTargetPackage _ ) = "package" - showBuildTargetKind (BuildTargetComponent _ _ ) = "component" - showBuildTargetKind (BuildTargetModule _ _ _) = "module" - showBuildTargetKind (BuildTargetFile _ _ _) = "file" - - ----------------------------------- --- Top level BuildTarget matcher --- - -matchBuildTarget :: [PackageInfo] -> [PackageInfo] - -> UserBuildTargetFileStatus - -> Match (BuildTarget PackageInfo) -matchBuildTarget ppinfo opinfo = \utarget -> - nubMatchesBy ((==) `on` (fmap packageName)) $ - case utarget of - UserBuildTargetFileStatus1 str1 fstatus1 -> - matchBuildTarget1 ppinfo opinfo str1 fstatus1 - - UserBuildTargetFileStatus2 str1 fstatus1 str2 -> - matchBuildTarget2 pinfo str1 fstatus1 str2 - - UserBuildTargetFileStatus3 str1 fstatus1 str2 str3 -> - matchBuildTarget3 pinfo str1 fstatus1 str2 str3 - - UserBuildTargetFileStatus4 str1 fstatus1 str2 str3 str4 -> - matchBuildTarget4 pinfo str1 fstatus1 str2 str3 str4 - where - pinfo = ppinfo ++ opinfo - --TODO: sort this out - - -matchBuildTarget1 :: [PackageInfo] -> [PackageInfo] - -> String -> FileStatus -> Match (BuildTarget PackageInfo) -matchBuildTarget1 ppinfo opinfo = \str1 fstatus1 -> - match1Cmp pcinfo str1 - match1Pkg pinfo str1 fstatus1 - match1Cmp ocinfo str1 - match1Mod cinfo str1 - match1Fil pinfo str1 fstatus1 - where - pinfo = ppinfo ++ opinfo - cinfo = concatMap pinfoComponents pinfo - pcinfo = concatMap pinfoComponents ppinfo - ocinfo = concatMap pinfoComponents opinfo - - -matchBuildTarget2 :: [PackageInfo] -> String -> FileStatus -> String - -> Match (BuildTarget PackageInfo) -matchBuildTarget2 pinfo str1 fstatus1 str2 = - match2PkgCmp pinfo str1 fstatus1 str2 - <|> match2KndCmp cinfo str1 str2 - match2PkgMod pinfo str1 fstatus1 str2 - match2CmpMod cinfo str1 str2 - match2PkgFil pinfo str1 fstatus1 str2 - match2CmpFil cinfo str1 str2 - where - cinfo = concatMap pinfoComponents pinfo - --TODO: perhaps we actually do want to prioritise local/primary components - - -matchBuildTarget3 :: [PackageInfo] -> String -> FileStatus -> String -> String - -> 
Match (BuildTarget PackageInfo) -matchBuildTarget3 pinfo str1 fstatus1 str2 str3 = - match3PkgKndCmp pinfo str1 fstatus1 str2 str3 - match3PkgCmpMod pinfo str1 fstatus1 str2 str3 - match3PkgCmpFil pinfo str1 fstatus1 str2 str3 - match3KndCmpMod cinfo str1 str2 str3 - match3KndCmpFil cinfo str1 str2 str3 - where - cinfo = concatMap pinfoComponents pinfo - - -matchBuildTarget4 :: [PackageInfo] - -> String -> FileStatus -> String -> String -> String - -> Match (BuildTarget PackageInfo) -matchBuildTarget4 pinfo str1 fstatus1 str2 str3 str4 = - match4PkgKndCmpMod pinfo str1 fstatus1 str2 str3 str4 - match4PkgKndCmpFil pinfo str1 fstatus1 str2 str3 str4 - - ------------------------------------- --- Individual BuildTarget matchers --- - -match1Pkg :: [PackageInfo] -> String -> FileStatus - -> Match (BuildTarget PackageInfo) -match1Pkg pinfo = \str1 fstatus1 -> do - guardPackage str1 fstatus1 - p <- matchPackage pinfo str1 fstatus1 - return (BuildTargetPackage p) - -match1Cmp :: [ComponentInfo] -> String -> Match (BuildTarget PackageInfo) -match1Cmp cs = \str1 -> do - guardComponentName str1 - c <- matchComponentName cs str1 - return (BuildTargetComponent (cinfoPackage c) (cinfoName c)) - -match1Mod :: [ComponentInfo] -> String -> Match (BuildTarget PackageInfo) -match1Mod cs = \str1 -> do - guardModuleName str1 - let ms = [ (m,c) | c <- cs, m <- cinfoModules c ] - (m,c) <- matchModuleNameAnd ms str1 - return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) - -match1Fil :: [PackageInfo] -> String -> FileStatus - -> Match (BuildTarget PackageInfo) -match1Fil ps str1 fstatus1 = - expecting "file" str1 $ do - (pkgfile, p) <- matchPackageDirectoryPrefix ps fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - (filepath, c) <- matchComponentFile (pinfoComponents p) pkgfile - return (BuildTargetFile p (cinfoName c) filepath) - ---- - -match2PkgCmp :: [PackageInfo] - -> String -> FileStatus -> String - -> Match (BuildTarget PackageInfo) -match2PkgCmp ps = \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentName (pinfoComponents p) str2 - return (BuildTargetComponent p (cinfoName c)) - --TODO: the error here ought to say there's no component by that name in - -- this package, and name the package - -match2KndCmp :: [ComponentInfo] -> String -> String - -> Match (BuildTarget PackageInfo) -match2KndCmp cs = \str1 str2 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - return (BuildTargetComponent (cinfoPackage c) (cinfoName c)) - -match2PkgMod :: [PackageInfo] -> String -> FileStatus -> String - -> Match (BuildTarget PackageInfo) -match2PkgMod ps = \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - guardModuleName str2 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - let ms = [ (m,c) | c <- pinfoComponents p, m <- cinfoModules c ] - (m,c) <- matchModuleNameAnd ms str2 - return (BuildTargetModule p (cinfoName c) m) - -match2CmpMod :: [ComponentInfo] -> String -> String - -> Match (BuildTarget PackageInfo) -match2CmpMod cs = \str1 str2 -> do - guardComponentName str1 - guardModuleName str2 - c <- matchComponentName cs str1 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str2 - return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) - -match2PkgFil :: [PackageInfo] -> String -> 
FileStatus -> String - -> Match (BuildTarget PackageInfo) -match2PkgFil ps str1 fstatus1 str2 = do - guardPackage str1 fstatus1 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - (filepath, c) <- matchComponentFile (pinfoComponents p) str2 - return (BuildTargetFile p (cinfoName c) filepath) - -match2CmpFil :: [ComponentInfo] -> String -> String - -> Match (BuildTarget PackageInfo) -match2CmpFil cs str1 str2 = do - guardComponentName str1 - c <- matchComponentName cs str1 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- matchComponentFile [c] str2 - return (BuildTargetFile (cinfoPackage c) (cinfoName c) filepath) - ---- - -match3PkgKndCmp :: [PackageInfo] - -> String -> FileStatus -> String -> String - -> Match (BuildTarget PackageInfo) -match3PkgKndCmp ps = \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - ckind <- matchComponentKind str2 - guardComponentName str3 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentKindAndName (pinfoComponents p) ckind str3 - return (BuildTargetComponent p (cinfoName c)) - -match3PkgCmpMod :: [PackageInfo] - -> String -> FileStatus -> String -> String - -> Match (BuildTarget PackageInfo) -match3PkgCmpMod ps = \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - guardModuleName str3 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentName (pinfoComponents p) str2 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (BuildTargetModule p (cinfoName c) m) - -match3KndCmpMod :: [ComponentInfo] - -> String -> String -> String - -> Match (BuildTarget PackageInfo) -match3KndCmpMod cs = \str1 str2 str3 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - guardModuleName str3 - c <- matchComponentKindAndName cs ckind str2 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) - -match3PkgCmpFil :: [PackageInfo] - -> String -> FileStatus -> String -> String - -> Match (BuildTarget PackageInfo) -match3PkgCmpFil ps = \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentName (pinfoComponents p) str2 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- matchComponentFile [c] str3 - return (BuildTargetFile p (cinfoName c) filepath) - -match3KndCmpFil :: [ComponentInfo] -> String -> String -> String - -> Match (BuildTarget PackageInfo) -match3KndCmpFil cs = \str1 str2 str3 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- matchComponentFile [c] str3 - return (BuildTargetFile (cinfoPackage c) (cinfoName c) filepath) - --- - -match4PkgKndCmpMod :: [PackageInfo] - -> String-> FileStatus -> String -> String -> String - -> Match (BuildTarget PackageInfo) -match4PkgKndCmpMod ps = \str1 fstatus1 str2 str3 str4 -> do - guardPackage str1 fstatus1 - ckind <- matchComponentKind str2 - guardComponentName str3 - guardModuleName str4 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentKindAndName (pinfoComponents p) ckind str3 - 
orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str4 - return (BuildTargetModule p (cinfoName c) m) - -match4PkgKndCmpFil :: [PackageInfo] - -> String -> FileStatus -> String -> String -> String - -> Match (BuildTarget PackageInfo) -match4PkgKndCmpFil ps = \str1 fstatus1 str2 str3 str4 -> do - guardPackage str1 fstatus1 - ckind <- matchComponentKind str2 - guardComponentName str3 - p <- matchPackage ps str1 fstatus1 - orNoThingIn "package" (display (packageName p)) $ do - c <- matchComponentKindAndName (pinfoComponents p) ckind str3 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath,_) <- matchComponentFile [c] str4 - return (BuildTargetFile p (cinfoName c) filepath) - - -------------------------------- --- Package and component info --- - -data PackageInfo = PackageInfo { - pinfoId :: PackageId, - pinfoLocation :: PackageLocation (), - pinfoDirectory :: Maybe (FilePath, FilePath), - pinfoPackageFile :: Maybe (FilePath, FilePath), - pinfoComponents :: [ComponentInfo] - } - -data ComponentInfo = ComponentInfo { - cinfoName :: ComponentName, - cinfoStrName :: ComponentStringName, - cinfoPackage :: PackageInfo, - cinfoSrcDirs :: [FilePath], - cinfoModules :: [ModuleName], - cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) - cinfoCFiles :: [FilePath], - cinfoJsFiles :: [FilePath] - } - -type ComponentStringName = String - -instance Package PackageInfo where - packageId = pinfoId - ---TODO: [required eventually] need the original GenericPackageDescription or --- the flattening thereof because we need to be able to target modules etc --- that are not enabled in the current configuration. -selectPackageInfo :: PackageDescription -> PackageLocation a -> IO PackageInfo -selectPackageInfo pkg loc = do - (pkgdir, pkgfile) <- - case loc of - --TODO: local tarballs, remote tarballs etc - LocalUnpackedPackage dir -> do - dirabs <- canonicalizePath dir - dirrel <- makeRelativeToCwd dirabs - --TODO: ought to get this earlier in project reading - let fileabs = dirabs display (packageName pkg) <.> "cabal" - filerel = dirrel display (packageName pkg) <.> "cabal" - exists <- doesFileExist fileabs - return ( Just (dirabs, dirrel) - , if exists then Just (fileabs, filerel) else Nothing - ) - _ -> return (Nothing, Nothing) - let pinfo = - PackageInfo { - pinfoId = packageId pkg, - pinfoLocation = fmap (const ()) loc, - pinfoDirectory = pkgdir, - pinfoPackageFile = pkgfile, - pinfoComponents = selectComponentInfo pinfo pkg - } - return pinfo - - -selectComponentInfo :: PackageInfo -> PackageDescription -> [ComponentInfo] -selectComponentInfo pinfo pkg = - [ ComponentInfo { - cinfoName = componentName c, - cinfoStrName = componentStringName (packageName pkg) (componentName c), - cinfoPackage = pinfo, - cinfoSrcDirs = hsSourceDirs bi, --- [ pkgroot srcdir --- | (pkgroot,_) <- maybeToList (pinfoDirectory pinfo) --- , srcdir <- hsSourceDirs bi ], - cinfoModules = componentModules c, - cinfoHsFiles = componentHsFiles c, - cinfoCFiles = cSources bi, - cinfoJsFiles = jsSources bi - } - | c <- pkgComponents pkg - , let bi = componentBuildInfo c ] - - -componentStringName :: PackageName -> ComponentName -> ComponentStringName -componentStringName pkgname CLibName = display pkgname -componentStringName _ (CExeName name) = name -componentStringName _ (CTestName name) = name -componentStringName _ (CBenchName name) = name - -componentModules :: Component -> [ModuleName] -componentModules (CLib lib) = libModules lib -componentModules (CExe exe) = 
exeModules exe -componentModules (CTest test) = testModules test -componentModules (CBench bench) = benchmarkModules bench - -componentHsFiles :: Component -> [FilePath] -componentHsFiles (CExe exe) = [modulePath exe] -componentHsFiles (CTest TestSuite { - testInterface = TestSuiteExeV10 _ mainfile - }) = [mainfile] -componentHsFiles (CBench Benchmark { - benchmarkInterface = BenchmarkExeV10 _ mainfile - }) = [mainfile] -componentHsFiles _ = [] - - ------------------------------- --- Matching component kinds --- - -data ComponentKind = LibKind | ExeKind | TestKind | BenchKind - deriving (Eq, Ord, Show) - -componentKind :: ComponentName -> ComponentKind -componentKind CLibName = LibKind -componentKind (CExeName _) = ExeKind -componentKind (CTestName _) = TestKind -componentKind (CBenchName _) = BenchKind - -cinfoKind :: ComponentInfo -> ComponentKind -cinfoKind = componentKind . cinfoName - -matchComponentKind :: String -> Match ComponentKind -matchComponentKind s - | s `elem` ["lib", "library"] = increaseConfidence >> return LibKind - | s `elem` ["exe", "executable"] = increaseConfidence >> return ExeKind - | s `elem` ["tst", "test", "test-suite"] = increaseConfidence - >> return TestKind - | s `elem` ["bench", "benchmark"] = increaseConfidence - >> return BenchKind - | otherwise = matchErrorExpected - "component kind" s - -showComponentKind :: ComponentKind -> String -showComponentKind LibKind = "library" -showComponentKind ExeKind = "executable" -showComponentKind TestKind = "test-suite" -showComponentKind BenchKind = "benchmark" - -showComponentKindShort :: ComponentKind -> String -showComponentKindShort LibKind = "lib" -showComponentKindShort ExeKind = "exe" -showComponentKindShort TestKind = "test" -showComponentKindShort BenchKind = "bench" - ------------------------------- --- Matching package targets --- - -guardPackage :: String -> FileStatus -> Match () -guardPackage str fstatus = - guardPackageName str - <|> guardPackageDir str fstatus - <|> guardPackageFile str fstatus - - -guardPackageName :: String -> Match () -guardPackageName s - | validPackgageName s = increaseConfidence - | otherwise = matchErrorExpected "package name" s - where - -validPackgageName :: String -> Bool -validPackgageName s = - all validPackgageNameChar s - && not (null s) - where - validPackgageNameChar c = isAlphaNum c || c == '-' - - -guardPackageDir :: String -> FileStatus -> Match () -guardPackageDir _ (FileStatusExistsDir _) = increaseConfidence -guardPackageDir str _ = matchErrorExpected "package directory" str - - -guardPackageFile :: String -> FileStatus -> Match () -guardPackageFile _ (FileStatusExistsFile file) - | takeExtension file == ".cabal" - = increaseConfidence -guardPackageFile str _ = matchErrorExpected "package .cabal file" str - - -matchPackage :: [PackageInfo] -> String -> FileStatus -> Match PackageInfo -matchPackage pinfo = \str fstatus -> - orNoThingIn "project" "" $ - matchPackageName pinfo str - (matchPackageDir pinfo str fstatus - <|> matchPackageFile pinfo str fstatus) - - -matchPackageName :: [PackageInfo] -> String -> Match PackageInfo -matchPackageName ps = \str -> do - guard (validPackgageName str) - orNoSuchThing "package" str - (map (display . packageName) ps) $ - increaseConfidenceFor $ - matchInexactly caseFold (display . packageName) ps str - - -matchPackageDir :: [PackageInfo] - -> String -> FileStatus -> Match PackageInfo -matchPackageDir ps = \str fstatus -> - case fstatus of - FileStatusExistsDir canondir -> - orNoSuchThing "package directory" str (map (snd . 
fst) dirs) $ - increaseConfidenceFor $ - fmap snd $ matchExactly (fst . fst) dirs canondir - _ -> mzero - where - dirs = [ ((dabs,drel),p) - | p@PackageInfo{ pinfoDirectory = Just (dabs,drel) } <- ps ] - - -matchPackageFile :: [PackageInfo] -> String -> FileStatus -> Match PackageInfo -matchPackageFile ps = \str fstatus -> do - case fstatus of - FileStatusExistsFile canonfile -> - orNoSuchThing "package .cabal file" str (map (snd . fst) files) $ - increaseConfidenceFor $ - fmap snd $ matchExactly (fst . fst) files canonfile - _ -> mzero - where - files = [ ((fabs,frel),p) - | p@PackageInfo{ pinfoPackageFile = Just (fabs,frel) } <- ps ] - ---TODO: test outcome when dir exists but doesn't match any known one - ---TODO: perhaps need another distinction, vs no such thing, point is the --- thing is not known, within the project, but could be outside project - - ------------------------------- --- Matching component targets --- - - -guardComponentName :: String -> Match () -guardComponentName s - | all validComponentChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "component name" s - where - validComponentChar c = isAlphaNum c || c == '.' - || c == '_' || c == '-' || c == '\'' - - -matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo -matchComponentName cs str = - orNoSuchThing "component" str (map cinfoStrName cs) - $ increaseConfidenceFor - $ matchInexactly caseFold cinfoStrName cs str - - -matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String - -> Match ComponentInfo -matchComponentKindAndName cs ckind str = - orNoSuchThing (showComponentKind ckind ++ " component") str - (map render cs) - $ increaseConfidenceFor - $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) - (\c -> (cinfoKind c, cinfoStrName c)) - cs - (ckind, str) - where - render c = showComponentKindShort (cinfoKind c) ++ ":" ++ cinfoStrName c - - ------------------------------- --- Matching module targets --- - -guardModuleName :: String -> Match () -guardModuleName s = - case simpleParse s :: Maybe ModuleName of - Just _ -> increaseConfidence - _ | all validModuleChar s - && not (null s) -> return () - | otherwise -> matchErrorExpected "module name" s - where - validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\'' - - -matchModuleName :: [ModuleName] -> String -> Match ModuleName -matchModuleName ms str = - orNoSuchThing "module" str (map display ms) - $ increaseConfidenceFor - $ matchInexactly caseFold display ms str - - -matchModuleNameAnd :: [(ModuleName, a)] -> String -> Match (ModuleName, a) -matchModuleNameAnd ms str = - orNoSuchThing "module" str (map (display . fst) ms) - $ increaseConfidenceFor - $ matchInexactly caseFold (display . 
fst) ms str
-
-
-------------------------------
--- Matching file targets
---
-
-matchPackageDirectoryPrefix :: [PackageInfo] -> FileStatus
-                            -> Match (FilePath, PackageInfo)
-matchPackageDirectoryPrefix ps (FileStatusExistsFile filepath) =
-    increaseConfidenceFor $
-      matchDirectoryPrefix pkgdirs filepath
-  where
-    pkgdirs = [ (dir, p)
-              | p@PackageInfo { pinfoDirectory = Just (dir,_) } <- ps ]
-matchPackageDirectoryPrefix _ _ = mzero
-
-
-matchComponentFile :: [ComponentInfo] -> String
-                   -> Match (FilePath, ComponentInfo)
-matchComponentFile cs str =
-    orNoSuchThing "file" str [] $
-        matchComponentModuleFile cs str
-    <|> matchComponentOtherFile cs str
-
-
-matchComponentOtherFile :: [ComponentInfo] -> String
-                        -> Match (FilePath, ComponentInfo)
-matchComponentOtherFile cs =
-    matchFile
-      [ (file, c)
-      | c <- cs
-      , file <- cinfoHsFiles c
-             ++ cinfoCFiles c
-             ++ cinfoJsFiles c
-      ]
-
-
-matchComponentModuleFile :: [ComponentInfo] -> String
-                         -> Match (FilePath, ComponentInfo)
-matchComponentModuleFile cs str = do
-    matchFile
-      [ (normalise (d </> toFilePath m), c)
-      | c <- cs
-      , d <- cinfoSrcDirs c
-      , m <- cinfoModules c
-      ]
-      (dropExtension (normalise str))
-
--- utils
-
-matchFile :: [(FilePath, a)] -> FilePath -> Match (FilePath, a)
-matchFile fs =
-    increaseConfidenceFor
-  . matchInexactly caseFold fst fs
-
-matchDirectoryPrefix :: [(FilePath, a)] -> FilePath -> Match (FilePath, a)
-matchDirectoryPrefix dirs filepath =
-    tryEach $
-      [ (file, x)
-      | (dir,x) <- dirs
-      , file <- maybeToList (stripDirectory dir) ]
-  where
-    stripDirectory :: FilePath -> Maybe FilePath
-    stripDirectory dir =
-      joinPath `fmap` stripPrefix (splitDirectories dir) filepathsplit
-
-    filepathsplit = splitDirectories filepath
-
-
-------------------------------
--- Matching monad
---
-
--- | A matcher embodies a way to match some input as being some recognised
--- value. In particular it deals with multiple and ambiguous matches.
---
--- There are various matcher primitives ('matchExactly', 'matchInexactly'),
--- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally we
--- can run a matcher against an input using 'findMatch'.
---
-data Match a = NoMatch Confidence [MatchError]
-             | ExactMatch Confidence [a]
-             | InexactMatch Confidence [a]
-  deriving Show
-
-type Confidence = Int
-
-data MatchError = MatchErrorExpected String String -- thing got
-                | MatchErrorNoSuch String String [String] -- thing got alts
-                | MatchErrorIn String String MatchError -- kind thing
-  deriving (Show, Eq)
-
-
-instance Functor Match where
-    fmap _ (NoMatch d ms) = NoMatch d ms
-    fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs)
-    fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs)
-
-instance Applicative Match where
-    pure a = ExactMatch 0 [a]
-    (<*>) = ap
-
-instance Alternative Match where
-    empty = NoMatch 0 []
-    (<|>) = matchPlus
-
-instance Monad Match where
-    return = pure
-    NoMatch d ms >>= _ = NoMatch d ms
-    ExactMatch d xs >>= f = addDepth d
-                          $ msum (map f xs)
-    InexactMatch d xs >>= f = addDepth d . forceInexact
-                            $ msum (map f xs)
-
-instance MonadPlus Match where
-    mzero = empty
-    mplus = matchPlus
-
-(<//>) :: Match a -> Match a -> Match a
-(<//>) = matchPlusShadowing
-
-infixl 3 <//>
-
-addDepth :: Confidence -> Match a -> Match a
-addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs
-addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs
-addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs
-
-forceInexact :: Match a -> Match a
-forceInexact (ExactMatch d ys) = InexactMatch d ys
-forceInexact m = m
-
--- | Combine two matchers. Exact matches are used over inexact matches
--- but if we have multiple exact, or inexact matches then we collect all the
--- ambiguous matches.
---
-matchPlus :: Match a -> Match a -> Match a
-matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') =
-    ExactMatch (max d1 d2) (xs ++ xs')
-matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a
-matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a
-matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b
-matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') =
-    InexactMatch (max d1 d2) (xs ++ xs')
-matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a
-matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b
-matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b
-matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms')
-  | d1 > d2 = a
-  | d1 < d2 = b
-  | otherwise = NoMatch d1 (ms ++ ms')
-
--- | Combine two matchers. This is similar to 'matchPlus' with the
--- difference that an exact match from the left matcher shadows any exact
--- match on the right. Inexact matches are still collected however.
---
-matchPlusShadowing :: Match a -> Match a -> Match a
-matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a
-matchPlusShadowing a b = matchPlus a b
-
-
-------------------------------
--- Various match primitives
---
-
-matchErrorExpected :: String -> String -> Match a
-matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got]
-
-matchErrorNoSuch :: String -> String -> [String] -> Match a
-matchErrorNoSuch thing got alts = NoMatch 0 [MatchErrorNoSuch thing got alts]
-
-expecting :: String -> String -> Match a -> Match a
-expecting thing got (NoMatch 0 _) = matchErrorExpected thing got
-expecting _ _ m = m
-
-orNoSuchThing :: String -> String -> [String] -> Match a -> Match a
-orNoSuchThing thing got alts (NoMatch 0 _) = matchErrorNoSuch thing got alts
-orNoSuchThing _ _ _ m = m
-
-orNoThingIn :: String -> String -> Match a -> Match a
-orNoThingIn kind name (NoMatch n ms) =
-    NoMatch n [ MatchErrorIn kind name m | m <- ms ]
-orNoThingIn _ _ m = m
-
-increaseConfidence :: Match ()
-increaseConfidence = ExactMatch 1 [()]
-
-increaseConfidenceFor :: Match a -> Match a
-increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r
-
-nubMatchesBy :: (a -> a -> Bool) -> Match a -> Match a
-nubMatchesBy _ (NoMatch d msgs) = NoMatch d msgs
-nubMatchesBy eq (ExactMatch d xs) = ExactMatch d (nubBy eq xs)
-nubMatchesBy eq (InexactMatch d xs) = InexactMatch d (nubBy eq xs)
-
-nubMatchErrors :: Match a -> Match a
-nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs)
-nubMatchErrors (ExactMatch d xs) = ExactMatch d xs
-nubMatchErrors (InexactMatch d xs) = InexactMatch d xs
-
--- | Lift a list of matches to an exact match.
--- -exactMatches, inexactMatches :: [a] -> Match a - -exactMatches [] = mzero -exactMatches xs = ExactMatch 0 xs - -inexactMatches [] = mzero -inexactMatches xs = InexactMatch 0 xs - -tryEach :: [a] -> Match a -tryEach = exactMatches - - ------------------------------- --- Top level match runner --- - --- | Given a matcher and a key to look up, use the matcher to find all the --- possible matches. There may be 'None', a single 'Unambiguous' match or --- you may have an 'Ambiguous' match with several possibilities. --- -findMatch :: Match a -> MaybeAmbiguous a -findMatch match = - case nubMatchErrors match of - NoMatch _ msgs -> None msgs - ExactMatch _ [x] -> Unambiguous x - InexactMatch _ [x] -> Unambiguous x - ExactMatch _ [] -> error "findMatch: impossible: ExactMatch []" - InexactMatch _ [] -> error "findMatch: impossible: InexactMatch []" - ExactMatch _ xs -> Ambiguous True xs - InexactMatch _ xs -> Ambiguous False xs - -data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous Bool [a] - deriving Show - - ------------------------------- --- Basic matchers --- - --- | A primitive matcher that looks up a value in a finite 'Map'. The --- value must match exactly. --- -matchExactly :: Ord k => (a -> k) -> [a] -> (k -> Match a) -matchExactly key xs = - \k -> case Map.lookup k m of - Nothing -> mzero - Just ys -> exactMatches ys - where - m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] - --- | A primitive matcher that looks up a value in a finite 'Map'. It checks --- for an exact or inexact match. We get an inexact match if the match --- is not exact, but the canonical forms match. It takes a canonicalisation --- function for this purpose. --- --- So for example if we used string case fold as the canonicalisation --- function, then we would get case insensitive matching (but it will still --- report an exact match when the case matches too). --- -matchInexactly :: (Ord k, Ord k') => (k -> k') -> (a -> k) - -> [a] -> (k -> Match a) -matchInexactly cannonicalise key xs = - \k -> case Map.lookup k m of - Just ys -> exactMatches ys - Nothing -> case Map.lookup (cannonicalise k) m' of - Just ys -> inexactMatches ys - Nothing -> mzero - where - m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] - - -- the map of canonicalised keys to groups of inexact matches - m' = Map.mapKeysWith (++) cannonicalise m - - ------------------------------- --- Utils --- - -caseFold :: String -> String -caseFold = lowercase - - ------------------------------- --- Example inputs --- - -{- -ex1pinfo :: [PackageInfo] -ex1pinfo = - [ PackageInfo { - pinfoName = PackageName "foo", - pinfoDirectory = Just "/the/foo", - pinfoPackageFile = Just "/the/foo/foo.cabal", - pinfoComponents = [] - } - , PackageInfo { - pinfoName = PackageName "bar", - pinfoDirectory = Just "/the/bar", - pinfoPackageFile = Just "/the/bar/bar.cabal", - pinfoComponents = [] - } - ] --} -{- -stargets = - [ BuildTargetComponent (CExeName "foo") - , BuildTargetModule (CExeName "foo") (mkMn "Foo") - , BuildTargetModule (CExeName "tst") (mkMn "Foo") - ] - where - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - -ex_pkgid :: PackageIdentifier -Just ex_pkgid = simpleParse "thelib" --} - -{- -ex_cs :: [ComponentInfo] -ex_cs = - [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) - , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) - ] - where - mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) - mkMn :: String -> ModuleName - mkMn = fromJust . 
simpleParse - pkgid :: PackageIdentifier - Just pkgid = simpleParse "thelib" --} - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Check.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Check --- Copyright : (c) Lennart Kolmodin 2008 --- License : BSD-like --- --- Maintainer : kolmodin@haskell.org --- Stability : provisional --- Portability : portable --- --- Check a package for common mistakes --- ------------------------------------------------------------------------------ -module Distribution.Client.Check ( - check - ) where - -import Control.Monad ( when, unless ) - -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.PackageDescription.Check -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Simple.Utils - ( defaultPackageDesc, toUTF8, wrapText ) - -check :: Verbosity -> IO Bool -check verbosity = do - pdfile <- defaultPackageDesc verbosity - ppd <- readPackageDescription verbosity pdfile - -- flatten the generic package description into a regular package - -- description - -- TODO: this may give more warnings than it should give; - -- consider two branches of a condition, one saying - -- ghc-options: -Wall - -- and the other - -- ghc-options: -Werror - -- joined into - -- ghc-options: -Wall -Werror - -- checkPackages will yield a warning on the last line, but it - -- would not on each individual branch. - -- Hovever, this is the same way hackage does it, so we will yield - -- the exact same errors as it will. - let pkg_desc = flattenPackageDescription ppd - ioChecks <- checkPackageFiles pkg_desc "." 
- let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) - buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] - buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] - distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] - ++ [ x | x@PackageDistSuspiciousWarn {} <- packageChecks ] - distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] - - unless (null buildImpossible) $ do - putStrLn "The package will not build sanely due to these errors:" - printCheckMessages buildImpossible - - unless (null buildWarning) $ do - putStrLn "The following warnings are likely to affect your build negatively:" - printCheckMessages buildWarning - - unless (null distSuspicious) $ do - putStrLn "These warnings may cause trouble when distributing the package:" - printCheckMessages distSuspicious - - unless (null distInexusable) $ do - putStrLn "The following errors will cause portability problems on other environments:" - printCheckMessages distInexusable - - let isDistError (PackageDistSuspicious {}) = False - isDistError (PackageDistSuspiciousWarn {}) = False - isDistError _ = True - isCheckError (PackageDistSuspiciousWarn {}) = False - isCheckError _ = True - errors = filter isDistError packageChecks - - unless (null errors) $ - putStrLn "Hackage would reject this package." - - when (null packageChecks) $ - putStrLn "No errors or warnings could be found in the package." - - return (null . filter isCheckError $ packageChecks) - - where - printCheckMessages = mapM_ (putStrLn . format . explanation) - format = toUTF8 . wrapText . ("* "++) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdBuild.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdBuild.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdBuild.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdBuild.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} - --- | cabal-install CLI command: build --- -module Distribution.Client.CmdBuild ( - buildAction, - ) where - -import Distribution.Client.ProjectOrchestration - ( PreBuildHooks(..), runProjectPreBuildPhase, selectTargets - , ProjectBuildContext(..), runProjectBuildPhase - , printPlan, reportBuildFailures ) -import Distribution.Client.ProjectConfig - ( BuildTimeSettings(..) ) -import Distribution.Client.ProjectPlanning - ( PackageTarget(..) ) -import Distribution.Client.BuildTarget - ( readUserBuildTargets ) - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Verbosity - ( normal ) - -import Control.Monad (unless) - - --- | The @build@ command does a lot. It brings the install plan up to date, --- selects that part of the plan needed by the given or implicit targets and --- then executes the plan. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -buildAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -buildAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - userTargets <- readUserBuildTargets targetStrings - - buildCtx@ProjectBuildContext{buildSettings} <- - runProjectPreBuildPhase - verbosity - ( globalFlags, configFlags, configExFlags - , installFlags, haddockFlags ) - PreBuildHooks { - hookPrePlanning = \_ _ _ -> return (), - hookSelectPlanSubset = selectBuildTargets userTargets - } - - printPlan verbosity buildCtx - - unless (buildSettingDryRun buildSettings) $ do - plan <- runProjectBuildPhase - verbosity - buildCtx - reportBuildFailures plan - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - - -- When we interpret the targets on the command line, interpret them as - -- repl targets (as opposed to say repl or haddock targets). - selectBuildTargets = - selectTargets - BuildDefaultComponents - BuildSpecificComponent - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdConfigure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdConfigure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdConfigure.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdConfigure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ --- | cabal-install CLI command: configure --- -module Distribution.Client.CmdConfigure ( - configureAction, - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.ProjectConfig - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Verbosity - ( normal ) - - --- | To a first approximation, the @configure@ just runs the first phase of --- the @build@ command where we bring the install plan up to date (thus --- checking that it's possible). --- --- The only difference is that @configure@ also allows the user to specify --- some extra config flags which we save in the file @cabal.project.local@. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -configureAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -configureAction (configFlags, configExFlags, installFlags, haddockFlags) - _extraArgs globalFlags = do - --TODO: deal with _extraArgs, since flags with wrong syntax end up there - - buildCtx <- - runProjectPreBuildPhase - verbosity - ( globalFlags, configFlags, configExFlags - , installFlags, haddockFlags ) - PreBuildHooks { - hookPrePlanning = \projectRootDir _ cliConfig -> - -- Write out the @cabal.project.local@ so it gets picked up by the - -- planning phase. - writeProjectLocalExtraConfig projectRootDir cliConfig, - - hookSelectPlanSubset = return - } - - --TODO: Hmm, but we don't have any targets. Currently this prints what we - -- would build if we were to build everything. Could pick implicit target like "." - --TODO: should we say what's in the project (+deps) as a whole? 
- printPlan - verbosity - buildCtx { - buildSettings = (buildSettings buildCtx) { - buildSettingDryRun = True - } - } - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdRepl.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdRepl.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/CmdRepl.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/CmdRepl.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} - --- | cabal-install CLI command: repl --- -module Distribution.Client.CmdRepl ( - replAction, - ) where - -import Distribution.Client.ProjectOrchestration - ( PreBuildHooks(..), runProjectPreBuildPhase, selectTargets - , ProjectBuildContext(..), runProjectBuildPhase - , printPlan, reportBuildFailures ) -import Distribution.Client.ProjectConfig - ( BuildTimeSettings(..) ) -import Distribution.Client.ProjectPlanning - ( PackageTarget(..) ) -import Distribution.Client.BuildTarget - ( readUserBuildTargets ) - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Verbosity - ( normal ) - -import Control.Monad (unless) - - --- | The @repl@ command is very much like @build@. It brings the install plan --- up to date, selects that part of the plan needed by the given or implicit --- repl target and then executes the plan. --- --- Compared to @build@ the difference is that only one target is allowed --- (given or implicit) and the target type is repl rather than build. The --- general plan execution infrastructure handles both build and repl targets. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -replAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -replAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - userTargets <- readUserBuildTargets targetStrings - - buildCtx@ProjectBuildContext{buildSettings} <- - runProjectPreBuildPhase - verbosity - ( globalFlags, configFlags, configExFlags - , installFlags, haddockFlags ) - PreBuildHooks { - hookPrePlanning = \_ _ _ -> return (), - hookSelectPlanSubset = selectReplTargets userTargets - } - - printPlan verbosity buildCtx - - unless (buildSettingDryRun buildSettings) $ do - plan <- runProjectBuildPhase - verbosity - buildCtx - reportBuildFailures plan - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - - -- When we interpret the targets on the command line, interpret them as - -- repl targets (as opposed to say build or haddock targets). 
- selectReplTargets = - selectTargets - ReplDefaultComponent - ReplSpecificComponent - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/ExecutablePath.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/ExecutablePath.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/ExecutablePath.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/ExecutablePath.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,183 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface #-} -{-# LANGUAGE CPP #-} - --- Copied verbatim from base-4.6.0.0. We can't simply import --- System.Environment.getExecutablePath because we need compatibility with older --- GHCs. - -module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where - --- The imports are purposely kept completely disjoint to prevent edits --- to one OS implementation from breaking another. - -#if defined(darwin_HOST_OS) -import Data.Word -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Ptr -import Foreign.Storable -import System.Posix.Internals -#elif defined(linux_HOST_OS) -import Foreign.C -import Foreign.Marshal.Array -import System.Posix.Internals -#elif defined(mingw32_HOST_OS) -import Data.Word -import Foreign.C -import Foreign.Marshal.Array -import Foreign.Ptr -import System.Posix.Internals -#else -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Ptr -import Foreign.Storable -import System.Posix.Internals -#endif - --- GHC 7.0.* compatibility. 'System.Posix.Internals' in base-4.3.* doesn't --- provide 'peekFilePath' and 'peekFilePathLen'. -#if !MIN_VERSION_base(4,4,0) -#ifdef mingw32_HOST_OS - -peekFilePath :: CWString -> IO FilePath -peekFilePath = peekCWString - -#else - -peekFilePath :: CString -> IO FilePath -peekFilePath = peekCString - -peekFilePathLen :: CStringLen -> IO FilePath -peekFilePathLen = peekCStringLen - -#endif -#endif - --- The exported function is defined outside any if-guard to make sure --- every OS implements it with the same type. - --- | Returns the absolute pathname of the current executable. --- --- Note that for scripts and interactive sessions, this is the path to --- the interpreter (e.g. ghci.) --- --- /Since: 4.6.0.0/ -getExecutablePath :: IO FilePath - --------------------------------------------------------------------------------- --- Mac OS X - -#if defined(darwin_HOST_OS) - -type UInt32 = Word32 - -foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" - c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt - --- | Returns the path of the main executable. The path may be a --- symbolic link and not the real file. 
--- --- See dyld(3) -_NSGetExecutablePath :: IO FilePath -_NSGetExecutablePath = - allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X - alloca $ \ bufsize -> do - poke bufsize 1024 - status <- c__NSGetExecutablePath buf bufsize - if status == 0 - then peekFilePath buf - else do reqBufsize <- fromIntegral `fmap` peek bufsize - allocaBytes reqBufsize $ \ newBuf -> do - status2 <- c__NSGetExecutablePath newBuf bufsize - if status2 == 0 - then peekFilePath newBuf - else error "_NSGetExecutablePath: buffer too small" - -foreign import ccall unsafe "stdlib.h realpath" - c_realpath :: CString -> CString -> IO CString - --- | Resolves all symbolic links, extra \/ characters, and references --- to \/.\/ and \/..\/. Returns an absolute pathname. --- --- See realpath(3) -realpath :: FilePath -> IO FilePath -realpath path = - withFilePath path $ \ fileName -> - allocaBytes 1024 $ \ resolvedName -> do - _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName - peekFilePath resolvedName - -getExecutablePath = _NSGetExecutablePath >>= realpath - --------------------------------------------------------------------------------- --- Linux - -#elif defined(linux_HOST_OS) - -foreign import ccall unsafe "readlink" - c_readlink :: CString -> CString -> CSize -> IO CInt - --- | Reads the @FilePath@ pointed to by the symbolic link and returns --- it. --- --- See readlink(2) -readSymbolicLink :: FilePath -> IO FilePath -readSymbolicLink file = - allocaArray0 4096 $ \buf -> do - withFilePath file $ \s -> do - len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ - c_readlink s buf 4096 - peekFilePathLen (buf,fromIntegral len) - -getExecutablePath = readSymbolicLink $ "/proc/self/exe" - --------------------------------------------------------------------------------- --- Windows - -#elif defined(mingw32_HOST_OS) - -# if defined(i386_HOST_ARCH) -# define WINDOWS_CCONV stdcall -# elif defined(x86_64_HOST_ARCH) -# define WINDOWS_CCONV ccall -# else -# error Unknown mingw32 arch -# endif - -foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" - c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 - -getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 - where - go size = allocaArray (fromIntegral size) $ \ buf -> do - ret <- c_GetModuleFileName nullPtr buf size - case ret of - 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" - _ | ret < size -> peekFilePath buf - | otherwise -> go (size * 2) - --------------------------------------------------------------------------------- --- Fallback to argv[0] - -#else - -foreign import ccall unsafe "getFullProgArgv" - c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () - -getExecutablePath = - alloca $ \ p_argc -> - alloca $ \ p_argv -> do - c_getFullProgArgv p_argc p_argv - argc <- peek p_argc - if argc > 0 - -- If argc > 0 then argv[0] is guaranteed by the standard - -- to be a pointer to a null-terminated string. 
- then peek p_argv >>= peek >>= peekFilePath - else error $ "getExecutablePath: " ++ msg - where msg = "no OS specific implementation and program name couldn't be " ++ - "found in argv" - --------------------------------------------------------------------------------- - -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/FilePerms.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/FilePerms.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/FilePerms.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/FilePerms.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Client.Compat.FilePerms ( - setFileOrdinary, - setFileExecutable, - setFileHidden, - ) where - -#ifndef mingw32_HOST_OS -import System.Posix.Types - ( FileMode ) -import System.Posix.Internals - ( c_chmod ) -import Foreign.C - ( withCString ) -import Foreign.C - ( throwErrnoPathIfMinus1_ ) -#else -import System.Win32.File (setFileAttributes, fILE_ATTRIBUTE_HIDDEN) -#endif /* mingw32_HOST_OS */ - -setFileHidden, setFileOrdinary, setFileExecutable :: FilePath -> IO () -#ifndef mingw32_HOST_OS -setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- -setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x -setFileHidden _ = return () - -setFileMode :: FilePath -> FileMode -> IO () -setFileMode name m = - withCString name $ \s -> - throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) -#else -setFileOrdinary _ = return () -setFileExecutable _ = return () -setFileHidden path = setFileAttributes path fILE_ATTRIBUTE_HIDDEN -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Process.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Process.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Process.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Process.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Compat.Process --- Copyright : (c) 2013 Liu Hao, Brent Yorgey --- License : BSD-style (see the file LICENSE) --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Cross-platform utilities for invoking processes. --- ------------------------------------------------------------------------------ - -module Distribution.Client.Compat.Process ( - readProcessWithExitCode -) where - -#if !MIN_VERSION_base(4,6,0) -import Prelude hiding (catch) -#endif - -import Control.Exception (catch, throw) -import System.Exit (ExitCode (ExitFailure)) -import System.IO.Error (isDoesNotExistError) -import qualified System.Process as P - --- | @readProcessWithExitCode@ creates an external process, reads its --- standard output and standard error strictly, waits until the --- process terminates, and then returns the @ExitCode@ of the --- process, the standard output, and the standard error. 
--- --- See the documentation of the version from @System.Process@ for --- more information. --- --- The version from @System.Process@ behaves inconsistently across --- platforms when an executable with the given name is not found: in --- some cases it returns an @ExitFailure@, in others it throws an --- exception. This variant catches \"does not exist\" exceptions and --- turns them into @ExitFailure@s. -readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) -readProcessWithExitCode cmd args input = - P.readProcessWithExitCode cmd args input - `catch` \e -> if isDoesNotExistError e - then return (ExitFailure 127, "", "") - else throw e diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Semaphore.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Semaphore.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Semaphore.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Semaphore.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# OPTIONS_GHC -funbox-strict-fields #-} -module Distribution.Client.Compat.Semaphore - ( QSem - , newQSem - , waitQSem - , signalQSem - ) where - -import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, - writeTVar) -import Control.Exception (mask_, onException) -import Control.Monad (join, when) -import Data.Typeable (Typeable) - --- | 'QSem' is a quantity semaphore in which the resource is aqcuired --- and released in units of one. It provides guaranteed FIFO ordering --- for satisfying blocked `waitQSem` calls. --- -data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) - deriving (Eq, Typeable) - -newQSem :: Int -> IO QSem -newQSem i = atomically $ do - q <- newTVar i - b1 <- newTVar [] - b2 <- newTVar [] - return (QSem q b1 b2) - -waitQSem :: QSem -> IO () -waitQSem s@(QSem q _b1 b2) = - mask_ $ join $ atomically $ do - -- join, because if we need to block, we have to add a TVar to - -- the block queue. - -- mask_, because we need a chance to set up an exception handler - -- after the join returns. - v <- readTVar q - if v == 0 - then do b <- newTVar False - ys <- readTVar b2 - writeTVar b2 (b:ys) - return (wait b) - else do writeTVar q $! v - 1 - return (return ()) - where - -- - -- very careful here: if we receive an exception, then we need to - -- (a) write True into the TVar, so that another signalQSem doesn't - -- try to wake up this thread, and - -- (b) if the TVar is *already* True, then we need to do another - -- signalQSem to avoid losing a unit of the resource. - -- - -- The 'wake' function does both (a) and (b), so we can just call - -- it here. - -- - wait t = - flip onException (wake s t) $ - atomically $ do - b <- readTVar t - when (not b) retry - - -wake :: QSem -> TVar Bool -> IO () -wake s x = join $ atomically $ do - b <- readTVar x - if b then return (signalQSem s) - else do writeTVar x True - return (return ()) - -{- - property we want: - - bracket waitQSem (\_ -> signalQSem) (\_ -> ...) - - never loses a unit of the resource. 
--} - -signalQSem :: QSem -> IO () -signalQSem s@(QSem q b1 b2) = - mask_ $ join $ atomically $ do - -- join, so we don't force the reverse inside the txn - -- mask_ is needed so we don't lose a wakeup - v <- readTVar q - if v /= 0 - then do writeTVar q $! v + 1 - return (return ()) - else do xs <- readTVar b1 - checkwake1 xs - where - checkwake1 [] = do - ys <- readTVar b2 - checkwake2 ys - checkwake1 (x:xs) = do - writeTVar b1 xs - return (wake s x) - - checkwake2 [] = do - writeTVar q 1 - return (return ()) - checkwake2 ys = do - let (z:zs) = reverse ys - writeTVar b1 zs - writeTVar b2 [] - return (wake s z) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Time.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Time.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Time.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,167 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface, GeneralizedNewtypeDeriving #-} -module Distribution.Client.Compat.Time - ( ModTime(..) -- Needed for testing - , getModTime, getFileAge, getCurTime - , posixSecondsToModTime ) - where - -import Control.Arrow ( first ) -import Data.Int ( Int64 ) -import Data.Word ( Word64 ) -import System.Directory ( getModificationTime ) - -import Distribution.Compat.Binary ( Binary ) - -import Data.Time.Clock.POSIX ( POSIXTime, getPOSIXTime ) -#if MIN_VERSION_directory(1,2,0) -import Data.Time.Clock.POSIX ( posixDayLength ) -import Data.Time ( diffUTCTime, getCurrentTime ) -#else -import System.Time ( getClockTime, diffClockTimes - , normalizeTimeDiff, tdDay, tdHour ) -#endif - -#if defined mingw32_HOST_OS - -import Data.Bits ((.|.), unsafeShiftL) -#if MIN_VERSION_base(4,7,0) -import Data.Bits (finiteBitSize) -#else -import Data.Bits (bitSize) -#endif - -import Data.Int ( Int32 ) -import Foreign ( allocaBytes, peekByteOff ) -import System.IO.Error ( mkIOError, doesNotExistErrorType ) -import System.Win32.Types ( BOOL, DWORD, LPCTSTR, LPVOID, withTString ) - -#else - -import System.Posix.Files ( FileStatus, getFileStatus ) - -#if MIN_VERSION_unix(2,6,0) -import System.Posix.Files ( modificationTimeHiRes ) -#else -import System.Posix.Files ( modificationTime ) -#endif - -#endif - --- | An opaque type representing a file's modification time, represented --- internally as a 64-bit unsigned integer in the Windows UTC format. -newtype ModTime = ModTime Word64 - deriving (Binary, Bounded, Eq, Ord) - -instance Show ModTime where - show (ModTime x) = show x - -instance Read ModTime where - readsPrec p str = map (first ModTime) (readsPrec p str) - --- | Return modification time of the given file. Works around the low clock --- resolution problem that 'getModificationTime' has on GHC < 7.8. --- --- This is a modified version of the code originally written for Shake by Neil --- Mitchell. See module Development.Shake.FileInfo. -getModTime :: FilePath -> IO ModTime - -#if defined mingw32_HOST_OS - --- Directly against the Win32 API. 
-getModTime path = allocaBytes size_WIN32_FILE_ATTRIBUTE_DATA $ \info -> do - res <- getFileAttributesEx path info - if not res - then do - let err = mkIOError doesNotExistErrorType - "Distribution.Client.Compat.Time.getModTime" - Nothing (Just path) - ioError err - else do - dwLow <- peekByteOff info - index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime - dwHigh <- peekByteOff info - index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime -#if MIN_VERSION_base(4,7,0) - let qwTime = - (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` finiteBitSize dwHigh) - .|. (fromIntegral (dwLow :: DWORD)) -#else - let qwTime = - (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` bitSize dwHigh) - .|. (fromIntegral (dwLow :: DWORD)) -#endif - return $! ModTime (qwTime :: Word64) - -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV "windows.h GetFileAttributesExW" - c_getFileAttributesEx :: LPCTSTR -> Int32 -> LPVOID -> IO BOOL - -getFileAttributesEx :: String -> LPVOID -> IO BOOL -getFileAttributesEx path lpFileInformation = - withTString path $ \c_path -> - c_getFileAttributesEx c_path getFileExInfoStandard lpFileInformation - -getFileExInfoStandard :: Int32 -getFileExInfoStandard = 0 - -size_WIN32_FILE_ATTRIBUTE_DATA :: Int -size_WIN32_FILE_ATTRIBUTE_DATA = 36 - -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime :: Int -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime = 20 - -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime :: Int -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime = 24 - -#else - --- Directly against the unix library. -getModTime path = do - st <- getFileStatus path - return $! (extractFileTime st) - -extractFileTime :: FileStatus -> ModTime -#if MIN_VERSION_unix(2,6,0) -extractFileTime x = posixTimeToModTime (modificationTimeHiRes x) -#else -extractFileTime x = posixSecondsToModTime $ fromIntegral $ fromEnum $ - modificationTime x -#endif - -#endif - -windowsTick, secToUnixEpoch :: Word64 -windowsTick = 10000000 -secToUnixEpoch = 11644473600 - --- | Convert POSIX seconds to ModTime. -posixSecondsToModTime :: Int64 -> ModTime -posixSecondsToModTime s = - ModTime $ ((fromIntegral s :: Word64) + secToUnixEpoch) * windowsTick - --- | Convert 'POSIXTime' to 'ModTime'. -posixTimeToModTime :: POSIXTime -> ModTime -posixTimeToModTime p = ModTime $ (ceiling $ p * 1e7) -- 100 ns precision - + (secToUnixEpoch * windowsTick) - --- | Return age of given file in days. -getFileAge :: FilePath -> IO Double -getFileAge file = do - t0 <- getModificationTime file -#if MIN_VERSION_directory(1,2,0) - t1 <- getCurrentTime - return $ realToFrac (t1 `diffUTCTime` t0) / realToFrac posixDayLength -#else - t1 <- getClockTime - let dt = normalizeTimeDiff (t1 `diffClockTimes` t0) - return $ fromIntegral ((24 * tdDay dt) + tdHour dt) / 24.0 -#endif - --- | Return the current time as 'ModTime'. -getCurTime :: IO ModTime -getCurTime = posixTimeToModTime `fmap` getPOSIXTime -- Uses 'gettimeofday'. 
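The deleted Compat.Time module above stores modification times as 100 ns ticks on the Windows FILETIME epoch (1601-01-01), which is why posixSecondsToModTime adds an offset of 11644473600 seconds before scaling by 10^7. A minimal, self-contained Haskell sketch of that arithmetic follows; the primed names and the example value are illustrative only and are not part of the patched sources.

    import Data.Int  (Int64)
    import Data.Word (Word64)

    -- Illustrative mirror of the conversion performed in Compat.Time above.
    newtype ModTime' = ModTime' Word64 deriving (Eq, Ord, Show)

    windowsTick', secToUnixEpoch' :: Word64
    windowsTick'    = 10000000     -- 100 ns ticks per second
    secToUnixEpoch' = 11644473600  -- seconds from 1601-01-01 to 1970-01-01

    posixSecondsToModTime' :: Int64 -> ModTime'
    posixSecondsToModTime' s =
      ModTime' (((fromIntegral s :: Word64) + secToUnixEpoch') * windowsTick')

    -- posixSecondsToModTime' 0 == ModTime' 116444736000000000 (the Unix epoch)
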
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ComponentDeps.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ComponentDeps.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ComponentDeps.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ComponentDeps.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,161 +0,0 @@ --- | Fine-grained package dependencies --- --- Like many others, this module is meant to be "double-imported": --- --- > import Distribution.Client.ComponentDeps ( --- > Component --- > , ComponentDep --- > , ComponentDeps --- > ) --- > import qualified Distribution.Client.ComponentDeps as CD -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Client.ComponentDeps ( - -- * Fine-grained package dependencies - Component(..) - , ComponentDep - , ComponentDeps -- opaque - -- ** Constructing ComponentDeps - , empty - , fromList - , singleton - , insert - , filterDeps - , fromLibraryDeps - , fromSetupDeps - , fromInstalled - -- ** Deconstructing ComponentDeps - , toList - , flatDeps - , nonSetupDeps - , libraryDeps - , setupDeps - , select - ) where - -import Data.Map (Map) -import qualified Data.Map as Map -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup (Semigroup((<>))) -import GHC.Generics -import Data.Foldable (fold) - -#if !MIN_VERSION_base(4,8,0) -import Data.Foldable (Foldable(foldMap)) -import Data.Monoid (Monoid(..)) -import Data.Traversable (Traversable(traverse)) -#endif - -{------------------------------------------------------------------------------- - Types --------------------------------------------------------------------------------} - --- | Component of a package. -data Component = - ComponentLib - | ComponentExe String - | ComponentTest String - | ComponentBench String - | ComponentSetup - deriving (Show, Eq, Ord, Generic) - -instance Binary Component - --- | Dependency for a single component. -type ComponentDep a = (Component, a) - --- | Fine-grained dependencies for a package. --- --- Typically used as @ComponentDeps [Dependency]@, to represent the list of --- dependencies for each named component within a package. --- -newtype ComponentDeps a = ComponentDeps { unComponentDeps :: Map Component a } - deriving (Show, Functor, Eq, Ord, Generic) - -instance Semigroup a => Monoid (ComponentDeps a) where - mempty = ComponentDeps Map.empty - mappend = (<>) - -instance Semigroup a => Semigroup (ComponentDeps a) where - ComponentDeps d <> ComponentDeps d' = - ComponentDeps (Map.unionWith (<>) d d') - -instance Foldable ComponentDeps where - foldMap f = foldMap f . unComponentDeps - -instance Traversable ComponentDeps where - traverse f = fmap ComponentDeps . traverse f . unComponentDeps - -instance Binary a => Binary (ComponentDeps a) - -{------------------------------------------------------------------------------- - Construction --------------------------------------------------------------------------------} - -empty :: ComponentDeps a -empty = ComponentDeps $ Map.empty - -fromList :: Monoid a => [ComponentDep a] -> ComponentDeps a -fromList = ComponentDeps . Map.fromListWith mappend - -singleton :: Component -> a -> ComponentDeps a -singleton comp = ComponentDeps . 
Map.singleton comp - -insert :: Monoid a => Component -> a -> ComponentDeps a -> ComponentDeps a -insert comp a = ComponentDeps . Map.alter aux comp . unComponentDeps - where - aux Nothing = Just a - aux (Just a') = Just $ a `mappend` a' - --- | Keep only selected components (and their associated deps info). -filterDeps :: (Component -> a -> Bool) -> ComponentDeps a -> ComponentDeps a -filterDeps p = ComponentDeps . Map.filterWithKey p . unComponentDeps - --- | ComponentDeps containing library dependencies only -fromLibraryDeps :: a -> ComponentDeps a -fromLibraryDeps = singleton ComponentLib - --- | ComponentDeps containing setup dependencies only. -fromSetupDeps :: a -> ComponentDeps a -fromSetupDeps = singleton ComponentSetup - --- | ComponentDeps for installed packages. --- --- We assume that installed packages only record their library dependencies. -fromInstalled :: a -> ComponentDeps a -fromInstalled = fromLibraryDeps - -{------------------------------------------------------------------------------- - Deconstruction --------------------------------------------------------------------------------} - -toList :: ComponentDeps a -> [ComponentDep a] -toList = Map.toList . unComponentDeps - --- | All dependencies of a package. --- --- This is just a synonym for 'fold', but perhaps a use of 'flatDeps' is more --- obvious than a use of 'fold', and moreover this avoids introducing lots of --- @#ifdef@s for 7.10 just for the use of 'fold'. -flatDeps :: Monoid a => ComponentDeps a -> a -flatDeps = fold - --- | All dependencies except the setup dependencies. --- --- Prior to the introduction of setup dependencies in version 1.24 this --- would have been _all_ dependencies. -nonSetupDeps :: Monoid a => ComponentDeps a -> a -nonSetupDeps = select (/= ComponentSetup) - --- | Library dependencies proper only. -libraryDeps :: Monoid a => ComponentDeps a -> a -libraryDeps = select (== ComponentLib) - --- | Setup dependencies. -setupDeps :: Monoid a => ComponentDeps a -> a -setupDeps = select (== ComponentSetup) - --- | Select dependencies satisfying a given predicate. -select :: Monoid a => (Component -> Bool) -> ComponentDeps a -> a -select p = foldMap snd . filter (p . fst) . toList diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Config.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Config.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Config.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Config.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1163 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Config --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- Utilities for handling saved state such as known packages, known servers and --- downloaded packages. 
------------------------------------------------------------------------------ -module Distribution.Client.Config ( - SavedConfig(..), - loadConfig, - getConfigFilePath, - - showConfig, - showConfigWithComments, - parseConfig, - - defaultCabalDir, - defaultConfigFile, - defaultCacheDir, - defaultCompiler, - defaultLogsDir, - defaultUserInstall, - - baseSavedConfig, - commentSavedConfig, - initialSavedConfig, - configFieldDescriptions, - haddockFlagsFields, - installDirsFields, - withProgramsFields, - withProgramOptionsFields, - userConfigDiff, - userConfigUpdate, - createDefaultConfigFile, - - remoteRepoFields - ) where - -import Distribution.Client.Types - ( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) -import Distribution.Client.Dependency.Types - ( ConstraintSource(..) ) -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand, defaultGlobalFlags - , ConfigExFlags(..), configureExOptions, defaultConfigExFlags - , InstallFlags(..), installOptions, defaultInstallFlags - , UploadFlags(..), uploadCommand - , ReportFlags(..), reportCommand - , showRepo, parseRepo, readRepo ) -import Distribution.Utils.NubList - ( NubList, fromNubList, toNubList, overNubList ) - -import Distribution.Simple.Compiler - ( DebugInfoLevel(..), OptimisationLevel(..) ) -import Distribution.Simple.Setup - ( ConfigFlags(..), configureOptions, defaultConfigFlags - , AllowNewer(..) - , HaddockFlags(..), haddockOptions, defaultHaddockFlags - , installDirsOptions, optionDistPref - , programConfigurationPaths', programConfigurationOptions - , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) -import Distribution.Simple.InstallDirs - ( InstallDirs(..), defaultInstallDirs - , PathTemplate, toPathTemplate ) -import Distribution.ParseUtils - ( FieldDescr(..), liftField - , ParseResult(..), PError(..), PWarning(..) - , locatedErrorMsg, showPWarning - , readFields, warning, lineNo - , simpleField, listField, spaceListField - , parseFilePathQ, parseOptCommaList, parseTokenQ ) -import Distribution.Client.ParseUtils - ( parseFields, ppFields, ppSection ) -import Distribution.Client.HttpUtils - ( isOldHackageURI ) -import qualified Distribution.ParseUtils as ParseUtils - ( Field(..) ) -import qualified Distribution.Text as Text - ( Text(..) ) -import Distribution.Simple.Command - ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) 
- , viewAsFieldDescr ) -import Distribution.Simple.Program - ( defaultProgramConfiguration ) -import Distribution.Simple.Utils - ( die, notice, warn, lowercase, cabalVersion ) -import Distribution.Compiler - ( CompilerFlavor(..), defaultCompilerFlavor ) -import Distribution.Verbosity - ( Verbosity, normal ) - -import Data.List - ( partition, find, foldl' ) -import Data.Maybe - ( fromMaybe ) -import Control.Monad - ( when, unless, foldM, liftM ) -import qualified Distribution.Compat.ReadP as Parse - ( (<++), option ) -import Distribution.Compat.Semigroup -import qualified Text.PrettyPrint as Disp - ( render, text, empty ) -import Text.PrettyPrint - ( ($+$) ) -import Text.PrettyPrint.HughesPJ - ( text, Doc ) -import System.Directory - ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) -import Network.URI - ( URI(..), URIAuth(..), parseURI ) -import System.FilePath - ( (<.>), (), takeDirectory ) -import System.IO.Error - ( isDoesNotExistError ) -import Distribution.Compat.Environment - ( getEnvironment ) -import Distribution.Compat.Exception - ( catchIO ) -import qualified Paths_cabal_install - ( version ) -import Data.Version - ( showVersion ) -import Data.Char - ( isSpace ) -import qualified Data.Map as M -import Data.Function - ( on ) -import Data.List - ( nubBy ) -import GHC.Generics ( Generic ) - --- --- * Configuration saved in the config file --- - -data SavedConfig = SavedConfig { - savedGlobalFlags :: GlobalFlags, - savedInstallFlags :: InstallFlags, - savedConfigureFlags :: ConfigFlags, - savedConfigureExFlags :: ConfigExFlags, - savedUserInstallDirs :: InstallDirs (Flag PathTemplate), - savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), - savedUploadFlags :: UploadFlags, - savedReportFlags :: ReportFlags, - savedHaddockFlags :: HaddockFlags - } deriving Generic - -instance Monoid SavedConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup SavedConfig where - a <> b = SavedConfig { - savedGlobalFlags = combinedSavedGlobalFlags, - savedInstallFlags = combinedSavedInstallFlags, - savedConfigureFlags = combinedSavedConfigureFlags, - savedConfigureExFlags = combinedSavedConfigureExFlags, - savedUserInstallDirs = combinedSavedUserInstallDirs, - savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, - savedUploadFlags = combinedSavedUploadFlags, - savedReportFlags = combinedSavedReportFlags, - savedHaddockFlags = combinedSavedHaddockFlags - } - where - -- This is ugly, but necessary. If we're mappending two config files, we - -- want the values of the *non-empty* list fields from the second one to - -- *override* the corresponding values from the first one. Default - -- behaviour (concatenation) is confusing and makes some use cases (see - -- #1884) impossible. - -- - -- However, we also want to allow specifying multiple values for a list - -- field in a *single* config file. For example, we want the following to - -- continue to work: - -- - -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ - -- remote-repo: private-collection:http://hackage.local/ - -- - -- So we can't just wrap the list fields inside Flags; we have to do some - -- special-casing just for SavedConfig. - - -- NB: the signature prevents us from using 'combine' on lists. - combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a - combine' field subfield = - (subfield . field $ a) `mappend` (subfield . field $ b) - - combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon) - -> mon - combineMonoid field subfield = - (subfield . 
field $ a) `mappend` (subfield . field $ b) - - lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] - lastNonEmpty' field subfield = - let a' = subfield . field $ a - b' = subfield . field $ b - in case b' of [] -> a' - _ -> b' - - lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) - -> NubList a - lastNonEmptyNL' field subfield = - let a' = subfield . field $ a - b' = subfield . field $ b - in case fromNubList b' of [] -> a' - _ -> b' - - combinedSavedGlobalFlags = GlobalFlags { - globalVersion = combine globalVersion, - globalNumericVersion = combine globalNumericVersion, - globalConfigFile = combine globalConfigFile, - globalSandboxConfigFile = combine globalSandboxConfigFile, - globalConstraintsFile = combine globalConstraintsFile, - globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, - globalCacheDir = combine globalCacheDir, - globalLocalRepos = lastNonEmptyNL globalLocalRepos, - globalLogsDir = combine globalLogsDir, - globalWorldFile = combine globalWorldFile, - globalRequireSandbox = combine globalRequireSandbox, - globalIgnoreSandbox = combine globalIgnoreSandbox, - globalIgnoreExpiry = combine globalIgnoreExpiry, - globalHttpTransport = combine globalHttpTransport - } - where - combine = combine' savedGlobalFlags - lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags - - combinedSavedInstallFlags = InstallFlags { - installDocumentation = combine installDocumentation, - installHaddockIndex = combine installHaddockIndex, - installDryRun = combine installDryRun, - installMaxBackjumps = combine installMaxBackjumps, - installReorderGoals = combine installReorderGoals, - installIndependentGoals = combine installIndependentGoals, - installShadowPkgs = combine installShadowPkgs, - installStrongFlags = combine installStrongFlags, - installReinstall = combine installReinstall, - installAvoidReinstalls = combine installAvoidReinstalls, - installOverrideReinstall = combine installOverrideReinstall, - installUpgradeDeps = combine installUpgradeDeps, - installOnly = combine installOnly, - installOnlyDeps = combine installOnlyDeps, - installRootCmd = combine installRootCmd, - installSummaryFile = lastNonEmptyNL installSummaryFile, - installLogFile = combine installLogFile, - installBuildReports = combine installBuildReports, - installReportPlanningFailure = combine installReportPlanningFailure, - installSymlinkBinDir = combine installSymlinkBinDir, - installOneShot = combine installOneShot, - installNumJobs = combine installNumJobs, - installRunTests = combine installRunTests, - installOfflineMode = combine installOfflineMode - } - where - combine = combine' savedInstallFlags - lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags - - combinedSavedConfigureFlags = ConfigFlags { - configPrograms_ = configPrograms_ . 
savedConfigureFlags $ b, - -- TODO: NubListify - configProgramPaths = lastNonEmpty configProgramPaths, - -- TODO: NubListify - configProgramArgs = lastNonEmpty configProgramArgs, - configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, - configHcFlavor = combine configHcFlavor, - configHcPath = combine configHcPath, - configHcPkg = combine configHcPkg, - configVanillaLib = combine configVanillaLib, - configProfLib = combine configProfLib, - configProf = combine configProf, - configSharedLib = combine configSharedLib, - configDynExe = combine configDynExe, - configProfExe = combine configProfExe, - configProfDetail = combine configProfDetail, - configProfLibDetail = combine configProfLibDetail, - -- TODO: NubListify - configConfigureArgs = lastNonEmpty configConfigureArgs, - configOptimization = combine configOptimization, - configDebugInfo = combine configDebugInfo, - configProgPrefix = combine configProgPrefix, - configProgSuffix = combine configProgSuffix, - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. - configInstallDirs = - (configInstallDirs . savedConfigureFlags $ a) - `mappend` (configInstallDirs . savedConfigureFlags $ b), - configScratchDir = combine configScratchDir, - -- TODO: NubListify - configExtraLibDirs = lastNonEmpty configExtraLibDirs, - -- TODO: NubListify - configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs, - -- TODO: NubListify - configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, - configIPID = combine configIPID, - configDistPref = combine configDistPref, - configVerbosity = combine configVerbosity, - configUserInstall = combine configUserInstall, - -- TODO: NubListify - configPackageDBs = lastNonEmpty configPackageDBs, - configGHCiLib = combine configGHCiLib, - configSplitObjs = combine configSplitObjs, - configStripExes = combine configStripExes, - configStripLibs = combine configStripLibs, - -- TODO: NubListify - configConstraints = lastNonEmpty configConstraints, - -- TODO: NubListify - configDependencies = lastNonEmpty configDependencies, - -- TODO: NubListify - configConfigurationsFlags = lastNonEmpty configConfigurationsFlags, - configTests = combine configTests, - configBenchmarks = combine configBenchmarks, - configCoverage = combine configCoverage, - configLibCoverage = combine configLibCoverage, - configExactConfiguration = combine configExactConfiguration, - configFlagError = combine configFlagError, - configRelocatable = combine configRelocatable, - configAllowNewer = combineMonoid savedConfigureFlags - configAllowNewer - } - where - combine = combine' savedConfigureFlags - lastNonEmpty = lastNonEmpty' savedConfigureFlags - lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags - - combinedSavedConfigureExFlags = ConfigExFlags { - configCabalVersion = combine configCabalVersion, - -- TODO: NubListify - configExConstraints = lastNonEmpty configExConstraints, - -- TODO: NubListify - configPreferences = lastNonEmpty configPreferences, - configSolver = combine configSolver - } - where - combine = combine' savedConfigureExFlags - lastNonEmpty = lastNonEmpty' savedConfigureExFlags - - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. - combinedSavedUserInstallDirs = savedUserInstallDirs a - `mappend` savedUserInstallDirs b - - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. 
- combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a - `mappend` savedGlobalInstallDirs b - - combinedSavedUploadFlags = UploadFlags { - uploadCheck = combine uploadCheck, - uploadDoc = combine uploadDoc, - uploadUsername = combine uploadUsername, - uploadPassword = combine uploadPassword, - uploadPasswordCmd = combine uploadPasswordCmd, - uploadVerbosity = combine uploadVerbosity - } - where - combine = combine' savedUploadFlags - - combinedSavedReportFlags = ReportFlags { - reportUsername = combine reportUsername, - reportPassword = combine reportPassword, - reportVerbosity = combine reportVerbosity - } - where - combine = combine' savedReportFlags - - combinedSavedHaddockFlags = HaddockFlags { - -- TODO: NubListify - haddockProgramPaths = lastNonEmpty haddockProgramPaths, - -- TODO: NubListify - haddockProgramArgs = lastNonEmpty haddockProgramArgs, - haddockHoogle = combine haddockHoogle, - haddockHtml = combine haddockHtml, - haddockHtmlLocation = combine haddockHtmlLocation, - haddockForHackage = combine haddockForHackage, - haddockExecutables = combine haddockExecutables, - haddockTestSuites = combine haddockTestSuites, - haddockBenchmarks = combine haddockBenchmarks, - haddockInternal = combine haddockInternal, - haddockCss = combine haddockCss, - haddockHscolour = combine haddockHscolour, - haddockHscolourCss = combine haddockHscolourCss, - haddockContents = combine haddockContents, - haddockDistPref = combine haddockDistPref, - haddockKeepTempFiles = combine haddockKeepTempFiles, - haddockVerbosity = combine haddockVerbosity - } - where - combine = combine' savedHaddockFlags - lastNonEmpty = lastNonEmpty' savedHaddockFlags - - --- --- * Default config --- - --- | These are the absolute basic defaults. The fields that must be --- initialised. When we load the config from the file we layer the loaded --- values over these ones, so any missing fields in the file take their values --- from here. --- -baseSavedConfig :: IO SavedConfig -baseSavedConfig = do - userPrefix <- defaultCabalDir - logsDir <- defaultLogsDir - worldFile <- defaultWorldFile - return mempty { - savedConfigureFlags = mempty { - configHcFlavor = toFlag defaultCompiler, - configUserInstall = toFlag defaultUserInstall, - configVerbosity = toFlag normal - }, - savedUserInstallDirs = mempty { - prefix = toFlag (toPathTemplate userPrefix) - }, - savedGlobalFlags = mempty { - globalLogsDir = toFlag logsDir, - globalWorldFile = toFlag worldFile - } - } - --- | This is the initial configuration that we write out to to the config file --- if the file does not exist (or the config we use if the file cannot be read --- for some other reason). When the config gets loaded it gets layered on top --- of 'baseSavedConfig' so we do not need to include it into the initial --- values we save into the config file. 
--- -initialSavedConfig :: IO SavedConfig -initialSavedConfig = do - cacheDir <- defaultCacheDir - logsDir <- defaultLogsDir - worldFile <- defaultWorldFile - extraPath <- defaultExtraPath - return mempty { - savedGlobalFlags = mempty { - globalCacheDir = toFlag cacheDir, - globalRemoteRepos = toNubList [defaultRemoteRepo], - globalWorldFile = toFlag worldFile - }, - savedConfigureFlags = mempty { - configProgramPathExtra = toNubList extraPath - }, - savedInstallFlags = mempty { - installSummaryFile = toNubList [toPathTemplate (logsDir "build.log")], - installBuildReports= toFlag AnonymousReports, - installNumJobs = toFlag Nothing - } - } - ---TODO: misleading, there's no way to override this default --- either make it possible or rename to simply getCabalDir. -defaultCabalDir :: IO FilePath -defaultCabalDir = getAppUserDataDirectory "cabal" - -defaultConfigFile :: IO FilePath -defaultConfigFile = do - dir <- defaultCabalDir - return $ dir "config" - -defaultCacheDir :: IO FilePath -defaultCacheDir = do - dir <- defaultCabalDir - return $ dir "packages" - -defaultLogsDir :: IO FilePath -defaultLogsDir = do - dir <- defaultCabalDir - return $ dir "logs" - --- | Default position of the world file -defaultWorldFile :: IO FilePath -defaultWorldFile = do - dir <- defaultCabalDir - return $ dir "world" - -defaultExtraPath :: IO [FilePath] -defaultExtraPath = do - dir <- defaultCabalDir - return [dir "bin"] - -defaultCompiler :: CompilerFlavor -defaultCompiler = fromMaybe GHC defaultCompilerFlavor - -defaultUserInstall :: Bool -defaultUserInstall = True --- We do per-user installs by default on all platforms. We used to default to --- global installs on Windows but that no longer works on Windows Vista or 7. - -defaultRemoteRepo :: RemoteRepo -defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False - where - name = "hackage.haskell.org" - uri = URI "http:" (Just (URIAuth "" name "")) "/" "" "" - -- Note that lots of old ~/.cabal/config files will have the old url - -- http://hackage.haskell.org/packages/archive - -- but new config files can use the new url (without the /packages/archive) - -- and avoid having to do a http redirect - --- For the default repo we know extra information, fill this in. --- --- We need this because the 'defaultRemoteRepo' above is only used for the --- first time when a config file is made. So for users with older config files --- we might have only have older info. This lets us fill that in even for old --- config files. --- -addInfoForKnownRepos :: RemoteRepo -> RemoteRepo -addInfoForKnownRepos repo - | remoteRepoName repo == remoteRepoName defaultRemoteRepo - = useSecure . tryHttps . fixOldURI $ repo - where - fixOldURI r - | isOldHackageURI (remoteRepoURI r) - = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo } - | otherwise = r - - tryHttps r = r { remoteRepoShouldTryHttps = True } - - useSecure r@RemoteRepo{ - remoteRepoSecure = secure, - remoteRepoRootKeys = [], - remoteRepoKeyThreshold = 0 - } | secure /= Just False - = r { - --TODO: When we want to switch us from using opt-in to opt-out - -- security for the central hackage server, uncomment the - -- following line. That will cause the default (of unspecified) - -- to get interpreted as if it were "secure: True". For the - -- moment it means the keys get added but you have to manually - -- set "secure: True" to opt-in. 
- --remoteRepoSecure = Just True, - remoteRepoRootKeys = defaultHackageRemoteRepoKeys, - remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold - } - useSecure r = r -addInfoForKnownRepos other = other - --- | The current hackage.haskell.org repo root keys that we ship with cabal. ---- --- This lets us bootstrap trust in this repo without user intervention. --- These keys need to be periodically updated when new root keys are added. --- See the root key procedures for details. --- -defaultHackageRemoteRepoKeys :: [String] -defaultHackageRemoteRepoKeys = - [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0", - "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42", - "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3", - "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d", - "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" - ] - --- | The required threshold of root key signatures for hackage.haskell.org --- -defaultHackageRemoteRepoKeyThreshold :: Int -defaultHackageRemoteRepoKeyThreshold = 3 - --- --- * Config file reading --- - --- | Loads the main configuration, and applies additional defaults to give the --- effective configuration. To loads just what is actually in the config file, --- use 'loadRawConfig'. --- -loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig -loadConfig verbosity configFileFlag = do - config <- loadRawConfig verbosity configFileFlag - extendToEffectiveConfig config - -extendToEffectiveConfig :: SavedConfig -> IO SavedConfig -extendToEffectiveConfig config = do - base <- baseSavedConfig - let effective0 = base `mappend` config - globalFlags0 = savedGlobalFlags effective0 - effective = effective0 { - savedGlobalFlags = globalFlags0 { - globalRemoteRepos = - overNubList (map addInfoForKnownRepos) - (globalRemoteRepos globalFlags0) - } - } - return effective - --- | Like 'loadConfig' but does not apply any additional defaults, it just --- loads what is actually in the config file. This is thus suitable for --- comparing or editing a config file, but not suitable for using as the --- effective configuration. --- -loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig -loadRawConfig verbosity configFileFlag = do - (source, configFile) <- getConfigFilePathAndSource configFileFlag - minp <- readConfigFile mempty configFile - case minp of - Nothing -> do - notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "." - notice verbosity $ "Config file " ++ configFile ++ " not found." - createDefaultConfigFile verbosity configFile - Just (ParseOk ws conf) -> do - unless (null ws) $ warn verbosity $ - unlines (map (showPWarning configFile) ws) - return conf - Just (ParseFailed err) -> do - let (line, msg) = locatedErrorMsg err - die $ - "Error parsing config file " ++ configFile - ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg - - where - sourceMsg CommandlineOption = "commandline option" - sourceMsg EnvironmentVariable = "env var CABAL_CONFIG" - sourceMsg Default = "default config file" - -data ConfigFileSource = CommandlineOption - | EnvironmentVariable - | Default - --- | Returns the config file path, without checking that the file exists. --- The order of precedence is: input flag, CABAL_CONFIG, default location. -getConfigFilePath :: Flag FilePath -> IO FilePath -getConfigFilePath = fmap snd . 
getConfigFilePathAndSource - -getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath) -getConfigFilePathAndSource configFileFlag = - getSource sources - where - sources = - [ (CommandlineOption, return . flagToMaybe $ configFileFlag) - , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment) - , (Default, Just `liftM` defaultConfigFile) ] - - getSource [] = error "no config file path candidate found." - getSource ((source,action): xs) = - action >>= maybe (getSource xs) (return . (,) source) - -readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) -readConfigFile initial file = handleNotExists $ - fmap (Just . parseConfig (ConstraintSourceMainConfig file) initial) - (readFile file) - - where - handleNotExists action = catchIO action $ \ioe -> - if isDoesNotExistError ioe - then return Nothing - else ioError ioe - -createDefaultConfigFile :: Verbosity -> FilePath -> IO SavedConfig -createDefaultConfigFile verbosity filePath = do - commentConf <- commentSavedConfig - initialConf <- initialSavedConfig - notice verbosity $ "Writing default configuration to " ++ filePath - writeConfigFile filePath commentConf initialConf - return initialConf - -writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () -writeConfigFile file comments vals = do - let tmpFile = file <.> "tmp" - createDirectoryIfMissing True (takeDirectory file) - writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" - renameFile tmpFile file - where - explanation = unlines - ["-- This is the configuration file for the 'cabal' command line tool." - ,"" - ,"-- The available configuration options are listed below." - ,"-- Some of them have default values listed." - ,"" - ,"-- Lines (like this one) beginning with '--' are comments." - ,"-- Be careful with spaces and indentation because they are" - ,"-- used to indicate layout for nested sections." - ,"" - ,"-- Cabal library version: " ++ showVersion cabalVersion - ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version - ,"","" - ] - --- | These are the default values that get used in Cabal if a no value is --- given. We use these here to include in comments when we write out the --- initial config file so that the user can see what default value they are --- overriding. --- -commentSavedConfig :: IO SavedConfig -commentSavedConfig = do - userInstallDirs <- defaultInstallDirs defaultCompiler True True - globalInstallDirs <- defaultInstallDirs defaultCompiler False True - return SavedConfig { - savedGlobalFlags = defaultGlobalFlags, - savedInstallFlags = defaultInstallFlags, - savedConfigureExFlags = defaultConfigExFlags, - savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) { - configUserInstall = toFlag defaultUserInstall, - configAllowNewer = Just AllowNewerNone - }, - savedUserInstallDirs = fmap toFlag userInstallDirs, - savedGlobalInstallDirs = fmap toFlag globalInstallDirs, - savedUploadFlags = commandDefaultFlags uploadCommand, - savedReportFlags = commandDefaultFlags reportCommand, - savedHaddockFlags = defaultHaddockFlags - } - --- | All config file fields. 
--- -configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig] -configFieldDescriptions src = - - toSavedConfig liftGlobalFlag - (commandOptions (globalCommand []) ParseArgs) - ["version", "numeric-version", "config-file", "sandbox-config-file"] [] - - ++ toSavedConfig liftConfigFlag - (configureOptions ParseArgs) - (["builddir", "constraint", "dependency", "ipid"] - ++ map fieldName installDirsFields) - - -- This is only here because viewAsFieldDescr gives us a parser - -- that only recognises 'ghc' etc, the case-sensitive flag names, not - -- what the normal case-insensitive parser gives us. - [simpleField "compiler" - (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - ,let showAllowNewer Nothing = mempty - showAllowNewer (Just AllowNewerNone) = Disp.text "False" - showAllowNewer (Just _) = Disp.text "True" - - toAllowNewer True = Just AllowNewerAll - toAllowNewer False = Just AllowNewerNone - - pkgs = (Just . AllowNewerSome) `fmap` parseOptCommaList Text.parse - parseAllowNewer = (toAllowNewer `fmap` Text.parse) Parse.<++ pkgs in - simpleField "allow-newer" - showAllowNewer parseAllowNewer - configAllowNewer (\v flags -> flags { configAllowNewer = v }) - -- TODO: The following is a temporary fix. The "optimization" - -- and "debug-info" fields are OptArg, and viewAsFieldDescr - -- fails on that. Instead of a hand-written hackaged parser - -- and printer, we should handle this case properly in the - -- library. - ,liftField configOptimization (\v flags -> - flags { configOptimization = v }) $ - let name = "optimization" in - FieldDescr name - (\f -> case f of - Flag NoOptimisation -> Disp.text "False" - Flag NormalOptimisation -> Disp.text "True" - Flag MaximumOptimisation -> Disp.text "2" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoOptimisation) - | str == "True" -> ParseOk [] (Flag NormalOptimisation) - | str == "0" -> ParseOk [] (Flag NoOptimisation) - | str == "1" -> ParseOk [] (Flag NormalOptimisation) - | str == "2" -> ParseOk [] (Flag MaximumOptimisation) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name - ++ "' field is case sensitive, use 'True' or 'False'.") - ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ - let name = "debug-info" in - FieldDescr name - (\f -> case f of - Flag NoDebugInfo -> Disp.text "False" - Flag MinimalDebugInfo -> Disp.text "1" - Flag NormalDebugInfo -> Disp.text "True" - Flag MaximalDebugInfo -> Disp.text "3" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) - | str == "True" -> ParseOk [] (Flag NormalDebugInfo) - | str == "0" -> ParseOk [] (Flag NoDebugInfo) - | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) - | str == "2" -> ParseOk [] (Flag NormalDebugInfo) - | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name - ++ "' field is case sensitive, use 'True' or 'False'.") - ] - - ++ toSavedConfig liftConfigExFlag - (configureExOptions ParseArgs src) - [] [] - - ++ toSavedConfig 
liftInstallFlag - (installOptions ParseArgs) - ["dry-run", "only", "only-dependencies", "dependencies-only"] [] - - ++ toSavedConfig liftUploadFlag - (commandOptions uploadCommand ParseArgs) - ["verbose", "check", "documentation"] [] - - ++ toSavedConfig liftReportFlag - (commandOptions reportCommand ParseArgs) - ["verbose", "username", "password"] [] - --FIXME: this is a hack, hiding the user name and password. - -- But otherwise it masks the upload ones. Either need to - -- share the options or make then distinct. In any case - -- they should probably be per-server. - - ++ [ viewAsFieldDescr - $ optionDistPref - (configDistPref . savedConfigureFlags) - (\distPref config -> - config - { savedConfigureFlags = (savedConfigureFlags config) { - configDistPref = distPref } - , savedHaddockFlags = (savedHaddockFlags config) { - haddockDistPref = distPref } - } - ) - ParseArgs - ] - - where - toSavedConfig lift options exclusions replacements = - [ lift (fromMaybe field replacement) - | opt <- options - , let field = viewAsFieldDescr opt - name = fieldName field - replacement = find ((== name) . fieldName) replacements - , name `notElem` exclusions ] - optional = Parse.option mempty . fmap toFlag - --- TODO: next step, make the deprecated fields elicit a warning. --- -deprecatedFieldDescriptions :: [FieldDescr SavedConfig] -deprecatedFieldDescriptions = - [ liftGlobalFlag $ - listField "repos" - (Disp.text . showRepo) parseRepo - (fromNubList . globalRemoteRepos) - (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) - , liftGlobalFlag $ - simpleField "cachedir" - (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) - globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) - , liftUploadFlag $ - simpleField "hackage-username" - (Disp.text . fromFlagOrDefault "" . fmap unUsername) - (optional (fmap Username parseTokenQ)) - uploadUsername (\d cfg -> cfg { uploadUsername = d }) - , liftUploadFlag $ - simpleField "hackage-password" - (Disp.text . fromFlagOrDefault "" . fmap unPassword) - (optional (fmap Password parseTokenQ)) - uploadPassword (\d cfg -> cfg { uploadPassword = d }) - , liftUploadFlag $ - spaceListField "hackage-password-command" - Disp.text parseTokenQ - (fromFlagOrDefault [] . uploadPasswordCmd) - (\d cfg -> cfg { uploadPasswordCmd = Flag d }) - ] - ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields - ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields - where - optional = Parse.option mempty . 
fmap toFlag - modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a - modifyFieldName f d = d { fieldName = f (fieldName d) } - -liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) - -> FieldDescr SavedConfig -liftUserInstallDirs = liftField - savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) - -liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) - -> FieldDescr SavedConfig -liftGlobalInstallDirs = liftField - savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) - -liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig -liftGlobalFlag = liftField - savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) - -liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig -liftConfigFlag = liftField - savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) - -liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig -liftConfigExFlag = liftField - savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) - -liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig -liftInstallFlag = liftField - savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) - -liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig -liftUploadFlag = liftField - savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) - -liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig -liftReportFlag = liftField - savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) - -parseConfig :: ConstraintSource - -> SavedConfig - -> String - -> ParseResult SavedConfig -parseConfig src initial = \str -> do - fields <- readFields str - let (knownSections, others) = partition isKnownSection fields - config <- parse others - let user0 = savedUserInstallDirs config - global0 = savedGlobalInstallDirs config - (remoteRepoSections0, haddockFlags, user, global, paths, args) <- - foldM parseSections - ([], savedHaddockFlags config, user0, global0, [], []) - knownSections - - let remoteRepoSections = - reverse - . 
nubBy ((==) `on` remoteRepoName) - $ remoteRepoSections0 - - return config { - savedGlobalFlags = (savedGlobalFlags config) { - globalRemoteRepos = toNubList remoteRepoSections - }, - savedConfigureFlags = (savedConfigureFlags config) { - configProgramPaths = paths, - configProgramArgs = args - }, - savedHaddockFlags = haddockFlags, - savedUserInstallDirs = user, - savedGlobalInstallDirs = global - } - - where - isKnownSection (ParseUtils.Section _ "repository" _ _) = True - isKnownSection (ParseUtils.F _ "remote-repo" _) = True - isKnownSection (ParseUtils.Section _ "haddock" _ _) = True - isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True - isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True - isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True - isKnownSection _ = False - - parse = parseFields (configFieldDescriptions src - ++ deprecatedFieldDescriptions) initial - - parseSections (rs, h, u, g, p, a) - (ParseUtils.Section _ "repository" name fs) = do - r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs - when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $ - warning $ "'key-threshold' for repository " ++ show (remoteRepoName r') - ++ " higher than number of keys" - when (not (null (remoteRepoRootKeys r')) - && remoteRepoSecure r' /= Just True) $ - warning $ "'root-keys' for repository " ++ show (remoteRepoName r') - ++ " non-empty, but 'secure' not set to True." - return (r':rs, h, u, g, p, a) - - parseSections (rs, h, u, g, p, a) - (ParseUtils.F lno "remote-repo" raw) = do - let mr' = readRepo raw - r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr' - return (r':rs, h, u, g, p, a) - - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "haddock" name fs) - | name == "" = do h' <- parseFields haddockFlagsFields h fs - return (rs, h', u, g, p, a) - | otherwise = do - warning "The 'haddock' section should be unnamed" - return accum - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "install-dirs" name fs) - | name' == "user" = do u' <- parseFields installDirsFields u fs - return (rs, h, u', g, p, a) - | name' == "global" = do g' <- parseFields installDirsFields g fs - return (rs, h, u, g', p, a) - | otherwise = do - warning "The 'install-paths' section should be for 'user' or 'global'" - return accum - where name' = lowercase name - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "program-locations" name fs) - | name == "" = do p' <- parseFields withProgramsFields p fs - return (rs, h, u, g, p', a) - | otherwise = do - warning "The 'program-locations' section should be unnamed" - return accum - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "program-default-options" name fs) - | name == "" = do a' <- parseFields withProgramOptionsFields a fs - return (rs, h, u, g, p, a') - | otherwise = do - warning "The 'program-default-options' section should be unnamed" - return accum - parseSections accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - -showConfig :: SavedConfig -> String -showConfig = showConfigWithComments mempty - -showConfigWithComments :: SavedConfig -> SavedConfig -> String -showConfigWithComments comment vals = Disp.render $ - case fmap ppRemoteRepoSection . fromNubList . globalRemoteRepos - . 
savedGlobalFlags $ vals of - [] -> Disp.text "" - (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs - $+$ Disp.text "" - $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown)) - mcomment vals - $+$ Disp.text "" - $+$ ppSection "haddock" "" haddockFlagsFields - (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) - $+$ Disp.text "" - $+$ installDirsSection "user" savedUserInstallDirs - $+$ Disp.text "" - $+$ installDirsSection "global" savedGlobalInstallDirs - $+$ Disp.text "" - $+$ configFlagsSection "program-locations" withProgramsFields - configProgramPaths - $+$ Disp.text "" - $+$ configFlagsSection "program-default-options" withProgramOptionsFields - configProgramArgs - where - mcomment = Just comment - installDirsSection name field = - ppSection "install-dirs" name installDirsFields - (fmap field mcomment) (field vals) - configFlagsSection name fields field = - ppSection name "" fields - (fmap (field . savedConfigureFlags) mcomment) - ((field . savedConfigureFlags) vals) - - -- skip fields based on field name. currently only skips "remote-repo", - -- because that is rendered as a section. (see 'ppRemoteRepoSection'.) - skipSomeFields = filter ((/= "remote-repo") . fieldName) - --- | Fields for the 'install-dirs' sections. -installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] -installDirsFields = map viewAsFieldDescr installDirsOptions - -ppRemoteRepoSection :: RemoteRepo -> Doc -ppRemoteRepoSection vals = ppSection "repository" (remoteRepoName vals) - remoteRepoFields def vals - where - def = Just (emptyRemoteRepo "ignored") { remoteRepoSecure = Just False } - -remoteRepoFields :: [FieldDescr RemoteRepo] -remoteRepoFields = - [ simpleField "url" - (text . show) (parseTokenQ >>= parseURI') - remoteRepoURI (\x repo -> repo { remoteRepoURI = x }) - , simpleField "secure" - showSecure (Just `fmap` Text.parse) - remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x }) - , listField "root-keys" - text parseTokenQ - remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x }) - , simpleField "key-threshold" - showThreshold Text.parse - remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x }) - ] - where - parseURI' uriString = - case parseURI uriString of - Nothing -> fail $ "remote-repo: no parse on " ++ show uriString - Just uri -> return uri - - showSecure Nothing = mempty -- default 'secure' setting - showSecure (Just True) = text "True" -- user explicitly enabled it - showSecure (Just False) = text "False" -- user explicitly disabled it - - -- If the key-threshold is set to 0, we omit it as this is the default - -- and it looks odd to have a value for key-threshold but not for 'secure' - -- (note that an empty list of keys is already omitted by default, since - -- that is what we do for all list fields) - showThreshold 0 = mempty - showThreshold t = text (show t) - --- | Fields for the 'haddock' section. -haddockFlagsFields :: [FieldDescr HaddockFlags] -haddockFlagsFields = [ field - | opt <- haddockOptions ParseArgs - , let field = viewAsFieldDescr opt - name = fieldName field - , name `notElem` exclusions ] - where - exclusions = ["verbose", "builddir", "for-hackage"] - --- | Fields for the 'program-locations' section. -withProgramsFields :: [FieldDescr [(String, FilePath)]] -withProgramsFields = - map viewAsFieldDescr $ - programConfigurationPaths' (++ "-location") defaultProgramConfiguration - ParseArgs id (++) - --- | Fields for the 'program-default-options' section. 
-withProgramOptionsFields :: [FieldDescr [(String, [String])]] -withProgramOptionsFields = - map viewAsFieldDescr $ - programConfigurationOptions defaultProgramConfiguration ParseArgs id (++) - --- | Get the differences (as a pseudo code diff) between the user's --- '~/.cabal/config' and the one that cabal would generate if it didn't exist. -userConfigDiff :: GlobalFlags -> IO [String] -userConfigDiff globalFlags = do - userConfig <- loadRawConfig normal (globalConfigFile globalFlags) - testConfig <- initialSavedConfig - return $ reverse . foldl' createDiff [] . M.toList - $ M.unionWith combine - (M.fromList . map justFst $ filterShow testConfig) - (M.fromList . map justSnd $ filterShow userConfig) - where - justFst (a, b) = (a, (Just b, Nothing)) - justSnd (a, b) = (a, (Nothing, Just b)) - - combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) - combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) - combine x y = error $ "Can't happen : userConfigDiff " - ++ show x ++ " " ++ show y - - createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] - createDiff acc (key, (Just a, Just b)) - | a == b = acc - | otherwise = ("+ " ++ key ++ ": " ++ b) - : ("- " ++ key ++ ": " ++ a) : acc - createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc - createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc - createDiff acc (_, (Nothing, Nothing)) = acc - - filterShow :: SavedConfig -> [(String, String)] - filterShow cfg = map keyValueSplit - . filter (\s -> not (null s) && any (== ':') s) - . map nonComment - . lines - $ showConfig cfg - - nonComment [] = [] - nonComment ('-':'-':_) = [] - nonComment (x:xs) = x : nonComment xs - - topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace - - keyValueSplit s = - let (left, right) = break (== ':') s - in (topAndTail left, topAndTail (drop 1 right)) - - --- | Update the user's ~/.cabal/config' keeping the user's customizations. -userConfigUpdate :: Verbosity -> GlobalFlags -> IO () -userConfigUpdate verbosity globalFlags = do - userConfig <- loadRawConfig normal (globalConfigFile globalFlags) - newConfig <- initialSavedConfig - commentConf <- commentSavedConfig - cabalFile <- getConfigFilePath $ globalConfigFile globalFlags - let backup = cabalFile ++ ".backup" - notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." - renameFile cabalFile backup - notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." - writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Configure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Configure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Configure.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Configure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,392 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Configure --- Copyright : (c) David Himmelstrup 2005, --- Duncan Coutts 2005 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- High level interface to configuring a package. 
------------------------------------------------------------------------------ -module Distribution.Client.Configure ( - configure, - configureSetupScript, - chooseCabalVersion, - checkConfigExFlags - ) where - -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types - ( ConstraintSource(..) - , LabeledPackageConstraint(..), showConstraintSource ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.PackageIndex ( PackageIndex, elemByPackageName ) -import Distribution.Client.PkgConfigDb (PkgConfigDb, readPkgConfigDb) -import Distribution.Client.Setup - ( ConfigExFlags(..), configureCommand, filterConfigureFlags - , RepoContext(..) ) -import Distribution.Client.Types as Source -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Targets - ( userToPackageConstraint, userConstraintPackageName ) -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Package (PackageId) -import Distribution.Client.JobControl (Lock) - -import Distribution.Simple.Compiler - ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) -import Distribution.Simple.Program (ProgramConfiguration ) -import Distribution.Simple.Setup - ( ConfigFlags(..), AllowNewer(..) - , fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, lookupPackageName ) -import Distribution.Simple.Utils - ( defaultPackageDesc ) -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Package - ( Package(..), UnitId, packageName - , Dependency(..), thisPackageVersion - ) -import qualified Distribution.PackageDescription as PkgDesc -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription ) -import Distribution.Version - ( anyVersion, thisVersion ) -import Distribution.Simple.Utils as Utils - ( warn, notice, debug, die ) -import Distribution.Simple.Setup - ( isAllowNewer ) -import Distribution.System - ( Platform ) -import Distribution.Text ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity ) -import Distribution.Version - ( Version(..), VersionRange, orLaterVersion ) - -import Control.Monad (unless) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -#endif -import Data.Maybe (isJust, fromMaybe) - --- | Choose the Cabal version such that the setup scripts compiled against this --- version will support the given command-line flags. -chooseCabalVersion :: ConfigFlags -> Maybe Version -> VersionRange -chooseCabalVersion configFlags maybeVersion = - maybe defaultVersionRange thisVersion maybeVersion - where - -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed - -- for '--allow-newer' to work. 
- allowNewer = isAllowNewer - (fromMaybe AllowNewerNone $ configAllowNewer configFlags) - - defaultVersionRange = if allowNewer - then orLaterVersion (Version [1,19,2] []) - else anyVersion - --- | Configure the package found in the local directory -configure :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> ConfigFlags - -> ConfigExFlags - -> [String] - -> IO () -configure verbosity packageDBs repoCtxt comp platform conf - configFlags configExFlags extraArgs = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity conf - - checkConfigExFlags verbosity installedPkgIndex - (packageIndex sourcePkgDb) configExFlags - - progress <- planLocalPackage verbosity comp platform configFlags configExFlags - installedPkgIndex sourcePkgDb pkgConfigDb - - notice verbosity "Resolving dependencies..." - maybePlan <- foldProgress logMsg (return . Left) (return . Right) - progress - case maybePlan of - Left message -> do - warn verbosity $ - "solver failed to find a solution:\n" - ++ message - ++ "Trying configure anyway." - setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) - Nothing configureCommand (const configFlags) extraArgs - - Right installPlan -> case InstallPlan.ready installPlan of - [pkg@(ReadyPackage - (ConfiguredPackage (SourcePackage _ _ (LocalUnpackedPackage _) _) - _ _ _) - _)] -> do - configurePackage verbosity - platform (compilerInfo comp) - (setupScriptOptions installedPkgIndex (Just pkg)) - configFlags pkg extraArgs - - _ -> die $ "internal error: configure install plan should have exactly " - ++ "one local ready package." - - where - setupScriptOptions :: InstalledPackageIndex - -> Maybe ReadyPackage - -> SetupScriptOptions - setupScriptOptions = - configureSetupScript - packageDBs - comp - platform - conf - (fromFlagOrDefault - (useDistPref defaultSetupScriptOptions) - (configDistPref configFlags)) - (chooseCabalVersion - configFlags - (flagToMaybe (configCabalVersion configExFlags))) - Nothing - False - - logMsg message rest = debug verbosity message >> rest - -configureSetupScript :: PackageDBStack - -> Compiler - -> Platform - -> ProgramConfiguration - -> FilePath - -> VersionRange - -> Maybe Lock - -> Bool - -> InstalledPackageIndex - -> Maybe ReadyPackage - -> SetupScriptOptions -configureSetupScript packageDBs - comp - platform - conf - distPref - cabalVersion - lock - forceExternal - index - mpkg - = SetupScriptOptions { - useCabalVersion = cabalVersion - , useCabalSpecVersion = Nothing - , useCompiler = Just comp - , usePlatform = Just platform - , usePackageDB = packageDBs' - , usePackageIndex = index' - , useProgramConfig = conf - , useDistPref = distPref - , useLoggingHandle = Nothing - , useWorkingDir = Nothing - , setupCacheLock = lock - , useWin32CleanHack = False - , forceExternalSetupMethod = forceExternal - -- If we have explicit setup dependencies, list them; otherwise, we give - -- the empty list of dependencies; ideally, we would fix the version of - -- Cabal here, so that we no longer need the special case for that in - -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet - -- know the version of Cabal at this point, but only find this there. - -- Therefore, for now, we just leave this blank. 
- , useDependencies = fromMaybe [] explicitSetupDeps - , useDependenciesExclusive = not defaultSetupDeps && isJust explicitSetupDeps - , useVersionMacros = not defaultSetupDeps && isJust explicitSetupDeps - } - where - -- When we are compiling a legacy setup script without an explicit - -- setup stanza, we typically want to allow the UserPackageDB for - -- finding the Cabal lib when compiling any Setup.hs even if we're doing - -- a global install. However we also allow looking in a specific package - -- db. - packageDBs' :: PackageDBStack - index' :: Maybe InstalledPackageIndex - (packageDBs', index') = - case packageDBs of - (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs - , Nothing <- explicitSetupDeps - -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) - -- but if the user is using an odd db stack, don't touch it - _otherwise -> (packageDBs, Just index) - - maybeSetupBuildInfo :: Maybe PkgDesc.SetupBuildInfo - maybeSetupBuildInfo = do - ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _) _ _ _) _ - <- mpkg - PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg) - - -- Was a default 'custom-setup' stanza added by 'cabal-install' itself? If - -- so, 'setup-depends' must not be exclusive. See #3199. - defaultSetupDeps :: Bool - defaultSetupDeps = maybe False PkgDesc.defaultSetupDepends - maybeSetupBuildInfo - - explicitSetupDeps :: Maybe [(UnitId, PackageId)] - explicitSetupDeps = do - -- Check if there is an explicit setup stanza. - _buildInfo <- maybeSetupBuildInfo - -- Return the setup dependencies computed by the solver - ReadyPackage _ deps <- mpkg - return [ ( Installed.installedUnitId deppkg - , Installed.sourcePackageId deppkg - ) - | deppkg <- CD.setupDeps deps - ] - --- | Warn if any constraints or preferences name packages that are not in the --- source package index or installed package index. -checkConfigExFlags :: Package pkg - => Verbosity - -> InstalledPackageIndex - -> PackageIndex pkg - -> ConfigExFlags - -> IO () -checkConfigExFlags verbosity installedPkgIndex sourcePkgIndex flags = do - unless (null unknownConstraints) $ warn verbosity $ - "Constraint refers to an unknown package: " - ++ showConstraint (head unknownConstraints) - unless (null unknownPreferences) $ warn verbosity $ - "Preference refers to an unknown package: " - ++ display (head unknownPreferences) - where - unknownConstraints = filter (unknown . userConstraintPackageName . fst) $ - configExConstraints flags - unknownPreferences = filter (unknown . \(Dependency name _) -> name) $ - configPreferences flags - unknown pkg = null (lookupPackageName installedPkgIndex pkg) - && not (elemByPackageName sourcePkgIndex pkg) - showConstraint (uc, src) = - display uc ++ " (" ++ showConstraintSource src ++ ")" - --- | Make an 'InstallPlan' for the unpacked package in the current directory, --- and all its dependencies. 
--- -planLocalPackage :: Verbosity -> Compiler - -> Platform - -> ConfigFlags -> ConfigExFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> IO (Progress String String InstallPlan) -planLocalPackage verbosity comp platform configFlags configExFlags - installedPkgIndex (SourcePackageDb _ packagePrefs) pkgConfigDb = do - pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity - solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) - (compilerInfo comp) - - let -- We create a local package and ask to resolve a dependency on it - localPkg = SourcePackage { - packageInfoId = packageId pkg, - Source.packageDescription = pkg, - packageSource = LocalUnpackedPackage ".", - packageDescrOverride = Nothing - } - - testsEnabled = fromFlagOrDefault False $ configTests configFlags - benchmarksEnabled = - fromFlagOrDefault False $ configBenchmarks configFlags - - resolverParams = - removeUpperBounds - (fromMaybe AllowNewerNone $ configAllowNewer configFlags) - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- configPreferences configExFlags ] - - . addConstraints - -- version constraints from the config file or command line - -- TODO: should warn or error on constraints that are not on direct - -- deps or flag constraints not on the package in question. - [ LabeledPackageConstraint (userToPackageConstraint uc) src - | (uc, src) <- configExConstraints configExFlags ] - - . addConstraints - -- package flags from the config file or command line - [ let pc = PackageConstraintFlags (packageName pkg) - (configConfigurationsFlags configFlags) - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - ] - - . addConstraints - -- '--enable-tests' and '--enable-benchmarks' constraints from - -- the config file or command line - [ let pc = PackageConstraintStanzas (packageName pkg) $ - [ TestStanzas | testsEnabled ] ++ - [ BenchStanzas | benchmarksEnabled ] - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - ] - - $ standardInstallPolicy - installedPkgIndex - (SourcePackageDb mempty packagePrefs) - [SpecificSourcePackage localPkg] - - return (resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams) - - --- | Call an installer for an 'SourcePackage' but override the configure --- flags with the ones given by the 'ReadyPackage'. In particular the --- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly --- versioned package dependencies. So we ignore any previous partial flag --- assignment or dependency constraints and use the new ones. --- --- NB: when updating this function, don't forget to also update --- 'installReadyPackage' in D.C.Install. -configurePackage :: Verbosity - -> Platform -> CompilerInfo - -> SetupScriptOptions - -> ConfigFlags - -> ReadyPackage - -> [String] - -> IO () -configurePackage verbosity platform comp scriptOptions configFlags - (ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _) - flags stanzas _) - deps) - extraArgs = - - setupWrapper verbosity - scriptOptions (Just pkg) configureCommand configureFlags extraArgs - - where - configureFlags = filterConfigureFlags configFlags { - configConfigurationsFlags = flags, - -- We generate the legacy constraints as well as the new style precise - -- deps. In the end only one set gets passed to Setup.hs configure, - -- depending on the Cabal version we are talking to. 
- configConstraints = [ thisPackageVersion (packageId deppkg) - | deppkg <- CD.nonSetupDeps deps ], - configDependencies = [ (packageName (Installed.sourcePackageId deppkg), - Installed.installedUnitId deppkg) - | deppkg <- CD.nonSetupDeps deps ], - -- Use '--exact-configuration' if supported. - configExactConfiguration = toFlag True, - configVerbosity = toFlag verbosity, - configBenchmarks = toFlag (BenchStanzas `elem` stanzas), - configTests = toFlag (TestStanzas `elem` stanzas) - } - - pkg = case finalizePackageDescription flags - (const True) - platform comp [] (enableStanzas stanzas gpkg) of - Left _ -> error "finalizePackageDescription ReadyPackage failed" - Right (desc, _) -> desc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Assignment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Assignment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Assignment.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Assignment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,150 +0,0 @@ -module Distribution.Client.Dependency.Modular.Assignment - ( Assignment(..) - , FAssignment - , SAssignment - , PreAssignment(..) - , extend - , toCPs - ) where - -import Control.Applicative -import Control.Monad -import Data.Array as A -import Data.List as L -import Data.Map as M -import Data.Maybe -import Prelude hiding (pi) - -import Language.Haskell.Extension (Extension, Language) - -import Distribution.PackageDescription (FlagAssignment) -- from Cabal -import Distribution.Client.Types (OptionalStanza) -import Distribution.Client.Utils.LabeledGraph -import Distribution.Client.ComponentDeps (ComponentDeps, Component) -import qualified Distribution.Client.ComponentDeps as CD - -import Distribution.Client.Dependency.Modular.Configured -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Version - --- | A (partial) package assignment. Qualified package names --- are associated with instances. -type PAssignment = Map QPN I - --- | A (partial) package preassignment. Qualified package names --- are associated with constrained instances. Constrained instances --- record constraints about the instances that can still be chosen, --- and in the extreme case fix a concrete instance. -type PPreAssignment = Map QPN (CI QPN) -type FAssignment = Map QFN Bool -type SAssignment = Map QSN Bool - --- | A (partial) assignment of variables. -data Assignment = A PAssignment FAssignment SAssignment - deriving (Show, Eq) - --- | A preassignment comprises knowledge about variables, but not --- necessarily fixed values. -data PreAssignment = PA PPreAssignment FAssignment SAssignment - --- | Extend a package preassignment. --- --- Takes the variable that causes the new constraints, a current preassignment --- and a set of new dependency constraints. --- --- We're trying to extend the preassignment with each dependency one by one. --- Each dependency is for a particular variable. We check if we already have --- constraints for that variable in the current preassignment. If so, we're --- trying to merge the constraints. 
--- --- Either returns a witness of the conflict that would arise during the merge, --- or the successfully extended assignment. -extend :: (Extension -> Bool) -- ^ is a given extension supported - -> (Language -> Bool) -- ^ is a given language supported - -> (PN -> VR -> Bool) -- ^ is a given pkg-config requirement satisfiable - -> Var QPN - -> PPreAssignment -> [Dep QPN] -> Either (ConflictSet QPN, [Dep QPN]) PPreAssignment -extend extSupported langSupported pkgPresent var = foldM extendSingle - where - - extendSingle :: PPreAssignment -> Dep QPN - -> Either (ConflictSet QPN, [Dep QPN]) PPreAssignment - extendSingle a (Ext ext ) = - if extSupported ext then Right a - else Left (varToConflictSet var, [Ext ext]) - extendSingle a (Lang lang) = - if langSupported lang then Right a - else Left (varToConflictSet var, [Lang lang]) - extendSingle a (Pkg pn vr) = - if pkgPresent pn vr then Right a - else Left (varToConflictSet var, [Pkg pn vr]) - extendSingle a (Dep qpn ci) = - let ci' = M.findWithDefault (Constrained []) qpn a - in case (\ x -> M.insert qpn x a) <$> merge ci' ci of - Left (c, (d, d')) -> Left (c, L.map (Dep qpn) (simplify (P qpn) d d')) - Right x -> Right x - - -- We're trying to remove trivial elements of the conflict. If we're just - -- making a choice pkg == instance, and pkg => pkg == instance is a part - -- of the conflict, then this info is clear from the context and does not - -- have to be repeated. - simplify v (Fixed _ var') c | v == var && var' == var = [c] - simplify v c (Fixed _ var') | v == var && var' == var = [c] - simplify _ c d = [c, d] - --- | Delivers an ordered list of fully configured packages. --- --- TODO: This function is (sort of) ok. However, there's an open bug --- w.r.t. unqualification. There might be several different instances --- of one package version chosen by the solver, which will lead to --- clashes. -toCPs :: Assignment -> RevDepMap -> [CP QPN] -toCPs (A pa fa sa) rdm = - let - -- get hold of the graph - g :: Graph Component - vm :: Vertex -> ((), QPN, [(Component, QPN)]) - cvm :: QPN -> Maybe Vertex - -- Note that the RevDepMap contains duplicate dependencies. Therefore the nub. - (g, vm, cvm) = graphFromEdges (L.map (\ (x, xs) -> ((), x, nub xs)) - (M.toList rdm)) - tg :: Graph Component - tg = transposeG g - -- Topsort the dependency graph, yielding a list of pkgs in the right order. - -- The graph will still contain all the installed packages, and it might - -- contain duplicates, because several variables might actually resolve to - -- the same package in the presence of qualified package names. - ps :: [PI QPN] - ps = L.map ((\ (_, x, _) -> PI x (pa M.! x)) . vm) $ - topSort g - -- Determine the flags per package, by walking over and regrouping the - -- complete flag assignment by package. - fapp :: Map QPN FlagAssignment - fapp = M.fromListWith (++) $ - L.map (\ ((FN (PI qpn _) fn), b) -> (qpn, [(fn, b)])) $ - M.toList $ - fa - -- Stanzas per package. - sapp :: Map QPN [OptionalStanza] - sapp = M.fromListWith (++) $ - L.map (\ ((SN (PI qpn _) sn), b) -> (qpn, if b then [sn] else [])) $ - M.toList $ - sa - -- Dependencies per package. - depp :: QPN -> [(Component, PI QPN)] - depp qpn = let v :: Vertex - v = fromJust (cvm qpn) - dvs :: [(Component, Vertex)] - dvs = tg A.! v - in L.map (\ (comp, dv) -> case vm dv of (_, x, _) -> (comp, PI x (pa M.! x))) dvs - -- Translated to PackageDeps - depp' :: QPN -> ComponentDeps [PI QPN] - depp' = CD.fromList . L.map (\(comp, d) -> (comp, [d])) . 
depp - in - L.map (\ pi@(PI qpn _) -> CP pi - (M.findWithDefault [] qpn fapp) - (M.findWithDefault [] qpn sapp) - (depp' qpn)) - ps diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Builder.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Builder.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Builder.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Builder.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,188 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Client.Dependency.Modular.Builder (buildTree) where - --- Building the search tree. --- --- In this phase, we build a search tree that is too large, i.e, it contains --- invalid solutions. We keep track of the open goals at each point. We --- nondeterministically pick an open goal (via a goal choice node), create --- subtrees according to the index and the available solutions, and extend the --- set of open goals by superficially looking at the dependencies recorded in --- the index. --- --- For each goal, we keep track of all the *reasons* why it is being --- introduced. These are for debugging and error messages, mainly. A little bit --- of care has to be taken due to the way we treat flags. If a package has --- flag-guarded dependencies, we cannot introduce them immediately. Instead, we --- store the entire dependency. - -import Data.List as L -import Data.Map as M -import Prelude hiding (sequence, mapM) - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Index -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.PSQ (PSQ) -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import Distribution.Client.Dependency.Modular.Tree - -import Distribution.Client.ComponentDeps (Component) - --- | The state needed during the build phase of the search tree. -data BuildState = BS { - index :: Index, -- ^ information about packages and their dependencies - rdeps :: RevDepMap, -- ^ set of all package goals, completed and open, with reverse dependencies - open :: PSQ (OpenGoal ()) (), -- ^ set of still open goals (flag and package goals) - next :: BuildType, -- ^ kind of node to generate next - qualifyOptions :: QualifyOptions -- ^ qualification options -} - --- | Extend the set of open goals with the new goals listed. --- --- We also adjust the map of overall goals, and keep track of the --- reverse dependencies of each of the goals. -extendOpen :: QPN -> [OpenGoal Component] -> BuildState -> BuildState -extendOpen qpn' gs s@(BS { rdeps = gs', open = o' }) = go gs' o' gs - where - go :: RevDepMap -> PSQ (OpenGoal ()) () -> [OpenGoal Component] -> BuildState - go g o [] = s { rdeps = g, open = o } - go g o (ng@(OpenGoal (Flagged _ _ _ _) _gr) : ngs) = go g (cons' ng () o) ngs - -- Note: for 'Flagged' goals, we always insert, so later additions win. - -- This is important, because in general, if a goal is inserted twice, - -- the later addition will have better dependency information. 
- go g o (ng@(OpenGoal (Stanza _ _ ) _gr) : ngs) = go g (cons' ng () o) ngs - go g o (ng@(OpenGoal (Simple (Dep qpn _) c) _gr) : ngs) - | qpn == qpn' = go g o ngs - -- we ignore self-dependencies at this point; TODO: more care may be needed - | qpn `M.member` g = go (M.adjust ((c, qpn'):) qpn g) o ngs - | otherwise = go (M.insert qpn [(c, qpn')] g) (cons' ng () o) ngs - -- code above is correct; insert/adjust have different arg order - go g o ( (OpenGoal (Simple (Ext _ext ) _) _gr) : ngs) = go g o ngs - go g o ( (OpenGoal (Simple (Lang _lang)_) _gr) : ngs) = go g o ngs - go g o ( (OpenGoal (Simple (Pkg _pn _vr)_) _gr) : ngs)= go g o ngs - - cons' = P.cons . forgetCompOpenGoal - --- | Given the current scope, qualify all the package names in the given set of --- dependencies and then extend the set of open goals accordingly. -scopedExtendOpen :: QPN -> I -> QGoalReason -> FlaggedDeps Component PN -> FlagInfo -> - BuildState -> BuildState -scopedExtendOpen qpn i gr fdeps fdefs s = extendOpen qpn gs s - where - -- Qualify all package names - qfdeps = qualifyDeps (qualifyOptions s) qpn fdeps - -- Introduce all package flags - qfdefs = L.map (\ (fn, b) -> Flagged (FN (PI qpn i) fn) b [] []) $ M.toList fdefs - -- Combine new package and flag goals - gs = L.map (flip OpenGoal gr) (qfdefs ++ qfdeps) - -- NOTE: - -- - -- In the expression @qfdefs ++ qfdeps@ above, flags occur potentially - -- multiple times, both via the flag declaration and via dependencies. - -- The order is potentially important, because the occurrences via - -- dependencies may record flag-dependency information. After a number - -- of bugs involving computing this information incorrectly, however, - -- we're currently not using carefully computed inter-flag dependencies - -- anymore, but instead use 'simplifyVar' when computing conflict sets - -- to map all flags of one package to a single flag for conflict set - -- purposes, thereby treating them all as interdependent. - -- - -- If we ever move to a more clever algorithm again, then the line above - -- needs to be looked at very carefully, and probably be replaced by - -- more systematically computed flag dependency information. - --- | Datatype that encodes what to build next -data BuildType = - Goals -- ^ build a goal choice node - | OneGoal (OpenGoal ()) -- ^ build a node for this goal - | Instance QPN I PInfo QGoalReason -- ^ build a tree for a concrete instance - deriving Show - -build :: BuildState -> Tree QGoalReason -build = ana go - where - go :: BuildState -> TreeF QGoalReason BuildState - - -- If we have a choice between many goals, we just record the choice in - -- the tree. We select each open goal in turn, and before we descend, remove - -- it from the queue of open goals. - go bs@(BS { rdeps = rds, open = gs, next = Goals }) - | P.null gs = DoneF rds - | otherwise = GoalChoiceF (P.mapWithKey (\ g (_sc, gs') -> bs { next = OneGoal g, open = gs' }) - (P.splits gs)) - - -- If we have already picked a goal, then the choice depends on the kind - -- of goal. - -- - -- For a package, we look up the instances available in the global info, - -- and then handle each instance in turn. 
- go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Ext _ ) _) _ ) }) = - error "Distribution.Client.Dependency.Modular.Builder: build.go called with Ext goal" - go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Lang _ ) _) _ ) }) = - error "Distribution.Client.Dependency.Modular.Builder: build.go called with Lang goal" - go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Pkg _ _ ) _) _ ) }) = - error "Distribution.Client.Dependency.Modular.Builder: build.go called with Pkg goal" - go bs@(BS { index = idx, next = OneGoal (OpenGoal (Simple (Dep qpn@(Q _ pn) _) _) gr) }) = - -- If the package does not exist in the index, we construct an emty PChoiceF node for it - -- After all, we have no choices here. Alternatively, we could immediately construct - -- a Fail node here, but that would complicate the construction of conflict sets. - -- We will probably want to give this case special treatment when generating error - -- messages though. - case M.lookup pn idx of - Nothing -> PChoiceF qpn gr (P.fromList []) - Just pis -> PChoiceF qpn gr (P.fromList (L.map (\ (i, info) -> - (POption i Nothing, bs { next = Instance qpn i info gr })) - (M.toList pis))) - -- TODO: data structure conversion is rather ugly here - - -- For a flag, we create only two subtrees, and we create them in the order - -- that is indicated by the flag default. - -- - -- TODO: Should we include the flag default in the tree? - go bs@(BS { next = OneGoal (OpenGoal (Flagged qfn@(FN (PI qpn _) _) (FInfo b m w) t f) gr) }) = - FChoiceF qfn gr (w || trivial) m (P.fromList (reorder b - [(True, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn True )) t) bs) { next = Goals }), - (False, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn False)) f) bs) { next = Goals })])) - where - reorder True = id - reorder False = reverse - trivial = L.null t && L.null f - - -- For a stanza, we also create only two subtrees. The order is initially - -- False, True. This can be changed later by constraints (force enabling - -- the stanza by replacing the False branch with failure) or preferences - -- (try enabling the stanza if possible by moving the True branch first). - - go bs@(BS { next = OneGoal (OpenGoal (Stanza qsn@(SN (PI qpn _) _) t) gr) }) = - SChoiceF qsn gr trivial (P.fromList - [(False, bs { next = Goals }), - (True, (extendOpen qpn (L.map (flip OpenGoal (SDependency qsn)) t) bs) { next = Goals })]) - where - trivial = L.null t - - -- For a particular instance, we change the state: we update the scope, - -- and furthermore we update the set of goals. - -- - -- TODO: We could inline this above. - go bs@(BS { next = Instance qpn i (PInfo fdeps fdefs _) _gr }) = - go ((scopedExtendOpen qpn i (PDependency (PI qpn i)) fdeps fdefs bs) - { next = Goals }) - --- | Interface to the tree builder. Just takes an index and a list of package names, --- and computes the initial state and then the tree from there. 
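The tree-builder interface just described boils down to: take an index of candidates plus a list of goals and grow a lazily evaluated choice tree, failing when a goal is absent from the index. The standalone sketch below mirrors only that shape; ToyTree and toyBuild are invented toy names over a plain Data.Map, not the real Index/Tree types used here.

import qualified Data.Map as M

-- Toy stand-ins for the solver's Tree and Index (illustration only).
data ToyTree = ToyChoice String [(Int, ToyTree)]   -- a choice node for one goal
             | ToyDone                             -- all goals resolved
             | ToyFail                             -- goal missing from the index
  deriving Show

-- Grow a choice tree from an index of candidate versions and a list of
-- goals, handling the goals left to right.
toyBuild :: M.Map String [Int] -> [String] -> ToyTree
toyBuild _   []          = ToyDone
toyBuild idx (goal : gs) =
  case M.lookup goal idx of
    Nothing -> ToyFail
    Just vs -> ToyChoice goal [ (v, toyBuild idx gs) | v <- vs ]

main :: IO ()
main = print (toyBuild (M.fromList [("a", [1, 2]), ("b", [1])]) ["a", "b"])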
-buildTree :: Index -> Bool -> [PN] -> Tree QGoalReason -buildTree idx ind igs = - build BS { - index = idx - , rdeps = M.fromList (L.map (\ qpn -> (qpn, [])) qpns) - , open = P.fromList (L.map (\ qpn -> (topLevelGoal qpn, ())) qpns) - , next = Goals - , qualifyOptions = defaultQualifyOptions idx - } - where - topLevelGoal qpn = OpenGoal (Simple (Dep qpn (Constrained [])) ()) UserGoal - - qpns | ind = makeIndependent igs - | otherwise = L.map (Q (PP DefaultNamespace Unqualified)) igs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -module Distribution.Client.Dependency.Modular.ConfiguredConversion - ( convCP - ) where - -import Data.Maybe -import Prelude hiding (pi) - -import Distribution.Package (UnitId) - -import Distribution.Client.Types -import Distribution.Client.Dependency.Types (ResolverPackage(..)) -import qualified Distribution.Client.PackageIndex as CI -import qualified Distribution.Simple.PackageIndex as SI - -import Distribution.Client.Dependency.Modular.Configured -import Distribution.Client.Dependency.Modular.Package - -import Distribution.Client.ComponentDeps (ComponentDeps) - --- | Converts from the solver specific result @CP QPN@ into --- a 'ResolverPackage', which can then be converted into --- the install plan. -convCP :: SI.InstalledPackageIndex -> - CI.PackageIndex SourcePackage -> - CP QPN -> ResolverPackage -convCP iidx sidx (CP qpi fa es ds) = - case convPI qpi of - Left pi -> PreExisting - (fromJust $ SI.lookupUnitId iidx pi) - Right pi -> Configured $ ConfiguredPackage - srcpkg - fa - es - ds' - where - Just srcpkg = CI.lookupPackageId sidx pi - where - ds' :: ComponentDeps [ConfiguredId] - ds' = fmap (map convConfId) ds - -convPI :: PI QPN -> Either UnitId PackageId -convPI (PI _ (I _ (Inst pi))) = Left pi -convPI qpi = Right $ confSrcId $ convConfId qpi - -convConfId :: PI QPN -> ConfiguredId -convConfId (PI (Q _ pn) (I v loc)) = ConfiguredId { - confSrcId = sourceId - , confInstId = installedId - } - where - sourceId = PackageIdentifier pn v - installedId = case loc of - Inst pi -> pi - _otherwise -> fakeUnitId sourceId diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Configured.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Configured.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Configured.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Configured.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -module Distribution.Client.Dependency.Modular.Configured - ( CP(..) 
- ) where - -import Distribution.PackageDescription (FlagAssignment) -- from Cabal -import Distribution.Client.Types (OptionalStanza) -import Distribution.Client.ComponentDeps (ComponentDeps) - -import Distribution.Client.Dependency.Modular.Package - --- | A configured package is a package instance together with --- a flag assignment and complete dependencies. -data CP qpn = CP (PI qpn) FlagAssignment [OptionalStanza] (ComponentDeps [PI qpn]) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConflictSet.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConflictSet.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConflictSet.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/ConflictSet.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -{-# LANGUAGE CPP #-} --- | Conflict sets --- --- Intended for double import --- --- > import Distribution.Client.Dependency.Modular.ConflictSet (ConflictSet) --- > import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS -module Distribution.Client.Dependency.Modular.ConflictSet ( - ConflictSet -- opaque - , showCS - -- Set-like operations - , toList - , union - , unions - , insert - , empty - , singleton - , member - , filter - , fromList - ) where - -import Prelude hiding (filter) -import Data.List (intercalate) -import Data.Set (Set) -import qualified Data.Set as S - -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Var - --- | The set of variables involved in a solver conflict --- --- Since these variables should be preprocessed in some way, this type is --- kept abstract. -newtype ConflictSet qpn = CS { fromConflictSet :: Set (Var qpn) } - deriving (Eq, Ord, Show) - -showCS :: ConflictSet QPN -> String -showCS = intercalate ", " . map showVar . toList - -{------------------------------------------------------------------------------- - Set-like operations --------------------------------------------------------------------------------} - -toList :: ConflictSet qpn -> [Var qpn] -toList = S.toList . fromConflictSet - -union :: Ord qpn => ConflictSet qpn -> ConflictSet qpn -> ConflictSet qpn -union (CS a) (CS b) = CS (a `S.union` b) - -unions :: Ord qpn => [ConflictSet qpn] -> ConflictSet qpn -unions = CS . S.unions . map fromConflictSet - -insert :: Ord qpn => Var qpn -> ConflictSet qpn -> ConflictSet qpn -insert var (CS set) = CS (S.insert (simplifyVar var) set) - -empty :: ConflictSet qpn -empty = CS S.empty - -singleton :: Var qpn -> ConflictSet qpn -singleton = CS . S.singleton . simplifyVar - -member :: Ord qpn => Var qpn -> ConflictSet qpn -> Bool -member var (CS set) = S.member (simplifyVar var) set - -#if MIN_VERSION_containers(0,5,0) -filter :: (Var qpn -> Bool) -> ConflictSet qpn -> ConflictSet qpn -#else -filter :: Ord qpn => (Var qpn -> Bool) -> ConflictSet qpn -> ConflictSet qpn -#endif -filter p (CS set) = CS $ S.filter p set - -fromList :: Ord qpn => [Var qpn] -> ConflictSet qpn -fromList = CS . S.fromList . 
map simplifyVar diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Cycles.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Cycles.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Cycles.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Cycles.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Client.Dependency.Modular.Cycles ( - detectCyclesPhase - ) where - -import Prelude hiding (cycle) -import Data.Graph (SCC) -import qualified Data.Graph as Gr -import qualified Data.Map as Map - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS - --- | Find and reject any solutions that are cyclic -detectCyclesPhase :: Tree QGoalReason -> Tree QGoalReason -detectCyclesPhase = cata go - where - -- The only node of interest is DoneF - go :: TreeF QGoalReason (Tree QGoalReason) -> Tree QGoalReason - go (PChoiceF qpn gr cs) = PChoice qpn gr cs - go (FChoiceF qfn gr w m cs) = FChoice qfn gr w m cs - go (SChoiceF qsn gr w cs) = SChoice qsn gr w cs - go (GoalChoiceF cs) = GoalChoice cs - go (FailF cs reason) = Fail cs reason - - -- We check for cycles only if we have actually found a solution - -- This minimizes the number of cycle checks we do as cycles are rare - go (DoneF revDeps) = do - case findCycles revDeps of - Nothing -> Done revDeps - Just relSet -> Fail relSet CyclicDependencies - --- | Given the reverse dependency map from a 'Done' node in the tree, as well --- as the full conflict set containing all decisions that led to that 'Done' --- node, check if the solution is cyclic. If it is, return the conflict set --- containing all decisions that could potentially break the cycle. -findCycles :: RevDepMap -> Maybe (ConflictSet QPN) -findCycles revDeps = - case cycles of - [] -> Nothing - c:_ -> Just $ CS.unions $ map (varToConflictSet . P) c - where - cycles :: [[QPN]] - cycles = [vs | Gr.CyclicSCC vs <- scc] - - scc :: [SCC QPN] - scc = Gr.stronglyConnComp . map aux . Map.toList $ revDeps - - aux :: (QPN, [(comp, QPN)]) -> (QPN, QPN, [QPN]) - aux (fr, to) = (fr, fr, map snd to) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Dependency.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Dependency.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Dependency.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,400 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.Dependency.Modular.Dependency ( - -- * Variables - Var(..) - , simplifyVar - , varPI - -- * Conflict sets - , ConflictSet - , CS.showCS - -- * Constrained instances - , CI(..) - , merge - -- * Flagged dependencies - , FlaggedDeps - , FlaggedDep(..) - , Dep(..) 
- , showDep - , flattenFlaggedDeps - , QualifyOptions(..) - , qualifyDeps - , unqualifyDeps - -- ** Setting/forgetting components - , forgetCompOpenGoal - , setCompFlaggedDeps - -- * Reverse dependency map - , RevDepMap - -- * Goals - , Goal(..) - , GoalReason(..) - , QGoalReason - , ResetVar(..) - , goalVarToConflictSet - , varToConflictSet - , goalReasonToVars - -- * Open goals - , OpenGoal(..) - , close - ) where - -import Prelude hiding (pi) - -import Data.Map (Map) -import qualified Data.List as L - -import Language.Haskell.Extension (Extension(..), Language(..)) - -import Distribution.Text - -import Distribution.Client.Dependency.Modular.ConflictSet (ConflictSet) -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Var -import Distribution.Client.Dependency.Modular.Version -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS - -import Distribution.Client.ComponentDeps (Component(..)) - -{------------------------------------------------------------------------------- - Constrained instances --------------------------------------------------------------------------------} - --- | Constrained instance. If the choice has already been made, this is --- a fixed instance, and we record the package name for which the choice --- is for convenience. Otherwise, it is a list of version ranges paired with --- the goals / variables that introduced them. -data CI qpn = Fixed I (Var qpn) | Constrained [VROrigin qpn] - deriving (Eq, Show, Functor) - -showCI :: CI QPN -> String -showCI (Fixed i _) = "==" ++ showI i -showCI (Constrained vr) = showVR (collapse vr) - --- | Merge constrained instances. We currently adopt a lazy strategy for --- merging, i.e., we only perform actual checking if one of the two choices --- is fixed. If the merge fails, we return a conflict set indicating the --- variables responsible for the failure, as well as the two conflicting --- fragments. --- --- Note that while there may be more than one conflicting pair of version --- ranges, we only return the first we find. --- --- TODO: Different pairs might have different conflict sets. We're --- obviously interested to return a conflict that has a "better" conflict --- set in the sense the it contains variables that allow us to backjump --- further. We might apply some heuristics here, such as to change the --- order in which we check the constraints. -merge :: Ord qpn => CI qpn -> CI qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn) -merge c@(Fixed i g1) d@(Fixed j g2) - | i == j = Right c - | otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, d)) -merge c@(Fixed (I v _) g1) (Constrained rs) = go rs -- I tried "reverse rs" here, but it seems to slow things down ... - where - go [] = Right c - go (d@(vr, g2) : vrs) - | checkVR vr v = go vrs - | otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, Constrained [d])) -merge c@(Constrained _) d@(Fixed _ _) = merge d c -merge (Constrained rs) (Constrained ss) = Right (Constrained (rs ++ ss)) - -{------------------------------------------------------------------------------- - Flagged dependencies --------------------------------------------------------------------------------} - --- | Flagged dependencies --- --- 'FlaggedDeps' is the modular solver's view of a packages dependencies: --- rather than having the dependencies indexed by component, each dependency --- defines what component it is in. 
--- --- However, top-level goals are also modelled as dependencies, but of course --- these don't actually belong in any component of any package. Therefore, we --- parameterize 'FlaggedDeps' and derived datatypes with a type argument that --- specifies whether or not we have a component: we only ever instantiate this --- type argument with @()@ for top-level goals, or 'Component' for everything --- else (we could express this as a kind at the type-level, but that would --- require a very recent GHC). --- --- Note however, crucially, that independent of the type parameters, the list --- of dependencies underneath a flag choice or stanza choices _always_ uses --- Component as the type argument. This is important: when we pick a value for --- a flag, we _must_ know what component the new dependencies belong to, or --- else we don't be able to construct fine-grained reverse dependencies. -type FlaggedDeps comp qpn = [FlaggedDep comp qpn] - --- | Flagged dependencies can either be plain dependency constraints, --- or flag-dependent dependency trees. -data FlaggedDep comp qpn = - Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn) - | Stanza (SN qpn) (TrueFlaggedDeps qpn) - | Simple (Dep qpn) comp - deriving (Eq, Show) - --- | Conversatively flatten out flagged dependencies --- --- NOTE: We do not filter out duplicates. -flattenFlaggedDeps :: FlaggedDeps Component qpn -> [(Dep qpn, Component)] -flattenFlaggedDeps = concatMap aux - where - aux :: FlaggedDep Component qpn -> [(Dep qpn, Component)] - aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f - aux (Stanza _ t) = flattenFlaggedDeps t - aux (Simple d c) = [(d, c)] - -type TrueFlaggedDeps qpn = FlaggedDeps Component qpn -type FalseFlaggedDeps qpn = FlaggedDeps Component qpn - --- | A dependency (constraint) associates a package name with a --- constrained instance. --- --- 'Dep' intentionally has no 'Functor' instance because the type variable --- is used both to record the dependencies as well as who's doing the --- depending; having a 'Functor' instance makes bugs where we don't distinguish --- these two far too likely. (By rights 'Dep' ought to have two type variables.) -data Dep qpn = Dep qpn (CI qpn) -- dependency on a package - | Ext Extension -- dependency on a language extension - | Lang Language -- dependency on a language version - | Pkg PN VR -- dependency on a pkg-config package - deriving (Eq, Show) - -showDep :: Dep QPN -> String -showDep (Dep qpn (Fixed i v) ) = - (if P qpn /= v then showVar v ++ " => " else "") ++ - showQPN qpn ++ "==" ++ showI i -showDep (Dep qpn (Constrained [(vr, v)])) = - showVar v ++ " => " ++ showQPN qpn ++ showVR vr -showDep (Dep qpn ci ) = - showQPN qpn ++ showCI ci -showDep (Ext ext) = "requires " ++ display ext -showDep (Lang lang) = "requires " ++ display lang -showDep (Pkg pn vr) = "requires pkg-config package " - ++ display pn ++ display vr - ++ ", not found in the pkg-config database" - --- | Options for goal qualification (used in 'qualifyDeps') --- --- See also 'defaultQualifyOptions' -data QualifyOptions = QO { - -- | Do we have a version of base relying on another version of base? - qoBaseShim :: Bool - - -- Should dependencies of the setup script be treated as independent? - , qoSetupIndependent :: Bool - } - deriving Show - --- | Apply built-in rules for package qualifiers --- --- Although the behaviour of 'qualifyDeps' depends on the 'QualifyOptions', --- it is important that these 'QualifyOptions' are _static_. 
Qualification --- does NOT depend on flag assignment; in other words, it behaves the same no --- matter which choices the solver makes (modulo the global 'QualifyOptions'); --- we rely on this in 'linkDeps' (see comment there). --- --- NOTE: It's the _dependencies_ of a package that may or may not be independent --- from the package itself. Package flag choices must of course be consistent. -qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps Component PN -> FlaggedDeps Component QPN -qualifyDeps QO{..} (Q pp@(PP ns q) pn) = go - where - go :: FlaggedDeps Component PN -> FlaggedDeps Component QPN - go = map go1 - - go1 :: FlaggedDep Component PN -> FlaggedDep Component QPN - go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f) - go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t) - go1 (Simple dep comp) = Simple (goD dep comp) comp - - -- Suppose package B has a setup dependency on package A. - -- This will be recorded as something like - -- - -- > Dep "A" (Constrained [(AnyVersion, Goal (P "B") reason]) - -- - -- Observe that when we qualify this dependency, we need to turn that - -- @"A"@ into @"B-setup.A"@, but we should not apply that same qualifier - -- to the goal or the goal reason chain. - goD :: Dep PN -> Component -> Dep QPN - goD (Ext ext) _ = Ext ext - goD (Lang lang) _ = Lang lang - goD (Pkg pkn vr) _ = Pkg pkn vr - goD (Dep dep ci) comp - | qBase dep = Dep (Q (PP ns (Base pn)) dep) (fmap (Q pp) ci) - | qSetup comp = Dep (Q (PP ns (Setup pn)) dep) (fmap (Q pp) ci) - | otherwise = Dep (Q (PP ns inheritedQ) dep) (fmap (Q pp) ci) - - -- If P has a setup dependency on Q, and Q has a regular dependency on R, then - -- we say that the 'Setup' qualifier is inherited: P has an (indirect) setup - -- dependency on R. We do not do this for the base qualifier however. - -- - -- The inherited qualifier is only used for regular dependencies; for setup - -- and base deppendencies we override the existing qualifier. See #3160 for - -- a detailed discussion. - inheritedQ :: Qualifier - inheritedQ = case q of - Setup _ -> q - Unqualified -> q - Base _ -> Unqualified - - -- Should we qualify this goal with the 'Base' package path? - qBase :: PN -> Bool - qBase dep = qoBaseShim && unPackageName dep == "base" - - -- Should we qualify this goal with the 'Setup' packaeg path? - qSetup :: Component -> Bool - qSetup comp = qoSetupIndependent && comp == ComponentSetup - --- | Remove qualifiers from set of dependencies --- --- This is used during link validation: when we link package @Q.A@ to @Q'.A@, --- then all dependencies @Q.B@ need to be linked to @Q'.B@. In order to compute --- what to link these dependencies to, we need to requalify @Q.B@ to become --- @Q'.B@; we do this by first removing all qualifiers and then calling --- 'qualifyDeps' again. 
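The unqualify-then-requalify step used during link validation has a simple shape: drop the old package path, reattach the new one. The sketch below is a toy model with invented names (ToyQ, toyUnqualify, toyRequalify), not the solver's actual types.

-- Toy model of a qualified package name: a package path plus the name.
data ToyQ = ToyQ String String
  deriving (Eq, Show)

toyUnqualify :: ToyQ -> String
toyUnqualify (ToyQ _ pn) = pn

-- Relink the dependencies recorded under one path so that they live
-- under another path: drop the old qualifier, reattach the new one.
toyRequalify :: String -> [ToyQ] -> [ToyQ]
toyRequalify newPath = map (ToyQ newPath . toyUnqualify)

main :: IO ()
main = print (toyRequalify "Q'" [ToyQ "Q" "B", ToyQ "Q" "C"])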
-unqualifyDeps :: FlaggedDeps comp QPN -> FlaggedDeps comp PN -unqualifyDeps = go - where - go :: FlaggedDeps comp QPN -> FlaggedDeps comp PN - go = map go1 - - go1 :: FlaggedDep comp QPN -> FlaggedDep comp PN - go1 (Flagged fn nfo t f) = Flagged (fmap unq fn) nfo (go t) (go f) - go1 (Stanza sn t) = Stanza (fmap unq sn) (go t) - go1 (Simple dep comp) = Simple (goD dep) comp - - goD :: Dep QPN -> Dep PN - goD (Dep qpn ci) = Dep (unq qpn) (fmap unq ci) - goD (Ext ext) = Ext ext - goD (Lang lang) = Lang lang - goD (Pkg pn vr) = Pkg pn vr - - unq :: QPN -> PN - unq (Q _ pn) = pn - -{------------------------------------------------------------------------------- - Setting/forgetting the Component --------------------------------------------------------------------------------} - -forgetCompOpenGoal :: OpenGoal Component -> OpenGoal () -forgetCompOpenGoal = mapCompOpenGoal $ const () - -setCompFlaggedDeps :: Component -> FlaggedDeps () qpn -> FlaggedDeps Component qpn -setCompFlaggedDeps = mapCompFlaggedDeps . const - -{------------------------------------------------------------------------------- - Auxiliary: Mapping over the Component goal - - We don't export these, because the only type instantiations for 'a' and 'b' - here should be () or Component. (We could express this at the type level - if we relied on newer versions of GHC.) --------------------------------------------------------------------------------} - -mapCompOpenGoal :: (a -> b) -> OpenGoal a -> OpenGoal b -mapCompOpenGoal g (OpenGoal d gr) = OpenGoal (mapCompFlaggedDep g d) gr - -mapCompFlaggedDeps :: (a -> b) -> FlaggedDeps a qpn -> FlaggedDeps b qpn -mapCompFlaggedDeps = L.map . mapCompFlaggedDep - -mapCompFlaggedDep :: (a -> b) -> FlaggedDep a qpn -> FlaggedDep b qpn -mapCompFlaggedDep _ (Flagged fn nfo t f) = Flagged fn nfo t f -mapCompFlaggedDep _ (Stanza sn t ) = Stanza sn t -mapCompFlaggedDep g (Simple pn a ) = Simple pn (g a) - -{------------------------------------------------------------------------------- - Reverse dependency map --------------------------------------------------------------------------------} - --- | A map containing reverse dependencies between qualified --- package names. -type RevDepMap = Map QPN [(Component, QPN)] - -{------------------------------------------------------------------------------- - Goals --------------------------------------------------------------------------------} - --- | A goal is just a solver variable paired with a reason. --- The reason is only used for tracing. -data Goal qpn = Goal (Var qpn) (GoalReason qpn) - deriving (Eq, Show, Functor) - --- | Reason why a goal is being added to a goal set. -data GoalReason qpn = - UserGoal - | PDependency (PI qpn) - | FDependency (FN qpn) Bool - | SDependency (SN qpn) - deriving (Eq, Show, Functor) - -type QGoalReason = GoalReason QPN - -class ResetVar f where - resetVar :: Var qpn -> f qpn -> f qpn - -instance ResetVar CI where - resetVar v (Fixed i _) = Fixed i v - resetVar v (Constrained vrs) = Constrained (L.map (\ (x, y) -> (x, resetVar v y)) vrs) - -instance ResetVar Dep where - resetVar v (Dep qpn ci) = Dep qpn (resetVar v ci) - resetVar _ (Ext ext) = Ext ext - resetVar _ (Lang lang) = Lang lang - resetVar _ (Pkg pn vr) = Pkg pn vr - -instance ResetVar Var where - resetVar = const - --- | Compute a singleton conflict set from a goal, containing just --- the goal variable. --- --- NOTE: This is just a call to 'varToConflictSet' under the hood; --- the 'GoalReason' is ignored. 
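The RevDepMap above is just a map from a package to the packages that depend on it. A minimal way to build such a map from forward dependencies, using only Data.Map (toyRevDeps is an invented name, illustration only):

import qualified Data.Map as M

-- From each package's direct dependencies, build a map from a package
-- to the packages that depend on it.
toyRevDeps :: [(String, [String])] -> M.Map String [String]
toyRevDeps deps =
  M.fromListWith (++) [ (dep, [pkg]) | (pkg, ds) <- deps, dep <- ds ]

main :: IO ()
main = print (toyRevDeps [("A", ["B", "C"]), ("B", ["C"])])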
-goalVarToConflictSet :: Goal qpn -> ConflictSet qpn -goalVarToConflictSet (Goal g _gr) = varToConflictSet g - --- | Compute a singleton conflict set from a 'Var' -varToConflictSet :: Var qpn -> ConflictSet qpn -varToConflictSet = CS.singleton - --- | A goal reason is mostly just a variable paired with the --- decision we made for that variable (except for user goals, --- where we cannot really point to a solver variable). This --- function drops the decision and recovers the list of --- variables (which will be empty or contain one element). --- -goalReasonToVars :: GoalReason qpn -> [Var qpn] -goalReasonToVars UserGoal = [] -goalReasonToVars (PDependency (PI qpn _)) = [P qpn] -goalReasonToVars (FDependency qfn _) = [F qfn] -goalReasonToVars (SDependency qsn) = [S qsn] - -{------------------------------------------------------------------------------- - Open goals --------------------------------------------------------------------------------} - --- | For open goals as they occur during the build phase, we need to store --- additional information about flags. -data OpenGoal comp = OpenGoal (FlaggedDep comp QPN) QGoalReason - deriving (Eq, Show) - --- | Closes a goal, i.e., removes all the extraneous information that we --- need only during the build phase. -close :: OpenGoal comp -> Goal QPN -close (OpenGoal (Simple (Dep qpn _) _) gr) = Goal (P qpn) gr -close (OpenGoal (Simple (Ext _) _) _ ) = - error "Distribution.Client.Dependency.Modular.Dependency.close: called on Ext goal" -close (OpenGoal (Simple (Lang _) _) _ ) = - error "Distribution.Client.Dependency.Modular.Dependency.close: called on Lang goal" -close (OpenGoal (Simple (Pkg _ _) _) _ ) = - error "Distribution.Client.Dependency.Modular.Dependency.close: called on Pkg goal" -close (OpenGoal (Flagged qfn _ _ _ ) gr) = Goal (F qfn) gr -close (OpenGoal (Stanza qsn _) gr) = Goal (S qsn) gr - -{------------------------------------------------------------------------------- - Version ranges paired with origins --------------------------------------------------------------------------------} - -type VROrigin qpn = (VR, Var qpn) - --- | Helper function to collapse a list of version ranges with origins into --- a single, simplified, version range. -collapse :: [VROrigin qpn] -> VR -collapse = simplifyVR . L.foldr ((.&&.) . 
fst) anyVR diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Explore.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Explore.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Explore.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Explore.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ -module Distribution.Client.Dependency.Modular.Explore - ( backjump - , backjumpAndExplore - ) where - -import Data.Foldable as F -import Data.Map as M - -import Distribution.Client.Dependency.Modular.Assignment -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Log -import Distribution.Client.Dependency.Modular.Message -import Distribution.Client.Dependency.Modular.Package -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS -import Distribution.Client.Dependency.Modular.Tree -import qualified Distribution.Client.Dependency.Types as T - --- | This function takes the variable we're currently considering, an --- initial conflict set and a --- list of children's logs. Each log yields either a solution or a --- conflict set. The result is a combined log for the parent node that --- has explored a prefix of the children. --- --- We can stop traversing the children's logs if we find an individual --- conflict set that does not contain the current variable. In this --- case, we can just lift the conflict set to the current level, --- because the current level cannot possibly have contributed to this --- conflict, so no other choice at the current level would avoid the --- conflict. --- --- If any of the children might contain a successful solution, we can --- return it immediately. If all children contain conflict sets, we can --- take the union as the combined conflict set. --- --- The initial conflict set corresponds to the justification that we --- have to choose this goal at all. There is a reason why we have --- introduced the goal in the first place, and this reason is in conflict --- with the (virtual) option not to choose anything for the current --- variable. See also the comments for 'avoidSet'. --- -backjump :: F.Foldable t => Var QPN -> ConflictSet QPN -> t (ConflictSetLog a) -> ConflictSetLog a -backjump var initial xs = F.foldr combine logBackjump xs initial - where - combine :: ConflictSetLog a - -> (ConflictSet QPN -> ConflictSetLog a) - -> ConflictSet QPN -> ConflictSetLog a - combine (T.Done x) _ _ = T.Done x - combine (T.Fail cs) f csAcc - | not (var `CS.member` cs) = logBackjump cs - | otherwise = f (csAcc `CS.union` cs) - combine (T.Step m ms) f cs = T.Step m (combine ms f cs) - - logBackjump :: ConflictSet QPN -> ConflictSetLog a - logBackjump cs = failWith (Failure cs Backjump) cs - -type ConflictSetLog = T.Progress Message (ConflictSet QPN) - --- | A tree traversal that simultaneously propagates conflict sets up --- the tree from the leaves and creates a log. 
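The backjump rule described above can be demonstrated on plain sets. The self-contained sketch below uses invented names (Child, toyBackjump) and strings for variables; it only illustrates the short-circuit behaviour: the first success wins, a failure whose conflict set does not mention the current variable is lifted unchanged, and otherwise the failing children's conflict sets are unioned.

import qualified Data.Set as S

-- A child either fails with a conflict set or succeeds with a solution.
type Child a = Either (S.Set String) a

-- Combine the children of a choice node for variable v: succeed on the
-- first success; backjump (return the conflict unchanged) as soon as a
-- child's conflict set does not mention v; otherwise union the failures.
toyBackjump :: String -> S.Set String -> [Child a] -> Child a
toyBackjump v = go
  where
    go acc []              = Left acc
    go _   (Right x : _)   = Right x
    go acc (Left cs : rest)
      | v `S.notMember` cs = Left cs
      | otherwise          = go (acc `S.union` cs) rest

main :: IO ()
main = print (toyBackjump "A" (S.singleton "A")
                [ Left (S.fromList ["A", "B"])
                , Left (S.fromList ["B", "C"]) ] :: Child ())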
-exploreLog :: Tree QGoalReason -> (Assignment -> ConflictSetLog (Assignment, RevDepMap)) -exploreLog = cata go - where - go :: TreeF QGoalReason (Assignment -> ConflictSetLog (Assignment, RevDepMap)) - -> (Assignment -> ConflictSetLog (Assignment, RevDepMap)) - go (FailF c fr) _ = failWith (Failure c fr) c - go (DoneF rdm) a = succeedWith Success (a, rdm) - go (PChoiceF qpn gr ts) (A pa fa sa) = - backjump (P qpn) (avoidSet (P qpn) gr) $ -- try children in order, - P.mapWithKey -- when descending ... - (\ i@(POption k _) r -> tryWith (TryP qpn i) $ -- log and ... - r (A (M.insert qpn k pa) fa sa)) -- record the pkg choice - ts - go (FChoiceF qfn gr _ _ ts) (A pa fa sa) = - backjump (F qfn) (avoidSet (F qfn) gr) $ -- try children in order, - P.mapWithKey -- when descending ... - (\ k r -> tryWith (TryF qfn k) $ -- log and ... - r (A pa (M.insert qfn k fa) sa)) -- record the pkg choice - ts - go (SChoiceF qsn gr _ ts) (A pa fa sa) = - backjump (S qsn) (avoidSet (S qsn) gr) $ -- try children in order, - P.mapWithKey -- when descending ... - (\ k r -> tryWith (TryS qsn k) $ -- log and ... - r (A pa fa (M.insert qsn k sa))) -- record the pkg choice - ts - go (GoalChoiceF ts) a = - P.casePSQ ts - (failWith (Failure CS.empty EmptyGoalChoice) CS.empty) -- empty goal choice is an internal error - (\ k v _xs -> continueWith (Next (close k)) (v a)) -- commit to the first goal choice - --- | Build a conflict set corresponding to the (virtual) option not to --- choose a solution for a goal at all. --- --- In the solver, the set of goals is not statically determined, but depends --- on the choices we make. Therefore, when dealing with conflict sets, we --- always have to consider that we could perhaps make choices that would --- avoid the existence of the goal completely. --- --- Whenever we actual introduce a choice in the tree, we have already established --- that the goal cannot be avoided. This is tracked in the "goal reason". --- The choice to avoid the goal therefore is a conflict between the goal itself --- and its goal reason. We build this set here, and pass it to the 'backjump' --- function as the initial conflict set. --- --- This has two effects: --- --- - In a situation where there are no choices available at all (this happens --- if an unknown package is requested), the initial conflict set becomes the --- actual conflict set. --- --- - In a situation where we backjump past the current node, the goal reason --- of the current node will be added to the conflict set. --- -avoidSet :: Var QPN -> QGoalReason -> ConflictSet QPN -avoidSet var gr = - CS.fromList (var : goalReasonToVars gr) - --- | Interface. 
-backjumpAndExplore :: Tree QGoalReason -> Log Message (Assignment, RevDepMap) -backjumpAndExplore t = toLog $ exploreLog t (A M.empty M.empty M.empty) - where - toLog :: T.Progress step fail done -> Log step done - toLog = T.foldProgress T.Step (const (T.Fail ())) T.Done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Flag.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Flag.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Flag.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Flag.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,80 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Client.Dependency.Modular.Flag - ( FInfo(..) - , Flag - , FlagInfo - , FN(..) - , QFN - , QSN - , SN(..) - , mkFlag - , showFBool - , showQFN - , showQFNBool - , showQSN - , showQSNBool - ) where - -import Data.Map as M -import Prelude hiding (pi) - -import Distribution.PackageDescription hiding (Flag) -- from Cabal - -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Types (OptionalStanza(..)) - --- | Flag name. Consists of a package instance and the flag identifier itself. -data FN qpn = FN (PI qpn) Flag - deriving (Eq, Ord, Show, Functor) - --- | Flag identifier. Just a string. -type Flag = FlagName - -unFlag :: Flag -> String -unFlag (FlagName fn) = fn - -mkFlag :: String -> Flag -mkFlag fn = FlagName fn - --- | Flag info. Default value, whether the flag is manual, and --- whether the flag is weak. Manual flags can only be set explicitly. --- Weak flags are typically deferred by the solver. -data FInfo = FInfo { fdefault :: Bool, fmanual :: Bool, fweak :: Bool } - deriving (Eq, Ord, Show) - --- | Flag defaults. -type FlagInfo = Map Flag FInfo - --- | Qualified flag name. -type QFN = FN QPN - --- | Stanza name. Paired with a package name, much like a flag. -data SN qpn = SN (PI qpn) OptionalStanza - deriving (Eq, Ord, Show, Functor) - --- | Qualified stanza name. -type QSN = SN QPN - -unStanza :: OptionalStanza -> String -unStanza TestStanzas = "test" -unStanza BenchStanzas = "bench" - -showQFNBool :: QFN -> Bool -> String -showQFNBool qfn@(FN pi _f) b = showPI pi ++ ":" ++ showFBool qfn b - -showQSNBool :: QSN -> Bool -> String -showQSNBool qsn@(SN pi _f) b = showPI pi ++ ":" ++ showSBool qsn b - -showFBool :: FN qpn -> Bool -> String -showFBool (FN _ f) True = "+" ++ unFlag f -showFBool (FN _ f) False = "-" ++ unFlag f - -showSBool :: SN qpn -> Bool -> String -showSBool (SN _ s) True = "*" ++ unStanza s -showSBool (SN _ s) False = "!" 
++ unStanza s - -showQFN :: QFN -> String -showQFN (FN pi f) = showPI pi ++ ":" ++ unFlag f - -showQSN :: QSN -> String -showQSN (SN pi f) = showPI pi ++ ":" ++ unStanza f diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/IndexConversion.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/IndexConversion.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/IndexConversion.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/IndexConversion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,219 +0,0 @@ -module Distribution.Client.Dependency.Modular.IndexConversion - ( convPIs - ) where - -import Data.List as L -import Data.Map as M -import Data.Maybe -import Data.Monoid as Mon -import Prelude hiding (pi) - -import qualified Distribution.Client.PackageIndex as CI -import Distribution.Client.Types -import Distribution.Client.ComponentDeps (Component(..)) -import Distribution.Compiler -import Distribution.InstalledPackageInfo as IPI -import Distribution.Package -- from Cabal -import Distribution.PackageDescription as PD -- from Cabal -import Distribution.PackageDescription.Configuration as PDC -import qualified Distribution.Simple.PackageIndex as SI -import Distribution.System - -import Distribution.Client.Dependency.Modular.Dependency as D -import Distribution.Client.Dependency.Modular.Flag as F -import Distribution.Client.Dependency.Modular.Index -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree -import Distribution.Client.Dependency.Modular.Version - --- | Convert both the installed package index and the source package --- index into one uniform solver index. --- --- We use 'allPackagesBySourcePackageId' for the installed package index --- because that returns us several instances of the same package and version --- in order of preference. This allows us in principle to \"shadow\" --- packages if there are several installed packages of the same version. --- There are currently some shortcomings in both GHC and Cabal in --- resolving these situations. However, the right thing to do is to --- fix the problem there, so for now, shadowing is only activated if --- explicitly requested. -convPIs :: OS -> Arch -> CompilerInfo -> Bool -> Bool -> - SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> Index -convPIs os arch comp sip strfl iidx sidx = - mkIndex (convIPI' sip iidx ++ convSPI' os arch comp strfl sidx) - --- | Convert a Cabal installed package index to the simpler, --- more uniform index format of the solver. -convIPI' :: Bool -> SI.InstalledPackageIndex -> [(PN, I, PInfo)] -convIPI' sip idx = - -- apply shadowing whenever there are multiple installed packages with - -- the same version - [ maybeShadow (convIP idx pkg) - | (_pkgid, pkgs) <- SI.allPackagesBySourcePackageId idx - , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ] - where - - -- shadowing is recorded in the package info - shadow (pn, i, PInfo fdeps fds _) | sip = (pn, i, PInfo fdeps fds (Just Shadowed)) - shadow x = x - --- | Convert a single installed package into the solver-specific format. 
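The shadowing scheme in convIPI' hinges on the zip (id : repeat shadow) idiom: the first instance in each group is left untouched and every later one is marked. A standalone toy version of that idiom (markShadowed is an invented name, illustration only):

-- Within each group of equal-versioned installed instances, keep the
-- first as-is and flag every later one as shadowed.
markShadowed :: [(String, [Int])] -> [(Int, Bool)]
markShadowed groups =
  [ (instanceId, shadowed)
  | (_version, instances) <- groups
  , (shadowed, instanceId) <- zip (False : repeat True) instances ]

main :: IO ()
main = print (markShadowed [("foo-1.0", [101, 102, 103]), ("bar-2.0", [201])])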
-convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo) -convIP idx ipi = - let ipid = IPI.installedUnitId ipi - i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) - pn = pkgName (sourcePackageId ipi) - in case mapM (convIPId pn idx) (IPI.depends ipi) of - Nothing -> (pn, i, PInfo [] M.empty (Just Broken)) - Just fds -> (pn, i, PInfo (setComp fds) M.empty Nothing) - where - -- We assume that all dependencies of installed packages are _library_ deps - setComp = setCompFlaggedDeps ComponentLib --- TODO: Installed packages should also store their encapsulations! - --- | Convert dependencies specified by an installed package id into --- flagged dependencies of the solver. --- --- May return Nothing if the package can't be found in the index. That --- indicates that the original package having this dependency is broken --- and should be ignored. -convIPId :: PN -> SI.InstalledPackageIndex -> UnitId -> Maybe (FlaggedDep () PN) -convIPId pn' idx ipid = - case SI.lookupUnitId idx ipid of - Nothing -> Nothing - Just ipi -> let i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) - pn = pkgName (sourcePackageId ipi) - in Just (D.Simple (Dep pn (Fixed i (P pn'))) ()) - --- | Convert a cabal-install source package index to the simpler, --- more uniform index format of the solver. -convSPI' :: OS -> Arch -> CompilerInfo -> Bool -> - CI.PackageIndex SourcePackage -> [(PN, I, PInfo)] -convSPI' os arch cinfo strfl = L.map (convSP os arch cinfo strfl) . CI.allPackages - --- | Convert a single source package into the solver-specific format. -convSP :: OS -> Arch -> CompilerInfo -> Bool -> SourcePackage -> (PN, I, PInfo) -convSP os arch cinfo strfl (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) = - let i = I pv InRepo - in (pn, i, convGPD os arch cinfo strfl (PI pn i) gpd) - --- We do not use 'flattenPackageDescription' or 'finalizePackageDescription' --- from 'Distribution.PackageDescription.Configuration' here, because we --- want to keep the condition tree, but simplify much of the test. - --- | Convert a generic package description to a solver-specific 'PInfo'. -convGPD :: OS -> Arch -> CompilerInfo -> Bool -> - PI PN -> GenericPackageDescription -> PInfo -convGPD os arch cinfo strfl pi - (GenericPackageDescription pkg flags libs exes tests benchs) = - let - fds = flagInfo strfl flags - - conv :: Mon.Monoid a => Component -> (a -> BuildInfo) -> - CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN - conv comp getInfo = convCondTree os arch cinfo pi fds comp getInfo . - PDC.addBuildableCondition getInfo - in - PInfo - (maybe [] (conv ComponentLib libBuildInfo ) libs ++ - maybe [] (convSetupBuildInfo pi) (setupBuildInfo pkg) ++ - concatMap (\(nm, ds) -> conv (ComponentExe nm) buildInfo ds) exes ++ - prefix (Stanza (SN pi TestStanzas)) - (L.map (\(nm, ds) -> conv (ComponentTest nm) testBuildInfo ds) tests) ++ - prefix (Stanza (SN pi BenchStanzas)) - (L.map (\(nm, ds) -> conv (ComponentBench nm) benchmarkBuildInfo ds) benchs)) - fds - Nothing - -prefix :: (FlaggedDeps comp qpn -> FlaggedDep comp' qpn) -> [FlaggedDeps comp qpn] -> FlaggedDeps comp' qpn -prefix _ [] = [] -prefix f fds = [f (concat fds)] - --- | Convert flag information. Automatic flags are now considered weak --- unless strong flags have been selected explicitly. -flagInfo :: Bool -> [PD.Flag] -> FlagInfo -flagInfo strfl = M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b m (not (strfl || m)))) - --- | Convert condition trees to flagged dependencies. 
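Condition trees guard dependencies by boolean conditions over flags, OS, architecture and compiler. The branch interpreter that follows pushes negation, conjunction and disjunction into a choice between the true-branch and the false-branch dependencies; a minimal standalone model of that evaluation scheme (invented Cond/select, not the Cabal types) looks like this:

-- Instead of reducing a condition to Bool, pick between the dependencies
-- of the true branch and the false branch.
data Cond = Lit Bool | Not Cond | And Cond Cond | Or Cond Cond

select :: Cond -> [String] -> [String] -> [String]
select (Lit True)  t _ = t
select (Lit False) _ f = f
select (Not c)     t f = select c f t
select (And c d)   t f = select c (select d t f) f
select (Or  c d)   t f = select c t (select d t f)

main :: IO ()
main = print (select (And (Lit True) (Not (Lit False))) ["dep-a"] ["dep-b"])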
-convCondTree :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo -> - Component -> - (a -> BuildInfo) -> - CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN -convCondTree os arch cinfo pi@(PI pn _) fds comp getInfo (CondNode info ds branches) = - L.map (\d -> D.Simple (convDep pn d) comp) ds -- unconditional package dependencies - ++ L.map (\e -> D.Simple (Ext e) comp) (PD.allExtensions bi) -- unconditional extension dependencies - ++ L.map (\l -> D.Simple (Lang l) comp) (PD.allLanguages bi) -- unconditional language dependencies - ++ L.map (\(Dependency pkn vr) -> D.Simple (Pkg pkn vr) comp) (PD.pkgconfigDepends bi) -- unconditional pkg-config dependencies - ++ concatMap (convBranch os arch cinfo pi fds comp getInfo) branches - where - bi = getInfo info - --- | Branch interpreter. --- --- Here, we try to simplify one of Cabal's condition tree branches into the --- solver's flagged dependency format, which is weaker. Condition trees can --- contain complex logical expression composed from flag choices and special --- flags (such as architecture, or compiler flavour). We try to evaluate the --- special flags and subsequently simplify to a tree that only depends on --- simple flag choices. -convBranch :: OS -> Arch -> CompilerInfo -> - PI PN -> FlagInfo -> - Component -> - (a -> BuildInfo) -> - (Condition ConfVar, - CondTree ConfVar [Dependency] a, - Maybe (CondTree ConfVar [Dependency] a)) -> FlaggedDeps Component PN -convBranch os arch cinfo pi@(PI pn _) fds comp getInfo (c', t', mf') = - go c' ( convCondTree os arch cinfo pi fds comp getInfo t') - (maybe [] (convCondTree os arch cinfo pi fds comp getInfo) mf') - where - go :: Condition ConfVar -> - FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN - go (Lit True) t _ = t - go (Lit False) _ f = f - go (CNot c) t f = go c f t - go (CAnd c d) t f = go c (go d t f) f - go (COr c d) t f = go c t (go d t f) - go (Var (Flag fn)) t f = extractCommon t f ++ [Flagged (FN pi fn) (fds ! fn) t f] - go (Var (OS os')) t f - | os == os' = t - | otherwise = f - go (Var (Arch arch')) t f - | arch == arch' = t - | otherwise = f - go (Var (Impl cf cvr)) t f - | matchImpl (compilerInfoId cinfo) || - -- fixme: Nothing should be treated as unknown, rather than empty - -- list. This code should eventually be changed to either - -- support partial resolution of compiler flags or to - -- complain about incompletely configured compilers. - any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t - | otherwise = f - where - matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv - - -- If both branches contain the same package as a simple dep, we lift it to - -- the next higher-level, but without constraints. This heuristic together - -- with deferring flag choices will then usually first resolve this package, - -- and try an already installed version before imposing a default flag choice - -- that might not be what we want. - -- - -- Note that we make assumptions here on the form of the dependencies that - -- can occur at this point. In particular, no occurrences of Fixed, and no - -- occurrences of multiple version ranges, as all dependencies below this - -- point have been generated using 'convDep'. - extractCommon :: FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN - extractCommon ps ps' = [ D.Simple (Dep pn1 (Constrained [(vr1 .||. 
vr2, P pn)])) comp - | D.Simple (Dep pn1 (Constrained [(vr1, _)])) _ <- ps - , D.Simple (Dep pn2 (Constrained [(vr2, _)])) _ <- ps' - , pn1 == pn2 - ] - --- | Convert a Cabal dependency to a solver-specific dependency. -convDep :: PN -> Dependency -> Dep PN -convDep pn' (Dependency pn vr) = Dep pn (Constrained [(vr, P pn')]) - --- | Convert setup dependencies -convSetupBuildInfo :: PI PN -> SetupBuildInfo -> FlaggedDeps Component PN -convSetupBuildInfo (PI pn _i) nfo = - L.map (\d -> D.Simple (convDep pn d) ComponentSetup) (PD.setupDepends nfo) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Index.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Index.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Index.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Index.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -module Distribution.Client.Dependency.Modular.Index - ( Index - , PInfo(..) - , defaultQualifyOptions - , mkIndex - ) where - -import Data.List as L -import Data.Map as M -import Prelude hiding (pi) - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree - -import Distribution.Client.ComponentDeps (Component) - --- | An index contains information about package instances. This is a nested --- dictionary. Package names are mapped to instances, which in turn is mapped --- to info. -type Index = Map PN (Map I PInfo) - --- | Info associated with a package instance. --- Currently, dependencies, flags and failure reasons. --- Packages that have a failure reason recorded for them are disabled --- globally, for reasons external to the solver. We currently use this --- for shadowing which essentially is a GHC limitation, and for --- installed packages that are broken. -data PInfo = PInfo (FlaggedDeps Component PN) FlagInfo (Maybe FailReason) - deriving (Show) - -mkIndex :: [(PN, I, PInfo)] -> Index -mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs)) - -groupMap :: Ord a => [(a, b)] -> Map a [b] -groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs) - -defaultQualifyOptions :: Index -> QualifyOptions -defaultQualifyOptions idx = QO { - qoBaseShim = or [ dep == base - | -- Find all versions of base .. - Just is <- [M.lookup base idx] - -- .. which are installed .. - , (I _ver (Inst _), PInfo deps _flagNfo _fr) <- M.toList is - -- .. and flatten all their dependencies .. 
- , (Dep dep _ci, _comp) <- flattenFlaggedDeps deps - ] - , qoSetupIndependent = True - } - where - base = PackageName "base" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Linking.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Linking.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Linking.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Linking.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,574 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -module Distribution.Client.Dependency.Modular.Linking ( - addLinking - , validateLinking - ) where - -import Prelude hiding (pi) -import Control.Exception (assert) -import Control.Monad.Reader -import Control.Monad.State -import Data.Maybe (catMaybes) -import Data.Map (Map, (!)) -import Data.List (intercalate) -import Data.Set (Set) -import qualified Data.Map as M -import qualified Data.Set as S -import qualified Data.Traversable as T - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif - -import Distribution.Client.Dependency.Modular.Assignment -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Index -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS - -import Distribution.Client.Types (OptionalStanza(..)) -import Distribution.Client.ComponentDeps (Component) - -{------------------------------------------------------------------------------- - Add linking --------------------------------------------------------------------------------} - -type RelatedGoals = Map (PN, I) [PP] -type Linker = Reader RelatedGoals - --- | Introduce link nodes into tree tree --- --- Linking is a traversal of the solver tree that adapts package choice nodes --- and adds the option to link wherever appropriate: Package goals are called --- "related" if they are for the same version of the same package (but have --- different prefixes). A link option is available in a package choice node --- whenever we can choose an instance that has already been chosen for a related --- goal at a higher position in the tree. --- --- The code here proceeds by maintaining a finite map recording choices that --- have been made at higher positions in the tree. For each pair of package name --- and instance, it stores the prefixes at which we have made a choice for this --- package instance. Whenever we make a choice, we extend the map. Whenever we --- find a choice, we look into the map in order to find out what link options we --- have to add. -addLinking :: Tree QGoalReason -> Tree QGoalReason -addLinking = (`runReader` M.empty) . 
cata go - where - go :: TreeF QGoalReason (Linker (Tree QGoalReason)) -> Linker (Tree QGoalReason) - - -- The only nodes of interest are package nodes - go (PChoiceF qpn gr cs) = do - env <- ask - cs' <- T.sequence $ P.mapWithKey (goP qpn) cs - let newCs = concatMap (linkChoices env qpn) (P.toList cs') - return $ PChoice qpn gr (cs' `P.union` P.fromList newCs) - go _otherwise = - innM _otherwise - - -- Recurse underneath package choices. Here we just need to make sure - -- that we record the package choice so that it is available below - goP :: QPN -> POption -> Linker (Tree QGoalReason) -> Linker (Tree QGoalReason) - goP (Q pp pn) (POption i Nothing) = local (M.insertWith (++) (pn, i) [pp]) - goP _ _ = alreadyLinked - -linkChoices :: RelatedGoals -> QPN -> (POption, Tree QGoalReason) -> [(POption, Tree QGoalReason)] -linkChoices related (Q _pp pn) (POption i Nothing, subtree) = - map aux (M.findWithDefault [] (pn, i) related) - where - aux :: PP -> (POption, Tree QGoalReason) - aux pp = (POption i (Just pp), subtree) -linkChoices _ _ (POption _ (Just _), _) = - alreadyLinked - -alreadyLinked :: a -alreadyLinked = error "addLinking called on tree that already contains linked nodes" - -{------------------------------------------------------------------------------- - Validation - - Validation of links is a separate pass that's performed after normal - validation. Validation of links checks that if the tree indicates that a - package is linked, then everything underneath that choice really matches the - package we have linked to. - - This is interesting because it isn't unidirectional. Consider that we've - chosen a.foo to be version 1 and later decide that b.foo should link to a.foo. - Now foo depends on bar. Because a.foo and b.foo are linked, it's required that - a.bar and b.bar are also linked. However, it's not required that we actually - choose a.bar before b.bar. Goal choice order is relatively free. It's possible - that we choose a.bar first, but also possible that we choose b.bar first. In - both cases, we have to recognize that we have freedom of choice for the first - of the two, but no freedom of choice for the second. - - This is what LinkGroups are all about. Using LinkGroup, we can record (in the - situation above) that a.bar and b.bar need to be linked even if we haven't - chosen either of them yet. --------------------------------------------------------------------------------} - -data ValidateState = VS { - vsIndex :: Index - , vsLinks :: Map QPN LinkGroup - , vsFlags :: FAssignment - , vsStanzas :: SAssignment - , vsQualifyOptions :: QualifyOptions - } - deriving Show - -type Validate = Reader ValidateState - --- | Validate linked packages --- --- Verify that linked packages have --- --- * Linked dependencies, --- * Equal flag assignments --- * Equal stanza assignments -validateLinking :: Index -> Tree QGoalReason -> Tree QGoalReason -validateLinking index = (`runReader` initVS) . 
cata go - where - go :: TreeF QGoalReason (Validate (Tree QGoalReason)) -> Validate (Tree QGoalReason) - - go (PChoiceF qpn gr cs) = - PChoice qpn gr <$> T.sequence (P.mapWithKey (goP qpn) cs) - go (FChoiceF qfn gr t m cs) = - FChoice qfn gr t m <$> T.sequence (P.mapWithKey (goF qfn) cs) - go (SChoiceF qsn gr t cs) = - SChoice qsn gr t <$> T.sequence (P.mapWithKey (goS qsn) cs) - - -- For the other nodes we just recurse - go (GoalChoiceF cs) = GoalChoice <$> T.sequence cs - go (DoneF revDepMap) = return $ Done revDepMap - go (FailF conflictSet failReason) = return $ Fail conflictSet failReason - - -- Package choices - goP :: QPN -> POption -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goP qpn@(Q _pp pn) opt@(POption i _) r = do - vs <- ask - let PInfo deps _ _ = vsIndex vs ! pn ! i - qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps - case execUpdateState (pickPOption qpn opt qdeps) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs') r - - -- Flag choices - goF :: QFN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goF qfn b r = do - vs <- ask - case execUpdateState (pickFlag qfn b) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs') r - - -- Stanza choices (much the same as flag choices) - goS :: QSN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goS qsn b r = do - vs <- ask - case execUpdateState (pickStanza qsn b) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs') r - - initVS :: ValidateState - initVS = VS { - vsIndex = index - , vsLinks = M.empty - , vsFlags = M.empty - , vsStanzas = M.empty - , vsQualifyOptions = defaultQualifyOptions index - } - -{------------------------------------------------------------------------------- - Updating the validation state --------------------------------------------------------------------------------} - -type Conflict = (ConflictSet QPN, String) - -newtype UpdateState a = UpdateState { - unUpdateState :: StateT ValidateState (Either Conflict) a - } - deriving (Functor, Applicative, Monad) - -instance MonadState ValidateState UpdateState where - get = UpdateState $ get - put st = UpdateState $ do - assert (lgInvariant $ vsLinks st) $ return () - put st - -lift' :: Either Conflict a -> UpdateState a -lift' = UpdateState . lift - -conflict :: Conflict -> UpdateState a -conflict = lift' . Left - -execUpdateState :: UpdateState () -> ValidateState -> Either Conflict ValidateState -execUpdateState = execStateT . unUpdateState - -pickPOption :: QPN -> POption -> FlaggedDeps Component QPN -> UpdateState () -pickPOption qpn (POption i Nothing) _deps = pickConcrete qpn i -pickPOption qpn (POption i (Just pp')) deps = pickLink qpn i pp' deps - -pickConcrete :: QPN -> I -> UpdateState () -pickConcrete qpn@(Q pp _) i = do - vs <- get - case M.lookup qpn (vsLinks vs) of - -- Package is not yet in a LinkGroup. Create a new singleton link group. - Nothing -> do - let lg = lgSingleton qpn (Just $ PI pp i) - updateLinkGroup lg - - -- Package is already in a link group. Since we are picking a concrete - -- instance here, it must by definition be the canonical package. 
- Just lg -> - makeCanonical lg qpn i - -pickLink :: QPN -> I -> PP -> FlaggedDeps Component QPN -> UpdateState () -pickLink qpn@(Q _pp pn) i pp' deps = do - vs <- get - - -- The package might already be in a link group - -- (because one of its reverse dependencies is) - let lgSource = case M.lookup qpn (vsLinks vs) of - Nothing -> lgSingleton qpn Nothing - Just lg -> lg - - -- Find the link group for the package we are linking to - -- - -- Since the builder never links to a package without having first picked a - -- concrete instance for that package, and since we create singleton link - -- groups for concrete instances, this link group must exist (and must - -- in fact already have a canonical member). - let target = Q pp' pn - lgTarget = vsLinks vs ! target - - -- Verify here that the member we add is in fact for the same package and - -- matches the version of the canonical instance. However, violations of - -- these checks would indicate a bug in the linker, not a true conflict. - let sanityCheck :: Maybe (PI PP) -> Bool - sanityCheck Nothing = False - sanityCheck (Just (PI _ canonI)) = pn == lgPackage lgTarget && i == canonI - assert (sanityCheck (lgCanon lgTarget)) $ return () - - -- Merge the two link groups (updateLinkGroup will propagate the change) - lgTarget' <- lift' $ lgMerge [] lgSource lgTarget - updateLinkGroup lgTarget' - - -- Make sure all dependencies are linked as well - linkDeps target [P qpn] deps - -makeCanonical :: LinkGroup -> QPN -> I -> UpdateState () -makeCanonical lg qpn@(Q pp _) i = - case lgCanon lg of - -- There is already a canonical member. Fail. - Just _ -> - conflict ( CS.insert (P qpn) (lgConflictSet lg) - , "cannot make " ++ showQPN qpn - ++ " canonical member of " ++ showLinkGroup lg - ) - Nothing -> do - let lg' = lg { lgCanon = Just (PI pp i) } - updateLinkGroup lg' - --- | Link the dependencies of linked parents. --- --- When we decide to link one package against another we walk through the --- package's direct depedencies and make sure that they're all linked to each --- other by merging their link groups (or creating new singleton link groups if --- they don't have link groups yet). We do not need to do this recursively, --- because having the direct dependencies in a link group means that we must --- have already made or will make sooner or later a link choice for one of these --- as well, and cover their dependencies at that point. -linkDeps :: QPN -> [Var QPN] -> FlaggedDeps Component QPN -> UpdateState () -linkDeps target = \blame deps -> do - -- linkDeps is called in two places: when we first link one package to - -- another, and when we discover more dependencies of an already linked - -- package after doing some flag assignment. It is therefore important that - -- flag assignments cannot influence _how_ dependencies are qualified; - -- fortunately this is a documented property of 'qualifyDeps'. - rdeps <- requalify deps - go blame deps rdeps - where - go :: [Var QPN] -> FlaggedDeps Component QPN -> FlaggedDeps Component QPN -> UpdateState () - go = zipWithM_ . 
go1 - - go1 :: [Var QPN] -> FlaggedDep Component QPN -> FlaggedDep Component QPN -> UpdateState () - go1 blame dep rdep = case (dep, rdep) of - (Simple (Dep qpn _) _, ~(Simple (Dep qpn' _) _)) -> do - vs <- get - let lg = M.findWithDefault (lgSingleton qpn Nothing) qpn $ vsLinks vs - lg' = M.findWithDefault (lgSingleton qpn' Nothing) qpn' $ vsLinks vs - lg'' <- lift' $ lgMerge blame lg lg' - updateLinkGroup lg'' - (Flagged fn _ t f, ~(Flagged _ _ t' f')) -> do - vs <- get - case M.lookup fn (vsFlags vs) of - Nothing -> return () -- flag assignment not yet known - Just True -> go (F fn:blame) t t' - Just False -> go (F fn:blame) f f' - (Stanza sn t, ~(Stanza _ t')) -> do - vs <- get - case M.lookup sn (vsStanzas vs) of - Nothing -> return () -- stanza assignment not yet known - Just True -> go (S sn:blame) t t' - Just False -> return () -- stanza not enabled; no new deps - -- For extensions and language dependencies, there is nothing to do. - -- No choice is involved, just checking, so there is nothing to link. - -- The same goes for for pkg-config constraints. - (Simple (Ext _) _, _) -> return () - (Simple (Lang _) _, _) -> return () - (Simple (Pkg _ _) _, _) -> return () - - requalify :: FlaggedDeps Component QPN -> UpdateState (FlaggedDeps Component QPN) - requalify deps = do - vs <- get - return $ qualifyDeps (vsQualifyOptions vs) target (unqualifyDeps deps) - -pickFlag :: QFN -> Bool -> UpdateState () -pickFlag qfn b = do - modify $ \vs -> vs { vsFlags = M.insert qfn b (vsFlags vs) } - verifyFlag qfn - linkNewDeps (F qfn) b - -pickStanza :: QSN -> Bool -> UpdateState () -pickStanza qsn b = do - modify $ \vs -> vs { vsStanzas = M.insert qsn b (vsStanzas vs) } - verifyStanza qsn - linkNewDeps (S qsn) b - --- | Link dependencies that we discover after making a flag choice. --- --- When we make a flag choice for a package, then new dependencies for that --- package might become available. If the package under consideration is in a --- non-trivial link group, then these new dependencies have to be linked as --- well. In linkNewDeps, we compute such new dependencies and make sure they are --- linked. -linkNewDeps :: Var QPN -> Bool -> UpdateState () -linkNewDeps var b = do - vs <- get - let (qpn@(Q pp pn), Just i) = varPI var - PInfo deps _ _ = vsIndex vs ! pn ! i - qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps - lg = vsLinks vs ! 
qpn - (parents, newDeps) = findNewDeps vs qdeps - linkedTo = S.delete pp (lgMembers lg) - forM_ (S.toList linkedTo) $ \pp' -> linkDeps (Q pp' pn) (P qpn : parents) newDeps - where - findNewDeps :: ValidateState -> FlaggedDeps comp QPN -> ([Var QPN], FlaggedDeps Component QPN) - findNewDeps vs = concatMapUnzip (findNewDeps' vs) - - findNewDeps' :: ValidateState -> FlaggedDep comp QPN -> ([Var QPN], FlaggedDeps Component QPN) - findNewDeps' _ (Simple _ _) = ([], []) - findNewDeps' vs (Flagged qfn _ t f) = - case (F qfn == var, M.lookup qfn (vsFlags vs)) of - (True, _) -> ([F qfn], if b then t else f) - (_, Nothing) -> ([], []) -- not yet known - (_, Just b') -> let (parents, deps) = findNewDeps vs (if b' then t else f) - in (F qfn:parents, deps) - findNewDeps' vs (Stanza qsn t) = - case (S qsn == var, M.lookup qsn (vsStanzas vs)) of - (True, _) -> ([S qsn], if b then t else []) - (_, Nothing) -> ([], []) -- not yet known - (_, Just b') -> let (parents, deps) = findNewDeps vs (if b' then t else []) - in (S qsn:parents, deps) - -updateLinkGroup :: LinkGroup -> UpdateState () -updateLinkGroup lg = do - verifyLinkGroup lg - modify $ \vs -> vs { - vsLinks = M.fromList (map aux (S.toList (lgMembers lg))) - `M.union` vsLinks vs - } - where - aux pp = (Q pp (lgPackage lg), lg) - -{------------------------------------------------------------------------------- - Verification --------------------------------------------------------------------------------} - -verifyLinkGroup :: LinkGroup -> UpdateState () -verifyLinkGroup lg = - case lgInstance lg of - -- No instance picked yet. Nothing to verify - Nothing -> - return () - - -- We picked an instance. Verify flags and stanzas - -- TODO: The enumeration of OptionalStanza names is very brittle; - -- if a constructor is added to the datatype we won't notice it here - Just i -> do - vs <- get - let PInfo _deps finfo _ = vsIndex vs ! lgPackage lg ! i - flags = M.keys finfo - stanzas = [TestStanzas, BenchStanzas] - forM_ flags $ \fn -> do - let flag = FN (PI (lgPackage lg) i) fn - verifyFlag' flag lg - forM_ stanzas $ \sn -> do - let stanza = SN (PI (lgPackage lg) i) sn - verifyStanza' stanza lg - -verifyFlag :: QFN -> UpdateState () -verifyFlag (FN (PI qpn@(Q _pp pn) i) fn) = do - vs <- get - -- We can only pick a flag after picking an instance; link group must exist - verifyFlag' (FN (PI pn i) fn) (vsLinks vs ! qpn) - -verifyStanza :: QSN -> UpdateState () -verifyStanza (SN (PI qpn@(Q _pp pn) i) sn) = do - vs <- get - -- We can only pick a stanza after picking an instance; link group must exist - verifyStanza' (SN (PI pn i) sn) (vsLinks vs ! qpn) - --- | Verify that all packages in the link group agree on flag assignments --- --- For the given flag and the link group, obtain all assignments for the flag --- that have already been made for link group members, and check that they are --- equal. 
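Concretely, the check described above collects the assignments already made for the members of a link group and requires the known ones to agree, skipping members that are not assigned yet. A minimal self-contained sketch of that check, keyed by plain Strings instead of the solver's qualified flag names (agreeOnFlag and FlagMap are illustrative names, not from this patch; allEqual mirrors the helper defined in the Auxiliary section of this module):

import Data.Maybe (mapMaybe)
import qualified Data.Map as M

type FlagMap = M.Map String Bool   -- stand-in for the flag assignment

-- All assignments we already know about must agree; unknown ones are ignored.
agreeOnFlag :: FlagMap -> [String] -> Bool
agreeOnFlag fa = allEqual . mapMaybe (`M.lookup` fa)

allEqual :: Eq a => [a] -> Bool
allEqual []       = True
allEqual [_]      = True
allEqual (x:y:ys) = x == y && allEqual (y:ys)

-- agreeOnFlag (M.fromList [("a.foo",True),("b.foo",True)])  ["a.foo","b.foo","c.foo"] == True
-- agreeOnFlag (M.fromList [("a.foo",True),("b.foo",False)]) ["a.foo","b.foo"]         == False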
-verifyFlag' :: FN PN -> LinkGroup -> UpdateState () -verifyFlag' (FN (PI pn i) fn) lg = do - vs <- get - let flags = map (\pp' -> FN (PI (Q pp' pn) i) fn) (S.toList (lgMembers lg)) - vals = map (`M.lookup` vsFlags vs) flags - if allEqual (catMaybes vals) -- We ignore not-yet assigned flags - then return () - else conflict ( CS.fromList (map F flags) `CS.union` lgConflictSet lg - , "flag " ++ show fn ++ " incompatible" - ) - --- | Verify that all packages in the link group agree on stanza assignments --- --- For the given stanza and the link group, obtain all assignments for the --- stanza that have already been made for link group members, and check that --- they are equal. --- --- This function closely mirrors 'verifyFlag''. -verifyStanza' :: SN PN -> LinkGroup -> UpdateState () -verifyStanza' (SN (PI pn i) sn) lg = do - vs <- get - let stanzas = map (\pp' -> SN (PI (Q pp' pn) i) sn) (S.toList (lgMembers lg)) - vals = map (`M.lookup` vsStanzas vs) stanzas - if allEqual (catMaybes vals) -- We ignore not-yet assigned stanzas - then return () - else conflict ( CS.fromList (map S stanzas) `CS.union` lgConflictSet lg - , "stanza " ++ show sn ++ " incompatible" - ) - -{------------------------------------------------------------------------------- - Link groups --------------------------------------------------------------------------------} - --- | Set of packages that must be linked together --- --- A LinkGroup is between several qualified package names. In the validation --- state, we maintain a map vsLinks from qualified package names to link groups. --- There is an invariant that for all members of a link group, vsLinks must map --- to the same link group. The function updateLinkGroup can be used to --- re-establish this invariant after creating or expanding a LinkGroup. -data LinkGroup = LinkGroup { - -- | The name of the package of this link group - lgPackage :: PN - - -- | The canonical member of this link group (the one where we picked - -- a concrete instance). Once we have picked a canonical member, all - -- other packages must link to this one. - -- - -- We may not know this yet (if we are constructing link groups - -- for dependencies) - , lgCanon :: Maybe (PI PP) - - -- | The members of the link group - , lgMembers :: Set PP - - -- | The set of variables that should be added to the conflict set if - -- something goes wrong with this link set (in addition to the members - -- of the link group itself) - , lgBlame :: ConflictSet QPN - } - deriving (Show, Eq) - --- | Invariant for the set of link groups: every element in the link group --- must be pointing to the /same/ link group -lgInvariant :: Map QPN LinkGroup -> Bool -lgInvariant links = all invGroup (M.elems links) - where - invGroup :: LinkGroup -> Bool - invGroup lg = allEqual $ map (`M.lookup` links) members - where - members :: [QPN] - members = map (`Q` lgPackage lg) $ S.toList (lgMembers lg) - --- | Package version of this group --- --- This is only known once we have picked a canonical element. -lgInstance :: LinkGroup -> Maybe I -lgInstance = fmap (\(PI _ i) -> i) . 
lgCanon - -showLinkGroup :: LinkGroup -> String -showLinkGroup lg = - "{" ++ intercalate "," (map showMember (S.toList (lgMembers lg))) ++ "}" - where - showMember :: PP -> String - showMember pp = case lgCanon lg of - Just (PI pp' _i) | pp == pp' -> "*" - _otherwise -> "" - ++ case lgInstance lg of - Nothing -> showQPN (qpn pp) - Just i -> showPI (PI (qpn pp) i) - - qpn :: PP -> QPN - qpn pp = Q pp (lgPackage lg) - --- | Creates a link group that contains a single member. -lgSingleton :: QPN -> Maybe (PI PP) -> LinkGroup -lgSingleton (Q pp pn) canon = LinkGroup { - lgPackage = pn - , lgCanon = canon - , lgMembers = S.singleton pp - , lgBlame = CS.empty - } - -lgMerge :: [Var QPN] -> LinkGroup -> LinkGroup -> Either Conflict LinkGroup -lgMerge blame lg lg' = do - canon <- pick (lgCanon lg) (lgCanon lg') - return LinkGroup { - lgPackage = lgPackage lg - , lgCanon = canon - , lgMembers = lgMembers lg `S.union` lgMembers lg' - , lgBlame = CS.unions [CS.fromList blame, lgBlame lg, lgBlame lg'] - } - where - pick :: Eq a => Maybe a -> Maybe a -> Either Conflict (Maybe a) - pick Nothing Nothing = Right Nothing - pick (Just x) Nothing = Right $ Just x - pick Nothing (Just y) = Right $ Just y - pick (Just x) (Just y) = - if x == y then Right $ Just x - else Left ( CS.unions [ - CS.fromList blame - , lgConflictSet lg - , lgConflictSet lg' - ] - , "cannot merge " ++ showLinkGroup lg - ++ " and " ++ showLinkGroup lg' - ) - -lgConflictSet :: LinkGroup -> ConflictSet QPN -lgConflictSet lg = - CS.fromList (map aux (S.toList (lgMembers lg))) - `CS.union` lgBlame lg - where - aux pp = P (Q pp (lgPackage lg)) - -{------------------------------------------------------------------------------- - Auxiliary --------------------------------------------------------------------------------} - -allEqual :: Eq a => [a] -> Bool -allEqual [] = True -allEqual [_] = True -allEqual (x:y:ys) = x == y && allEqual (y:ys) - -concatMapUnzip :: (a -> ([b], [c])) -> [a] -> ([b], [c]) -concatMapUnzip f = (\(xs, ys) -> (concat xs, concat ys)) . unzip . map f diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Log.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Log.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Log.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Log.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -module Distribution.Client.Dependency.Modular.Log - ( Log - , continueWith - , failWith - , logToProgress - , succeedWith - , tryWith - ) where - -import Control.Applicative -import Data.List as L -import Data.Maybe (isNothing) - -import Distribution.Client.Dependency.Types -- from Cabal - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Message -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree (FailReason(..)) -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS - --- | The 'Log' datatype. --- --- Represents the progress of a computation lazily. --- --- Parameterized over the type of actual messages and the final result. 
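The laziness mentioned above is what lets logToProgress stream messages while the search is still running. A self-contained sketch of the underlying stream and of the fold used by 'messages' just below; Progress and foldProgress are re-declared here in minimal form purely for illustration, the real definitions being imported from Distribution.Client.Dependency.Types.

-- Minimal re-declaration for illustration only.
data Progress step fail done
  = Step step (Progress step fail done)
  | Fail fail
  | Done done

foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a)
             -> Progress step fail done -> a
foldProgress step failure done = go
  where
    go (Step s rest) = step s (go rest)
    go (Fail f)      = failure f
    go (Done d)      = done d

-- Same shape as 'messages' below: collect the steps, drop the outcome.
collectSteps :: Progress step fail done -> [step]
collectSteps = foldProgress (:) (const []) (const [])

-- Laziness in action: only as much of the log is forced as is consumed.
-- take 2 (collectSteps (Step "a" (Step "b" (Step "c" (Done ())))))  ==  ["a","b"]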
-type Log m a = Progress m () a - -messages :: Progress step fail done -> [step] -messages = foldProgress (:) (const []) (const []) - --- | Postprocesses a log file. Takes as an argument a limit on allowed backjumps. --- If the limit is 'Nothing', then infinitely many backjumps are allowed. If the --- limit is 'Just 0', backtracking is completely disabled. -logToProgress :: Maybe Int -> Log Message a -> Progress String String a -logToProgress mbj l = let - es = proc (Just 0) l -- catch first error (always) - ms = useFirstError (proc mbj l) - in go es es -- trace for first error - (showMessages (const True) True ms) -- run with backjump limit applied - where - -- Proc takes the allowed number of backjumps and a 'Progress' and explores the - -- messages until the maximum number of backjumps has been reached. It filters out - -- and ignores repeated backjumps. If proc reaches the backjump limit, it truncates - -- the 'Progress' and ends it with the last conflict set. Otherwise, it leaves the - -- original success result or replaces the original failure with 'Nothing'. - proc :: Maybe Int -> Progress Message a b -> Progress Message (Maybe (ConflictSet QPN)) b - proc _ (Done x) = Done x - proc _ (Fail _) = Fail Nothing - proc mbj' (Step (Failure cs Backjump) xs@(Step Leave (Step (Failure cs' Backjump) _))) - | cs == cs' = proc mbj' xs -- repeated backjumps count as one - proc (Just 0) (Step (Failure cs Backjump) _) = Fail (Just cs) - proc (Just n) (Step x@(Failure _ Backjump) xs) = Step x (proc (Just (n - 1)) xs) - proc mbj' (Step x xs) = Step x (proc mbj' xs) - - -- Sets the conflict set from the first backjump as the final error, and records - -- whether the search was exhaustive. - useFirstError :: Progress Message (Maybe (ConflictSet QPN)) b - -> Progress Message (Bool, Maybe (ConflictSet QPN)) b - useFirstError = replace Nothing - where - replace _ (Done x) = Done x - replace cs' (Fail cs) = -- 'Nothing' means backjump limit not reached. - -- Prefer first error over later error. - Fail (isNothing cs, cs' <|> cs) - replace Nothing (Step x@(Failure cs Backjump) xs) = Step x $ replace (Just cs) xs - replace cs' (Step x xs) = Step x $ replace cs' xs - - -- The first two arguments are both supposed to be the log up to the first error. - -- That's the error that will always be printed in case we do not find a solution. - -- We pass this log twice, because we evaluate it in parallel with the full log, - -- but we also want to retain the reference to its beginning for when we print it. - -- This trick prevents a space leak! - -- - -- The third argument is the full log, ending with either the solution or the - -- exhaustiveness and first conflict set. 
- go :: Progress Message a b - -> Progress Message a b - -> Progress String (Bool, Maybe (ConflictSet QPN)) b - -> Progress String String b - go ms (Step _ ns) (Step x xs) = Step x (go ms ns xs) - go ms r (Step x xs) = Step x (go ms r xs) - go ms _ (Fail (exh, Just cs)) = Fail $ - "Could not resolve dependencies:\n" ++ - unlines (messages $ showMessages (L.foldr (\ v _ -> v `CS.member` cs) True) False ms) ++ - (if exh then "Dependency tree exhaustively searched.\n" - else "Backjump limit reached (" ++ currlimit mbj ++ - "change with --max-backjumps or try to run with --reorder-goals).\n") - where currlimit (Just n) = "currently " ++ show n ++ ", " - currlimit Nothing = "" - go _ _ (Done s) = Done s - go _ _ (Fail (_, Nothing)) = Fail ("Could not resolve dependencies; something strange happened.") -- should not happen - -failWith :: step -> fail -> Progress step fail done -failWith s f = Step s (Fail f) - -succeedWith :: step -> done -> Progress step fail done -succeedWith s d = Step s (Done d) - -continueWith :: step -> Progress step fail done -> Progress step fail done -continueWith = Step - -tryWith :: Message - -> Progress Message fail done - -> Progress Message fail done -tryWith m = Step m . Step Enter . foldProgress Step (failWith Leave) Done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Message.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Message.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Message.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Message.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,154 +0,0 @@ -{-# LANGUAGE BangPatterns #-} - -module Distribution.Client.Dependency.Modular.Message ( - Message(..), - showMessages - ) where - -import qualified Data.List as L -import Prelude hiding (pi) - -import Distribution.Text -- from Cabal - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.Tree - ( FailReason(..), POption(..) ) -import Distribution.Client.Dependency.Types - ( ConstraintSource(..), showConstraintSource, Progress(..) ) - -data Message = - Enter -- ^ increase indentation level - | Leave -- ^ decrease indentation level - | TryP QPN POption - | TryF QFN Bool - | TryS QSN Bool - | Next (Goal QPN) - | Success - | Failure (ConflictSet QPN) FailReason - --- | Transforms the structured message type to actual messages (strings). --- --- Takes an additional relevance predicate. The predicate gets a stack of goal --- variables and can decide whether messages regarding these goals are relevant. --- You can plug in 'const True' if you're interested in a full trace. If you --- want a slice of the trace concerning a particular conflict set, then plug in --- a predicate returning 'True' on the empty stack and if the head is in the --- conflict set. --- --- The second argument indicates if the level numbers should be shown. This is --- recommended for any trace that involves backtracking, because only the level --- numbers will allow to keep track of backjumps. 
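The conflict-set slice described above is exactly what logToProgress plugs in: its predicate, L.foldr (\ v _ -> v `CS.member` cs) True, returns True on the empty stack and otherwise consults only the head. The same trick over a plain Data.Set, as an illustrative stand-in for ConflictSet (headInSet is an invented name):

import qualified Data.Set as Set

-- True on the empty stack; for a non-empty stack, look only at the head.
headInSet :: Ord a => Set.Set a -> [a] -> Bool
headInSet cs = foldr (\v _ -> v `Set.member` cs) True

-- headInSet (Set.fromList [1,2]) ([] :: [Int])  == True   -- empty stack: relevant
-- headInSet (Set.fromList [1,2]) [2,9,9]        == True   -- head is in the set
-- headInSet (Set.fromList [1,2]) [9,1,1]        == False  -- only the head counts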
-showMessages :: ([Var QPN] -> Bool) -> Bool -> Progress Message a b -> Progress String a b -showMessages p sl = go [] 0 - where - -- The stack 'v' represents variables that are currently assigned by the - -- solver. 'go' pushes a variable for a recursive call when it encounters - -- 'TryP', 'TryF', or 'TryS' and pops a variable when it encounters 'Leave'. - -- When 'go' processes a package goal, or a package goal followed by a - -- 'Failure', it calls 'atLevel' with the goal variable at the head of the - -- stack so that the predicate can also select messages relating to package - -- goal choices. - go :: [Var QPN] -> Int -> Progress Message a b -> Progress String a b - go !_ !_ (Done x) = Done x - go !_ !_ (Fail x) = Fail x - -- complex patterns - go !v !l (Step (TryP qpn i) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - goPReject v l qpn [i] c fr ms - go !v !l (Step (TryF qfn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - (atLevel (add (F qfn) v) l $ "rejecting: " ++ showQFNBool qfn b ++ showFR c fr) (go v l ms) - go !v !l (Step (TryS qsn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - (atLevel (add (S qsn) v) l $ "rejecting: " ++ showQSNBool qsn b ++ showFR c fr) (go v l ms) - go !v !l (Step (Next (Goal (P qpn) gr)) (Step (TryP qpn' i) ms@(Step Enter (Step (Next _) _)))) = - (atLevel (add (P qpn) v) l $ "trying: " ++ showQPNPOpt qpn' i ++ showGR gr) (go (add (P qpn) v) l ms) - go !v !l (Step (Next (Goal (P qpn) gr)) ms@(Fail _)) = - (atLevel (add (P qpn) v) l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go v l ms - -- the previous case potentially arises in the error output, because we remove the backjump itself - -- if we cut the log after the first error - go !v !l (Step (Next (Goal (P qpn) gr)) ms@(Step (Failure _c Backjump) _)) = - (atLevel (add (P qpn) v) l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go v l ms - go !v !l (Step (Next (Goal (P qpn) gr)) (Step (Failure c fr) ms)) = - let v' = add (P qpn) v - in (atLevel v' l $ showPackageGoal qpn gr) $ (atLevel v' l $ showFailure c fr) (go v l ms) - go !v !l (Step (Failure c Backjump) ms@(Step Leave (Step (Failure c' Backjump) _))) - | c == c' = go v l ms - -- standard display - go !v !l (Step Enter ms) = go v (l+1) ms - go !v !l (Step Leave ms) = go (drop 1 v) (l-1) ms - go !v !l (Step (TryP qpn i) ms) = (atLevel (add (P qpn) v) l $ "trying: " ++ showQPNPOpt qpn i) (go (add (P qpn) v) l ms) - go !v !l (Step (TryF qfn b) ms) = (atLevel (add (F qfn) v) l $ "trying: " ++ showQFNBool qfn b) (go (add (F qfn) v) l ms) - go !v !l (Step (TryS qsn b) ms) = (atLevel (add (S qsn) v) l $ "trying: " ++ showQSNBool qsn b) (go (add (S qsn) v) l ms) - go !v !l (Step (Next (Goal (P qpn) gr)) ms) = (atLevel (add (P qpn) v) l $ showPackageGoal qpn gr) (go v l ms) - go !v !l (Step (Next _) ms) = go v l ms -- ignore flag goals in the log - go !v !l (Step (Success) ms) = (atLevel v l $ "done") (go v l ms) - go !v !l (Step (Failure c fr) ms) = (atLevel v l $ showFailure c fr) (go v l ms) - - showPackageGoal :: QPN -> QGoalReason -> String - showPackageGoal qpn gr = "next goal: " ++ showQPN qpn ++ showGR gr - - showFailure :: ConflictSet QPN -> FailReason -> String - showFailure c fr = "fail" ++ showFR c fr - - add :: Var QPN -> [Var QPN] -> [Var QPN] - add v vs = simplifyVar v : vs - - -- special handler for many subsequent package rejections - goPReject :: [Var QPN] - -> Int - -> QPN - -> [POption] - -> ConflictSet QPN - -> FailReason - -> Progress Message a b - -> Progress String a b - goPReject v l qpn 
is c fr (Step (TryP qpn' i) (Step Enter (Step (Failure _ fr') (Step Leave ms)))) - | qpn == qpn' && fr == fr' = goPReject v l qpn (i : is) c fr ms - goPReject v l qpn is c fr ms = - (atLevel (P qpn : v) l $ "rejecting: " ++ L.intercalate ", " (map (showQPNPOpt qpn) (reverse is)) ++ showFR c fr) (go v l ms) - - -- write a message, but only if it's relevant; we can also enable or disable the display of the current level - atLevel :: [Var QPN] -> Int -> String -> Progress String a b -> Progress String a b - atLevel v l x xs - | sl && p v = let s = show l - in Step ("[" ++ replicate (3 - length s) '_' ++ s ++ "] " ++ x) xs - | p v = Step x xs - | otherwise = xs - -showQPNPOpt :: QPN -> POption -> String -showQPNPOpt qpn@(Q _pp pn) (POption i linkedTo) = - case linkedTo of - Nothing -> showPI (PI qpn i) -- Consistent with prior to POption - Just pp' -> showQPN qpn ++ "~>" ++ showPI (PI (Q pp' pn) i) - -showGR :: QGoalReason -> String -showGR UserGoal = " (user goal)" -showGR (PDependency pi) = " (dependency of " ++ showPI pi ++ ")" -showGR (FDependency qfn b) = " (dependency of " ++ showQFNBool qfn b ++ ")" -showGR (SDependency qsn) = " (dependency of " ++ showQSNBool qsn True ++ ")" - -showFR :: ConflictSet QPN -> FailReason -> String -showFR _ InconsistentInitialConstraints = " (inconsistent initial constraints)" -showFR _ (Conflicting ds) = " (conflict: " ++ L.intercalate ", " (map showDep ds) ++ ")" -showFR _ CannotInstall = " (only already installed instances can be used)" -showFR _ CannotReinstall = " (avoiding to reinstall a package with same version but new dependencies)" -showFR _ Shadowed = " (shadowed by another installed package with same version)" -showFR _ Broken = " (package is broken)" -showFR _ (GlobalConstraintVersion vr src) = " (" ++ constraintSource src ++ " requires " ++ display vr ++ ")" -showFR _ (GlobalConstraintInstalled src) = " (" ++ constraintSource src ++ " requires installed instance)" -showFR _ (GlobalConstraintSource src) = " (" ++ constraintSource src ++ " requires source instance)" -showFR _ (GlobalConstraintFlag src) = " (" ++ constraintSource src ++ " requires opposite flag selection)" -showFR _ ManualFlag = " (manual flag can only be changed explicitly)" -showFR c Backjump = " (backjumping, conflict set: " ++ showCS c ++ ")" -showFR _ MultipleInstances = " (multiple instances)" -showFR c (DependenciesNotLinked msg) = " (dependencies not linked: " ++ msg ++ "; conflict set: " ++ showCS c ++ ")" -showFR c CyclicDependencies = " (cyclic dependencies; conflict set: " ++ showCS c ++ ")" --- The following are internal failures. They should not occur. In the --- interest of not crashing unnecessarily, we still just print an error --- message though. 
-showFR _ (MalformedFlagChoice qfn) = " (INTERNAL ERROR: MALFORMED FLAG CHOICE: " ++ showQFN qfn ++ ")" -showFR _ (MalformedStanzaChoice qsn) = " (INTERNAL ERROR: MALFORMED STANZA CHOICE: " ++ showQSN qsn ++ ")" -showFR _ EmptyGoalChoice = " (INTERNAL ERROR: EMPTY GOAL CHOICE)" - -constraintSource :: ConstraintSource -> String -constraintSource src = "constraint from " ++ showConstraintSource src diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Package.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Package.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Package.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Package.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Client.Dependency.Modular.Package - ( I(..) - , Loc(..) - , PackageId - , PackageIdentifier(..) - , PackageName(..) - , PI(..) - , PN - , PP(..) - , Namespace(..) - , Qualifier(..) - , QPN - , QPV - , Q(..) - , instI - , makeIndependent - , primaryPP - , showI - , showPI - , showQPN - , unPN - ) where - -import Data.List as L - -import Distribution.Package -- from Cabal -import Distribution.Text -- from Cabal - -import Distribution.Client.Dependency.Modular.Version - --- | A package name. -type PN = PackageName - --- | Unpacking a package name. -unPN :: PN -> String -unPN (PackageName pn) = pn - --- | Package version. A package name plus a version number. -type PV = PackageId - --- | Qualified package version. -type QPV = Q PV - --- | Package id. Currently just a black-box string. -type PId = UnitId - --- | Location. Info about whether a package is installed or not, and where --- exactly it is located. For installed packages, uniquely identifies the --- package instance via its 'PId'. --- --- TODO: More information is needed about the repo. -data Loc = Inst PId | InRepo - deriving (Eq, Ord, Show) - --- | Instance. A version number and a location. -data I = I Ver Loc - deriving (Eq, Ord, Show) - --- | String representation of an instance. -showI :: I -> String -showI (I v InRepo) = showVer v -showI (I v (Inst uid)) = showVer v ++ "/installed" ++ shortId uid - where - -- A hack to extract the beginning of the package ABI hash - shortId (SimpleUnitId (ComponentId i)) - = snip (splitAt 4) (++ "...") - . snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':) - $ i - snip p f xs = case p xs of - (ys, zs) -> (if L.null zs then id else f) ys - --- | Package instance. A package name and an instance. -data PI qpn = PI qpn I - deriving (Eq, Ord, Show, Functor) - --- | String representation of a package instance. -showPI :: PI QPN -> String -showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i - -instI :: I -> Bool -instI (I _ (Inst _)) = True -instI _ = False - --- | A package path consists of a namespace and a package path inside that --- namespace. -data PP = PP Namespace Qualifier - deriving (Eq, Ord, Show) - --- | Top-level namespace --- --- Package choices in different namespaces are considered completely independent --- by the solver. -data Namespace = - -- | The default namespace - DefaultNamespace - - -- | Independent namespace - -- - -- For now we just number these (rather than giving them more structure). 
- | Independent Int - deriving (Eq, Ord, Show) - --- | Qualifier of a package within a namespace (see 'PP') -data Qualifier = - -- | Top-level dependency in this namespace - Unqualified - - -- | Any dependency on base is considered independent - -- - -- This makes it possible to have base shims. - | Base PN - - -- | Setup dependency - -- - -- By rights setup dependencies ought to be nestable; after all, the setup - -- dependencies of a package might themselves have setup dependencies, which - -- are independent from everything else. However, this very quickly leads to - -- infinite search trees in the solver. Therefore we limit ourselves to - -- a single qualifier (within a given namespace). - | Setup PN - deriving (Eq, Ord, Show) - --- | Is the package in the primary group of packages. In particular this --- does not include packages pulled in as setup deps. --- -primaryPP :: PP -> Bool -primaryPP (PP _ns q) = go q - where - go Unqualified = True - go (Base _) = True - go (Setup _) = False - --- | String representation of a package path. --- --- NOTE: The result of 'showPP' is either empty or results in a period, so that --- it can be prepended to a package name. -showPP :: PP -> String -showPP (PP ns q) = - case ns of - DefaultNamespace -> go q - Independent i -> show i ++ "." ++ go q - where - -- Print the qualifier - -- - -- NOTE: the base qualifier is for a dependency _on_ base; the qualifier is - -- there to make sure different dependencies on base are all independent. - -- So we want to print something like @"A.base"@, where the @"A."@ part - -- is the qualifier and @"base"@ is the actual dependency (which, for the - -- 'Base' qualifier, will always be @base@). - go Unqualified = "" - go (Setup pn) = display pn ++ "-setup." - go (Base pn) = display pn ++ "." - --- | A qualified entity. Pairs a package path with the entity. -data Q a = Q PP a - deriving (Eq, Ord, Show) - --- | Standard string representation of a qualified entity. -showQ :: (a -> String) -> (Q a -> String) -showQ showa (Q pp x) = showPP pp ++ showa x - --- | Qualified package name. -type QPN = Q PN - --- | String representation of a qualified package path. -showQPN :: QPN -> String -showQPN = showQ display - --- | Create artificial parents for each of the package names, making --- them all independent. -makeIndependent :: [PN] -> [QPN] -makeIndependent ps = [ Q pp pn | (pn, i) <- zip ps [0::Int ..] 
- , let pp = PP (Independent i) Unqualified - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Preference.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Preference.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Preference.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Preference.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,397 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Client.Dependency.Modular.Preference - ( avoidReinstalls - , deferSetupChoices - , deferWeakFlagChoices - , enforceManualFlags - , enforcePackageConstraints - , enforceSingleInstanceRestriction - , firstGoal - , preferBaseGoalChoice - , preferEasyGoalChoices - , preferLinked - , preferPackagePreferences - , preferReallyEasyGoalChoices - , requireInstalled - ) where - --- Reordering or pruning the tree in order to prefer or make certain choices. - -import qualified Data.List as L -import qualified Data.Map as M -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid -import Control.Applicative -#endif -import Prelude hiding (sequence) -import Control.Monad.Reader hiding (sequence) -import Data.Map (Map) -import Data.Traversable (sequence) - -import Distribution.Client.Dependency.Types - ( PackageConstraint(..), LabeledPackageConstraint(..), ConstraintSource(..) - , PackagePreferences(..), InstalledPreference(..) ) -import Distribution.Client.Types - ( OptionalStanza(..) ) - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import Distribution.Client.Dependency.Modular.Tree -import Distribution.Client.Dependency.Modular.Version -import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS - --- | Generic abstraction for strategies that just rearrange the package order. --- Only packages that match the given predicate are reordered. -packageOrderFor :: (PN -> Bool) -> (PN -> I -> I -> Ordering) -> Tree a -> Tree a -packageOrderFor p cmp' = trav go - where - go (PChoiceF v@(Q _ pn) r cs) - | p pn = PChoiceF v r (P.sortByKeys (flip (cmp pn)) cs) - | otherwise = PChoiceF v r cs - go x = x - - cmp :: PN -> POption -> POption -> Ordering - cmp pn (POption i _) (POption i' _) = cmp' pn i i' - --- | Prefer to link packages whenever possible -preferLinked :: Tree a -> Tree a -preferLinked = trav go - where - go (PChoiceF qn a cs) = PChoiceF qn a (P.sortByKeys cmp cs) - go x = x - - cmp (POption _ linkedTo) (POption _ linkedTo') = cmpL linkedTo linkedTo' - - cmpL Nothing Nothing = EQ - cmpL Nothing (Just _) = GT - cmpL (Just _) Nothing = LT - cmpL (Just _) (Just _) = EQ - --- | Ordering that treats versions satisfying more preferred ranges as greater --- than versions satisfying less preferred ranges. -preferredVersionsOrdering :: [VR] -> Ver -> Ver -> Ordering -preferredVersionsOrdering vrs v1 v2 = compare (check v1) (check v2) - where - check v = Prelude.length . Prelude.filter (==True) . - Prelude.map (flip checkVR v) $ vrs - --- | Traversal that tries to establish package preferences (not constraints). --- Works by reordering choice nodes. Also applies stanza preferences. 
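preferPackagePreferences just below combines two orderings with mappend ("combines lexically"). That behaviour is simply the standard Monoid instance for Ordering: EQ defers to the second comparison, while LT and GT decide outright. A tiny illustration:

import Data.Monoid (mappend)

tieBroken :: Ordering
tieBroken = compare (1 :: Int) 1 `mappend` compare "b" "a"    -- EQ `mappend` GT == GT

decidedEarly :: Ordering
decidedEarly = compare (2 :: Int) 1 `mappend` compare "a" "b" -- GT `mappend` LT == GT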
-preferPackagePreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a -preferPackagePreferences pcs = preferPackageStanzaPreferences pcs - . packageOrderFor (const True) preference - where - preference pn i1@(I v1 _) i2@(I v2 _) = - let PackagePreferences vrs ipref _ = pcs pn - in preferredVersionsOrdering vrs v1 v2 `mappend` -- combines lexically - locationsOrdering ipref i1 i2 - - -- Note that we always rank installed before uninstalled, and later - -- versions before earlier, but we can change the priority of the - -- two orderings. - locationsOrdering PreferInstalled v1 v2 = - preferInstalledOrdering v1 v2 `mappend` preferLatestOrdering v1 v2 - locationsOrdering PreferLatest v1 v2 = - preferLatestOrdering v1 v2 `mappend` preferInstalledOrdering v1 v2 - --- | Ordering that treats installed instances as greater than uninstalled ones. -preferInstalledOrdering :: I -> I -> Ordering -preferInstalledOrdering (I _ (Inst _)) (I _ (Inst _)) = EQ -preferInstalledOrdering (I _ (Inst _)) _ = GT -preferInstalledOrdering _ (I _ (Inst _)) = LT -preferInstalledOrdering _ _ = EQ - --- | Compare instances by their version numbers. -preferLatestOrdering :: I -> I -> Ordering -preferLatestOrdering (I v1 _) (I v2 _) = compare v1 v2 - --- | Traversal that tries to establish package stanza enable\/disable --- preferences. Works by reordering the branches of stanza choices. -preferPackageStanzaPreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a -preferPackageStanzaPreferences pcs = trav go - where - go (SChoiceF qsn@(SN (PI (Q pp pn) _) s) gr _tr ts) | primaryPP pp = - let PackagePreferences _ _ spref = pcs pn - enableStanzaPref = s `elem` spref - -- move True case first to try enabling the stanza - ts' | enableStanzaPref = P.sortByKeys (flip compare) ts - | otherwise = ts - in SChoiceF qsn gr True ts' -- True: now weak choice - go x = x - --- | Helper function that tries to enforce a single package constraint on a --- given instance for a P-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. -processPackageConstraintP :: PP - -> ConflictSet QPN - -> I - -> LabeledPackageConstraint - -> Tree a - -> Tree a -processPackageConstraintP pp _ _ (LabeledPackageConstraint _ src) r - | src == ConstraintSourceUserTarget && not (primaryPP pp) = r - -- the constraints arising from targets, like "foo-1.0" only apply to - -- the main packages in the solution, they don't constrain setup deps - -processPackageConstraintP _ c i (LabeledPackageConstraint pc src) r = go i pc - where - go (I v _) (PackageConstraintVersion _ vr) - | checkVR vr v = r - | otherwise = Fail c (GlobalConstraintVersion vr src) - go _ (PackageConstraintInstalled _) - | instI i = r - | otherwise = Fail c (GlobalConstraintInstalled src) - go _ (PackageConstraintSource _) - | not (instI i) = r - | otherwise = Fail c (GlobalConstraintSource src) - go _ _ = r - --- | Helper function that tries to enforce a single package constraint on a --- given flag setting for an F-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. 
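These helpers (the P-, F-, and S-node variants) each produce a tree transformer, and enforcePackageConstraints further below composes all transformers relevant to one node with a left fold over function composition. A minimal sketch of that pattern, with plain endomorphisms standing in for tree transformers (applyAll is an invented name, not from this patch):

-- foldl over (.) starting from id applies every transformer in turn,
-- just like the 'g' helpers in enforcePackageConstraints.
applyAll :: [a -> a] -> a -> a
applyAll = foldl (\acc f -> acc . f) id

-- applyAll [(+1), (*2)] 5  ==  ((id . (+1)) . (*2)) 5  ==  5 * 2 + 1  ==  11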
-processPackageConstraintF :: Flag - -> ConflictSet QPN - -> Bool - -> LabeledPackageConstraint - -> Tree a - -> Tree a -processPackageConstraintF f c b' (LabeledPackageConstraint pc src) r = go pc - where - go (PackageConstraintFlags _ fa) = - case L.lookup f fa of - Nothing -> r - Just b | b == b' -> r - | otherwise -> Fail c (GlobalConstraintFlag src) - go _ = r - --- | Helper function that tries to enforce a single package constraint on a --- given flag setting for an F-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. -processPackageConstraintS :: OptionalStanza - -> ConflictSet QPN - -> Bool - -> LabeledPackageConstraint - -> Tree a - -> Tree a -processPackageConstraintS s c b' (LabeledPackageConstraint pc src) r = go pc - where - go (PackageConstraintStanzas _ ss) = - if not b' && s `elem` ss then Fail c (GlobalConstraintFlag src) - else r - go _ = r - --- | Traversal that tries to establish various kinds of user constraints. Works --- by selectively disabling choices that have been ruled out by global user --- constraints. -enforcePackageConstraints :: M.Map PN [LabeledPackageConstraint] - -> Tree QGoalReason - -> Tree QGoalReason -enforcePackageConstraints pcs = trav go - where - go (PChoiceF qpn@(Q pp pn) gr ts) = - let c = varToConflictSet (P qpn) - -- compose the transformation functions for each of the relevant constraint - g = \ (POption i _) -> foldl (\ h pc -> h . processPackageConstraintP pp c i pc) id - (M.findWithDefault [] pn pcs) - in PChoiceF qpn gr (P.mapWithKey g ts) - go (FChoiceF qfn@(FN (PI (Q _ pn) _) f) gr tr m ts) = - let c = varToConflictSet (F qfn) - -- compose the transformation functions for each of the relevant constraint - g = \ b -> foldl (\ h pc -> h . processPackageConstraintF f c b pc) id - (M.findWithDefault [] pn pcs) - in FChoiceF qfn gr tr m (P.mapWithKey g ts) - go (SChoiceF qsn@(SN (PI (Q _ pn) _) f) gr tr ts) = - let c = varToConflictSet (S qsn) - -- compose the transformation functions for each of the relevant constraint - g = \ b -> foldl (\ h pc -> h . processPackageConstraintS f c b pc) id - (M.findWithDefault [] pn pcs) - in SChoiceF qsn gr tr (P.mapWithKey g ts) - go x = x - --- | Transformation that tries to enforce manual flags. Manual flags --- can only be re-set explicitly by the user. This transformation should --- be run after user preferences have been enforced. For manual flags, --- it checks if a user choice has been made. If not, it disables all but --- the first choice. -enforceManualFlags :: Tree QGoalReason -> Tree QGoalReason -enforceManualFlags = trav go - where - go (FChoiceF qfn gr tr True ts) = FChoiceF qfn gr tr True $ - let c = varToConflictSet (F qfn) - in case span isDisabled (P.toList ts) of - ([], y : ys) -> P.fromList (y : L.map (\ (b, _) -> (b, Fail c ManualFlag)) ys) - _ -> ts -- something has been manually selected, leave things alone - where - isDisabled (_, Fail _ (GlobalConstraintFlag _)) = True - isDisabled _ = False - go x = x - --- | Require installed packages. -requireInstalled :: (PN -> Bool) -> Tree QGoalReason -> Tree QGoalReason -requireInstalled p = trav go - where - go (PChoiceF v@(Q _ pn) gr cs) - | p pn = PChoiceF v gr (P.mapWithKey installed cs) - | otherwise = PChoiceF v gr cs - where - installed (POption (I _ (Inst _)) _) x = x - installed _ _ = Fail (varToConflictSet (P v)) CannotInstall - go x = x - --- | Avoid reinstalls. --- --- This is a tricky strategy. 
If a package version is installed already and the --- same version is available from a repo, the repo version will never be chosen. --- This would result in a reinstall (either destructively, or potentially, --- shadowing). The old instance won't be visible or even present anymore, but --- other packages might have depended on it. --- --- TODO: It would be better to actually check the reverse dependencies of installed --- packages. If they're not depended on, then reinstalling should be fine. Even if --- they are, perhaps this should just result in trying to reinstall those other --- packages as well. However, doing this all neatly in one pass would require to --- change the builder, or at least to change the goal set after building. -avoidReinstalls :: (PN -> Bool) -> Tree QGoalReason -> Tree QGoalReason -avoidReinstalls p = trav go - where - go (PChoiceF qpn@(Q _ pn) gr cs) - | p pn = PChoiceF qpn gr disableReinstalls - | otherwise = PChoiceF qpn gr cs - where - disableReinstalls = - let installed = [ v | (POption (I v (Inst _)) _, _) <- P.toList cs ] - in P.mapWithKey (notReinstall installed) cs - - notReinstall vs (POption (I v InRepo) _) _ | v `elem` vs = - Fail (varToConflictSet (P qpn)) CannotReinstall - notReinstall _ _ x = - x - go x = x - --- | Always choose the first goal in the list next, abandoning all --- other choices. --- --- This is unnecessary for the default search strategy, because --- it descends only into the first goal choice anyway, --- but may still make sense to just reduce the tree size a bit. -firstGoal :: Tree a -> Tree a -firstGoal = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.firstOnly xs) - go x = x - -- Note that we keep empty choice nodes, because they mean success. - --- | Transformation that tries to make a decision on base as early as --- possible. In nearly all cases, there's a single choice for the base --- package. Also, fixing base early should lead to better error messages. -preferBaseGoalChoice :: Tree a -> Tree a -preferBaseGoalChoice = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.preferByKeys isBase xs) - go x = x - - isBase :: OpenGoal comp -> Bool - isBase (OpenGoal (Simple (Dep (Q _pp pn) _) _) _) | unPN pn == "base" = True - isBase _ = False - --- | Deal with setup dependencies after regular dependencies, so that we can --- will link setup depencencies against package dependencies when possible -deferSetupChoices :: Tree a -> Tree a -deferSetupChoices = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.preferByKeys noSetup xs) - go x = x - - noSetup :: OpenGoal comp -> Bool - noSetup (OpenGoal (Simple (Dep (Q (PP _ns (Setup _)) _) _) _) _) = False - noSetup _ = True - --- | Transformation that tries to avoid making weak flag choices early. --- Weak flags are trivial flags (not influencing dependencies) or such --- flags that are explicitly declared to be weak in the index. -deferWeakFlagChoices :: Tree a -> Tree a -deferWeakFlagChoices = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.prefer noWeakStanza (P.prefer noWeakFlag xs)) - go x = x - - noWeakStanza :: Tree a -> Bool - noWeakStanza (SChoice _ _ True _) = False - noWeakStanza _ = True - - noWeakFlag :: Tree a -> Bool - noWeakFlag (FChoice _ _ True _ _) = False - noWeakFlag _ = True - --- | Transformation that sorts choice nodes so that --- child nodes with a small branching degree are preferred. --- --- Only approximates the number of choices in the branches. 
--- In particular, we try to take any goal immediately if it has --- a branching degree of 0 (guaranteed failure) or 1 (no other --- choice possible). --- --- Returns at most one choice. --- -preferEasyGoalChoices :: Tree a -> Tree a -preferEasyGoalChoices = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.dminimumBy dchoices xs) - -- (a different implementation that seems slower): - -- GoalChoiceF (P.firstOnly (P.preferOrElse zeroOrOneChoices (P.minimumBy choices) xs)) - go x = x - --- | A variant of 'preferEasyGoalChoices' that just keeps the --- ones with a branching degree of 0 or 1. Note that unlike --- 'preferEasyGoalChoices', this may return more than one --- choice. --- -preferReallyEasyGoalChoices :: Tree a -> Tree a -preferReallyEasyGoalChoices = trav go - where - go (GoalChoiceF xs) = GoalChoiceF (P.prefer zeroOrOneChoices xs) - go x = x - --- | Monad used internally in enforceSingleInstanceRestriction --- --- For each package instance we record the goal for which we picked a concrete --- instance. The SIR means that for any package instance there can only be one. -type EnforceSIR = Reader (Map (PI PN) QPN) - --- | Enforce ghc's single instance restriction --- --- From the solver's perspective, this means that for any package instance --- (that is, package name + package version) there can be at most one qualified --- goal resolving to that instance (there may be other goals _linking_ to that --- instance however). -enforceSingleInstanceRestriction :: Tree QGoalReason -> Tree QGoalReason -enforceSingleInstanceRestriction = (`runReader` M.empty) . cata go - where - go :: TreeF QGoalReason (EnforceSIR (Tree QGoalReason)) -> EnforceSIR (Tree QGoalReason) - - -- We just verify package choices. - go (PChoiceF qpn gr cs) = - PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn) cs) - go _otherwise = - innM _otherwise - - -- The check proper - goP :: QPN -> POption -> EnforceSIR (Tree QGoalReason) -> EnforceSIR (Tree QGoalReason) - goP qpn@(Q _ pn) (POption i linkedTo) r = do - let inst = PI pn i - env <- ask - case (linkedTo, M.lookup inst env) of - (Just _, _) -> - -- For linked nodes we don't check anything - r - (Nothing, Nothing) -> - -- Not linked, not already used - local (M.insert inst qpn) r - (Nothing, Just qpn') -> do - -- Not linked, already used. This is an error - return $ Fail (CS.union (varToConflictSet (P qpn)) (varToConflictSet (P qpn'))) MultipleInstances diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/PSQ.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/PSQ.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/PSQ.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,213 +0,0 @@ -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} -module Distribution.Client.Dependency.Modular.PSQ - ( PSQ(..) -- Unit test needs constructor access - , Degree(..) 
- , casePSQ - , cons - , degree - , delete - , dminimumBy - , length - , lookup - , filter - , filterKeys - , firstOnly - , fromList - , isZeroOrOne - , keys - , map - , mapKeys - , mapWithKey - , mapWithKeyState - , minimumBy - , null - , prefer - , preferByKeys - , preferOrElse - , snoc - , sortBy - , sortByKeys - , splits - , toList - , union - ) where - --- Priority search queues. --- --- I am not yet sure what exactly is needed. But we need a data structure with --- key-based lookup that can be sorted. We're using a sequence right now with --- (inefficiently implemented) lookup, because I think that queue-based --- operations and sorting turn out to be more efficiency-critical in practice. - -import Control.Arrow (first, second) - -import qualified Data.Foldable as F -import Data.Function -import qualified Data.List as S -import Data.Ord (comparing) -import Data.Traversable -import Prelude hiding (foldr, length, lookup, filter, null, map) - -newtype PSQ k v = PSQ [(k, v)] - deriving (Eq, Show, Functor, F.Foldable, Traversable) -- Qualified Foldable to avoid issues with FTP - -keys :: PSQ k v -> [k] -keys (PSQ xs) = fmap fst xs - -lookup :: Eq k => k -> PSQ k v -> Maybe v -lookup k (PSQ xs) = S.lookup k xs - -map :: (v1 -> v2) -> PSQ k v1 -> PSQ k v2 -map f (PSQ xs) = PSQ (fmap (second f) xs) - -mapKeys :: (k1 -> k2) -> PSQ k1 v -> PSQ k2 v -mapKeys f (PSQ xs) = PSQ (fmap (first f) xs) - -mapWithKey :: (k -> a -> b) -> PSQ k a -> PSQ k b -mapWithKey f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f k v)) xs) - -mapWithKeyState :: (s -> k -> a -> (b, s)) -> PSQ k a -> s -> PSQ k b -mapWithKeyState p (PSQ xs) s0 = - PSQ (F.foldr (\ (k, v) r s -> case p s k v of - (w, n) -> (k, w) : (r n)) - (const []) xs s0) - -delete :: Eq k => k -> PSQ k a -> PSQ k a -delete k (PSQ xs) = PSQ (snd (S.partition ((== k) . fst) xs)) - -fromList :: [(k, a)] -> PSQ k a -fromList = PSQ - -cons :: k -> a -> PSQ k a -> PSQ k a -cons k x (PSQ xs) = PSQ ((k, x) : xs) - -snoc :: PSQ k a -> k -> a -> PSQ k a -snoc (PSQ xs) k x = PSQ (xs ++ [(k, x)]) - -casePSQ :: PSQ k a -> r -> (k -> a -> PSQ k a -> r) -> r -casePSQ (PSQ xs) n c = - case xs of - [] -> n - (k, v) : ys -> c k v (PSQ ys) - -splits :: PSQ k a -> PSQ k (a, PSQ k a) -splits = go id - where - go f xs = casePSQ xs - (PSQ []) - (\ k v ys -> cons k (v, f ys) (go (f . cons k v) ys)) - -sortBy :: (a -> a -> Ordering) -> PSQ k a -> PSQ k a -sortBy cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` snd) xs) - -sortByKeys :: (k -> k -> Ordering) -> PSQ k a -> PSQ k a -sortByKeys cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` fst) xs) - --- | Given a measure in form of a pseudo-peano-natural number, --- determine the approximate minimum. This is designed to stop --- even traversing the list as soon as we find any element with --- measure 'ZeroOrOne'. --- --- Always returns a one-element queue (except if the queue is --- empty, then we return an empty queue again). --- -dminimumBy :: (a -> Degree) -> PSQ k a -> PSQ k a -dminimumBy _ (PSQ []) = PSQ [] -dminimumBy sel (PSQ (x : xs)) = go (sel (snd x)) x xs - where - go ZeroOrOne v _ = PSQ [v] - go _ v [] = PSQ [v] - go c v (y : ys) = case compare c d of - LT -> go c v ys - EQ -> go c v ys - GT -> go d y ys - where - d = sel (snd y) - -minimumBy :: (a -> Int) -> PSQ k a -> PSQ k a -minimumBy sel (PSQ xs) = - PSQ [snd (S.minimumBy (comparing fst) (S.map (\ x -> (sel (snd x), x)) xs))] - --- | Will partition the list according to the predicate. 
If --- there is any element that satisfies the precidate, then only --- the elements satisfying the predicate are returned. --- Otherwise, the rest is returned. --- -prefer :: (a -> Bool) -> PSQ k a -> PSQ k a -prefer p (PSQ xs) = - let - (pro, con) = S.partition (p . snd) xs - in - if S.null pro then PSQ con else PSQ pro - --- | Variant of 'prefer' that takes a continuation for the case --- that there are none of the desired elements. -preferOrElse :: (a -> Bool) -> (PSQ k a -> PSQ k a) -> PSQ k a -> PSQ k a -preferOrElse p k (PSQ xs) = - let - (pro, con) = S.partition (p . snd) xs - in - if S.null pro then k (PSQ con) else PSQ pro - --- | Variant of 'prefer' that takes a predicate on the keys --- rather than on the values. --- -preferByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a -preferByKeys p (PSQ xs) = - let - (pro, con) = S.partition (p . fst) xs - in - if S.null pro then PSQ con else PSQ pro - -filterKeys :: (k -> Bool) -> PSQ k a -> PSQ k a -filterKeys p (PSQ xs) = PSQ (S.filter (p . fst) xs) - -filter :: (a -> Bool) -> PSQ k a -> PSQ k a -filter p (PSQ xs) = PSQ (S.filter (p . snd) xs) - -length :: PSQ k a -> Int -length (PSQ xs) = S.length xs - --- | Approximation of the branching degree. --- --- This is designed for computing the branching degree of a goal choice --- node. If the degree is 0 or 1, it is always good to take that goal, --- because we can either abort immediately, or have no other choice anyway. --- --- So we do not actually want to compute the full degree (which is --- somewhat costly) in cases where we have such an easy choice. --- -data Degree = ZeroOrOne | Two | Other - deriving (Show, Eq) - -instance Ord Degree where - compare ZeroOrOne _ = LT -- lazy approximation - compare _ ZeroOrOne = GT -- approximation - compare Two Two = EQ - compare Two Other = LT - compare Other Two = GT - compare Other Other = EQ - -degree :: PSQ k a -> Degree -degree (PSQ []) = ZeroOrOne -degree (PSQ [_]) = ZeroOrOne -degree (PSQ [_, _]) = Two -degree (PSQ _) = Other - -null :: PSQ k a -> Bool -null (PSQ xs) = S.null xs - -isZeroOrOne :: PSQ k a -> Bool -isZeroOrOne (PSQ []) = True -isZeroOrOne (PSQ [_]) = True -isZeroOrOne _ = False - -firstOnly :: PSQ k a -> PSQ k a -firstOnly (PSQ []) = PSQ [] -firstOnly (PSQ (x : _)) = PSQ [x] - -toList :: PSQ k a -> [(k, a)] -toList (PSQ xs) = xs - -union :: PSQ k a -> PSQ k a -> PSQ k a -union (PSQ xs) (PSQ ys) = PSQ (xs ++ ys) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Solver.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Solver.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Solver.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,100 +0,0 @@ -module Distribution.Client.Dependency.Modular.Solver - ( SolverConfig(..) 
- , solve - ) where - -import Data.Map as M - -import Distribution.Compiler (CompilerInfo) - -import Distribution.Client.PkgConfigDb (PkgConfigDb) - -import Distribution.Client.Dependency.Types - -import Distribution.Client.Dependency.Modular.Assignment -import Distribution.Client.Dependency.Modular.Builder -import Distribution.Client.Dependency.Modular.Cycles -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Explore -import Distribution.Client.Dependency.Modular.Index -import Distribution.Client.Dependency.Modular.Log -import Distribution.Client.Dependency.Modular.Message -import Distribution.Client.Dependency.Modular.Package -import qualified Distribution.Client.Dependency.Modular.Preference as P -import Distribution.Client.Dependency.Modular.Validate -import Distribution.Client.Dependency.Modular.Linking - --- | Various options for the modular solver. -data SolverConfig = SolverConfig { - preferEasyGoalChoices :: Bool, - independentGoals :: Bool, - avoidReinstalls :: Bool, - shadowPkgs :: Bool, - strongFlags :: Bool, - maxBackjumps :: Maybe Int -} - --- | Run all solver phases. --- --- In principle, we have a valid tree after 'validationPhase', which --- means that every 'Done' node should correspond to valid solution. --- --- There is one exception, though, and that is cycle detection, which --- has been added relatively recently. Cycles are only removed directly --- before exploration. --- --- Semantically, there is no difference. Cycle detection, as implemented --- now, only occurs for 'Done' nodes we encounter during exploration, --- and cycle detection itself does not change the shape of the tree, --- it only marks some 'Done' nodes as 'Fail', if they contain cyclic --- solutions. --- --- There is a tiny performance impact, however, in doing cycle detection --- directly after validation. Probably because cycle detection maintains --- some information, and the various reorderings implemented by --- 'preferencesPhase' and 'heuristicsPhase' are ever so slightly more --- costly if that information is already around during the reorderings. --- --- With the current positioning directly before the 'explorePhase', there --- seems to be no statistically significant performance impact of cycle --- detection in the common case where there are no cycles. --- -solve :: SolverConfig -> -- ^ solver parameters - CompilerInfo -> - Index -> -- ^ all available packages as an index - PkgConfigDb -> -- ^ available pkg-config pkgs - (PN -> PackagePreferences) -> -- ^ preferences - Map PN [LabeledPackageConstraint] -> -- ^ global constraints - [PN] -> -- ^ global goals - Log Message (Assignment, RevDepMap) -solve sc cinfo idx pkgConfigDB userPrefs userConstraints userGoals = - explorePhase $ - detectCyclesPhase$ - heuristicsPhase $ - preferencesPhase $ - validationPhase $ - prunePhase $ - buildPhase - where - explorePhase = backjumpAndExplore - heuristicsPhase = (if preferEasyGoalChoices sc - then P.preferEasyGoalChoices -- also leaves just one choice - else P.firstGoal) . -- after doing goal-choice heuristics, commit to the first choice (saves space) - P.deferWeakFlagChoices . - P.deferSetupChoices . - P.preferBaseGoalChoice . - P.preferLinked - preferencesPhase = P.preferPackagePreferences userPrefs - validationPhase = P.enforceManualFlags . -- can only be done after user constraints - P.enforcePackageConstraints userConstraints . - P.enforceSingleInstanceRestriction . - validateLinking idx . 
- validateTree cinfo idx pkgConfigDB - prunePhase = (if avoidReinstalls sc then P.avoidReinstalls (const True) else id) . - -- packages that can never be "upgraded": - P.requireInstalled (`elem` [ PackageName "base" - , PackageName "ghc-prim" - , PackageName "integer-gmp" - , PackageName "integer-simple" - ]) - buildPhase = addLinking $ buildTree idx (independentGoals sc) userGoals diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Tree.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Tree.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Tree.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Tree.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,168 +0,0 @@ -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} -module Distribution.Client.Dependency.Modular.Tree - ( FailReason(..) - , POption(..) - , Tree(..) - , TreeF(..) - , ana - , cata - , choices - , dchoices - , inn - , innM - , para - , trav - , zeroOrOneChoices - ) where - -import Control.Monad hiding (mapM, sequence) -import Data.Foldable -import Data.Traversable -import Prelude hiding (foldr, mapM, sequence) - -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package -import Distribution.Client.Dependency.Modular.PSQ (PSQ) -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import Distribution.Client.Dependency.Modular.Version -import Distribution.Client.Dependency.Types ( ConstraintSource(..) ) - --- | Type of the search tree. Inlining the choice nodes for now. -data Tree a = - PChoice QPN a (PSQ POption (Tree a)) - | FChoice QFN a Bool Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's weak/trivial, second Bool whether it's manual - | SChoice QSN a Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's trivial - | GoalChoice (PSQ (OpenGoal ()) (Tree a)) -- PSQ should never be empty - | Done RevDepMap - | Fail (ConflictSet QPN) FailReason - deriving (Eq, Show, Functor) - -- Above, a choice is called trivial if it clearly does not matter. The - -- special case of triviality we actually consider is if there are no new - -- dependencies introduced by this node. - -- - -- A (flag) choice is called weak if we do want to defer it. This is the - -- case for flags that should be implied by what's currently installed on - -- the system, as opposed to flags that are used to explicitly enable or - -- disable some functionality. - --- | A package option is a package instance with an optional linking annotation --- --- The modular solver has a number of package goals to solve for, and can only --- pick a single package version for a single goal. In order to allow to --- install multiple versions of the same package as part of a single solution --- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both --- be qualified goals for @P@, allowing to pick a difference version of package --- @P@ for @0.P@ and @1.P@. --- --- Linking is an essential part of this story. In addition to picking a specific --- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or --- vice versa). 
Teans that @1.P@ and @0.P@ really must be the very same package --- (and hence must have the same build time configuration, and their --- dependencies must also be the exact same). --- --- See for details. -data POption = POption I (Maybe PP) - deriving (Eq, Show) - -data FailReason = InconsistentInitialConstraints - | Conflicting [Dep QPN] - | CannotInstall - | CannotReinstall - | Shadowed - | Broken - | GlobalConstraintVersion VR ConstraintSource - | GlobalConstraintInstalled ConstraintSource - | GlobalConstraintSource ConstraintSource - | GlobalConstraintFlag ConstraintSource - | ManualFlag - | MalformedFlagChoice QFN - | MalformedStanzaChoice QSN - | EmptyGoalChoice - | Backjump - | MultipleInstances - | DependenciesNotLinked String - | CyclicDependencies - deriving (Eq, Show) - --- | Functor for the tree type. -data TreeF a b = - PChoiceF QPN a (PSQ POption b) - | FChoiceF QFN a Bool Bool (PSQ Bool b) - | SChoiceF QSN a Bool (PSQ Bool b) - | GoalChoiceF (PSQ (OpenGoal ()) b) - | DoneF RevDepMap - | FailF (ConflictSet QPN) FailReason - deriving (Functor, Foldable, Traversable) - -out :: Tree a -> TreeF a (Tree a) -out (PChoice p i ts) = PChoiceF p i ts -out (FChoice p i b m ts) = FChoiceF p i b m ts -out (SChoice p i b ts) = SChoiceF p i b ts -out (GoalChoice ts) = GoalChoiceF ts -out (Done x ) = DoneF x -out (Fail c x ) = FailF c x - -inn :: TreeF a (Tree a) -> Tree a -inn (PChoiceF p i ts) = PChoice p i ts -inn (FChoiceF p i b m ts) = FChoice p i b m ts -inn (SChoiceF p i b ts) = SChoice p i b ts -inn (GoalChoiceF ts) = GoalChoice ts -inn (DoneF x ) = Done x -inn (FailF c x ) = Fail c x - -innM :: Monad m => TreeF a (m (Tree a)) -> m (Tree a) -innM (PChoiceF p i ts) = liftM (PChoice p i ) (sequence ts) -innM (FChoiceF p i b m ts) = liftM (FChoice p i b m) (sequence ts) -innM (SChoiceF p i b ts) = liftM (SChoice p i b ) (sequence ts) -innM (GoalChoiceF ts) = liftM (GoalChoice ) (sequence ts) -innM (DoneF x ) = return $ Done x -innM (FailF c x ) = return $ Fail c x - --- | Determines whether a tree is active, i.e., isn't a failure node. -active :: Tree a -> Bool -active (Fail _ _) = False -active _ = True - --- | Determines how many active choices are available in a node. Note that we --- count goal choices as having one choice, always. -choices :: Tree a -> Int -choices (PChoice _ _ ts) = P.length (P.filter active ts) -choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts) -choices (SChoice _ _ _ ts) = P.length (P.filter active ts) -choices (GoalChoice _ ) = 1 -choices (Done _ ) = 1 -choices (Fail _ _ ) = 0 - --- | Variant of 'choices' that only approximates the number of choices. -dchoices :: Tree a -> P.Degree -dchoices (PChoice _ _ ts) = P.degree (P.filter active ts) -dchoices (FChoice _ _ _ _ ts) = P.degree (P.filter active ts) -dchoices (SChoice _ _ _ ts) = P.degree (P.filter active ts) -dchoices (GoalChoice _ ) = P.ZeroOrOne -dchoices (Done _ ) = P.ZeroOrOne -dchoices (Fail _ _ ) = P.ZeroOrOne - --- | Variant of 'choices' that only approximates the number of choices. -zeroOrOneChoices :: Tree a -> Bool -zeroOrOneChoices (PChoice _ _ ts) = P.isZeroOrOne (P.filter active ts) -zeroOrOneChoices (FChoice _ _ _ _ ts) = P.isZeroOrOne (P.filter active ts) -zeroOrOneChoices (SChoice _ _ _ ts) = P.isZeroOrOne (P.filter active ts) -zeroOrOneChoices (GoalChoice _ ) = True -zeroOrOneChoices (Done _ ) = True -zeroOrOneChoices (Fail _ _ ) = True - --- | Catamorphism on trees. -cata :: (TreeF a b -> b) -> Tree a -> b -cata phi x = (phi . fmap (cata phi) . 
out) x - -trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b -trav psi x = cata (inn . psi) x - --- | Paramorphism on trees. -para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b -para phi = phi . fmap (\ x -> (para phi x, x)) . out - --- | Anamorphism on trees. -ana :: (b -> TreeF a b) -> b -> Tree a -ana psi = inn . fmap (ana psi) . psi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Validate.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Validate.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Validate.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Validate.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,269 +0,0 @@ -module Distribution.Client.Dependency.Modular.Validate (validateTree) where - --- Validation of the tree. --- --- The task here is to make sure all constraints hold. After validation, any --- assignment returned by exploration of the tree should be a complete valid --- assignment, i.e., actually constitute a solution. - -import Control.Applicative -import Control.Monad.Reader hiding (sequence) -import Data.List as L -import Data.Map as M -import Data.Set as S -import Data.Traversable -import Prelude hiding (sequence) - -import Language.Haskell.Extension (Extension, Language) - -import Distribution.Compiler (CompilerInfo(..)) - -import Distribution.Client.Dependency.Modular.Assignment -import Distribution.Client.Dependency.Modular.Dependency -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Index -import Distribution.Client.Dependency.Modular.Package -import qualified Distribution.Client.Dependency.Modular.PSQ as P -import Distribution.Client.Dependency.Modular.Tree -import Distribution.Client.Dependency.Modular.Version (VR) - -import Distribution.Client.ComponentDeps (Component) -import Distribution.Client.PkgConfigDb (PkgConfigDb, pkgConfigPkgIsPresent) - --- In practice, most constraints are implication constraints (IF we have made --- a number of choices, THEN we also have to ensure that). We call constraints --- that for which the preconditions are fulfilled ACTIVE. We maintain a set --- of currently active constraints that we pass down the node. --- --- We aim at detecting inconsistent states as early as possible. --- --- Whenever we make a choice, there are two things that need to happen: --- --- (1) We must check that the choice is consistent with the currently --- active constraints. --- --- (2) The choice increases the set of active constraints. For the new --- active constraints, we must check that they are consistent with --- the current state. --- --- We can actually merge (1) and (2) by saying the the current choice is --- a new active constraint, fixing the choice. --- --- If a test fails, we have detected an inconsistent state. We can --- disable the current subtree and do not have to traverse it any further. --- --- We need a good way to represent the current state, i.e., the current --- set of active constraints. Since the main situation where we have to --- search in it is (1), it seems best to store the state by package: for --- every package, we store which versions are still allowed. If for any --- package, we have inconsistent active constraints, we can also stop. 
--- This is a particular way to read task (2): --- --- (2, weak) We only check if the new constraints are consistent with --- the choices we've already made, and add them to the active set. --- --- (2, strong) We check if the new constraints are consistent with the --- choices we've already made, and the constraints we already have. --- --- It currently seems as if we're implementing the weak variant. However, --- when used together with 'preferEasyGoalChoices', we will find an --- inconsistent state in the very next step. --- --- What do we do about flags? --- --- Like for packages, we store the flag choices we have already made. --- Now, regarding (1), we only have to test whether we've decided the --- current flag before. Regarding (2), the interesting bit is in discovering --- the new active constraints. To this end, we look up the constraints for --- the package the flag belongs to, and traverse its flagged dependencies. --- Wherever we find the flag in question, we start recording dependencies --- underneath as new active dependencies. If we encounter other flags, we --- check if we've chosen them already and either proceed or stop. - --- | The state needed during validation. -data ValidateState = VS { - supportedExt :: Extension -> Bool, - supportedLang :: Language -> Bool, - presentPkgs :: PN -> VR -> Bool, - index :: Index, - saved :: Map QPN (FlaggedDeps Component QPN), -- saved, scoped, dependencies - pa :: PreAssignment, - qualifyOptions :: QualifyOptions -} - -type Validate = Reader ValidateState - -validate :: Tree QGoalReason -> Validate (Tree QGoalReason) -validate = cata go - where - go :: TreeF QGoalReason (Validate (Tree QGoalReason)) -> Validate (Tree QGoalReason) - - go (PChoiceF qpn gr ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn) ts) - go (FChoiceF qfn gr b m ts) = - do - -- Flag choices may occur repeatedly (because they can introduce new constraints - -- in various places). However, subsequent choices must be consistent. We thereby - -- collapse repeated flag choice nodes. - PA _ pfa _ <- asks pa -- obtain current flag-preassignment - case M.lookup qfn pfa of - Just rb -> -- flag has already been assigned; collapse choice to the correct branch - case P.lookup rb ts of - Just t -> goF qfn rb t - Nothing -> return $ Fail (varToConflictSet (F qfn)) (MalformedFlagChoice qfn) - Nothing -> -- flag choice is new, follow both branches - FChoice qfn gr b m <$> sequence (P.mapWithKey (goF qfn) ts) - go (SChoiceF qsn gr b ts) = - do - -- Optional stanza choices are very similar to flag choices. - PA _ _ psa <- asks pa -- obtain current stanza-preassignment - case M.lookup qsn psa of - Just rb -> -- stanza choice has already been made; collapse choice to the correct branch - case P.lookup rb ts of - Just t -> goS qsn rb t - Nothing -> return $ Fail (varToConflictSet (S qsn)) (MalformedStanzaChoice qsn) - Nothing -> -- stanza choice is new, follow both branches - SChoice qsn gr b <$> sequence (P.mapWithKey (goS qsn) ts) - - -- We don't need to do anything for goal choices or failure nodes. - go (GoalChoiceF ts) = GoalChoice <$> sequence ts - go (DoneF rdm ) = pure (Done rdm) - go (FailF c fr ) = pure (Fail c fr) - - -- What to do for package nodes ... 
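-- [Editor's sketch] The validation code just above collapses a repeated flag
-- choice to the branch matching the existing pre-assignment, and fails with
-- MalformedFlagChoice when that branch is missing.  Below is a hedged,
-- standalone toy illustration of that rule; ToyTree, collapseFlag and the
-- String error message are invented stand-ins, not solver types.
import qualified Data.Map as M

data ToyTree = ToyBranches (M.Map Bool ToyTree)
             | ToyDone
             | ToyFail String
  deriving Show

collapseFlag :: String -> M.Map String Bool -> M.Map Bool ToyTree -> ToyTree
collapseFlag flag assigned branches =
  case M.lookup flag assigned of
    Nothing -> ToyBranches branches     -- flag not decided yet: keep both branches
    Just b  -> case M.lookup b branches of
      Just t  -> t                      -- collapse to the branch already chosen
      Nothing -> ToyFail "malformed flag choice"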
- goP :: QPN -> POption -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goP qpn@(Q _pp pn) (POption i _) r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - idx <- asks index -- obtain the index - svd <- asks saved -- obtain saved dependencies - qo <- asks qualifyOptions - -- obtain dependencies and index-dictated exclusions introduced by the choice - let (PInfo deps _ mfr) = idx ! pn ! i - -- qualify the deps in the current scope - let qdeps = qualifyDeps qo qpn deps - -- the new active constraints are given by the instance we have chosen, - -- plus the dependency information we have for that instance - let newactives = Dep qpn (Fixed i (P qpn)) : L.map (resetVar (P qpn)) (extractDeps pfa psa qdeps) - -- We now try to extend the partial assignment with the new active constraints. - let mnppa = extend extSupported langSupported pkgPresent (P qpn) ppa newactives - -- In case we continue, we save the scoped dependencies - let nsvd = M.insert qpn qdeps svd - case mfr of - Just fr -> -- The index marks this as an invalid choice. We can stop. - return (Fail (varToConflictSet (P qpn)) fr) - _ -> case mnppa of - Left (c, d) -> -- We have an inconsistency. We can stop. - return (Fail c (Conflicting d)) - Right nppa -> -- We have an updated partial assignment for the recursive validation. - local (\ s -> s { pa = PA nppa pfa psa, saved = nsvd }) r - - -- What to do for flag nodes ... - goF :: QFN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goF qfn@(FN (PI qpn _i) _f) b r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - svd <- asks saved -- obtain saved dependencies - -- Note that there should be saved dependencies for the package in question, - -- because while building, we do not choose flags before we see the packages - -- that define them. - let qdeps = svd ! qpn - -- We take the *saved* dependencies, because these have been qualified in the - -- correct scope. - -- - -- Extend the flag assignment - let npfa = M.insert qfn b pfa - -- We now try to get the new active dependencies we might learn about because - -- we have chosen a new flag. - let newactives = extractNewDeps (F qfn) b npfa psa qdeps - -- As in the package case, we try to extend the partial assignment. - case extend extSupported langSupported pkgPresent (F qfn) ppa newactives of - Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found - Right nppa -> local (\ s -> s { pa = PA nppa npfa psa }) r - - -- What to do for stanza nodes (similar to flag nodes) ... 
- goS :: QSN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) - goS qsn@(SN (PI qpn _i) _f) b r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - svd <- asks saved -- obtain saved dependencies - -- Note that there should be saved dependencies for the package in question, - -- because while building, we do not choose flags before we see the packages - -- that define them. - let qdeps = svd ! qpn - -- We take the *saved* dependencies, because these have been qualified in the - -- correct scope. - -- - -- Extend the flag assignment - let npsa = M.insert qsn b psa - -- We now try to get the new active dependencies we might learn about because - -- we have chosen a new flag. - let newactives = extractNewDeps (S qsn) b pfa npsa qdeps - -- As in the package case, we try to extend the partial assignment. - case extend extSupported langSupported pkgPresent (S qsn) ppa newactives of - Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found - Right nppa -> local (\ s -> s { pa = PA nppa pfa npsa }) r - --- | We try to extract as many concrete dependencies from the given flagged --- dependencies as possible. We make use of all the flag knowledge we have --- already acquired. -extractDeps :: FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] -extractDeps fa sa deps = do - d <- deps - case d of - Simple sd _ -> return sd - Flagged qfn _ td fd -> case M.lookup qfn fa of - Nothing -> mzero - Just True -> extractDeps fa sa td - Just False -> extractDeps fa sa fd - Stanza qsn td -> case M.lookup qsn sa of - Nothing -> mzero - Just True -> extractDeps fa sa td - Just False -> [] - --- | We try to find new dependencies that become available due to the given --- flag or stanza choice. We therefore look for the choice in question, and then call --- 'extractDeps' for everything underneath. -extractNewDeps :: Var QPN -> Bool -> FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] -extractNewDeps v b fa sa = go - where - go :: FlaggedDeps comp QPN -> [Dep QPN] -- Type annotation necessary (polymorphic recursion) - go deps = do - d <- deps - case d of - Simple _ _ -> mzero - Flagged qfn' _ td fd - | v == F qfn' -> L.map (resetVar v) $ - if b then extractDeps fa sa td else extractDeps fa sa fd - | otherwise -> case M.lookup qfn' fa of - Nothing -> mzero - Just True -> go td - Just False -> go fd - Stanza qsn' td - | v == S qsn' -> L.map (resetVar v) $ - if b then extractDeps fa sa td else [] - | otherwise -> case M.lookup qsn' sa of - Nothing -> mzero - Just True -> go td - Just False -> [] - --- | Interface. 
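-- [Editor's sketch] extractDeps above walks the flagged dependencies and keeps
-- only those whose guarding flags have already been decided the right way.
-- A hedged, standalone toy analogue follows; ToyDep and ToyFlagged are
-- invented stand-ins for the real Dep/FlaggedDeps types, and stanzas are
-- omitted for brevity.
import qualified Data.Map as M

data ToyDep     = ToyDep String                                -- a concrete dependency
data ToyFlagged = ToySimple ToyDep                             -- unconditional dependency
                | ToyFlagged String [ToyFlagged] [ToyFlagged]  -- flag name, then-deps, else-deps

toyExtract :: M.Map String Bool -> [ToyFlagged] -> [ToyDep]
toyExtract fa = concatMap go
  where
    go (ToySimple d)        = [d]
    go (ToyFlagged f ts es) = case M.lookup f fa of
      Nothing    -> []                  -- flag undecided: nothing becomes active yet
      Just True  -> toyExtract fa ts
      Just False -> toyExtract fa es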
-validateTree :: CompilerInfo -> Index -> PkgConfigDb -> Tree QGoalReason -> Tree QGoalReason -validateTree cinfo idx pkgConfigDb t = runReader (validate t) VS { - supportedExt = maybe (const True) -- if compiler has no list of extensions, we assume everything is supported - (\ es -> let s = S.fromList es in \ x -> S.member x s) - (compilerInfoExtensions cinfo) - , supportedLang = maybe (const True) - (flip L.elem) -- use list lookup because language list is small and no Ord instance - (compilerInfoLanguages cinfo) - , presentPkgs = pkgConfigPkgIsPresent pkgConfigDb - , index = idx - , saved = M.empty - , pa = PA M.empty M.empty M.empty - , qualifyOptions = defaultQualifyOptions idx - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Var.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Var.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Var.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Var.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Client.Dependency.Modular.Var ( - Var(..) - , simplifyVar - , showVar - , varPI - ) where - -import Prelude hiding (pi) - -import Distribution.Client.Dependency.Modular.Flag -import Distribution.Client.Dependency.Modular.Package - -{------------------------------------------------------------------------------- - Variables --------------------------------------------------------------------------------} - --- | The type of variables that play a role in the solver. --- Note that the tree currently does not use this type directly, --- and rather has separate tree nodes for the different types of --- variables. This fits better with the fact that in most cases, --- these have to be treated differently. -data Var qpn = P qpn | F (FN qpn) | S (SN qpn) - deriving (Eq, Ord, Show, Functor) - --- | For computing conflict sets, we map flag choice vars to a --- single flag choice. This means that all flag choices are treated --- as interdependent. So if one flag of a package ends up in a --- conflict set, then all flags are being treated as being part of --- the conflict set. 
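-- [Editor's sketch] A hedged illustration of the flag-collapsing idea in the
-- comment above, with invented ToyVar constructors in place of the solver's
-- Var/FN/PI types: after simplification, any two flags of the same package
-- map to the same variable, so a conflict on one flag implicates them all.
data ToyVar = ToyP String           -- package variable
            | ToyF String String    -- (package, flag) variable
  deriving (Eq, Show)

toySimplify :: ToyVar -> ToyVar
toySimplify (ToyP p)   = ToyP p
toySimplify (ToyF p _) = ToyF p "flag"

-- toySimplify (ToyF "foo" "debug") == toySimplify (ToyF "foo" "threaded")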
-simplifyVar :: Var qpn -> Var qpn -simplifyVar (P qpn) = P qpn -simplifyVar (F (FN pi _)) = F (FN pi (mkFlag "flag")) -simplifyVar (S qsn) = S qsn - -showVar :: Var QPN -> String -showVar (P qpn) = showQPN qpn -showVar (F qfn) = showQFN qfn -showVar (S qsn) = showQSN qsn - --- | Extract the package instance from a Var -varPI :: Var QPN -> (QPN, Maybe I) -varPI (P qpn) = (qpn, Nothing) -varPI (F (FN (PI qpn i) _)) = (qpn, Just i) -varPI (S (SN (PI qpn i) _)) = (qpn, Just i) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Version.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -module Distribution.Client.Dependency.Modular.Version - ( Ver - , VR - , anyVR - , checkVR - , eqVR - , showVer - , showVR - , simplifyVR - , (.&&.) - , (.||.) - ) where - -import qualified Distribution.Version as CV -- from Cabal -import Distribution.Text -- from Cabal - --- | Preliminary type for versions. -type Ver = CV.Version - --- | String representation of a version. -showVer :: Ver -> String -showVer = display - --- | Version range. Consists of a lower and upper bound. -type VR = CV.VersionRange - --- | String representation of a version range. -showVR :: VR -> String -showVR = display - --- | Unconstrained version range. -anyVR :: VR -anyVR = CV.anyVersion - --- | Version range fixing a single version. -eqVR :: Ver -> VR -eqVR = CV.thisVersion - --- | Intersect two version ranges. -(.&&.) :: VR -> VR -> VR -(.&&.) = CV.intersectVersionRanges - --- | Union of two version ranges. -(.||.) :: VR -> VR -> VR -(.||.) = CV.unionVersionRanges - --- | Simplify a version range. -simplifyVR :: VR -> VR -simplifyVR = CV.simplifyVersionRange - --- | Checking a version against a version range. -checkVR :: VR -> Ver -> Bool -checkVR = flip CV.withinRange diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Modular.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -module Distribution.Client.Dependency.Modular - ( modularResolver, SolverConfig(..)) where - --- Here, we try to map between the external cabal-install solver --- interface and the internal interface that the solver actually --- expects. There are a number of type conversions to perform: we --- have to convert the package indices to the uniform index used --- by the solver; we also have to convert the initial constraints; --- and finally, we have to convert back the resulting install --- plan. 
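-- [Editor's sketch] One of the conversions described above is turning the flat
-- list of labeled constraints into a map keyed by package name, which
-- modularResolver below does with Data.Map.fromListWith (++).  A hedged,
-- standalone toy version; ToyConstraint is an invented stand-in for
-- LabeledPackageConstraint.
import qualified Data.Map as M

data ToyConstraint = ToyConstraint { toyTarget :: String, toyDetail :: String }

groupByTarget :: [ToyConstraint] -> M.Map String [ToyConstraint]
groupByTarget = M.fromListWith (++) . map (\c -> (toyTarget c, [c]))
-- All constraints naming the same package end up collected in a single map entry.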
- -import Data.Map as M - ( fromListWith ) -import Distribution.Client.Dependency.Modular.Assignment - ( Assignment, toCPs ) -import Distribution.Client.Dependency.Modular.Dependency - ( RevDepMap ) -import Distribution.Client.Dependency.Modular.ConfiguredConversion - ( convCP ) -import Distribution.Client.Dependency.Modular.IndexConversion - ( convPIs ) -import Distribution.Client.Dependency.Modular.Log - ( logToProgress ) -import Distribution.Client.Dependency.Modular.Package - ( PN ) -import Distribution.Client.Dependency.Modular.Solver - ( SolverConfig(..), solve ) -import Distribution.Client.Dependency.Types - ( DependencyResolver, ResolverPackage - , PackageConstraint(..), unlabelPackageConstraint ) -import Distribution.System - ( Platform(..) ) - --- | Ties the two worlds together: classic cabal-install vs. the modular --- solver. Performs the necessary translations before and after. -modularResolver :: SolverConfig -> DependencyResolver -modularResolver sc (Platform arch os) cinfo iidx sidx pkgConfigDB pprefs pcs pns = - fmap (uncurry postprocess) $ -- convert install plan - logToProgress (maxBackjumps sc) $ -- convert log format into progress format - solve sc cinfo idx pkgConfigDB pprefs gcs pns - where - -- Indices have to be converted into solver-specific uniform index. - idx = convPIs os arch cinfo (shadowPkgs sc) (strongFlags sc) iidx sidx - -- Constraints have to be converted into a finite map indexed by PN. - gcs = M.fromListWith (++) (map pair pcs) - where - pair lpc = (pcName $ unlabelPackageConstraint lpc, [lpc]) - - -- Results have to be converted into an install plan. - postprocess :: Assignment -> RevDepMap -> [ResolverPackage] - postprocess a rdm = map (convCP iidx sidx) (toCPs a rdm) - - -- Helper function to extract the PN from a constraint. - pcName :: PackageConstraint -> PN - pcName (PackageConstraintVersion pn _) = pn - pcName (PackageConstraintInstalled pn ) = pn - pcName (PackageConstraintSource pn ) = pn - pcName (PackageConstraintFlags pn _) = pn - pcName (PackageConstraintStanzas pn _) = pn diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Constraints.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Constraints.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Constraints.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Constraints.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,599 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency.TopDown.Constraints --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- A set of satisfiable constraints on a set of packages. 
------------------------------------------------------------------------------ -module Distribution.Client.Dependency.TopDown.Constraints ( - Constraints, - empty, - packages, - choices, - isPaired, - - addTarget, - constrain, - Satisfiable(..), - conflicting, - ) where - -import Distribution.Client.Dependency.TopDown.Types -import qualified Distribution.Client.PackageIndex as PackageIndex -import Distribution.Client.PackageIndex - ( PackageIndex ) -import Distribution.Package - ( PackageName, PackageId, PackageIdentifier(..) - , Package(packageId), packageName, packageVersion - , Dependency ) -import Distribution.Version - ( Version ) -import Distribution.Client.Utils - ( mergeBy, MergeResult(..) ) - -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( Monoid(mempty) ) -#endif -import Data.Either - ( partitionEithers ) -import qualified Data.Map as Map -import Data.Map (Map) -import qualified Data.Set as Set -import Data.Set (Set) -import Control.Exception - ( assert ) - - --- | A set of satisfiable constraints on a set of packages. --- --- The 'Constraints' type keeps track of a set of targets (identified by --- package name) that we know that we need. It also keeps track of a set of --- constraints over all packages in the environment. --- --- It maintains the guarantee that, for the target set, the constraints are --- satisfiable, meaning that there is at least one instance available for each --- package name that satisfies the constraints on that package name. --- --- Note that it is possible to over-constrain a package in the environment that --- is not in the target set -- the satisfiability guarantee is only maintained --- for the target set. This is useful because it allows us to exclude packages --- without needing to know if it would ever be needed or not (e.g. allows --- excluding broken installed packages). --- --- Adding a constraint for a target package can fail if it would mean that --- there are no remaining choices. --- --- Adding a constraint for package that is not a target never fails. --- --- Adding a new target package can fail if that package already has conflicting --- constraints. --- -data Constraints installed source reason - = Constraints - - -- | Targets that we know we need. This is the set for which we - -- guarantee the constraints are satisfiable. - !(Set PackageName) - - -- | The available/remaining set. These are packages that have available - -- choices remaining. This is guaranteed to cover the target packages, - -- but can also cover other packages in the environment. New targets can - -- only be added if there are available choices remaining for them. - !(PackageIndex (InstalledOrSource installed source)) - - -- | The excluded set. Choices that we have excluded by applying - -- constraints. Excluded choices are tagged with the reason. - !(PackageIndex (ExcludedPkg (InstalledOrSource installed source) reason)) - - -- | Paired choices, this is an ugly hack. - !(Map PackageName (Version, Version)) - - -- | Purely for the invariant, we keep a copy of the original index - !(PackageIndex (InstalledOrSource installed source)) - - --- | Reasons for excluding all, or some choices for a package version. --- --- Each package version can have a source instance, an installed instance or --- both. We distinguish reasons for constraints that excluded both instances, --- from reasons for constraints that excluded just one instance. 
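-- [Editor's sketch] The invariant and transitionsTo checks further below lean
-- on mergeBy/MergeResult from Distribution.Client.Utils to walk two sorted
-- package lists in lockstep.  A hedged, standalone toy analogue of that merge
-- pattern (assuming both inputs are sorted by the comparison; ToyMerge and
-- toyMergeBy are invented names, not the real API):
data ToyMerge a b = ToyOnlyInLeft a | ToyOnlyInRight b | ToyInBoth a b
  deriving Show

toyMergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [ToyMerge a b]
toyMergeBy _   []     ys     = map ToyOnlyInRight ys
toyMergeBy _   xs     []     = map ToyOnlyInLeft xs
toyMergeBy cmp (x:xs) (y:ys) = case cmp x y of
  LT -> ToyOnlyInLeft  x   : toyMergeBy cmp xs     (y:ys)
  EQ -> ToyInBoth      x y : toyMergeBy cmp xs     ys
  GT -> ToyOnlyInRight y   : toyMergeBy cmp (x:xs) ys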
--- -data ExcludedPkg pkg reason - = ExcludedPkg pkg - [reason] -- ^ reasons for excluding both source and installed instances - [reason] -- ^ reasons for excluding the installed instance - [reason] -- ^ reasons for excluding the source instance - -instance Package pkg => Package (ExcludedPkg pkg reason) where - packageId (ExcludedPkg p _ _ _) = packageId p - - --- | There is a conservation of packages property. Packages are never gained or --- lost, they just transfer from the remaining set to the excluded set. --- -invariant :: (Package installed, Package source) - => Constraints installed source a -> Bool -invariant (Constraints targets available excluded _ original) = - - -- Relationship between available, excluded and original - all check merged - - -- targets is a subset of available - && all (PackageIndex.elemByPackageName available) (Set.elems targets) - - where - merged = mergeBy (\a b -> packageId a `compare` mergedPackageId b) - (PackageIndex.allPackages original) - (mergeBy (\a b -> packageId a `compare` packageId b) - (PackageIndex.allPackages available) - (PackageIndex.allPackages excluded)) - where - mergedPackageId (OnlyInLeft p ) = packageId p - mergedPackageId (OnlyInRight p) = packageId p - mergedPackageId (InBoth p _) = packageId p - - -- If the package was originally installed only, then - check (InBoth (InstalledOnly _) cur) = case cur of - -- now it's either still remaining as installed only - OnlyInLeft (InstalledOnly _) -> True - -- or it has been excluded - OnlyInRight (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True - _ -> False - - -- If the package was originally available only, then - check (InBoth (SourceOnly _) cur) = case cur of - -- now it's either still remaining as source only - OnlyInLeft (SourceOnly _) -> True - -- or it has been excluded - OnlyInRight (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True - _ -> False - - -- If the package was originally installed and source, then - check (InBoth (InstalledAndSource _ _) cur) = case cur of - -- We can have both remaining: - OnlyInLeft (InstalledAndSource _ _) -> True - - -- both excluded, in particular it can have had the just source or - -- installed excluded and later had both excluded so we do not mind if - -- the source or installed excluded is empty or non-empty. - OnlyInRight (ExcludedPkg (InstalledAndSource _ _) _ _ _) -> True - - -- the installed remaining and the source excluded: - InBoth (InstalledOnly _) - (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True - - -- the source remaining and the installed excluded: - InBoth (SourceOnly _) - (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True - _ -> False - - check _ = False - - --- | An update to the constraints can move packages between the two piles --- but not gain or loose packages. 
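-- [Editor's sketch] A hedged restatement of the conservation property stated
-- just above, which transitionsTo below checks: nothing may appear out of thin
-- air in the available pile and nothing may vanish from the excluded pile;
-- whatever leaves one must show up in the other.  ToyChange and conserves are
-- invented, simplified stand-ins for the real bookkeeping.
import Data.Either (partitionEithers)

data ToyChange a = ToyLost a | ToyGained a

conserves :: Eq a => [ToyChange a] -> [ToyChange a] -> Bool
conserves availableChanges excludedChanges =
       null availableGained
    && null excludedLost
    && availableLost == excludedGained
  where
    (availableLost, availableGained) = split availableChanges
    (excludedLost,  excludedGained)  = split excludedChanges
    split = partitionEithers . map toEither
    toEither (ToyLost x)   = Left x
    toEither (ToyGained x) = Right x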
-transitionsTo :: (Package installed, Package source) - => Constraints installed source a - -> Constraints installed source a -> Bool -transitionsTo constraints @(Constraints _ available excluded _ _) - constraints'@(Constraints _ available' excluded' _ _) = - - invariant constraints && invariant constraints' - && null availableGained && null excludedLost - && map (mapInstalledOrSource packageId packageId) availableLost - == map (mapInstalledOrSource packageId packageId) excludedGained - - where - (availableLost, availableGained) - = partitionEithers (foldr lostAndGained [] availableChange) - - (excludedLost, excludedGained) - = partitionEithers (foldr lostAndGained [] excludedChange) - - availableChange = - mergeBy (\a b -> packageId a `compare` packageId b) - (PackageIndex.allPackages available) - (PackageIndex.allPackages available') - - excludedChange = - mergeBy (\a b -> packageId a `compare` packageId b) - [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded ] - [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded' ] - - lostAndGained mr rest = case mr of - OnlyInLeft pkg -> Left pkg : rest - InBoth (InstalledAndSource pkg _) - (SourceOnly _) -> Left (InstalledOnly pkg) : rest - InBoth (InstalledAndSource _ pkg) - (InstalledOnly _) -> Left (SourceOnly pkg) : rest - InBoth (SourceOnly _) - (InstalledAndSource pkg _) -> Right (InstalledOnly pkg) : rest - InBoth (InstalledOnly _) - (InstalledAndSource _ pkg) -> Right (SourceOnly pkg) : rest - OnlyInRight pkg -> Right pkg : rest - _ -> rest - - mapInstalledOrSource f g pkg = case pkg of - InstalledOnly a -> InstalledOnly (f a) - SourceOnly b -> SourceOnly (g b) - InstalledAndSource a b -> InstalledAndSource (f a) (g b) - --- | We construct 'Constraints' with an initial 'PackageIndex' of all the --- packages available. --- -empty :: PackageIndex InstalledPackageEx - -> PackageIndex UnconfiguredPackage - -> Constraints InstalledPackageEx UnconfiguredPackage reason -empty installed source = - Constraints targets pkgs excluded pairs pkgs - where - targets = mempty - excluded = mempty - pkgs = PackageIndex.fromList - . map toInstalledOrSource - $ mergeBy (\a b -> packageId a `compare` packageId b) - (PackageIndex.allPackages installed) - (PackageIndex.allPackages source) - toInstalledOrSource (OnlyInLeft i ) = InstalledOnly i - toInstalledOrSource (OnlyInRight a) = SourceOnly a - toInstalledOrSource (InBoth i a) = InstalledAndSource i a - - -- pick up cases like base-3 and 4 where one version depends on the other: - pairs = Map.fromList - [ (name, (packageVersion pkgid1, packageVersion pkgid2)) - | [pkg1, pkg2] <- PackageIndex.allPackagesByName installed - , let name = packageName pkg1 - pkgid1 = packageId pkg1 - pkgid2 = packageId pkg2 - , any ((pkgid1==) . packageId) (sourceDeps pkg2) - || any ((pkgid2==) . packageId) (sourceDeps pkg1) ] - - --- | The package targets. --- -packages :: Constraints installed source reason - -> Set PackageName -packages (Constraints ts _ _ _ _) = ts - - --- | The package choices that are still available. 
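-- [Editor's sketch] A hedged toy version of the paired-version lookup that
-- isPaired just below performs, with plain Strings and Ints standing in for
-- PackageName and Version.  With pairs = fromList [("base", (3, 4))], asking
-- about ("base", 3) yields Just ("base", 4), the classic base-3/base-4 pairing.
import qualified Data.Map as M

toyIsPaired :: M.Map String (Int, Int) -> (String, Int) -> Maybe (String, Int)
toyIsPaired pairs (name, version) =
  case M.lookup name pairs of
    Just (v1, v2)
      | version == v1 -> Just (name, v2)
      | version == v2 -> Just (name, v1)
    _                 -> Nothing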
--- -choices :: Constraints installed source reason - -> PackageIndex (InstalledOrSource installed source) -choices (Constraints _ available _ _ _) = available - -isPaired :: Constraints installed source reason - -> PackageId -> Maybe PackageId -isPaired (Constraints _ _ _ pairs _) (PackageIdentifier name version) = - case Map.lookup name pairs of - Just (v1, v2) - | version == v1 -> Just (PackageIdentifier name v2) - | version == v2 -> Just (PackageIdentifier name v1) - _ -> Nothing - - -data Satisfiable constraints discarded reason - = Satisfiable constraints discarded - | Unsatisfiable - | ConflictsWith [(PackageId, [reason])] - - -addTarget :: (Package installed, Package source) - => PackageName - -> Constraints installed source reason - -> Satisfiable (Constraints installed source reason) - () reason -addTarget pkgname - constraints@(Constraints targets available excluded paired original) - - -- If it's already a target then there's no change - | pkgname `Set.member` targets - = Satisfiable constraints () - - -- If there is some possible choice available for this target then we're ok - | PackageIndex.elemByPackageName available pkgname - = let targets' = Set.insert pkgname targets - constraints' = Constraints targets' available excluded paired original - in assert (constraints `transitionsTo` constraints') $ - Satisfiable constraints' () - - -- If it's not available and it is excluded then we return the conflicts - | PackageIndex.elemByPackageName excluded pkgname - = ConflictsWith conflicts - - -- Otherwise, it's not available and it has not been excluded so the - -- package is simply completely unknown. - | otherwise - = Unsatisfiable - - where - conflicts = - [ (packageId pkg, reasons) - | let excludedChoices = PackageIndex.lookupPackageName excluded pkgname - , ExcludedPkg pkg isReasons iReasons sReasons <- excludedChoices - , let reasons = isReasons ++ iReasons ++ sReasons ] - - -constrain :: (Package installed, Package source) - => PackageName -- ^ which package to constrain - -> (Version -> Bool -> Bool) -- ^ the constraint test - -> reason -- ^ the reason for the constraint - -> Constraints installed source reason - -> Satisfiable (Constraints installed source reason) - [PackageId] reason -constrain pkgname constraint reason - constraints@(Constraints targets available excluded paired original) - - | pkgname `Set.member` targets && not anyRemaining - = if null conflicts then Unsatisfiable - else ConflictsWith conflicts - - | otherwise - = let constraints' = Constraints targets available' excluded' paired original - in assert (constraints `transitionsTo` constraints') $ - Satisfiable constraints' (map packageId newExcluded) - - where - -- This tells us if any packages would remain at all for this package name if - -- we applied this constraint. This amounts to checking if any package - -- satisfies the given constraint, including version range and installation - -- status. 
- -- - (available', excluded', newExcluded, anyRemaining, conflicts) = - updatePkgsStatus - available excluded - [] False [] - (mergeBy (\pkg pkg' -> packageVersion pkg `compare` packageVersion pkg') - (PackageIndex.lookupPackageName available pkgname) - (PackageIndex.lookupPackageName excluded pkgname)) - - testConstraint pkg = - let ver = packageVersion pkg in - case Map.lookup (packageName pkg) paired of - - Just (v1, v2) - | ver == v1 || ver == v2 - -> case pkg of - InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) - SourceOnly spkg -> SourceOnly (spkg, sOk) - InstalledAndSource ipkg spkg -> - InstalledAndSource (ipkg, iOk) (spkg, sOk) - where - iOk = constraint v1 True || constraint v2 True - sOk = constraint v1 False || constraint v2 False - - _ -> case pkg of - InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) - SourceOnly spkg -> SourceOnly (spkg, sOk) - InstalledAndSource ipkg spkg -> - InstalledAndSource (ipkg, iOk) (spkg, sOk) - where - iOk = constraint ver True - sOk = constraint ver False - - -- For the info about available and excluded versions of the package in - -- question, update the info given the current constraint - -- - -- We update the available package map and the excluded package map - -- we also collect: - -- * the change in available packages (for logging) - -- * whether there are any remaining choices - -- * any constraints that conflict with the current constraint - - updatePkgsStatus _ _ nePkgs ok cs _ - | seq nePkgs $ seq ok $ seq cs False = undefined - - updatePkgsStatus aPkgs ePkgs nePkgs ok cs [] - = (aPkgs, ePkgs, reverse nePkgs, ok, reverse cs) - - updatePkgsStatus aPkgs ePkgs nePkgs ok cs (pkg:pkgs) = - let (aPkgs', ePkgs', mnePkg, ok', mc) = updatePkgStatus aPkgs ePkgs pkg - nePkgs' = maybeCons mnePkg nePkgs - cs' = maybeCons mc cs - in updatePkgsStatus aPkgs' ePkgs' nePkgs' (ok' || ok) cs' pkgs - - maybeCons Nothing xs = xs - maybeCons (Just x) xs = x:xs - - - -- For the info about an available or excluded version of the package in - -- question, update the info given the current constraint. 
- -- - updatePkgStatus aPkgs ePkgs pkg = - case viewPackageStatus pkg of - AllAvailable (InstalledOnly (aiPkg, False)) -> - removeAvailable False - (InstalledOnly aiPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) - Nothing - - AllAvailable (SourceOnly (asPkg, False)) -> - removeAvailable False - (SourceOnly asPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) - Nothing - - AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, False)) -> - removeAvailable False - (InstalledAndSource aiPkg asPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledAndSource aiPkg asPkg) [reason] [] []) - Nothing - - AllAvailable (InstalledAndSource (aiPkg, True) (asPkg, False)) -> - removeAvailable True - (SourceOnly asPkg) - (PackageIndex.insert (InstalledOnly aiPkg)) - (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) - Nothing - - AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, True)) -> - removeAvailable True - (InstalledOnly aiPkg) - (PackageIndex.insert (SourceOnly asPkg)) - (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) - Nothing - - AllAvailable _ -> noChange True Nothing - - AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, False) _ _ srs) -> - removeAvailable False - (InstalledOnly aiPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledAndSource aiPkg esPkg) [reason] [] srs) - Nothing - - AvailableExcluded (_aiPkg, True) (ExcludedPkg (esPkg, False) _ _ srs) -> - addExtraExclusion True - (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) - Nothing - - AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, True) _ _ srs) -> - removeAvailable True - (InstalledOnly aiPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledAndSource aiPkg esPkg) [] [reason] srs) - (Just (pkgid, srs)) - - AvailableExcluded (_aiPkg, True) (ExcludedPkg (_esPkg, True) _ _ srs) -> - noChange True - (Just (pkgid, srs)) - - ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (asPkg, False) -> - removeAvailable False - (SourceOnly asPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledAndSource eiPkg asPkg) [reason] irs []) - Nothing - - ExcludedAvailable (ExcludedPkg (eiPkg, True) _ irs _) (asPkg, False) -> - removeAvailable False - (SourceOnly asPkg) - (PackageIndex.deletePackageId pkgid) - (ExcludedPkg (InstalledAndSource eiPkg asPkg) [] irs [reason]) - (Just (pkgid, irs)) - - ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (_asPkg, True) -> - addExtraExclusion True - (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) - Nothing - - ExcludedAvailable (ExcludedPkg (_eiPkg, True) _ irs _) (_asPkg, True) -> - noChange True - (Just (pkgid, irs)) - - AllExcluded (ExcludedPkg (InstalledOnly (eiPkg, False)) _ irs _) -> - addExtraExclusion False - (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) - Nothing - - AllExcluded (ExcludedPkg (InstalledOnly (_eiPkg, True)) _ irs _) -> - noChange False - (Just (pkgid, irs)) - - AllExcluded (ExcludedPkg (SourceOnly (esPkg, False)) _ _ srs) -> - addExtraExclusion False - (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) - Nothing - - AllExcluded (ExcludedPkg (SourceOnly (_esPkg, True)) _ _ srs) -> - noChange False - (Just (pkgid, srs)) - - AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, False)) isrs irs srs) -> - addExtraExclusion False - (ExcludedPkg (InstalledAndSource eiPkg esPkg) (reason:isrs) irs srs) - Nothing - - AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, True) (esPkg, 
False)) isrs irs srs) -> - addExtraExclusion False - (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs irs (reason:srs)) - (Just (pkgid, irs)) - - AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, True)) isrs irs srs) -> - addExtraExclusion False - (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs (reason:irs) srs) - (Just (pkgid, srs)) - - AllExcluded (ExcludedPkg (InstalledAndSource (_eiPkg, True) (_esPkg, True)) isrs irs srs) -> - noChange False - (Just (pkgid, isrs ++ irs ++ srs)) - - where - removeAvailable ok nePkg adjustAvailable ePkg c = - let aPkgs' = adjustAvailable aPkgs - ePkgs' = PackageIndex.insert ePkg ePkgs - in aPkgs' `seq` ePkgs' `seq` - (aPkgs', ePkgs', Just nePkg, ok, c) - - addExtraExclusion ok ePkg c = - let ePkgs' = PackageIndex.insert ePkg ePkgs - in ePkgs' `seq` - (aPkgs, ePkgs', Nothing, ok, c) - - noChange ok c = - (aPkgs, ePkgs, Nothing, ok, c) - - pkgid = case pkg of OnlyInLeft p -> packageId p - OnlyInRight p -> packageId p - InBoth p _ -> packageId p - - - viewPackageStatus - :: (Package installed, Package source) - => MergeResult (InstalledOrSource installed source) - (ExcludedPkg (InstalledOrSource installed source) reason) - -> PackageStatus (installed, Bool) (source, Bool) reason - viewPackageStatus merged = - case merged of - OnlyInLeft aPkg -> - AllAvailable (testConstraint aPkg) - - OnlyInRight (ExcludedPkg ePkg isrs irs srs) -> - AllExcluded (ExcludedPkg (testConstraint ePkg) isrs irs srs) - - InBoth (InstalledOnly aiPkg) - (ExcludedPkg (SourceOnly esPkg) [] [] srs) -> - case testConstraint (InstalledAndSource aiPkg esPkg) of - InstalledAndSource (aiPkg', iOk) (esPkg', sOk) -> - AvailableExcluded (aiPkg', iOk) (ExcludedPkg (esPkg', sOk) [] [] srs) - _ -> impossible - - InBoth (SourceOnly asPkg) - (ExcludedPkg (InstalledOnly eiPkg) [] irs []) -> - case testConstraint (InstalledAndSource eiPkg asPkg) of - InstalledAndSource (eiPkg', iOk) (asPkg', sOk) -> - ExcludedAvailable (ExcludedPkg (eiPkg', iOk) [] irs []) (asPkg', sOk) - _ -> impossible - _ -> impossible - where - impossible = error "impossible: viewPackageStatus invariant violation" - --- A intermediate structure that enumerates all the possible cases given the --- invariant. 
This helps us to get simpler and complete pattern matching in --- updatePkg above --- -data PackageStatus installed source reason - = AllAvailable (InstalledOrSource installed source) - | AllExcluded (ExcludedPkg (InstalledOrSource installed source) reason) - | AvailableExcluded installed (ExcludedPkg source reason) - | ExcludedAvailable (ExcludedPkg installed reason) source - - -conflicting :: (Package installed, Package source) - => Constraints installed source reason - -> Dependency - -> [(PackageId, [reason])] -conflicting (Constraints _ _ excluded _ _) dep = - [ (packageId pkg, reasonsAll ++ reasonsAvail ++ reasonsInstalled) --TODO - | ExcludedPkg pkg reasonsAll reasonsAvail reasonsInstalled <- - PackageIndex.lookupDependency excluded dep ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Types.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,143 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency.TopDown.Types --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Types for the top-down dependency resolver. ------------------------------------------------------------------------------ -{-# LANGUAGE CPP #-} -module Distribution.Client.Dependency.TopDown.Types where - -import Distribution.Client.Types - ( SourcePackage(..), ConfiguredPackage(..) - , OptionalStanza, ConfiguredId(..) ) -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo ) -import qualified Distribution.Client.ComponentDeps as CD - -import Distribution.Package - ( PackageId, PackageIdentifier, Dependency - , Package(packageId) ) -import Distribution.PackageDescription - ( FlagAssignment ) - --- ------------------------------------------------------------ --- * The various kinds of packages --- ------------------------------------------------------------ - -type SelectablePackage - = InstalledOrSource InstalledPackageEx UnconfiguredPackage - -type SelectedPackage - = InstalledOrSource InstalledPackageEx SemiConfiguredPackage - -data InstalledOrSource installed source - = InstalledOnly installed - | SourceOnly source - | InstalledAndSource installed source - deriving Eq - -data FinalSelectedPackage - = SelectedInstalled InstalledPackage - | SelectedSource ConfiguredPackage - -type TopologicalSortNumber = Int - --- | InstalledPackage caches its dependencies as source package IDs. 
-data InstalledPackage - = InstalledPackage - InstalledPackageInfo - [PackageId] - -data InstalledPackageEx - = InstalledPackageEx - InstalledPackage - !TopologicalSortNumber - [PackageIdentifier] -- transitive closure of installed deps - -data UnconfiguredPackage - = UnconfiguredPackage - SourcePackage - !TopologicalSortNumber - FlagAssignment - [OptionalStanza] - -data SemiConfiguredPackage - = SemiConfiguredPackage - SourcePackage -- package info - FlagAssignment -- total flag assignment for the package - [OptionalStanza] -- enabled optional stanzas - [Dependency] -- dependencies we end up with when we apply - -- the flag assignment - -instance Package InstalledPackage where - packageId (InstalledPackage pkg _) = packageId pkg - -instance Package InstalledPackageEx where - packageId (InstalledPackageEx p _ _) = packageId p - -instance Package UnconfiguredPackage where - packageId (UnconfiguredPackage p _ _ _) = packageId p - -instance Package SemiConfiguredPackage where - packageId (SemiConfiguredPackage p _ _ _) = packageId p - -instance (Package installed, Package source) - => Package (InstalledOrSource installed source) where - packageId (InstalledOnly p ) = packageId p - packageId (SourceOnly p ) = packageId p - packageId (InstalledAndSource p _) = packageId p - -instance Package FinalSelectedPackage where - packageId (SelectedInstalled pkg) = packageId pkg - packageId (SelectedSource pkg) = packageId pkg - - --- | We can have constraints on selecting just installed or just source --- packages. --- --- In particular, installed packages can only depend on other installed --- packages while packages that are not yet installed but which we plan to --- install can depend on installed or other not-yet-installed packages. --- -data InstalledConstraint = InstalledConstraint - | SourceConstraint - deriving (Eq, Show) - --- | Package dependencies --- --- The top-down solver uses its down type class for package dependencies, --- because it wants to know these dependencies as PackageIds, rather than as --- ComponentIds (so it cannot use PackageFixedDeps). --- --- Ideally we would switch the top-down solver over to use ComponentIds --- throughout; that means getting rid of this type class, and changing over the --- package index type to use Cabal's rather than cabal-install's. That will --- avoid the need for the local definitions of dependencyGraph and --- reverseTopologicalOrder in the top-down solver. --- --- Note that the top-down solver does not (and probably will never) make a --- distinction between the various kinds of dependencies, so we return a flat --- list here. If we get rid of this type class then any use of `sourceDeps` --- should be replaced by @fold . depends@. 
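The note above motivates a small class that exposes dependencies as plain package identifiers, so installed and not-yet-configured packages can feed the same name-level graph code. A toy sketch of that idea follows; HasSourceDeps, PkgId, Installed and Source are hypothetical simplifications, not the real instances.

-- Hypothetical, simplified stand-ins for the real package representations.
data PkgId     = PkgId String [Int]      deriving (Eq, Ord, Show)
data Installed = Installed PkgId [PkgId] -- id plus cached source deps
data Source    = Source    PkgId [PkgId] -- id plus declared deps

class HasSourceDeps a where
  pkgIdOf    :: a -> PkgId
  sourceDeps :: a -> [PkgId]

instance HasSourceDeps Installed where
  pkgIdOf    (Installed i _)  = i
  sourceDeps (Installed _ ds) = ds

instance HasSourceDeps Source where
  pkgIdOf    (Source i _)  = i
  sourceDeps (Source _ ds) = ds

-- Any graph-building code can now be written once against the class.
edges :: HasSourceDeps a => [a] -> [(PkgId, PkgId)]
edges pkgs = [ (pkgIdOf p, d) | p <- pkgs, d <- sourceDeps p ]

main :: IO ()
main = mapM_ print (edges [ Installed (PkgId "app" [1]) [PkgId "base" [4,9]] ])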
-class Package a => PackageSourceDeps a where - sourceDeps :: a -> [PackageIdentifier] - -instance PackageSourceDeps InstalledPackageEx where - sourceDeps (InstalledPackageEx _ _ deps) = deps - -instance PackageSourceDeps ConfiguredPackage where - sourceDeps (ConfiguredPackage _ _ _ deps) = map confSrcId $ CD.nonSetupDeps deps - -instance PackageSourceDeps InstalledPackage where - sourceDeps (InstalledPackage _ deps) = deps - -instance PackageSourceDeps FinalSelectedPackage where - sourceDeps (SelectedInstalled pkg) = sourceDeps pkg - sourceDeps (SelectedSource pkg) = sourceDeps pkg - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/TopDown.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1079 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency.Types --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Common types for dependency resolution. ------------------------------------------------------------------------------ -module Distribution.Client.Dependency.TopDown ( - topDownResolver - ) where - -import Distribution.Client.Dependency.TopDown.Types -import qualified Distribution.Client.Dependency.TopDown.Constraints as Constraints -import Distribution.Client.Dependency.TopDown.Constraints - ( Satisfiable(..) ) -import Distribution.Client.Types - ( SourcePackage(..), ConfiguredPackage(..) - , enableStanzas, ConfiguredId(..), fakeUnitId ) -import Distribution.Client.Dependency.Types - ( DependencyResolver, ResolverPackage(..) - , PackageConstraint(..), unlabelPackageConstraint - , PackagePreferences(..), InstalledPreference(..) - , Progress(..), foldProgress ) - -import qualified Distribution.Client.PackageIndex as PackageIndex -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.Client.ComponentDeps - ( ComponentDeps ) -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Client.PackageIndex - ( PackageIndex ) -import Distribution.Package - ( PackageName(..), PackageId, PackageIdentifier(..) - , UnitId(..), ComponentId(..) 
- , Package(..), packageVersion, packageName - , Dependency(Dependency), thisPackageVersion, simplifyDependency ) -import Distribution.PackageDescription - ( PackageDescription(buildDepends) ) -import Distribution.Client.PackageUtils - ( externalBuildDepends ) -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription, flattenPackageDescription ) -import Distribution.Version - ( Version(..), VersionRange, withinRange, simplifyVersionRange - , UpperBound(..), asVersionIntervals ) -import Distribution.Compiler - ( CompilerInfo ) -import Distribution.System - ( Platform ) -import Distribution.Simple.Utils - ( equating, comparing ) -import Distribution.Text - ( display ) - -import Data.List - ( foldl', maximumBy, minimumBy, nub, sort, sortBy, groupBy ) -import Data.Maybe - ( fromJust, fromMaybe, catMaybes ) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( Monoid(mempty) ) -#endif -import Control.Monad - ( guard ) -import qualified Data.Set as Set -import Data.Set (Set) -import qualified Data.Map as Map -import qualified Data.Graph as Graph -import qualified Data.Array as Array -import Control.Exception - ( assert ) - --- ------------------------------------------------------------ --- * Search state types --- ------------------------------------------------------------ - -type Constraints = Constraints.Constraints - InstalledPackageEx UnconfiguredPackage ExclusionReason -type SelectedPackages = PackageIndex SelectedPackage - --- ------------------------------------------------------------ --- * The search tree type --- ------------------------------------------------------------ - -data SearchSpace inherited pkg - = ChoiceNode inherited [[(pkg, SearchSpace inherited pkg)]] - | Failure Failure - --- ------------------------------------------------------------ --- * Traverse a search tree --- ------------------------------------------------------------ - -explore :: (PackageName -> PackagePreferences) - -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) - SelectablePackage - -> Progress Log Failure (SelectedPackages, Constraints) - -explore _ (Failure failure) = Fail failure -explore _ (ChoiceNode (s,c,_) []) = Done (s,c) -explore pref (ChoiceNode _ choices) = - case [ choice | [choice] <- choices ] of - ((_, node'):_) -> Step (logInfo node') (explore pref node') - [] -> Step (logInfo node') (explore pref node') - where - choice = minimumBy (comparing topSortNumber) choices - pkgname = packageName . fst . head $ choice - (_, node') = maximumBy (bestByPref pkgname) choice - where - topSortNumber choice = case fst (head choice) of - InstalledOnly (InstalledPackageEx _ i _) -> i - SourceOnly (UnconfiguredPackage _ i _ _) -> i - InstalledAndSource _ (UnconfiguredPackage _ i _ _) -> i - - bestByPref pkgname = case packageInstalledPreference of - PreferLatest -> - comparing (\(p,_) -> ( isPreferred p, packageId p)) - PreferInstalled -> - comparing (\(p,_) -> (isInstalled p, isPreferred p, packageId p)) - where - isInstalled (SourceOnly _) = False - isInstalled _ = True - isPreferred p = length . 
filter (packageVersion p `withinRange`) $ - preferredVersions - - (PackagePreferences preferredVersions packageInstalledPreference _) - = pref pkgname - - logInfo node = Select selected discarded - where (selected, discarded) = case node of - Failure _ -> ([], []) - ChoiceNode (_,_,changes) _ -> changes - --- ------------------------------------------------------------ --- * Generate a search tree --- ------------------------------------------------------------ - -type ConfigurePackage = PackageIndex SelectablePackage - -> SelectablePackage - -> Either [Dependency] SelectedPackage - --- | (packages selected, packages discarded) -type SelectionChanges = ([SelectedPackage], [PackageId]) - -searchSpace :: ConfigurePackage - -> Constraints - -> SelectedPackages - -> SelectionChanges - -> Set PackageName - -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) - SelectablePackage -searchSpace configure constraints selected changes next = - assert (Set.null (selectedSet `Set.intersection` next)) $ - assert (selectedSet `Set.isSubsetOf` Constraints.packages constraints) $ - assert (next `Set.isSubsetOf` Constraints.packages constraints) $ - - ChoiceNode (selected, constraints, changes) - [ [ (pkg, select name pkg) - | pkg <- PackageIndex.lookupPackageName available name ] - | name <- Set.elems next ] - where - available = Constraints.choices constraints - - selectedSet = Set.fromList (map packageName (PackageIndex.allPackages selected)) - - select name pkg = case configure available pkg of - Left missing -> Failure $ ConfigureFailed pkg - [ (dep, Constraints.conflicting constraints dep) - | dep <- missing ] - Right pkg' -> - case constrainDeps pkg' newDeps (addDeps constraints newPkgs) [] of - Left failure -> Failure failure - Right (constraints', newDiscarded) -> - searchSpace configure - constraints' selected' (newSelected, newDiscarded) next' - where - selected' = foldl' (flip PackageIndex.insert) selected newSelected - newSelected = - case Constraints.isPaired constraints (packageId pkg) of - Nothing -> [pkg'] - Just pkgid' -> [pkg', pkg''] - where - Just pkg'' = fmap (\(InstalledOnly p) -> InstalledOnly p) - (PackageIndex.lookupPackageId available pkgid') - - newPkgs = [ name' - | (Dependency name' _, _) <- newDeps - , null (PackageIndex.lookupPackageName selected' name') ] - newDeps = concatMap packageConstraints newSelected - next' = Set.delete name - $ foldl' (flip Set.insert) next newPkgs - -packageConstraints :: SelectedPackage -> [(Dependency, Bool)] -packageConstraints = either installedConstraints availableConstraints - . 
preferSource - where - preferSource (InstalledOnly pkg) = Left pkg - preferSource (SourceOnly pkg) = Right pkg - preferSource (InstalledAndSource _ pkg) = Right pkg - installedConstraints (InstalledPackageEx _ _ deps) = - [ (thisPackageVersion dep, True) - | dep <- deps ] - availableConstraints (SemiConfiguredPackage _ _ _ deps) = - [ (dep, False) | dep <- deps ] - -addDeps :: Constraints -> [PackageName] -> Constraints -addDeps = - foldr $ \pkgname cs -> - case Constraints.addTarget pkgname cs of - Satisfiable cs' () -> cs' - _ -> impossible "addDeps unsatisfiable" - -constrainDeps :: SelectedPackage -> [(Dependency, Bool)] -> Constraints - -> [PackageId] - -> Either Failure (Constraints, [PackageId]) -constrainDeps pkg [] cs discard = - case addPackageSelectConstraint (packageId pkg) cs of - Satisfiable cs' discard' -> Right (cs', discard' ++ discard) - _ -> impossible "constrainDeps unsatisfiable(1)" -constrainDeps pkg ((dep, installedConstraint):deps) cs discard = - case addPackageDependencyConstraint (packageId pkg) dep installedConstraint cs of - Satisfiable cs' discard' -> constrainDeps pkg deps cs' (discard' ++ discard) - Unsatisfiable -> impossible "constrainDeps unsatisfiable(2)" - ConflictsWith conflicts -> - Left (DependencyConflict pkg dep installedConstraint conflicts) - --- ------------------------------------------------------------ --- * The main algorithm --- ------------------------------------------------------------ - -search :: ConfigurePackage - -> (PackageName -> PackagePreferences) - -> Constraints - -> Set PackageName - -> Progress Log Failure (SelectedPackages, Constraints) -search configure pref constraints = - explore pref . searchSpace configure constraints mempty ([], []) - --- ------------------------------------------------------------ --- * The top level resolver --- ------------------------------------------------------------ - --- | The main exported resolver, with string logging and failure types to fit --- the standard 'DependencyResolver' interface. --- -topDownResolver :: DependencyResolver -topDownResolver platform cinfo installedPkgIndex sourcePkgIndex _pkgConfigDB - preferences constraints targets = - mapMessages $ topDownResolver' - platform cinfo - (convertInstalledPackageIndex installedPkgIndex) - sourcePkgIndex - preferences - (map unlabelPackageConstraint constraints) - targets - where - mapMessages :: Progress Log Failure a -> Progress String String a - mapMessages = foldProgress (Step . showLog) (Fail . showFailure) Done - --- | The native resolver with detailed structured logging and failure types. --- -topDownResolver' :: Platform -> CompilerInfo - -> PackageIndex InstalledPackage - -> PackageIndex SourcePackage - -> (PackageName -> PackagePreferences) - -> [PackageConstraint] - -> [PackageName] - -> Progress Log Failure [ResolverPackage] -topDownResolver' platform cinfo installedPkgIndex sourcePkgIndex - preferences constraints targets = - fmap (uncurry finalise) - . 
(\cs -> search configure preferences cs initialPkgNames) - =<< pruneBottomUp platform cinfo - =<< addTopLevelConstraints constraints - =<< addTopLevelTargets targets emptyConstraintSet - - where - configure = configurePackage platform cinfo - emptyConstraintSet :: Constraints - emptyConstraintSet = Constraints.empty - (annotateInstalledPackages topSortNumber installedPkgIndex') - (annotateSourcePackages constraints topSortNumber sourcePkgIndex') - (installedPkgIndex', sourcePkgIndex') = - selectNeededSubset installedPkgIndex sourcePkgIndex initialPkgNames - topSortNumber = topologicalSortNumbering installedPkgIndex' sourcePkgIndex' - - initialPkgNames = Set.fromList targets - - finalise selected' constraints' = - map toResolverPackage - . PackageIndex.allPackages - . fst . improvePlan installedPkgIndex' constraints' - . PackageIndex.fromList - $ finaliseSelectedPackages preferences selected' constraints' - - toResolverPackage :: FinalSelectedPackage -> ResolverPackage - toResolverPackage (SelectedInstalled (InstalledPackage pkg _)) - = PreExisting pkg - toResolverPackage (SelectedSource pkg) = Configured pkg - -addTopLevelTargets :: [PackageName] - -> Constraints - -> Progress a Failure Constraints -addTopLevelTargets [] cs = Done cs -addTopLevelTargets (pkg:pkgs) cs = - case Constraints.addTarget pkg cs of - Satisfiable cs' () -> addTopLevelTargets pkgs cs' - Unsatisfiable -> Fail (NoSuchPackage pkg) - ConflictsWith _conflicts -> impossible "addTopLevelTargets conflicts" - - -addTopLevelConstraints :: [PackageConstraint] -> Constraints - -> Progress Log Failure Constraints -addTopLevelConstraints [] cs = Done cs -addTopLevelConstraints (PackageConstraintFlags _ _ :deps) cs = - addTopLevelConstraints deps cs - -addTopLevelConstraints (PackageConstraintVersion pkg ver:deps) cs = - case addTopLevelVersionConstraint pkg ver cs of - Satisfiable cs' pkgids -> - Step (AppliedVersionConstraint pkg ver pkgids) - (addTopLevelConstraints deps cs') - - Unsatisfiable -> - Fail (TopLevelVersionConstraintUnsatisfiable pkg ver) - - ConflictsWith conflicts -> - Fail (TopLevelVersionConstraintConflict pkg ver conflicts) - -addTopLevelConstraints (PackageConstraintInstalled pkg:deps) cs = - case addTopLevelInstalledConstraint pkg cs of - Satisfiable cs' pkgids -> - Step (AppliedInstalledConstraint pkg InstalledConstraint pkgids) - (addTopLevelConstraints deps cs') - - Unsatisfiable -> - Fail (TopLevelInstallConstraintUnsatisfiable pkg InstalledConstraint) - - ConflictsWith conflicts -> - Fail (TopLevelInstallConstraintConflict pkg InstalledConstraint conflicts) - -addTopLevelConstraints (PackageConstraintSource pkg:deps) cs = - case addTopLevelSourceConstraint pkg cs of - Satisfiable cs' pkgids -> - Step (AppliedInstalledConstraint pkg SourceConstraint pkgids) - (addTopLevelConstraints deps cs') - - Unsatisfiable -> - Fail (TopLevelInstallConstraintUnsatisfiable pkg SourceConstraint) - - ConflictsWith conflicts -> - Fail (TopLevelInstallConstraintConflict pkg SourceConstraint conflicts) - -addTopLevelConstraints (PackageConstraintStanzas _ _ : deps) cs = - addTopLevelConstraints deps cs - --- | Add exclusion on available packages that cannot be configured. 
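addTopLevelTargets and addTopLevelConstraints above thread the constraint set through a list of inputs, stopping at the first one that cannot be satisfied. Stripped of the solver types, and reduced from Progress to Either, that is a monadic left fold; the names below (addTarget, Failure) are hypothetical.

import Control.Monad (foldM)

type Constraints = [String]                     -- hypothetical stand-in
data Failure     = NoSuchPackage String deriving Show

-- Add one target, failing if it is unknown; each step either extends the
-- constraint state or aborts, as addTopLevelTargets does.
addTarget :: Constraints -> String -> Either Failure Constraints
addTarget cs name
  | name == "unknown" = Left (NoSuchPackage name)
  | otherwise         = Right (name : cs)

addTargets :: [String] -> Constraints -> Either Failure Constraints
addTargets names cs = foldM addTarget cs names

main :: IO ()
main = do
  print (addTargets ["foo", "bar"] [])          -- Right ["bar","foo"]
  print (addTargets ["foo", "unknown"] [])      -- Left (NoSuchPackage "unknown")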
--- -pruneBottomUp :: Platform -> CompilerInfo - -> Constraints -> Progress Log Failure Constraints -pruneBottomUp platform comp constraints = - foldr prune Done (initialPackages constraints) constraints - - where - prune pkgs rest cs = foldr addExcludeConstraint rest unconfigurable cs - where - unconfigurable = - [ (pkg, missing) -- if necessary we could look up missing reasons - | (Just pkg', pkg) <- zip (map getSourcePkg pkgs) pkgs - , Left missing <- [configure cs pkg'] ] - - addExcludeConstraint (pkg, missing) rest cs = - let reason = ExcludedByConfigureFail missing in - case addPackageExcludeConstraint (packageId pkg) reason cs of - Satisfiable cs' [pkgid]| packageId pkg == pkgid - -> Step (ExcludeUnconfigurable pkgid) (rest cs') - Satisfiable _ _ -> impossible "pruneBottomUp satisfiable" - _ -> Fail $ ConfigureFailed pkg - [ (dep, Constraints.conflicting cs dep) - | dep <- missing ] - - configure cs (UnconfiguredPackage (SourcePackage _ pkg _ _) _ flags stanzas) = - finalizePackageDescription flags (dependencySatisfiable cs) - platform comp [] (enableStanzas stanzas pkg) - dependencySatisfiable cs = - not . null . PackageIndex.lookupDependency (Constraints.choices cs) - - -- collect each group of packages (by name) in reverse topsort order - initialPackages = - reverse - . sortBy (comparing (topSortNumber . head)) - . PackageIndex.allPackagesByName - . Constraints.choices - - topSortNumber (InstalledOnly (InstalledPackageEx _ i _)) = i - topSortNumber (SourceOnly (UnconfiguredPackage _ i _ _)) = i - topSortNumber (InstalledAndSource _ (UnconfiguredPackage _ i _ _)) = i - - getSourcePkg (InstalledOnly _ ) = Nothing - getSourcePkg (SourceOnly spkg) = Just spkg - getSourcePkg (InstalledAndSource _ spkg) = Just spkg - - -configurePackage :: Platform -> CompilerInfo -> ConfigurePackage -configurePackage platform cinfo available spkg = case spkg of - InstalledOnly ipkg -> Right (InstalledOnly ipkg) - SourceOnly apkg -> fmap SourceOnly (configure apkg) - InstalledAndSource ipkg apkg -> fmap (InstalledAndSource ipkg) - (configure apkg) - where - configure (UnconfiguredPackage apkg@(SourcePackage _ p _ _) _ flags stanzas) = - case finalizePackageDescription flags dependencySatisfiable - platform cinfo [] - (enableStanzas stanzas p) of - Left missing -> Left missing - Right (pkg, flags') -> Right $ - SemiConfiguredPackage apkg flags' stanzas (externalBuildDepends pkg) - - dependencySatisfiable = not . null . PackageIndex.lookupDependency available - --- | Annotate each installed packages with its set of transitive dependencies --- and its topological sort number. --- -annotateInstalledPackages :: (PackageName -> TopologicalSortNumber) - -> PackageIndex InstalledPackage - -> PackageIndex InstalledPackageEx -annotateInstalledPackages dfsNumber installed = PackageIndex.fromList - [ InstalledPackageEx pkg (dfsNumber (packageName pkg)) (transitiveDepends pkg) - | pkg <- PackageIndex.allPackages installed ] - where - transitiveDepends :: InstalledPackage -> [PackageId] - transitiveDepends = map (packageId . toPkg) . tail . Graph.reachable graph - . fromJust . toVertex . packageId - (graph, toPkg, toVertex) = dependencyGraph installed - - --- | Annotate each available packages with its topological sort number and any --- user-supplied partial flag assignment. 
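annotateInstalledPackages above caches, for each installed package, the transitive closure of its dependencies using Data.Graph.reachable and then dropping the package itself. A standalone sketch of that closure over a name-keyed edge list, with illustrative strings in place of package ids:

import qualified Data.Graph as Graph
import           Data.Maybe (fromJust)

-- (payload, key, keys of direct dependencies) as expected by graphFromEdges.
type Node = ((), String, [String])

-- Transitive dependencies of one package, excluding the package itself:
-- everything reachable from it, minus the starting vertex.
transitiveDeps :: [Node] -> String -> [String]
transitiveDeps nodes name =
    [ key | v <- drop 1 (Graph.reachable graph (fromJust (toVertex name)))
          , let (_, key, _) = fromVertex v ]
  where
    (graph, fromVertex, toVertex) = Graph.graphFromEdges nodes

main :: IO ()
main = print (transitiveDeps [ ((), "app",  ["lib"])
                             , ((), "lib",  ["base"])
                             , ((), "base", []) ] "app")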
--- -annotateSourcePackages :: [PackageConstraint] - -> (PackageName -> TopologicalSortNumber) - -> PackageIndex SourcePackage - -> PackageIndex UnconfiguredPackage -annotateSourcePackages constraints dfsNumber sourcePkgIndex = - PackageIndex.fromList - [ UnconfiguredPackage pkg (dfsNumber name) (flagsFor name) (stanzasFor name) - | pkg <- PackageIndex.allPackages sourcePkgIndex - , let name = packageName pkg ] - where - flagsFor = fromMaybe [] . flip Map.lookup flagsMap - flagsMap = Map.fromList - [ (name, flags) - | PackageConstraintFlags name flags <- constraints ] - stanzasFor = fromMaybe [] . flip Map.lookup stanzasMap - stanzasMap = Map.fromListWith (++) - [ (name, stanzas) - | PackageConstraintStanzas name stanzas <- constraints ] - --- | One of the heuristics we use when guessing which path to take in the --- search space is an ordering on the choices we make. It's generally better --- to make decisions about packages higer in the dep graph first since they --- place constraints on packages lower in the dep graph. --- --- To pick them in that order we annotate each package with its topological --- sort number. So if package A depends on package B then package A will have --- a lower topological sort number than B and we'll make a choice about which --- version of A to pick before we make a choice about B (unless there is only --- one possible choice for B in which case we pick that immediately). --- --- To construct these topological sort numbers we combine and flatten the --- installed and source package sets. We consider only dependencies between --- named packages, not including versions and for not-yet-configured packages --- we look at all the possible dependencies, not just those under any single --- flag assignment. This means we can actually get impossible combinations of --- edges and even cycles, but that doesn't really matter here, it's only a --- heuristic. --- -topologicalSortNumbering :: PackageIndex InstalledPackage - -> PackageIndex SourcePackage - -> (PackageName -> TopologicalSortNumber) -topologicalSortNumbering installedPkgIndex sourcePkgIndex = - \pkgname -> let Just vertex = toVertex pkgname - in topologicalSortNumbers Array.! vertex - where - topologicalSortNumbers = Array.array (Array.bounds graph) - (zip (Graph.topSort graph) [0..]) - (graph, _, toVertex) = Graph.graphFromEdges $ - [ ((), packageName pkg, nub deps) - | pkgs@(pkg:_) <- PackageIndex.allPackagesByName installedPkgIndex - , let deps = [ packageName dep - | pkg' <- pkgs - , dep <- sourceDeps pkg' ] ] - ++ [ ((), packageName pkg, nub deps) - | pkgs@(pkg:_) <- PackageIndex.allPackagesByName sourcePkgIndex - , let deps = [ depName - | SourcePackage _ pkg' _ _ <- pkgs - , Dependency depName _ <- - buildDepends (flattenPackageDescription pkg') ] ] - --- | We don't need the entire index (which is rather large and costly if we --- force it by examining the whole thing). So trace out the maximul subset of --- each index that we could possibly ever need. Do this by flattening packages --- and looking at the names of all possible dependencies. 
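topologicalSortNumbering above implements the heuristic described in the comment: assign each package name a topological sort number so that packages higher in the dependency graph get lower numbers and are decided first. A standalone sketch over plain strings; the real code builds one edge list from both the installed and the source index.

import qualified Data.Graph as Graph
import qualified Data.Array as Array
import           Data.Maybe (fromJust)

-- Assign 0, 1, 2, ... to package names in topological order, so a package
-- gets a lower number than the packages it depends on.
topSortNumbers :: [(String, [String])] -> String -> Int
topSortNumbers nameDeps =
    \name -> numbers Array.! fromJust (toVertex name)
  where
    (graph, _, toVertex) =
      Graph.graphFromEdges [ ((), name, deps) | (name, deps) <- nameDeps ]
    numbers = Array.array (Array.bounds graph)
                          (zip (Graph.topSort graph) [0 ..])

main :: IO ()
main = mapM_ (print . number) ["app", "lib", "base"]   -- 0, 1, 2
  where
    number = topSortNumbers [ ("app", ["lib"]), ("lib", ["base"]), ("base", []) ]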
--- -selectNeededSubset :: PackageIndex InstalledPackage - -> PackageIndex SourcePackage - -> Set PackageName - -> (PackageIndex InstalledPackage - ,PackageIndex SourcePackage) -selectNeededSubset installedPkgIndex sourcePkgIndex = select mempty mempty - where - select :: PackageIndex InstalledPackage - -> PackageIndex SourcePackage - -> Set PackageName - -> (PackageIndex InstalledPackage - ,PackageIndex SourcePackage) - select installedPkgIndex' sourcePkgIndex' remaining - | Set.null remaining = (installedPkgIndex', sourcePkgIndex') - | otherwise = select installedPkgIndex'' sourcePkgIndex'' remaining'' - where - (next, remaining') = Set.deleteFindMin remaining - moreInstalled = PackageIndex.lookupPackageName installedPkgIndex next - moreSource = PackageIndex.lookupPackageName sourcePkgIndex next - moreRemaining = -- we filter out packages already included in the indexes - -- this avoids an infinite loop if a package depends on itself - -- like base-3.0.3.0 with base-4.0.0.0 - filter notAlreadyIncluded - $ [ packageName dep - | pkg <- moreInstalled - , dep <- sourceDeps pkg ] - ++ [ name - | SourcePackage _ pkg _ _ <- moreSource - , Dependency name _ <- - buildDepends (flattenPackageDescription pkg) ] - installedPkgIndex'' = foldl' (flip PackageIndex.insert) - installedPkgIndex' moreInstalled - sourcePkgIndex'' = foldl' (flip PackageIndex.insert) - sourcePkgIndex' moreSource - remaining'' = foldl' (flip Set.insert) - remaining' moreRemaining - notAlreadyIncluded name = - null (PackageIndex.lookupPackageName installedPkgIndex' name) - && null (PackageIndex.lookupPackageName sourcePkgIndex' name) - - --- | The old top down solver assumes that installed packages are indexed by --- their source package id. But these days they're actually indexed by an --- installed package id and there can be many installed packages with the same --- source package id. This function tries to do a convertion, but it can only --- be partial. --- -convertInstalledPackageIndex :: InstalledPackageIndex - -> PackageIndex InstalledPackage -convertInstalledPackageIndex index' = PackageIndex.fromList - -- There can be multiple installed instances of each package version, - -- like when the same package is installed in the global & user DBs. - -- InstalledPackageIndex.allPackagesBySourcePackageId gives us the - -- installed packages with the most preferred instances first, so by - -- picking the first we should get the user one. This is almost but not - -- quite the same as what ghc does. - [ InstalledPackage ipkg (sourceDepsOf index' ipkg) - | (_,ipkg:_) <- InstalledPackageIndex.allPackagesBySourcePackageId index' ] - where - -- The InstalledPackageInfo only lists dependencies by the - -- UnitId, which means we do not directly know the corresponding - -- source dependency. The only way to find out is to lookup the - -- UnitId to get the InstalledPackageInfo and look at its - -- source PackageId. But if the package is broken because it depends on - -- other packages that do not exist then we have a problem we cannot find - -- the original source package id. Instead we make up a bogus package id. - -- This should have the same effect since it should be a dependency on a - -- nonexistent package. 
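selectNeededSubset above avoids forcing the full indexes by tracing out only the packages reachable by name from the targets. A minimal sketch of that worklist closure over a single name-to-dependencies map; the real code walks the installed and source indexes together and flattens conditional dependencies first.

import qualified Data.Map.Strict as Map
import           Data.Map.Strict (Map)
import qualified Data.Set as Set
import           Data.Set (Set)

-- Follow dependency names outwards from the targets, keeping only the
-- entries of the index that could ever be needed.
neededSubset :: Map String [String] -> Set String -> Map String [String]
neededSubset index = go Map.empty
  where
    go acc remaining = case Set.minView remaining of
      Nothing -> acc
      Just (name, remaining')
        | name `Map.member` acc -> go acc remaining'   -- already included
        | otherwise -> case Map.lookup name index of
            Nothing   -> go acc remaining'             -- unknown name: skip
            Just deps -> go (Map.insert name deps acc)
                            (foldr Set.insert remaining' deps)

main :: IO ()
main = print (neededSubset
  (Map.fromList [("app", ["lib"]), ("lib", ["base"]), ("base", []), ("unused", [])])
  (Set.singleton "app"))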
- sourceDepsOf index ipkg = - [ maybe (brokenPackageId depid) packageId mdep - | let depids = InstalledPackageInfo.depends ipkg - getpkg = InstalledPackageIndex.lookupUnitId index - , (depid, mdep) <- zip depids (map getpkg depids) ] - - brokenPackageId (SimpleUnitId (ComponentId str)) = - PackageIdentifier (PackageName (str ++ "-broken")) (Version [] []) - --- ------------------------------------------------------------ --- * Post processing the solution --- ------------------------------------------------------------ - -finaliseSelectedPackages :: (PackageName -> PackagePreferences) - -> SelectedPackages - -> Constraints - -> [FinalSelectedPackage] -finaliseSelectedPackages pref selected constraints = - map finaliseSelected (PackageIndex.allPackages selected) - where - remainingChoices = Constraints.choices constraints - finaliseSelected (InstalledOnly ipkg ) = finaliseInstalled ipkg - finaliseSelected (SourceOnly apkg) = finaliseSource Nothing apkg - finaliseSelected (InstalledAndSource ipkg apkg) = - case PackageIndex.lookupPackageId remainingChoices (packageId ipkg) of - --picked package not in constraints - Nothing -> impossible "finaliseSelected no pkg" - -- to constrain to avail only: - Just (SourceOnly _) -> impossible "finaliseSelected src only" - Just (InstalledOnly _) -> finaliseInstalled ipkg - Just (InstalledAndSource _ _) -> finaliseSource (Just ipkg) apkg - - finaliseInstalled (InstalledPackageEx pkg _ _) = SelectedInstalled pkg - finaliseSource mipkg (SemiConfiguredPackage pkg flags stanzas deps) = - SelectedSource (ConfiguredPackage pkg flags stanzas deps') - where - -- We cheat in the cabal solver, and classify all dependencies as - -- library dependencies. - deps' :: ComponentDeps [ConfiguredId] - deps' = CD.fromLibraryDeps $ map (confId . pickRemaining mipkg) deps - - -- InstalledOrSource indicates that we either have a source package - -- available, or an installed one, or both. In the case that we have both - -- available, we don't yet know if we can pick the installed one (the - -- dependencies may not match up, for instance); this is verified in - -- `improvePlan`. - -- - -- This means that at this point we cannot construct a valid installed - -- package ID yet for the dependencies. We therefore have two options: - -- - -- * We could leave the installed package ID undefined here, and have a - -- separate pass over the output of the top-down solver, fixing all - -- dependencies so that if we depend on an already installed package we - -- use the proper installed package ID. - -- - -- * We can _always_ use fake installed IDs, irrespective of whether we the - -- dependency is on an already installed package or not. This is okay - -- because (i) the top-down solver does not (and never will) support - -- multiple package instances, and (ii) we initialize the FakeMap with - -- fake IDs for already installed packages. - -- - -- For now we use the second option; if however we change the implementation - -- of these fake IDs so that we do away with the FakeMap and update a - -- package reverse dependencies as we execute the install plan and discover - -- real package IDs, then this is no longer possible and we have to - -- implement the first option (see also Note [FakeMap] in Cabal). 
- confId :: InstalledOrSource InstalledPackageEx UnconfiguredPackage -> ConfiguredId - confId pkg = ConfiguredId { - confSrcId = packageId pkg - , confInstId = fakeUnitId (packageId pkg) - } - - pickRemaining mipkg dep@(Dependency _name versionRange) = - case PackageIndex.lookupDependency remainingChoices dep of - [] -> impossible "pickRemaining no pkg" - [pkg'] -> pkg' - remaining -> assert (checkIsPaired remaining) - $ maximumBy bestByPref remaining - where - -- We order candidate packages to pick for a dependency by these - -- three factors. The last factor is just highest version wins. - bestByPref = - comparing (\p -> (isCurrent p, isPreferred p, packageVersion p)) - -- Is the package already used by the installed version of this - -- package? If so we should pick that first. This stops us from doing - -- silly things like deciding to rebuild haskell98 against base 3. - isCurrent = case mipkg :: Maybe InstalledPackageEx of - Nothing -> \_ -> False - Just ipkg -> \p -> packageId p `elem` sourceDeps ipkg - -- If there is no upper bound on the version range then we apply a - -- preferred version according to the hackage or user's suggested - -- version constraints. TODO: distinguish hacks from prefs - bounded = boundedAbove versionRange - isPreferred p - | bounded = boundedRank -- this is a dummy constant - | otherwise = length . filter (packageVersion p `withinRange`) $ - preferredVersions - where (PackagePreferences preferredVersions _ _) = pref (packageName p) - boundedRank = 0 -- any value will do - - boundedAbove :: VersionRange -> Bool - boundedAbove vr = case asVersionIntervals vr of - [] -> True -- this is the inconsistent version range. - intervals -> case last intervals of - (_, UpperBound _ _) -> True - (_, NoUpperBound ) -> False - - -- We really only expect to find more than one choice remaining when - -- we're finalising a dependency on a paired package. - checkIsPaired [p1, p2] = - case Constraints.isPaired constraints (packageId p1) of - Just p2' -> packageId p2' == packageId p2 - Nothing -> False - checkIsPaired _ = False - --- | Improve an existing installation plan by, where possible, swapping --- packages we plan to install with ones that are already installed. --- This may add additional constraints due to the dependencies of installed --- packages on other installed packages. --- -improvePlan :: PackageIndex InstalledPackage - -> Constraints - -> PackageIndex FinalSelectedPackage - -> (PackageIndex FinalSelectedPackage, Constraints) -improvePlan installed constraints0 selected0 = - foldl' improve (selected0, constraints0) (reverseTopologicalOrder selected0) - where - improve (selected, constraints) = fromMaybe (selected, constraints) - . improvePkg selected constraints - - -- The idea is to improve the plan by swapping a configured package for - -- an equivalent installed one. 
For a particular package the condition is - -- that the package be in a configured state, that a the same version be - -- already installed with the exact same dependencies and all the packages - -- in the plan that it depends on are in the installed state - improvePkg selected constraints pkgid = do - SelectedSource pkg <- PackageIndex.lookupPackageId selected pkgid - ipkg <- PackageIndex.lookupPackageId installed pkgid - guard $ all (isInstalled selected) (sourceDeps pkg) - tryInstalled selected constraints [ipkg] - - isInstalled selected pkgid = - case PackageIndex.lookupPackageId selected pkgid of - Just (SelectedInstalled _) -> True - _ -> False - - tryInstalled :: PackageIndex FinalSelectedPackage -> Constraints - -> [InstalledPackage] - -> Maybe (PackageIndex FinalSelectedPackage, Constraints) - tryInstalled selected constraints [] = Just (selected, constraints) - tryInstalled selected constraints (pkg:pkgs) = - case constraintsOk (packageId pkg) (sourceDeps pkg) constraints of - Nothing -> Nothing - Just constraints' -> tryInstalled selected' constraints' pkgs' - where - selected' = PackageIndex.insert (SelectedInstalled pkg) selected - pkgs' = catMaybes (map notSelected (sourceDeps pkg)) ++ pkgs - notSelected pkgid = - case (PackageIndex.lookupPackageId installed pkgid - ,PackageIndex.lookupPackageId selected pkgid) of - (Just pkg', Nothing) -> Just pkg' - _ -> Nothing - - constraintsOk _ [] constraints = Just constraints - constraintsOk pkgid (pkgid':pkgids) constraints = - case addPackageDependencyConstraint pkgid dep True constraints of - Satisfiable constraints' _ -> constraintsOk pkgid pkgids constraints' - _ -> Nothing - where - dep = thisPackageVersion pkgid' - - reverseTopologicalOrder :: PackageIndex FinalSelectedPackage -> [PackageId] - reverseTopologicalOrder index = map (packageId . toPkg) - . Graph.topSort - . 
Graph.transposeG - $ graph - where (graph, toPkg, _) = dependencyGraph index - --- ------------------------------------------------------------ --- * Adding and recording constraints --- ------------------------------------------------------------ - -addPackageSelectConstraint :: PackageId -> Constraints - -> Satisfiable Constraints - [PackageId] ExclusionReason -addPackageSelectConstraint pkgid = - Constraints.constrain pkgname constraint reason - where - pkgname = packageName pkgid - constraint ver _ = ver == packageVersion pkgid - reason = SelectedOther pkgid - -addPackageExcludeConstraint :: PackageId -> ExclusionReason - -> Constraints - -> Satisfiable Constraints - [PackageId] ExclusionReason -addPackageExcludeConstraint pkgid reason = - Constraints.constrain pkgname constraint reason - where - pkgname = packageName pkgid - constraint ver installed - | ver == packageVersion pkgid = installed - | otherwise = True - -addPackageDependencyConstraint :: PackageId -> Dependency -> Bool - -> Constraints - -> Satisfiable Constraints - [PackageId] ExclusionReason -addPackageDependencyConstraint pkgid dep@(Dependency pkgname verrange) - installedConstraint = - Constraints.constrain pkgname constraint reason - where - constraint ver installed = ver `withinRange` verrange - && if installedConstraint then installed else True - reason = ExcludedByPackageDependency pkgid dep installedConstraint - -addTopLevelVersionConstraint :: PackageName -> VersionRange - -> Constraints - -> Satisfiable Constraints - [PackageId] ExclusionReason -addTopLevelVersionConstraint pkgname verrange = - Constraints.constrain pkgname constraint reason - where - constraint ver _installed = ver `withinRange` verrange - reason = ExcludedByTopLevelConstraintVersion pkgname verrange - -addTopLevelInstalledConstraint, - addTopLevelSourceConstraint :: PackageName - -> Constraints - -> Satisfiable Constraints - [PackageId] ExclusionReason -addTopLevelInstalledConstraint pkgname = - Constraints.constrain pkgname constraint reason - where - constraint _ver installed = installed - reason = ExcludedByTopLevelConstraintInstalled pkgname - -addTopLevelSourceConstraint pkgname = - Constraints.constrain pkgname constraint reason - where - constraint _ver installed = not installed - reason = ExcludedByTopLevelConstraintSource pkgname - - --- ------------------------------------------------------------ --- * Reasons for constraints --- ------------------------------------------------------------ - --- | For every constraint we record we also record the reason that constraint --- is needed. So if we end up failing due to conflicting constraints then we --- can give an explnanation as to what was conflicting and why. --- -data ExclusionReason = - - -- | We selected this other version of the package. That means we exclude - -- all the other versions. - SelectedOther PackageId - - -- | We excluded this version of the package because it failed to - -- configure probably because of unsatisfiable deps. - | ExcludedByConfigureFail [Dependency] - - -- | We excluded this version of the package because another package that - -- we selected imposed a dependency which this package did not satisfy. - | ExcludedByPackageDependency PackageId Dependency Bool - - -- | We excluded this version of the package because it did not satisfy - -- a dependency given as an original top level input. 
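Each of the constraint helpers above reduces to a predicate over a candidate's version and whether the candidate is an installed instance, paired with an ExclusionReason. A standalone sketch of those predicates, with a plain half-open interval standing in for a real VersionRange and hypothetical Version/Constraint synonyms:

-- A constraint is a predicate over (version, is-installed), mirroring the
-- 'constraint' closures above; simplified, hypothetical types.
type Version    = [Int]
type Constraint = Version -> Bool -> Bool

selectOnly :: Version -> Constraint
selectOnly v = \ver _installed -> ver == v

excludeVersion :: Version -> Constraint
excludeVersion v = \ver installed -> if ver == v then installed else True

dependencyIn :: (Version, Version) -> Bool -> Constraint
dependencyIn (lo, hi) needInstalled =
  \ver installed -> lo <= ver && ver < hi && (not needInstalled || installed)

installedOnly :: Constraint
installedOnly _ver installed = installed

-- Keep only the candidates that satisfy a constraint.
apply :: Constraint -> [(Version, Bool)] -> [(Version, Bool)]
apply ok = filter (uncurry ok)

main :: IO ()
main = print (apply (dependencyIn ([1,0], [2,0]) True)
                    [([1,2], True), ([1,4], False), ([0,9], True)])
                    -- keeps only ([1,2], True)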
- -- - | ExcludedByTopLevelConstraintVersion PackageName VersionRange - | ExcludedByTopLevelConstraintInstalled PackageName - | ExcludedByTopLevelConstraintSource PackageName - - deriving Eq - --- | Given an excluded package and the reason it was excluded, produce a human --- readable explanation. --- -showExclusionReason :: PackageId -> ExclusionReason -> String -showExclusionReason pkgid (SelectedOther pkgid') = - display pkgid ++ " was excluded because " ++ - display pkgid' ++ " was selected instead" -showExclusionReason pkgid (ExcludedByConfigureFail missingDeps) = - display pkgid ++ " was excluded because it could not be configured. " - ++ "It requires " ++ listOf displayDep missingDeps -showExclusionReason pkgid (ExcludedByPackageDependency pkgid' dep installedConstraint) - = display pkgid ++ " was excluded because " ++ display pkgid' ++ " requires " - ++ (if installedConstraint then "an installed instance of " else "") - ++ displayDep dep -showExclusionReason pkgid (ExcludedByTopLevelConstraintVersion pkgname verRange) = - display pkgid ++ " was excluded because of the top level constraint " ++ - displayDep (Dependency pkgname verRange) -showExclusionReason pkgid (ExcludedByTopLevelConstraintInstalled pkgname) - = display pkgid ++ " was excluded because of the top level constraint '" - ++ display pkgname ++ " installed' which means that only installed instances " - ++ "of the package may be selected." -showExclusionReason pkgid (ExcludedByTopLevelConstraintSource pkgname) - = display pkgid ++ " was excluded because of the top level constraint '" - ++ display pkgname ++ " source' which means that only source versions " - ++ "of the package may be selected." - - --- ------------------------------------------------------------ --- * Logging progress and failures --- ------------------------------------------------------------ - -data Log = Select [SelectedPackage] [PackageId] - | AppliedVersionConstraint PackageName VersionRange [PackageId] - | AppliedInstalledConstraint PackageName InstalledConstraint [PackageId] - | ExcludeUnconfigurable PackageId - -data Failure - = NoSuchPackage - PackageName - | ConfigureFailed - SelectablePackage - [(Dependency, [(PackageId, [ExclusionReason])])] - | DependencyConflict - SelectedPackage Dependency Bool - [(PackageId, [ExclusionReason])] - | TopLevelVersionConstraintConflict - PackageName VersionRange - [(PackageId, [ExclusionReason])] - | TopLevelVersionConstraintUnsatisfiable - PackageName VersionRange - | TopLevelInstallConstraintConflict - PackageName InstalledConstraint - [(PackageId, [ExclusionReason])] - | TopLevelInstallConstraintUnsatisfiable - PackageName InstalledConstraint - -showLog :: Log -> String -showLog (Select selected discarded) = case (selectedMsg, discardedMsg) of - ("", y) -> y - (x, "") -> x - (x, y) -> x ++ " and " ++ y - - where - selectedMsg = "selecting " ++ case selected of - [] -> "" - [s] -> display (packageId s) ++ " " ++ kind s - (s:ss) -> listOf id - $ (display (packageId s) ++ " " ++ kind s) - : [ display (packageVersion s') ++ " " ++ kind s' - | s' <- ss ] - - kind (InstalledOnly _) = "(installed)" - kind (SourceOnly _) = "(source)" - kind (InstalledAndSource _ _) = "(installed or source)" - - discardedMsg = case discarded of - [] -> "" - _ -> "discarding " ++ listOf id - [ element - | (pkgid:pkgids) <- groupBy (equating packageName) (sort discarded) - , element <- display pkgid : map (display . 
packageVersion) pkgids ] -showLog (AppliedVersionConstraint pkgname ver pkgids) = - "applying constraint " ++ display (Dependency pkgname ver) - ++ if null pkgids - then "" - else " which excludes " ++ listOf display pkgids -showLog (AppliedInstalledConstraint pkgname inst pkgids) = - "applying constraint " ++ display pkgname ++ " '" - ++ (case inst of InstalledConstraint -> "installed"; _ -> "source") ++ "' " - ++ if null pkgids - then "" - else "which excludes " ++ listOf display pkgids -showLog (ExcludeUnconfigurable pkgid) = - "excluding " ++ display pkgid ++ " (it cannot be configured)" - -showFailure :: Failure -> String -showFailure (NoSuchPackage pkgname) = - "The package " ++ display pkgname ++ " is unknown." -showFailure (ConfigureFailed pkg missingDeps) = - "cannot configure " ++ displayPkg pkg ++ ". It requires " - ++ listOf (displayDep . fst) missingDeps - ++ '\n' : unlines (map (uncurry whyNot) missingDeps) - - where - whyNot (Dependency name ver) [] = - "There is no available version of " ++ display name - ++ " that satisfies " ++ displayVer ver - - whyNot dep conflicts = - "For the dependency on " ++ displayDep dep - ++ " there are these packages: " ++ listOf display pkgs - ++ ". However none of them are available.\n" - ++ unlines [ showExclusionReason (packageId pkg') reason - | (pkg', reasons) <- conflicts, reason <- reasons ] - - where pkgs = map fst conflicts - -showFailure (DependencyConflict pkg dep installedConstraint conflicts) = - "dependencies conflict: " - ++ displayPkg pkg ++ " requires " - ++ (if installedConstraint then "an installed instance of " else "") - ++ displayDep dep ++ " however:\n" - ++ unlines [ showExclusionReason (packageId pkg') reason - | (pkg', reasons) <- conflicts, reason <- reasons ] - -showFailure (TopLevelVersionConstraintConflict name ver conflicts) = - "constraints conflict: we have the top level constraint " - ++ displayDep (Dependency name ver) ++ ", but\n" - ++ unlines [ showExclusionReason (packageId pkg') reason - | (pkg', reasons) <- conflicts, reason <- reasons ] - -showFailure (TopLevelVersionConstraintUnsatisfiable name ver) = - "There is no available version of " ++ display name - ++ " that satisfies " ++ displayVer ver - -showFailure (TopLevelInstallConstraintConflict name InstalledConstraint conflicts) = - "constraints conflict: " - ++ "top level constraint '" ++ display name ++ " installed' however\n" - ++ unlines [ showExclusionReason (packageId pkg') reason - | (pkg', reasons) <- conflicts, reason <- reasons ] - -showFailure (TopLevelInstallConstraintUnsatisfiable name InstalledConstraint) = - "There is no installed version of " ++ display name - -showFailure (TopLevelInstallConstraintConflict name SourceConstraint conflicts) = - "constraints conflict: " - ++ "top level constraint '" ++ display name ++ " source' however\n" - ++ unlines [ showExclusionReason (packageId pkg') reason - | (pkg', reasons) <- conflicts, reason <- reasons ] - -showFailure (TopLevelInstallConstraintUnsatisfiable name SourceConstraint) = - "There is no available source version of " ++ display name - -displayVer :: VersionRange -> String -displayVer = display . simplifyVersionRange - -displayDep :: Dependency -> String -displayDep = display . 
simplifyDependency - - --- ------------------------------------------------------------ --- * Utils --- ------------------------------------------------------------ - -impossible :: String -> a -impossible msg = internalError $ "assertion failure: " ++ msg - -internalError :: String -> a -internalError msg = error $ "internal error: " ++ msg - -displayPkg :: Package pkg => pkg -> String -displayPkg = display . packageId - -listOf :: (a -> String) -> [a] -> String -listOf _ [] = [] -listOf disp [x0] = disp x0 -listOf disp (x0:x1:xs) = disp x0 ++ go x1 xs - where go x [] = " and " ++ disp x - go x (x':xs') = ", " ++ disp x ++ go x' xs' - --- ------------------------------------------------------------ --- * Construct a dependency graph --- ------------------------------------------------------------ - --- | Builds a graph of the package dependencies. --- --- Dependencies on other packages that are not in the index are discarded. --- You can check if there are any such dependencies with 'brokenPackages'. --- --- The top-down solver gets its own implementation, because both --- `dependencyGraph` in `Distribution.Client.PlanIndex` (in cabal-install) and --- `dependencyGraph` in `Distribution.Simple.PackageIndex` (in Cabal) both work --- with `PackageIndex` from `Cabal` (that is, a package index indexed by --- installed package IDs rather than package names). --- --- Ideally we would switch the top-down solver over to use that too, so that --- this duplication could be avoided, but that's a bit of work and the top-down --- solver is legacy code anyway. --- --- (NOTE: This is called at two types: InstalledPackage and FinalSelectedPackage.) -dependencyGraph :: PackageSourceDeps pkg - => PackageIndex pkg - -> (Graph.Graph, - Graph.Vertex -> pkg, - PackageId -> Maybe Graph.Vertex) -dependencyGraph index = (graph, vertexToPkg, pkgIdToVertex) - where - graph = Array.listArray bounds $ - map (catMaybes . map pkgIdToVertex . sourceDeps) pkgs - vertexToPkg vertex = pkgTable Array.! vertex - pkgIdToVertex = binarySearch 0 topBound - - pkgTable = Array.listArray bounds pkgs - pkgIdTable = Array.listArray bounds (map packageId pkgs) - pkgs = sortBy (comparing packageId) (PackageIndex.allPackages index) - topBound = length pkgs - 1 - bounds = (0, topBound) - - binarySearch a b key - | a > b = Nothing - | otherwise = case compare key (pkgIdTable Array.! mid) of - LT -> binarySearch a (mid-1) key - EQ -> Just mid - GT -> binarySearch (mid+1) b key - where mid = (a + b) `div` 2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Types.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,318 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency.Types --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Common types for dependency resolution. 
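The dependencyGraph helper above numbers the packages 0..n-1 in sorted order and answers id-to-vertex queries with a binary search over the sorted id table. A standalone sketch of that lookup; string keys stand in for real package ids, and the adjacency array built from sourceDeps is omitted.

import qualified Data.Array as Array
import           Data.Array (Array)
import           Data.List (sort)

-- Sorted table of keys, indexed by vertex number 0..n-1.
mkTable :: Ord k => [k] -> Array Int k
mkTable keys = Array.listArray (0, length keys - 1) (sort keys)

-- Binary search in the sorted table, as in dependencyGraph's pkgIdToVertex.
toVertex :: Ord k => Array Int k -> k -> Maybe Int
toVertex table = go lo0 hi0
  where
    (lo0, hi0) = Array.bounds table
    go lo hi key
      | lo > hi   = Nothing
      | otherwise = case compare key (table Array.! mid) of
          LT -> go lo (mid - 1) key
          EQ -> Just mid
          GT -> go (mid + 1) hi key
      where mid = (lo + hi) `div` 2

main :: IO ()
main = do
  let table = mkTable ["async-2.1.0", "base-4.9.0.0", "stm-2.4.4"]
  print (toVertex table "base-4.9.0.0")   -- Just 1
  print (toVertex table "missing-1.0")    -- Nothing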
------------------------------------------------------------------------------ -module Distribution.Client.Dependency.Types ( - PreSolver(..), - Solver(..), - DependencyResolver, - ResolverPackage(..), - - PackageConstraint(..), - showPackageConstraint, - PackagePreferences(..), - InstalledPreference(..), - PackagesPreferenceDefault(..), - - Progress(..), - foldProgress, - - LabeledPackageConstraint(..), - ConstraintSource(..), - unlabelPackageConstraint, - showConstraintSource - - ) where - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative - ( Applicative(..) ) -#endif -import Control.Applicative - ( Alternative(..) ) - -import Data.Char - ( isAlpha, toLower ) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( Monoid(..) ) -#endif - -import Distribution.Client.PkgConfigDb - ( PkgConfigDb ) -import Distribution.Client.Types - ( OptionalStanza(..), SourcePackage(..), ConfiguredPackage ) - -import qualified Distribution.Compat.ReadP as Parse - ( pfail, munch1 ) -import Distribution.PackageDescription - ( FlagAssignment, FlagName(..) ) -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo ) -import qualified Distribution.Client.PackageIndex as PackageIndex - ( PackageIndex ) -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import Distribution.Package - ( PackageName ) -import Distribution.Version - ( VersionRange, simplifyVersionRange ) -import Distribution.Compiler - ( CompilerInfo ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( Text(..), display ) - -import Text.PrettyPrint - ( text ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary(..)) - -import Prelude hiding (fail) - - --- | All the solvers that can be selected. -data PreSolver = AlwaysTopDown | AlwaysModular | Choose - deriving (Eq, Ord, Show, Bounded, Enum, Generic) - --- | All the solvers that can be used. -data Solver = TopDown | Modular - deriving (Eq, Ord, Show, Bounded, Enum, Generic) - -instance Binary PreSolver -instance Binary Solver - -instance Text PreSolver where - disp AlwaysTopDown = text "topdown" - disp AlwaysModular = text "modular" - disp Choose = text "choose" - parse = do - name <- Parse.munch1 isAlpha - case map toLower name of - "topdown" -> return AlwaysTopDown - "modular" -> return AlwaysModular - "choose" -> return Choose - _ -> Parse.pfail - --- | A dependency resolver is a function that works out an installation plan --- given the set of installed and available packages and a set of deps to --- solve for. --- --- The reason for this interface is because there are dozens of approaches to --- solving the package dependency problem and we want to make it easy to swap --- in alternatives. --- -type DependencyResolver = Platform - -> CompilerInfo - -> InstalledPackageIndex - -> PackageIndex.PackageIndex SourcePackage - -> PkgConfigDb - -> (PackageName -> PackagePreferences) - -> [LabeledPackageConstraint] - -> [PackageName] - -> Progress String String [ResolverPackage] - --- | The dependency resolver picks either pre-existing installed packages --- or it picks source packages along with package configuration. --- --- This is like the 'InstallPlan.PlanPackage' but with fewer cases. --- -data ResolverPackage = PreExisting InstalledPackageInfo - | Configured ConfiguredPackage - --- | Per-package constraints. Package constraints must be respected by the --- solver. 
Multiple constraints for each package can be given, though obviously --- it is possible to construct conflicting constraints (eg impossible version --- range or inconsistent flag assignment). --- -data PackageConstraint - = PackageConstraintVersion PackageName VersionRange - | PackageConstraintInstalled PackageName - | PackageConstraintSource PackageName - | PackageConstraintFlags PackageName FlagAssignment - | PackageConstraintStanzas PackageName [OptionalStanza] - deriving (Eq,Show,Generic) - -instance Binary PackageConstraint - --- | Provide a textual representation of a package constraint --- for debugging purposes. --- -showPackageConstraint :: PackageConstraint -> String -showPackageConstraint (PackageConstraintVersion pn vr) = - display pn ++ " " ++ display (simplifyVersionRange vr) -showPackageConstraint (PackageConstraintInstalled pn) = - display pn ++ " installed" -showPackageConstraint (PackageConstraintSource pn) = - display pn ++ " source" -showPackageConstraint (PackageConstraintFlags pn fs) = - "flags " ++ display pn ++ " " ++ unwords (map (uncurry showFlag) fs) - where - showFlag (FlagName f) True = "+" ++ f - showFlag (FlagName f) False = "-" ++ f -showPackageConstraint (PackageConstraintStanzas pn ss) = - "stanzas " ++ display pn ++ " " ++ unwords (map showStanza ss) - where - showStanza TestStanzas = "test" - showStanza BenchStanzas = "bench" - --- | Per-package preferences on the version. It is a soft constraint that the --- 'DependencyResolver' should try to respect where possible. It consists of --- an 'InstalledPreference' which says if we prefer versions of packages --- that are already installed. It also has (possibly multiple) --- 'PackageVersionPreference's which are suggested constraints on the version --- number. The resolver should try to use package versions that satisfy --- the maximum number of the suggested version constraints. --- --- It is not specified if preferences on some packages are more important than --- others. --- -data PackagePreferences = PackagePreferences [VersionRange] - InstalledPreference - [OptionalStanza] - --- | Whether we prefer an installed version of a package or simply the latest --- version. --- -data InstalledPreference = PreferInstalled | PreferLatest - deriving Show - --- | Global policy for all packages to say if we prefer package versions that --- are already installed locally or if we just prefer the latest available. --- -data PackagesPreferenceDefault = - - -- | Always prefer the latest version irrespective of any existing - -- installed version. - -- - -- * This is the standard policy for upgrade. - -- - PreferAllLatest - - -- | Always prefer the installed versions over ones that would need to be - -- installed. Secondarily, prefer latest versions (eg the latest installed - -- version or if there are none then the latest source version). - | PreferAllInstalled - - -- | Prefer the latest version for packages that are explicitly requested - -- but prefers the installed version for any other packages. - -- - -- * This is the standard policy for install. - -- - | PreferLatestForSelected - deriving Show - --- | A type to represent the unfolding of an expensive long running --- calculation that may fail. We may get intermediate steps before the final --- result which may be used to indicate progress and\/or logging messages. --- -data Progress step fail done = Step step (Progress step fail done) - | Fail fail - | Done done - deriving (Functor) - --- | Consume a 'Progress' calculation. 
Much like 'foldr' for lists but with two --- base cases, one for a final result and one for failure. --- --- Eg to convert into a simple 'Either' result use: --- --- > foldProgress (flip const) Left Right --- -foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) - -> Progress step fail done -> a -foldProgress step fail done = fold - where fold (Step s p) = step s (fold p) - fold (Fail f) = fail f - fold (Done r) = done r - -instance Monad (Progress step fail) where - return = pure - p >>= f = foldProgress Step Fail f p - -instance Applicative (Progress step fail) where - pure a = Done a - p <*> x = foldProgress Step Fail (flip fmap x) p - -instance Monoid fail => Alternative (Progress step fail) where - empty = Fail mempty - p <|> q = foldProgress Step (const q) Done p - --- | 'PackageConstraint' labeled with its source. -data LabeledPackageConstraint - = LabeledPackageConstraint PackageConstraint ConstraintSource - -unlabelPackageConstraint :: LabeledPackageConstraint -> PackageConstraint -unlabelPackageConstraint (LabeledPackageConstraint pc _) = pc - --- | Source of a 'PackageConstraint'. -data ConstraintSource = - - -- | Main config file, which is ~/.cabal/config by default. - ConstraintSourceMainConfig FilePath - - -- | Local cabal.project file - | ConstraintSourceProjectConfig FilePath - - -- | Sandbox config file, which is ./cabal.sandbox.config by default. - | ConstraintSourceSandboxConfig FilePath - - -- | User config file, which is ./cabal.config by default. - | ConstraintSourceUserConfig FilePath - - -- | Flag specified on the command line. - | ConstraintSourceCommandlineFlag - - -- | Target specified by the user, e.g., @cabal install package-0.1.0.0@ - -- implies @package==0.1.0.0@. - | ConstraintSourceUserTarget - - -- | Internal requirement to use installed versions of packages like ghc-prim. - | ConstraintSourceNonUpgradeablePackage - - -- | Internal requirement to use the add-source version of a package when that - -- version is installed and the source is modified. - | ConstraintSourceModifiedAddSourceDep - - -- | Internal constraint used by @cabal freeze@. - | ConstraintSourceFreeze - - -- | Constraint specified by a config file, a command line flag, or a user - -- target, when a more specific source is not known. - | ConstraintSourceConfigFlagOrTarget - - -- | The source of the constraint is not specified. - | ConstraintSourceUnknown - deriving (Eq, Show, Generic) - -instance Binary ConstraintSource - --- | Description of a 'ConstraintSource'. 
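The Progress type and foldProgress above are how the solver reports intermediate steps before a final result, and the doc comment suggests foldProgress (flip const) Left Right to collapse a Progress into a plain Either. The following standalone sketch re-defines a simplified Progress just to make that usage runnable; toEither and runLogged are illustrative names, not part of cabal-install.

-- Simplified stand-ins for the deleted Dependency.Types definitions,
-- only to demonstrate how foldProgress is consumed.
data Progress step fail done
  = Step step (Progress step fail done)
  | Fail fail
  | Done done

foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a)
             -> Progress step fail done -> a
foldProgress step fail done = go
  where
    go (Step s p) = step s (go p)
    go (Fail f)   = fail f
    go (Done d)   = done d

-- Drop the intermediate steps, keep only the final outcome.
toEither :: Progress step fail done -> Either fail done
toEither = foldProgress (flip const) Left Right

-- Print each step as it is produced, then return the outcome.
runLogged :: Show step => Progress step fail done -> IO (Either fail done)
runLogged = foldProgress (\s rest -> print s >> rest)
                         (return . Left)
                         (return . Right)

main :: IO ()
main = do
  let p = Step "resolving" (Step "checking" (Done (42 :: Int)))
            :: Progress String String Int
  r <- runLogged p          -- prints the two steps, then yields Right 42
  print r
  putStrLn (either id show (toEither p))   -- "42"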
-showConstraintSource :: ConstraintSource -> String -showConstraintSource (ConstraintSourceMainConfig path) = - "main config " ++ path -showConstraintSource (ConstraintSourceProjectConfig path) = - "project config " ++ path -showConstraintSource (ConstraintSourceSandboxConfig path) = - "sandbox config " ++ path -showConstraintSource (ConstraintSourceUserConfig path)= "user config " ++ path -showConstraintSource ConstraintSourceCommandlineFlag = "command line flag" -showConstraintSource ConstraintSourceUserTarget = "user target" -showConstraintSource ConstraintSourceNonUpgradeablePackage = - "non-upgradeable package" -showConstraintSource ConstraintSourceModifiedAddSourceDep = - "modified add-source dependency" -showConstraintSource ConstraintSourceFreeze = "cabal freeze" -showConstraintSource ConstraintSourceConfigFlagOrTarget = - "config file, command line flag, or user target" -showConstraintSource ConstraintSourceUnknown = "unknown source" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Dependency.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,890 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007 --- Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Top level interface to dependency resolution. ------------------------------------------------------------------------------ -module Distribution.Client.Dependency ( - -- * The main package dependency resolver - chooseSolver, - resolveDependencies, - Progress(..), - foldProgress, - - -- * Alternate, simple resolver that does not do dependencies recursively - resolveWithoutDependencies, - - -- * Constructing resolver policies - DepResolverParams(..), - PackageConstraint(..), - PackagesPreferenceDefault(..), - PackagePreference(..), - InstalledPreference(..), - - -- ** Standard policy - standardInstallPolicy, - PackageSpecifier(..), - - -- ** Sandbox policy - applySandboxInstallPolicy, - - -- ** Extra policy options - dontUpgradeNonUpgradeablePackages, - hideBrokenInstalledPackages, - upgradeDependencies, - reinstallTargets, - - -- ** Policy utils - addConstraints, - addPreferences, - setPreferenceDefault, - setReorderGoals, - setIndependentGoals, - setAvoidReinstalls, - setShadowPkgs, - setStrongFlags, - setMaxBackjumps, - addSourcePackages, - hideInstalledPackagesSpecificByUnitId, - hideInstalledPackagesSpecificBySourcePackageId, - hideInstalledPackagesAllVersions, - removeUpperBounds, - addDefaultSetupDependencies, - ) where - -import Distribution.Client.Dependency.TopDown - ( topDownResolver ) -import Distribution.Client.Dependency.Modular - ( modularResolver, SolverConfig(..) 
) -import qualified Distribution.Client.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.PkgConfigDb (PkgConfigDb) -import Distribution.Client.Types - ( SourcePackageDb(SourcePackageDb), SourcePackage(..) - , ConfiguredPackage(..), ConfiguredId(..) - , OptionalStanza(..), enableStanzas ) -import Distribution.Client.Dependency.Types - ( PreSolver(..), Solver(..), DependencyResolver, ResolverPackage(..) - , PackageConstraint(..), showPackageConstraint - , LabeledPackageConstraint(..), unlabelPackageConstraint - , ConstraintSource(..), showConstraintSource - , PackagePreferences(..), InstalledPreference(..) - , PackagesPreferenceDefault(..) - , Progress(..), foldProgress ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) ) -import Distribution.Client.Targets -import Distribution.Client.ComponentDeps (ComponentDeps) -import qualified Distribution.Client.ComponentDeps as CD -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Package - ( PackageName(..), PackageIdentifier(PackageIdentifier), PackageId - , Package(..), packageName, packageVersion - , UnitId, Dependency(Dependency)) -import qualified Distribution.PackageDescription as PD -import qualified Distribution.PackageDescription.Configuration as PD -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription ) -import Distribution.Client.PackageUtils - ( externalBuildDepends ) -import Distribution.Version - ( VersionRange, Version(..), anyVersion, orLaterVersion, thisVersion - , withinRange, simplifyVersionRange ) -import Distribution.Compiler - ( CompilerInfo(..) ) -import Distribution.System - ( Platform ) -import Distribution.Client.Utils - ( duplicates, duplicatesBy, mergeBy, MergeResult(..) ) -import Distribution.Simple.Utils - ( comparing, warn, info ) -import Distribution.Simple.Configure - ( relaxPackageDeps ) -import Distribution.Simple.Setup - ( AllowNewer(..) ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) - -import Data.List - ( foldl', sort, sortBy, nubBy, maximumBy, intercalate, nub ) -import Data.Function (on) -import Data.Maybe (fromMaybe) -import qualified Data.Map as Map -import qualified Data.Set as Set -import Data.Set (Set) -import Control.Exception - ( assert ) - - --- ------------------------------------------------------------ --- * High level planner policy --- ------------------------------------------------------------ - --- | The set of parameters to the dependency resolver. These parameters are --- relatively low level but many kinds of high level policies can be --- implemented in terms of adjustments to the parameters. 
--- -data DepResolverParams = DepResolverParams { - depResolverTargets :: [PackageName], - depResolverConstraints :: [LabeledPackageConstraint], - depResolverPreferences :: [PackagePreference], - depResolverPreferenceDefault :: PackagesPreferenceDefault, - depResolverInstalledPkgIndex :: InstalledPackageIndex, - depResolverSourcePkgIndex :: PackageIndex.PackageIndex SourcePackage, - depResolverReorderGoals :: Bool, - depResolverIndependentGoals :: Bool, - depResolverAvoidReinstalls :: Bool, - depResolverShadowPkgs :: Bool, - depResolverStrongFlags :: Bool, - depResolverMaxBackjumps :: Maybe Int - } - -showDepResolverParams :: DepResolverParams -> String -showDepResolverParams p = - "targets: " ++ intercalate ", " (map display (depResolverTargets p)) - ++ "\nconstraints: " - ++ concatMap (("\n " ++) . showLabeledConstraint) - (depResolverConstraints p) - ++ "\npreferences: " - ++ concatMap (("\n " ++) . showPackagePreference) - (depResolverPreferences p) - ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) - ++ "\nreorder goals: " ++ show (depResolverReorderGoals p) - ++ "\nindependent goals: " ++ show (depResolverIndependentGoals p) - ++ "\navoid reinstalls: " ++ show (depResolverAvoidReinstalls p) - ++ "\nshadow packages: " ++ show (depResolverShadowPkgs p) - ++ "\nstrong flags: " ++ show (depResolverStrongFlags p) - ++ "\nmax backjumps: " ++ maybe "infinite" show - (depResolverMaxBackjumps p) - where - showLabeledConstraint :: LabeledPackageConstraint -> String - showLabeledConstraint (LabeledPackageConstraint pc src) = - showPackageConstraint pc ++ " (" ++ showConstraintSource src ++ ")" - --- | A package selection preference for a particular package. --- --- Preferences are soft constraints that the dependency resolver should try to --- respect where possible. It is not specified if preferences on some packages --- are more important than others. --- -data PackagePreference = - - -- | A suggested constraint on the version number. - PackageVersionPreference PackageName VersionRange - - -- | If we prefer versions of packages that are already installed. - | PackageInstalledPreference PackageName InstalledPreference - - -- | If we would prefer to enable these optional stanzas - -- (i.e. test suites and/or benchmarks) - | PackageStanzasPreference PackageName [OptionalStanza] - - --- | Provide a textual representation of a package preference --- for debugging purposes. 
--- -showPackagePreference :: PackagePreference -> String -showPackagePreference (PackageVersionPreference pn vr) = - display pn ++ " " ++ display (simplifyVersionRange vr) -showPackagePreference (PackageInstalledPreference pn ip) = - display pn ++ " " ++ show ip -showPackagePreference (PackageStanzasPreference pn st) = - display pn ++ " " ++ show st - -basicDepResolverParams :: InstalledPackageIndex - -> PackageIndex.PackageIndex SourcePackage - -> DepResolverParams -basicDepResolverParams installedPkgIndex sourcePkgIndex = - DepResolverParams { - depResolverTargets = [], - depResolverConstraints = [], - depResolverPreferences = [], - depResolverPreferenceDefault = PreferLatestForSelected, - depResolverInstalledPkgIndex = installedPkgIndex, - depResolverSourcePkgIndex = sourcePkgIndex, - depResolverReorderGoals = False, - depResolverIndependentGoals = False, - depResolverAvoidReinstalls = False, - depResolverShadowPkgs = False, - depResolverStrongFlags = False, - depResolverMaxBackjumps = Nothing - } - -addTargets :: [PackageName] - -> DepResolverParams -> DepResolverParams -addTargets extraTargets params = - params { - depResolverTargets = extraTargets ++ depResolverTargets params - } - -addConstraints :: [LabeledPackageConstraint] - -> DepResolverParams -> DepResolverParams -addConstraints extraConstraints params = - params { - depResolverConstraints = extraConstraints - ++ depResolverConstraints params - } - -addPreferences :: [PackagePreference] - -> DepResolverParams -> DepResolverParams -addPreferences extraPreferences params = - params { - depResolverPreferences = extraPreferences - ++ depResolverPreferences params - } - -setPreferenceDefault :: PackagesPreferenceDefault - -> DepResolverParams -> DepResolverParams -setPreferenceDefault preferenceDefault params = - params { - depResolverPreferenceDefault = preferenceDefault - } - -setReorderGoals :: Bool -> DepResolverParams -> DepResolverParams -setReorderGoals b params = - params { - depResolverReorderGoals = b - } - -setIndependentGoals :: Bool -> DepResolverParams -> DepResolverParams -setIndependentGoals b params = - params { - depResolverIndependentGoals = b - } - -setAvoidReinstalls :: Bool -> DepResolverParams -> DepResolverParams -setAvoidReinstalls b params = - params { - depResolverAvoidReinstalls = b - } - -setShadowPkgs :: Bool -> DepResolverParams -> DepResolverParams -setShadowPkgs b params = - params { - depResolverShadowPkgs = b - } - -setStrongFlags :: Bool -> DepResolverParams -> DepResolverParams -setStrongFlags b params = - params { - depResolverStrongFlags = b - } - -setMaxBackjumps :: Maybe Int -> DepResolverParams -> DepResolverParams -setMaxBackjumps n params = - params { - depResolverMaxBackjumps = n - } - --- | Some packages are specific to a given compiler version and should never be --- upgraded. -dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams -dontUpgradeNonUpgradeablePackages params = - addConstraints extraConstraints params - where - extraConstraints = - [ LabeledPackageConstraint - (PackageConstraintInstalled pkgname) - ConstraintSourceNonUpgradeablePackage - | notElem (PackageName "base") (depResolverTargets params) - , pkgname <- map PackageName [ "base", "ghc-prim", "integer-gmp" - , "integer-simple" ] - , isInstalled pkgname ] - -- TODO: the top down resolver chokes on the base constraints - -- below when there are no targets and thus no dep on base. - -- Need to refactor constraints separate from needing packages. - isInstalled = not . null - . 
InstalledPackageIndex.lookupPackageName - (depResolverInstalledPkgIndex params) - -addSourcePackages :: [SourcePackage] - -> DepResolverParams -> DepResolverParams -addSourcePackages pkgs params = - params { - depResolverSourcePkgIndex = - foldl (flip PackageIndex.insert) - (depResolverSourcePkgIndex params) pkgs - } - -hideInstalledPackagesSpecificByUnitId :: [UnitId] - -> DepResolverParams - -> DepResolverParams -hideInstalledPackagesSpecificByUnitId pkgids params = - --TODO: this should work using exclude constraints instead - params { - depResolverInstalledPkgIndex = - foldl' (flip InstalledPackageIndex.deleteUnitId) - (depResolverInstalledPkgIndex params) pkgids - } - -hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] - -> DepResolverParams - -> DepResolverParams -hideInstalledPackagesSpecificBySourcePackageId pkgids params = - --TODO: this should work using exclude constraints instead - params { - depResolverInstalledPkgIndex = - foldl' (flip InstalledPackageIndex.deleteSourcePackageId) - (depResolverInstalledPkgIndex params) pkgids - } - -hideInstalledPackagesAllVersions :: [PackageName] - -> DepResolverParams -> DepResolverParams -hideInstalledPackagesAllVersions pkgnames params = - --TODO: this should work using exclude constraints instead - params { - depResolverInstalledPkgIndex = - foldl' (flip InstalledPackageIndex.deletePackageName) - (depResolverInstalledPkgIndex params) pkgnames - } - - -hideBrokenInstalledPackages :: DepResolverParams -> DepResolverParams -hideBrokenInstalledPackages params = - hideInstalledPackagesSpecificByUnitId pkgids params - where - pkgids = map Installed.installedUnitId - . InstalledPackageIndex.reverseDependencyClosure - (depResolverInstalledPkgIndex params) - . map (Installed.installedUnitId . fst) - . InstalledPackageIndex.brokenPackages - $ depResolverInstalledPkgIndex params - --- | Remove upper bounds in dependencies using the policy specified by the --- 'AllowNewer' argument (all/some/none). --- --- Note: It's important to apply 'removeUpperBounds' after --- 'addSourcePackages'. Otherwise, the packages inserted by --- 'addSourcePackages' won't have upper bounds in dependencies relaxed. --- -removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams -removeUpperBounds AllowNewerNone params = params -removeUpperBounds allowNewer params = - params { - depResolverSourcePkgIndex = sourcePkgIndex' - } - where - sourcePkgIndex' = fmap relaxDeps $ depResolverSourcePkgIndex params - - relaxDeps :: SourcePackage -> SourcePackage - relaxDeps srcPkg = srcPkg { - packageDescription = relaxPackageDeps allowNewer - (packageDescription srcPkg) - } - --- | Supply defaults for packages without explicit Setup dependencies --- --- Note: It's important to apply 'addDefaultSetupDepends' after --- 'addSourcePackages'. Otherwise, the packages inserted by --- 'addSourcePackages' won't have upper bounds in dependencies relaxed. 
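The functions above (addConstraints, setReorderGoals, hideBrokenInstalledPackages and friends) all have the shape DepResolverParams -> DepResolverParams, so a policy is just a composition of adjusters applied to a base record, as standardInstallPolicy does further on in this file. A minimal standalone sketch of that pattern, with a hypothetical Params record standing in for DepResolverParams:

-- Hypothetical, cut-down stand-in for DepResolverParams.
data Params = Params
  { targets      :: [String]
  , maxBackjumps :: Maybe Int
  , reorderGoals :: Bool
  } deriving Show

defaultParams :: Params
defaultParams = Params { targets = [], maxBackjumps = Nothing, reorderGoals = False }

addTargets' :: [String] -> Params -> Params
addTargets' ts p = p { targets = ts ++ targets p }

setMaxBackjumps' :: Maybe Int -> Params -> Params
setMaxBackjumps' n p = p { maxBackjumps = n }

setReorderGoals' :: Bool -> Params -> Params
setReorderGoals' b p = p { reorderGoals = b }

-- A "policy" is simply a composition of adjusters applied to a base value.
examplePolicy :: Params
examplePolicy =
    addTargets' ["foo", "bar"]
  . setMaxBackjumps' (Just 2000)
  . setReorderGoals' True
  $ defaultParams

main :: IO ()
main = print examplePolicy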
--- -addDefaultSetupDependencies :: (SourcePackage -> Maybe [Dependency]) - -> DepResolverParams -> DepResolverParams -addDefaultSetupDependencies defaultSetupDeps params = - params { - depResolverSourcePkgIndex = - fmap applyDefaultSetupDeps (depResolverSourcePkgIndex params) - } - where - applyDefaultSetupDeps :: SourcePackage -> SourcePackage - applyDefaultSetupDeps srcpkg = - srcpkg { - packageDescription = gpkgdesc { - PD.packageDescription = pkgdesc { - PD.setupBuildInfo = - case PD.setupBuildInfo pkgdesc of - Just sbi -> Just sbi - Nothing -> case defaultSetupDeps srcpkg of - Nothing -> Nothing - Just deps -> Just PD.SetupBuildInfo { - PD.defaultSetupDepends = True, - PD.setupDepends = deps - } - } - } - } - where - gpkgdesc = packageDescription srcpkg - pkgdesc = PD.packageDescription gpkgdesc - - -upgradeDependencies :: DepResolverParams -> DepResolverParams -upgradeDependencies = setPreferenceDefault PreferAllLatest - - -reinstallTargets :: DepResolverParams -> DepResolverParams -reinstallTargets params = - hideInstalledPackagesAllVersions (depResolverTargets params) params - - -standardInstallPolicy :: InstalledPackageIndex - -> SourcePackageDb - -> [PackageSpecifier SourcePackage] - -> DepResolverParams -standardInstallPolicy - installedPkgIndex (SourcePackageDb sourcePkgIndex sourcePkgPrefs) - pkgSpecifiers - - = addPreferences - [ PackageVersionPreference name ver - | (name, ver) <- Map.toList sourcePkgPrefs ] - - . addConstraints - (concatMap pkgSpecifierConstraints pkgSpecifiers) - - . addTargets - (map pkgSpecifierTarget pkgSpecifiers) - - . hideInstalledPackagesSpecificBySourcePackageId - [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] - - . addDefaultSetupDependencies mkDefaultSetupDeps - - . addSourcePackages - [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] - - $ basicDepResolverParams - installedPkgIndex sourcePkgIndex - - where - -- Force Cabal >= 1.24 dep when the package is affected by #3199. - mkDefaultSetupDeps :: SourcePackage -> Maybe [Dependency] - mkDefaultSetupDeps srcpkg | affected = - Just [Dependency (PackageName "Cabal") - (orLaterVersion $ Version [1,24] [])] - | otherwise = Nothing - where - gpkgdesc = packageDescription srcpkg - pkgdesc = PD.packageDescription gpkgdesc - bt = fromMaybe PD.Custom (PD.buildType pkgdesc) - affected = bt == PD.Custom && hasBuildableFalse gpkgdesc - - -- Does this package contain any components with non-empty 'build-depends' - -- and a 'buildable' field that could potentially be set to 'False'? False - -- positives are possible. - hasBuildableFalse :: PD.GenericPackageDescription -> Bool - hasBuildableFalse gpkg = - not (all alwaysTrue (zipWith PD.cOr buildableConditions noDepConditions)) - where - buildableConditions = PD.extractConditions PD.buildable gpkg - noDepConditions = PD.extractConditions - (null . PD.targetBuildDepends) gpkg - alwaysTrue (PD.Lit True) = True - alwaysTrue _ = False - - -applySandboxInstallPolicy :: SandboxPackageInfo - -> DepResolverParams - -> DepResolverParams -applySandboxInstallPolicy - (SandboxPackageInfo modifiedDeps otherDeps allSandboxPkgs _allDeps) - params - - = addPreferences [ PackageInstalledPreference n PreferInstalled - | n <- installedNotModified ] - - . addTargets installedNotModified - - . addPreferences - [ PackageVersionPreference (packageName pkg) - (thisVersion (packageVersion pkg)) | pkg <- otherDeps ] - - . 
addConstraints - [ let pc = PackageConstraintVersion (packageName pkg) - (thisVersion (packageVersion pkg)) - in LabeledPackageConstraint pc ConstraintSourceModifiedAddSourceDep - | pkg <- modifiedDeps ] - - . addTargets [ packageName pkg | pkg <- modifiedDeps ] - - . hideInstalledPackagesSpecificBySourcePackageId - [ packageId pkg | pkg <- modifiedDeps ] - - -- We don't need to add source packages for add-source deps to the - -- 'installedPkgIndex' since 'getSourcePackages' did that for us. - - $ params - - where - installedPkgIds = - map fst . InstalledPackageIndex.allPackagesBySourcePackageId - $ allSandboxPkgs - modifiedPkgIds = map packageId modifiedDeps - installedNotModified = [ packageName pkg | pkg <- installedPkgIds, - pkg `notElem` modifiedPkgIds ] - --- ------------------------------------------------------------ --- * Interface to the standard resolver --- ------------------------------------------------------------ - -chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver -chooseSolver verbosity preSolver _cinfo = - case preSolver of - AlwaysTopDown -> do - warn verbosity "Topdown solver is deprecated" - return TopDown - AlwaysModular -> do - return Modular - Choose -> do - info verbosity "Choosing modular solver." - return Modular - -runSolver :: Solver -> SolverConfig -> DependencyResolver -runSolver TopDown = const topDownResolver -- TODO: warn about unsupported options -runSolver Modular = modularResolver - --- | Run the dependency solver. --- --- Since this is potentially an expensive operation, the result is wrapped in a --- a 'Progress' structure that can be unfolded to provide progress information, --- logging messages and the final result or an error. --- -resolveDependencies :: Platform - -> CompilerInfo - -> PkgConfigDb - -> Solver - -> DepResolverParams - -> Progress String String InstallPlan - - --TODO: is this needed here? see dontUpgradeNonUpgradeablePackages -resolveDependencies platform comp _pkgConfigDB _solver params - | null (depResolverTargets params) - = return (validateSolverResult platform comp indGoals []) - where - indGoals = depResolverIndependentGoals params - -resolveDependencies platform comp pkgConfigDB solver params = - - Step (showDepResolverParams finalparams) - $ fmap (validateSolverResult platform comp indGoals) - $ runSolver solver (SolverConfig reorderGoals indGoals noReinstalls - shadowing strFlags maxBkjumps) - platform comp installedPkgIndex sourcePkgIndex - pkgConfigDB preferences constraints targets - where - - finalparams @ (DepResolverParams - targets constraints - prefs defpref - installedPkgIndex - sourcePkgIndex - reorderGoals - indGoals - noReinstalls - shadowing - strFlags - maxBkjumps) = dontUpgradeNonUpgradeablePackages - -- TODO: - -- The modular solver can properly deal with broken - -- packages and won't select them. So the - -- 'hideBrokenInstalledPackages' function should be moved - -- into a module that is specific to the top-down solver. - . (if solver /= Modular then hideBrokenInstalledPackages - else id) - $ params - - preferences = interpretPackagesPreference - (Set.fromList targets) defpref prefs - - --- | Give an interpretation to the global 'PackagesPreference' as --- specific per-package 'PackageVersionPreference'. 
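The definition that follows turns a global default plus per-package overrides into a total PackageName -> PackagePreferences function. At its core this is an ordinary lookup-with-default over a Map; a simplified standalone sketch of that step (InstalledPref and installPrefFor are hypothetical stand-ins, not the real types):

import qualified Data.Map as Map
import Data.Maybe (fromMaybe)

data InstalledPref = PreferInstalled | PreferLatest
  deriving (Eq, Show)

-- Collapse a list of (package, preference) overrides into a total
-- function, falling back to a default for packages with no override.
installPrefFor :: InstalledPref                 -- default preference
               -> [(String, InstalledPref)]     -- per-package overrides
               -> (String -> InstalledPref)
installPrefFor def overrides pkg =
    fromMaybe def (Map.lookup pkg table)
  where
    table = Map.fromList overrides   -- for duplicate keys the last one wins

main :: IO ()
main = do
  let pref = installPrefFor PreferInstalled [("foo", PreferLatest)]
  print (pref "foo")   -- PreferLatest
  print (pref "base")  -- PreferInstalled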
--- -interpretPackagesPreference :: Set PackageName - -> PackagesPreferenceDefault - -> [PackagePreference] - -> (PackageName -> PackagePreferences) -interpretPackagesPreference selected defaultPref prefs = - \pkgname -> PackagePreferences (versionPref pkgname) - (installPref pkgname) - (stanzasPref pkgname) - where - versionPref pkgname = - fromMaybe [anyVersion] (Map.lookup pkgname versionPrefs) - versionPrefs = Map.fromListWith (++) - [(pkgname, [pref]) - | PackageVersionPreference pkgname pref <- prefs] - - installPref pkgname = - fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs) - installPrefs = Map.fromList - [ (pkgname, pref) - | PackageInstalledPreference pkgname pref <- prefs ] - installPrefDefault = case defaultPref of - PreferAllLatest -> const PreferLatest - PreferAllInstalled -> const PreferInstalled - PreferLatestForSelected -> \pkgname -> - -- When you say cabal install foo, what you really mean is, prefer the - -- latest version of foo, but the installed version of everything else - if pkgname `Set.member` selected then PreferLatest - else PreferInstalled - - stanzasPref pkgname = - fromMaybe [] (Map.lookup pkgname stanzasPrefs) - stanzasPrefs = Map.fromListWith (\a b -> nub (a ++ b)) - [ (pkgname, pref) - | PackageStanzasPreference pkgname pref <- prefs ] - - --- ------------------------------------------------------------ --- * Checking the result of the solver --- ------------------------------------------------------------ - --- | Make an install plan from the output of the dep resolver. --- It checks that the plan is valid, or it's an error in the dep resolver. --- -validateSolverResult :: Platform - -> CompilerInfo - -> Bool - -> [ResolverPackage] - -> InstallPlan -validateSolverResult platform comp indepGoals pkgs = - case planPackagesProblems platform comp pkgs of - [] -> case InstallPlan.new indepGoals index of - Right plan -> plan - Left problems -> error (formatPlanProblems problems) - problems -> error (formatPkgProblems problems) - - where - index = InstalledPackageIndex.fromList (map toPlanPackage pkgs) - - toPlanPackage (PreExisting pkg) = InstallPlan.PreExisting pkg - toPlanPackage (Configured pkg) = InstallPlan.Configured pkg - - formatPkgProblems = formatProblemMessage . map showPlanPackageProblem - formatPlanProblems = formatProblemMessage . map InstallPlan.showPlanProblem - - formatProblemMessage problems = - unlines $ - "internal error: could not construct a valid install plan." 
- : "The proposed (invalid) plan contained the following problems:" - : problems - ++ "Proposed plan:" - : [InstallPlan.showPlanIndex index] - - -data PlanPackageProblem = - InvalidConfiguredPackage ConfiguredPackage [PackageProblem] - -showPlanPackageProblem :: PlanPackageProblem -> String -showPlanPackageProblem (InvalidConfiguredPackage pkg packageProblems) = - "Package " ++ display (packageId pkg) - ++ " has an invalid configuration, in particular:\n" - ++ unlines [ " " ++ showPackageProblem problem - | problem <- packageProblems ] - -planPackagesProblems :: Platform -> CompilerInfo - -> [ResolverPackage] - -> [PlanPackageProblem] -planPackagesProblems platform cinfo pkgs = - [ InvalidConfiguredPackage pkg packageProblems - | Configured pkg <- pkgs - , let packageProblems = configuredPackageProblems platform cinfo pkg - , not (null packageProblems) ] - -data PackageProblem = DuplicateFlag PD.FlagName - | MissingFlag PD.FlagName - | ExtraFlag PD.FlagName - | DuplicateDeps [PackageId] - | MissingDep Dependency - | ExtraDep PackageId - | InvalidDep Dependency PackageId - -showPackageProblem :: PackageProblem -> String -showPackageProblem (DuplicateFlag (PD.FlagName flag)) = - "duplicate flag in the flag assignment: " ++ flag - -showPackageProblem (MissingFlag (PD.FlagName flag)) = - "missing an assignment for the flag: " ++ flag - -showPackageProblem (ExtraFlag (PD.FlagName flag)) = - "extra flag given that is not used by the package: " ++ flag - -showPackageProblem (DuplicateDeps pkgids) = - "duplicate packages specified as selected dependencies: " - ++ intercalate ", " (map display pkgids) - -showPackageProblem (MissingDep dep) = - "the package has a dependency " ++ display dep - ++ " but no package has been selected to satisfy it." - -showPackageProblem (ExtraDep pkgid) = - "the package configuration specifies " ++ display pkgid - ++ " but (with the given flag assignment) the package does not actually" - ++ " depend on any version of that package." - -showPackageProblem (InvalidDep dep pkgid) = - "the package depends on " ++ display dep - ++ " but the configuration specifies " ++ display pkgid - ++ " which does not satisfy the dependency." - --- | A 'ConfiguredPackage' is valid if the flag assignment is total and if --- in the configuration given by the flag assignment, all the package --- dependencies are satisfied by the specified packages. 
--- -configuredPackageProblems :: Platform -> CompilerInfo - -> ConfiguredPackage -> [PackageProblem] -configuredPackageProblems platform cinfo - (ConfiguredPackage pkg specifiedFlags stanzas specifiedDeps') = - [ DuplicateFlag flag | ((flag,_):_) <- duplicates specifiedFlags ] - ++ [ MissingFlag flag | OnlyInLeft flag <- mergedFlags ] - ++ [ ExtraFlag flag | OnlyInRight flag <- mergedFlags ] - ++ [ DuplicateDeps pkgs - | pkgs <- CD.nonSetupDeps (fmap (duplicatesBy (comparing packageName)) - specifiedDeps) ] - ++ [ MissingDep dep | OnlyInLeft dep <- mergedDeps ] - ++ [ ExtraDep pkgid | OnlyInRight pkgid <- mergedDeps ] - ++ [ InvalidDep dep pkgid | InBoth dep pkgid <- mergedDeps - , not (packageSatisfiesDependency pkgid dep) ] - where - specifiedDeps :: ComponentDeps [PackageId] - specifiedDeps = fmap (map confSrcId) specifiedDeps' - - mergedFlags = mergeBy compare - (sort $ map PD.flagName (PD.genPackageFlags (packageDescription pkg))) - (sort $ map fst specifiedFlags) - - packageSatisfiesDependency - (PackageIdentifier name version) - (Dependency name' versionRange) = assert (name == name') $ - version `withinRange` versionRange - - dependencyName (Dependency name _) = name - - mergedDeps :: [MergeResult Dependency PackageId] - mergedDeps = mergeDeps requiredDeps (CD.flatDeps specifiedDeps) - - mergeDeps :: [Dependency] -> [PackageId] - -> [MergeResult Dependency PackageId] - mergeDeps required specified = - let sortNubOn f = nubBy ((==) `on` f) . sortBy (compare `on` f) in - mergeBy - (\dep pkgid -> dependencyName dep `compare` packageName pkgid) - (sortNubOn dependencyName required) - (sortNubOn packageName specified) - - -- TODO: It would be nicer to use ComponentDeps here so we can be more - -- precise in our checks. That's a bit tricky though, as this currently - -- relies on the 'buildDepends' field of 'PackageDescription'. (OTOH, that - -- field is deprecated and should be removed anyway.) As long as we _do_ - -- use a flat list here, we have to allow for duplicates when we fold - -- specifiedDeps; once we have proper ComponentDeps here we should get rid - -- of the `nubOn` in `mergeDeps`. - requiredDeps :: [Dependency] - requiredDeps = - --TODO: use something lower level than finalizePackageDescription - case finalizePackageDescription specifiedFlags - (const True) - platform cinfo - [] - (enableStanzas stanzas $ packageDescription pkg) of - Right (resolvedPkg, _) -> - externalBuildDepends resolvedPkg - ++ maybe [] PD.setupDepends (PD.setupBuildInfo resolvedPkg) - Left _ -> - error "configuredPackageInvalidDeps internal error" - - --- ------------------------------------------------------------ --- * Simple resolver that ignores dependencies --- ------------------------------------------------------------ - --- | A simplistic method of resolving a list of target package names to --- available packages. --- --- Specifically, it does not consider package dependencies at all. Unlike --- 'resolveDependencies', no attempt is made to ensure that the selected --- packages have dependencies that are satisfiable or consistent with --- each other. --- --- It is suitable for tasks such as selecting packages to download for user --- inspection. It is not suitable for selecting packages to install. --- --- Note: if no installed package index is available, it is OK to pass 'mempty'. --- It simply means preferences for installed packages will be ignored. 
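configuredPackageProblems above classifies flags and dependencies by merging two sorted lists with mergeBy from Distribution.Client.Utils, tagging entries as OnlyInLeft (missing), OnlyInRight (extra) or InBoth. A self-contained sketch of that merge, with a simplified re-definition of MergeResult for demonstration only:

data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b
  deriving Show

-- Merge two lists that are sorted consistently with the comparison,
-- classifying each element, in the style of the deleted checks above.
mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b]
mergeBy cmp = go
  where
    go []     ys     = map OnlyInRight ys
    go xs     []     = map OnlyInLeft xs
    go (x:xs) (y:ys) = case cmp x y of
      LT -> OnlyInLeft x  : go xs (y:ys)
      EQ -> InBoth x y    : go xs ys
      GT -> OnlyInRight y : go (x:xs) ys

main :: IO ()
main = do
  let required = ["base", "bytestring", "containers"]   -- sorted
      selected = ["base", "containers", "text"]         -- sorted
  -- Among the results: OnlyInLeft "bytestring" is a missing dependency,
  -- OnlyInRight "text" is an extra, unrequested package.
  mapM_ print (mergeBy compare required selected)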
--- -resolveWithoutDependencies :: DepResolverParams - -> Either [ResolveNoDepsError] [SourcePackage] -resolveWithoutDependencies (DepResolverParams targets constraints - prefs defpref installedPkgIndex sourcePkgIndex - _reorderGoals _indGoals _avoidReinstalls - _shadowing _strFlags _maxBjumps) = - collectEithers (map selectPackage targets) - where - selectPackage :: PackageName -> Either ResolveNoDepsError SourcePackage - selectPackage pkgname - | null choices = Left $! ResolveUnsatisfiable pkgname requiredVersions - | otherwise = Right $! maximumBy bestByPrefs choices - - where - -- Constraints - requiredVersions = packageConstraints pkgname - pkgDependency = Dependency pkgname requiredVersions - choices = PackageIndex.lookupDependency sourcePkgIndex - pkgDependency - - -- Preferences - PackagePreferences preferredVersions preferInstalled _ - = packagePreferences pkgname - - bestByPrefs = comparing $ \pkg -> - (installPref pkg, versionPref pkg, packageVersion pkg) - installPref = case preferInstalled of - PreferLatest -> const False - PreferInstalled -> not . null - . InstalledPackageIndex.lookupSourcePackageId - installedPkgIndex - . packageId - versionPref pkg = length . filter (packageVersion pkg `withinRange`) $ - preferredVersions - - packageConstraints :: PackageName -> VersionRange - packageConstraints pkgname = - Map.findWithDefault anyVersion pkgname packageVersionConstraintMap - packageVersionConstraintMap = - let pcs = map unlabelPackageConstraint constraints - in Map.fromList [ (name, range) - | PackageConstraintVersion name range <- pcs ] - - packagePreferences :: PackageName -> PackagePreferences - packagePreferences = interpretPackagesPreference - (Set.fromList targets) defpref prefs - - -collectEithers :: [Either a b] -> Either [a] [b] -collectEithers = collect . partitionEithers - where - collect ([], xs) = Right xs - collect (errs,_) = Left errs - partitionEithers :: [Either a b] -> ([a],[b]) - partitionEithers = foldr (either left right) ([],[]) - where - left a (l, r) = (a:l, r) - right a (l, r) = (l, a:r) - --- | Errors for 'resolveWithoutDependencies'. --- -data ResolveNoDepsError = - - -- | A package name which cannot be resolved to a specific package. - -- Also gives the constraint on the version and whether there was - -- a constraint on the package being installed. - ResolveUnsatisfiable PackageName VersionRange - -instance Show ResolveNoDepsError where - show (ResolveUnsatisfiable name ver) = - "There is no available version of " ++ display name - ++ " that satisfies " ++ display (simplifyVersionRange ver) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/DistDirLayout.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/DistDirLayout.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/DistDirLayout.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/DistDirLayout.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} - --- | --- --- The layout of the .\/dist\/ directory where cabal keeps all of it's state --- and build artifacts. 
--- -module Distribution.Client.DistDirLayout where - -import System.FilePath -import Distribution.Package - ( PackageId ) -import Distribution.Compiler -import Distribution.Simple.Compiler (PackageDB(..)) -import Distribution.Text -import Distribution.Client.Types - ( InstalledPackageId ) - - - --- | The layout of the project state directory. Traditionally this has been --- called the @dist@ directory. --- -data DistDirLayout = DistDirLayout { - - -- | The dist directory, which is the root of where cabal keeps all its - -- state including the build artifacts from each package we build. - -- - distDirectory :: FilePath, - - -- | The directory under dist where we keep the build artifacts for a - -- package we're building from a local directory. - -- - -- This uses a 'PackageId' not just a 'PackageName' because technically - -- we can have multiple instances of the same package in a solution - -- (e.g. setup deps). - -- - distBuildDirectory :: PackageId -> FilePath, - distBuildRootDirectory :: FilePath, - - -- | The directory under dist where we put the unpacked sources of - -- packages, in those cases where it makes sense to keep the build - -- artifacts to reduce rebuild times. These can be tarballs or could be - -- scm repos. - -- - distUnpackedSrcDirectory :: PackageId -> FilePath, - distUnpackedSrcRootDirectory :: FilePath, - - -- | The location for project-wide cache files (e.g. state used in - -- incremental rebuilds). - -- - distProjectCacheFile :: String -> FilePath, - distProjectCacheDirectory :: FilePath, - - -- | The location for package-specific cache files (e.g. state used in - -- incremental rebuilds). - -- - distPackageCacheFile :: PackageId -> String -> FilePath, - distPackageCacheDirectory :: PackageId -> FilePath, - - distTempDirectory :: FilePath, - distBinDirectory :: FilePath, - - distPackageDB :: CompilerId -> PackageDB - } - - - ---TODO: move to another module, e.g. CabalDirLayout? -data CabalDirLayout = CabalDirLayout { - cabalStoreDirectory :: CompilerId -> FilePath, - cabalStorePackageDirectory :: CompilerId -> InstalledPackageId - -> FilePath, - cabalStorePackageDBPath :: CompilerId -> FilePath, - cabalStorePackageDB :: CompilerId -> PackageDB, - - cabalPackageCacheDirectory :: FilePath, - cabalLogsDirectory :: FilePath, - cabalWorldFile :: FilePath - } - - -defaultDistDirLayout :: FilePath -> DistDirLayout -defaultDistDirLayout projectRootDirectory = - DistDirLayout {..} - where - distDirectory = projectRootDirectory "dist-newstyle" - --TODO: switch to just dist at some point, or some other new name - - distBuildRootDirectory = distDirectory "build" - distBuildDirectory pkgid = distBuildRootDirectory display pkgid - - distUnpackedSrcRootDirectory = distDirectory "src" - distUnpackedSrcDirectory pkgid = distUnpackedSrcRootDirectory - display pkgid - - distProjectCacheDirectory = distDirectory "cache" - distProjectCacheFile name = distProjectCacheDirectory name - - distPackageCacheDirectory pkgid = distBuildDirectory pkgid "cache" - distPackageCacheFile pkgid name = distPackageCacheDirectory pkgid name - - distTempDirectory = distDirectory "tmp" - - distBinDirectory = distDirectory "bin" - - distPackageDBPath compid = distDirectory "packagedb" display compid - distPackageDB = SpecificPackageDB . 
distPackageDBPath - - - -defaultCabalDirLayout :: FilePath -> CabalDirLayout -defaultCabalDirLayout cabalDir = - CabalDirLayout {..} - where - - cabalStoreDirectory compid = - cabalDir "store" display compid - - cabalStorePackageDirectory compid ipkgid = - cabalStoreDirectory compid display ipkgid - - cabalStorePackageDBPath compid = - cabalStoreDirectory compid "package.db" - - cabalStorePackageDB = - SpecificPackageDB . cabalStorePackageDBPath - - cabalPackageCacheDirectory = cabalDir "packages" - - cabalLogsDirectory = cabalDir "logs" - - cabalWorldFile = cabalDir "world" - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Exec.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Exec.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Exec.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Exec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Exec --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of the 'exec' command. Runs an arbitrary executable in an --- environment suitable for making use of the sandbox. ------------------------------------------------------------------------------ - -module Distribution.Client.Exec ( exec - ) where - -import Control.Monad (unless) - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS - -import Distribution.Client.Sandbox (getSandboxConfigFilePath) -import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath) -import Distribution.Client.Sandbox.Types (UseSandbox (..)) - -import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor) -import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram) -import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath) -import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..)) -import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation) -import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) ) -import Distribution.Simple.Utils (die, warn) - -import Distribution.System (Platform) -import Distribution.Verbosity (Verbosity) - -import System.Directory ( doesDirectoryExist ) -import System.FilePath (searchPathSeparator, ()) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative ((<$>)) -import Data.Monoid (mempty) -#endif - - --- | Execute the given command in the package's environment. --- --- The given command is executed with GHC configured to use the correct --- package database and with the sandbox bin directory added to the PATH. 
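defaultDistDirLayout and defaultCabalDirLayout above populate a record of path-computing functions from a single root directory using System.FilePath's </> operator. A cut-down standalone sketch of that record-of-paths pattern (Layout and its fields are hypothetical names, not the real DistDirLayout):

import System.FilePath ((</>), (<.>))

-- Hypothetical analogue of DistDirLayout: a root plus derived locations.
data Layout = Layout
  { distDirectory     :: FilePath
  , buildDirectoryFor :: String -> FilePath   -- keyed by a package id string
  , cacheFileFor      :: String -> FilePath
  }

defaultLayout :: FilePath -> Layout
defaultLayout projectRoot = Layout
  { distDirectory     = distDir
  , buildDirectoryFor = \pkgid -> distDir </> "build" </> pkgid
  , cacheFileFor      = \name  -> distDir </> "cache" </> name <.> "cache"
  }
  where
    distDir = projectRoot </> "dist-newstyle"

main :: IO ()
main = do
  let layout = defaultLayout "/home/user/proj"
  putStrLn (buildDirectoryFor layout "foo-1.0")
  putStrLn (cacheFileFor layout "plan")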
-exec :: Verbosity - -> UseSandbox - -> Compiler - -> Platform - -> ProgramDb - -> [String] - -> IO () -exec verbosity useSandbox comp platform programDb extraArgs = - case extraArgs of - (exe:args) -> do - program <- requireProgram' verbosity useSandbox programDb exe - env <- ((++) (programOverrideEnv program)) <$> environmentOverrides - let invocation = programInvocation - program { programOverrideEnv = env } - args - runProgramInvocation verbosity invocation - - [] -> die "Please specify an executable to run" - where - environmentOverrides = - case useSandbox of - NoSandbox -> return [] - (UseSandbox sandboxDir) -> - sandboxEnvironment verbosity sandboxDir comp platform programDb - - --- | Return the package's sandbox environment. --- --- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox. -sandboxEnvironment :: Verbosity - -> FilePath - -> Compiler - -> Platform - -> ProgramDb - -> IO [(String, Maybe String)] -sandboxEnvironment verbosity sandboxDir comp platform programDb = - case compilerFlavor comp of - GHC -> env GHC.getGlobalPackageDB ghcProgram "GHC_PACKAGE_PATH" - GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH" - _ -> die "exec only works with GHC and GHCJS" - where - env getGlobalPackageDB hcProgram packagePathEnvVar = do - let Just program = lookupProgram hcProgram programDb - gDb <- getGlobalPackageDB verbosity program - sandboxConfigFilePath <- getSandboxConfigFilePath mempty - let sandboxPackagePath = sandboxPackageDBPath sandboxDir comp platform - compilerPackagePaths = prependToSearchPath gDb sandboxPackagePath - -- Packages database must exist, otherwise things will start - -- failing in mysterious ways. - exists <- doesDirectoryExist sandboxPackagePath - unless exists $ warn verbosity $ "Package database is not a directory: " - ++ sandboxPackagePath - -- Build the environment - return [ (packagePathEnvVar, Just compilerPackagePaths) - , ("CABAL_SANDBOX_PACKAGE_PATH", Just compilerPackagePaths) - , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath) - ] - - prependToSearchPath path newValue = - newValue ++ [searchPathSeparator] ++ path - - --- | Check that a program is configured and available to be run. If --- a sandbox is available check in the sandbox's directory. 
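sandboxEnvironment above builds the GHC_PACKAGE_PATH value by joining the sandbox package database and GHC's global database with searchPathSeparator (via prependToSearchPath, sandbox database first). A tiny standalone sketch of that joining step; the database paths shown are made up:

import Data.List (intercalate)
import System.FilePath (searchPathSeparator)

-- Join package databases into a single search-path style value,
-- mirroring prependToSearchPath in the deleted code above.
packagePathValue :: [FilePath] -> String
packagePathValue = intercalate [searchPathSeparator]

main :: IO ()
main = putStrLn $ packagePathValue
  [ "/home/user/proj/.cabal-sandbox/x86_64-linux-ghc-8.0.1-packages.conf.d"
  , "/opt/ghc/8.0.1/lib/ghc-8.0.1/package.conf.d"
  ]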
-requireProgram' :: Verbosity - -> UseSandbox - -> ProgramDb - -> String - -> IO ConfiguredProgram -requireProgram' verbosity useSandbox programDb exe = do - (program, _) <- requireProgram - verbosity - (simpleProgram exe) - updateSearchPath - return program - where - updateSearchPath = - flip modifyProgramSearchPath programDb $ \searchPath -> - case useSandbox of - NoSandbox -> searchPath - UseSandbox sandboxDir -> - ProgramSearchPathDir (sandboxDir "bin") : searchPath diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Fetch.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Fetch.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Fetch.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Fetch.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,199 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Fetch --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal fetch command ------------------------------------------------------------------------------ -module Distribution.Client.Fetch ( - fetch, - ) where - -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.FetchUtils hiding (fetchPackage) -import Distribution.Client.Dependency -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.PkgConfigDb - ( PkgConfigDb, readPkgConfigDb ) -import Distribution.Client.Setup - ( GlobalFlags(..), FetchFlags(..), RepoContext(..) ) - -import Distribution.Package - ( packageId ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, PackageDBStack ) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Program - ( ProgramConfiguration ) -import Distribution.Simple.Setup - ( fromFlag ) -import Distribution.Simple.Utils - ( die, notice, debug ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) - -import Control.Monad - ( filterM ) - --- ------------------------------------------------------------ --- * The fetch command --- ------------------------------------------------------------ - ---TODO: --- * add fetch -o support --- * support tarball URLs via ad-hoc download cache (or in -o mode?) --- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied --- * Port various flags from install: --- * --updage-dependencies --- * --constraint and --preference --- * --only-dependencies, but note it conflicts with --no-deps - - --- | Fetch a list of packages and their dependencies. --- -fetch :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> GlobalFlags - -> FetchFlags - -> [UserTarget] - -> IO () -fetch verbosity _ _ _ _ _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." 
- -fetch verbosity packageDBs repoCtxt comp platform conf - globalFlags fetchFlags userTargets = do - - mapM_ checkTarget userTargets - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity conf - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - - pkgs <- planPackages - verbosity comp platform fetchFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - - pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs - if null pkgs' - --TODO: when we add support for remote tarballs then this message - -- will need to be changed because for remote tarballs we fetch them - -- at the earlier phase. - then notice verbosity $ "No packages need to be fetched. " - ++ "All the requested packages are already local " - ++ "or cached locally." - else if dryRun - then notice verbosity $ unlines $ - "The following packages would be fetched:" - : map (display . packageId) pkgs' - - else mapM_ (fetchPackage verbosity repoCtxt . packageSource) pkgs' - - where - dryRun = fromFlag (fetchDryRun fetchFlags) - -planPackages :: Verbosity - -> Compiler - -> Platform - -> FetchFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier SourcePackage] - -> IO [SourcePackage] -planPackages verbosity comp platform fetchFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - - | includeDependencies = do - solver <- chooseSolver verbosity - (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) - notice verbosity "Resolving dependencies..." - installPlan <- foldProgress logMsg die return $ - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - -- The packages we want to fetch are those packages the 'InstallPlan' - -- that are in the 'InstallPlan.Configured' state. - return - [ pkg - | (InstallPlan.Configured (ConfiguredPackage pkg _ _ _)) - <- InstallPlan.toList installPlan ] - - | otherwise = - either (die . unlines . map show) return $ - resolveWithoutDependencies resolverParams - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - -- Reinstall the targets given on the command line so that the dep - -- resolver will decide that they need fetching, even if they're - -- already installed. Since we want to get the source packages of - -- things we might have installed (but not have the sources for). - . reinstallTargets - - $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers - - includeDependencies = fromFlag (fetchDeps fetchFlags) - logMsg message rest = debug verbosity message >> rest - - reorderGoals = fromFlag (fetchReorderGoals fetchFlags) - independentGoals = fromFlag (fetchIndependentGoals fetchFlags) - shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) - strongFlags = fromFlag (fetchStrongFlags fetchFlags) - maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) - - -checkTarget :: UserTarget -> IO () -checkTarget target = case target of - UserTargetRemoteTarball _uri - -> die $ "The 'fetch' command does not yet support remote tarballs. " - ++ "In the meantime you can use the 'unpack' commands." 
- _ -> return () - -fetchPackage :: Verbosity -> RepoContext -> PackageLocation a -> IO () -fetchPackage verbosity repoCtxt pkgsrc = case pkgsrc of - LocalUnpackedPackage _dir -> return () - LocalTarballPackage _file -> return () - - RemoteTarballPackage _uri _ -> - die $ "The 'fetch' command does not yet support remote tarballs. " - ++ "In the meantime you can use the 'unpack' commands." - - RepoTarballPackage repo pkgid _ -> do - _ <- fetchRepoTarball verbosity repoCtxt repo pkgid - return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/FetchUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/FetchUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/FetchUtils.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/FetchUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,226 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.FetchUtils --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Functions for fetching packages ------------------------------------------------------------------------------ -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.FetchUtils ( - - -- * fetching packages - fetchPackage, - isFetched, - checkFetched, - - -- ** specifically for repo packages - checkRepoTarballFetched, - fetchRepoTarball, - - -- * fetching other things - downloadIndex, - ) where - -import Distribution.Client.Types -import Distribution.Client.HttpUtils - ( downloadURI, isOldHackageURI, DownloadResult(..) - , HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps ) - -import Distribution.Package - ( PackageId, packageName, packageVersion ) -import Distribution.Simple.Utils - ( notice, info, setupMessage ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Client.GlobalFlags - ( RepoContext(..) ) - -import Data.Maybe -import System.Directory - ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) -import System.IO - ( openTempFile, hClose ) -import System.FilePath - ( (), (<.>) ) -import qualified System.FilePath.Posix as FilePath.Posix - ( combine, joinPath ) -import Network.URI - ( URI(uriPath) ) - -import qualified Hackage.Security.Client as Sec - --- ------------------------------------------------------------ --- * Actually fetch things --- ------------------------------------------------------------ - --- | Returns @True@ if the package has already been fetched --- or does not need fetching. --- -isFetched :: PackageLocation (Maybe FilePath) -> IO Bool -isFetched loc = case loc of - LocalUnpackedPackage _dir -> return True - LocalTarballPackage _file -> return True - RemoteTarballPackage _uri local -> return (isJust local) - RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) - - --- | Checks if the package has already been fetched (or does not need --- fetching) and if so returns evidence in the form of a 'PackageLocation' --- with a resolved local file location. 
--- -checkFetched :: PackageLocation (Maybe FilePath) - -> IO (Maybe (PackageLocation FilePath)) -checkFetched loc = case loc of - LocalUnpackedPackage dir -> - return (Just $ LocalUnpackedPackage dir) - LocalTarballPackage file -> - return (Just $ LocalTarballPackage file) - RemoteTarballPackage uri (Just file) -> - return (Just $ RemoteTarballPackage uri file) - RepoTarballPackage repo pkgid (Just file) -> - return (Just $ RepoTarballPackage repo pkgid file) - - RemoteTarballPackage _uri Nothing -> return Nothing - RepoTarballPackage repo pkgid Nothing -> - fmap (fmap (RepoTarballPackage repo pkgid)) - (checkRepoTarballFetched repo pkgid) - - --- | Like 'checkFetched' but for the specific case of a 'RepoTarballPackage'. --- -checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath) -checkRepoTarballFetched repo pkgid = do - let file = packageFile repo pkgid - exists <- doesFileExist file - if exists - then return (Just file) - else return Nothing - - --- | Fetch a package if we don't have it already. --- -fetchPackage :: Verbosity - -> RepoContext - -> PackageLocation (Maybe FilePath) - -> IO (PackageLocation FilePath) -fetchPackage verbosity repoCtxt loc = case loc of - LocalUnpackedPackage dir -> - return (LocalUnpackedPackage dir) - LocalTarballPackage file -> - return (LocalTarballPackage file) - RemoteTarballPackage uri (Just file) -> - return (RemoteTarballPackage uri file) - RepoTarballPackage repo pkgid (Just file) -> - return (RepoTarballPackage repo pkgid file) - - RemoteTarballPackage uri Nothing -> do - path <- downloadTarballPackage uri - return (RemoteTarballPackage uri path) - RepoTarballPackage repo pkgid Nothing -> do - local <- fetchRepoTarball verbosity repoCtxt repo pkgid - return (RepoTarballPackage repo pkgid local) - where - downloadTarballPackage uri = do - transport <- repoContextGetTransport repoCtxt - transportCheckHttps transport uri - notice verbosity ("Downloading " ++ show uri) - tmpdir <- getTemporaryDirectory - (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" - hClose hnd - _ <- downloadURI transport verbosity uri path - return path - - --- | Fetch a repo package if we don't have it already. --- -fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath -fetchRepoTarball verbosity repoCtxt repo pkgid = do - fetched <- doesFileExist (packageFile repo pkgid) - if fetched - then do info verbosity $ display pkgid ++ " has already been downloaded." - return (packageFile repo pkgid) - else do setupMessage verbosity "Downloading" pkgid - downloadRepoPackage - where - downloadRepoPackage = case repo of - RepoLocal{..} -> return (packageFile repo pkgid) - - RepoRemote{..} -> do - transport <- repoContextGetTransport repoCtxt - remoteRepoCheckHttps transport repoRemote - let uri = packageURI repoRemote pkgid - dir = packageDir repo pkgid - path = packageFile repo pkgid - createDirectoryIfMissing True dir - _ <- downloadURI transport verbosity uri path - return path - - RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do - let dir = packageDir repo pkgid - path = packageFile repo pkgid - createDirectoryIfMissing True dir - Sec.uncheckClientErrors $ do - info verbosity ("writing " ++ path) - Sec.downloadPackage' rep pkgid path - return path - --- | Downloads an index file to [config-dir/packages/serv-id]. 
--- -downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult -downloadIndex transport verbosity remoteRepo cacheDir = do - remoteRepoCheckHttps transport remoteRepo - let uri = (remoteRepoURI remoteRepo) { - uriPath = uriPath (remoteRepoURI remoteRepo) - `FilePath.Posix.combine` "00-index.tar.gz" - } - path = cacheDir "00-index" <.> "tar.gz" - createDirectoryIfMissing True cacheDir - downloadURI transport verbosity uri path - - --- ------------------------------------------------------------ --- * Path utilities --- ------------------------------------------------------------ - --- | Generate the full path to the locally cached copy of --- the tarball for a given @PackageIdentifer@. --- -packageFile :: Repo -> PackageId -> FilePath -packageFile repo pkgid = packageDir repo pkgid - display pkgid - <.> "tar.gz" - --- | Generate the full path to the directory where the local cached copy of --- the tarball for a given @PackageIdentifer@ is stored. --- -packageDir :: Repo -> PackageId -> FilePath -packageDir repo pkgid = repoLocalDir repo - display (packageName pkgid) - display (packageVersion pkgid) - --- | Generate the URI of the tarball for a given package. --- -packageURI :: RemoteRepo -> PackageId -> URI -packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = - (remoteRepoURI repo) { - uriPath = FilePath.Posix.joinPath - [uriPath (remoteRepoURI repo) - ,display (packageName pkgid) - ,display (packageVersion pkgid) - ,display pkgid <.> "tar.gz"] - } -packageURI repo pkgid = - (remoteRepoURI repo) { - uriPath = FilePath.Posix.joinPath - [uriPath (remoteRepoURI repo) - ,"package" - ,display pkgid <.> "tar.gz"] - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/FileMonitor.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/FileMonitor.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/FileMonitor.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1119 +0,0 @@ -{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor, GeneralizedNewtypeDeriving, - NamedFieldPuns, BangPatterns #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - --- | An abstraction to help with re-running actions when files or other --- input values they depend on have changed. 
--- -module Distribution.Client.FileMonitor ( - - -- * Declaring files to monitor - MonitorFilePath(..), - MonitorKindFile(..), - MonitorKindDir(..), - FilePathGlob(..), - monitorFile, - monitorFileHashed, - monitorNonExistentFile, - monitorDirectory, - monitorNonExistentDirectory, - monitorDirectoryExistence, - monitorFileOrDirectory, - monitorFileGlob, - monitorFileGlobExistence, - monitorFileSearchPath, - monitorFileHashedSearchPath, - - -- * Creating and checking sets of monitored files - FileMonitor(..), - newFileMonitor, - MonitorChanged(..), - MonitorChangedReason(..), - checkFileMonitorChanged, - updateFileMonitor, - MonitorTimestamp, - beginUpdateFileMonitor, - ) where - - -#if MIN_VERSION_containers(0,5,0) -import Data.Map.Strict (Map) -import qualified Data.Map.Strict as Map -#else -import Data.Map (Map) -import qualified Data.Map as Map -#endif -import qualified Data.ByteString.Lazy as BS -import Distribution.Compat.Binary -import qualified Distribution.Compat.Binary as Binary -import qualified Data.Hashable as Hashable -import Data.List (sort) - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import Control.Monad.Trans (MonadIO, liftIO) -import Control.Monad.State (StateT, mapStateT) -import qualified Control.Monad.State as State -import Control.Monad.Except (ExceptT, runExceptT, withExceptT, - throwError) -import Control.Exception - -import Distribution.Client.Compat.Time -import Distribution.Client.Glob -import Distribution.Simple.Utils (handleDoesNotExist, writeFileAtomic) -import Distribution.Client.Utils (mergeBy, MergeResult(..)) - -import System.FilePath -import System.Directory -import System.IO -import GHC.Generics (Generic) - - ------------------------------------------------------------------------------- --- Types for specifying files to monitor --- - - --- | A description of a file (or set of files) to monitor for changes. --- --- Where file paths are relative they are relative to a common directory --- (e.g. project root), not necessarily the process current directory. --- -data MonitorFilePath = - MonitorFile { - monitorKindFile :: !MonitorKindFile, - monitorKindDir :: !MonitorKindDir, - monitorPath :: !FilePath - } - | MonitorFileGlob { - monitorKindFile :: !MonitorKindFile, - monitorKindDir :: !MonitorKindDir, - monitorPathGlob :: !FilePathGlob - } - deriving (Eq, Show, Generic) - -data MonitorKindFile = FileExists - | FileModTime - | FileHashed - | FileNotExists - deriving (Eq, Show, Generic) - -data MonitorKindDir = DirExists - | DirModTime - | DirNotExists - deriving (Eq, Show, Generic) - -instance Binary MonitorFilePath -instance Binary MonitorKindFile -instance Binary MonitorKindDir - --- | Monitor a single file for changes, based on its modification time. --- The monitored file is considered to have changed if it no longer --- exists or if its modification time has changed. --- -monitorFile :: FilePath -> MonitorFilePath -monitorFile = MonitorFile FileModTime DirNotExists - --- | Monitor a single file for changes, based on its modification time --- and content hash. The monitored file is considered to have changed if --- it no longer exists or if its modification time and content hash have --- changed. --- -monitorFileHashed :: FilePath -> MonitorFilePath -monitorFileHashed = MonitorFile FileHashed DirNotExists - --- | Monitor a single non-existent file for changes. The monitored file --- is considered to have changed if it exists. 
--- -monitorNonExistentFile :: FilePath -> MonitorFilePath -monitorNonExistentFile = MonitorFile FileNotExists DirNotExists - --- | Monitor a single directory for changes, based on its modification --- time. The monitored directory is considered to have changed if it no --- longer exists or if its modification time has changed. --- -monitorDirectory :: FilePath -> MonitorFilePath -monitorDirectory = MonitorFile FileNotExists DirModTime - --- | Monitor a single non-existent directory for changes. The monitored --- directory is considered to have changed if it exists. --- -monitorNonExistentDirectory :: FilePath -> MonitorFilePath --- Just an alias for monitorNonExistentFile, since you can't --- tell the difference between a non-existent directory and --- a non-existent file :) -monitorNonExistentDirectory = monitorNonExistentFile - --- | Monitor a single directory for existence. The monitored directory is --- considered to have changed only if it no longer exists. --- -monitorDirectoryExistence :: FilePath -> MonitorFilePath -monitorDirectoryExistence = MonitorFile FileNotExists DirExists - --- | Monitor a single file or directory for changes, based on its modification --- time. The monitored file is considered to have changed if it no longer --- exists or if its modification time has changed. --- -monitorFileOrDirectory :: FilePath -> MonitorFilePath -monitorFileOrDirectory = MonitorFile FileModTime DirModTime - --- | Monitor a set of files (or directories) identified by a file glob. --- The monitored glob is considered to have changed if the set of files --- matching the glob changes (i.e. creations or deletions), or for files if the --- modification time and content hash of any matching file has changed. --- -monitorFileGlob :: FilePathGlob -> MonitorFilePath -monitorFileGlob = MonitorFileGlob FileHashed DirExists - --- | Monitor a set of files (or directories) identified by a file glob for --- existence only. The monitored glob is considered to have changed if the set --- of files matching the glob changes (i.e. creations or deletions). --- -monitorFileGlobExistence :: FilePathGlob -> MonitorFilePath -monitorFileGlobExistence = MonitorFileGlob FileExists DirExists - --- | Creates a list of files to monitor when you search for a file which --- unsuccessfully looked in @notFoundAtPaths@ before finding it at --- @foundAtPath@. -monitorFileSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] -monitorFileSearchPath notFoundAtPaths foundAtPath = - monitorFile foundAtPath - : map monitorNonExistentFile notFoundAtPaths - --- | Similar to 'monitorFileSearchPath', but also instructs us to --- monitor the hash of the found file. -monitorFileHashedSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] -monitorFileHashedSearchPath notFoundAtPaths foundAtPath = - monitorFileHashed foundAtPath - : map monitorNonExistentFile notFoundAtPaths - - ------------------------------------------------------------------------------- --- Implementation types, files status --- - --- | The state necessary to determine whether a set of monitored --- files has changed. It consists of two parts: a set of specific --- files to be monitored (index by their path), and a list of --- globs, which monitor may files at once. -data MonitorStateFileSet - = MonitorStateFileSet !(Map FilePath MonitorStateFile) - ![MonitorStateGlob] - deriving Show - -type Hash = Int - --- | The state necessary to determine whether a monitored file has changed. 
--- --- This covers all the cases of 'MonitorFilePath' except for globs which is --- covered separately by 'MonitorStateGlob'. --- --- The @Maybe ModTime@ is to cover the case where we already consider the --- file to have changed, either because it had already changed by the time we --- did the snapshot (i.e. too new, changed since start of update process) or it --- no longer exists at all. --- -data MonitorStateFile = MonitorStateFile !MonitorKindFile !MonitorKindDir - !MonitorStateFileStatus - deriving (Show, Generic) - -data MonitorStateFileStatus - = MonitorStateFileExists - | MonitorStateFileModTime !ModTime -- ^ cached file mtime - | MonitorStateFileHashed !ModTime !Hash -- ^ cached mtime and content hash - | MonitorStateDirExists - | MonitorStateDirModTime !ModTime -- ^ cached dir mtime - | MonitorStateNonExistent - | MonitorStateAlreadyChanged - deriving (Show, Generic) - -instance Binary MonitorStateFile -instance Binary MonitorStateFileStatus - --- | The state necessary to determine whether the files matched by a globbing --- match have changed. --- -data MonitorStateGlob = MonitorStateGlob !MonitorKindFile !MonitorKindDir - !FilePathRoot !MonitorStateGlobRel - deriving (Show, Generic) - -data MonitorStateGlobRel - = MonitorStateGlobDirs - !Glob !FilePathGlobRel - !ModTime - ![(FilePath, MonitorStateGlobRel)] -- invariant: sorted - - | MonitorStateGlobFiles - !Glob - !ModTime - ![(FilePath, MonitorStateFileStatus)] -- invariant: sorted - - | MonitorStateGlobDirTrailing - deriving (Show, Generic) - -instance Binary MonitorStateGlob -instance Binary MonitorStateGlobRel - --- | We can build a 'MonitorStateFileSet' from a set of 'MonitorFilePath' by --- inspecting the state of the file system, and we can go in the reverse --- direction by just forgetting the extra info. --- -reconstructMonitorFilePaths :: MonitorStateFileSet -> [MonitorFilePath] -reconstructMonitorFilePaths (MonitorStateFileSet singlePaths globPaths) = - Map.foldrWithKey (\k x r -> getSinglePath k x : r) - (map getGlobPath globPaths) - singlePaths - where - getSinglePath filepath (MonitorStateFile kindfile kinddir _) = - MonitorFile kindfile kinddir filepath - - getGlobPath (MonitorStateGlob kindfile kinddir root gstate) = - MonitorFileGlob kindfile kinddir $ FilePathGlob root $ - case gstate of - MonitorStateGlobDirs glob globs _ _ -> GlobDir glob globs - MonitorStateGlobFiles glob _ _ -> GlobFile glob - MonitorStateGlobDirTrailing -> GlobDirTrailing - ------------------------------------------------------------------------------- --- Checking the status of monitored files --- - --- | A monitor for detecting changes to a set of files. It can be used to --- efficiently test if any of a set of files (specified individually or by --- glob patterns) has changed since some snapshot. In addition, it also checks --- for changes in a value (of type @a@), and when there are no changes in --- either it returns a saved value (of type @b@). --- --- The main use case looks like this: suppose we have some expensive action --- that depends on certain pure inputs and reads some set of files, and --- produces some pure result. We want to avoid re-running this action when it --- would produce the same result. So we need to monitor the files the action --- looked at, the other pure input values, and we need to cache the result. --- Then at some later point, if the input value didn't change, and none of the --- files changed, then we can re-use the cached result rather than re-running --- the action. 
--- --- This can be achieved using a 'FileMonitor'. Each 'FileMonitor' instance --- saves state in a disk file, so the file for that has to be specified, --- making sure it is unique. The pattern is to use 'checkFileMonitorChanged' --- to see if there's been any change. If there is, re-run the action, keeping --- track of the files, then use 'updateFileMonitor' to record the current --- set of files to monitor, the current input value for the action, and the --- result of the action. --- --- The typical occurrence of this pattern is captured by 'rerunIfChanged' --- and the 'Rebuild' monad. More complicated cases may need to use --- 'checkFileMonitorChanged' and 'updateFileMonitor' directly. --- -data FileMonitor a b - = FileMonitor { - - -- | The file where this 'FileMonitor' should store its state. - -- - fileMonitorCacheFile :: FilePath, - - -- | Compares a new cache key with old one to determine if a - -- corresponding cached value is still valid. - -- - -- Typically this is just an equality test, but in some - -- circumstances it can make sense to do things like subset - -- comparisons. - -- - -- The first arg is the new value, the second is the old cached value. - -- - fileMonitorKeyValid :: a -> a -> Bool, - - -- | When this mode is enabled, if 'checkFileMonitorChanged' returns - -- 'MonitoredValueChanged' then we have the guarantee that no files - -- changed, that the value change was the only change. In the default - -- mode no such guarantee is provided which is slightly faster. - -- - fileMonitorCheckIfOnlyValueChanged :: Bool - } - --- | Define a new file monitor. --- --- It's best practice to define file monitor values once, and then use the --- same value for 'checkFileMonitorChanged' and 'updateFileMonitor' as this --- ensures you get the same types @a@ and @b@ for reading and writing. --- --- The path of the file monitor itself must be unique because it keeps state --- on disk and these would clash. --- -newFileMonitor :: Eq a => FilePath -- ^ The file to cache the state of the - -- file monitor. Must be unique. - -> FileMonitor a b -newFileMonitor path = FileMonitor path (==) False - --- | The result of 'checkFileMonitorChanged': either the monitored files or --- value changed (and it tells us which it was) or nothing changed and we get --- the cached result. --- -data MonitorChanged a b = - -- | The monitored files and value did not change. The cached result is - -- @b@. - -- - -- The set of monitored files is also returned. This is useful - -- for composing or nesting 'FileMonitor's. - MonitorUnchanged b [MonitorFilePath] - - -- | The monitor found that something changed. The reason is given. - -- - | MonitorChanged (MonitorChangedReason a) - deriving Show - --- | What kind of change 'checkFileMonitorChanged' detected. --- -data MonitorChangedReason a = - - -- | One of the files changed (existence, file type, mtime or file - -- content, depending on the 'MonitorFilePath' in question) - MonitoredFileChanged FilePath - - -- | The pure input value changed. - -- - -- The previous cached key value is also returned. This is sometimes - -- useful when using a 'fileMonitorKeyValid' function that is not simply - -- '(==)', when invalidation can be partial. In such cases it can make - -- sense to 'updateFileMonitor' with a key value that's a combination of - -- the new and old (e.g. set union). - | MonitoredValueChanged a - - -- | There was no saved monitor state, cached value etc. Ie the file - -- for the 'FileMonitor' does not exist. 
- | MonitorFirstRun - - -- | There was existing state, but we could not read it. This typically - -- happens when the code has changed compared to an existing 'FileMonitor' - -- cache file and type of the input value or cached value has changed such - -- that we cannot decode the values. This is completely benign as we can - -- treat is just as if there were no cache file and re-run. - | MonitorCorruptCache - deriving (Eq, Show, Functor) - --- | Test if the input value or files monitored by the 'FileMonitor' have --- changed. If not, return the cached value. --- --- See 'FileMonitor' for a full explanation. --- -checkFileMonitorChanged - :: (Binary a, Binary b) - => FileMonitor a b -- ^ cache file path - -> FilePath -- ^ root directory - -> a -- ^ guard or key value - -> IO (MonitorChanged a b) -- ^ did the key or any paths change? -checkFileMonitorChanged - monitor@FileMonitor { fileMonitorKeyValid, - fileMonitorCheckIfOnlyValueChanged } - root currentKey = - - -- Consider it a change if the cache file does not exist, - -- or we cannot decode it. Sadly ErrorCall can still happen, despite - -- using decodeFileOrFail, e.g. Data.Char.chr errors - - handleDoesNotExist (MonitorChanged MonitorFirstRun) $ - handleErrorCall (MonitorChanged MonitorCorruptCache) $ - readCacheFile monitor - >>= either (\_ -> return (MonitorChanged MonitorCorruptCache)) - checkStatusCache - - where - checkStatusCache (cachedFileStatus, cachedKey, cachedResult) = do - change <- checkForChanges - case change of - Just reason -> return (MonitorChanged reason) - Nothing -> return (MonitorUnchanged cachedResult monitorFiles) - where monitorFiles = reconstructMonitorFilePaths cachedFileStatus - where - -- In fileMonitorCheckIfOnlyValueChanged mode we want to guarantee that - -- if we return MonitoredValueChanged that only the value changed. - -- We do that by checkin for file changes first. Otherwise it makes - -- more sense to do the cheaper test first. - checkForChanges - | fileMonitorCheckIfOnlyValueChanged - = checkFileChange cachedFileStatus cachedKey cachedResult - `mplusMaybeT` - checkValueChange cachedKey - - | otherwise - = checkValueChange cachedKey - `mplusMaybeT` - checkFileChange cachedFileStatus cachedKey cachedResult - - mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) - mplusMaybeT ma mb = do - mx <- ma - case mx of - Nothing -> mb - Just x -> return (Just x) - - -- Check if the guard value has changed - checkValueChange cachedKey - | not (fileMonitorKeyValid currentKey cachedKey) - = return (Just (MonitoredValueChanged cachedKey)) - | otherwise - = return Nothing - - -- Check if any file has changed - checkFileChange cachedFileStatus cachedKey cachedResult = do - res <- probeFileSystem root cachedFileStatus - case res of - -- Some monitored file has changed - Left changedPath -> - return (Just (MonitoredFileChanged (normalise changedPath))) - - -- No monitored file has changed - Right (cachedFileStatus', cacheStatus) -> do - - -- But we might still want to update the cache - whenCacheChanged cacheStatus $ - rewriteCacheFile monitor cachedFileStatus' cachedKey cachedResult - - return Nothing - --- | Helper for reading the cache file. --- --- This determines the type and format of the binary cache file. 
--- -readCacheFile :: (Binary a, Binary b) - => FileMonitor a b - -> IO (Either String (MonitorStateFileSet, a, b)) -readCacheFile FileMonitor {fileMonitorCacheFile} = - withBinaryFile fileMonitorCacheFile ReadMode $ \hnd -> - Binary.decodeOrFailIO =<< BS.hGetContents hnd - --- | Helper for writing the cache file. --- --- This determines the type and format of the binary cache file. --- -rewriteCacheFile :: (Binary a, Binary b) - => FileMonitor a b - -> MonitorStateFileSet -> a -> b -> IO () -rewriteCacheFile FileMonitor {fileMonitorCacheFile} fileset key result = - writeFileAtomic fileMonitorCacheFile $ - Binary.encode (fileset, key, result) - --- | Probe the file system to see if any of the monitored files have changed. --- --- It returns Nothing if any file changed, or returns a possibly updated --- file 'MonitorStateFileSet' plus an indicator of whether it actually changed. --- --- We may need to update the cache since there may be changes in the filesystem --- state which don't change any of our affected files. --- --- Consider the glob @{proj1,proj2}\/\*.cabal@. Say we first run and find a --- @proj1@ directory containing @proj1.cabal@ yet no @proj2@. If we later run --- and find @proj2@ was created, yet contains no files matching @*.cabal@ then --- we want to update the cache despite no changes in our relevant file set. --- Specifically, we should add an mtime for this directory so we can avoid --- re-traversing the directory in future runs. --- -probeFileSystem :: FilePath -> MonitorStateFileSet - -> IO (Either FilePath (MonitorStateFileSet, CacheChanged)) -probeFileSystem root (MonitorStateFileSet singlePaths globPaths) = - runChangedM $ do - sequence_ - [ probeMonitorStateFileStatus root file status - | (file, MonitorStateFile _ _ status) <- Map.toList singlePaths ] - -- The glob monitors can require state changes - globPaths' <- - sequence - [ probeMonitorStateGlob root globPath - | globPath <- globPaths ] - return (MonitorStateFileSet singlePaths globPaths') - - ------------------------------------------------ --- Monad for checking for file system changes --- --- We need to be able to bail out if we detect a change (using ExceptT), --- but if there's no change we need to be able to rebuild the monitor --- state. And we want to optimise that rebuilding by keeping track if --- anything actually changed (using StateT), so that in the typical case --- we can avoid rewriting the state file. - -newtype ChangedM a = ChangedM (StateT CacheChanged (ExceptT FilePath IO) a) - deriving (Functor, Applicative, Monad, MonadIO) - -runChangedM :: ChangedM a -> IO (Either FilePath (a, CacheChanged)) -runChangedM (ChangedM action) = - runExceptT $ State.runStateT action CacheUnchanged - -somethingChanged :: FilePath -> ChangedM a -somethingChanged path = ChangedM $ throwError path - -cacheChanged :: ChangedM () -cacheChanged = ChangedM $ State.put CacheChanged - -mapChangedFile :: (FilePath -> FilePath) -> ChangedM a -> ChangedM a -mapChangedFile adjust (ChangedM a) = - ChangedM (mapStateT (withExceptT adjust) a) - -data CacheChanged = CacheChanged | CacheUnchanged - -whenCacheChanged :: Monad m => CacheChanged -> m () -> m () -whenCacheChanged CacheChanged action = action -whenCacheChanged CacheUnchanged _ = return () - ----------------------- - --- | Probe the file system to see if a single monitored file has changed. 
--- -probeMonitorStateFileStatus :: FilePath -> FilePath - -> MonitorStateFileStatus - -> ChangedM () -probeMonitorStateFileStatus root file status = - case status of - MonitorStateFileExists -> - probeFileExistence root file - - MonitorStateFileModTime mtime -> - probeFileModificationTime root file mtime - - MonitorStateFileHashed mtime hash -> - probeFileModificationTimeAndHash root file mtime hash - - MonitorStateDirExists -> - probeDirExistence root file - - MonitorStateDirModTime mtime -> - probeFileModificationTime root file mtime - - MonitorStateNonExistent -> - probeFileNonExistence root file - - MonitorStateAlreadyChanged -> - somethingChanged file - - --- | Probe the file system to see if a monitored file glob has changed. --- -probeMonitorStateGlob :: FilePath -- ^ root path - -> MonitorStateGlob - -> ChangedM MonitorStateGlob -probeMonitorStateGlob relroot - (MonitorStateGlob kindfile kinddir globroot glob) = do - root <- liftIO $ getFilePathRootDirectory globroot relroot - case globroot of - FilePathRelative -> - MonitorStateGlob kindfile kinddir globroot <$> - probeMonitorStateGlobRel kindfile kinddir root "." glob - - -- for absolute cases, make the changed file we report absolute too - _ -> - mapChangedFile (root ) $ - MonitorStateGlob kindfile kinddir globroot <$> - probeMonitorStateGlobRel kindfile kinddir root "" glob - -probeMonitorStateGlobRel :: MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ root path - -> FilePath -- ^ path of the directory we are - -- looking in relative to @root@ - -> MonitorStateGlobRel - -> ChangedM MonitorStateGlobRel -probeMonitorStateGlobRel kindfile kinddir root dirName - (MonitorStateGlobDirs glob globPath mtime children) = do - change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime - case change of - Nothing -> do - children' <- sequence - [ do fstate' <- probeMonitorStateGlobRel - kindfile kinddir root - (dirName fname) fstate - return (fname, fstate') - | (fname, fstate) <- children ] - return $! MonitorStateGlobDirs glob globPath mtime children' - - Just mtime' -> do - -- directory modification time changed: - -- a matching subdir may have been added or deleted - matches <- filterM (\entry -> let subdir = root dirName entry - in liftIO $ doesDirectoryExist subdir) - . filter (matchGlob glob) - =<< liftIO (getDirectoryContents (root dirName)) - - children' <- mapM probeMergeResult $ - mergeBy (\(path1,_) path2 -> compare path1 path2) - children - (sort matches) - return $! MonitorStateGlobDirs glob globPath mtime' children' - -- Note that just because the directory has changed, we don't force - -- a cache rewrite with 'cacheChanged' since that has some cost, and - -- all we're saving is scanning the directory. But we do rebuild the - -- cache with the new mtime', so that if the cache is rewritten for - -- some other reason, we'll take advantage of that. - - where - probeMergeResult :: MergeResult (FilePath, MonitorStateGlobRel) FilePath - -> ChangedM (FilePath, MonitorStateGlobRel) - - -- Only in cached (directory deleted) - probeMergeResult (OnlyInLeft (path, fstate)) = do - case allMatchingFiles (dirName path) fstate of - [] -> return (path, fstate) - -- Strictly speaking we should be returning 'CacheChanged' above - -- as we should prune the now-missing 'MonitorStateGlobRel'. However - -- we currently just leave these now-redundant entries in the - -- cache as they cost no IO and keeping them allows us to avoid - -- rewriting the cache. 
- (file:_) -> somethingChanged file - - -- Only in current filesystem state (directory added) - probeMergeResult (OnlyInRight path) = do - fstate <- liftIO $ buildMonitorStateGlobRel Nothing Map.empty - kindfile kinddir root (dirName path) globPath - case allMatchingFiles (dirName path) fstate of - (file:_) -> somethingChanged file - -- This is the only case where we use 'cacheChanged' because we can - -- have a whole new dir subtree (of unbounded size and cost), so we - -- need to save the state of that new subtree in the cache. - [] -> cacheChanged >> return (path, fstate) - - -- Found in path - probeMergeResult (InBoth (path, fstate) _) = do - fstate' <- probeMonitorStateGlobRel kindfile kinddir - root (dirName path) fstate - return (path, fstate') - - -- | Does a 'MonitorStateGlob' have any relevant files within it? - allMatchingFiles :: FilePath -> MonitorStateGlobRel -> [FilePath] - allMatchingFiles dir (MonitorStateGlobFiles _ _ entries) = - [ dir fname | (fname, _) <- entries ] - allMatchingFiles dir (MonitorStateGlobDirs _ _ _ entries) = - [ res - | (subdir, fstate) <- entries - , res <- allMatchingFiles (dir subdir) fstate ] - allMatchingFiles dir MonitorStateGlobDirTrailing = - [dir] - -probeMonitorStateGlobRel _ _ root dirName - (MonitorStateGlobFiles glob mtime children) = do - change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime - mtime' <- case change of - Nothing -> return mtime - Just mtime' -> do - -- directory modification time changed: - -- a matching file may have been added or deleted - matches <- return . filter (matchGlob glob) - =<< liftIO (getDirectoryContents (root dirName)) - - mapM_ probeMergeResult $ - mergeBy (\(path1,_) path2 -> compare path1 path2) - children - (sort matches) - return mtime' - - -- Check that none of the children have changed - forM_ children $ \(file, status) -> - probeMonitorStateFileStatus root (dirName file) status - - - return (MonitorStateGlobFiles glob mtime' children) - -- Again, we don't force a cache rewite with 'cacheChanged', but we do use - -- the new mtime' if any. - where - probeMergeResult :: MergeResult (FilePath, MonitorStateFileStatus) FilePath - -> ChangedM () - probeMergeResult mr = case mr of - InBoth _ _ -> return () - -- this is just to be able to accurately report which file changed: - OnlyInLeft (path, _) -> somethingChanged (dirName path) - OnlyInRight path -> somethingChanged (dirName path) - -probeMonitorStateGlobRel _ _ _ _ MonitorStateGlobDirTrailing = - return MonitorStateGlobDirTrailing - ------------------------------------------------------------------------------- - --- | Update the input value and the set of files monitored by the --- 'FileMonitor', plus the cached value that may be returned in future. --- --- This takes a snapshot of the state of the monitored files right now, so --- 'checkFileMonitorChanged' will look for file system changes relative to --- this snapshot. --- --- This is typically done once the action has been completed successfully and --- we have the action's result and we know what files it looked at. See --- 'FileMonitor' for a full explanation. --- --- If we do take the snapshot after the action has completed then we have a --- problem. The problem is that files might have changed /while/ the action was --- running but /after/ the action read them. If we take the snapshot after the --- action completes then we will miss these changes. 
The solution is to record --- a timestamp before beginning execution of the action and then we make the --- conservative assumption that any file that has changed since then has --- already changed, ie the file monitor state for these files will be such that --- 'checkFileMonitorChanged' will report that they have changed. --- --- So if you do use 'updateFileMonitor' after the action (so you can discover --- the files used rather than predicting them in advance) then use --- 'beginUpdateFileMonitor' to get a timestamp and pass that. Alternatively, --- if you take the snapshot in advance of the action, or you're not monitoring --- any files then you can use @Nothing@ for the timestamp parameter. --- -updateFileMonitor - :: (Binary a, Binary b) - => FileMonitor a b -- ^ cache file path - -> FilePath -- ^ root directory - -> Maybe MonitorTimestamp -- ^ timestamp when the update action started - -> [MonitorFilePath] -- ^ files of interest relative to root - -> a -- ^ the current key value - -> b -- ^ the current result value - -> IO () -updateFileMonitor monitor root startTime monitorFiles - cachedKey cachedResult = do - hashcache <- readCacheFileHashes monitor - msfs <- buildMonitorStateFileSet startTime hashcache root monitorFiles - rewriteCacheFile monitor msfs cachedKey cachedResult - --- | A timestamp to help with the problem of file changes during actions. --- See 'updateFileMonitor' for details. --- -newtype MonitorTimestamp = MonitorTimestamp ModTime - --- | Record a timestamp at the beginning of an action, and when the action --- completes call 'updateFileMonitor' passing it the timestamp. --- See 'updateFileMonitor' for details. --- -beginUpdateFileMonitor :: IO MonitorTimestamp -beginUpdateFileMonitor = MonitorTimestamp <$> getCurTime - --- | Take the snapshot of the monitored files. That is, given the --- specification of the set of files we need to monitor, inspect the state --- of the file system now and collect the information we'll need later to --- determine if anything has changed. 
--- -buildMonitorStateFileSet :: Maybe MonitorTimestamp -- ^ optional: timestamp - -- of the start of the action - -> FileHashCache -- ^ existing file hashes - -> FilePath -- ^ root directory - -> [MonitorFilePath] -- ^ patterns of interest - -- relative to root - -> IO MonitorStateFileSet -buildMonitorStateFileSet mstartTime hashcache root = - go Map.empty [] - where - go :: Map FilePath MonitorStateFile -> [MonitorStateGlob] - -> [MonitorFilePath] -> IO MonitorStateFileSet - go !singlePaths !globPaths [] = - return (MonitorStateFileSet singlePaths globPaths) - - go !singlePaths !globPaths - (MonitorFile kindfile kinddir path : monitors) = do - monitorState <- MonitorStateFile kindfile kinddir - <$> buildMonitorStateFile mstartTime hashcache - kindfile kinddir root path - go (Map.insert path monitorState singlePaths) globPaths monitors - - go !singlePaths !globPaths - (MonitorFileGlob kindfile kinddir globPath : monitors) = do - monitorState <- buildMonitorStateGlob mstartTime hashcache - kindfile kinddir root globPath - go singlePaths (monitorState : globPaths) monitors - - -buildMonitorStateFile :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePath - -> IO MonitorStateFileStatus -buildMonitorStateFile mstartTime hashcache kindfile kinddir root path = do - let abspath = root path - isFile <- doesFileExist abspath - isDir <- doesDirectoryExist abspath - case (isFile, kindfile, isDir, kinddir) of - (_, FileNotExists, _, DirNotExists) -> - -- we don't need to care if it exists now, since we check at probe time - return MonitorStateNonExistent - - (False, _, False, _) -> - return MonitorStateAlreadyChanged - - (True, FileExists, _, _) -> - return MonitorStateFileExists - - (True, FileModTime, _, _) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return MonitorStateAlreadyChanged - else return (MonitorStateFileModTime mtime) - - (True, FileHashed, _, _) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return MonitorStateAlreadyChanged - else do hash <- getFileHash hashcache abspath abspath mtime - return (MonitorStateFileHashed mtime hash) - - (_, _, True, DirExists) -> - return MonitorStateDirExists - - (_, _, True, DirModTime) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return MonitorStateAlreadyChanged - else return (MonitorStateDirModTime mtime) - - (False, _, True, DirNotExists) -> return MonitorStateAlreadyChanged - (True, FileNotExists, False, _) -> return MonitorStateAlreadyChanged - --- | If we have a timestamp for the beginning of the update, then any file --- mtime later than this means that it changed during the update and we ought --- to consider the file as already changed. --- -changedDuringUpdate :: Maybe MonitorTimestamp -> ModTime -> Bool -changedDuringUpdate (Just (MonitorTimestamp startTime)) mtime - = mtime > startTime -changedDuringUpdate _ _ = False - --- | Much like 'buildMonitorStateFileSet' but for the somewhat complicated case --- of a file glob. --- --- This gets used both by 'buildMonitorStateFileSet' when we're taking the --- file system snapshot, but also by 'probeGlobStatus' as part of checking --- the monitored (globed) files for changes when we find a whole new subtree. 
--- -buildMonitorStateGlob :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePathGlob -- ^ the matching glob - -> IO MonitorStateGlob -buildMonitorStateGlob mstartTime hashcache kindfile kinddir relroot - (FilePathGlob globroot globPath) = do - root <- liftIO $ getFilePathRootDirectory globroot relroot - MonitorStateGlob kindfile kinddir globroot <$> - buildMonitorStateGlobRel - mstartTime hashcache kindfile kinddir root "." globPath - -buildMonitorStateGlobRel :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePath -- ^ directory we are examining - -- relative to the root - -> FilePathGlobRel -- ^ the matching glob - -> IO MonitorStateGlobRel -buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root - dir globPath = do - let absdir = root dir - dirEntries <- getDirectoryContents absdir - dirMTime <- getModTime absdir - case globPath of - GlobDir glob globPath' -> do - subdirs <- filterM (\subdir -> doesDirectoryExist (absdir subdir)) - $ filter (matchGlob glob) dirEntries - subdirStates <- - forM (sort subdirs) $ \subdir -> do - fstate <- buildMonitorStateGlobRel - mstartTime hashcache kindfile kinddir root - (dir subdir) globPath' - return (subdir, fstate) - return $! MonitorStateGlobDirs glob globPath' dirMTime subdirStates - - GlobFile glob -> do - let files = filter (matchGlob glob) dirEntries - filesStates <- - forM (sort files) $ \file -> do - fstate <- buildMonitorStateFile - mstartTime hashcache kindfile kinddir root - (dir file) - return (file, fstate) - return $! MonitorStateGlobFiles glob dirMTime filesStates - - GlobDirTrailing -> - return MonitorStateGlobDirTrailing - - --- | We really want to avoid re-hashing files all the time. We already make --- the assumption that if a file mtime has not changed then we don't need to --- bother checking if the content hash has changed. We can apply the same --- assumption when updating the file monitor state. In the typical case of --- updating a file monitor the set of files is the same or largely the same so --- we can grab the previously known content hashes with their corresponding --- mtimes. --- -type FileHashCache = Map FilePath (ModTime, Hash) - --- | We declare it a cache hit if the mtime of a file is the same as before. --- -lookupFileHashCache :: FileHashCache -> FilePath -> ModTime -> Maybe Hash -lookupFileHashCache hashcache file mtime = do - (mtime', hash) <- Map.lookup file hashcache - guard (mtime' == mtime) - return hash - --- | Either get it from the cache or go read the file -getFileHash :: FileHashCache -> FilePath -> FilePath -> ModTime -> IO Hash -getFileHash hashcache relfile absfile mtime = - case lookupFileHashCache hashcache relfile mtime of - Just hash -> return hash - Nothing -> readFileHash absfile - --- | Build a 'FileHashCache' from the previous 'MonitorStateFileSet'. While --- in principle we could preserve the structure of the previous state, given --- that the set of files to monitor can change then it's simpler just to throw --- away the structure and use a finite map. 
--- -readCacheFileHashes :: (Binary a, Binary b) - => FileMonitor a b -> IO FileHashCache -readCacheFileHashes monitor = - handleDoesNotExist Map.empty $ - handleErrorCall Map.empty $ do - res <- readCacheFile monitor - case res of - Left _ -> return Map.empty - Right (msfs, _, _) -> return (mkFileHashCache msfs) - where - mkFileHashCache :: MonitorStateFileSet -> FileHashCache - mkFileHashCache (MonitorStateFileSet singlePaths globPaths) = - collectAllFileHashes singlePaths - `Map.union` collectAllGlobHashes globPaths - - collectAllFileHashes = - Map.mapMaybe $ \(MonitorStateFile _ _ fstate) -> case fstate of - MonitorStateFileHashed mtime hash -> Just (mtime, hash) - _ -> Nothing - - collectAllGlobHashes globPaths = - Map.fromList [ (fpath, hash) - | MonitorStateGlob _ _ _ gstate <- globPaths - , (fpath, hash) <- collectGlobHashes "" gstate ] - - collectGlobHashes dir (MonitorStateGlobDirs _ _ _ entries) = - [ res - | (subdir, fstate) <- entries - , res <- collectGlobHashes (dir subdir) fstate ] - - collectGlobHashes dir (MonitorStateGlobFiles _ _ entries) = - [ (dir fname, (mtime, hash)) - | (fname, MonitorStateFileHashed mtime hash) <- entries ] - - collectGlobHashes _dir MonitorStateGlobDirTrailing = - [] - - ------------------------------------------------------------------------------- --- Utils --- - --- | Within the @root@ directory, check if @file@ has its 'ModTime' is --- the same as @mtime@, short-circuiting if it is different. -probeFileModificationTime :: FilePath -> FilePath -> ModTime -> ChangedM () -probeFileModificationTime root file mtime = do - unchanged <- liftIO $ checkModificationTimeUnchanged root file mtime - unless unchanged (somethingChanged file) - --- | Within the @root@ directory, check if @file@ has its 'ModTime' and --- 'Hash' is the same as @mtime@ and @hash@, short-circuiting if it is --- different. -probeFileModificationTimeAndHash :: FilePath -> FilePath -> ModTime -> Hash - -> ChangedM () -probeFileModificationTimeAndHash root file mtime hash = do - unchanged <- liftIO $ - checkFileModificationTimeAndHashUnchanged root file mtime hash - unless unchanged (somethingChanged file) - --- | Within the @root@ directory, check if @file@ still exists as a file. --- If it *does not* exist, short-circuit. -probeFileExistence :: FilePath -> FilePath -> ChangedM () -probeFileExistence root file = do - existsFile <- liftIO $ doesFileExist (root file) - unless existsFile (somethingChanged file) - --- | Within the @root@ directory, check if @dir@ still exists. --- If it *does not* exist, short-circuit. -probeDirExistence :: FilePath -> FilePath -> ChangedM () -probeDirExistence root dir = do - existsDir <- liftIO $ doesDirectoryExist (root dir) - unless existsDir (somethingChanged dir) - --- | Within the @root@ directory, check if @file@ still does not exist. --- If it *does* exist, short-circuit. -probeFileNonExistence :: FilePath -> FilePath -> ChangedM () -probeFileNonExistence root file = do - existsFile <- liftIO $ doesFileExist (root file) - existsDir <- liftIO $ doesDirectoryExist (root file) - when (existsFile || existsDir) (somethingChanged file) - --- | Returns @True@ if, inside the @root@ directory, @file@ has the same --- 'ModTime' as @mtime@. 
-checkModificationTimeUnchanged :: FilePath -> FilePath - -> ModTime -> IO Bool -checkModificationTimeUnchanged root file mtime = - handleIOException False $ do - mtime' <- getModTime (root file) - return (mtime == mtime') - --- | Returns @True@ if, inside the @root@ directory, @file@ has the --- same 'ModTime' and 'Hash' as @mtime and @chash@. -checkFileModificationTimeAndHashUnchanged :: FilePath -> FilePath - -> ModTime -> Hash -> IO Bool -checkFileModificationTimeAndHashUnchanged root file mtime chash = - handleIOException False $ do - mtime' <- getModTime (root file) - if mtime == mtime' - then return True - else do - chash' <- readFileHash (root file) - return (chash == chash') - --- | Read a non-cryptographic hash of a @file@. -readFileHash :: FilePath -> IO Hash -readFileHash file = - withBinaryFile file ReadMode $ \hnd -> - evaluate . Hashable.hash =<< BS.hGetContents hnd - --- | Given a directory @dir@, return @Nothing@ if its 'ModTime' --- is the same as @mtime@, and the new 'ModTime' if it is not. -checkDirectoryModificationTime :: FilePath -> ModTime -> IO (Maybe ModTime) -checkDirectoryModificationTime dir mtime = - handleIOException Nothing $ do - mtime' <- getModTime dir - if mtime == mtime' - then return Nothing - else return (Just mtime') - --- | Run an IO computation, returning @e@ if there is an 'error' --- call. ('ErrorCall') -handleErrorCall :: a -> IO a -> IO a -handleErrorCall e = - handle (\(ErrorCall _) -> return e) - --- | Run an IO computation, returning @e@ if there is any 'IOException'. --- --- This policy is OK in the file monitor code because it just causes the --- monitor to report that something changed, and then code reacting to that --- will normally encounter the same IO exception when it re-runs the action --- that uses the file. --- -handleIOException :: a -> IO a -> IO a -handleIOException e = - handle (anyIOException e) - where - anyIOException :: a -> IOException -> IO a - anyIOException x _ = return x - - ------------------------------------------------------------------------------- --- Instances --- - -instance Binary MonitorStateFileSet where - put (MonitorStateFileSet singlePaths globPaths) = do - put (1 :: Int) -- version - put singlePaths - put globPaths - get = do - ver <- get - if ver == (1 :: Int) - then do singlePaths <- get - globPaths <- get - return $! MonitorStateFileSet singlePaths globPaths - else fail "MonitorStateFileSet: wrong version" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Freeze.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Freeze.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Freeze.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Freeze.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,259 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Freeze --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal freeze command ------------------------------------------------------------------------------ -module Distribution.Client.Freeze ( - freeze, getFreezePkgs - ) where - -import Distribution.Client.Config ( SavedConfig(..) 
) -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types - ( ConstraintSource(..), LabeledPackageConstraint(..) ) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.InstallPlan - ( InstallPlan, PlanPackage ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.PkgConfigDb - ( PkgConfigDb, readPkgConfigDb ) -import Distribution.Client.Setup - ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) - , RepoContext(..) ) -import Distribution.Client.Sandbox.PackageEnvironment - ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, - userPackageEnvironmentFile ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) ) - -import Distribution.Package - ( Package, packageId, packageName, packageVersion ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, PackageDBStack ) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Program - ( ProgramConfiguration ) -import Distribution.Simple.Setup - ( fromFlag, fromFlagOrDefault, flagToMaybe ) -import Distribution.Simple.Utils - ( die, notice, debug, writeFileAtomic ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) - -import Control.Monad - ( when ) -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( mempty ) -#endif -import Data.Version - ( showVersion ) -import Distribution.Version - ( thisVersion ) - --- ------------------------------------------------------------ --- * The freeze command --- ------------------------------------------------------------ - --- | Freeze all of the dependencies by writing a constraints section --- constraining each dependency to an exact version. --- -freeze :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO () -freeze verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo - globalFlags freezeFlags = do - - pkgs <- getFreezePkgs - verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo - globalFlags freezeFlags - - if null pkgs - then notice verbosity $ "No packages to be frozen. " - ++ "As this package has no dependencies." - else if dryRun - then notice verbosity $ unlines $ - "The following packages would be frozen:" - : formatPkgs pkgs - - else freezePackages verbosity globalFlags pkgs - - where - dryRun = fromFlag (freezeDryRun freezeFlags) - --- | Get the list of packages whose versions would be frozen by the @freeze@ --- command. 
-getFreezePkgs :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO [PlanPackage] -getFreezePkgs verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo - globalFlags freezeFlags = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity conf - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - [UserTargetLocalDir "."] - - sanityCheck pkgSpecifiers - planPackages - verbosity comp platform mSandboxPkgInfo freezeFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - where - sanityCheck pkgSpecifiers = do - when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ - die $ "internal error: 'resolveUserTargets' returned " - ++ "unexpected named package specifiers!" - when (length pkgSpecifiers /= 1) $ - die $ "internal error: 'resolveUserTargets' returned " - ++ "unexpected source package specifiers!" - -planPackages :: Verbosity - -> Compiler - -> Platform - -> Maybe SandboxPackageInfo - -> FreezeFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier SourcePackage] - -> IO [PlanPackage] -planPackages verbosity comp platform mSandboxPkgInfo freezeFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do - - solver <- chooseSolver verbosity - (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) - notice verbosity "Resolving dependencies..." - - installPlan <- foldProgress logMsg die return $ - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - return $ pruneInstallPlan installPlan pkgSpecifiers - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - . addConstraints - [ let pkg = pkgSpecifierTarget pkgSpecifier - pc = PackageConstraintStanzas pkg stanzas - in LabeledPackageConstraint pc ConstraintSourceFreeze - | pkgSpecifier <- pkgSpecifiers ] - - . maybe id applySandboxInstallPolicy mSandboxPkgInfo - - $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers - - logMsg message rest = debug verbosity message >> rest - - stanzas = [ TestStanzas | testsEnabled ] - ++ [ BenchStanzas | benchmarksEnabled ] - testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags - benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags - - reorderGoals = fromFlag (freezeReorderGoals freezeFlags) - independentGoals = fromFlag (freezeIndependentGoals freezeFlags) - shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) - strongFlags = fromFlag (freezeStrongFlags freezeFlags) - maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) - - --- | Remove all unneeded packages from an install plan. --- --- A package is unneeded if it is either --- --- 1) the package that we are freezing, or --- --- 2) not a dependency (directly or transitively) of the package we are --- freezing. This is useful for removing previously installed packages --- which are no longer required from the install plan. 
-pruneInstallPlan :: InstallPlan - -> [PackageSpecifier SourcePackage] - -> [PlanPackage] -pruneInstallPlan installPlan pkgSpecifiers = - removeSelf pkgIds $ - InstallPlan.dependencyClosure installPlan (map fakeUnitId pkgIds) - where - pkgIds = [ packageId pkg - | SpecificSourcePackage pkg <- pkgSpecifiers ] - removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) - removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " - ++ "unexpected package specifiers!" - - -freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () -freezePackages verbosity globalFlags pkgs = do - - pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ - loadUserConfig verbosity "" (flagToMaybe . globalConstraintsFile $ globalFlags) - writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv - where - addFrozenConstraints config = - config { - savedConfigureExFlags = (savedConfigureExFlags config) { - configExConstraints = map constraint pkgs - } - } - constraint pkg = - (pkgIdToConstraint $ packageId pkg, ConstraintSourceUserConfig userPackageEnvironmentFile) - where - pkgIdToConstraint pkgId = - UserConstraintVersion (packageName pkgId) - (thisVersion $ packageVersion pkgId) - createPkgEnv config = mempty { pkgEnvSavedConfig = config } - showPkgEnv = BS.Char8.pack . showPackageEnvironment - - -formatPkgs :: Package pkg => [pkg] -> [String] -formatPkgs = map $ showPkg . packageId - where - showPkg pid = name pid ++ " == " ++ version pid - name = display . packageName - version = showVersion . packageVersion diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GenBounds.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GenBounds.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GenBounds.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GenBounds.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.GenBounds --- Copyright : (c) Doug Beardsley 2015 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal gen-bounds command for generating PVP-compliant version bounds. ------------------------------------------------------------------------------ -module Distribution.Client.GenBounds ( - genBounds - ) where - -import Data.Version - ( Version(..), showVersion ) -import Distribution.Client.Init - ( incVersion ) -import Distribution.Client.Freeze - ( getFreezePkgs ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) ) -import Distribution.Client.Setup - ( GlobalFlags(..), FreezeFlags(..), RepoContext ) -import Distribution.Package - ( Package(..), Dependency(..), PackageName(..) 
- , packageName, packageVersion ) -import Distribution.PackageDescription - ( buildDepends ) -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription ) -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack, compilerInfo ) -import Distribution.Simple.Program - ( ProgramConfiguration ) -import Distribution.Simple.Utils - ( tryFindPackageDesc ) -import Distribution.System - ( Platform ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Version - ( LowerBound(..), UpperBound(..), VersionRange(..), asVersionIntervals - , orLaterVersion, earlierVersion, intersectVersionRanges ) -import System.Directory - ( getCurrentDirectory ) - --- | Does this version range have an upper bound? -hasUpperBound :: VersionRange -> Bool -hasUpperBound vr = - case asVersionIntervals vr of - [] -> False - is -> if snd (last is) == NoUpperBound then False else True - --- | Given a version, return an API-compatible (according to PVP) version range. --- --- Example: @0.4.1.2@ produces the version range @>= 0.4.1 && < 0.5@. --- --- This version is slightly different than the one in --- 'Distribution.Client.Init'. This one uses a.b.c as the lower bound because --- the user could be using a new function introduced in a.b.c which would make --- ">= a.b" incorrect. -pvpize :: Version -> VersionRange -pvpize v = orLaterVersion (vn 3) - `intersectVersionRanges` - earlierVersion (incVersion 1 (vn 2)) - where - vn n = (v { versionBranch = take n (versionBranch v) }) - --- | Show the PVP-mandated version range for this package. The @padTo@ parameter --- specifies the width of the package name column. -showBounds :: Package pkg => Int -> pkg -> String -showBounds padTo p = unwords $ - (padAfter padTo $ unPackageName $ packageName p) : - map showInterval (asVersionIntervals $ pvpize $ packageVersion p) - where - padAfter :: Int -> String -> String - padAfter n str = str ++ replicate (n - length str) ' ' - - showInterval :: (LowerBound, UpperBound) -> String - showInterval (LowerBound _ _, NoUpperBound) = - error "Error: expected upper bound...this should never happen!" - showInterval (LowerBound l _, UpperBound u _) = - unwords [">=", showVersion l, "&& <", showVersion u] - --- | Entry point for the @gen-bounds@ command. -genBounds - :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO () -genBounds verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo - globalFlags freezeFlags = do - - let cinfo = compilerInfo comp - - cwd <- getCurrentDirectory - path <- tryFindPackageDesc cwd - gpd <- readPackageDescription verbosity path - let epd = finalizePackageDescription [] (const True) platform cinfo [] gpd - case epd of - Left _ -> putStrLn "finalizePackageDescription failed" - Right (pd,_) -> do - let needBounds = filter (not . hasUpperBound . depVersion) $ - buildDepends pd - - if (null needBounds) - then putStrLn - "Congratulations, all your dependencies have upper bounds!" - else go needBounds - where - go needBounds = do - pkgs <- getFreezePkgs - verbosity packageDBs repoCtxt comp platform conf - mSandboxPkgInfo globalFlags freezeFlags - - putStrLn boundsNeededMsg - - let isNeeded pkg = unPackageName (packageName pkg) - `elem` map depName needBounds - let thePkgs = filter isNeeded pkgs - - let padTo = maximum $ map (length . unPackageName . 
packageName) pkgs - mapM_ (putStrLn . (++",") . showBounds padTo) thePkgs - - depName :: Dependency -> String - depName (Dependency (PackageName nm) _) = nm - - depVersion :: Dependency -> VersionRange - depVersion (Dependency _ vr) = vr - --- | The message printed when some dependencies are found to be lacking proper --- PVP-mandated bounds. -boundsNeededMsg :: String -boundsNeededMsg = unlines - [ "" - , "The following packages need bounds and here is a suggested starting point." - , "You can copy and paste this into the build-depends section in your .cabal" - , "file and it should work (with the appropriate removal of commas)." - , "" - , "Note that version bounds are a statement that you've successfully built and" - , "tested your package and expect it to work with any of the specified package" - , "versions (PROVIDED that those packages continue to conform with the PVP)." - , "Therefore, the version bounds generated here are the most conservative" - , "based on the versions that you are currently building with. If you know" - , "your package will work with versions outside the ranges generated here," - , "feel free to widen them." - , "" - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Get.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Get.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Get.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Get.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,355 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Get --- Copyright : (c) Andrea Vezzosi 2008 --- Duncan Coutts 2011 --- John Millikin 2012 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- The 'cabal get' command. ------------------------------------------------------------------------------ - -module Distribution.Client.Get ( - get - ) where - -import Distribution.Package - ( PackageId, packageId, packageName ) -import Distribution.Simple.Setup - ( Flag(..), fromFlag, fromFlagOrDefault ) -import Distribution.Simple.Utils - ( notice, die, info, writeFileAtomic ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Text(display) -import qualified Distribution.PackageDescription as PD - -import Distribution.Client.Setup - ( GlobalFlags(..), GetFlags(..), RepoContext(..) 
) -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.Dependency -import Distribution.Client.FetchUtils -import qualified Distribution.Client.Tar as Tar (extractTarGzFile) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages ) -import Distribution.Client.Compat.Process - ( readProcessWithExitCode ) -import Distribution.Compat.Exception - ( catchIO ) - -import Control.Exception - ( finally ) -import Control.Monad - ( filterM, forM_, unless, when ) -import Data.List - ( sortBy ) -import qualified Data.Map -import Data.Maybe - ( listToMaybe, mapMaybe ) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( mempty ) -#endif -import Data.Ord - ( comparing ) -import System.Directory - ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist - , getCurrentDirectory, setCurrentDirectory - ) -import System.Exit - ( ExitCode(..) ) -import System.FilePath - ( (), (<.>), addTrailingPathSeparator ) -import System.Process - ( rawSystem ) - - --- | Entry point for the 'cabal get' command. -get :: Verbosity - -> RepoContext - -> GlobalFlags - -> GetFlags - -> [UserTarget] - -> IO () -get verbosity _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." - -get verbosity repoCtxt globalFlags getFlags userTargets = do - let useFork = case (getSourceRepository getFlags) of - NoFlag -> False - _ -> True - - unless useFork $ - mapM_ checkTarget userTargets - - sourcePkgDb <- getSourcePackages verbosity repoCtxt - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - - pkgs <- either (die . unlines . map show) return $ - resolveWithoutDependencies - (resolverParams sourcePkgDb pkgSpecifiers) - - unless (null prefix) $ - createDirectoryIfMissing True prefix - - if useFork - then fork pkgs - else unpack pkgs - - where - resolverParams sourcePkgDb pkgSpecifiers = - --TODO: add command-line constraint and preference args for unpack - standardInstallPolicy mempty sourcePkgDb pkgSpecifiers - - prefix = fromFlagOrDefault "" (getDestDir getFlags) - - fork :: [SourcePackage] -> IO () - fork pkgs = do - let kind = fromFlag . getSourceRepository $ getFlags - branchers <- findUsableBranchers - mapM_ (forkPackage verbosity branchers prefix kind) pkgs - - unpack :: [SourcePackage] -> IO () - unpack pkgs = do - forM_ pkgs $ \pkg -> do - location <- fetchPackage verbosity repoCtxt (packageSource pkg) - let pkgid = packageId pkg - descOverride | usePristine = Nothing - | otherwise = packageDescrOverride pkg - case location of - LocalTarballPackage tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - RemoteTarballPackage _tarballURL tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - RepoTarballPackage _repo _pkgid tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - LocalUnpackedPackage _ -> - error "Distribution.Client.Get.unpack: the impossible happened." - where - usePristine = fromFlagOrDefault False (getPristine getFlags) - -checkTarget :: UserTarget -> IO () -checkTarget target = case target of - UserTargetLocalDir dir -> die (notTarball dir) - UserTargetLocalCabalFile file -> die (notTarball file) - _ -> return () - where - notTarball t = - "The 'get' command is for tarball packages. " - ++ "The target '" ++ t ++ "' is not a tarball." 
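-- A condensed sketch of the decision made in 'get' above (not the real code
-- path): an explicit --source-repository flag switches from unpacking the
-- release tarball to forking the upstream repository.  'unpackSketch' and
-- 'forkSketch' are placeholder actions invented for this sketch.
import Control.Monad (unless)
import System.Directory (createDirectoryIfMissing)

getSketch :: Maybe String  -- ^ value of --source-repository, if given
          -> FilePath      -- ^ destination prefix (--destdir, default "")
          -> [String]      -- ^ resolved package ids
          -> IO ()
getSketch sourceRepoKind prefix pkgids = do
  unless (null prefix) $ createDirectoryIfMissing True prefix
  let action = case sourceRepoKind of
        Nothing -> unpackSketch   -- default: fetch and unpack the tarball
        Just _  -> forkSketch     -- clone the package's source repository
  mapM_ action pkgids
  where
    unpackSketch, forkSketch :: String -> IO ()
    unpackSketch pkg = putStrLn ("would unpack " ++ pkg ++ " into " ++ prefix)
    forkSketch   pkg = putStrLn ("would fork " ++ pkg ++ " into " ++ prefix)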
- --- ------------------------------------------------------------ --- * Unpacking the source tarball --- ------------------------------------------------------------ - -unpackPackage :: Verbosity -> FilePath -> PackageId - -> PackageDescriptionOverride - -> FilePath -> IO () -unpackPackage verbosity prefix pkgid descOverride pkgPath = do - let pkgdirname = display pkgid - pkgdir = prefix pkgdirname - pkgdir' = addTrailingPathSeparator pkgdir - existsDir <- doesDirectoryExist pkgdir - when existsDir $ die $ - "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking." - existsFile <- doesFileExist pkgdir - when existsFile $ die $ - "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." - notice verbosity $ "Unpacking to " ++ pkgdir' - Tar.extractTarGzFile prefix pkgdirname pkgPath - - case descOverride of - Nothing -> return () - Just pkgtxt -> do - let descFilePath = pkgdir display (packageName pkgid) <.> "cabal" - info verbosity $ - "Updating " ++ descFilePath - ++ " with the latest revision from the index." - writeFileAtomic descFilePath pkgtxt - - --- ------------------------------------------------------------ --- * Forking the source repository --- ------------------------------------------------------------ - -data BranchCmd = BranchCmd (Verbosity -> FilePath -> IO ExitCode) - -data Brancher = Brancher - { brancherBinary :: String - , brancherBuildCmd :: PD.SourceRepo -> Maybe BranchCmd - } - --- | The set of all supported branch drivers. -allBranchers :: [(PD.RepoType, Brancher)] -allBranchers = - [ (PD.Bazaar, branchBzr) - , (PD.Darcs, branchDarcs) - , (PD.Git, branchGit) - , (PD.Mercurial, branchHg) - , (PD.SVN, branchSvn) - ] - --- | Find which usable branch drivers (selected from 'allBranchers') are --- available and usable on the local machine. --- --- Each driver's main command is run with @--help@, and if the child process --- exits successfully, that brancher is considered usable. -findUsableBranchers :: IO (Data.Map.Map PD.RepoType Brancher) -findUsableBranchers = do - let usable (_, brancher) = flip catchIO (const (return False)) $ do - let cmd = brancherBinary brancher - (exitCode, _, _) <- readProcessWithExitCode cmd ["--help"] "" - return (exitCode == ExitSuccess) - pairs <- filterM usable allBranchers - return (Data.Map.fromList pairs) - --- | Fork a single package from a remote source repository to the local --- file system. -forkPackage :: Verbosity - -> Data.Map.Map PD.RepoType Brancher - -- ^ Branchers supported by the local machine. - -> FilePath - -- ^ The directory in which new branches or repositories will - -- be created. - -> (Maybe PD.RepoKind) - -- ^ Which repo to choose. - -> SourcePackage - -- ^ The package to fork. 
- -> IO () -forkPackage verbosity branchers prefix kind src = do - let desc = PD.packageDescription (packageDescription src) - pkgid = display (packageId src) - pkgname = display (packageName src) - destdir = prefix pkgname - - destDirExists <- doesDirectoryExist destdir - when destDirExists $ do - die ("The directory " ++ show destdir ++ " already exists, not forking.") - - destFileExists <- doesFileExist destdir - when destFileExists $ do - die ("A file " ++ show destdir ++ " is in the way, not forking.") - - let repos = PD.sourceRepos desc - case findBranchCmd branchers repos kind of - Just (BranchCmd io) -> do - exitCode <- io verbosity destdir - case exitCode of - ExitSuccess -> return () - ExitFailure _ -> die ("Couldn't fork package " ++ pkgid) - Nothing -> case repos of - [] -> die ("Package " ++ pkgid - ++ " does not have any source repositories.") - _ -> die ("Package " ++ pkgid - ++ " does not have any usable source repositories.") - --- | Given a set of possible branchers, and a set of possible source --- repositories, find a repository that is both 1) likely to be specific to --- this source version and 2) is supported by the local machine. -findBranchCmd :: Data.Map.Map PD.RepoType Brancher -> [PD.SourceRepo] - -> (Maybe PD.RepoKind) -> Maybe BranchCmd -findBranchCmd branchers allRepos maybeKind = cmd where - -- Sort repositories by kind, from This to Head to Unknown. Repositories - -- with equivalent kinds are selected based on the order they appear in - -- the Cabal description file. - repos' = sortBy (comparing thisFirst) allRepos - thisFirst r = case PD.repoKind r of - PD.RepoThis -> 0 :: Int - PD.RepoHead -> case PD.repoTag r of - -- If the type is 'head' but the author specified a tag, they - -- probably meant to create a 'this' repository but screwed up. - Just _ -> 0 - Nothing -> 1 - PD.RepoKindUnknown _ -> 2 - - -- If the user has specified the repo kind, filter out the repositories - -- she's not interested in. - repos = maybe repos' (\k -> filter ((==) k . PD.repoKind) repos') maybeKind - - repoBranchCmd repo = do - t <- PD.repoType repo - brancher <- Data.Map.lookup t branchers - brancherBuildCmd brancher repo - - cmd = listToMaybe (mapMaybe repoBranchCmd repos) - --- | Branch driver for Bazaar. -branchBzr :: Brancher -branchBzr = Brancher "bzr" $ \repo -> do - src <- PD.repoLocation repo - let args dst = case PD.repoTag repo of - Just tag -> ["branch", src, dst, "-r", "tag:" ++ tag] - Nothing -> ["branch", src, dst] - return $ BranchCmd $ \verbosity dst -> do - notice verbosity ("bzr: branch " ++ show src) - rawSystem "bzr" (args dst) - --- | Branch driver for Darcs. -branchDarcs :: Brancher -branchDarcs = Brancher "darcs" $ \repo -> do - src <- PD.repoLocation repo - let args dst = case PD.repoTag repo of - Just tag -> ["get", src, dst, "-t", tag] - Nothing -> ["get", src, dst] - return $ BranchCmd $ \verbosity dst -> do - notice verbosity ("darcs: get " ++ show src) - rawSystem "darcs" (args dst) - --- | Branch driver for Git. 
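-- The preference order used by 'findBranchCmd' above, restated on toy data
-- (a hypothetical sketch; 'ToyRepoKind', 'rank' and 'orderRepos' are invented
-- names, not Cabal types): lower rank wins, and a 'head' repository that
-- carries a tag is ranked like a 'this' repository.
import Data.List (sortBy)
import Data.Ord (comparing)

data ToyRepoKind = ToyThis | ToyHead (Maybe String) | ToyUnknown
  deriving (Eq, Show)

rank :: ToyRepoKind -> Int
rank ToyThis            = 0
rank (ToyHead (Just _)) = 0   -- 'head' with a tag is treated like 'this'
rank (ToyHead Nothing)  = 1
rank ToyUnknown         = 2

orderRepos :: [ToyRepoKind] -> [ToyRepoKind]
orderRepos = sortBy (comparing rank)

-- e.g. orderRepos [ToyUnknown, ToyHead Nothing, ToyThis]
--        == [ToyThis, ToyHead Nothing, ToyUnknown]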
-branchGit :: Brancher -branchGit = Brancher "git" $ \repo -> do - src <- PD.repoLocation repo - let branchArgs = case PD.repoBranch repo of - Just b -> ["--branch", b] - Nothing -> [] - let postClone dst = case PD.repoTag repo of - Just t -> do - cwd <- getCurrentDirectory - setCurrentDirectory dst - finally - (rawSystem "git" (["checkout", t] ++ branchArgs)) - (setCurrentDirectory cwd) - Nothing -> return ExitSuccess - return $ BranchCmd $ \verbosity dst -> do - notice verbosity ("git: clone " ++ show src) - code <- rawSystem "git" (["clone", src, dst] ++ branchArgs) - case code of - ExitFailure _ -> return code - ExitSuccess -> postClone dst - --- | Branch driver for Mercurial. -branchHg :: Brancher -branchHg = Brancher "hg" $ \repo -> do - src <- PD.repoLocation repo - let branchArgs = case PD.repoBranch repo of - Just b -> ["--branch", b] - Nothing -> [] - let tagArgs = case PD.repoTag repo of - Just t -> ["--rev", t] - Nothing -> [] - let args dst = ["clone", src, dst] ++ branchArgs ++ tagArgs - return $ BranchCmd $ \verbosity dst -> do - notice verbosity ("hg: clone " ++ show src) - rawSystem "hg" (args dst) - --- | Branch driver for Subversion. -branchSvn :: Brancher -branchSvn = Brancher "svn" $ \repo -> do - src <- PD.repoLocation repo - let args dst = ["checkout", src, dst] - return $ BranchCmd $ \verbosity dst -> do - notice verbosity ("svn: checkout " ++ show src) - rawSystem "svn" (args dst) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GlobalFlags.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GlobalFlags.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GlobalFlags.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GlobalFlags.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,267 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE ExistentialQuantification #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.GlobalFlags ( - GlobalFlags(..) - , defaultGlobalFlags - , RepoContext(..) - , withRepoContext - , withRepoContext' - ) where - -import Distribution.Client.Types - ( Repo(..), RemoteRepo(..) 
) -import Distribution.Compat.Semigroup -import Distribution.Simple.Setup - ( Flag(..), fromFlag, flagToMaybe ) -import Distribution.Utils.NubList - ( NubList, fromNubList ) -import Distribution.Client.HttpUtils - ( HttpTransport, configureTransport ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Simple.Utils - ( info ) - -import Data.Maybe - ( fromMaybe ) -import Control.Concurrent - ( MVar, newMVar, modifyMVar ) -import Control.Exception - ( throwIO ) -import Control.Monad - ( when ) -import System.FilePath - ( () ) -import Network.URI - ( uriScheme, uriPath ) -import Data.Map - ( Map ) -import qualified Data.Map as Map -import GHC.Generics ( Generic ) - -import qualified Hackage.Security.Client as Sec -import qualified Hackage.Security.Util.Path as Sec -import qualified Hackage.Security.Util.Pretty as Sec -import qualified Hackage.Security.Client.Repository.Cache as Sec -import qualified Hackage.Security.Client.Repository.Local as Sec.Local -import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote -import qualified Distribution.Client.Security.HTTP as Sec.HTTP - --- ------------------------------------------------------------ --- * Global flags --- ------------------------------------------------------------ - --- | Flags that apply at the top level, not to any sub-command. -data GlobalFlags = GlobalFlags { - globalVersion :: Flag Bool, - globalNumericVersion :: Flag Bool, - globalConfigFile :: Flag FilePath, - globalSandboxConfigFile :: Flag FilePath, - globalConstraintsFile :: Flag FilePath, - globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. - globalCacheDir :: Flag FilePath, - globalLocalRepos :: NubList FilePath, - globalLogsDir :: Flag FilePath, - globalWorldFile :: Flag FilePath, - globalRequireSandbox :: Flag Bool, - globalIgnoreSandbox :: Flag Bool, - globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates - globalHttpTransport :: Flag String - } deriving Generic - -defaultGlobalFlags :: GlobalFlags -defaultGlobalFlags = GlobalFlags { - globalVersion = Flag False, - globalNumericVersion = Flag False, - globalConfigFile = mempty, - globalSandboxConfigFile = mempty, - globalConstraintsFile = mempty, - globalRemoteRepos = mempty, - globalCacheDir = mempty, - globalLocalRepos = mempty, - globalLogsDir = mempty, - globalWorldFile = mempty, - globalRequireSandbox = Flag False, - globalIgnoreSandbox = Flag False, - globalIgnoreExpiry = Flag False, - globalHttpTransport = mempty - } - -instance Monoid GlobalFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup GlobalFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Repo context --- ------------------------------------------------------------ - --- | Access to repositories -data RepoContext = RepoContext { - -- | All user-specified repositories - repoContextRepos :: [Repo] - - -- | Get the HTTP transport - -- - -- The transport will be initialized on the first call to this function. - -- - -- NOTE: It is important that we don't eagerly initialize the transport. - -- Initializing the transport is not free, and especially in contexts where - -- we don't know a-priori whether or not we need the transport (for instance - -- when using cabal in "nix mode") incurring the overhead of transport - -- initialization on _every_ invocation (eg @cabal build@) is undesirable. 
- , repoContextGetTransport :: IO HttpTransport - - -- | Get the (initialized) secure repo - -- - -- (the 'Repo' type itself is stateless and must remain so, because it - -- must be serializable) - , repoContextWithSecureRepo :: forall a. - Repo - -> (forall down. Sec.Repository down -> IO a) - -> IO a - - -- | Should we ignore expiry times (when checking security)? - , repoContextIgnoreExpiry :: Bool - } - --- | Wrapper around 'Repository', hiding the type argument -data SecureRepo = forall down. SecureRepo (Sec.Repository down) - -withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a -withRepoContext verbosity globalFlags = - withRepoContext' - verbosity - (fromNubList (globalRemoteRepos globalFlags)) - (fromNubList (globalLocalRepos globalFlags)) - (fromFlag (globalCacheDir globalFlags)) - (flagToMaybe (globalHttpTransport globalFlags)) - (flagToMaybe (globalIgnoreExpiry globalFlags)) - -withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] - -> FilePath -> Maybe String -> Maybe Bool - -> (RepoContext -> IO a) - -> IO a -withRepoContext' verbosity remoteRepos localRepos - sharedCacheDir httpTransport ignoreExpiry = \callback -> do - transportRef <- newMVar Nothing - let httpLib = Sec.HTTP.transportAdapter - verbosity - (getTransport transportRef) - initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> - callback RepoContext { - repoContextRepos = allRemoteRepos - ++ map RepoLocal localRepos - , repoContextGetTransport = getTransport transportRef - , repoContextWithSecureRepo = withSecureRepo secureRepos' - , repoContextIgnoreExpiry = fromMaybe False ignoreExpiry - } - where - secureRemoteRepos = - [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] - allRemoteRepos = - [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir - | remote <- remoteRepos - , let cacheDir = sharedCacheDir remoteRepoName remote - isSecure = remoteRepoSecure remote == Just True - ] - - getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport - getTransport transportRef = - modifyMVar transportRef $ \mTransport -> do - transport <- case mTransport of - Just tr -> return tr - Nothing -> configureTransport verbosity httpTransport - return (Just transport, transport) - - withSecureRepo :: Map Repo SecureRepo - -> Repo - -> (forall down. Sec.Repository down -> IO a) - -> IO a - withSecureRepo secureRepos repo callback = - case Map.lookup repo secureRepos of - Just (SecureRepo secureRepo) -> callback secureRepo - Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" - --- | Initialize the provided secure repositories --- --- Assumed invariant: `remoteRepoSecure` should be set for all these repos. -initSecureRepos :: forall a. Verbosity - -> Sec.HTTP.HttpLib - -> [(RemoteRepo, FilePath)] - -> (Map Repo SecureRepo -> IO a) - -> IO a -initSecureRepos verbosity httpLib repos callback = go Map.empty repos - where - go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a - go !acc [] = callback acc - go !acc ((r,cacheDir):rs) = do - cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir - initSecureRepo verbosity httpLib r cachePath $ \r' -> - go (Map.insert (RepoSecure r cacheDir) r' acc) rs - --- | Initialize the given secure repo --- --- The security library has its own concept of a "local" repository, distinct --- from @cabal-install@'s; these are secure repositories, but live in the local --- file system. 
We use the convention that these repositories are identified by --- URLs of the form @file:/path/to/local/repo@. -initSecureRepo :: Verbosity - -> Sec.HTTP.HttpLib - -> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed) - -> Sec.Path Sec.Absolute -- ^ Cache dir - -> (SecureRepo -> IO a) -- ^ Callback - -> IO a -initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do - withRepo $ \r -> do - requiresBootstrap <- Sec.requiresBootstrap r - when requiresBootstrap $ Sec.uncheckClientErrors $ - Sec.bootstrap r - (map Sec.KeyId remoteRepoRootKeys) - (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold)) - callback $ SecureRepo r - where - -- Initialize local or remote repo depending on the URI - withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a - withRepo callback | uriScheme remoteRepoURI == "file:" = do - dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI) - Sec.Local.withRepository dir - cache - Sec.hackageRepoLayout - Sec.hackageIndexLayout - logTUF - callback - withRepo callback = - Sec.Remote.withRepository httpLib - [remoteRepoURI] - Sec.Remote.defaultRepoOpts - cache - Sec.hackageRepoLayout - Sec.hackageIndexLayout - logTUF - callback - - cache :: Sec.Cache - cache = Sec.Cache { - cacheRoot = cachePath - , cacheLayout = Sec.cabalCacheLayout { - Sec.cacheLayoutIndexTar = cacheFn "01-index.tar" - , Sec.cacheLayoutIndexIdx = cacheFn "01-index.tar.idx" - , Sec.cacheLayoutIndexTarGz = cacheFn "01-index.tar.gz" - } - } - - cacheFn :: FilePath -> Sec.CachePath - cacheFn = Sec.rootPath . Sec.fragment - - -- We display any TUF progress only in verbose mode, including any transient - -- verification errors. If verification fails, then the final exception that - -- is thrown will of course be shown. - logTUF :: Sec.LogMessage -> IO () - logTUF = info verbosity . Sec.pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Glob.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Glob.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Glob.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,269 +0,0 @@ -{-# LANGUAGE CPP, DeriveGeneric #-} - ---TODO: [code cleanup] plausibly much of this module should be merged with --- similar functionality in Cabal. -module Distribution.Client.Glob - ( FilePathGlob(..) - , FilePathRoot(..) - , FilePathGlobRel(..) - , Glob - , GlobPiece(..) 
- , matchFileGlob - , matchFileGlobRel - , matchGlob - , isTrivialFilePathGlob - , getFilePathRootDirectory - ) where - -import Data.Char (toUpper) -import Data.List (stripPrefix) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import Distribution.Compat.Binary -import GHC.Generics (Generic) - -import Distribution.Text -import Distribution.Compat.ReadP (ReadP, (<++), (+++)) -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -import System.FilePath -import System.Directory - - --- | A file path specified by globbing --- -data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel - deriving (Eq, Show, Generic) - -data FilePathGlobRel - = GlobDir !Glob !FilePathGlobRel - | GlobFile !Glob - | GlobDirTrailing -- ^ trailing dir, a glob ending in @/@ - deriving (Eq, Show, Generic) - --- | A single directory or file component of a globbed path -type Glob = [GlobPiece] - --- | A piece of a globbing pattern -data GlobPiece = WildCard - | Literal String - | Union [Glob] - deriving (Eq, Show, Generic) - -data FilePathRoot - = FilePathRelative - | FilePathRoot FilePath -- ^ e.g. @"/"@, @"c:\"@ or result of 'takeDrive' - | FilePathHomeDir - deriving (Eq, Show, Generic) - -instance Binary FilePathGlob -instance Binary FilePathRoot -instance Binary FilePathGlobRel -instance Binary GlobPiece - - --- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and --- is in fact equivalent to a non-glob 'FilePath'. --- --- If it is trivial in this sense then the result is the equivalent constant --- 'FilePath'. On the other hand if it is not trivial (so could in principle --- match more than one file) then the result is @Nothing@. --- -isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath -isTrivialFilePathGlob (FilePathGlob root pathglob) = - case root of - FilePathRelative -> go [] pathglob - FilePathRoot root' -> go [root'] pathglob - FilePathHomeDir -> Nothing - where - go paths (GlobDir [Literal path] globs) = go (path:paths) globs - go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths))) - go paths GlobDirTrailing = Just (addTrailingPathSeparator - (joinPath (reverse paths))) - go _ _ = Nothing - --- | Get the 'FilePath' corresponding to a 'FilePathRoot'. --- --- The 'FilePath' argument is required to supply the path for the --- 'FilePathRelative' case. --- -getFilePathRootDirectory :: FilePathRoot - -> FilePath -- ^ root for relative paths - -> IO FilePath -getFilePathRootDirectory FilePathRelative root = return root -getFilePathRootDirectory (FilePathRoot root) _ = return root -getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory - - ------------------------------------------------------------------------------- --- Matching --- - --- | Match a 'FilePathGlob' against the file system, starting from a given --- root directory for relative paths. The results of relative globs are --- relative to the given root. Matches for absolute globs are absolute. --- -matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath] -matchFileGlob relroot (FilePathGlob globroot glob) = do - root <- getFilePathRootDirectory globroot relroot - matches <- matchFileGlobRel root glob - case globroot of - FilePathRelative -> return matches - _ -> return (map (root ) matches) - --- | Match a 'FilePathGlobRel' against the file system, starting from a --- given root directory. The results are all relative to the given root. 
--- -matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath] -matchFileGlobRel root glob0 = go glob0 "" - where - go (GlobFile glob) dir = do - entries <- getDirectoryContents (root dir) - let files = filter (matchGlob glob) entries - return (map (dir ) files) - - go (GlobDir glob globPath) dir = do - entries <- getDirectoryContents (root dir) - subdirs <- filterM (\subdir -> doesDirectoryExist - (root dir subdir)) - $ filter (matchGlob glob) entries - concat <$> mapM (\subdir -> go globPath (dir subdir)) subdirs - - go GlobDirTrailing dir = return [dir] - - --- | Match a globbing pattern against a file path component --- -matchGlob :: Glob -> String -> Bool -matchGlob = goStart - where - -- From the man page, glob(7): - -- "If a filename starts with a '.', this character must be - -- matched explicitly." - - go, goStart :: [GlobPiece] -> String -> Bool - - goStart (WildCard:_) ('.':_) = False - goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs) - globs - goStart rest cs = go rest cs - - go [] "" = True - go (Literal lit:rest) cs - | Just cs' <- stripPrefix lit cs - = go rest cs' - | otherwise = False - go [WildCard] "" = True - go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs - go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs - go [] (_:_) = False - go (_:_) "" = False - - ------------------------------------------------------------------------------- --- Parsing & printing --- - -instance Text FilePathGlob where - disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob - parse = - parse >>= \root -> - (FilePathGlob root <$> parse) - <++ (when (root == FilePathRelative) Parse.pfail >> - return (FilePathGlob root GlobDirTrailing)) - -instance Text FilePathRoot where - disp FilePathRelative = Disp.empty - disp (FilePathRoot root) = Disp.text root - disp FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/' - - parse = - ( (Parse.char '/' >> return (FilePathRoot "/")) - +++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir) - +++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z') - || (c >= 'A' && c <= 'Z')) - _ <- Parse.char ':' - _ <- Parse.char '/' +++ Parse.char '\\' - return (FilePathRoot (toUpper drive : ":\\"))) - ) - <++ return FilePathRelative - -instance Text FilePathGlobRel where - disp (GlobDir glob pathglob) = dispGlob glob - Disp.<> Disp.char '/' - Disp.<> disp pathglob - disp (GlobFile glob) = dispGlob glob - disp GlobDirTrailing = Disp.empty - - parse = parsePath - where - parsePath :: ReadP r FilePathGlobRel - parsePath = - parseGlob >>= \globpieces -> - asDir globpieces - <++ asTDir globpieces - <++ asFile globpieces - - asDir glob = do dirSep - globs <- parsePath - return (GlobDir glob globs) - asTDir glob = do dirSep - return (GlobDir glob GlobDirTrailing) - asFile glob = return (GlobFile glob) - - dirSep = (Parse.char '/' >> return ()) - +++ (do _ <- Parse.char '\\' - -- check this isn't an escape code - following <- Parse.look - case following of - (c:_) | isGlobEscapedChar c -> Parse.pfail - _ -> return ()) - - -dispGlob :: Glob -> Disp.Doc -dispGlob = Disp.hcat . 
map dispPiece - where - dispPiece WildCard = Disp.char '*' - dispPiece (Literal str) = Disp.text (escape str) - dispPiece (Union globs) = Disp.braces - (Disp.hcat (Disp.punctuate - (Disp.char ',') - (map dispGlob globs))) - escape [] = [] - escape (c:cs) - | isGlobEscapedChar c = '\\' : c : escape cs - | otherwise = c : escape cs - -parseGlob :: ReadP r Glob -parseGlob = Parse.many1 parsePiece - where - parsePiece = literal +++ wildcard +++ union - - wildcard = Parse.char '*' >> return WildCard - - union = Parse.between (Parse.char '{') (Parse.char '}') $ - fmap Union (Parse.sepBy1 parseGlob (Parse.char ',')) - - literal = Literal `fmap` litchars1 - - litchar = normal +++ escape - - normal = Parse.satisfy (\c -> not (isGlobEscapedChar c) - && c /= '/' && c /= '\\') - escape = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar - - litchars1 :: ReadP r [Char] - litchars1 = liftM2 (:) litchar litchars - - litchars :: ReadP r [Char] - litchars = litchars1 <++ return [] - -isGlobEscapedChar :: Char -> Bool -isGlobEscapedChar '*' = True -isGlobEscapedChar '{' = True -isGlobEscapedChar '}' = True -isGlobEscapedChar ',' = True -isGlobEscapedChar _ = False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GZipUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GZipUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/GZipUtils.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,86 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.GZipUtils --- Copyright : (c) Dmitry Astapov 2010 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Provides a convenience functions for working with files that may or may not --- be zipped. ------------------------------------------------------------------------------ -module Distribution.Client.GZipUtils ( - maybeDecompress, - ) where - -import Codec.Compression.Zlib.Internal -import Data.ByteString.Lazy.Internal as BS (ByteString(Empty, Chunk)) - -#if MIN_VERSION_zlib(0,6,0) -import Control.Exception (throw) -import Control.Monad (liftM) -import Control.Monad.ST.Lazy (ST, runST) -import qualified Data.ByteString as Strict -#endif - --- | Attempts to decompress the `bytes' under the assumption that --- "data format" error at the very beginning of the stream means --- that it is already decompressed. Caller should make sanity checks --- to verify that it is not, in fact, garbage. --- --- This is to deal with http proxies that lie to us and transparently --- decompress without removing the content-encoding header. See: --- --- -maybeDecompress :: ByteString -> ByteString -#if MIN_VERSION_zlib(0,6,0) -maybeDecompress bytes = runST (go bytes decompressor) - where - decompressor :: DecompressStream (ST s) - decompressor = decompressST gzipOrZlibFormat defaultDecompressParams - - -- DataError at the beginning of the stream probably means that stream is - -- not compressed, so we return it as-is. - -- TODO: alternatively, we might consider looking for the two magic bytes - -- at the beginning of the gzip header. (not an option for zlib, though.) 
- go :: Monad m => ByteString -> DecompressStream m -> m ByteString - go cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k - go _ (DecompressStreamEnd _bs ) = return Empty - go _ (DecompressStreamError _err ) = return bytes - go cs (DecompressInputRequired k) = go cs' =<< k c - where - (c, cs') = uncons cs - - -- Once we have received any output though we regard errors as actual errors - -- and we throw them (as pure exceptions). - -- TODO: We could (and should) avoid these pure exceptions. - go' :: Monad m => ByteString -> DecompressStream m -> m ByteString - go' cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k - go' _ (DecompressStreamEnd _bs ) = return Empty - go' _ (DecompressStreamError err ) = throw err - go' cs (DecompressInputRequired k) = go' cs' =<< k c - where - (c, cs') = uncons cs - - uncons :: ByteString -> (Strict.ByteString, ByteString) - uncons Empty = (Strict.empty, Empty) - uncons (Chunk c cs) = (c, cs) -#else -maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes - where - -- DataError at the beginning of the stream probably means that stream is not compressed. - -- Returning it as-is. - -- TODO: alternatively, we might consider looking for the two magic bytes - -- at the beginning of the gzip header. - foldStream (StreamError _ _) = bytes - foldStream somethingElse = doFold somethingElse - - doFold StreamEnd = BS.Empty - doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) - doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Haddock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Haddock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Haddock.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Haddock --- Copyright : (c) Andrea Vezzosi 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Interfacing with Haddock --- ------------------------------------------------------------------------------ -module Distribution.Client.Haddock - ( - regenerateHaddockIndex - ) - where - -import Data.List (maximumBy) -import Data.Foldable (forM_) -import System.Directory (createDirectoryIfMissing, renameFile) -import System.FilePath ((), splitFileName) -import Distribution.Package - ( packageVersion ) -import Distribution.Simple.Haddock (haddockPackagePaths) -import Distribution.Simple.Program (haddockProgram, ProgramConfiguration - , rawSystemProgram, requireProgramVersion) -import Distribution.Version (Version(Version), orLaterVersion) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, allPackagesByName ) -import Distribution.Simple.Utils - ( comparing, debug, installDirectoryContents, withTempDirectory ) -import Distribution.InstalledPackageInfo as InstalledPackageInfo - ( InstalledPackageInfo(exposed) ) - -regenerateHaddockIndex :: Verbosity - -> InstalledPackageIndex -> ProgramConfiguration - -> FilePath - -> IO () -regenerateHaddockIndex verbosity pkgs conf index = 
do - (paths, warns) <- haddockPackagePaths pkgs' Nothing - let paths' = [ (interface, html) | (interface, Just html) <- paths] - forM_ warns (debug verbosity) - - (confHaddock, _, _) <- - requireProgramVersion verbosity haddockProgram - (orLaterVersion (Version [0,6] [])) conf - - createDirectoryIfMissing True destDir - - withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do - - let flags = [ "--gen-contents" - , "--gen-index" - , "--odir=" ++ tempDir - , "--title=Haskell modules on this system" ] - ++ [ "--read-interface=" ++ html ++ "," ++ interface - | (interface, html) <- paths' ] - rawSystemProgram verbosity confHaddock flags - renameFile (tempDir "index.html") (tempDir destFile) - installDirectoryContents verbosity tempDir destDir - - where - (destDir,destFile) = splitFileName index - pkgs' = [ maximumBy (comparing packageVersion) pkgvers' - | (_pname, pkgvers) <- allPackagesByName pkgs - , let pkgvers' = filter exposed pkgvers - , not (null pkgvers') ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/HttpUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/HttpUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/HttpUtils.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/HttpUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,794 +0,0 @@ -{-# LANGUAGE CPP, BangPatterns #-} ------------------------------------------------------------------------------ --- | Separate module for HTTP actions, using a proxy server if one exists. ------------------------------------------------------------------------------ -module Distribution.Client.HttpUtils ( - DownloadResult(..), - configureTransport, - HttpTransport(..), - HttpCode, - downloadURI, - transportCheckHttps, - remoteRepoCheckHttps, - remoteRepoTryUpgradeToHttps, - isOldHackageURI - ) where - -import Network.HTTP - ( Request (..), Response (..), RequestMethod (..) - , Header(..), HeaderName(..), lookupHeader ) -import Network.HTTP.Proxy ( Proxy(..), fetchProxy) -import Network.URI - ( URI (..), URIAuth (..), uriToString ) -import Network.Browser - ( browse, setOutHandler, setErrHandler, setProxy - , setAuthorityGen, request, setAllowBasicAuth, setUserAgent ) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import qualified Control.Exception as Exception -import Control.Monad - ( when, guard ) -import qualified Data.ByteString.Lazy.Char8 as BS -import Data.List - ( isPrefixOf, find, intercalate ) -import Data.Maybe - ( listToMaybe, maybeToList, fromMaybe ) -import qualified Paths_cabal_install (version) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils - ( die, info, warn, debug, notice, writeFileAtomic - , copyFileVerbose, withTempFile - , rawSystemStdInOut, toUTF8, fromUTF8, normaliseLineEndings ) -import Distribution.Client.Utils - ( readMaybe, withTempFileName ) -import Distribution.Client.Types - ( RemoteRepo(..) 
) -import Distribution.System - ( buildOS, buildArch ) -import Distribution.Text - ( display ) -import Data.Char - ( isSpace ) -import qualified System.FilePath.Posix as FilePath.Posix - ( splitDirectories ) -import System.FilePath - ( (<.>) ) -import System.Directory - ( doesFileExist, renameFile ) -import System.IO.Error - ( isDoesNotExistError ) -import Distribution.Simple.Program - ( Program, simpleProgram, ConfiguredProgram, programPath - , ProgramInvocation(..), programInvocation - , getProgramInvocationOutput ) -import Distribution.Simple.Program.Db - ( ProgramDb, emptyProgramDb, addKnownPrograms - , configureAllKnownPrograms - , requireProgram, lookupProgram ) -import Distribution.Simple.Program.Run - ( IOEncoding(..), getEffectiveEnvironment ) -import Numeric (showHex) -import System.Directory (canonicalizePath) -import System.IO (hClose) -import System.FilePath (takeFileName, takeDirectory) -import System.Random (randomRIO) -import System.Exit (ExitCode(..)) - - ------------------------------------------------------------------------------- --- Downloading a URI, given an HttpTransport --- - -data DownloadResult = FileAlreadyInCache - | FileDownloaded FilePath - deriving (Eq) - -downloadURI :: HttpTransport - -> Verbosity - -> URI -- ^ What to download - -> FilePath -- ^ Where to put it - -> IO DownloadResult -downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do - copyFileVerbose verbosity (uriPath uri) path - return (FileDownloaded path) - -- Can we store the hash of the file so we can safely return path when the - -- hash matches to avoid unnecessary computation? - -downloadURI transport verbosity uri path = do - - let etagPath = path <.> "etag" - targetExists <- doesFileExist path - etagPathExists <- doesFileExist etagPath - -- In rare cases the target file doesn't exist, but the etag does. - etag <- if targetExists && etagPathExists - then Just <$> readFile etagPath - else return Nothing - - -- Only use the external http transports if we actually have to - -- (or have been told to do so) - let transport' - | uriScheme uri == "http:" - , not (transportManuallySelected transport) - = plainHttpTransport - - | otherwise - = transport - - withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do - result <- getHttp transport' verbosity uri etag tmpFile [] - - -- Only write the etag if we get a 200 response code. - -- A 304 still sends us an etag header. - case result of - (200, Just newEtag) -> writeFile etagPath newEtag - _ -> return () - - case fst result of - 200 -> do - info verbosity ("Downloaded to " ++ path) - renameFile tmpFile path - return (FileDownloaded path) - 304 -> do - notice verbosity "Skipping download: local and remote files match." 
- return FileAlreadyInCache - errCode -> die $ "Failed to download " ++ show uri - ++ " : HTTP code " ++ show errCode - ------------------------------------------------------------------------------- --- Utilities for repo url management --- - -remoteRepoCheckHttps :: HttpTransport -> RemoteRepo -> IO () -remoteRepoCheckHttps transport repo - | uriScheme (remoteRepoURI repo) == "https:" - , not (transportSupportsHttps transport) - = die $ "The remote repository '" ++ remoteRepoName repo - ++ "' specifies a URL that " ++ requiresHttpsErrorMessage - | otherwise = return () - -transportCheckHttps :: HttpTransport -> URI -> IO () -transportCheckHttps transport uri - | uriScheme uri == "https:" - , not (transportSupportsHttps transport) - = die $ "The URL " ++ show uri - ++ " " ++ requiresHttpsErrorMessage - | otherwise = return () - -requiresHttpsErrorMessage :: String -requiresHttpsErrorMessage = - "requires HTTPS however the built-in HTTP implementation " - ++ "does not support HTTPS. The transport implementations with HTTPS " - ++ "support are " ++ intercalate ", " - [ name | (name, _, True, _ ) <- supportedTransports ] - ++ ". One of these will be selected automatically if the corresponding " - ++ "external program is available, or one can be selected specifically " - ++ "with the global flag --http-transport=" - -remoteRepoTryUpgradeToHttps :: HttpTransport -> RemoteRepo -> IO RemoteRepo -remoteRepoTryUpgradeToHttps transport repo - | remoteRepoShouldTryHttps repo - , uriScheme (remoteRepoURI repo) == "http:" - , not (transportSupportsHttps transport) - , not (transportManuallySelected transport) - = die $ "The builtin HTTP implementation does not support HTTPS, but using " - ++ "HTTPS for authenticated uploads is recommended. " - ++ "The transport implementations with HTTPS support are " - ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] - ++ "but they require the corresponding external program to be " - ++ "available. You can either make one available or use plain HTTP by " - ++ "using the global flag --http-transport=plain-http (or putting the " - ++ "equivalent in the config file). With plain HTTP, your password " - ++ "is sent using HTTP digest authentication so it cannot be easily " - ++ "intercepted, but it is not as secure as using HTTPS." - - | remoteRepoShouldTryHttps repo - , uriScheme (remoteRepoURI repo) == "http:" - , transportSupportsHttps transport - = return repo { - remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" } - } - - | otherwise - = return repo - --- | Utility function for legacy support. -isOldHackageURI :: URI -> Bool -isOldHackageURI uri - = case uriAuthority uri of - Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> - FilePath.Posix.splitDirectories (uriPath uri) - == ["/","packages","archive"] - _ -> False - - ------------------------------------------------------------------------------- --- Setting up a HttpTransport --- - -data HttpTransport = HttpTransport { - -- | GET a URI, with an optional ETag (to do a conditional fetch), - -- write the resource to the given file and return the HTTP status code, - -- and optional ETag. - getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath -> [Header] - -> IO (HttpCode, Maybe ETag), - - -- | POST a resource to a URI, with optional auth (username, password) - -- and return the HTTP status code and any redirect URL. 
- postHttp :: Verbosity -> URI -> String -> Maybe Auth - -> IO (HttpCode, String), - - -- | POST a file resource to a URI using multipart\/form-data encoding, - -- with optional auth (username, password) and return the HTTP status - -- code and any error string. - postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth - -> IO (HttpCode, String), - - -- | PUT a file resource to a URI, with optional auth - -- (username, password), extra headers and return the HTTP status code - -- and any error string. - putHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> [Header] - -> IO (HttpCode, String), - - -- | Whether this transport supports https or just http. - transportSupportsHttps :: Bool, - - -- | Whether this transport implementation was specifically chosen by - -- the user via configuration, or whether it was automatically selected. - -- Strictly speaking this is not a property of the transport itself but - -- about how it was chosen. Nevertheless it's convenient to keep here. - transportManuallySelected :: Bool - } - --TODO: why does postHttp return a redirect, but postHttpFile return errors? - -type HttpCode = Int -type ETag = String -type Auth = (String, String) - -noPostYet :: Verbosity -> URI -> String -> Maybe (String, String) - -> IO (Int, String) -noPostYet _ _ _ _ = die "Posting (for report upload) is not implemented yet" - -supportedTransports :: [(String, Maybe Program, Bool, - ProgramDb -> Maybe HttpTransport)] -supportedTransports = - [ let prog = simpleProgram "curl" in - ( "curl", Just prog, True - , \db -> curlTransport <$> lookupProgram prog db ) - - , let prog = simpleProgram "wget" in - ( "wget", Just prog, True - , \db -> wgetTransport <$> lookupProgram prog db ) - - , let prog = simpleProgram "powershell" in - ( "powershell", Just prog, True - , \db -> powershellTransport <$> lookupProgram prog db ) - - , ( "plain-http", Nothing, False - , \_ -> Just plainHttpTransport ) - ] - -configureTransport :: Verbosity -> Maybe String -> IO HttpTransport - -configureTransport verbosity (Just name) = - -- the user secifically selected a transport by name so we'll try and - -- configure that one - - case find (\(name',_,_,_) -> name' == name) supportedTransports of - Just (_, mprog, _tls, mkTrans) -> do - - progdb <- case mprog of - Nothing -> return emptyProgramDb - Just prog -> snd <$> requireProgram verbosity prog emptyProgramDb - -- ^^ if it fails, it'll fail here - - let Just transport = mkTrans progdb - return transport { transportManuallySelected = True } - - Nothing -> die $ "Unknown HTTP transport specified: " ++ name - ++ ". 
The supported transports are " - ++ intercalate ", " - [ name' | (name', _, _, _ ) <- supportedTransports ] - -configureTransport verbosity Nothing = do - -- the user hasn't selected a transport, so we'll pick the first one we - -- can configure successfully, provided that it supports tls - - -- for all the transports except plain-http we need to try and find - -- their external executable - progdb <- configureAllKnownPrograms verbosity $ - addKnownPrograms - [ prog | (_, Just prog, _, _) <- supportedTransports ] - emptyProgramDb - - let availableTransports = - [ (name, transport) - | (name, _, _, mkTrans) <- supportedTransports - , transport <- maybeToList (mkTrans progdb) ] - -- there's always one because the plain one is last and never fails - let (name, transport) = head availableTransports - debug verbosity $ "Selected http transport implementation: " ++ name - - return transport { transportManuallySelected = False } - - ------------------------------------------------------------------------------- --- The HttpTransports based on external programs --- - -curlTransport :: ConfiguredProgram -> HttpTransport -curlTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - withTempFile (takeDirectory destPath) - "curl-headers.txt" $ \tmpFile tmpHandle -> do - hClose tmpHandle - let args = [ show uri - , "--output", destPath - , "--location" - , "--write-out", "%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--dump-header", tmpFile ] - ++ concat - [ ["--header", "If-None-Match: " ++ t] - | t <- maybeToList etag ] - ++ concat - [ ["--header", show name ++ ": " ++ value] - | Header name value <- reqHeaders ] - - resp <- getProgramInvocationOutput verbosity - (programInvocation prog args) - headers <- readFile tmpFile - (code, _err, etag') <- parseResponse uri resp headers - return (code, etag') - - posthttp = noPostYet - - addAuthConfig auth progInvocation = progInvocation - { progInvokeInput = do - (uname, passwd) <- auth - return $ unlines - [ "--digest" - , "--user " ++ uname ++ ":" ++ passwd - ] - , progInvokeArgs = ["--config", "-"] ++ progInvokeArgs progInvocation - } - - posthttpfile verbosity uri path auth = do - let args = [ show uri - , "--form", "package=@"++path - , "--write-out", "\n%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--header", "Accept: text/plain" - , "--location" - ] - resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth - (programInvocation prog args) - (code, err, _etag) <- parseResponse uri resp "" - return (code, err) - - puthttpfile verbosity uri path auth headers = do - let args = [ show uri - , "--request", "PUT", "--data-binary", "@"++path - , "--write-out", "\n%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--location" - , "--header", "Accept: text/plain" - ] - ++ concat - [ ["--header", show name ++ ": " ++ value] - | Header name value <- headers ] - resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth - (programInvocation prog args) - (code, err, _etag) <- parseResponse uri resp "" - return (code, err) - - -- on success these curl involcations produces an output like "200" - -- and on failure it has the server error response first - parseResponse uri resp headers = - let codeerr = - case reverse (lines resp) of - (codeLine:rerrLines) -> - case readMaybe (trim codeLine) of - Just i -> let errstr = mkErrstr rerrLines - in Just (i, errstr) - 
Nothing -> Nothing - [] -> Nothing - - mkErrstr = unlines . reverse . dropWhile (all isSpace) - - mb_etag :: Maybe ETag - mb_etag = listToMaybe $ reverse - [ etag - | ["ETag:", etag] <- map words (lines headers) ] - - in case codeerr of - Just (i, err) -> return (i, err, mb_etag) - _ -> statusParseFail uri resp - - -wgetTransport :: ConfiguredProgram -> HttpTransport -wgetTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - resp <- runWGet verbosity uri args - (code, etag') <- parseOutput uri resp - return (code, etag') - where - args = [ "--output-document=" ++ destPath - , "--user-agent=" ++ userAgent - , "--tries=5" - , "--timeout=15" - , "--server-response" ] - ++ concat - [ ["--header", "If-None-Match: " ++ t] - | t <- maybeToList etag ] - ++ [ "--header=" ++ show name ++ ": " ++ value - | Header name value <- reqHeaders ] - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = - withTempFile (takeDirectory path) - (takeFileName path) $ \tmpFile tmpHandle -> - withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do - hClose responseHandle - (body, boundary) <- generateMultipartBody path - BS.hPut tmpHandle body - hClose tmpHandle - let args = [ "--post-file=" ++ tmpFile - , "--user-agent=" ++ userAgent - , "--server-response" - , "--output-document=" ++ responseFile - , "--header=Accept: text/plain" - , "--header=Content-type: multipart/form-data; " ++ - "boundary=" ++ boundary ] - out <- runWGet verbosity (addUriAuth auth uri) args - (code, _etag) <- parseOutput uri out - resp <- readFile responseFile - return (code, resp) - - puthttpfile verbosity uri path auth headers = - withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do - hClose responseHandle - let args = [ "--method=PUT", "--body-file="++path - , "--user-agent=" ++ userAgent - , "--server-response" - , "--output-document=" ++ responseFile - , "--header=Accept: text/plain" ] - ++ [ "--header=" ++ show name ++ ": " ++ value - | Header name value <- headers ] - - out <- runWGet verbosity (addUriAuth auth uri) args - (code, _etag) <- parseOutput uri out - resp <- readFile responseFile - return (code, resp) - - addUriAuth Nothing uri = uri - addUriAuth (Just (user, pass)) uri = uri - { uriAuthority = Just a { uriUserInfo = user ++ ":" ++ pass ++ "@" } - } - where - a = fromMaybe (URIAuth "" "" "") (uriAuthority uri) - - runWGet verbosity uri args = do - -- We pass the URI via STDIN because it contains the users' credentials - -- and sensitive data should not be passed via command line arguments. - let - invocation = (programInvocation prog ("--input-file=-" : args)) - { progInvokeInput = Just (uriToString id uri "") - } - - -- wget returns its output on stderr rather than stdout - (_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity - invocation - -- wget returns exit code 8 for server "errors" like "304 not modified" - if exitCode == ExitSuccess || exitCode == ExitFailure 8 - then return resp - else die $ "'" ++ programPath prog - ++ "' exited with an error:\n" ++ resp - - -- With the --server-response flag, wget produces output with the full - -- http server response with all headers, we want to find a line like - -- "HTTP/1.1 200 OK", but only the last one, since we can have multiple - -- requests due to redirects. 
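    -- As an abridged illustration (not captured output), a download that
    -- follows one redirect yields server-response lines along the lines of:
    --
    --   HTTP/1.1 301 Moved Permanently
    --   Location: ...
    --   HTTP/1.1 200 OK
    --   ETag: "..."
    --
    -- and it is that final "HTTP/1.1 200 OK" status line (plus any ETag
    -- header) that 'parseOutput' below picks out.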
- parseOutput uri resp = - let parsedCode = listToMaybe - [ code - | (protocol:codestr:_err) <- map words (reverse (lines resp)) - , "HTTP/" `isPrefixOf` protocol - , code <- maybeToList (readMaybe codestr) ] - mb_etag :: Maybe ETag - mb_etag = listToMaybe - [ etag - | ["ETag:", etag] <- map words (reverse (lines resp)) ] - in case parsedCode of - Just i -> return (i, mb_etag) - _ -> statusParseFail uri resp - - -powershellTransport :: ConfiguredProgram -> HttpTransport -powershellTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - resp <- runPowershellScript verbosity $ - webclientScript - (setupHeaders ((useragentHeader : etagHeader) ++ reqHeaders)) - [ "$wc.DownloadFile(" ++ escape (show uri) - ++ "," ++ escape destPath ++ ");" - , "Write-Host \"200\";" - , "Write-Host $wc.ResponseHeaders.Item(\"ETag\");" - ] - parseResponse resp - where - parseResponse x = case readMaybe . unlines . take 1 . lines $ trim x of - Just i -> return (i, Nothing) -- TODO extract real etag - Nothing -> statusParseFail uri x - etagHeader = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = - withTempFile (takeDirectory path) - (takeFileName path) $ \tmpFile tmpHandle -> do - (body, boundary) <- generateMultipartBody path - BS.hPut tmpHandle body - hClose tmpHandle - fullPath <- canonicalizePath tmpFile - - let contentHeader = Header HdrContentType - ("multipart/form-data; boundary=" ++ boundary) - resp <- runPowershellScript verbosity $ webclientScript - (setupHeaders (contentHeader : extraHeaders) ++ setupAuth auth) - (uploadFileAction "POST" uri fullPath) - parseUploadResponse uri resp - - puthttpfile verbosity uri path auth headers = do - fullPath <- canonicalizePath path - resp <- runPowershellScript verbosity $ webclientScript - (setupHeaders (extraHeaders ++ headers) ++ setupAuth auth) - (uploadFileAction "PUT" uri fullPath) - parseUploadResponse uri resp - - runPowershellScript verbosity script = do - let args = - [ "-InputFormat", "None" - -- the default execution policy doesn't allow running - -- unsigned scripts, so we need to tell powershell to bypass it - , "-ExecutionPolicy", "bypass" - , "-NoProfile", "-NonInteractive" - , "-Command", "-" - ] - getProgramInvocationOutput verbosity (programInvocation prog args) - { progInvokeInput = Just (script ++ "\nExit(0);") - } - - escape = show - - useragentHeader = Header HdrUserAgent userAgent - extraHeaders = [Header HdrAccept "text/plain", useragentHeader] - - setupHeaders headers = - [ "$wc.Headers.Add(" ++ escape (show name) ++ "," ++ escape value ++ ");" - | Header name value <- headers - ] - - setupAuth auth = - [ "$wc.Credentials = new-object System.Net.NetworkCredential(" - ++ escape uname ++ "," ++ escape passwd ++ ",\"\");" - | (uname,passwd) <- maybeToList auth - ] - - uploadFileAction method uri fullPath = - [ "$fileBytes = [System.IO.File]::ReadAllBytes(" ++ escape fullPath ++ ");" - , "$bodyBytes = $wc.UploadData(" ++ escape (show uri) ++ "," - ++ show method ++ ", $fileBytes);" - , "Write-Host \"200\";" - , "Write-Host (-join [System.Text.Encoding]::UTF8.GetChars($bodyBytes));" - ] - - parseUploadResponse uri resp = case lines (trim resp) of - (codeStr : message) - | Just code <- readMaybe codeStr -> return (code, unlines message) - _ -> statusParseFail uri resp - - webclientScript setup action = unlines - [ "$wc = new-object system.net.webclient;" - , unlines setup - , "Try {" 
- , unlines (map (" " ++) action) - , "} Catch [System.Net.WebException] {" - , " $exception = $_.Exception;" - , " If ($exception.Status -eq " - ++ "[System.Net.WebExceptionStatus]::ProtocolError) {" - , " $response = $exception.Response -as [System.Net.HttpWebResponse];" - , " $reader = new-object " - ++ "System.IO.StreamReader($response.GetResponseStream());" - , " Write-Host ($response.StatusCode -as [int]);" - , " Write-Host $reader.ReadToEnd();" - , " } Else {" - , " Write-Host $exception.Message;" - , " }" - , "} Catch {" - , " Write-Host $_.Exception.Message;" - , "}" - ] - - ------------------------------------------------------------------------------- --- The builtin plain HttpTransport --- - -plainHttpTransport :: HttpTransport -plainHttpTransport = - HttpTransport gethttp posthttp posthttpfile puthttpfile False False - where - gethttp verbosity uri etag destPath reqHeaders = do - let req = Request{ - rqURI = uri, - rqMethod = GET, - rqHeaders = [ Header HdrIfNoneMatch t - | t <- maybeToList etag ] - ++ reqHeaders, - rqBody = BS.empty - } - (_, resp) <- cabalBrowse verbosity Nothing (request req) - let code = convertRspCode (rspCode resp) - etag' = lookupHeader HdrETag (rspHeaders resp) - when (code==200 || code==206) $ - writeFileAtomic destPath $ rspBody resp - return (code, etag') - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = do - (body, boundary) <- generateMultipartBody path - let headers = [ Header HdrContentType - ("multipart/form-data; boundary="++boundary) - , Header HdrContentLength (show (BS.length body)) - , Header HdrAccept ("text/plain") - ] - req = Request { - rqURI = uri, - rqMethod = POST, - rqHeaders = headers, - rqBody = body - } - (_, resp) <- cabalBrowse verbosity auth (request req) - return (convertRspCode (rspCode resp), rspErrorString resp) - - puthttpfile verbosity uri path auth headers = do - body <- BS.readFile path - let req = Request { - rqURI = uri, - rqMethod = PUT, - rqHeaders = Header HdrContentLength (show (BS.length body)) - : Header HdrAccept "text/plain" - : headers, - rqBody = body - } - (_, resp) <- cabalBrowse verbosity auth (request req) - return (convertRspCode (rspCode resp), rspErrorString resp) - - convertRspCode (a,b,c) = a*100 + b*10 + c - - rspErrorString resp = - case lookupHeader HdrContentType (rspHeaders resp) of - Just contenttype - | takeWhile (/= ';') contenttype == "text/plain" - -> BS.unpack (rspBody resp) - _ -> rspReason resp - - cabalBrowse verbosity auth act = do - p <- fixupEmptyProxy <$> fetchProxy True - Exception.handleJust - (guard . isDoesNotExistError) - (const . die $ "Couldn't establish HTTP connection. " - ++ "Possible cause: HTTP proxy server is down.") $ - browse $ do - setProxy p - setErrHandler (warn verbosity . ("http error: "++)) - setOutHandler (debug verbosity) - setUserAgent userAgent - setAllowBasicAuth False - setAuthorityGen (\_ _ -> return auth) - act - - fixupEmptyProxy (Proxy uri _) | null uri = NoProxy - fixupEmptyProxy p = p - - ------------------------------------------------------------------------------- --- Common stuff used by multiple transport impls --- - -userAgent :: String -userAgent = concat [ "cabal-install/", display Paths_cabal_install.version - , " (", display buildOS, "; ", display buildArch, ")" - ] - -statusParseFail :: URI -> String -> IO a -statusParseFail uri r = - die $ "Failed to download " ++ show uri ++ " : " - ++ "No Status Code could be parsed from response: " ++ r - --- Trim -trim :: String -> String -trim = f . f - where f = reverse . 
dropWhile isSpace - - ------------------------------------------------------------------------------- --- Multipart stuff partially taken from cgi package. --- - -generateMultipartBody :: FilePath -> IO (BS.ByteString, String) -generateMultipartBody path = do - content <- BS.readFile path - boundary <- genBoundary - let !body = formatBody content (BS.pack boundary) - return (body, boundary) - where - formatBody content boundary = - BS.concat $ - [ crlf, dd, boundary, crlf ] - ++ [ BS.pack (show header) | header <- headers ] - ++ [ crlf - , content - , crlf, dd, boundary, dd, crlf ] - - headers = - [ Header (HdrCustom "Content-disposition") - ("form-data; name=package; " ++ - "filename=\"" ++ takeFileName path ++ "\"") - , Header HdrContentType "application/x-gzip" - ] - - crlf = BS.pack "\r\n" - dd = BS.pack "--" - -genBoundary :: IO String -genBoundary = do - i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer - return $ showHex i "" - ------------------------------------------------------------------------------- --- Compat utils - --- TODO: This is only here temporarily so we can release without also requiring --- the latest Cabal lib. The function is also included in Cabal now. - -getProgramInvocationOutputAndErrors :: Verbosity -> ProgramInvocation - -> IO (String, String, ExitCode) -getProgramInvocationOutputAndErrors verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokeCwd = mcwd, - progInvokeInput = minputStr, - progInvokeOutputEncoding = encoding - } = do - let utf8 = case encoding of IOEncodingUTF8 -> True; _ -> False - decode | utf8 = fromUTF8 . normaliseLineEndings - | otherwise = id - menv <- getEffectiveEnvironment envOverrides - (output, errors, exitCode) <- rawSystemStdInOut verbosity - path args - mcwd menv - input utf8 - return (decode output, decode errors, exitCode) - where - input = - case minputStr of - Nothing -> Nothing - Just inputStr -> Just $ - case encoding of - IOEncodingText -> (inputStr, False) - IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for utf8 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/IndexUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/IndexUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/IndexUtils.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/IndexUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,647 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE GADTs #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.IndexUtils --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- Extra utils related to the package indexes. 
------------------------------------------------------------------------------ -module Distribution.Client.IndexUtils ( - getIndexFileAge, - getInstalledPackages, - Configure.getInstalledPackagesMonitorFiles, - getSourcePackages, - getSourcePackagesMonitorFiles, - - Index(..), - PackageEntry(..), - parsePackageIndex, - updateRepoIndexCache, - updatePackageIndexCacheFile, - readCacheStrict, - - BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType - ) where - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Index as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.Types - -import Distribution.Package - ( PackageId, PackageIdentifier(..), PackageName(..) - , Package(..), packageVersion, packageName - , Dependency(Dependency) ) -import Distribution.Client.PackageIndex (PackageIndex) -import qualified Distribution.Client.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse -import Distribution.PackageDescription - ( GenericPackageDescription ) -import Distribution.PackageDescription.Parse - ( parsePackageDescription ) -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack ) -import Distribution.Simple.Program - ( ProgramConfiguration ) -import qualified Distribution.Simple.Configure as Configure - ( getInstalledPackages, getInstalledPackagesMonitorFiles ) -import Distribution.ParseUtils - ( ParseResult(..) ) -import Distribution.Version - ( Version(Version), intersectVersionRanges ) -import Distribution.Text - ( display, simpleParse ) -import Distribution.Verbosity - ( Verbosity, normal, lessVerbose ) -import Distribution.Simple.Utils - ( die, warn, info, fromUTF8, ignoreBOM ) -import Distribution.Client.Setup - ( RepoContext(..) ) - -import Data.Char (isAlphaNum) -import Data.Maybe (mapMaybe, catMaybes, maybeToList) -import Data.List (isPrefixOf) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -#endif -import qualified Data.Map as Map -import Control.Monad (when, liftM) -import Control.Exception (evaluate) -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import qualified Data.ByteString.Char8 as BSS -import Data.ByteString.Lazy (ByteString) -import Distribution.Client.GZipUtils (maybeDecompress) -import Distribution.Client.Utils ( byteStringToFilePath - , tryFindAddSourcePackageDesc ) -import Distribution.Compat.Exception (catchIO) -import Distribution.Client.Compat.Time (getFileAge, getModTime) -import System.Directory (doesFileExist, doesDirectoryExist) -import System.FilePath - ( (), (<.>), takeExtension, replaceExtension, splitDirectories, normalise ) -import System.FilePath.Posix as FilePath.Posix - ( takeFileName ) -import System.IO -import System.IO.Unsafe (unsafeInterleaveIO) -import System.IO.Error (isDoesNotExistError) - -import qualified Hackage.Security.Client as Sec -import qualified Hackage.Security.Util.Some as Sec - --- | Reduced-verbosity version of 'Configure.getInstalledPackages' -getInstalledPackages :: Verbosity -> Compiler - -> PackageDBStack -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packageDbs conf = - Configure.getInstalledPackages verbosity' comp packageDbs conf - where - verbosity' = lessVerbose verbosity - - --- | Get filename base (i.e. 
without file extension) for index-related files --- --- /Secure/ cabal repositories use a new extended & incremental --- @01-index.tar@. In order to avoid issues resulting from clobbering --- new/old-style index data, we save them locally to different names. --- --- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the --- @00-index.tar.gz@/@01-index.tar.gz@ file. -indexBaseName :: Repo -> FilePath -indexBaseName repo = repoLocalDir repo fn - where - fn = case repo of - RepoSecure {} -> "01-index" - RepoRemote {} -> "00-index" - RepoLocal {} -> "00-index" - ------------------------------------------------------------------------- --- Reading the source package index --- - --- | Read a repository index from disk, from the local files specified by --- a list of 'Repo's. --- --- All the 'SourcePackage's are marked as having come from the appropriate --- 'Repo'. --- --- This is a higher level wrapper used internally in cabal-install. -getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb -getSourcePackages verbosity repoCtxt | null (repoContextRepos repoCtxt) = do - warn verbosity $ "No remote package servers have been specified. Usually " - ++ "you would have one specified in the config file." - return SourcePackageDb { - packageIndex = mempty, - packagePreferences = mempty - } -getSourcePackages verbosity repoCtxt = do - info verbosity "Reading available packages..." - pkgss <- mapM (\r -> readRepoIndex verbosity repoCtxt r) (repoContextRepos repoCtxt) - let (pkgs, prefs) = mconcat pkgss - prefs' = Map.fromListWith intersectVersionRanges - [ (name, range) | Dependency name range <- prefs ] - _ <- evaluate pkgs - _ <- evaluate prefs' - return SourcePackageDb { - packageIndex = pkgs, - packagePreferences = prefs' - } - -readCacheStrict :: Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency]) -readCacheStrict verbosity index mkPkg = do - updateRepoIndexCache verbosity index - cache <- liftM readIndexCache $ BSS.readFile (cacheFile index) - withFile (indexFile index) ReadMode $ \indexHnd -> - packageListFromCache mkPkg indexHnd cache ReadPackageIndexStrict - --- | Read a repository index from disk, from the local file specified by --- the 'Repo'. --- --- All the 'SourcePackage's are marked as having come from the given 'Repo'. --- --- This is a higher level wrapper used internally in cabal-install. --- -readRepoIndex :: Verbosity -> RepoContext -> Repo - -> IO (PackageIndex SourcePackage, [Dependency]) -readRepoIndex verbosity repoCtxt repo = - handleNotFound $ do - warnIfIndexIsOld =<< getIndexFileAge repo - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) - readPackageIndexCacheFile mkAvailablePackage (RepoIndex repoCtxt repo) - - where - mkAvailablePackage pkgEntry = - SourcePackage { - packageInfoId = pkgid, - packageDescription = packageDesc pkgEntry, - packageSource = case pkgEntry of - NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing - BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path, - packageDescrOverride = case pkgEntry of - NormalPackage _ _ pkgtxt _ -> Just pkgtxt - _ -> Nothing - } - where - pkgid = packageId pkgEntry - - handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e - then do - case repo of - RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote - RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote - RepoLocal{..} -> warn verbosity $ - "The package list for the local repo '" ++ repoLocalDir - ++ "' is missing. The repo is invalid." 
- return mempty - else ioError e - - isOldThreshold = 15 --days - warnIfIndexIsOld dt = do - when (dt >= isOldThreshold) $ case repo of - RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt - RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt - RepoLocal{..} -> return () - - errMissingPackageList repoRemote = - "The package list for '" ++ remoteRepoName repoRemote - ++ "' does not exist. Run 'cabal update' to download it." - errOutdatedPackageList repoRemote dt = - "The package list for '" ++ remoteRepoName repoRemote - ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " - ++ "'cabal update' to get the latest list of available packages." - --- | Return the age of the index file in days (as a Double). -getIndexFileAge :: Repo -> IO Double -getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar" - --- | A set of files (or directories) that can be monitored to detect when --- there might have been a change in the source packages. --- -getSourcePackagesMonitorFiles :: [Repo] -> [FilePath] -getSourcePackagesMonitorFiles repos = - [ indexBaseName repo <.> "cache" | repo <- repos ] - --- | It is not necessary to call this, as the cache will be updated when the --- index is read normally. However you can do the work earlier if you like. --- -updateRepoIndexCache :: Verbosity -> Index -> IO () -updateRepoIndexCache verbosity index = - whenCacheOutOfDate index $ do - updatePackageIndexCacheFile verbosity index - -whenCacheOutOfDate :: Index -> IO () -> IO () -whenCacheOutOfDate index action = do - exists <- doesFileExist $ cacheFile index - if not exists - then action - else do - indexTime <- getModTime $ indexFile index - cacheTime <- getModTime $ cacheFile index - when (indexTime > cacheTime) action - ------------------------------------------------------------------------- --- Reading the index file --- - --- | An index entry is either a normal package, or a local build tree reference. -data PackageEntry = - NormalPackage PackageId GenericPackageDescription ByteString BlockNo - | BuildTreeRef BuildTreeRefType - PackageId GenericPackageDescription FilePath BlockNo - --- | A build tree reference is either a link or a snapshot. -data BuildTreeRefType = SnapshotRef | LinkRef - deriving Eq - -refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType -refTypeFromTypeCode t - | t == Tar.buildTreeRefTypeCode = LinkRef - | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef - | otherwise = - error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" - -typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode -typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode -typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode - -instance Package PackageEntry where - packageId (NormalPackage pkgid _ _ _) = pkgid - packageId (BuildTreeRef _ pkgid _ _ _) = pkgid - -packageDesc :: PackageEntry -> GenericPackageDescription -packageDesc (NormalPackage _ descr _ _) = descr -packageDesc (BuildTreeRef _ _ descr _ _) = descr - --- | Parse an uncompressed \"00-index.tar\" repository index file represented --- as a 'ByteString'. --- - -data PackageOrDep = Pkg PackageEntry | Dep Dependency - --- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files --- --- We read the index using 'Tar.read', which gives us a lazily constructed --- 'TarEntries'. 
We translate it to a list of entries using 'tarEntriesList', --- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a --- function over this to translate it to a list of IO actions returning --- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of --- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. -parsePackageIndex :: ByteString -> [IO (Maybe PackageOrDep)] -parsePackageIndex = concatMap (uncurry extract) . tarEntriesList . Tar.read - where - extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] - extract blockNo entry = tryExtractPkg ++ tryExtractPrefs - where - tryExtractPkg = do - mkPkgEntry <- maybeToList $ extractPkg entry blockNo - return $ fmap (fmap Pkg) mkPkgEntry - - tryExtractPrefs = do - prefs' <- maybeToList $ extractPrefs entry - fmap (return . Just . Dep) prefs' - --- | Turn the 'Entries' data structure from the @tar@ package into a list, --- and pair each entry with its block number. --- --- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read --- as far as the list is evaluated. -tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)] -tarEntriesList = go 0 - where - go !_ Tar.Done = [] - go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e) - go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es' - -extractPkg :: Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) -extractPkg entry blockNo = case Tar.entryContent entry of - Tar.NormalFile content _ - | takeExtension fileName == ".cabal" - -> case splitDirectories (normalise fileName) of - [pkgname,vers,_] -> case simpleParse vers of - Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) - where - pkgid = PackageIdentifier (PackageName pkgname) ver - parsed = parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack - $ content - descr = case parsed of - ParseOk _ d -> d - _ -> error $ "Couldn't read cabal file " - ++ show fileName - _ -> Nothing - _ -> Nothing - - Tar.OtherEntryType typeCode content _ - | Tar.isBuildTreeRefTypeCode typeCode -> - Just $ do - let path = byteStringToFilePath content - dirExists <- doesDirectoryExist path - result <- if not dirExists then return Nothing - else do - cabalFile <- tryFindAddSourcePackageDesc path "Error reading package index." - descr <- PackageDesc.Parse.readPackageDescription normal cabalFile - return . Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) - descr path blockNo - return result - - _ -> Nothing - - where - fileName = Tar.entryPath entry - -extractPrefs :: Tar.Entry -> Maybe [Dependency] -extractPrefs entry = case Tar.entryContent entry of - Tar.NormalFile content _ - | takeFileName entrypath == "preferred-versions" - -> Just prefs - where - entrypath = Tar.entryPath entry - prefs = parsePreferredVersions content - _ -> Nothing - -parsePreferredVersions :: ByteString -> [Dependency] -parsePreferredVersions = mapMaybe simpleParse - . filter (not . isPrefixOf "--") - . lines - . BS.Char8.unpack -- TODO: Are we sure no unicode? - ------------------------------------------------------------------------- --- Reading and updating the index cache --- - --- | Variation on 'sequence' which evaluates the actions lazily --- --- Pattern matching on the result list will execute just the first action; --- more generally pattern matching on the first @n@ '(:)' nodes will execute --- the first @n@ actions. -lazySequence :: [IO a] -> IO [a] -lazySequence = unsafeInterleaveIO . 
go - where - go [] = return [] - go (x:xs) = do x' <- x - xs' <- lazySequence xs - return (x' : xs') - --- | Which index do we mean? -data Index = - -- | The main index for the specified repository - RepoIndex RepoContext Repo - - -- | A sandbox-local repository - -- Argument is the location of the index file - | SandboxIndex FilePath - -indexFile :: Index -> FilePath -indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar" -indexFile (SandboxIndex index) = index - -cacheFile :: Index -> FilePath -cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache" -cacheFile (SandboxIndex index) = index `replaceExtension` "cache" - -updatePackageIndexCacheFile :: Verbosity -> Index -> IO () -updatePackageIndexCacheFile verbosity index = do - info verbosity ("Updating index cache file " ++ cacheFile index) - withIndexEntries index $ \entries -> do - let cache = Cache { cacheEntries = entries } - writeFile (cacheFile index) (showIndexCache cache) - --- | Read the index (for the purpose of building a cache) --- --- The callback is provided with list of cache entries, which is guaranteed to --- be lazily constructed. This list must ONLY be used in the scope of the --- callback; when the callback is terminated the file handle to the index will --- be closed and further attempts to read from the list will result in (pure) --- I/O exceptions. --- --- In the construction of the index for a secure repo we take advantage of the --- index built by the @hackage-security@ library to avoid reading the @.tar@ --- file as much as possible (we need to read it only to extract preferred --- versions). This helps performance, but is also required for correctness: --- the new @01-index.tar.gz@ may have multiple versions of preferred-versions --- files, and 'parsePackageIndex' does not correctly deal with that (see #2956); --- by reading the already-built cache from the security library we will be sure --- to only read the latest versions of all files. --- --- TODO: It would be nicer if we actually incrementally updated @cabal@'s --- cache, rather than reconstruct it from zero on each update. However, this --- would require a change in the cache format. 
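-- A note on the scope warning above: in both branches below the entry list is
-- built with 'lazySequence' (defined earlier in this module), so entries are
-- only read from the underlying index as the list is consumed, and the list
-- must not escape the callback. A minimal, hypothetical sketch of that
-- laziness ('demo' is not part of this module):
--
--   demo :: IO ()
--   demo = do
--     xs <- lazySequence [ putStrLn "first"  >> return (1 :: Int)
--                        , putStrLn "second" >> return 2 ]
--     case xs of
--       (x:_) -> print x   -- only the first action runs; "second" never prints
--       []    -> return ()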
-withIndexEntries :: Index -> ([IndexCacheEntry] -> IO a) -> IO a -withIndexEntries (RepoIndex repoCtxt repo@RepoSecure{..}) callback = - repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> - Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do - let mk :: (Sec.DirectoryEntry, fp, Maybe (Sec.Some Sec.IndexFile)) - -> IO [IndexCacheEntry] - mk (_, _fp, Nothing) = - return [] -- skip unrecognized file - mk (_, _fp, Just (Sec.Some (Sec.IndexPkgMetadata _pkgId))) = - return [] -- skip metadata - mk (dirEntry, _fp, Just (Sec.Some (Sec.IndexPkgCabal pkgId))) = do - let blockNo = fromIntegral (Sec.directoryEntryBlockNo dirEntry) - return [CachePackageId pkgId blockNo] - mk (dirEntry, _fp, Just (Sec.Some file@(Sec.IndexPkgPrefs _pkgName))) = do - content <- Sec.indexEntryContent `fmap` indexLookupFileEntry dirEntry file - return $ map CachePreference (parsePreferredVersions content) - entriess <- lazySequence $ map mk (Sec.directoryEntries indexDirectory) - callback $ concat entriess -withIndexEntries index callback = do - withFile (indexFile index) ReadMode $ \h -> do - bs <- maybeDecompress `fmap` BS.hGetContents h - pkgsOrPrefs <- lazySequence $ parsePackageIndex bs - callback $ map toCache (catMaybes pkgsOrPrefs) - where - toCache :: PackageOrDep -> IndexCacheEntry - toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo - toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo - toCache (Dep d) = CachePreference d - -data ReadPackageIndexMode = ReadPackageIndexStrict - | ReadPackageIndexLazyIO - -readPackageIndexCacheFile :: Package pkg - => (PackageEntry -> pkg) - -> Index - -> IO (PackageIndex pkg, [Dependency]) -readPackageIndexCacheFile mkPkg index = do - cache <- liftM readIndexCache $ BSS.readFile (cacheFile index) - indexHnd <- openFile (indexFile index) ReadMode - packageIndexFromCache mkPkg indexHnd cache ReadPackageIndexLazyIO - -packageIndexFromCache :: Package pkg - => (PackageEntry -> pkg) - -> Handle - -> Cache - -> ReadPackageIndexMode - -> IO (PackageIndex pkg, [Dependency]) -packageIndexFromCache mkPkg hnd cache mode = do - (pkgs, prefs) <- packageListFromCache mkPkg hnd cache mode - pkgIndex <- evaluate $ PackageIndex.fromList pkgs - return (pkgIndex, prefs) - --- | Read package list --- --- The result packages (though not the preferences) are guaranteed to be listed --- in the same order as they are in the tar file (because later entries in a tar --- file mask earlier ones). -packageListFromCache :: (PackageEntry -> pkg) - -> Handle - -> Cache - -> ReadPackageIndexMode - -> IO ([pkg], [Dependency]) -packageListFromCache mkPkg hnd Cache{..} mode = accum mempty [] cacheEntries - where - accum srcpkgs prefs [] = return (reverse srcpkgs, prefs) - - accum srcpkgs prefs (CachePackageId pkgid blockno : entries) = do - -- Given the cache entry, make a package index entry. - -- The magic here is that we use lazy IO to read the .cabal file - -- from the index tarball if it turns out that we need it. - -- Most of the time we only need the package id. 
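      -- The irrefutable pattern and 'unsafeInterleaveIO' below defer both the
      -- tarball read and the .cabal parse until 'pkg' or 'pkgtxt' is actually
      -- demanded; in 'ReadPackageIndexStrict' mode the 'seq's force them as
      -- soon as the resulting package entry is evaluated.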
- ~(pkg, pkgtxt) <- unsafeInterleaveIO $ do - pkgtxt <- getEntryContent blockno - pkg <- readPackageDescription pkgtxt - return (pkg, pkgtxt) - let srcpkg = case mode of - ReadPackageIndexLazyIO -> - mkPkg (NormalPackage pkgid pkg pkgtxt blockno) - ReadPackageIndexStrict -> - pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg - pkgtxt blockno) - accum (srcpkg:srcpkgs) prefs entries - - accum srcpkgs prefs (CacheBuildTreeRef refType blockno : entries) = do - -- We have to read the .cabal file eagerly here because we can't cache the - -- package id for build tree references - the user might edit the .cabal - -- file after the reference was added to the index. - path <- liftM byteStringToFilePath . getEntryContent $ blockno - pkg <- do let err = "Error reading package index from cache." - file <- tryFindAddSourcePackageDesc path err - PackageDesc.Parse.readPackageDescription normal file - let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) - accum (srcpkg:srcpkgs) prefs entries - - accum srcpkgs prefs (CachePreference pref : entries) = - accum srcpkgs (pref:prefs) entries - - getEntryContent :: BlockNo -> IO ByteString - getEntryContent blockno = do - entry <- Tar.hReadEntry hnd blockno - case Tar.entryContent entry of - Tar.NormalFile content _size -> return content - Tar.OtherEntryType typecode content _size - | Tar.isBuildTreeRefTypeCode typecode - -> return content - _ -> interror "unexpected tar entry type" - - readPackageDescription :: ByteString -> IO GenericPackageDescription - readPackageDescription content = - case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of - ParseOk _ d -> return d - _ -> interror "failed to parse .cabal file" - - interror msg = die $ "internal error when reading package index: " ++ msg - ++ "The package index or index cache is probably " - ++ "corrupt. Running cabal update might fix it." - ------------------------------------------------------------------------- --- Index cache data structure --- - --- | Tar files are block structured with 512 byte blocks. Every header and file --- content starts on a block boundary. 
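-- Cache entries are stored one per line in the ".cache" file; for
-- illustration only (hypothetical values, rendered as by
-- 'showIndexCacheEntry' below):
--
--   pkg: HTTP 4000.3.3 b# 1234
--   build-tree-ref: <c> 5678      (<c> is the single-character tar type code)
--   pref-ver: base >=4.8 && <4.10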
--- -type BlockNo = Tar.TarEntryOffset - -data IndexCacheEntry = CachePackageId PackageId BlockNo - | CacheBuildTreeRef BuildTreeRefType BlockNo - | CachePreference Dependency - deriving (Eq) - -installedUnitId, blocknoKey, buildTreeRefKey, preferredVersionKey :: String -installedUnitId = "pkg:" -blocknoKey = "b#" -buildTreeRefKey = "build-tree-ref:" -preferredVersionKey = "pref-ver:" - -readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry -readIndexCacheEntry = \line -> - case BSS.words line of - [key, pkgnamestr, pkgverstr, sep, blocknostr] - | key == BSS.pack installedUnitId && sep == BSS.pack blocknoKey -> - case (parseName pkgnamestr, parseVer pkgverstr [], - parseBlockNo blocknostr) of - (Just pkgname, Just pkgver, Just blockno) - -> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno) - _ -> Nothing - [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> - case (parseRefType typecodestr, parseBlockNo blocknostr) of - (Just refType, Just blockno) - -> Just (CacheBuildTreeRef refType blockno) - _ -> Nothing - - (key: remainder) | key == BSS.pack preferredVersionKey -> - fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder))) - _ -> Nothing - where - parseName str - | BSS.all (\c -> isAlphaNum c || c == '-') str - = Just (PackageName (BSS.unpack str)) - | otherwise = Nothing - - parseVer str vs = - case BSS.readInt str of - Nothing -> Nothing - Just (v, str') -> case BSS.uncons str' of - Just ('.', str'') -> parseVer str'' (v:vs) - Just _ -> Nothing - Nothing -> Just (Version (reverse (v:vs)) []) - - parseBlockNo str = - case BSS.readInt str of - Just (blockno, remainder) - | BSS.null remainder -> Just (fromIntegral blockno) - _ -> Nothing - - parseRefType str = - case BSS.uncons str of - Just (typeCode, remainder) - | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode - -> Just (refTypeFromTypeCode typeCode) - _ -> Nothing - -showIndexCacheEntry :: IndexCacheEntry -> String -showIndexCacheEntry entry = unwords $ case entry of - CachePackageId pkgid b -> [ installedUnitId - , display (packageName pkgid) - , display (packageVersion pkgid) - , blocknoKey - , show b - ] - CacheBuildTreeRef t b -> [ buildTreeRefKey - , [typeCodeFromRefType t] - , show b - ] - CachePreference dep -> [ preferredVersionKey - , display dep - ] - --- | Cabal caches various information about the Hackage index -data Cache = Cache { - cacheEntries :: [IndexCacheEntry] - } - -readIndexCache :: BSS.ByteString -> Cache -readIndexCache bs = Cache { - cacheEntries = mapMaybe readIndexCacheEntry $ BSS.lines bs - } - -showIndexCache :: Cache -> String -showIndexCache Cache{..} = unlines $ map showIndexCacheEntry cacheEntries diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Heuristics.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Heuristics.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Heuristics.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Heuristics.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,391 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Init.Heuristics --- Copyright : (c) Benedikt Huber 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : 
provisional --- Portability : portable --- --- Heuristics for creating initial cabal files. --- ------------------------------------------------------------------------------ -module Distribution.Client.Init.Heuristics ( - guessPackageName, - scanForModules, SourceFileEntry(..), - neededBuildPrograms, - guessMainFileCandidates, - guessAuthorNameMail, - knownCategories, -) where -import Distribution.Text (simpleParse) -import Distribution.Simple.Setup (Flag(..), flagToMaybe) -import Distribution.ModuleName - ( ModuleName, toFilePath ) -import Distribution.Client.PackageIndex - ( allPackagesByName ) -import qualified Distribution.Package as P -import qualified Distribution.PackageDescription as PD - ( category, packageDescription ) -import Distribution.Simple.Utils - ( intercalate ) -import Distribution.Client.Utils - ( tryCanonicalizePath ) -import Language.Haskell.Extension ( Extension ) - -import Distribution.Client.Types ( packageDescription, SourcePackageDb(..) ) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative ( pure, (<$>), (<*>) ) -import Data.Monoid ( mempty, mappend, mconcat ) -#endif -import Control.Arrow ( first ) -import Control.Monad ( liftM ) -import Data.Char ( isAlphaNum, isNumber, isUpper, isLower, isSpace ) -import Data.Either ( partitionEithers ) -import Data.List ( isInfixOf, isPrefixOf, isSuffixOf, sortBy ) -import Data.Maybe ( mapMaybe, catMaybes, maybeToList ) -import Data.Ord ( comparing ) -import qualified Data.Set as Set ( fromList, toList ) -import System.Directory ( getCurrentDirectory, getDirectoryContents, - doesDirectoryExist, doesFileExist, getHomeDirectory, ) -import Distribution.Compat.Environment ( getEnvironment ) -import System.FilePath ( takeExtension, takeBaseName, dropExtension, - (), (<.>), splitDirectories, makeRelative ) - -import Distribution.Client.Init.Types ( InitFlags(..) ) -import Distribution.Client.Compat.Process ( readProcessWithExitCode ) -import System.Exit ( ExitCode(..) ) - --- | Return a list of candidate main files for this executable: top-level --- modules including the word 'Main' in the file name. The list is sorted in --- order of preference, shorter file names are preferred. 'Right's are existing --- candidates and 'Left's are those that do not yet exist. -guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath] -guessMainFileCandidates flags = do - dir <- - maybe getCurrentDirectory return (flagToMaybe $ packageDir flags) - files <- getDirectoryContents dir - let existingCandidates = filter isMain files - -- We always want to give the user at least one default choice. If either - -- Main.hs or Main.lhs has already been created, then we don't want to - -- suggest the other; however, if neither has been created, then we - -- suggest both. - newCandidates = - if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"] - then [] - else ["Main.hs", "Main.lhs"] - candidates = - sortBy (\x y -> comparing (length . either id id) x y - `mappend` compare x y) - (map Left newCandidates ++ map Right existingCandidates) - return candidates - - where - isMain f = (isInfixOf "Main" f || isInfixOf "main" f) - && (isSuffixOf ".hs" f || isSuffixOf ".lhs" f) - --- | Guess the package name based on the given root directory. -guessPackageName :: FilePath -> IO P.PackageName -guessPackageName = liftM (P.PackageName . repair . last . splitDirectories) - . tryCanonicalizePath - where - -- Treat each span of non-alphanumeric characters as a hyphen. 
Each - -- hyphenated component of a package name must contain at least one - -- alphabetic character. An arbitrary character ('x') will be prepended if - -- this is not the case for the first component, and subsequent components - -- will simply be run together. For example, "1+2_foo-3" will become - -- "x12-foo3". - repair = repair' ('x' :) id - repair' invalid valid x = case dropWhile (not . isAlphaNum) x of - "" -> repairComponent "" - x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x' - in c ++ repairRest r - where - repairComponent c | all isNumber c = invalid c - | otherwise = valid c - repairRest = repair' id ('-' :) - --- |Data type of source files found in the working directory -data SourceFileEntry = SourceFileEntry - { relativeSourcePath :: FilePath - , moduleName :: ModuleName - , fileExtension :: String - , imports :: [ModuleName] - , extensions :: [Extension] - } deriving Show - -sfToFileName :: FilePath -> SourceFileEntry -> FilePath -sfToFileName projectRoot (SourceFileEntry relPath m ext _ _) - = projectRoot relPath toFilePath m <.> ext - --- |Search for source files in the given directory --- and return pairs of guessed Haskell source path and --- module names. -scanForModules :: FilePath -> IO [SourceFileEntry] -scanForModules rootDir = scanForModulesIn rootDir rootDir - -scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry] -scanForModulesIn projectRoot srcRoot = scan srcRoot [] - where - scan dir hierarchy = do - entries <- getDirectoryContents (projectRoot dir) - (files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries) - let modules = catMaybes [ guessModuleName hierarchy file - | file <- files - , isUpper (head file) ] - modules' <- mapM (findImportsAndExts projectRoot) modules - recMods <- mapM (scanRecursive dir hierarchy) dirs - return $ concat (modules' : recMods) - tagIsDir parent entry = do - isDir <- doesDirectoryExist (parent entry) - return $ (if isDir then Right else Left) entry - guessModuleName hierarchy entry - | takeBaseName entry == "Setup" = Nothing - | ext `elem` sourceExtensions = - SourceFileEntry <$> pure relRoot <*> modName <*> pure ext <*> pure [] <*> pure [] - | otherwise = Nothing - where - relRoot = makeRelative projectRoot srcRoot - unqualModName = dropExtension entry - modName = simpleParse - $ intercalate "." . reverse $ (unqualModName : hierarchy) - ext = case takeExtension entry of '.':e -> e; e -> e - scanRecursive parent hierarchy entry - | isUpper (head entry) = scan (parent entry) (entry : hierarchy) - | isLower (head entry) && not (ignoreDir entry) = - scanForModulesIn projectRoot $ foldl () srcRoot (reverse (entry : hierarchy)) - | otherwise = return [] - ignoreDir ('.':_) = True - ignoreDir dir = dir `elem` ["dist", "_darcs"] - -findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry -findImportsAndExts projectRoot sf = do - s <- readFile (sfToFileName projectRoot sf) - - let modules = mapMaybe - ( getModName - . drop 1 - . filter (not . null) - . dropWhile (/= "import") - . words - ) - . filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering - . lines - $ s - - -- TODO: We should probably make a better attempt at parsing - -- comments above. Unfortunately we can't use a full-fledged - -- Haskell parser since cabal's dependencies must be kept at a - -- minimum. - - -- A poor man's LANGUAGE pragma parser. - exts = mapMaybe simpleParse - . concatMap getPragmas - . filter isLANGUAGEPragma - . map fst - . drop 1 - . takeWhile (not . null . snd) - . 
iterate (takeBraces . snd) - $ ("",s) - - takeBraces = break (== '}') . dropWhile (/= '{') - - isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`) - - getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13 - - splitCommas "" = [] - splitCommas xs = x : splitCommas (drop 1 y) - where (x,y) = break (==',') xs - - return sf { imports = modules - , extensions = exts - } - - where getModName :: [String] -> Maybe ModuleName - getModName [] = Nothing - getModName ("qualified":ws) = getModName ws - getModName (ms:_) = simpleParse ms - - - --- Unfortunately we cannot use the version exported by Distribution.Simple.Program -knownSuffixHandlers :: [(String,String)] -knownSuffixHandlers = - [ ("gc", "greencard") - , ("chs", "chs") - , ("hsc", "hsc2hs") - , ("x", "alex") - , ("y", "happy") - , ("ly", "happy") - , ("cpphs", "cpp") - ] - -sourceExtensions :: [String] -sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers - -neededBuildPrograms :: [SourceFileEntry] -> [String] -neededBuildPrograms entries = - [ handler - | ext <- nubSet (map fileExtension entries) - , handler <- maybeToList (lookup ext knownSuffixHandlers) - ] - --- | Guess author and email using darcs and git configuration options. Use --- the following in decreasing order of preference: --- --- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*) --- 2. Local repo configs --- 3. Global vcs configs --- 4. The generic $EMAIL --- --- Name and email are processed separately, so the guess might end up being --- a name from DARCS_EMAIL and an email from git config. --- --- Darcs has preference, for tradition's sake. -guessAuthorNameMail :: IO (Flag String, Flag String) -guessAuthorNameMail = fmap authorGuessPure authorGuessIO - --- Ordered in increasing preference, since Flag-as-monoid is identical to --- Last. -authorGuessPure :: AuthorGuessIO -> AuthorGuess -authorGuessPure (AuthorGuessIO env darcsLocalF darcsGlobalF gitLocal gitGlobal) - = mconcat - [ emailEnv env - , gitGlobal - , darcsCfg darcsGlobalF - , gitLocal - , darcsCfg darcsLocalF - , gitEnv env - , darcsEnv env - ] - -authorGuessIO :: IO AuthorGuessIO -authorGuessIO = AuthorGuessIO - <$> getEnvironment - <*> (maybeReadFile $ "_darcs" "prefs" "author") - <*> (maybeReadFile =<< liftM ( (".darcs" "author")) getHomeDirectory) - <*> gitCfg Local - <*> gitCfg Global - --- Types and functions used for guessing the author are now defined: - -type AuthorGuess = (Flag String, Flag String) -type Enviro = [(String, String)] -data GitLoc = Local | Global -data AuthorGuessIO = AuthorGuessIO - Enviro -- ^ Environment lookup table - (Maybe String) -- ^ Contents of local darcs author info - (Maybe String) -- ^ Contents of global darcs author info - AuthorGuess -- ^ Git config --local - AuthorGuess -- ^ Git config --global - -darcsEnv :: Enviro -> AuthorGuess -darcsEnv = maybe mempty nameAndMail . lookup "DARCS_EMAIL" - -gitEnv :: Enviro -> AuthorGuess -gitEnv env = (name, mail) - where - name = maybeFlag "GIT_AUTHOR_NAME" env - mail = maybeFlag "GIT_AUTHOR_EMAIL" env - -darcsCfg :: Maybe String -> AuthorGuess -darcsCfg = maybe mempty nameAndMail - -emailEnv :: Enviro -> AuthorGuess -emailEnv env = (mempty, mail) - where - mail = maybeFlag "EMAIL" env - -gitCfg :: GitLoc -> IO AuthorGuess -gitCfg which = do - name <- gitVar which "user.name" - mail <- gitVar which "user.email" - return (name, mail) - -gitVar :: GitLoc -> String -> IO (Flag String) -gitVar which = fmap happyOutput . 
gitConfigQuery which - -happyOutput :: (ExitCode, a, t) -> Flag a -happyOutput v = case v of - (ExitSuccess, s, _) -> Flag s - _ -> mempty - -gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String) -gitConfigQuery which key = - fmap trim' $ readProcessWithExitCode "git" ["config", w, key] "" - where - w = case which of - Local -> "--local" - Global -> "--global" - trim' (a, b, c) = (a, trim b, c) - -maybeFlag :: String -> Enviro -> Flag String -maybeFlag k = maybe mempty Flag . lookup k - --- | Read the first non-comment, non-trivial line of a file, if it exists -maybeReadFile :: String -> IO (Maybe String) -maybeReadFile f = do - exists <- doesFileExist f - if exists - then fmap getFirstLine $ readFile f - else return Nothing - where - getFirstLine content = - let nontrivialLines = dropWhile (\l -> (null l) || ("#" `isPrefixOf` l)) . lines $ content - in case nontrivialLines of - [] -> Nothing - (l:_) -> Just l - --- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached -knownCategories :: SourcePackageDb -> [String] -knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet - [ cat | pkg <- map head (allPackagesByName sourcePkgIndex) - , let catList = (PD.category . PD.packageDescription . packageDescription) pkg - , cat <- splitString ',' catList - ] - --- Parse name and email, from darcs pref files or environment variable -nameAndMail :: String -> (Flag String, Flag String) -nameAndMail str - | all isSpace nameOrEmail = mempty - | null erest = (mempty, Flag $ trim nameOrEmail) - | otherwise = (Flag $ trim nameOrEmail, Flag mail) - where - (nameOrEmail,erest) = break (== '<') str - (mail,_) = break (== '>') (tail erest) - -trim :: String -> String -trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse - where - removeLeadingSpace = dropWhile isSpace - --- split string at given character, and remove whitespace -splitString :: Char -> String -> [String] -splitString sep str = go str where - go s = if null s' then [] else tok : go rest where - s' = dropWhile (\c -> c == sep || isSpace c) s - (tok,rest) = break (==sep) s' - -nubSet :: (Ord a) => [a] -> [a] -nubSet = Set.toList . Set.fromList - -{- -test db testProjectRoot = do - putStrLn "Guessed package name" - (guessPackageName >=> print) testProjectRoot - putStrLn "Guessed name and email" - guessAuthorNameMail >>= print - - mods <- scanForModules testProjectRoot - - putStrLn "Guessed modules" - mapM_ print mods - putStrLn "Needed build programs" - print (neededBuildPrograms mods) - - putStrLn "List of known categories" - print $ knownCategories db --} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Licenses.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Licenses.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Licenses.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Licenses.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3065 +0,0 @@ -module Distribution.Client.Init.Licenses - ( License - , bsd2 - , bsd3 - , gplv2 - , gplv3 - , lgpl21 - , lgpl3 - , agplv3 - , apache20 - , mit - , mpl20 - , isc - ) where - -type License = String - -bsd2 :: String -> String -> License -bsd2 authors year = unlines - [ "Copyright (c) " ++ year ++ ", " ++ authors - , "All rights reserved." 
- , "" - , "Redistribution and use in source and binary forms, with or without" - , "modification, are permitted provided that the following conditions are" - , "met:" - , "" - , "1. Redistributions of source code must retain the above copyright" - , " notice, this list of conditions and the following disclaimer." - , "" - , "2. Redistributions in binary form must reproduce the above copyright" - , " notice, this list of conditions and the following disclaimer in the" - , " documentation and/or other materials provided with the" - , " distribution." - , "" - , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" - , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" - , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" - , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" - , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," - , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" - , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," - , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" - , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" - , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" - , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." - ] - -bsd3 :: String -> String -> License -bsd3 authors year = unlines - [ "Copyright (c) " ++ year ++ ", " ++ authors - , "" - , "All rights reserved." - , "" - , "Redistribution and use in source and binary forms, with or without" - , "modification, are permitted provided that the following conditions are met:" - , "" - , " * Redistributions of source code must retain the above copyright" - , " notice, this list of conditions and the following disclaimer." - , "" - , " * Redistributions in binary form must reproduce the above" - , " copyright notice, this list of conditions and the following" - , " disclaimer in the documentation and/or other materials provided" - , " with the distribution." - , "" - , " * Neither the name of " ++ authors ++ " nor the names of other" - , " contributors may be used to endorse or promote products derived" - , " from this software without specific prior written permission." - , "" - , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" - , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" - , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" - , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" - , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," - , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" - , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," - , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" - , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" - , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" - , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." - ] - -gplv2 :: License -gplv2 = unlines - [ " GNU GENERAL PUBLIC LICENSE" - , " Version 2, June 1991" - , "" - , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." 
- , "" - , " Preamble" - , "" - , " The licenses for most software are designed to take away your" - , "freedom to share and change it. By contrast, the GNU General Public" - , "License is intended to guarantee your freedom to share and change free" - , "software--to make sure the software is free for all its users. This" - , "General Public License applies to most of the Free Software" - , "Foundation's software and to any other program whose authors commit to" - , "using it. (Some other Free Software Foundation software is covered by" - , "the GNU Lesser General Public License instead.) You can apply it to" - , "your programs, too." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "this service if you wish), that you receive source code or can get it" - , "if you want it, that you can change the software or use pieces of it" - , "in new free programs; and that you know you can do these things." - , "" - , " To protect your rights, we need to make restrictions that forbid" - , "anyone to deny you these rights or to ask you to surrender the rights." - , "These restrictions translate to certain responsibilities for you if you" - , "distribute copies of the software, or if you modify it." - , "" - , " For example, if you distribute copies of such a program, whether" - , "gratis or for a fee, you must give the recipients all the rights that" - , "you have. You must make sure that they, too, receive or can get the" - , "source code. And you must show them these terms so they know their" - , "rights." - , "" - , " We protect your rights with two steps: (1) copyright the software, and" - , "(2) offer you this license which gives you legal permission to copy," - , "distribute and/or modify the software." - , "" - , " Also, for each author's protection and ours, we want to make certain" - , "that everyone understands that there is no warranty for this free" - , "software. If the software is modified by someone else and passed on, we" - , "want its recipients to know that what they have is not the original, so" - , "that any problems introduced by others will not reflect on the original" - , "authors' reputations." - , "" - , " Finally, any free program is threatened constantly by software" - , "patents. We wish to avoid the danger that redistributors of a free" - , "program will individually obtain patent licenses, in effect making the" - , "program proprietary. To prevent this, we have made it clear that any" - , "patent must be licensed for everyone's free use or not licensed at all." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " GNU GENERAL PUBLIC LICENSE" - , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" - , "" - , " 0. This License applies to any program or other work which contains" - , "a notice placed by the copyright holder saying it may be distributed" - , "under the terms of this General Public License. The \"Program\", below," - , "refers to any such program or work, and a \"work based on the Program\"" - , "means either the Program or any derivative work under copyright law:" - , "that is to say, a work containing the Program or a portion of it," - , "either verbatim or with modifications and/or translated into another" - , "language. 
(Hereinafter, translation is included without limitation in" - , "the term \"modification\".) Each licensee is addressed as \"you\"." - , "" - , "Activities other than copying, distribution and modification are not" - , "covered by this License; they are outside its scope. The act of" - , "running the Program is not restricted, and the output from the Program" - , "is covered only if its contents constitute a work based on the" - , "Program (independent of having been made by running the Program)." - , "Whether that is true depends on what the Program does." - , "" - , " 1. You may copy and distribute verbatim copies of the Program's" - , "source code as you receive it, in any medium, provided that you" - , "conspicuously and appropriately publish on each copy an appropriate" - , "copyright notice and disclaimer of warranty; keep intact all the" - , "notices that refer to this License and to the absence of any warranty;" - , "and give any other recipients of the Program a copy of this License" - , "along with the Program." - , "" - , "You may charge a fee for the physical act of transferring a copy, and" - , "you may at your option offer warranty protection in exchange for a fee." - , "" - , " 2. You may modify your copy or copies of the Program or any portion" - , "of it, thus forming a work based on the Program, and copy and" - , "distribute such modifications or work under the terms of Section 1" - , "above, provided that you also meet all of these conditions:" - , "" - , " a) You must cause the modified files to carry prominent notices" - , " stating that you changed the files and the date of any change." - , "" - , " b) You must cause any work that you distribute or publish, that in" - , " whole or in part contains or is derived from the Program or any" - , " part thereof, to be licensed as a whole at no charge to all third" - , " parties under the terms of this License." - , "" - , " c) If the modified program normally reads commands interactively" - , " when run, you must cause it, when started running for such" - , " interactive use in the most ordinary way, to print or display an" - , " announcement including an appropriate copyright notice and a" - , " notice that there is no warranty (or else, saying that you provide" - , " a warranty) and that users may redistribute the program under" - , " these conditions, and telling the user how to view a copy of this" - , " License. (Exception: if the Program itself is interactive but" - , " does not normally print such an announcement, your work based on" - , " the Program is not required to print an announcement.)" - , "" - , "These requirements apply to the modified work as a whole. If" - , "identifiable sections of that work are not derived from the Program," - , "and can be reasonably considered independent and separate works in" - , "themselves, then this License, and its terms, do not apply to those" - , "sections when you distribute them as separate works. But when you" - , "distribute the same sections as part of a whole which is a work based" - , "on the Program, the distribution of the whole must be on the terms of" - , "this License, whose permissions for other licensees extend to the" - , "entire whole, and thus to each and every part regardless of who wrote it." 
- , "" - , "Thus, it is not the intent of this section to claim rights or contest" - , "your rights to work written entirely by you; rather, the intent is to" - , "exercise the right to control the distribution of derivative or" - , "collective works based on the Program." - , "" - , "In addition, mere aggregation of another work not based on the Program" - , "with the Program (or with a work based on the Program) on a volume of" - , "a storage or distribution medium does not bring the other work under" - , "the scope of this License." - , "" - , " 3. You may copy and distribute the Program (or a work based on it," - , "under Section 2) in object code or executable form under the terms of" - , "Sections 1 and 2 above provided that you also do one of the following:" - , "" - , " a) Accompany it with the complete corresponding machine-readable" - , " source code, which must be distributed under the terms of Sections" - , " 1 and 2 above on a medium customarily used for software interchange; or," - , "" - , " b) Accompany it with a written offer, valid for at least three" - , " years, to give any third party, for a charge no more than your" - , " cost of physically performing source distribution, a complete" - , " machine-readable copy of the corresponding source code, to be" - , " distributed under the terms of Sections 1 and 2 above on a medium" - , " customarily used for software interchange; or," - , "" - , " c) Accompany it with the information you received as to the offer" - , " to distribute corresponding source code. (This alternative is" - , " allowed only for noncommercial distribution and only if you" - , " received the program in object code or executable form with such" - , " an offer, in accord with Subsection b above.)" - , "" - , "The source code for a work means the preferred form of the work for" - , "making modifications to it. For an executable work, complete source" - , "code means all the source code for all modules it contains, plus any" - , "associated interface definition files, plus the scripts used to" - , "control compilation and installation of the executable. However, as a" - , "special exception, the source code distributed need not include" - , "anything that is normally distributed (in either source or binary" - , "form) with the major components (compiler, kernel, and so on) of the" - , "operating system on which the executable runs, unless that component" - , "itself accompanies the executable." - , "" - , "If distribution of executable or object code is made by offering" - , "access to copy from a designated place, then offering equivalent" - , "access to copy the source code from the same place counts as" - , "distribution of the source code, even though third parties are not" - , "compelled to copy the source along with the object code." - , "" - , " 4. You may not copy, modify, sublicense, or distribute the Program" - , "except as expressly provided under this License. Any attempt" - , "otherwise to copy, modify, sublicense or distribute the Program is" - , "void, and will automatically terminate your rights under this License." - , "However, parties who have received copies, or rights, from you under" - , "this License will not have their licenses terminated so long as such" - , "parties remain in full compliance." - , "" - , " 5. You are not required to accept this License, since you have not" - , "signed it. However, nothing else grants you permission to modify or" - , "distribute the Program or its derivative works. 
These actions are" - , "prohibited by law if you do not accept this License. Therefore, by" - , "modifying or distributing the Program (or any work based on the" - , "Program), you indicate your acceptance of this License to do so, and" - , "all its terms and conditions for copying, distributing or modifying" - , "the Program or works based on it." - , "" - , " 6. Each time you redistribute the Program (or any work based on the" - , "Program), the recipient automatically receives a license from the" - , "original licensor to copy, distribute or modify the Program subject to" - , "these terms and conditions. You may not impose any further" - , "restrictions on the recipients' exercise of the rights granted herein." - , "You are not responsible for enforcing compliance by third parties to" - , "this License." - , "" - , " 7. If, as a consequence of a court judgment or allegation of patent" - , "infringement or for any other reason (not limited to patent issues)," - , "conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. If you cannot" - , "distribute so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you" - , "may not distribute the Program at all. For example, if a patent" - , "license would not permit royalty-free redistribution of the Program by" - , "all those who receive copies directly or indirectly through you, then" - , "the only way you could satisfy both it and this License would be to" - , "refrain entirely from distribution of the Program." - , "" - , "If any portion of this section is held invalid or unenforceable under" - , "any particular circumstance, the balance of the section is intended to" - , "apply and the section as a whole is intended to apply in other" - , "circumstances." - , "" - , "It is not the purpose of this section to induce you to infringe any" - , "patents or other property right claims or to contest validity of any" - , "such claims; this section has the sole purpose of protecting the" - , "integrity of the free software distribution system, which is" - , "implemented by public license practices. Many people have made" - , "generous contributions to the wide range of software distributed" - , "through that system in reliance on consistent application of that" - , "system; it is up to the author/donor to decide if he or she is willing" - , "to distribute software through any other system and a licensee cannot" - , "impose that choice." - , "" - , "This section is intended to make thoroughly clear what is believed to" - , "be a consequence of the rest of this License." - , "" - , " 8. If the distribution and/or use of the Program is restricted in" - , "certain countries either by patents or by copyrighted interfaces, the" - , "original copyright holder who places the Program under this License" - , "may add an explicit geographical distribution limitation excluding" - , "those countries, so that distribution is permitted only in or among" - , "countries not thus excluded. In such case, this License incorporates" - , "the limitation as if written in the body of this License." - , "" - , " 9. The Free Software Foundation may publish revised and/or new versions" - , "of the General Public License from time to time. 
Such new versions will" - , "be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , "Each version is given a distinguishing version number. If the Program" - , "specifies a version number of this License which applies to it and \"any" - , "later version\", you have the option of following the terms and conditions" - , "either of that version or of any later version published by the Free" - , "Software Foundation. If the Program does not specify a version number of" - , "this License, you may choose any version ever published by the Free Software" - , "Foundation." - , "" - , " 10. If you wish to incorporate parts of the Program into other free" - , "programs whose distribution conditions are different, write to the author" - , "to ask for permission. For software which is copyrighted by the Free" - , "Software Foundation, write to the Free Software Foundation; we sometimes" - , "make exceptions for this. Our decision will be guided by the two goals" - , "of preserving the free status of all derivatives of our free software and" - , "of promoting the sharing and reuse of software generally." - , "" - , " NO WARRANTY" - , "" - , " 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" - , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN" - , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" - , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" - , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" - , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" - , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" - , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," - , "REPAIR OR CORRECTION." - , "" - , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" - , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," - , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" - , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" - , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" - , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" - , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" - , "POSSIBILITY OF SUCH DAMAGES." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. It is safest" - , "to attach them to the start of each source file to most effectively" - , "convey the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." - , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software; you can redistribute it and/or modify" - , " it under the terms of the GNU General Public License as published by" - , " the Free Software Foundation; either version 2 of the License, or" - , " (at your option) any later version." 
- , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU General Public License for more details." - , "" - , " You should have received a copy of the GNU General Public License along" - , " with this program; if not, write to the Free Software Foundation, Inc.," - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , "If the program is interactive, make it output a short notice like this" - , "when it starts in an interactive mode:" - , "" - , " Gnomovision version 69, Copyright (C) year name of author" - , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." - , " This is free software, and you are welcome to redistribute it" - , " under certain conditions; type `show c' for details." - , "" - , "The hypothetical commands `show w' and `show c' should show the appropriate" - , "parts of the General Public License. Of course, the commands you use may" - , "be called something other than `show w' and `show c'; they could even be" - , "mouse-clicks or menu items--whatever suits your program." - , "" - , "You should also get your employer (if you work as a programmer) or your" - , "school, if any, to sign a \"copyright disclaimer\" for the program, if" - , "necessary. Here is a sample; alter the names:" - , "" - , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" - , " `Gnomovision' (which makes passes at compilers) written by James Hacker." - , "" - , " , 1 April 1989" - , " Ty Coon, President of Vice" - , "" - , "This General Public License does not permit incorporating your program into" - , "proprietary programs. If your program is a subroutine library, you may" - , "consider it more useful to permit linking proprietary applications with the" - , "library. If this is what you want to do, use the GNU Lesser General" - , "Public License instead of this License." - ] - -gplv3 :: License -gplv3 = unlines - [ " GNU GENERAL PUBLIC LICENSE" - , " Version 3, 29 June 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , " Preamble" - , "" - , " The GNU General Public License is a free, copyleft license for" - , "software and other kinds of works." - , "" - , " The licenses for most software and other practical works are designed" - , "to take away your freedom to share and change the works. By contrast," - , "the GNU General Public License is intended to guarantee your freedom to" - , "share and change all versions of a program--to make sure it remains free" - , "software for all its users. We, the Free Software Foundation, use the" - , "GNU General Public License for most of our software; it applies also to" - , "any other work released this way by its authors. You can apply it to" - , "your programs, too." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. 
Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "them if you wish), that you receive source code or can get it if you" - , "want it, that you can change the software or use pieces of it in new" - , "free programs, and that you know you can do these things." - , "" - , " To protect your rights, we need to prevent others from denying you" - , "these rights or asking you to surrender the rights. Therefore, you have" - , "certain responsibilities if you distribute copies of the software, or if" - , "you modify it: responsibilities to respect the freedom of others." - , "" - , " For example, if you distribute copies of such a program, whether" - , "gratis or for a fee, you must pass on to the recipients the same" - , "freedoms that you received. You must make sure that they, too, receive" - , "or can get the source code. And you must show them these terms so they" - , "know their rights." - , "" - , " Developers that use the GNU GPL protect your rights with two steps:" - , "(1) assert copyright on the software, and (2) offer you this License" - , "giving you legal permission to copy, distribute and/or modify it." - , "" - , " For the developers' and authors' protection, the GPL clearly explains" - , "that there is no warranty for this free software. For both users' and" - , "authors' sake, the GPL requires that modified versions be marked as" - , "changed, so that their problems will not be attributed erroneously to" - , "authors of previous versions." - , "" - , " Some devices are designed to deny users access to install or run" - , "modified versions of the software inside them, although the manufacturer" - , "can do so. This is fundamentally incompatible with the aim of" - , "protecting users' freedom to change the software. The systematic" - , "pattern of such abuse occurs in the area of products for individuals to" - , "use, which is precisely where it is most unacceptable. Therefore, we" - , "have designed this version of the GPL to prohibit the practice for those" - , "products. If such problems arise substantially in other domains, we" - , "stand ready to extend this provision to those domains in future versions" - , "of the GPL, as needed to protect the freedom of users." - , "" - , " Finally, every program is threatened constantly by software patents." - , "States should not allow patents to restrict development and use of" - , "software on general-purpose computers, but in those that do, we wish to" - , "avoid the special danger that patents applied to a free program could" - , "make it effectively proprietary. To prevent this, the GPL assures that" - , "patents cannot be used to render the program non-free." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " TERMS AND CONDITIONS" - , "" - , " 0. Definitions." - , "" - , " \"This License\" refers to version 3 of the GNU General Public License." - , "" - , " \"Copyright\" also means copyright-like laws that apply to other kinds of" - , "works, such as semiconductor masks." - , "" - , " \"The Program\" refers to any copyrightable work licensed under this" - , "License. Each licensee is addressed as \"you\". \"Licensees\" and" - , "\"recipients\" may be individuals or organizations." - , "" - , " To \"modify\" a work means to copy from or adapt all or part of the work" - , "in a fashion requiring copyright permission, other than the making of an" - , "exact copy. 
The resulting work is called a \"modified version\" of the" - , "earlier work or a work \"based on\" the earlier work." - , "" - , " A \"covered work\" means either the unmodified Program or a work based" - , "on the Program." - , "" - , " To \"propagate\" a work means to do anything with it that, without" - , "permission, would make you directly or secondarily liable for" - , "infringement under applicable copyright law, except executing it on a" - , "computer or modifying a private copy. Propagation includes copying," - , "distribution (with or without modification), making available to the" - , "public, and in some countries other activities as well." - , "" - , " To \"convey\" a work means any kind of propagation that enables other" - , "parties to make or receive copies. Mere interaction with a user through" - , "a computer network, with no transfer of a copy, is not conveying." - , "" - , " An interactive user interface displays \"Appropriate Legal Notices\"" - , "to the extent that it includes a convenient and prominently visible" - , "feature that (1) displays an appropriate copyright notice, and (2)" - , "tells the user that there is no warranty for the work (except to the" - , "extent that warranties are provided), that licensees may convey the" - , "work under this License, and how to view a copy of this License. If" - , "the interface presents a list of user commands or options, such as a" - , "menu, a prominent item in the list meets this criterion." - , "" - , " 1. Source Code." - , "" - , " The \"source code\" for a work means the preferred form of the work" - , "for making modifications to it. \"Object code\" means any non-source" - , "form of a work." - , "" - , " A \"Standard Interface\" means an interface that either is an official" - , "standard defined by a recognized standards body, or, in the case of" - , "interfaces specified for a particular programming language, one that" - , "is widely used among developers working in that language." - , "" - , " The \"System Libraries\" of an executable work include anything, other" - , "than the work as a whole, that (a) is included in the normal form of" - , "packaging a Major Component, but which is not part of that Major" - , "Component, and (b) serves only to enable use of the work with that" - , "Major Component, or to implement a Standard Interface for which an" - , "implementation is available to the public in source code form. A" - , "\"Major Component\", in this context, means a major essential component" - , "(kernel, window system, and so on) of the specific operating system" - , "(if any) on which the executable work runs, or a compiler used to" - , "produce the work, or an object code interpreter used to run it." - , "" - , " The \"Corresponding Source\" for a work in object code form means all" - , "the source code needed to generate, install, and (for an executable" - , "work) run the object code and to modify the work, including scripts to" - , "control those activities. However, it does not include the work's" - , "System Libraries, or general-purpose tools or generally available free" - , "programs which are used unmodified in performing those activities but" - , "which are not part of the work. 
For example, Corresponding Source" - , "includes interface definition files associated with source files for" - , "the work, and the source code for shared libraries and dynamically" - , "linked subprograms that the work is specifically designed to require," - , "such as by intimate data communication or control flow between those" - , "subprograms and other parts of the work." - , "" - , " The Corresponding Source need not include anything that users" - , "can regenerate automatically from other parts of the Corresponding" - , "Source." - , "" - , " The Corresponding Source for a work in source code form is that" - , "same work." - , "" - , " 2. Basic Permissions." - , "" - , " All rights granted under this License are granted for the term of" - , "copyright on the Program, and are irrevocable provided the stated" - , "conditions are met. This License explicitly affirms your unlimited" - , "permission to run the unmodified Program. The output from running a" - , "covered work is covered by this License only if the output, given its" - , "content, constitutes a covered work. This License acknowledges your" - , "rights of fair use or other equivalent, as provided by copyright law." - , "" - , " You may make, run and propagate covered works that you do not" - , "convey, without conditions so long as your license otherwise remains" - , "in force. You may convey covered works to others for the sole purpose" - , "of having them make modifications exclusively for you, or provide you" - , "with facilities for running those works, provided that you comply with" - , "the terms of this License in conveying all material for which you do" - , "not control copyright. Those thus making or running the covered works" - , "for you must do so exclusively on your behalf, under your direction" - , "and control, on terms that prohibit them from making any copies of" - , "your copyrighted material outside their relationship with you." - , "" - , " Conveying under any other circumstances is permitted solely under" - , "the conditions stated below. Sublicensing is not allowed; section 10" - , "makes it unnecessary." - , "" - , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." - , "" - , " No covered work shall be deemed part of an effective technological" - , "measure under any applicable law fulfilling obligations under article" - , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" - , "similar laws prohibiting or restricting circumvention of such" - , "measures." - , "" - , " When you convey a covered work, you waive any legal power to forbid" - , "circumvention of technological measures to the extent such circumvention" - , "is effected by exercising rights under this License with respect to" - , "the covered work, and you disclaim any intention to limit operation or" - , "modification of the work as a means of enforcing, against the work's" - , "users, your or third parties' legal rights to forbid circumvention of" - , "technological measures." - , "" - , " 4. Conveying Verbatim Copies." 
- , "" - , " You may convey verbatim copies of the Program's source code as you" - , "receive it, in any medium, provided that you conspicuously and" - , "appropriately publish on each copy an appropriate copyright notice;" - , "keep intact all notices stating that this License and any" - , "non-permissive terms added in accord with section 7 apply to the code;" - , "keep intact all notices of the absence of any warranty; and give all" - , "recipients a copy of this License along with the Program." - , "" - , " You may charge any price or no price for each copy that you convey," - , "and you may offer support or warranty protection for a fee." - , "" - , " 5. Conveying Modified Source Versions." - , "" - , " You may convey a work based on the Program, or the modifications to" - , "produce it from the Program, in the form of source code under the" - , "terms of section 4, provided that you also meet all of these conditions:" - , "" - , " a) The work must carry prominent notices stating that you modified" - , " it, and giving a relevant date." - , "" - , " b) The work must carry prominent notices stating that it is" - , " released under this License and any conditions added under section" - , " 7. This requirement modifies the requirement in section 4 to" - , " \"keep intact all notices\"." - , "" - , " c) You must license the entire work, as a whole, under this" - , " License to anyone who comes into possession of a copy. This" - , " License will therefore apply, along with any applicable section 7" - , " additional terms, to the whole of the work, and all its parts," - , " regardless of how they are packaged. This License gives no" - , " permission to license the work in any other way, but it does not" - , " invalidate such permission if you have separately received it." - , "" - , " d) If the work has interactive user interfaces, each must display" - , " Appropriate Legal Notices; however, if the Program has interactive" - , " interfaces that do not display Appropriate Legal Notices, your" - , " work need not make them do so." - , "" - , " A compilation of a covered work with other separate and independent" - , "works, which are not by their nature extensions of the covered work," - , "and which are not combined with it such as to form a larger program," - , "in or on a volume of a storage or distribution medium, is called an" - , "\"aggregate\" if the compilation and its resulting copyright are not" - , "used to limit the access or legal rights of the compilation's users" - , "beyond what the individual works permit. Inclusion of a covered work" - , "in an aggregate does not cause this License to apply to the other" - , "parts of the aggregate." - , "" - , " 6. Conveying Non-Source Forms." - , "" - , " You may convey a covered work in object code form under the terms" - , "of sections 4 and 5, provided that you also convey the" - , "machine-readable Corresponding Source under the terms of this License," - , "in one of these ways:" - , "" - , " a) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by the" - , " Corresponding Source fixed on a durable physical medium" - , " customarily used for software interchange." 
- , "" - , " b) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by a" - , " written offer, valid for at least three years and valid for as" - , " long as you offer spare parts or customer support for that product" - , " model, to give anyone who possesses the object code either (1) a" - , " copy of the Corresponding Source for all the software in the" - , " product that is covered by this License, on a durable physical" - , " medium customarily used for software interchange, for a price no" - , " more than your reasonable cost of physically performing this" - , " conveying of source, or (2) access to copy the" - , " Corresponding Source from a network server at no charge." - , "" - , " c) Convey individual copies of the object code with a copy of the" - , " written offer to provide the Corresponding Source. This" - , " alternative is allowed only occasionally and noncommercially, and" - , " only if you received the object code with such an offer, in accord" - , " with subsection 6b." - , "" - , " d) Convey the object code by offering access from a designated" - , " place (gratis or for a charge), and offer equivalent access to the" - , " Corresponding Source in the same way through the same place at no" - , " further charge. You need not require recipients to copy the" - , " Corresponding Source along with the object code. If the place to" - , " copy the object code is a network server, the Corresponding Source" - , " may be on a different server (operated by you or a third party)" - , " that supports equivalent copying facilities, provided you maintain" - , " clear directions next to the object code saying where to find the" - , " Corresponding Source. Regardless of what server hosts the" - , " Corresponding Source, you remain obligated to ensure that it is" - , " available for as long as needed to satisfy these requirements." - , "" - , " e) Convey the object code using peer-to-peer transmission, provided" - , " you inform other peers where the object code and Corresponding" - , " Source of the work are being offered to the general public at no" - , " charge under subsection 6d." - , "" - , " A separable portion of the object code, whose source code is excluded" - , "from the Corresponding Source as a System Library, need not be" - , "included in conveying the object code work." - , "" - , " A \"User Product\" is either (1) a \"consumer product\", which means any" - , "tangible personal property which is normally used for personal, family," - , "or household purposes, or (2) anything designed or sold for incorporation" - , "into a dwelling. In determining whether a product is a consumer product," - , "doubtful cases shall be resolved in favor of coverage. For a particular" - , "product received by a particular user, \"normally used\" refers to a" - , "typical or common use of that class of product, regardless of the status" - , "of the particular user or of the way in which the particular user" - , "actually uses, or expects or is expected to use, the product. A product" - , "is a consumer product regardless of whether the product has substantial" - , "commercial, industrial or non-consumer uses, unless such uses represent" - , "the only significant mode of use of the product." 
- , "" - , " \"Installation Information\" for a User Product means any methods," - , "procedures, authorization keys, or other information required to install" - , "and execute modified versions of a covered work in that User Product from" - , "a modified version of its Corresponding Source. The information must" - , "suffice to ensure that the continued functioning of the modified object" - , "code is in no case prevented or interfered with solely because" - , "modification has been made." - , "" - , " If you convey an object code work under this section in, or with, or" - , "specifically for use in, a User Product, and the conveying occurs as" - , "part of a transaction in which the right of possession and use of the" - , "User Product is transferred to the recipient in perpetuity or for a" - , "fixed term (regardless of how the transaction is characterized), the" - , "Corresponding Source conveyed under this section must be accompanied" - , "by the Installation Information. But this requirement does not apply" - , "if neither you nor any third party retains the ability to install" - , "modified object code on the User Product (for example, the work has" - , "been installed in ROM)." - , "" - , " The requirement to provide Installation Information does not include a" - , "requirement to continue to provide support service, warranty, or updates" - , "for a work that has been modified or installed by the recipient, or for" - , "the User Product in which it has been modified or installed. Access to a" - , "network may be denied when the modification itself materially and" - , "adversely affects the operation of the network or violates the rules and" - , "protocols for communication across the network." - , "" - , " Corresponding Source conveyed, and Installation Information provided," - , "in accord with this section must be in a format that is publicly" - , "documented (and with an implementation available to the public in" - , "source code form), and must require no special password or key for" - , "unpacking, reading or copying." - , "" - , " 7. Additional Terms." - , "" - , " \"Additional permissions\" are terms that supplement the terms of this" - , "License by making exceptions from one or more of its conditions." - , "Additional permissions that are applicable to the entire Program shall" - , "be treated as though they were included in this License, to the extent" - , "that they are valid under applicable law. If additional permissions" - , "apply only to part of the Program, that part may be used separately" - , "under those permissions, but the entire Program remains governed by" - , "this License without regard to the additional permissions." - , "" - , " When you convey a copy of a covered work, you may at your option" - , "remove any additional permissions from that copy, or from any part of" - , "it. (Additional permissions may be written to require their own" - , "removal in certain cases when you modify the work.) You may place" - , "additional permissions on material, added by you to a covered work," - , "for which you have or can give appropriate copyright permission." 
- , "" - , " Notwithstanding any other provision of this License, for material you" - , "add to a covered work, you may (if authorized by the copyright holders of" - , "that material) supplement the terms of this License with terms:" - , "" - , " a) Disclaiming warranty or limiting liability differently from the" - , " terms of sections 15 and 16 of this License; or" - , "" - , " b) Requiring preservation of specified reasonable legal notices or" - , " author attributions in that material or in the Appropriate Legal" - , " Notices displayed by works containing it; or" - , "" - , " c) Prohibiting misrepresentation of the origin of that material, or" - , " requiring that modified versions of such material be marked in" - , " reasonable ways as different from the original version; or" - , "" - , " d) Limiting the use for publicity purposes of names of licensors or" - , " authors of the material; or" - , "" - , " e) Declining to grant rights under trademark law for use of some" - , " trade names, trademarks, or service marks; or" - , "" - , " f) Requiring indemnification of licensors and authors of that" - , " material by anyone who conveys the material (or modified versions of" - , " it) with contractual assumptions of liability to the recipient, for" - , " any liability that these contractual assumptions directly impose on" - , " those licensors and authors." - , "" - , " All other non-permissive additional terms are considered \"further" - , "restrictions\" within the meaning of section 10. If the Program as you" - , "received it, or any part of it, contains a notice stating that it is" - , "governed by this License along with a term that is a further" - , "restriction, you may remove that term. If a license document contains" - , "a further restriction but permits relicensing or conveying under this" - , "License, you may add to a covered work material governed by the terms" - , "of that license document, provided that the further restriction does" - , "not survive such relicensing or conveying." - , "" - , " If you add terms to a covered work in accord with this section, you" - , "must place, in the relevant source files, a statement of the" - , "additional terms that apply to those files, or a notice indicating" - , "where to find the applicable terms." - , "" - , " Additional terms, permissive or non-permissive, may be stated in the" - , "form of a separately written license, or stated as exceptions;" - , "the above requirements apply either way." - , "" - , " 8. Termination." - , "" - , " You may not propagate or modify a covered work except as expressly" - , "provided under this License. Any attempt otherwise to propagate or" - , "modify it is void, and will automatically terminate your rights under" - , "this License (including any patent licenses granted under the third" - , "paragraph of section 11)." - , "" - , " However, if you cease all violation of this License, then your" - , "license from a particular copyright holder is reinstated (a)" - , "provisionally, unless and until the copyright holder explicitly and" - , "finally terminates your license, and (b) permanently, if the copyright" - , "holder fails to notify you of the violation by some reasonable means" - , "prior to 60 days after the cessation." 
- , "" - , " Moreover, your license from a particular copyright holder is" - , "reinstated permanently if the copyright holder notifies you of the" - , "violation by some reasonable means, this is the first time you have" - , "received notice of violation of this License (for any work) from that" - , "copyright holder, and you cure the violation prior to 30 days after" - , "your receipt of the notice." - , "" - , " Termination of your rights under this section does not terminate the" - , "licenses of parties who have received copies or rights from you under" - , "this License. If your rights have been terminated and not permanently" - , "reinstated, you do not qualify to receive new licenses for the same" - , "material under section 10." - , "" - , " 9. Acceptance Not Required for Having Copies." - , "" - , " You are not required to accept this License in order to receive or" - , "run a copy of the Program. Ancillary propagation of a covered work" - , "occurring solely as a consequence of using peer-to-peer transmission" - , "to receive a copy likewise does not require acceptance. However," - , "nothing other than this License grants you permission to propagate or" - , "modify any covered work. These actions infringe copyright if you do" - , "not accept this License. Therefore, by modifying or propagating a" - , "covered work, you indicate your acceptance of this License to do so." - , "" - , " 10. Automatic Licensing of Downstream Recipients." - , "" - , " Each time you convey a covered work, the recipient automatically" - , "receives a license from the original licensors, to run, modify and" - , "propagate that work, subject to this License. You are not responsible" - , "for enforcing compliance by third parties with this License." - , "" - , " An \"entity transaction\" is a transaction transferring control of an" - , "organization, or substantially all assets of one, or subdividing an" - , "organization, or merging organizations. If propagation of a covered" - , "work results from an entity transaction, each party to that" - , "transaction who receives a copy of the work also receives whatever" - , "licenses to the work the party's predecessor in interest had or could" - , "give under the previous paragraph, plus a right to possession of the" - , "Corresponding Source of the work from the predecessor in interest, if" - , "the predecessor has it or can get it with reasonable efforts." - , "" - , " You may not impose any further restrictions on the exercise of the" - , "rights granted or affirmed under this License. For example, you may" - , "not impose a license fee, royalty, or other charge for exercise of" - , "rights granted under this License, and you may not initiate litigation" - , "(including a cross-claim or counterclaim in a lawsuit) alleging that" - , "any patent claim is infringed by making, using, selling, offering for" - , "sale, or importing the Program or any portion of it." - , "" - , " 11. Patents." - , "" - , " A \"contributor\" is a copyright holder who authorizes use under this" - , "License of the Program or a work on which the Program is based. The" - , "work thus licensed is called the contributor's \"contributor version\"." 
- , "" - , " A contributor's \"essential patent claims\" are all patent claims" - , "owned or controlled by the contributor, whether already acquired or" - , "hereafter acquired, that would be infringed by some manner, permitted" - , "by this License, of making, using, or selling its contributor version," - , "but do not include claims that would be infringed only as a" - , "consequence of further modification of the contributor version. For" - , "purposes of this definition, \"control\" includes the right to grant" - , "patent sublicenses in a manner consistent with the requirements of" - , "this License." - , "" - , " Each contributor grants you a non-exclusive, worldwide, royalty-free" - , "patent license under the contributor's essential patent claims, to" - , "make, use, sell, offer for sale, import and otherwise run, modify and" - , "propagate the contents of its contributor version." - , "" - , " In the following three paragraphs, a \"patent license\" is any express" - , "agreement or commitment, however denominated, not to enforce a patent" - , "(such as an express permission to practice a patent or covenant not to" - , "sue for patent infringement). To \"grant\" such a patent license to a" - , "party means to make such an agreement or commitment not to enforce a" - , "patent against the party." - , "" - , " If you convey a covered work, knowingly relying on a patent license," - , "and the Corresponding Source of the work is not available for anyone" - , "to copy, free of charge and under the terms of this License, through a" - , "publicly available network server or other readily accessible means," - , "then you must either (1) cause the Corresponding Source to be so" - , "available, or (2) arrange to deprive yourself of the benefit of the" - , "patent license for this particular work, or (3) arrange, in a manner" - , "consistent with the requirements of this License, to extend the patent" - , "license to downstream recipients. \"Knowingly relying\" means you have" - , "actual knowledge that, but for the patent license, your conveying the" - , "covered work in a country, or your recipient's use of the covered work" - , "in a country, would infringe one or more identifiable patents in that" - , "country that you have reason to believe are valid." - , "" - , " If, pursuant to or in connection with a single transaction or" - , "arrangement, you convey, or propagate by procuring conveyance of, a" - , "covered work, and grant a patent license to some of the parties" - , "receiving the covered work authorizing them to use, propagate, modify" - , "or convey a specific copy of the covered work, then the patent license" - , "you grant is automatically extended to all recipients of the covered" - , "work and works based on it." - , "" - , " A patent license is \"discriminatory\" if it does not include within" - , "the scope of its coverage, prohibits the exercise of, or is" - , "conditioned on the non-exercise of one or more of the rights that are" - , "specifically granted under this License. 
You may not convey a covered" - , "work if you are a party to an arrangement with a third party that is" - , "in the business of distributing software, under which you make payment" - , "to the third party based on the extent of your activity of conveying" - , "the work, and under which the third party grants, to any of the" - , "parties who would receive the covered work from you, a discriminatory" - , "patent license (a) in connection with copies of the covered work" - , "conveyed by you (or copies made from those copies), or (b) primarily" - , "for and in connection with specific products or compilations that" - , "contain the covered work, unless you entered into that arrangement," - , "or that patent license was granted, prior to 28 March 2007." - , "" - , " Nothing in this License shall be construed as excluding or limiting" - , "any implied license or other defenses to infringement that may" - , "otherwise be available to you under applicable patent law." - , "" - , " 12. No Surrender of Others' Freedom." - , "" - , " If conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. If you cannot convey a" - , "covered work so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you may" - , "not convey it at all. For example, if you agree to terms that obligate you" - , "to collect a royalty for further conveying from those to whom you convey" - , "the Program, the only way you could satisfy both those terms and this" - , "License would be to refrain entirely from conveying the Program." - , "" - , " 13. Use with the GNU Affero General Public License." - , "" - , " Notwithstanding any other provision of this License, you have" - , "permission to link or combine any covered work with a work licensed" - , "under version 3 of the GNU Affero General Public License into a single" - , "combined work, and to convey the resulting work. The terms of this" - , "License will continue to apply to the part which is the covered work," - , "but the special requirements of the GNU Affero General Public License," - , "section 13, concerning interaction through a network will apply to the" - , "combination as such." - , "" - , " 14. Revised Versions of this License." - , "" - , " The Free Software Foundation may publish revised and/or new versions of" - , "the GNU General Public License from time to time. Such new versions will" - , "be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Program specifies that a certain numbered version of the GNU General" - , "Public License \"or any later version\" applies to it, you have the" - , "option of following the terms and conditions either of that numbered" - , "version or of any later version published by the Free Software" - , "Foundation. If the Program does not specify a version number of the" - , "GNU General Public License, you may choose any version ever published" - , "by the Free Software Foundation." - , "" - , " If the Program specifies that a proxy can decide which future" - , "versions of the GNU General Public License can be used, that proxy's" - , "public statement of acceptance of a version permanently authorizes you" - , "to choose that version for the Program." 
- , "" - , " Later license versions may give you additional or different" - , "permissions. However, no additional obligations are imposed on any" - , "author or copyright holder as a result of your choosing to follow a" - , "later version." - , "" - , " 15. Disclaimer of Warranty." - , "" - , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" - , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" - , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" - , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," - , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" - , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" - , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. Limitation of Liability." - , "" - , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" - , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" - , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" - , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" - , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" - , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," - , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" - , "SUCH DAMAGES." - , "" - , " 17. Interpretation of Sections 15 and 16." - , "" - , " If the disclaimer of warranty and limitation of liability provided" - , "above cannot be given local legal effect according to their terms," - , "reviewing courts shall apply local law that most closely approximates" - , "an absolute waiver of all civil liability in connection with the" - , "Program, unless a warranty or assumption of liability accompanies a" - , "copy of the Program in return for a fee." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. It is safest" - , "to attach them to the start of each source file to most effectively" - , "state the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." - , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software: you can redistribute it and/or modify" - , " it under the terms of the GNU General Public License as published by" - , " the Free Software Foundation, either version 3 of the License, or" - , " (at your option) any later version." - , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU General Public License for more details." - , "" - , " You should have received a copy of the GNU General Public License" - , " along with this program. If not, see ." - , "" - , "Also add information on how to contact you by electronic and paper mail." 
- , "" - , " If the program does terminal interaction, make it output a short" - , "notice like this when it starts in an interactive mode:" - , "" - , " Copyright (C) " - , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." - , " This is free software, and you are welcome to redistribute it" - , " under certain conditions; type `show c' for details." - , "" - , "The hypothetical commands `show w' and `show c' should show the appropriate" - , "parts of the General Public License. Of course, your program's commands" - , "might be different; for a GUI interface, you would use an \"about box\"." - , "" - , " You should also get your employer (if you work as a programmer) or school," - , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." - , "For more information on this, and how to apply and follow the GNU GPL, see" - , "." - , "" - , " The GNU General Public License does not permit incorporating your program" - , "into proprietary programs. If your program is a subroutine library, you" - , "may consider it more useful to permit linking proprietary applications with" - , "the library. If this is what you want to do, use the GNU Lesser General" - , "Public License instead of this License. But first, please read" - , "." - ] - -agplv3 :: License -agplv3 = unlines - [ " GNU AFFERO GENERAL PUBLIC LICENSE" - , " Version 3, 19 November 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , " Preamble" - , "" - , " The GNU Affero General Public License is a free, copyleft license for" - , "software and other kinds of works, specifically designed to ensure" - , "cooperation with the community in the case of network server software." - , "" - , " The licenses for most software and other practical works are designed" - , "to take away your freedom to share and change the works. By contrast," - , "our General Public Licenses are intended to guarantee your freedom to" - , "share and change all versions of a program--to make sure it remains free" - , "software for all its users." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "them if you wish), that you receive source code or can get it if you" - , "want it, that you can change the software or use pieces of it in new" - , "free programs, and that you know you can do these things." - , "" - , " Developers that use our General Public Licenses protect your rights" - , "with two steps: (1) assert copyright on the software, and (2) offer" - , "you this License which gives you legal permission to copy, distribute" - , "and/or modify the software." - , "" - , " A secondary benefit of defending all users' freedom is that" - , "improvements made in alternate versions of the program, if they" - , "receive widespread use, become available for other developers to" - , "incorporate. Many developers of free software are heartened and" - , "encouraged by the resulting cooperation. However, in the case of" - , "software used on network servers, this result may fail to come about." - , "The GNU General Public License permits making a modified version and" - , "letting the public access it on a server without ever releasing its" - , "source code to the public." 
- , "" - , " The GNU Affero General Public License is designed specifically to" - , "ensure that, in such cases, the modified source code becomes available" - , "to the community. It requires the operator of a network server to" - , "provide the source code of the modified version running there to the" - , "users of that server. Therefore, public use of a modified version, on" - , "a publicly accessible server, gives the public access to the source" - , "code of the modified version." - , "" - , " An older license, called the Affero General Public License and" - , "published by Affero, was designed to accomplish similar goals. This is" - , "a different license, not a version of the Affero GPL, but Affero has" - , "released a new version of the Affero GPL which permits relicensing under" - , "this license." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " TERMS AND CONDITIONS" - , "" - , " 0. Definitions." - , "" - , " \"This License\" refers to version 3 of the GNU Affero General Public License." - , "" - , " \"Copyright\" also means copyright-like laws that apply to other kinds of" - , "works, such as semiconductor masks." - , "" - , " \"The Program\" refers to any copyrightable work licensed under this" - , "License. Each licensee is addressed as \"you\". \"Licensees\" and" - , "\"recipients\" may be individuals or organizations." - , "" - , " To \"modify\" a work means to copy from or adapt all or part of the work" - , "in a fashion requiring copyright permission, other than the making of an" - , "exact copy. The resulting work is called a \"modified version\" of the" - , "earlier work or a work \"based on\" the earlier work." - , "" - , " A \"covered work\" means either the unmodified Program or a work based" - , "on the Program." - , "" - , " To \"propagate\" a work means to do anything with it that, without" - , "permission, would make you directly or secondarily liable for" - , "infringement under applicable copyright law, except executing it on a" - , "computer or modifying a private copy. Propagation includes copying," - , "distribution (with or without modification), making available to the" - , "public, and in some countries other activities as well." - , "" - , " To \"convey\" a work means any kind of propagation that enables other" - , "parties to make or receive copies. Mere interaction with a user through" - , "a computer network, with no transfer of a copy, is not conveying." - , "" - , " An interactive user interface displays \"Appropriate Legal Notices\"" - , "to the extent that it includes a convenient and prominently visible" - , "feature that (1) displays an appropriate copyright notice, and (2)" - , "tells the user that there is no warranty for the work (except to the" - , "extent that warranties are provided), that licensees may convey the" - , "work under this License, and how to view a copy of this License. If" - , "the interface presents a list of user commands or options, such as a" - , "menu, a prominent item in the list meets this criterion." - , "" - , " 1. Source Code." - , "" - , " The \"source code\" for a work means the preferred form of the work" - , "for making modifications to it. \"Object code\" means any non-source" - , "form of a work." 
- , "" - , " A \"Standard Interface\" means an interface that either is an official" - , "standard defined by a recognized standards body, or, in the case of" - , "interfaces specified for a particular programming language, one that" - , "is widely used among developers working in that language." - , "" - , " The \"System Libraries\" of an executable work include anything, other" - , "than the work as a whole, that (a) is included in the normal form of" - , "packaging a Major Component, but which is not part of that Major" - , "Component, and (b) serves only to enable use of the work with that" - , "Major Component, or to implement a Standard Interface for which an" - , "implementation is available to the public in source code form. A" - , "\"Major Component\", in this context, means a major essential component" - , "(kernel, window system, and so on) of the specific operating system" - , "(if any) on which the executable work runs, or a compiler used to" - , "produce the work, or an object code interpreter used to run it." - , "" - , " The \"Corresponding Source\" for a work in object code form means all" - , "the source code needed to generate, install, and (for an executable" - , "work) run the object code and to modify the work, including scripts to" - , "control those activities. However, it does not include the work's" - , "System Libraries, or general-purpose tools or generally available free" - , "programs which are used unmodified in performing those activities but" - , "which are not part of the work. For example, Corresponding Source" - , "includes interface definition files associated with source files for" - , "the work, and the source code for shared libraries and dynamically" - , "linked subprograms that the work is specifically designed to require," - , "such as by intimate data communication or control flow between those" - , "subprograms and other parts of the work." - , "" - , " The Corresponding Source need not include anything that users" - , "can regenerate automatically from other parts of the Corresponding" - , "Source." - , "" - , " The Corresponding Source for a work in source code form is that" - , "same work." - , "" - , " 2. Basic Permissions." - , "" - , " All rights granted under this License are granted for the term of" - , "copyright on the Program, and are irrevocable provided the stated" - , "conditions are met. This License explicitly affirms your unlimited" - , "permission to run the unmodified Program. The output from running a" - , "covered work is covered by this License only if the output, given its" - , "content, constitutes a covered work. This License acknowledges your" - , "rights of fair use or other equivalent, as provided by copyright law." - , "" - , " You may make, run and propagate covered works that you do not" - , "convey, without conditions so long as your license otherwise remains" - , "in force. You may convey covered works to others for the sole purpose" - , "of having them make modifications exclusively for you, or provide you" - , "with facilities for running those works, provided that you comply with" - , "the terms of this License in conveying all material for which you do" - , "not control copyright. Those thus making or running the covered works" - , "for you must do so exclusively on your behalf, under your direction" - , "and control, on terms that prohibit them from making any copies of" - , "your copyrighted material outside their relationship with you." 
- , "" - , " Conveying under any other circumstances is permitted solely under" - , "the conditions stated below. Sublicensing is not allowed; section 10" - , "makes it unnecessary." - , "" - , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." - , "" - , " No covered work shall be deemed part of an effective technological" - , "measure under any applicable law fulfilling obligations under article" - , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" - , "similar laws prohibiting or restricting circumvention of such" - , "measures." - , "" - , " When you convey a covered work, you waive any legal power to forbid" - , "circumvention of technological measures to the extent such circumvention" - , "is effected by exercising rights under this License with respect to" - , "the covered work, and you disclaim any intention to limit operation or" - , "modification of the work as a means of enforcing, against the work's" - , "users, your or third parties' legal rights to forbid circumvention of" - , "technological measures." - , "" - , " 4. Conveying Verbatim Copies." - , "" - , " You may convey verbatim copies of the Program's source code as you" - , "receive it, in any medium, provided that you conspicuously and" - , "appropriately publish on each copy an appropriate copyright notice;" - , "keep intact all notices stating that this License and any" - , "non-permissive terms added in accord with section 7 apply to the code;" - , "keep intact all notices of the absence of any warranty; and give all" - , "recipients a copy of this License along with the Program." - , "" - , " You may charge any price or no price for each copy that you convey," - , "and you may offer support or warranty protection for a fee." - , "" - , " 5. Conveying Modified Source Versions." - , "" - , " You may convey a work based on the Program, or the modifications to" - , "produce it from the Program, in the form of source code under the" - , "terms of section 4, provided that you also meet all of these conditions:" - , "" - , " a) The work must carry prominent notices stating that you modified" - , " it, and giving a relevant date." - , "" - , " b) The work must carry prominent notices stating that it is" - , " released under this License and any conditions added under section" - , " 7. This requirement modifies the requirement in section 4 to" - , " \"keep intact all notices\"." - , "" - , " c) You must license the entire work, as a whole, under this" - , " License to anyone who comes into possession of a copy. This" - , " License will therefore apply, along with any applicable section 7" - , " additional terms, to the whole of the work, and all its parts," - , " regardless of how they are packaged. This License gives no" - , " permission to license the work in any other way, but it does not" - , " invalidate such permission if you have separately received it." - , "" - , " d) If the work has interactive user interfaces, each must display" - , " Appropriate Legal Notices; however, if the Program has interactive" - , " interfaces that do not display Appropriate Legal Notices, your" - , " work need not make them do so." 
- , "" - , " A compilation of a covered work with other separate and independent" - , "works, which are not by their nature extensions of the covered work," - , "and which are not combined with it such as to form a larger program," - , "in or on a volume of a storage or distribution medium, is called an" - , "\"aggregate\" if the compilation and its resulting copyright are not" - , "used to limit the access or legal rights of the compilation's users" - , "beyond what the individual works permit. Inclusion of a covered work" - , "in an aggregate does not cause this License to apply to the other" - , "parts of the aggregate." - , "" - , " 6. Conveying Non-Source Forms." - , "" - , " You may convey a covered work in object code form under the terms" - , "of sections 4 and 5, provided that you also convey the" - , "machine-readable Corresponding Source under the terms of this License," - , "in one of these ways:" - , "" - , " a) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by the" - , " Corresponding Source fixed on a durable physical medium" - , " customarily used for software interchange." - , "" - , " b) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by a" - , " written offer, valid for at least three years and valid for as" - , " long as you offer spare parts or customer support for that product" - , " model, to give anyone who possesses the object code either (1) a" - , " copy of the Corresponding Source for all the software in the" - , " product that is covered by this License, on a durable physical" - , " medium customarily used for software interchange, for a price no" - , " more than your reasonable cost of physically performing this" - , " conveying of source, or (2) access to copy the" - , " Corresponding Source from a network server at no charge." - , "" - , " c) Convey individual copies of the object code with a copy of the" - , " written offer to provide the Corresponding Source. This" - , " alternative is allowed only occasionally and noncommercially, and" - , " only if you received the object code with such an offer, in accord" - , " with subsection 6b." - , "" - , " d) Convey the object code by offering access from a designated" - , " place (gratis or for a charge), and offer equivalent access to the" - , " Corresponding Source in the same way through the same place at no" - , " further charge. You need not require recipients to copy the" - , " Corresponding Source along with the object code. If the place to" - , " copy the object code is a network server, the Corresponding Source" - , " may be on a different server (operated by you or a third party)" - , " that supports equivalent copying facilities, provided you maintain" - , " clear directions next to the object code saying where to find the" - , " Corresponding Source. Regardless of what server hosts the" - , " Corresponding Source, you remain obligated to ensure that it is" - , " available for as long as needed to satisfy these requirements." - , "" - , " e) Convey the object code using peer-to-peer transmission, provided" - , " you inform other peers where the object code and Corresponding" - , " Source of the work are being offered to the general public at no" - , " charge under subsection 6d." 
- , "" - , " A separable portion of the object code, whose source code is excluded" - , "from the Corresponding Source as a System Library, need not be" - , "included in conveying the object code work." - , "" - , " A \"User Product\" is either (1) a \"consumer product\", which means any" - , "tangible personal property which is normally used for personal, family," - , "or household purposes, or (2) anything designed or sold for incorporation" - , "into a dwelling. In determining whether a product is a consumer product," - , "doubtful cases shall be resolved in favor of coverage. For a particular" - , "product received by a particular user, \"normally used\" refers to a" - , "typical or common use of that class of product, regardless of the status" - , "of the particular user or of the way in which the particular user" - , "actually uses, or expects or is expected to use, the product. A product" - , "is a consumer product regardless of whether the product has substantial" - , "commercial, industrial or non-consumer uses, unless such uses represent" - , "the only significant mode of use of the product." - , "" - , " \"Installation Information\" for a User Product means any methods," - , "procedures, authorization keys, or other information required to install" - , "and execute modified versions of a covered work in that User Product from" - , "a modified version of its Corresponding Source. The information must" - , "suffice to ensure that the continued functioning of the modified object" - , "code is in no case prevented or interfered with solely because" - , "modification has been made." - , "" - , " If you convey an object code work under this section in, or with, or" - , "specifically for use in, a User Product, and the conveying occurs as" - , "part of a transaction in which the right of possession and use of the" - , "User Product is transferred to the recipient in perpetuity or for a" - , "fixed term (regardless of how the transaction is characterized), the" - , "Corresponding Source conveyed under this section must be accompanied" - , "by the Installation Information. But this requirement does not apply" - , "if neither you nor any third party retains the ability to install" - , "modified object code on the User Product (for example, the work has" - , "been installed in ROM)." - , "" - , " The requirement to provide Installation Information does not include a" - , "requirement to continue to provide support service, warranty, or updates" - , "for a work that has been modified or installed by the recipient, or for" - , "the User Product in which it has been modified or installed. Access to a" - , "network may be denied when the modification itself materially and" - , "adversely affects the operation of the network or violates the rules and" - , "protocols for communication across the network." - , "" - , " Corresponding Source conveyed, and Installation Information provided," - , "in accord with this section must be in a format that is publicly" - , "documented (and with an implementation available to the public in" - , "source code form), and must require no special password or key for" - , "unpacking, reading or copying." - , "" - , " 7. Additional Terms." - , "" - , " \"Additional permissions\" are terms that supplement the terms of this" - , "License by making exceptions from one or more of its conditions." 
- , "Additional permissions that are applicable to the entire Program shall" - , "be treated as though they were included in this License, to the extent" - , "that they are valid under applicable law. If additional permissions" - , "apply only to part of the Program, that part may be used separately" - , "under those permissions, but the entire Program remains governed by" - , "this License without regard to the additional permissions." - , "" - , " When you convey a copy of a covered work, you may at your option" - , "remove any additional permissions from that copy, or from any part of" - , "it. (Additional permissions may be written to require their own" - , "removal in certain cases when you modify the work.) You may place" - , "additional permissions on material, added by you to a covered work," - , "for which you have or can give appropriate copyright permission." - , "" - , " Notwithstanding any other provision of this License, for material you" - , "add to a covered work, you may (if authorized by the copyright holders of" - , "that material) supplement the terms of this License with terms:" - , "" - , " a) Disclaiming warranty or limiting liability differently from the" - , " terms of sections 15 and 16 of this License; or" - , "" - , " b) Requiring preservation of specified reasonable legal notices or" - , " author attributions in that material or in the Appropriate Legal" - , " Notices displayed by works containing it; or" - , "" - , " c) Prohibiting misrepresentation of the origin of that material, or" - , " requiring that modified versions of such material be marked in" - , " reasonable ways as different from the original version; or" - , "" - , " d) Limiting the use for publicity purposes of names of licensors or" - , " authors of the material; or" - , "" - , " e) Declining to grant rights under trademark law for use of some" - , " trade names, trademarks, or service marks; or" - , "" - , " f) Requiring indemnification of licensors and authors of that" - , " material by anyone who conveys the material (or modified versions of" - , " it) with contractual assumptions of liability to the recipient, for" - , " any liability that these contractual assumptions directly impose on" - , " those licensors and authors." - , "" - , " All other non-permissive additional terms are considered \"further" - , "restrictions\" within the meaning of section 10. If the Program as you" - , "received it, or any part of it, contains a notice stating that it is" - , "governed by this License along with a term that is a further" - , "restriction, you may remove that term. If a license document contains" - , "a further restriction but permits relicensing or conveying under this" - , "License, you may add to a covered work material governed by the terms" - , "of that license document, provided that the further restriction does" - , "not survive such relicensing or conveying." - , "" - , " If you add terms to a covered work in accord with this section, you" - , "must place, in the relevant source files, a statement of the" - , "additional terms that apply to those files, or a notice indicating" - , "where to find the applicable terms." - , "" - , " Additional terms, permissive or non-permissive, may be stated in the" - , "form of a separately written license, or stated as exceptions;" - , "the above requirements apply either way." - , "" - , " 8. Termination." - , "" - , " You may not propagate or modify a covered work except as expressly" - , "provided under this License. 
Any attempt otherwise to propagate or" - , "modify it is void, and will automatically terminate your rights under" - , "this License (including any patent licenses granted under the third" - , "paragraph of section 11)." - , "" - , " However, if you cease all violation of this License, then your" - , "license from a particular copyright holder is reinstated (a)" - , "provisionally, unless and until the copyright holder explicitly and" - , "finally terminates your license, and (b) permanently, if the copyright" - , "holder fails to notify you of the violation by some reasonable means" - , "prior to 60 days after the cessation." - , "" - , " Moreover, your license from a particular copyright holder is" - , "reinstated permanently if the copyright holder notifies you of the" - , "violation by some reasonable means, this is the first time you have" - , "received notice of violation of this License (for any work) from that" - , "copyright holder, and you cure the violation prior to 30 days after" - , "your receipt of the notice." - , "" - , " Termination of your rights under this section does not terminate the" - , "licenses of parties who have received copies or rights from you under" - , "this License. If your rights have been terminated and not permanently" - , "reinstated, you do not qualify to receive new licenses for the same" - , "material under section 10." - , "" - , " 9. Acceptance Not Required for Having Copies." - , "" - , " You are not required to accept this License in order to receive or" - , "run a copy of the Program. Ancillary propagation of a covered work" - , "occurring solely as a consequence of using peer-to-peer transmission" - , "to receive a copy likewise does not require acceptance. However," - , "nothing other than this License grants you permission to propagate or" - , "modify any covered work. These actions infringe copyright if you do" - , "not accept this License. Therefore, by modifying or propagating a" - , "covered work, you indicate your acceptance of this License to do so." - , "" - , " 10. Automatic Licensing of Downstream Recipients." - , "" - , " Each time you convey a covered work, the recipient automatically" - , "receives a license from the original licensors, to run, modify and" - , "propagate that work, subject to this License. You are not responsible" - , "for enforcing compliance by third parties with this License." - , "" - , " An \"entity transaction\" is a transaction transferring control of an" - , "organization, or substantially all assets of one, or subdividing an" - , "organization, or merging organizations. If propagation of a covered" - , "work results from an entity transaction, each party to that" - , "transaction who receives a copy of the work also receives whatever" - , "licenses to the work the party's predecessor in interest had or could" - , "give under the previous paragraph, plus a right to possession of the" - , "Corresponding Source of the work from the predecessor in interest, if" - , "the predecessor has it or can get it with reasonable efforts." - , "" - , " You may not impose any further restrictions on the exercise of the" - , "rights granted or affirmed under this License. 
For example, you may" - , "not impose a license fee, royalty, or other charge for exercise of" - , "rights granted under this License, and you may not initiate litigation" - , "(including a cross-claim or counterclaim in a lawsuit) alleging that" - , "any patent claim is infringed by making, using, selling, offering for" - , "sale, or importing the Program or any portion of it." - , "" - , " 11. Patents." - , "" - , " A \"contributor\" is a copyright holder who authorizes use under this" - , "License of the Program or a work on which the Program is based. The" - , "work thus licensed is called the contributor's \"contributor version\"." - , "" - , " A contributor's \"essential patent claims\" are all patent claims" - , "owned or controlled by the contributor, whether already acquired or" - , "hereafter acquired, that would be infringed by some manner, permitted" - , "by this License, of making, using, or selling its contributor version," - , "but do not include claims that would be infringed only as a" - , "consequence of further modification of the contributor version. For" - , "purposes of this definition, \"control\" includes the right to grant" - , "patent sublicenses in a manner consistent with the requirements of" - , "this License." - , "" - , " Each contributor grants you a non-exclusive, worldwide, royalty-free" - , "patent license under the contributor's essential patent claims, to" - , "make, use, sell, offer for sale, import and otherwise run, modify and" - , "propagate the contents of its contributor version." - , "" - , " In the following three paragraphs, a \"patent license\" is any express" - , "agreement or commitment, however denominated, not to enforce a patent" - , "(such as an express permission to practice a patent or covenant not to" - , "sue for patent infringement). To \"grant\" such a patent license to a" - , "party means to make such an agreement or commitment not to enforce a" - , "patent against the party." - , "" - , " If you convey a covered work, knowingly relying on a patent license," - , "and the Corresponding Source of the work is not available for anyone" - , "to copy, free of charge and under the terms of this License, through a" - , "publicly available network server or other readily accessible means," - , "then you must either (1) cause the Corresponding Source to be so" - , "available, or (2) arrange to deprive yourself of the benefit of the" - , "patent license for this particular work, or (3) arrange, in a manner" - , "consistent with the requirements of this License, to extend the patent" - , "license to downstream recipients. \"Knowingly relying\" means you have" - , "actual knowledge that, but for the patent license, your conveying the" - , "covered work in a country, or your recipient's use of the covered work" - , "in a country, would infringe one or more identifiable patents in that" - , "country that you have reason to believe are valid." - , "" - , " If, pursuant to or in connection with a single transaction or" - , "arrangement, you convey, or propagate by procuring conveyance of, a" - , "covered work, and grant a patent license to some of the parties" - , "receiving the covered work authorizing them to use, propagate, modify" - , "or convey a specific copy of the covered work, then the patent license" - , "you grant is automatically extended to all recipients of the covered" - , "work and works based on it." 
- , "" - , " A patent license is \"discriminatory\" if it does not include within" - , "the scope of its coverage, prohibits the exercise of, or is" - , "conditioned on the non-exercise of one or more of the rights that are" - , "specifically granted under this License. You may not convey a covered" - , "work if you are a party to an arrangement with a third party that is" - , "in the business of distributing software, under which you make payment" - , "to the third party based on the extent of your activity of conveying" - , "the work, and under which the third party grants, to any of the" - , "parties who would receive the covered work from you, a discriminatory" - , "patent license (a) in connection with copies of the covered work" - , "conveyed by you (or copies made from those copies), or (b) primarily" - , "for and in connection with specific products or compilations that" - , "contain the covered work, unless you entered into that arrangement," - , "or that patent license was granted, prior to 28 March 2007." - , "" - , " Nothing in this License shall be construed as excluding or limiting" - , "any implied license or other defenses to infringement that may" - , "otherwise be available to you under applicable patent law." - , "" - , " 12. No Surrender of Others' Freedom." - , "" - , " If conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. If you cannot convey a" - , "covered work so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you may" - , "not convey it at all. For example, if you agree to terms that obligate you" - , "to collect a royalty for further conveying from those to whom you convey" - , "the Program, the only way you could satisfy both those terms and this" - , "License would be to refrain entirely from conveying the Program." - , "" - , " 13. Remote Network Interaction; Use with the GNU General Public License." - , "" - , " Notwithstanding any other provision of this License, if you modify the" - , "Program, your modified version must prominently offer all users" - , "interacting with it remotely through a computer network (if your version" - , "supports such interaction) an opportunity to receive the Corresponding" - , "Source of your version by providing access to the Corresponding Source" - , "from a network server at no charge, through some standard or customary" - , "means of facilitating copying of software. This Corresponding Source" - , "shall include the Corresponding Source for any work covered by version 3" - , "of the GNU General Public License that is incorporated pursuant to the" - , "following paragraph." - , "" - , " Notwithstanding any other provision of this License, you have" - , "permission to link or combine any covered work with a work licensed" - , "under version 3 of the GNU General Public License into a single" - , "combined work, and to convey the resulting work. The terms of this" - , "License will continue to apply to the part which is the covered work," - , "but the work with which it is combined will remain governed by version" - , "3 of the GNU General Public License." - , "" - , " 14. Revised Versions of this License." - , "" - , " The Free Software Foundation may publish revised and/or new versions of" - , "the GNU Affero General Public License from time to time. 
Such new versions" - , "will be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Program specifies that a certain numbered version of the GNU Affero General" - , "Public License \"or any later version\" applies to it, you have the" - , "option of following the terms and conditions either of that numbered" - , "version or of any later version published by the Free Software" - , "Foundation. If the Program does not specify a version number of the" - , "GNU Affero General Public License, you may choose any version ever published" - , "by the Free Software Foundation." - , "" - , " If the Program specifies that a proxy can decide which future" - , "versions of the GNU Affero General Public License can be used, that proxy's" - , "public statement of acceptance of a version permanently authorizes you" - , "to choose that version for the Program." - , "" - , " Later license versions may give you additional or different" - , "permissions. However, no additional obligations are imposed on any" - , "author or copyright holder as a result of your choosing to follow a" - , "later version." - , "" - , " 15. Disclaimer of Warranty." - , "" - , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" - , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" - , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" - , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," - , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" - , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" - , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. Limitation of Liability." - , "" - , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" - , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" - , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" - , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" - , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" - , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," - , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" - , "SUCH DAMAGES." - , "" - , " 17. Interpretation of Sections 15 and 16." - , "" - , " If the disclaimer of warranty and limitation of liability provided" - , "above cannot be given local legal effect according to their terms," - , "reviewing courts shall apply local law that most closely approximates" - , "an absolute waiver of all civil liability in connection with the" - , "Program, unless a warranty or assumption of liability accompanies a" - , "copy of the Program in return for a fee." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. 
It is safest" - , "to attach them to the start of each source file to most effectively" - , "state the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." - , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software: you can redistribute it and/or modify" - , " it under the terms of the GNU Affero General Public License as published by" - , " the Free Software Foundation, either version 3 of the License, or" - , " (at your option) any later version." - , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU Affero General Public License for more details." - , "" - , " You should have received a copy of the GNU Affero General Public License" - , " along with this program. If not, see ." - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , " If your software can interact with users remotely through a computer" - , "network, you should also make sure that it provides a way for users to" - , "get its source. For example, if your program is a web application, its" - , "interface could display a \"Source\" link that leads users to an archive" - , "of the code. There are many ways you could offer source, and different" - , "solutions will be better for different programs; see section 13 for the" - , "specific requirements." - , "" - , " You should also get your employer (if you work as a programmer) or school," - , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." - , "For more information on this, and how to apply and follow the GNU AGPL, see" - , "." - ] - -lgpl21 :: License -lgpl21 = unlines - [ " GNU LESSER GENERAL PUBLIC LICENSE" - , " Version 2.1, February 1999" - , "" - , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , "[This is the first released version of the Lesser GPL. It also counts" - , " as the successor of the GNU Library Public License, version 2, hence" - , " the version number 2.1.]" - , "" - , " Preamble" - , "" - , " The licenses for most software are designed to take away your" - , "freedom to share and change it. By contrast, the GNU General Public" - , "Licenses are intended to guarantee your freedom to share and change" - , "free software--to make sure the software is free for all its users." - , "" - , " This license, the Lesser General Public License, applies to some" - , "specially designated software packages--typically libraries--of the" - , "Free Software Foundation and other authors who decide to use it. You" - , "can use it too, but we suggest you first think carefully about whether" - , "this license or the ordinary General Public License is the better" - , "strategy to use in any particular case, based on the explanations below." - , "" - , " When we speak of free software, we are referring to freedom of use," - , "not price. 
Our General Public Licenses are designed to make sure that" - , "you have the freedom to distribute copies of free software (and charge" - , "for this service if you wish); that you receive source code or can get" - , "it if you want it; that you can change the software and use pieces of" - , "it in new free programs; and that you are informed that you can do" - , "these things." - , "" - , " To protect your rights, we need to make restrictions that forbid" - , "distributors to deny you these rights or to ask you to surrender these" - , "rights. These restrictions translate to certain responsibilities for" - , "you if you distribute copies of the library or if you modify it." - , "" - , " For example, if you distribute copies of the library, whether gratis" - , "or for a fee, you must give the recipients all the rights that we gave" - , "you. You must make sure that they, too, receive or can get the source" - , "code. If you link other code with the library, you must provide" - , "complete object files to the recipients, so that they can relink them" - , "with the library after making changes to the library and recompiling" - , "it. And you must show them these terms so they know their rights." - , "" - , " We protect your rights with a two-step method: (1) we copyright the" - , "library, and (2) we offer you this license, which gives you legal" - , "permission to copy, distribute and/or modify the library." - , "" - , " To protect each distributor, we want to make it very clear that" - , "there is no warranty for the free library. Also, if the library is" - , "modified by someone else and passed on, the recipients should know" - , "that what they have is not the original version, so that the original" - , "author's reputation will not be affected by problems that might be" - , "introduced by others." - , "" - , " Finally, software patents pose a constant threat to the existence of" - , "any free program. We wish to make sure that a company cannot" - , "effectively restrict the users of a free program by obtaining a" - , "restrictive license from a patent holder. Therefore, we insist that" - , "any patent license obtained for a version of the library must be" - , "consistent with the full freedom of use specified in this license." - , "" - , " Most GNU software, including some libraries, is covered by the" - , "ordinary GNU General Public License. This license, the GNU Lesser" - , "General Public License, applies to certain designated libraries, and" - , "is quite different from the ordinary General Public License. We use" - , "this license for certain libraries in order to permit linking those" - , "libraries into non-free programs." - , "" - , " When a program is linked with a library, whether statically or using" - , "a shared library, the combination of the two is legally speaking a" - , "combined work, a derivative of the original library. The ordinary" - , "General Public License therefore permits such linking only if the" - , "entire combination fits its criteria of freedom. The Lesser General" - , "Public License permits more lax criteria for linking other code with" - , "the library." - , "" - , " We call this license the \"Lesser\" General Public License because it" - , "does Less to protect the user's freedom than the ordinary General" - , "Public License. It also provides other free software developers Less" - , "of an advantage over competing non-free programs. These disadvantages" - , "are the reason we use the ordinary General Public License for many" - , "libraries. 
However, the Lesser license provides advantages in certain" - , "special circumstances." - , "" - , " For example, on rare occasions, there may be a special need to" - , "encourage the widest possible use of a certain library, so that it becomes" - , "a de-facto standard. To achieve this, non-free programs must be" - , "allowed to use the library. A more frequent case is that a free" - , "library does the same job as widely used non-free libraries. In this" - , "case, there is little to gain by limiting the free library to free" - , "software only, so we use the Lesser General Public License." - , "" - , " In other cases, permission to use a particular library in non-free" - , "programs enables a greater number of people to use a large body of" - , "free software. For example, permission to use the GNU C Library in" - , "non-free programs enables many more people to use the whole GNU" - , "operating system, as well as its variant, the GNU/Linux operating" - , "system." - , "" - , " Although the Lesser General Public License is Less protective of the" - , "users' freedom, it does ensure that the user of a program that is" - , "linked with the Library has the freedom and the wherewithal to run" - , "that program using a modified version of the Library." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow. Pay close attention to the difference between a" - , "\"work based on the library\" and a \"work that uses the library\". The" - , "former contains code derived from the library, whereas the latter must" - , "be combined with the library in order to run." - , "" - , " GNU LESSER GENERAL PUBLIC LICENSE" - , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" - , "" - , " 0. This License Agreement applies to any software library or other" - , "program which contains a notice placed by the copyright holder or" - , "other authorized party saying it may be distributed under the terms of" - , "this Lesser General Public License (also called \"this License\")." - , "Each licensee is addressed as \"you\"." - , "" - , " A \"library\" means a collection of software functions and/or data" - , "prepared so as to be conveniently linked with application programs" - , "(which use some of those functions and data) to form executables." - , "" - , " The \"Library\", below, refers to any such software library or work" - , "which has been distributed under these terms. A \"work based on the" - , "Library\" means either the Library or any derivative work under" - , "copyright law: that is to say, a work containing the Library or a" - , "portion of it, either verbatim or with modifications and/or translated" - , "straightforwardly into another language. (Hereinafter, translation is" - , "included without limitation in the term \"modification\".)" - , "" - , " \"Source code\" for a work means the preferred form of the work for" - , "making modifications to it. For a library, complete source code means" - , "all the source code for all modules it contains, plus any associated" - , "interface definition files, plus the scripts used to control compilation" - , "and installation of the library." - , "" - , " Activities other than copying, distribution and modification are not" - , "covered by this License; they are outside its scope. 
The act of" - , "running a program using the Library is not restricted, and output from" - , "such a program is covered only if its contents constitute a work based" - , "on the Library (independent of the use of the Library in a tool for" - , "writing it). Whether that is true depends on what the Library does" - , "and what the program that uses the Library does." - , "" - , " 1. You may copy and distribute verbatim copies of the Library's" - , "complete source code as you receive it, in any medium, provided that" - , "you conspicuously and appropriately publish on each copy an" - , "appropriate copyright notice and disclaimer of warranty; keep intact" - , "all the notices that refer to this License and to the absence of any" - , "warranty; and distribute a copy of this License along with the" - , "Library." - , "" - , " You may charge a fee for the physical act of transferring a copy," - , "and you may at your option offer warranty protection in exchange for a" - , "fee." - , "" - , " 2. You may modify your copy or copies of the Library or any portion" - , "of it, thus forming a work based on the Library, and copy and" - , "distribute such modifications or work under the terms of Section 1" - , "above, provided that you also meet all of these conditions:" - , "" - , " a) The modified work must itself be a software library." - , "" - , " b) You must cause the files modified to carry prominent notices" - , " stating that you changed the files and the date of any change." - , "" - , " c) You must cause the whole of the work to be licensed at no" - , " charge to all third parties under the terms of this License." - , "" - , " d) If a facility in the modified Library refers to a function or a" - , " table of data to be supplied by an application program that uses" - , " the facility, other than as an argument passed when the facility" - , " is invoked, then you must make a good faith effort to ensure that," - , " in the event an application does not supply such function or" - , " table, the facility still operates, and performs whatever part of" - , " its purpose remains meaningful." - , "" - , " (For example, a function in a library to compute square roots has" - , " a purpose that is entirely well-defined independent of the" - , " application. Therefore, Subsection 2d requires that any" - , " application-supplied function or table used by this function must" - , " be optional: if the application does not supply it, the square" - , " root function must still compute square roots.)" - , "" - , "These requirements apply to the modified work as a whole. If" - , "identifiable sections of that work are not derived from the Library," - , "and can be reasonably considered independent and separate works in" - , "themselves, then this License, and its terms, do not apply to those" - , "sections when you distribute them as separate works. But when you" - , "distribute the same sections as part of a whole which is a work based" - , "on the Library, the distribution of the whole must be on the terms of" - , "this License, whose permissions for other licensees extend to the" - , "entire whole, and thus to each and every part regardless of who wrote" - , "it." - , "" - , "Thus, it is not the intent of this section to claim rights or contest" - , "your rights to work written entirely by you; rather, the intent is to" - , "exercise the right to control the distribution of derivative or" - , "collective works based on the Library." 
- , "" - , "In addition, mere aggregation of another work not based on the Library" - , "with the Library (or with a work based on the Library) on a volume of" - , "a storage or distribution medium does not bring the other work under" - , "the scope of this License." - , "" - , " 3. You may opt to apply the terms of the ordinary GNU General Public" - , "License instead of this License to a given copy of the Library. To do" - , "this, you must alter all the notices that refer to this License, so" - , "that they refer to the ordinary GNU General Public License, version 2," - , "instead of to this License. (If a newer version than version 2 of the" - , "ordinary GNU General Public License has appeared, then you can specify" - , "that version instead if you wish.) Do not make any other change in" - , "these notices." - , "" - , " Once this change is made in a given copy, it is irreversible for" - , "that copy, so the ordinary GNU General Public License applies to all" - , "subsequent copies and derivative works made from that copy." - , "" - , " This option is useful when you wish to copy part of the code of" - , "the Library into a program that is not a library." - , "" - , " 4. You may copy and distribute the Library (or a portion or" - , "derivative of it, under Section 2) in object code or executable form" - , "under the terms of Sections 1 and 2 above provided that you accompany" - , "it with the complete corresponding machine-readable source code, which" - , "must be distributed under the terms of Sections 1 and 2 above on a" - , "medium customarily used for software interchange." - , "" - , " If distribution of object code is made by offering access to copy" - , "from a designated place, then offering equivalent access to copy the" - , "source code from the same place satisfies the requirement to" - , "distribute the source code, even though third parties are not" - , "compelled to copy the source along with the object code." - , "" - , " 5. A program that contains no derivative of any portion of the" - , "Library, but is designed to work with the Library by being compiled or" - , "linked with it, is called a \"work that uses the Library\". Such a" - , "work, in isolation, is not a derivative work of the Library, and" - , "therefore falls outside the scope of this License." - , "" - , " However, linking a \"work that uses the Library\" with the Library" - , "creates an executable that is a derivative of the Library (because it" - , "contains portions of the Library), rather than a \"work that uses the" - , "library\". The executable is therefore covered by this License." - , "Section 6 states terms for distribution of such executables." - , "" - , " When a \"work that uses the Library\" uses material from a header file" - , "that is part of the Library, the object code for the work may be a" - , "derivative work of the Library even though the source code is not." - , "Whether this is true is especially significant if the work can be" - , "linked without the Library, or if the work is itself a library. The" - , "threshold for this to be true is not precisely defined by law." - , "" - , " If such an object file uses only numerical parameters, data" - , "structure layouts and accessors, and small macros and small inline" - , "functions (ten lines or less in length), then the use of the object" - , "file is unrestricted, regardless of whether it is legally a derivative" - , "work. 
(Executables containing this object code plus portions of the" - , "Library will still fall under Section 6.)" - , "" - , " Otherwise, if the work is a derivative of the Library, you may" - , "distribute the object code for the work under the terms of Section 6." - , "Any executables containing that work also fall under Section 6," - , "whether or not they are linked directly with the Library itself." - , "" - , " 6. As an exception to the Sections above, you may also combine or" - , "link a \"work that uses the Library\" with the Library to produce a" - , "work containing portions of the Library, and distribute that work" - , "under terms of your choice, provided that the terms permit" - , "modification of the work for the customer's own use and reverse" - , "engineering for debugging such modifications." - , "" - , " You must give prominent notice with each copy of the work that the" - , "Library is used in it and that the Library and its use are covered by" - , "this License. You must supply a copy of this License. If the work" - , "during execution displays copyright notices, you must include the" - , "copyright notice for the Library among them, as well as a reference" - , "directing the user to the copy of this License. Also, you must do one" - , "of these things:" - , "" - , " a) Accompany the work with the complete corresponding" - , " machine-readable source code for the Library including whatever" - , " changes were used in the work (which must be distributed under" - , " Sections 1 and 2 above); and, if the work is an executable linked" - , " with the Library, with the complete machine-readable \"work that" - , " uses the Library\", as object code and/or source code, so that the" - , " user can modify the Library and then relink to produce a modified" - , " executable containing the modified Library. (It is understood" - , " that the user who changes the contents of definitions files in the" - , " Library will not necessarily be able to recompile the application" - , " to use the modified definitions.)" - , "" - , " b) Use a suitable shared library mechanism for linking with the" - , " Library. A suitable mechanism is one that (1) uses at run time a" - , " copy of the library already present on the user's computer system," - , " rather than copying library functions into the executable, and (2)" - , " will operate properly with a modified version of the library, if" - , " the user installs one, as long as the modified version is" - , " interface-compatible with the version that the work was made with." - , "" - , " c) Accompany the work with a written offer, valid for at" - , " least three years, to give the same user the materials" - , " specified in Subsection 6a, above, for a charge no more" - , " than the cost of performing this distribution." - , "" - , " d) If distribution of the work is made by offering access to copy" - , " from a designated place, offer equivalent access to copy the above" - , " specified materials from the same place." - , "" - , " e) Verify that the user has already received a copy of these" - , " materials or that you have already sent this user a copy." - , "" - , " For an executable, the required form of the \"work that uses the" - , "Library\" must include any data and utility programs needed for" - , "reproducing the executable from it. 
However, as a special exception," - , "the materials to be distributed need not include anything that is" - , "normally distributed (in either source or binary form) with the major" - , "components (compiler, kernel, and so on) of the operating system on" - , "which the executable runs, unless that component itself accompanies" - , "the executable." - , "" - , " It may happen that this requirement contradicts the license" - , "restrictions of other proprietary libraries that do not normally" - , "accompany the operating system. Such a contradiction means you cannot" - , "use both them and the Library together in an executable that you" - , "distribute." - , "" - , " 7. You may place library facilities that are a work based on the" - , "Library side-by-side in a single library together with other library" - , "facilities not covered by this License, and distribute such a combined" - , "library, provided that the separate distribution of the work based on" - , "the Library and of the other library facilities is otherwise" - , "permitted, and provided that you do these two things:" - , "" - , " a) Accompany the combined library with a copy of the same work" - , " based on the Library, uncombined with any other library" - , " facilities. This must be distributed under the terms of the" - , " Sections above." - , "" - , " b) Give prominent notice with the combined library of the fact" - , " that part of it is a work based on the Library, and explaining" - , " where to find the accompanying uncombined form of the same work." - , "" - , " 8. You may not copy, modify, sublicense, link with, or distribute" - , "the Library except as expressly provided under this License. Any" - , "attempt otherwise to copy, modify, sublicense, link with, or" - , "distribute the Library is void, and will automatically terminate your" - , "rights under this License. However, parties who have received copies," - , "or rights, from you under this License will not have their licenses" - , "terminated so long as such parties remain in full compliance." - , "" - , " 9. You are not required to accept this License, since you have not" - , "signed it. However, nothing else grants you permission to modify or" - , "distribute the Library or its derivative works. These actions are" - , "prohibited by law if you do not accept this License. Therefore, by" - , "modifying or distributing the Library (or any work based on the" - , "Library), you indicate your acceptance of this License to do so, and" - , "all its terms and conditions for copying, distributing or modifying" - , "the Library or works based on it." - , "" - , " 10. Each time you redistribute the Library (or any work based on the" - , "Library), the recipient automatically receives a license from the" - , "original licensor to copy, distribute, link with or modify the Library" - , "subject to these terms and conditions. You may not impose any further" - , "restrictions on the recipients' exercise of the rights granted herein." - , "You are not responsible for enforcing compliance by third parties with" - , "this License." - , "" - , " 11. If, as a consequence of a court judgment or allegation of patent" - , "infringement or for any other reason (not limited to patent issues)," - , "conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. 
If you cannot" - , "distribute so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you" - , "may not distribute the Library at all. For example, if a patent" - , "license would not permit royalty-free redistribution of the Library by" - , "all those who receive copies directly or indirectly through you, then" - , "the only way you could satisfy both it and this License would be to" - , "refrain entirely from distribution of the Library." - , "" - , "If any portion of this section is held invalid or unenforceable under any" - , "particular circumstance, the balance of the section is intended to apply," - , "and the section as a whole is intended to apply in other circumstances." - , "" - , "It is not the purpose of this section to induce you to infringe any" - , "patents or other property right claims or to contest validity of any" - , "such claims; this section has the sole purpose of protecting the" - , "integrity of the free software distribution system which is" - , "implemented by public license practices. Many people have made" - , "generous contributions to the wide range of software distributed" - , "through that system in reliance on consistent application of that" - , "system; it is up to the author/donor to decide if he or she is willing" - , "to distribute software through any other system and a licensee cannot" - , "impose that choice." - , "" - , "This section is intended to make thoroughly clear what is believed to" - , "be a consequence of the rest of this License." - , "" - , " 12. If the distribution and/or use of the Library is restricted in" - , "certain countries either by patents or by copyrighted interfaces, the" - , "original copyright holder who places the Library under this License may add" - , "an explicit geographical distribution limitation excluding those countries," - , "so that distribution is permitted only in or among countries not thus" - , "excluded. In such case, this License incorporates the limitation as if" - , "written in the body of this License." - , "" - , " 13. The Free Software Foundation may publish revised and/or new" - , "versions of the Lesser General Public License from time to time." - , "Such new versions will be similar in spirit to the present version," - , "but may differ in detail to address new problems or concerns." - , "" - , "Each version is given a distinguishing version number. If the Library" - , "specifies a version number of this License which applies to it and" - , "\"any later version\", you have the option of following the terms and" - , "conditions either of that version or of any later version published by" - , "the Free Software Foundation. If the Library does not specify a" - , "license version number, you may choose any version ever published by" - , "the Free Software Foundation." - , "" - , " 14. If you wish to incorporate parts of the Library into other free" - , "programs whose distribution conditions are incompatible with these," - , "write to the author to ask for permission. For software which is" - , "copyrighted by the Free Software Foundation, write to the Free" - , "Software Foundation; we sometimes make exceptions for this. Our" - , "decision will be guided by the two goals of preserving the free status" - , "of all derivatives of our free software and of promoting the sharing" - , "and reuse of software generally." - , "" - , " NO WARRANTY" - , "" - , " 15. 
BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" - , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." - , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" - , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" - , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" - , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" - , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" - , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" - , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" - , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" - , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" - , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" - , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" - , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" - , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" - , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" - , "DAMAGES." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Libraries" - , "" - , " If you develop a new library, and you want it to be of the greatest" - , "possible use to the public, we recommend making it free software that" - , "everyone can redistribute and change. You can do so by permitting" - , "redistribution under these terms (or, alternatively, under the terms of the" - , "ordinary General Public License)." - , "" - , " To apply these terms, attach the following notices to the library. It is" - , "safest to attach them to the start of each source file to most effectively" - , "convey the exclusion of warranty; and each file should have at least the" - , "\"copyright\" line and a pointer to where the full notice is found." - , "" - , " " - , " Copyright (C) " - , "" - , " This library is free software; you can redistribute it and/or" - , " modify it under the terms of the GNU Lesser General Public" - , " License as published by the Free Software Foundation; either" - , " version 2.1 of the License, or (at your option) any later version." - , "" - , " This library is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU" - , " Lesser General Public License for more details." - , "" - , " You should have received a copy of the GNU Lesser General Public" - , " License along with this library; if not, write to the Free Software" - , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , "You should also get your employer (if you work as a programmer) or your" - , "school, if any, to sign a \"copyright disclaimer\" for the library, if" - , "necessary. Here is a sample; alter the names:" - , "" - , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" - , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." - , "" - , " , 1 April 1990" - , " Ty Coon, President of Vice" - , "" - , "That's all there is to it!" 
- ] - -lgpl3 :: License -lgpl3 = unlines - [ " GNU LESSER GENERAL PUBLIC LICENSE" - , " Version 3, 29 June 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , "" - , " This version of the GNU Lesser General Public License incorporates" - , "the terms and conditions of version 3 of the GNU General Public" - , "License, supplemented by the additional permissions listed below." - , "" - , " 0. Additional Definitions." - , "" - , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" - , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" - , "General Public License." - , "" - , " \"The Library\" refers to a covered work governed by this License," - , "other than an Application or a Combined Work as defined below." - , "" - , " An \"Application\" is any work that makes use of an interface provided" - , "by the Library, but which is not otherwise based on the Library." - , "Defining a subclass of a class defined by the Library is deemed a mode" - , "of using an interface provided by the Library." - , "" - , " A \"Combined Work\" is a work produced by combining or linking an" - , "Application with the Library. The particular version of the Library" - , "with which the Combined Work was made is also called the \"Linked" - , "Version\"." - , "" - , " The \"Minimal Corresponding Source\" for a Combined Work means the" - , "Corresponding Source for the Combined Work, excluding any source code" - , "for portions of the Combined Work that, considered in isolation, are" - , "based on the Application, and not on the Linked Version." - , "" - , " The \"Corresponding Application Code\" for a Combined Work means the" - , "object code and/or source code for the Application, including any data" - , "and utility programs needed for reproducing the Combined Work from the" - , "Application, but excluding the System Libraries of the Combined Work." - , "" - , " 1. Exception to Section 3 of the GNU GPL." - , "" - , " You may convey a covered work under sections 3 and 4 of this License" - , "without being bound by section 3 of the GNU GPL." - , "" - , " 2. Conveying Modified Versions." - , "" - , " If you modify a copy of the Library, and, in your modifications, a" - , "facility refers to a function or data to be supplied by an Application" - , "that uses the facility (other than as an argument passed when the" - , "facility is invoked), then you may convey a copy of the modified" - , "version:" - , "" - , " a) under this License, provided that you make a good faith effort to" - , " ensure that, in the event an Application does not supply the" - , " function or data, the facility still operates, and performs" - , " whatever part of its purpose remains meaningful, or" - , "" - , " b) under the GNU GPL, with none of the additional permissions of" - , " this License applicable to that copy." - , "" - , " 3. Object Code Incorporating Material from Library Header Files." - , "" - , " The object code form of an Application may incorporate material from" - , "a header file that is part of the Library. 
You may convey such object" - , "code under terms of your choice, provided that, if the incorporated" - , "material is not limited to numerical parameters, data structure" - , "layouts and accessors, or small macros, inline functions and templates" - , "(ten or fewer lines in length), you do both of the following:" - , "" - , " a) Give prominent notice with each copy of the object code that the" - , " Library is used in it and that the Library and its use are" - , " covered by this License." - , "" - , " b) Accompany the object code with a copy of the GNU GPL and this license" - , " document." - , "" - , " 4. Combined Works." - , "" - , " You may convey a Combined Work under terms of your choice that," - , "taken together, effectively do not restrict modification of the" - , "portions of the Library contained in the Combined Work and reverse" - , "engineering for debugging such modifications, if you also do each of" - , "the following:" - , "" - , " a) Give prominent notice with each copy of the Combined Work that" - , " the Library is used in it and that the Library and its use are" - , " covered by this License." - , "" - , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" - , " document." - , "" - , " c) For a Combined Work that displays copyright notices during" - , " execution, include the copyright notice for the Library among" - , " these notices, as well as a reference directing the user to the" - , " copies of the GNU GPL and this license document." - , "" - , " d) Do one of the following:" - , "" - , " 0) Convey the Minimal Corresponding Source under the terms of this" - , " License, and the Corresponding Application Code in a form" - , " suitable for, and under terms that permit, the user to" - , " recombine or relink the Application with a modified version of" - , " the Linked Version to produce a modified Combined Work, in the" - , " manner specified by section 6 of the GNU GPL for conveying" - , " Corresponding Source." - , "" - , " 1) Use a suitable shared library mechanism for linking with the" - , " Library. A suitable mechanism is one that (a) uses at run time" - , " a copy of the Library already present on the user's computer" - , " system, and (b) will operate properly with a modified version" - , " of the Library that is interface-compatible with the Linked" - , " Version." - , "" - , " e) Provide Installation Information, but only if you would otherwise" - , " be required to provide such information under section 6 of the" - , " GNU GPL, and only to the extent that such information is" - , " necessary to install and execute a modified version of the" - , " Combined Work produced by recombining or relinking the" - , " Application with a modified version of the Linked Version. (If" - , " you use option 4d0, the Installation Information must accompany" - , " the Minimal Corresponding Source and Corresponding Application" - , " Code. If you use option 4d1, you must provide the Installation" - , " Information in the manner specified by section 6 of the GNU GPL" - , " for conveying Corresponding Source.)" - , "" - , " 5. Combined Libraries." 
- , "" - , " You may place library facilities that are a work based on the" - , "Library side by side in a single library together with other library" - , "facilities that are not Applications and are not covered by this" - , "License, and convey such a combined library under terms of your" - , "choice, if you do both of the following:" - , "" - , " a) Accompany the combined library with a copy of the same work based" - , " on the Library, uncombined with any other library facilities," - , " conveyed under the terms of this License." - , "" - , " b) Give prominent notice with the combined library that part of it" - , " is a work based on the Library, and explaining where to find the" - , " accompanying uncombined form of the same work." - , "" - , " 6. Revised Versions of the GNU Lesser General Public License." - , "" - , " The Free Software Foundation may publish revised and/or new versions" - , "of the GNU Lesser General Public License from time to time. Such new" - , "versions will be similar in spirit to the present version, but may" - , "differ in detail to address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Library as you received it specifies that a certain numbered version" - , "of the GNU Lesser General Public License \"or any later version\"" - , "applies to it, you have the option of following the terms and" - , "conditions either of that published version or of any later version" - , "published by the Free Software Foundation. If the Library as you" - , "received it does not specify a version number of the GNU Lesser" - , "General Public License, you may choose any version of the GNU Lesser" - , "General Public License ever published by the Free Software Foundation." - , "" - , " If the Library as you received it specifies that a proxy can decide" - , "whether future versions of the GNU Lesser General Public License shall" - , "apply, that proxy's public statement of acceptance of any version is" - , "permanent authorization for you to choose that version for the" - , "Library." - ] - -apache20 :: License -apache20 = unlines - [ "" - , " Apache License" - , " Version 2.0, January 2004" - , " http://www.apache.org/licenses/" - , "" - , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" - , "" - , " 1. Definitions." - , "" - , " \"License\" shall mean the terms and conditions for use, reproduction," - , " and distribution as defined by Sections 1 through 9 of this document." - , "" - , " \"Licensor\" shall mean the copyright owner or entity authorized by" - , " the copyright owner that is granting the License." - , "" - , " \"Legal Entity\" shall mean the union of the acting entity and all" - , " other entities that control, are controlled by, or are under common" - , " control with that entity. For the purposes of this definition," - , " \"control\" means (i) the power, direct or indirect, to cause the" - , " direction or management of such entity, whether by contract or" - , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" - , " outstanding shares, or (iii) beneficial ownership of such entity." - , "" - , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" - , " exercising permissions granted by this License." - , "" - , " \"Source\" form shall mean the preferred form for making modifications," - , " including but not limited to software source code, documentation" - , " source, and configuration files." 
- , "" - , " \"Object\" form shall mean any form resulting from mechanical" - , " transformation or translation of a Source form, including but" - , " not limited to compiled object code, generated documentation," - , " and conversions to other media types." - , "" - , " \"Work\" shall mean the work of authorship, whether in Source or" - , " Object form, made available under the License, as indicated by a" - , " copyright notice that is included in or attached to the work" - , " (an example is provided in the Appendix below)." - , "" - , " \"Derivative Works\" shall mean any work, whether in Source or Object" - , " form, that is based on (or derived from) the Work and for which the" - , " editorial revisions, annotations, elaborations, or other modifications" - , " represent, as a whole, an original work of authorship. For the purposes" - , " of this License, Derivative Works shall not include works that remain" - , " separable from, or merely link (or bind by name) to the interfaces of," - , " the Work and Derivative Works thereof." - , "" - , " \"Contribution\" shall mean any work of authorship, including" - , " the original version of the Work and any modifications or additions" - , " to that Work or Derivative Works thereof, that is intentionally" - , " submitted to Licensor for inclusion in the Work by the copyright owner" - , " or by an individual or Legal Entity authorized to submit on behalf of" - , " the copyright owner. For the purposes of this definition, \"submitted\"" - , " means any form of electronic, verbal, or written communication sent" - , " to the Licensor or its representatives, including but not limited to" - , " communication on electronic mailing lists, source code control systems," - , " and issue tracking systems that are managed by, or on behalf of, the" - , " Licensor for the purpose of discussing and improving the Work, but" - , " excluding communication that is conspicuously marked or otherwise" - , " designated in writing by the copyright owner as \"Not a Contribution.\"" - , "" - , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" - , " on behalf of whom a Contribution has been received by Licensor and" - , " subsequently incorporated within the Work." - , "" - , " 2. Grant of Copyright License. Subject to the terms and conditions of" - , " this License, each Contributor hereby grants to You a perpetual," - , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" - , " copyright license to reproduce, prepare Derivative Works of," - , " publicly display, publicly perform, sublicense, and distribute the" - , " Work and such Derivative Works in Source or Object form." - , "" - , " 3. Grant of Patent License. Subject to the terms and conditions of" - , " this License, each Contributor hereby grants to You a perpetual," - , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" - , " (except as stated in this section) patent license to make, have made," - , " use, offer to sell, sell, import, and otherwise transfer the Work," - , " where such license applies only to those patent claims licensable" - , " by such Contributor that are necessarily infringed by their" - , " Contribution(s) alone or by combination of their Contribution(s)" - , " with the Work to which such Contribution(s) was submitted. 
If You" - , " institute patent litigation against any entity (including a" - , " cross-claim or counterclaim in a lawsuit) alleging that the Work" - , " or a Contribution incorporated within the Work constitutes direct" - , " or contributory patent infringement, then any patent licenses" - , " granted to You under this License for that Work shall terminate" - , " as of the date such litigation is filed." - , "" - , " 4. Redistribution. You may reproduce and distribute copies of the" - , " Work or Derivative Works thereof in any medium, with or without" - , " modifications, and in Source or Object form, provided that You" - , " meet the following conditions:" - , "" - , " (a) You must give any other recipients of the Work or" - , " Derivative Works a copy of this License; and" - , "" - , " (b) You must cause any modified files to carry prominent notices" - , " stating that You changed the files; and" - , "" - , " (c) You must retain, in the Source form of any Derivative Works" - , " that You distribute, all copyright, patent, trademark, and" - , " attribution notices from the Source form of the Work," - , " excluding those notices that do not pertain to any part of" - , " the Derivative Works; and" - , "" - , " (d) If the Work includes a \"NOTICE\" text file as part of its" - , " distribution, then any Derivative Works that You distribute must" - , " include a readable copy of the attribution notices contained" - , " within such NOTICE file, excluding those notices that do not" - , " pertain to any part of the Derivative Works, in at least one" - , " of the following places: within a NOTICE text file distributed" - , " as part of the Derivative Works; within the Source form or" - , " documentation, if provided along with the Derivative Works; or," - , " within a display generated by the Derivative Works, if and" - , " wherever such third-party notices normally appear. The contents" - , " of the NOTICE file are for informational purposes only and" - , " do not modify the License. You may add Your own attribution" - , " notices within Derivative Works that You distribute, alongside" - , " or as an addendum to the NOTICE text from the Work, provided" - , " that such additional attribution notices cannot be construed" - , " as modifying the License." - , "" - , " You may add Your own copyright statement to Your modifications and" - , " may provide additional or different license terms and conditions" - , " for use, reproduction, or distribution of Your modifications, or" - , " for any such Derivative Works as a whole, provided Your use," - , " reproduction, and distribution of the Work otherwise complies with" - , " the conditions stated in this License." - , "" - , " 5. Submission of Contributions. Unless You explicitly state otherwise," - , " any Contribution intentionally submitted for inclusion in the Work" - , " by You to the Licensor shall be under the terms and conditions of" - , " this License, without any additional terms or conditions." - , " Notwithstanding the above, nothing herein shall supersede or modify" - , " the terms of any separate license agreement you may have executed" - , " with Licensor regarding such Contributions." - , "" - , " 6. Trademarks. This License does not grant permission to use the trade" - , " names, trademarks, service marks, or product names of the Licensor," - , " except as required for reasonable and customary use in describing the" - , " origin of the Work and reproducing the content of the NOTICE file." - , "" - , " 7. Disclaimer of Warranty. 
Unless required by applicable law or" - , " agreed to in writing, Licensor provides the Work (and each" - , " Contributor provides its Contributions) on an \"AS IS\" BASIS," - , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" - , " implied, including, without limitation, any warranties or conditions" - , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" - , " PARTICULAR PURPOSE. You are solely responsible for determining the" - , " appropriateness of using or redistributing the Work and assume any" - , " risks associated with Your exercise of permissions under this License." - , "" - , " 8. Limitation of Liability. In no event and under no legal theory," - , " whether in tort (including negligence), contract, or otherwise," - , " unless required by applicable law (such as deliberate and grossly" - , " negligent acts) or agreed to in writing, shall any Contributor be" - , " liable to You for damages, including any direct, indirect, special," - , " incidental, or consequential damages of any character arising as a" - , " result of this License or out of the use or inability to use the" - , " Work (including but not limited to damages for loss of goodwill," - , " work stoppage, computer failure or malfunction, or any and all" - , " other commercial damages or losses), even if such Contributor" - , " has been advised of the possibility of such damages." - , "" - , " 9. Accepting Warranty or Additional Liability. While redistributing" - , " the Work or Derivative Works thereof, You may choose to offer," - , " and charge a fee for, acceptance of support, warranty, indemnity," - , " or other liability obligations and/or rights consistent with this" - , " License. However, in accepting such obligations, You may act only" - , " on Your own behalf and on Your sole responsibility, not on behalf" - , " of any other Contributor, and only if You agree to indemnify," - , " defend, and hold each Contributor harmless for any liability" - , " incurred by, or claims asserted against, such Contributor by reason" - , " of your accepting any such warranty or additional liability." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " APPENDIX: How to apply the Apache License to your work." - , "" - , " To apply the Apache License to your work, attach the following" - , " boilerplate notice, with the fields enclosed by brackets \"[]\"" - , " replaced with your own identifying information. (Don't include" - , " the brackets!) The text should be enclosed in the appropriate" - , " comment syntax for the file format. We also recommend that a" - , " file or class name and description of purpose be included on the" - , " same \"printed page\" as the copyright notice for easier" - , " identification within third-party archives." - , "" - , " Copyright [yyyy] [name of copyright owner]" - , "" - , " Licensed under the Apache License, Version 2.0 (the \"License\");" - , " you may not use this file except in compliance with the License." - , " You may obtain a copy of the License at" - , "" - , " http://www.apache.org/licenses/LICENSE-2.0" - , "" - , " Unless required by applicable law or agreed to in writing, software" - , " distributed under the License is distributed on an \"AS IS\" BASIS," - , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." - , " See the License for the specific language governing permissions and" - , " limitations under the License." 
- ] - -mit :: String -> String -> License -mit authors year = unlines - [ "Copyright (c) " ++ year ++ " " ++ authors - , "" - , "Permission is hereby granted, free of charge, to any person obtaining" - , "a copy of this software and associated documentation files (the" - , "\"Software\"), to deal in the Software without restriction, including" - , "without limitation the rights to use, copy, modify, merge, publish," - , "distribute, sublicense, and/or sell copies of the Software, and to" - , "permit persons to whom the Software is furnished to do so, subject to" - , "the following conditions:" - , "" - , "The above copyright notice and this permission notice shall be included" - , "in all copies or substantial portions of the Software." - , "" - , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," - , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" - , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." - , "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" - , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," - , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" - , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." - ] - -mpl20 :: License -mpl20 = unlines - [ "Mozilla Public License Version 2.0" - , "==================================" - , "" - , "1. Definitions" - , "--------------" - , "" - , "1.1. \"Contributor\"" - , " means each individual or legal entity that creates, contributes to" - , " the creation of, or owns Covered Software." - , "" - , "1.2. \"Contributor Version\"" - , " means the combination of the Contributions of others (if any) used" - , " by a Contributor and that particular Contributor's Contribution." - , "" - , "1.3. \"Contribution\"" - , " means Covered Software of a particular Contributor." - , "" - , "1.4. \"Covered Software\"" - , " means Source Code Form to which the initial Contributor has attached" - , " the notice in Exhibit A, the Executable Form of such Source Code" - , " Form, and Modifications of such Source Code Form, in each case" - , " including portions thereof." - , "" - , "1.5. \"Incompatible With Secondary Licenses\"" - , " means" - , "" - , " (a) that the initial Contributor has attached the notice described" - , " in Exhibit B to the Covered Software; or" - , "" - , " (b) that the Covered Software was made available under the terms of" - , " version 1.1 or earlier of the License, but not also under the" - , " terms of a Secondary License." - , "" - , "1.6. \"Executable Form\"" - , " means any form of the work other than Source Code Form." - , "" - , "1.7. \"Larger Work\"" - , " means a work that combines Covered Software with other material, in" - , " a separate file or files, that is not Covered Software." - , "" - , "1.8. \"License\"" - , " means this document." - , "" - , "1.9. \"Licensable\"" - , " means having the right to grant, to the maximum extent possible," - , " whether at the time of the initial grant or subsequently, any and" - , " all of the rights conveyed by this License." - , "" - , "1.10. \"Modifications\"" - , " means any of the following:" - , "" - , " (a) any file in Source Code Form that results from an addition to," - , " deletion from, or modification of the contents of Covered" - , " Software; or" - , "" - , " (b) any new file in Source Code Form that contains any Covered" - , " Software." - , "" - , "1.11. 
\"Patent Claims\" of a Contributor" - , " means any patent claim(s), including without limitation, method," - , " process, and apparatus claims, in any patent Licensable by such" - , " Contributor that would be infringed, but for the grant of the" - , " License, by the making, using, selling, offering for sale, having" - , " made, import, or transfer of either its Contributions or its" - , " Contributor Version." - , "" - , "1.12. \"Secondary License\"" - , " means either the GNU General Public License, Version 2.0, the GNU" - , " Lesser General Public License, Version 2.1, the GNU Affero General" - , " Public License, Version 3.0, or any later versions of those" - , " licenses." - , "" - , "1.13. \"Source Code Form\"" - , " means the form of the work preferred for making modifications." - , "" - , "1.14. \"You\" (or \"Your\")" - , " means an individual or a legal entity exercising rights under this" - , " License. For legal entities, \"You\" includes any entity that" - , " controls, is controlled by, or is under common control with You. For" - , " purposes of this definition, \"control\" means (a) the power, direct" - , " or indirect, to cause the direction or management of such entity," - , " whether by contract or otherwise, or (b) ownership of more than" - , " fifty percent (50%) of the outstanding shares or beneficial" - , " ownership of such entity." - , "" - , "2. License Grants and Conditions" - , "--------------------------------" - , "" - , "2.1. Grants" - , "" - , "Each Contributor hereby grants You a world-wide, royalty-free," - , "non-exclusive license:" - , "" - , "(a) under intellectual property rights (other than patent or trademark)" - , " Licensable by such Contributor to use, reproduce, make available," - , " modify, display, perform, distribute, and otherwise exploit its" - , " Contributions, either on an unmodified basis, with Modifications, or" - , " as part of a Larger Work; and" - , "" - , "(b) under Patent Claims of such Contributor to make, use, sell, offer" - , " for sale, have made, import, and otherwise transfer either its" - , " Contributions or its Contributor Version." - , "" - , "2.2. Effective Date" - , "" - , "The licenses granted in Section 2.1 with respect to any Contribution" - , "become effective for each Contribution on the date the Contributor first" - , "distributes such Contribution." - , "" - , "2.3. Limitations on Grant Scope" - , "" - , "The licenses granted in this Section 2 are the only rights granted under" - , "this License. No additional rights or licenses will be implied from the" - , "distribution or licensing of Covered Software under this License." - , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" - , "Contributor:" - , "" - , "(a) for any code that a Contributor has removed from Covered Software;" - , " or" - , "" - , "(b) for infringements caused by: (i) Your and any other third party's" - , " modifications of Covered Software, or (ii) the combination of its" - , " Contributions with other software (except as part of its Contributor" - , " Version); or" - , "" - , "(c) under Patent Claims infringed by Covered Software in the absence of" - , " its Contributions." - , "" - , "This License does not grant any rights in the trademarks, service marks," - , "or logos of any Contributor (except as may be necessary to comply with" - , "the notice requirements in Section 3.4)." - , "" - , "2.4. 
Subsequent Licenses" - , "" - , "No Contributor makes additional grants as a result of Your choice to" - , "distribute the Covered Software under a subsequent version of this" - , "License (see Section 10.2) or under the terms of a Secondary License (if" - , "permitted under the terms of Section 3.3)." - , "" - , "2.5. Representation" - , "" - , "Each Contributor represents that the Contributor believes its" - , "Contributions are its original creation(s) or it has sufficient rights" - , "to grant the rights to its Contributions conveyed by this License." - , "" - , "2.6. Fair Use" - , "" - , "This License is not intended to limit any rights You have under" - , "applicable copyright doctrines of fair use, fair dealing, or other" - , "equivalents." - , "" - , "2.7. Conditions" - , "" - , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" - , "in Section 2.1." - , "" - , "3. Responsibilities" - , "-------------------" - , "" - , "3.1. Distribution of Source Form" - , "" - , "All distribution of Covered Software in Source Code Form, including any" - , "Modifications that You create or to which You contribute, must be under" - , "the terms of this License. You must inform recipients that the Source" - , "Code Form of the Covered Software is governed by the terms of this" - , "License, and how they can obtain a copy of this License. You may not" - , "attempt to alter or restrict the recipients' rights in the Source Code" - , "Form." - , "" - , "3.2. Distribution of Executable Form" - , "" - , "If You distribute Covered Software in Executable Form then:" - , "" - , "(a) such Covered Software must also be made available in Source Code" - , " Form, as described in Section 3.1, and You must inform recipients of" - , " the Executable Form how they can obtain a copy of such Source Code" - , " Form by reasonable means in a timely manner, at a charge no more" - , " than the cost of distribution to the recipient; and" - , "" - , "(b) You may distribute such Executable Form under the terms of this" - , " License, or sublicense it under different terms, provided that the" - , " license for the Executable Form does not attempt to limit or alter" - , " the recipients' rights in the Source Code Form under this License." - , "" - , "3.3. Distribution of a Larger Work" - , "" - , "You may create and distribute a Larger Work under terms of Your choice," - , "provided that You also comply with the requirements of this License for" - , "the Covered Software. If the Larger Work is a combination of Covered" - , "Software with a work governed by one or more Secondary Licenses, and the" - , "Covered Software is not Incompatible With Secondary Licenses, this" - , "License permits You to additionally distribute such Covered Software" - , "under the terms of such Secondary License(s), so that the recipient of" - , "the Larger Work may, at their option, further distribute the Covered" - , "Software under the terms of either this License or such Secondary" - , "License(s)." - , "" - , "3.4. Notices" - , "" - , "You may not remove or alter the substance of any license notices" - , "(including copyright notices, patent notices, disclaimers of warranty," - , "or limitations of liability) contained within the Source Code Form of" - , "the Covered Software, except that You may alter any license notices to" - , "the extent required to remedy known factual inaccuracies." - , "" - , "3.5. 
Application of Additional Terms" - , "" - , "You may choose to offer, and to charge a fee for, warranty, support," - , "indemnity or liability obligations to one or more recipients of Covered" - , "Software. However, You may do so only on Your own behalf, and not on" - , "behalf of any Contributor. You must make it absolutely clear that any" - , "such warranty, support, indemnity, or liability obligation is offered by" - , "You alone, and You hereby agree to indemnify every Contributor for any" - , "liability incurred by such Contributor as a result of warranty, support," - , "indemnity or liability terms You offer. You may include additional" - , "disclaimers of warranty and limitations of liability specific to any" - , "jurisdiction." - , "" - , "4. Inability to Comply Due to Statute or Regulation" - , "---------------------------------------------------" - , "" - , "If it is impossible for You to comply with any of the terms of this" - , "License with respect to some or all of the Covered Software due to" - , "statute, judicial order, or regulation then You must: (a) comply with" - , "the terms of this License to the maximum extent possible; and (b)" - , "describe the limitations and the code they affect. Such description must" - , "be placed in a text file included with all distributions of the Covered" - , "Software under this License. Except to the extent prohibited by statute" - , "or regulation, such description must be sufficiently detailed for a" - , "recipient of ordinary skill to be able to understand it." - , "" - , "5. Termination" - , "--------------" - , "" - , "5.1. The rights granted under this License will terminate automatically" - , "if You fail to comply with any of its terms. However, if You become" - , "compliant, then the rights granted under this License from a particular" - , "Contributor are reinstated (a) provisionally, unless and until such" - , "Contributor explicitly and finally terminates Your grants, and (b) on an" - , "ongoing basis, if such Contributor fails to notify You of the" - , "non-compliance by some reasonable means prior to 60 days after You have" - , "come back into compliance. Moreover, Your grants from a particular" - , "Contributor are reinstated on an ongoing basis if such Contributor" - , "notifies You of the non-compliance by some reasonable means, this is the" - , "first time You have received notice of non-compliance with this License" - , "from such Contributor, and You become compliant prior to 30 days after" - , "Your receipt of the notice." - , "" - , "5.2. If You initiate litigation against any entity by asserting a patent" - , "infringement claim (excluding declaratory judgment actions," - , "counter-claims, and cross-claims) alleging that a Contributor Version" - , "directly or indirectly infringes any patent, then the rights granted to" - , "You by any and all Contributors for the Covered Software under Section" - , "2.1 of this License shall terminate." - , "" - , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" - , "end user license agreements (excluding distributors and resellers) which" - , "have been validly granted by You or Your distributors under this License" - , "prior to termination shall survive termination." - , "" - , "************************************************************************" - , "* *" - , "* 6. 
Disclaimer of Warranty *" - , "* ------------------------- *" - , "* *" - , "* Covered Software is provided under this License on an \"as is\" *" - , "* basis, without warranty of any kind, either expressed, implied, or *" - , "* statutory, including, without limitation, warranties that the *" - , "* Covered Software is free of defects, merchantable, fit for a *" - , "* particular purpose or non-infringing. The entire risk as to the *" - , "* quality and performance of the Covered Software is with You. *" - , "* Should any Covered Software prove defective in any respect, You *" - , "* (not any Contributor) assume the cost of any necessary servicing, *" - , "* repair, or correction. This disclaimer of warranty constitutes an *" - , "* essential part of this License. No use of any Covered Software is *" - , "* authorized under this License except under this disclaimer. *" - , "* *" - , "************************************************************************" - , "" - , "************************************************************************" - , "* *" - , "* 7. Limitation of Liability *" - , "* -------------------------- *" - , "* *" - , "* Under no circumstances and under no legal theory, whether tort *" - , "* (including negligence), contract, or otherwise, shall any *" - , "* Contributor, or anyone who distributes Covered Software as *" - , "* permitted above, be liable to You for any direct, indirect, *" - , "* special, incidental, or consequential damages of any character *" - , "* including, without limitation, damages for lost profits, loss of *" - , "* goodwill, work stoppage, computer failure or malfunction, or any *" - , "* and all other commercial damages or losses, even if such party *" - , "* shall have been informed of the possibility of such damages. This *" - , "* limitation of liability shall not apply to liability for death or *" - , "* personal injury resulting from such party's negligence to the *" - , "* extent applicable law prohibits such limitation. Some *" - , "* jurisdictions do not allow the exclusion or limitation of *" - , "* incidental or consequential damages, so this exclusion and *" - , "* limitation may not apply to You. *" - , "* *" - , "************************************************************************" - , "" - , "8. Litigation" - , "-------------" - , "" - , "Any litigation relating to this License may be brought only in the" - , "courts of a jurisdiction where the defendant maintains its principal" - , "place of business and such litigation shall be governed by laws of that" - , "jurisdiction, without reference to its conflict-of-law provisions." - , "Nothing in this Section shall prevent a party's ability to bring" - , "cross-claims or counter-claims." - , "" - , "9. Miscellaneous" - , "----------------" - , "" - , "This License represents the complete agreement concerning the subject" - , "matter hereof. If any provision of this License is held to be" - , "unenforceable, such provision shall be reformed only to the extent" - , "necessary to make it enforceable. Any law or regulation which provides" - , "that the language of a contract shall be construed against the drafter" - , "shall not be used to construe this License against a Contributor." - , "" - , "10. Versions of the License" - , "---------------------------" - , "" - , "10.1. New Versions" - , "" - , "Mozilla Foundation is the license steward. 
Except as provided in Section" - , "10.3, no one other than the license steward has the right to modify or" - , "publish new versions of this License. Each version will be given a" - , "distinguishing version number." - , "" - , "10.2. Effect of New Versions" - , "" - , "You may distribute the Covered Software under the terms of the version" - , "of the License under which You originally received the Covered Software," - , "or under the terms of any subsequent version published by the license" - , "steward." - , "" - , "10.3. Modified Versions" - , "" - , "If you create software not governed by this License, and you want to" - , "create a new license for such software, you may create and use a" - , "modified version of this License if you rename the license and remove" - , "any references to the name of the license steward (except to note that" - , "such modified license differs from this License)." - , "" - , "10.4. Distributing Source Code Form that is Incompatible With Secondary" - , "Licenses" - , "" - , "If You choose to distribute Source Code Form that is Incompatible With" - , "Secondary Licenses under the terms of this version of the License, the" - , "notice described in Exhibit B of this License must be attached." - , "" - , "Exhibit A - Source Code Form License Notice" - , "-------------------------------------------" - , "" - , " This Source Code Form is subject to the terms of the Mozilla Public" - , " License, v. 2.0. If a copy of the MPL was not distributed with this" - , " file, You can obtain one at http://mozilla.org/MPL/2.0/." - , "" - , "If it is not possible or desirable to put the notice in a particular" - , "file, then You may include the notice in a location (such as a LICENSE" - , "file in a relevant directory) where a recipient would be likely to look" - , "for such a notice." - , "" - , "You may add additional accurate notices of copyright ownership." - , "" - , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" - , "---------------------------------------------------------" - , "" - , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" - , " defined by the Mozilla Public License, v. 2.0." - ] - -isc :: String -> String -> License -isc authors year = unlines - [ "Copyright (c) " ++ year ++ " " ++ authors - , "" - , "Permission to use, copy, modify, and/or distribute this software for any purpose" - , "with or without fee is hereby granted, provided that the above copyright notice" - , "and this permission notice appear in all copies." - , "" - , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" - , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" - , "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," - , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" - , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" - , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" - , "THIS SOFTWARE." 
- ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init/Types.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,118 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Init.Types --- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Some types used by the 'cabal init' command. --- ------------------------------------------------------------------------------ -module Distribution.Client.Init.Types where - -import Distribution.Simple.Setup - ( Flag(..) ) - -import Distribution.Compat.Semigroup -import Distribution.Version -import Distribution.Verbosity -import qualified Distribution.Package as P -import Distribution.License -import Distribution.ModuleName -import Language.Haskell.Extension ( Language(..), Extension ) - -import qualified Text.PrettyPrint as Disp -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Text - -import GHC.Generics ( Generic ) - --- | InitFlags is really just a simple type to represent certain --- portions of a .cabal file. Rather than have a flag for EVERY --- possible field, we just have one for each field that the user is --- likely to want and/or that we are likely to be able to --- intelligently guess. -data InitFlags = - InitFlags { nonInteractive :: Flag Bool - , quiet :: Flag Bool - , packageDir :: Flag FilePath - , noComments :: Flag Bool - , minimal :: Flag Bool - - , packageName :: Flag P.PackageName - , version :: Flag Version - , cabalVersion :: Flag VersionRange - , license :: Flag License - , author :: Flag String - , email :: Flag String - , homepage :: Flag String - - , synopsis :: Flag String - , category :: Flag (Either String Category) - , extraSrc :: Maybe [String] - - , packageType :: Flag PackageType - , mainIs :: Flag FilePath - , language :: Flag Language - - , exposedModules :: Maybe [ModuleName] - , otherModules :: Maybe [ModuleName] - , otherExts :: Maybe [Extension] - - , dependencies :: Maybe [P.Dependency] - , sourceDirs :: Maybe [String] - , buildTools :: Maybe [String] - - , initVerbosity :: Flag Verbosity - , overwrite :: Flag Bool - } - deriving (Show, Generic) - - -- the Monoid instance for Flag has later values override earlier - -- ones, which is why we want Maybe [foo] for collecting foo values, - -- not Flag [foo]. - -data PackageType = Library | Executable - deriving (Show, Read, Eq) - -instance Text PackageType where - disp = Disp.text . show - parse = Parse.choice $ map (fmap read . Parse.string . show) [Library, Executable] - -instance Monoid InitFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InitFlags where - (<>) = gmappend - --- | Some common package categories. 
-data Category - = Codec - | Concurrency - | Control - | Data - | Database - | Development - | Distribution - | Game - | Graphics - | Language - | Math - | Network - | Sound - | System - | Testing - | Text - | Web - deriving (Read, Show, Eq, Ord, Bounded, Enum) - -instance Text Category where - disp = Disp.text . show - parse = Parse.choice $ map (fmap read . Parse.string . show) [Codec .. ] - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Init.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Init.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,965 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ -- | -- Module : Distribution.Client.Init -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Implementation of the 'cabal init' command, which creates an initial .cabal -- file for a project. -- ------------------------------------------------------------------------------ - -module Distribution.Client.Init ( - - -- * Commands - initCabal - , pvpize - , incVersion - - ) where - -import System.IO - ( hSetBuffering, stdout, BufferMode(..) ) -import System.Directory - ( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile - , getDirectoryContents, createDirectoryIfMissing ) -import System.FilePath - ( (</>), (<.>), takeBaseName ) -import Data.Time - ( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone ) - -import Data.Char - ( toUpper ) -import Data.List - ( intercalate, nub, groupBy, (\\) ) -import Data.Maybe - ( fromMaybe, isJust, catMaybes, listToMaybe ) -import Data.Function - ( on ) -import qualified Data.Map as M -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative - ( (<$>) ) -import Data.Traversable - ( traverse ) -#endif -import Control.Monad - ( when, unless, (>=>), join, forM_ ) -import Control.Arrow - ( (&&&), (***) ) - -import Text.PrettyPrint hiding (mode, cat) - -import Data.Version - ( Version(..) ) -import Distribution.Version - ( orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.ModuleName - ( ModuleName, fromString ) -- And for the Text instance -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo, sourcePackageId, exposed ) -import qualified Distribution.Package as P -import Language.Haskell.Extension ( Language(..) ) - -import Distribution.Client.Init.Types - ( InitFlags(..), PackageType(..), Category(..)
) -import Distribution.Client.Init.Licenses - ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) -import Distribution.Client.Init.Heuristics - ( guessPackageName, guessAuthorNameMail, guessMainFileCandidates, - SourceFileEntry(..), - scanForModules, neededBuildPrograms ) - -import Distribution.License - ( License(..), knownLicenses ) - -import Distribution.ReadE - ( runReadE, readP_to_E ) -import Distribution.Simple.Setup - ( Flag(..), flagToMaybe ) -import Distribution.Simple.Configure - ( getInstalledPackages ) -import Distribution.Simple.Compiler - ( PackageDBStack, Compiler ) -import Distribution.Simple.Program - ( ProgramConfiguration ) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, moduleNameIndex ) -import Distribution.Text - ( display, Text(..) ) - -import Distribution.Client.PackageIndex - ( elemByPackageName ) -import Distribution.Client.IndexUtils - ( getSourcePackages ) -import Distribution.Client.Types - ( SourcePackageDb(..) ) -import Distribution.Client.Setup - ( RepoContext(..) ) - -initCabal :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramConfiguration - -> InitFlags - -> IO () -initCabal verbosity packageDBs repoCtxt comp conf initFlags = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - - hSetBuffering stdout NoBuffering - - initFlags' <- extendFlags installedPkgIndex sourcePkgDb initFlags - - case license initFlags' of - Flag PublicDomain -> return () - _ -> writeLicense initFlags' - writeSetupFile initFlags' - writeChangeLog initFlags' - createSourceDirectories initFlags' - createMainHs initFlags' - success <- writeCabalFile initFlags' - - when success $ generateWarnings initFlags' - ---------------------------------------------------------------------------- --- Flag acquisition ----------------------------------------------------- ---------------------------------------------------------------------------- - --- | Fill in more details by guessing, discovering, or prompting the --- user. -extendFlags :: InstalledPackageIndex -> SourcePackageDb -> InitFlags -> IO InitFlags -extendFlags pkgIx sourcePkgDb = - getPackageName sourcePkgDb - >=> getVersion - >=> getLicense - >=> getAuthorInfo - >=> getHomepage - >=> getSynopsis - >=> getCategory - >=> getExtraSourceFiles - >=> getLibOrExec - >=> getSrcDir - >=> getLanguage - >=> getGenComments - >=> getModulesBuildToolsAndDeps pkgIx - --- | Combine two actions which may return a value, preferring the first. That --- is, run the second action only if the first doesn't return a value. -infixr 1 ?>> -(?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a) -f ?>> g = do - ma <- f - if isJust ma - then return ma - else g - --- | Witness the isomorphism between Maybe and Flag. -maybeToFlag :: Maybe a -> Flag a -maybeToFlag = maybe NoFlag Flag - --- | Get the package name: use the package directory (supplied, or the current --- directory by default) as a guess. It looks at the SourcePackageDb to avoid --- using an existing package name. 
-getPackageName :: SourcePackageDb -> InitFlags -> IO InitFlags -getPackageName sourcePkgDb flags = do - guess <- traverse guessPackageName (flagToMaybe $ packageDir flags) - ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName) - - let guess' | isPkgRegistered guess = Nothing - | otherwise = guess - - pkgName' <- return (flagToMaybe $ packageName flags) - ?>> maybePrompt flags (prompt "Package name" guess') - ?>> return guess' - - chooseAgain <- if isPkgRegistered pkgName' - then promptYesNo promptOtherNameMsg (Just True) - else return False - - if chooseAgain - then getPackageName sourcePkgDb flags - else return $ flags { packageName = maybeToFlag pkgName' } - - where - isPkgRegistered (Just pkg) = elemByPackageName (packageIndex sourcePkgDb) pkg - isPkgRegistered Nothing = False - - promptOtherNameMsg = "This package name is already used by another " ++ - "package on hackage. Do you want to choose a " ++ - "different name" - --- | Package version: use 0.1.0.0 as a last resort, but try prompting the user --- if possible. -getVersion :: InitFlags -> IO InitFlags -getVersion flags = do - let v = Just $ Version [0,1,0,0] [] - v' <- return (flagToMaybe $ version flags) - ?>> maybePrompt flags (prompt "Package version" v) - ?>> return v - return $ flags { version = maybeToFlag v' } - --- | Choose a license. -getLicense :: InitFlags -> IO InitFlags -getLicense flags = do - lic <- return (flagToMaybe $ license flags) - ?>> fmap (fmap (either UnknownLicense id)) - (maybePrompt flags - (promptList "Please choose a license" listedLicenses (Just BSD3) display True)) - return $ flags { license = maybeToFlag lic } - where - listedLicenses = - knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing - , Apache Nothing, OtherLicense] - --- | The author's name and email. Prompt, or try to guess from an existing --- darcs repo. -getAuthorInfo :: InitFlags -> IO InitFlags -getAuthorInfo flags = do - (authorName, authorEmail) <- - (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail - authorName' <- return (flagToMaybe $ author flags) - ?>> maybePrompt flags (promptStr "Author name" authorName) - ?>> return authorName - - authorEmail' <- return (flagToMaybe $ email flags) - ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail) - ?>> return authorEmail - - return $ flags { author = maybeToFlag authorName' - , email = maybeToFlag authorEmail' - } - --- | Prompt for a homepage URL. -getHomepage :: InitFlags -> IO InitFlags -getHomepage flags = do - hp <- queryHomepage - hp' <- return (flagToMaybe $ homepage flags) - ?>> maybePrompt flags (promptStr "Project homepage URL" hp) - ?>> return hp - - return $ flags { homepage = maybeToFlag hp' } - --- | Right now this does nothing, but it could be changed to do some --- intelligent guessing. -queryHomepage :: IO (Maybe String) -queryHomepage = return Nothing -- get default remote darcs repo? - --- | Prompt for a project synopsis. -getSynopsis :: InitFlags -> IO InitFlags -getSynopsis flags = do - syn <- return (flagToMaybe $ synopsis flags) - ?>> maybePrompt flags (promptStr "Project synopsis" Nothing) - - return $ flags { synopsis = maybeToFlag syn } - --- | Prompt for a package category. --- Note that it should be possible to do some smarter guessing here too, i.e. --- look at the name of the top level source directory. 
-getCategory :: InitFlags -> IO InitFlags -getCategory flags = do - cat <- return (flagToMaybe $ category flags) - ?>> fmap join (maybePrompt flags - (promptListOptional "Project category" [Codec ..])) - return $ flags { category = maybeToFlag cat } - --- | Try to guess extra source files (don't prompt the user). -getExtraSourceFiles :: InitFlags -> IO InitFlags -getExtraSourceFiles flags = do - extraSrcFiles <- return (extraSrc flags) - ?>> Just `fmap` guessExtraSourceFiles flags - - return $ flags { extraSrc = extraSrcFiles } - -defaultChangeLog :: FilePath -defaultChangeLog = "ChangeLog.md" - --- | Try to guess things to include in the extra-source-files field. --- For now, we just look for things in the root directory named --- 'readme', 'changes', or 'changelog', with any sort of --- capitalization and any extension. -guessExtraSourceFiles :: InitFlags -> IO [FilePath] -guessExtraSourceFiles flags = do - dir <- - maybe getCurrentDirectory return . flagToMaybe $ packageDir flags - files <- getDirectoryContents dir - let extraFiles = filter isExtra files - if any isLikeChangeLog extraFiles - then return extraFiles - else return (defaultChangeLog : extraFiles) - - where - isExtra = likeFileNameBase ("README" : changeLogLikeBases) - isLikeChangeLog = likeFileNameBase changeLogLikeBases - likeFileNameBase candidates = (`elem` candidates) . map toUpper . takeBaseName - changeLogLikeBases = ["CHANGES", "CHANGELOG"] - --- | Ask whether the project builds a library or executable. -getLibOrExec :: InitFlags -> IO InitFlags -getLibOrExec flags = do - isLib <- return (flagToMaybe $ packageType flags) - ?>> maybePrompt flags (either (const Library) id `fmap` - promptList "What does the package build" - [Library, Executable] - Nothing display False) - ?>> return (Just Library) - mainFile <- if isLib /= Just Executable then return Nothing else - getMainFile flags - - return $ flags { packageType = maybeToFlag isLib - , mainIs = maybeToFlag mainFile - } - --- | Try to guess the main file of the executable, and prompt the user to choose --- one of them. Top-level modules including the word 'Main' in the file name --- will be candidates, and shorter filenames will be preferred. -getMainFile :: InitFlags -> IO (Maybe FilePath) -getMainFile flags = - return (flagToMaybe $ mainIs flags) - ?>> do - candidates <- guessMainFileCandidates flags - let showCandidate = either (++" (does not yet exist, but will be created)") id - defaultFile = listToMaybe candidates - maybePrompt flags (either id (either id id) `fmap` - promptList "What is the main module of the executable" - candidates - defaultFile showCandidate True) - ?>> return (fmap (either id id) defaultFile) - --- | Ask for the base language of the package. -getLanguage :: InitFlags -> IO InitFlags -getLanguage flags = do - lang <- return (flagToMaybe $ language flags) - ?>> maybePrompt flags - (either UnknownLanguage id `fmap` - promptList "What base language is the package written in" - [Haskell2010, Haskell98] - (Just Haskell2010) display True) - ?>> return (Just Haskell2010) - - return $ flags { language = maybeToFlag lang } - --- | Ask whether to generate explanatory comments. 
-getGenComments :: InitFlags -> IO InitFlags -getGenComments flags = do - genComments <- return (not <$> flagToMaybe (noComments flags)) - ?>> maybePrompt flags (promptYesNo promptMsg (Just False)) - ?>> return (Just False) - return $ flags { noComments = maybeToFlag (fmap not genComments) } - where - promptMsg = "Add informative comments to each field in the cabal file (y/n)" - --- | Ask for the source root directory. -getSrcDir :: InitFlags -> IO InitFlags -getSrcDir flags = do - srcDirs <- return (sourceDirs flags) - ?>> fmap (:[]) `fmap` guessSourceDir flags - ?>> fmap (>>= fmap ((:[]) . either id id)) (maybePrompt - flags - (promptListOptional' "Source directory" ["src"] id)) - - return $ flags { sourceDirs = srcDirs } - --- | Try to guess source directory. Could try harder; for the --- moment just looks to see whether there is a directory called 'src'. -guessSourceDir :: InitFlags -> IO (Maybe String) -guessSourceDir flags = do - dir <- - maybe getCurrentDirectory return . flagToMaybe $ packageDir flags - srcIsDir <- doesDirectoryExist (dir </> "src") - return $ if srcIsDir - then Just "src" - else Nothing - --- | Get the list of exposed modules and extra tools needed to build them. -getModulesBuildToolsAndDeps :: InstalledPackageIndex -> InitFlags -> IO InitFlags -getModulesBuildToolsAndDeps pkgIx flags = do - dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags - - -- TODO: really should use guessed source roots. - sourceFiles <- scanForModules dir - - Just mods <- return (exposedModules flags) - ?>> (return . Just . map moduleName $ sourceFiles) - - tools <- return (buildTools flags) - ?>> (return . Just . neededBuildPrograms $ sourceFiles) - - deps <- return (dependencies flags) - ?>> Just <$> importsToDeps flags - (fromString "Prelude" : -- to ensure we get base as a dep - ( nub -- only need to consider each imported package once - . filter (`notElem` mods) -- don't consider modules from - -- this package itself - . concatMap imports - $ sourceFiles - ) - ) - pkgIx - - exts <- return (otherExts flags) - ?>> (return . Just . nub . concatMap extensions $ sourceFiles) - - return $ flags { exposedModules = Just mods - , buildTools = tools - , dependencies = deps - , otherExts = exts - } - -importsToDeps :: InitFlags -> [ModuleName] -> InstalledPackageIndex -> IO [P.Dependency] -importsToDeps flags mods pkgIx = do - - let modMap :: M.Map ModuleName [InstalledPackageInfo] - modMap = M.map (filter exposed) $ moduleNameIndex pkgIx - - modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])] - modDeps = map (id &&& flip M.lookup modMap) mods - - message flags "\nGuessing dependencies..." - nub . catMaybes <$> mapM (chooseDep flags) modDeps - --- Given a module and a list of installed packages providing it, --- choose a dependency (i.e. package + version range) to use for that --- module. -chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo]) - -> IO (Maybe P.Dependency) - -chooseDep flags (m, Nothing) - = message flags ("\nWarning: no package found providing " ++ display m ++ ".") - >> return Nothing - -chooseDep flags (m, Just []) - = message flags ("\nWarning: no package found providing " ++ display m ++ ".") - >> return Nothing - - -- We found some packages: group them by name. -chooseDep flags (m, Just ps) - = case pkgGroups of - -- if there's only one group, i.e. multiple versions of a single package, - -- we make it into a dependency, choosing the latest-ish version (see toDep).
- [grp] -> Just <$> toDep grp - -- otherwise, we refuse to choose between different packages and make the user - -- do it. - grps -> do message flags ("\nWarning: multiple packages found providing " - ++ display m - ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps)) - message flags "You will need to pick one and manually add it to the Build-depends: field." - return Nothing - where - pkgGroups = groupBy ((==) `on` P.pkgName) (map sourcePackageId ps) - - -- Given a list of available versions of the same package, pick a dependency. - toDep :: [P.PackageIdentifier] -> IO P.Dependency - - -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* - toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize . P.pkgVersion $ pid) - - -- Otherwise, choose the latest version and issue a warning. - toDep pids = do - message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.") - return $ P.Dependency (P.pkgName . head $ pids) - (pvpize . maximum . map P.pkgVersion $ pids) - --- | Given a version, return an API-compatible (according to PVP) version range. --- --- Example: @0.4.1@ produces the version range @>= 0.4 && < 0.5@ (which is the --- same as @0.4.*@). -pvpize :: Version -> VersionRange -pvpize v = orLaterVersion v' - `intersectVersionRanges` - earlierVersion (incVersion 1 v') - where v' = (v { versionBranch = take 2 (versionBranch v) }) - --- | Increment the nth version component (counting from 0). -incVersion :: Int -> Version -> Version -incVersion n (Version vlist tags) = Version (incVersion' n vlist) tags - where - incVersion' 0 [] = [1] - incVersion' 0 (v:_) = [v+1] - incVersion' m [] = replicate m 0 ++ [1] - incVersion' m (v:vs) = v : incVersion' (m-1) vs - ---------------------------------------------------------------------------- --- Prompting/user interaction ------------------------------------------- ---------------------------------------------------------------------------- - --- | Run a prompt or not based on the nonInteractive flag of the --- InitFlags structure. -maybePrompt :: InitFlags -> IO t -> IO (Maybe t) -maybePrompt flags p = - case nonInteractive flags of - Flag True -> return Nothing - _ -> Just `fmap` p - --- | Create a prompt with optional default value that returns a --- String. -promptStr :: String -> Maybe String -> IO String -promptStr = promptDefault' Just id - --- | Create a yes/no prompt with optional default value. --- -promptYesNo :: String -> Maybe Bool -> IO Bool -promptYesNo = - promptDefault' recogniseYesNo showYesNo - where - recogniseYesNo s | s == "y" || s == "Y" = Just True - | s == "n" || s == "N" = Just False - | otherwise = Nothing - showYesNo True = "y" - showYesNo False = "n" - --- | Create a prompt with optional default value that returns a value --- of some Text instance. -prompt :: Text t => String -> Maybe t -> IO t -prompt = promptDefault' - (either (const Nothing) Just . runReadE (readP_to_E id parse)) - display - --- | Create a prompt with an optional default value. -promptDefault' :: (String -> Maybe t) -- ^ parser - -> (t -> String) -- ^ pretty-printer - -> String -- ^ prompt message - -> Maybe t -- ^ optional default value - -> IO t -promptDefault' parser pretty pr def = do - putStr $ mkDefPrompt pr (pretty `fmap` def) - inp <- getLine - case (inp, def) of - ("", Just d) -> return d - _ -> case parser inp of - Just t -> return t - Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!" 
- promptDefault' parser pretty pr def - --- | Create a prompt from a prompt string and a String representation --- of an optional default value. -mkDefPrompt :: String -> Maybe String -> String -mkDefPrompt pr def = pr ++ "?" ++ defStr def - where defStr Nothing = " " - defStr (Just s) = " [default: " ++ s ++ "] " - -promptListOptional :: (Text t, Eq t) - => String -- ^ prompt - -> [t] -- ^ choices - -> IO (Maybe (Either String t)) -promptListOptional pr choices = promptListOptional' pr choices display - -promptListOptional' :: Eq t - => String -- ^ prompt - -> [t] -- ^ choices - -> (t -> String) -- ^ show an item - -> IO (Maybe (Either String t)) -promptListOptional' pr choices displayItem = - fmap rearrange - $ promptList pr (Nothing : map Just choices) (Just Nothing) - (maybe "(none)" displayItem) True - where - rearrange = either (Just . Left) (fmap Right) - --- | Create a prompt from a list of items. -promptList :: Eq t - => String -- ^ prompt - -> [t] -- ^ choices - -> Maybe t -- ^ optional default value - -> (t -> String) -- ^ show an item - -> Bool -- ^ whether to allow an 'other' option - -> IO (Either String t) -promptList pr choices def displayItem other = do - putStrLn $ pr ++ ":" - let options1 = map (\c -> (Just c == def, displayItem c)) choices - options2 = zip ([1..]::[Int]) - (options1 ++ [(False, "Other (specify)") | other]) - mapM_ (putStrLn . \(n,(i,s)) -> showOption n i ++ s) options2 - promptList' displayItem (length options2) choices def other - where showOption n i | n < 10 = " " ++ star i ++ " " ++ rest - | otherwise = " " ++ star i ++ rest - where rest = show n ++ ") " - star True = "*" - star False = " " - -promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t) -promptList' displayItem numChoices choices def other = do - putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def) - inp <- getLine - case (inp, def) of - ("", Just d) -> return $ Right d - _ -> case readMaybe inp of - Nothing -> invalidChoice inp - Just n -> getChoice n - where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice." - promptList' displayItem numChoices choices def other - getChoice n | n < 1 || n > numChoices = invalidChoice (show n) - | n < numChoices || - (n == numChoices && not other) - = return . Right $ choices !! (n-1) - | otherwise = Left `fmap` promptStr "Please specify" Nothing - -readMaybe :: (Read a) => String -> Maybe a -readMaybe s = case reads s of - [(a,"")] -> Just a - _ -> Nothing - ---------------------------------------------------------------------------- --- File generation ------------------------------------------------------ ---------------------------------------------------------------------------- - -writeLicense :: InitFlags -> IO () -writeLicense flags = do - message flags "\nGenerating LICENSE..." - year <- show <$> getYear - let authors = fromMaybe "???" . flagToMaybe . 
author $ flags - let licenseFile = - case license flags of - Flag BSD2 - -> Just $ bsd2 authors year - - Flag BSD3 - -> Just $ bsd3 authors year - - Flag (GPL (Just (Version {versionBranch = [2]}))) - -> Just gplv2 - - Flag (GPL (Just (Version {versionBranch = [3]}))) - -> Just gplv3 - - Flag (LGPL (Just (Version {versionBranch = [2, 1]}))) - -> Just lgpl21 - - Flag (LGPL (Just (Version {versionBranch = [3]}))) - -> Just lgpl3 - - Flag (AGPL (Just (Version {versionBranch = [3]}))) - -> Just agplv3 - - Flag (Apache (Just (Version {versionBranch = [2, 0]}))) - -> Just apache20 - - Flag MIT - -> Just $ mit authors year - - Flag (MPL (Version {versionBranch = [2, 0]})) - -> Just mpl20 - - Flag ISC - -> Just $ isc authors year - - _ -> Nothing - - case licenseFile of - Just licenseText -> writeFileSafe flags "LICENSE" licenseText - Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself." - -getYear :: IO Integer -getYear = do - u <- getCurrentTime - z <- getCurrentTimeZone - let l = utcToLocalTime z u - (y, _, _) = toGregorian $ localDay l - return y - -writeSetupFile :: InitFlags -> IO () -writeSetupFile flags = do - message flags "Generating Setup.hs..." - writeFileSafe flags "Setup.hs" setupFile - where - setupFile = unlines - [ "import Distribution.Simple" - , "main = defaultMain" - ] - -writeChangeLog :: InitFlags -> IO () -writeChangeLog flags = when (any (== defaultChangeLog) $ maybe [] id (extraSrc flags)) $ do - message flags ("Generating "++ defaultChangeLog ++"...") - writeFileSafe flags defaultChangeLog changeLog - where - changeLog = unlines - [ "# Revision history for " ++ pname - , "" - , "## " ++ pver ++ " -- YYYY-mm-dd" - , "" - , "* First version. Released on an unsuspecting world." - ] - pname = maybe "" display $ flagToMaybe $ packageName flags - pver = maybe "" display $ flagToMaybe $ version flags - - - -writeCabalFile :: InitFlags -> IO Bool -writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do - message flags "Error: no package name provided." - return False -writeCabalFile flags@(InitFlags{packageName = Flag p}) = do - let cabalFileName = display p ++ ".cabal" - message flags $ "Generating " ++ cabalFileName ++ "..." - writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags) - return True - --- | Write a file \"safely\", backing up any existing version (unless --- the overwrite flag is set). -writeFileSafe :: InitFlags -> FilePath -> String -> IO () -writeFileSafe flags fileName content = do - moveExistingFile flags fileName - writeFile fileName content - --- | Create source directories, if they were given. -createSourceDirectories :: InitFlags -> IO () -createSourceDirectories flags = case sourceDirs flags of - Just dirs -> forM_ dirs (createDirectoryIfMissing True) - Nothing -> return () - --- | Create Main.hs, but only if we are init'ing an executable and --- the mainIs flag has been provided. -createMainHs :: InitFlags -> IO () -createMainHs flags@InitFlags{ sourceDirs = Just (srcPath:_) - , packageType = Flag Executable - , mainIs = Flag mainFile } = - writeMainHs flags (srcPath mainFile) -createMainHs flags@InitFlags{ sourceDirs = _ - , packageType = Flag Executable - , mainIs = Flag mainFile } = - writeMainHs flags mainFile -createMainHs _ = return () - --- | Write a main file if it doesn't already exist. 
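The pvpize/incVersion pair removed earlier in this hunk implements the PVP bound arithmetic described in its comment. A small self-contained sketch of the same arithmetic, using only Data.Version from base (no Cabal types; pvpBounds is an invented name):

import Data.Version (Version, makeVersion, showVersion, versionBranch)

-- Given x.y.z, the PVP-compatible range is >= x.y && < x.(y+1); pvpBounds
-- returns those two bounds as plain Versions instead of a Cabal VersionRange.
pvpBounds :: Version -> (Version, Version)
pvpBounds v = (makeVersion prefix, makeVersion (bump prefix))
  where
    prefix = take 2 (versionBranch v)
    bump (x:y:_) = [x, y + 1]
    bump [x]     = [x, 1]
    bump []      = [0, 1]

main :: IO ()
main = do
  let (lo, hi) = pvpBounds (makeVersion [0, 4, 1])
  -- Prints ">= 0.4 && < 0.5", the example given in the pvpize comment.
  putStrLn (">= " ++ showVersion lo ++ " && < " ++ showVersion hi)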
-writeMainHs :: InitFlags -> FilePath -> IO ()
-writeMainHs flags mainPath = do
-  dir <- maybe getCurrentDirectory return (flagToMaybe $ packageDir flags)
-  let mainFullPath = dir </> mainPath
-  exists <- doesFileExist mainFullPath
-  unless exists $ do
-    message flags $ "Generating " ++ mainPath ++ "..."
-    writeFileSafe flags mainFullPath mainHs
-
--- | Default Main.hs file. Used when no Main.hs exists.
-mainHs :: String
-mainHs = unlines
-  [ "module Main where"
-  , ""
-  , "main :: IO ()"
-  , "main = putStrLn \"Hello, Haskell!\""
-  ]
-
--- | Move an existing file, if there is one, and the overwrite flag is
--- not set.
-moveExistingFile :: InitFlags -> FilePath -> IO ()
-moveExistingFile flags fileName =
-  unless (overwrite flags == Flag True) $ do
-    e <- doesFileExist fileName
-    when e $ do
-      newName <- findNewName fileName
-      message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName
-      copyFile fileName newName
-
-findNewName :: FilePath -> IO FilePath
-findNewName oldName = findNewName' 0
-  where
-    findNewName' :: Integer -> IO FilePath
-    findNewName' n = do
-      let newName = oldName <.> ("save" ++ show n)
-      e <- doesFileExist newName
-      if e then findNewName' (n+1) else return newName
-
--- | Generate a .cabal file from an InitFlags structure. NOTE: this
--- is rather ad-hoc! What we would REALLY like is to have a
--- standard low-level AST type representing .cabal files, which
--- preserves things like comments, and to write an *inverse*
--- parser/pretty-printer pair between .cabal files and this AST.
--- Then instead of this ad-hoc code we could just map an InitFlags
--- structure onto a low-level AST structure and use the existing
--- pretty-printing code to generate the file.
-generateCabalFile :: String -> InitFlags -> String
-generateCabalFile fileName c =
-  (++ "\n") .
-  renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $
-  (if minimal c /= Flag True
-    then showComment (Just $ "Initial " ++ fileName ++ " generated by cabal "
-                          ++ "init. For further documentation, see "
-                          ++ "http://haskell.org/cabal/users-guide/")
-         $$ text ""
-    else empty)
-  $$
-  vcat [ field "name" (packageName c)
-           (Just "The name of the package.")
-           True
-
-       , field "version" (version c)
-           (Just $ "The package version. 
See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttps://wiki.haskell.org/Package_versioning_policy\n" - ++ "PVP summary: +-+------- breaking API changes\n" - ++ " | | +----- non-breaking API additions\n" - ++ " | | | +--- code changes with no API change") - True - - , fieldS "synopsis" (synopsis c) - (Just "A short (one-line) description of the package.") - True - - , fieldS "description" NoFlag - (Just "A longer description of the package.") - True - - , fieldS "homepage" (homepage c) - (Just "URL for the project homepage or repository.") - False - - , fieldS "bug-reports" NoFlag - (Just "A URL where users can report bugs.") - False - - , field "license" (license c) - (Just "The license under which the package is released.") - True - - , case (license c) of - Flag PublicDomain -> empty - _ -> fieldS "license-file" (Flag "LICENSE") - (Just "The file containing the license text.") - True - - , fieldS "author" (author c) - (Just "The package author(s).") - True - - , fieldS "maintainer" (email c) - (Just "An email address to which users can send suggestions, bug reports, and patches.") - True - - , case (license c) of - Flag PublicDomain -> empty - _ -> fieldS "copyright" NoFlag - (Just "A copyright notice.") - True - - , fieldS "category" (either id display `fmap` category c) - Nothing - True - - , fieldS "build-type" (Flag "Simple") - Nothing - True - - , fieldS "extra-source-files" (listFieldS (extraSrc c)) - (Just "Extra files to be distributed with the package, such as examples or a README.") - True - - , field "cabal-version" (Flag $ orLaterVersion (Version [1,10] [])) - (Just "Constraint on the version of Cabal needed to build this package.") - False - - , case packageType c of - Flag Executable -> - text "\nexecutable" <+> - text (maybe "" display . flagToMaybe $ packageName c) $$ - nest 2 (vcat - [ fieldS "main-is" (mainIs c) (Just ".hs or .lhs file containing the Main module.") True - - , generateBuildInfo Executable c - ]) - Flag Library -> text "\nlibrary" $$ nest 2 (vcat - [ fieldS "exposed-modules" (listField (exposedModules c)) - (Just "Modules exported by the library.") - True - - , generateBuildInfo Library c - ]) - _ -> empty - ] - where - generateBuildInfo :: PackageType -> InitFlags -> Doc - generateBuildInfo pkgtype c' = vcat - [ fieldS "other-modules" (listField (otherModules c')) - (Just $ case pkgtype of - Library -> "Modules included in this library but not exported." - Executable -> "Modules included in this executable, other than Main.") - True - - , fieldS "other-extensions" (listField (otherExts c')) - (Just "LANGUAGE extensions used by modules in this package.") - True - - , fieldS "build-depends" (listField (dependencies c')) - (Just "Other library packages from which modules are imported.") - True - - , fieldS "hs-source-dirs" (listFieldS (sourceDirs c')) - (Just "Directories containing source files.") - True - - , fieldS "build-tools" (listFieldS (buildTools c')) - (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.") - False - - , field "default-language" (language c') - (Just "Base language which the package is written in.") - True - ] - - listField :: Text s => Maybe [s] -> Flag String - listField = listFieldS . fmap (map display) - - listFieldS :: Maybe [String] -> Flag String - listFieldS = Flag . 
maybe "" (intercalate ", ") - - field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc - field s f = fieldS s (fmap display f) - - fieldS :: String -- ^ Name of the field - -> Flag String -- ^ Field contents - -> Maybe String -- ^ Comment to explain the field - -> Bool -- ^ Should the field be included (commented out) even if blank? - -> Doc - fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty - fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty - fieldS s f com _ = case (isJust com, noComments c, minimal c) of - (_, _, Flag True) -> id - (_, Flag True, _) -> id - (True, _, _) -> (showComment com $$) . ($$ text "") - (False, _, _) -> ($$ text "") - $ - comment f <> text s <> colon - <> text (replicate (20 - length s) ' ') - <> text (fromMaybe "" . flagToMaybe $ f) - comment NoFlag = text "-- " - comment (Flag "") = text "-- " - comment _ = text "" - - showComment :: Maybe String -> Doc - showComment (Just t) = vcat - . map (text . ("-- "++)) . lines - . renderStyle style { - lineLength = 76, - ribbonsPerLine = 1.05 - } - . vcat - . map (fcat . map text . breakLine) - . lines - $ t - showComment Nothing = text "" - - breakLine [] = [] - breakLine cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs' - breakLine' [] = [] - breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs' - --- | Generate warnings for missing fields etc. -generateWarnings :: InitFlags -> IO () -generateWarnings flags = do - message flags "" - when (synopsis flags `elem` [NoFlag, Flag ""]) - (message flags "Warning: no synopsis given. You should edit the .cabal file and add one.") - - message flags "You may want to edit the .cabal file and add a Description field." - --- | Possibly generate a message to stdout, taking into account the --- --quiet flag. -message :: InitFlags -> String -> IO () -message (InitFlags{quiet = Flag True}) _ = return () -message _ s = putStrLn s diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Install.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Install.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Install.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Install.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1641 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Install --- Copyright : (c) 2005 David Himmelstrup --- 2007 Bjorn Bringert --- 2007-2010 Duncan Coutts --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- High level interface to package installation. 
------------------------------------------------------------------------------ -module Distribution.Client.Install ( - -- * High-level interface - install, - - -- * Lower-level interface that allows to manipulate the install plan - makeInstallContext, - makeInstallPlan, - processInstallPlan, - InstallArgs, - InstallContext, - - -- * Prune certain packages from the install plan - pruneInstallPlan - ) where - -import Data.Foldable - ( traverse_ ) -import Data.List - ( isPrefixOf, unfoldr, nub, sort, (\\) ) -import qualified Data.Map as Map -import qualified Data.Set as S -import Data.Maybe - ( catMaybes, isJust, isNothing, fromMaybe, mapMaybe ) -import Control.Exception as Exception - ( Exception(toException), bracket, catches - , Handler(Handler), handleJust, IOException, SomeException ) -#ifndef mingw32_HOST_OS -import Control.Exception as Exception - ( Exception(fromException) ) -#endif -import System.Exit - ( ExitCode(..) ) -import Distribution.Compat.Exception - ( catchIO, catchExit ) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative - ( (<$>) ) -import Data.Traversable - ( traverse ) -#endif -import Control.Monad - ( filterM, forM_, when, unless ) -import System.Directory - ( getTemporaryDirectory, doesDirectoryExist, doesFileExist, - createDirectoryIfMissing, removeFile, renameDirectory ) -import System.FilePath - ( (), (<.>), equalFilePath, takeDirectory ) -import System.IO - ( openFile, IOMode(AppendMode), hClose ) -import System.IO.Error - ( isDoesNotExistError, ioeGetFileName ) - -import Distribution.Client.Targets -import Distribution.Client.Configure - ( chooseCabalVersion, configureSetupScript, checkConfigExFlags ) -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types - ( Solver(..), ConstraintSource(..), LabeledPackageConstraint(..) ) -import Distribution.Client.FetchUtils -import Distribution.Client.HttpUtils - ( HttpTransport (..) ) -import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.Setup - ( GlobalFlags(..), RepoContext(..) - , ConfigFlags(..), configureCommand, filterConfigureFlags - , ConfigExFlags(..), InstallFlags(..) ) -import Distribution.Client.Config - ( defaultCabalDir, defaultUserInstall ) -import Distribution.Client.Sandbox.Timestamp - ( withUpdateTimestamps ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox - , whenUsingSandbox ) -import Distribution.Client.Tar (extractTarGzFile) -import Distribution.Client.Types as Source -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) 
) -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import qualified Distribution.Client.BuildReports.Anonymous as BuildReports -import qualified Distribution.Client.BuildReports.Storage as BuildReports - ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) -import qualified Distribution.Client.InstallSymlink as InstallSymlink - ( symlinkBinaries ) -import qualified Distribution.Client.PackageIndex as SourcePackageIndex -import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade -import qualified Distribution.Client.World as World -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Client.Compat.ExecutablePath -import Distribution.Client.JobControl -import qualified Distribution.Client.ComponentDeps as CD - -import Distribution.Utils.NubList -import Distribution.Simple.Compiler - ( CompilerId(..), Compiler(compilerId), compilerFlavor - , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) -import Distribution.Simple.Program (ProgramConfiguration, - defaultProgramConfiguration) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.LocalBuildInfo (ComponentName(CLibName)) -import qualified Distribution.Simple.Configure as Configure -import Distribution.Simple.Setup - ( haddockCommand, HaddockFlags(..) - , buildCommand, BuildFlags(..), emptyBuildFlags - , AllowNewer(..) - , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) -import qualified Distribution.Simple.Setup as Cabal - ( Flag(..) - , copyCommand, CopyFlags(..), emptyCopyFlags - , registerCommand, RegisterFlags(..), emptyRegisterFlags - , testCommand, TestFlags(..), emptyTestFlags ) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose, rawSystemExit, comparing - , writeFileAtomic, withTempFile , withUTF8FileContents ) -import Distribution.Simple.InstallDirs as InstallDirs - ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate - , initialPathTemplateEnv, installDirsTemplateEnv ) -import Distribution.Package - ( PackageIdentifier(..), PackageId, packageName, packageVersion - , Package(..) - , Dependency(..), thisPackageVersion - , UnitId(..), mkUnitId - , HasUnitId(..) ) -import qualified Distribution.PackageDescription as PackageDescription -import Distribution.PackageDescription - ( PackageDescription, GenericPackageDescription(..), Flag(..) - , FlagName(..), FlagAssignment ) -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription ) -import Distribution.Client.PkgConfigDb - ( PkgConfigDb, readPkgConfigDb ) -import Distribution.ParseUtils - ( showPWarning ) -import Distribution.Version - ( Version, VersionRange, foldVersionRange ) -import Distribution.Simple.Utils as Utils - ( notice, info, warn, debug, debugNoWrap, die - , intercalate, withTempDirectory ) -import Distribution.Client.Utils - ( determineNumJobs, inDir, logDirChange, mergeBy, MergeResult(..) 
- , tryCanonicalizePath ) -import Distribution.System - ( Platform, OS(Windows), buildOS ) -import Distribution.Text - ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity, showForCabal, normal, verbose ) -import Distribution.Simple.BuildPaths ( exeExtension ) - ---TODO: --- * assign flags to packages individually --- * complain about flags that do not apply to any package given as target --- so flags do not apply to dependencies, only listed, can use flag --- constraints for dependencies --- * only record applicable flags in world file --- * allow flag constraints --- * allow installed constraints --- * allow flag and installed preferences --- * change world file to use cabal section syntax --- * allow persistent configure flags for each package individually - --- ------------------------------------------------------------ --- * Top level user actions --- ------------------------------------------------------------ - --- | Installs the packages needed to satisfy a list of dependencies. --- -install - :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramConfiguration - -> UseSandbox - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> ConfigFlags - -> ConfigExFlags - -> InstallFlags - -> HaddockFlags - -> [UserTarget] - -> IO () -install verbosity packageDBs repos comp platform conf useSandbox mSandboxPkgInfo - globalFlags configFlags configExFlags installFlags haddockFlags - userTargets0 = do - - installContext <- makeInstallContext verbosity args (Just userTargets0) - planResult <- foldProgress logMsg (return . Left) (return . Right) =<< - makeInstallPlan verbosity args installContext - - case planResult of - Left message -> do - reportPlanningFailure verbosity args installContext message - die' message - Right installPlan -> - processInstallPlan verbosity args installContext installPlan - where - args :: InstallArgs - args = (packageDBs, repos, comp, platform, conf, useSandbox, mSandboxPkgInfo, - globalFlags, configFlags, configExFlags, installFlags, - haddockFlags) - - die' message = die (message ++ if isUseSandbox useSandbox - then installFailedInSandbox else []) - -- TODO: use a better error message, remove duplication. - installFailedInSandbox = - "\nNote: when using a sandbox, all packages are required to have " - ++ "consistent dependencies. " - ++ "Try reinstalling/unregistering the offending packages or " - ++ "recreating the sandbox." - logMsg message rest = debugNoWrap verbosity message >> rest - --- TODO: Make InstallContext a proper data type with documented fields. --- | Common context for makeInstallPlan and processInstallPlan. -type InstallContext = ( InstalledPackageIndex, SourcePackageDb - , PkgConfigDb - , [UserTarget], [PackageSpecifier SourcePackage] - , HttpTransport ) - --- TODO: Make InstallArgs a proper data type with documented fields or just get --- rid of it completely. --- | Initial arguments given to 'install' or 'makeInstallContext'. -type InstallArgs = ( PackageDBStack - , RepoContext - , Compiler - , Platform - , ProgramConfiguration - , UseSandbox - , Maybe SandboxPackageInfo - , GlobalFlags - , ConfigFlags - , ConfigExFlags - , InstallFlags - , HaddockFlags ) - --- | Make an install context given install arguments. 
-makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] - -> IO InstallContext -makeInstallContext verbosity - (packageDBs, repoCtxt, comp, _, conf,_,_, - globalFlags, _, configExFlags, _, _) mUserTargets = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity conf - - checkConfigExFlags verbosity installedPkgIndex - (packageIndex sourcePkgDb) configExFlags - transport <- repoContextGetTransport repoCtxt - - (userTargets, pkgSpecifiers) <- case mUserTargets of - Nothing -> - -- We want to distinguish between the case where the user has given an - -- empty list of targets on the command-line and the case where we - -- specifically want to have an empty list of targets. - return ([], []) - Just userTargets0 -> do - -- For install, if no target is given it means we use the current - -- directory as the single target. - let userTargets | null userTargets0 = [UserTargetLocalDir "."] - | otherwise = userTargets0 - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - return (userTargets, pkgSpecifiers) - - return (installedPkgIndex, sourcePkgDb, pkgConfigDb, userTargets - ,pkgSpecifiers, transport) - --- | Make an install plan given install context and install arguments. -makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext - -> IO (Progress String String InstallPlan) -makeInstallPlan verbosity - (_, _, comp, platform, _, _, mSandboxPkgInfo, - _, configFlags, configExFlags, installFlags, - _) - (installedPkgIndex, sourcePkgDb, pkgConfigDb, - _, pkgSpecifiers, _) = do - - solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags)) - (compilerInfo comp) - notice verbosity "Resolving dependencies..." - return $ planPackages comp platform mSandboxPkgInfo solver - configFlags configExFlags installFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - --- | Given an install plan, perform the actual installations. 
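makeInstallContext above defaults an empty target list to the current directory. A toy sketch of just that rule; the UserTarget type here is a two-constructor stand-in for the real one in Distribution.Client.Targets:

-- A bare "cabal install" behaves like "cabal install ." .
data UserTarget = UserTargetLocalDir FilePath | UserTargetNamed String
  deriving Show

defaultTargets :: [UserTarget] -> [UserTarget]
defaultTargets [] = [UserTargetLocalDir "."]
defaultTargets ts = ts

main :: IO ()
main = do
  print (defaultTargets [])                        -- [UserTargetLocalDir "."]
  print (defaultTargets [UserTargetNamed "text"])  -- [UserTargetNamed "text"]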
-processInstallPlan :: Verbosity -> InstallArgs -> InstallContext - -> InstallPlan - -> IO () -processInstallPlan verbosity - args@(_,_, _, _, _, _, _, _, _, _, installFlags, _) - (installedPkgIndex, sourcePkgDb, _, - userTargets, pkgSpecifiers, _) installPlan = do - checkPrintPlan verbosity installedPkgIndex installPlan sourcePkgDb - installFlags pkgSpecifiers - - unless (dryRun || nothingToInstall) $ do - installPlan' <- performInstallations verbosity - args installedPkgIndex installPlan - postInstallActions verbosity args userTargets installPlan' - where - dryRun = fromFlag (installDryRun installFlags) - nothingToInstall = null (InstallPlan.ready installPlan) - --- ------------------------------------------------------------ --- * Installation planning --- ------------------------------------------------------------ - -planPackages :: Compiler - -> Platform - -> Maybe SandboxPackageInfo - -> Solver - -> ConfigFlags - -> ConfigExFlags - -> InstallFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier SourcePackage] - -> Progress String String InstallPlan -planPackages comp platform mSandboxPkgInfo solver - configFlags configExFlags installFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = - - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setAvoidReinstalls avoidReinstalls - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - . setPreferenceDefault (if upgradeDeps then PreferAllLatest - else PreferLatestForSelected) - - . removeUpperBounds allowNewer - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- configPreferences configExFlags ] - - . addConstraints - -- version constraints from the config file or command line - [ LabeledPackageConstraint (userToPackageConstraint pc) src - | (pc, src) <- configExConstraints configExFlags ] - - . addConstraints - --FIXME: this just applies all flags to all targets which - -- is silly. We should check if the flags are appropriate - [ let pc = PackageConstraintFlags - (pkgSpecifierTarget pkgSpecifier) flags - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - | let flags = configConfigurationsFlags configFlags - , not (null flags) - , pkgSpecifier <- pkgSpecifiers ] - - . addConstraints - [ let pc = PackageConstraintStanzas - (pkgSpecifierTarget pkgSpecifier) stanzas - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - | pkgSpecifier <- pkgSpecifiers ] - - . maybe id applySandboxInstallPolicy mSandboxPkgInfo - - . 
(if reinstall then reinstallTargets else id) - - $ standardInstallPolicy - installedPkgIndex sourcePkgDb pkgSpecifiers - - stanzas = [ TestStanzas | testsEnabled ] - ++ [ BenchStanzas | benchmarksEnabled ] - testsEnabled = fromFlagOrDefault False $ configTests configFlags - benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags - - reinstall = fromFlag (installOverrideReinstall installFlags) || - fromFlag (installReinstall installFlags) - reorderGoals = fromFlag (installReorderGoals installFlags) - independentGoals = fromFlag (installIndependentGoals installFlags) - avoidReinstalls = fromFlag (installAvoidReinstalls installFlags) - shadowPkgs = fromFlag (installShadowPkgs installFlags) - strongFlags = fromFlag (installStrongFlags installFlags) - maxBackjumps = fromFlag (installMaxBackjumps installFlags) - upgradeDeps = fromFlag (installUpgradeDeps installFlags) - onlyDeps = fromFlag (installOnlyDeps installFlags) - allowNewer = fromMaybe AllowNewerNone (configAllowNewer configFlags) - --- | Remove the provided targets from the install plan. -pruneInstallPlan :: Package targetpkg - => [PackageSpecifier targetpkg] - -> InstallPlan - -> Progress String String InstallPlan -pruneInstallPlan pkgSpecifiers = - -- TODO: this is a general feature and should be moved to D.C.Dependency - -- Also, the InstallPlan.remove should return info more precise to the - -- problem, rather than the very general PlanProblem type. - either (Fail . explain) Done - . InstallPlan.remove (\pkg -> packageName pkg `elem` targetnames) - where - explain :: [InstallPlan.PlanProblem ipkg srcpkg iresult ifailure] -> String - explain problems = - "Cannot select only the dependencies (as requested by the " - ++ "'--only-dependencies' flag), " - ++ (case pkgids of - [pkgid] -> "the package " ++ display pkgid ++ " is " - _ -> "the packages " - ++ intercalate ", " (map display pkgids) ++ " are ") - ++ "required by a dependency of one of the other targets." - where - pkgids = - nub [ depid - | InstallPlan.PackageMissingDeps _ depids <- problems - , depid <- depids - , packageName depid `elem` targetnames ] - - targetnames = map pkgSpecifierTarget pkgSpecifiers - --- ------------------------------------------------------------ --- * Informational messages --- ------------------------------------------------------------ - --- | Perform post-solver checks of the install plan and print it if --- either requested or needed. -checkPrintPlan :: Verbosity - -> InstalledPackageIndex - -> InstallPlan - -> SourcePackageDb - -> InstallFlags - -> [PackageSpecifier SourcePackage] - -> IO () -checkPrintPlan verbosity installed installPlan sourcePkgDb - installFlags pkgSpecifiers = do - - -- User targets that are already installed. - let preExistingTargets = - [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers, - InstallPlan.PreExisting p <- InstallPlan.toList installPlan, - packageName p `elem` tgts ] - - -- If there's nothing to install, we print the already existing - -- target packages as an explanation. - when nothingToInstall $ - notice verbosity $ unlines $ - "All the requested packages are already installed:" - : map (display . packageId) preExistingTargets - ++ ["Use --reinstall if you want to reinstall anyway."] - - let lPlan = linearizeInstallPlan installed installPlan - -- Are any packages classified as reinstalls? - let reinstalledPkgs = concatMap (extractReinstalls . snd) lPlan - -- Packages that are already broken. - let oldBrokenPkgs = - map Installed.installedUnitId - . 
PackageIndex.reverseDependencyClosure installed - . map (Installed.installedUnitId . fst) - . PackageIndex.brokenPackages - $ installed - let excluded = reinstalledPkgs ++ oldBrokenPkgs - -- Packages that are reverse dependencies of replaced packages are very - -- likely to be broken. We exclude packages that are already broken. - let newBrokenPkgs = - filter (\ p -> not (Installed.installedUnitId p `elem` excluded)) - (PackageIndex.reverseDependencyClosure installed reinstalledPkgs) - let containsReinstalls = not (null reinstalledPkgs) - let breaksPkgs = not (null newBrokenPkgs) - - let adaptedVerbosity - | containsReinstalls && not overrideReinstall = verbosity `max` verbose - | otherwise = verbosity - - -- We print the install plan if we are in a dry-run or if we are confronted - -- with a dangerous install plan. - when (dryRun || containsReinstalls && not overrideReinstall) $ - printPlan (dryRun || breaksPkgs && not overrideReinstall) - adaptedVerbosity lPlan sourcePkgDb - - -- If the install plan is dangerous, we print various warning messages. In - -- particular, if we can see that packages are likely to be broken, we even - -- bail out (unless installation has been forced with --force-reinstalls). - when containsReinstalls $ do - if breaksPkgs - then do - (if dryRun || overrideReinstall then warn verbosity else die) $ unlines $ - "The following packages are likely to be broken by the reinstalls:" - : map (display . Installed.sourcePackageId) newBrokenPkgs - ++ if overrideReinstall - then if dryRun then [] else - ["Continuing even though " ++ - "the plan contains dangerous reinstalls."] - else - ["Use --force-reinstalls if you want to install anyway."] - else unless dryRun $ warn verbosity - "Note that reinstalls are always dangerous. Continuing anyway..." - - -- If we are explicitly told to not download anything, check that all packages - -- are already fetched. - let offline = fromFlagOrDefault False (installOfflineMode installFlags) - when offline $ do - let pkgs = [ sourcePkg - | InstallPlan.Configured (ConfiguredPackage sourcePkg _ _ _) - <- InstallPlan.toList installPlan ] - notFetched <- fmap (map packageInfoId) - . filterM (fmap isNothing . checkFetched . packageSource) - $ pkgs - unless (null notFetched) $ - die $ "Can't download packages in offline mode. " - ++ "Must download the following packages to proceed:\n" - ++ intercalate ", " (map display notFetched) - ++ "\nTry using 'cabal fetch'." - - where - nothingToInstall = null (InstallPlan.ready installPlan) - - dryRun = fromFlag (installDryRun installFlags) - overrideReinstall = fromFlag (installOverrideReinstall installFlags) - ---TODO: this type is too specific -linearizeInstallPlan :: InstalledPackageIndex - -> InstallPlan - -> [(ReadyPackage, PackageStatus)] -linearizeInstallPlan installedPkgIndex plan = - unfoldr next plan - where - next plan' = case InstallPlan.ready plan' of - [] -> Nothing - (pkg:_) -> Just ((pkg, status), plan'') - where - pkgid = installedUnitId pkg - status = packageStatus installedPkgIndex pkg - ipkg = Installed.emptyInstalledPackageInfo { - Installed.sourcePackageId = packageId pkg, - Installed.installedUnitId = pkgid - } - plan'' = InstallPlan.completed pkgid (Just ipkg) - (BuildOk DocsNotTried TestsNotTried (Just ipkg)) - (InstallPlan.processing [pkg] plan') - --FIXME: This is a bit of a hack, - -- pretending that each package is installed - -- It's doubly a hack because the installed package ID - -- didn't get updated... 
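linearizeInstallPlan above drives the plan with unfoldr: repeatedly take a ready package, pretend it is installed, and emit it. A toy, self-contained illustration of that pattern; PlanState and the package names are invented, and the real InstallPlan tracks far more state:

import Data.List (unfoldr)

-- A package is "ready" once all of its dependencies have been completed.
data PlanState = PlanState
  { done    :: [String]
  , pending :: [(String, [String])]   -- (package, its dependencies)
  }

ready :: PlanState -> [String]
ready st = [ p | (p, deps) <- pending st, all (`elem` done st) deps ]

complete :: String -> PlanState -> PlanState
complete p st = st { done    = p : done st
                   , pending = filter ((/= p) . fst) (pending st) }

-- The unfoldr step: emit the first ready package and mark it completed,
-- stopping when nothing is ready (either all done or the plan is stuck).
linearize :: PlanState -> [String]
linearize = unfoldr step
  where
    step st = case ready st of
                []      -> Nothing
                (p : _) -> Just (p, complete p st)

main :: IO ()
main = print (linearize initial)   -- ["base","text","app"]
  where
    initial = PlanState [] [("base", []), ("text", ["base"]), ("app", ["base", "text"])]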
- -data PackageStatus = NewPackage - | NewVersion [Version] - | Reinstall [UnitId] [PackageChange] - -type PackageChange = MergeResult PackageIdentifier PackageIdentifier - -extractReinstalls :: PackageStatus -> [UnitId] -extractReinstalls (Reinstall ipids _) = ipids -extractReinstalls _ = [] - -packageStatus :: InstalledPackageIndex - -> ReadyPackage - -> PackageStatus -packageStatus installedPkgIndex cpkg = - case PackageIndex.lookupPackageName installedPkgIndex - (packageName cpkg) of - [] -> NewPackage - ps -> case filter ((== packageId cpkg) - . Installed.sourcePackageId) (concatMap snd ps) of - [] -> NewVersion (map fst ps) - pkgs@(pkg:_) -> Reinstall (map Installed.installedUnitId pkgs) - (changes pkg cpkg) - - where - - changes :: Installed.InstalledPackageInfo - -> ReadyPackage - -> [MergeResult PackageIdentifier PackageIdentifier] - changes pkg pkg' = filter changed $ - mergeBy (comparing packageName) - -- deps of installed pkg - (resolveInstalledIds $ Installed.depends pkg) - -- deps of configured pkg - (resolveInstalledIds $ CD.nonSetupDeps (depends pkg')) - - -- convert to source pkg ids via index - resolveInstalledIds :: [UnitId] -> [PackageIdentifier] - resolveInstalledIds = - nub - . sort - . map Installed.sourcePackageId - . catMaybes - . map (PackageIndex.lookupUnitId installedPkgIndex) - - changed (InBoth pkgid pkgid') = pkgid /= pkgid' - changed _ = True - -printPlan :: Bool -- is dry run - -> Verbosity - -> [(ReadyPackage, PackageStatus)] - -> SourcePackageDb - -> IO () -printPlan dryRun verbosity plan sourcePkgDb = case plan of - [] -> return () - pkgs - | verbosity >= Verbosity.verbose -> putStr $ unlines $ - ("In order, the following " ++ wouldWill ++ " be installed:") - : map showPkgAndReason pkgs - | otherwise -> notice verbosity $ unlines $ - ("In order, the following " ++ wouldWill - ++ " be installed (use -v for more details):") - : map showPkg pkgs - where - wouldWill | dryRun = "would" - | otherwise = "will" - - showPkg (pkg, _) = display (packageId pkg) ++ - showLatest (pkg) - - showPkgAndReason (ReadyPackage pkg' _, pr) = display (packageId pkg') ++ - showLatest pkg' ++ - showFlagAssignment (nonDefaultFlags pkg') ++ - showStanzas (stanzas pkg') ++ - showDep pkg' ++ - case pr of - NewPackage -> " (new package)" - NewVersion _ -> " (new version)" - Reinstall _ cs -> " (reinstall)" ++ case cs of - [] -> "" - diff -> " (changes: " ++ intercalate ", " (map change diff) - ++ ")" - - showLatest :: Package srcpkg => srcpkg -> String - showLatest pkg = case mLatestVersion of - Just latestVersion -> - if packageVersion pkg < latestVersion - then (" (latest: " ++ display latestVersion ++ ")") - else "" - Nothing -> "" - where - mLatestVersion :: Maybe Version - mLatestVersion = case SourcePackageIndex.lookupPackageName - (packageIndex sourcePkgDb) - (packageName pkg) of - [] -> Nothing - x -> Just $ packageVersion $ last x - - toFlagAssignment :: [Flag] -> FlagAssignment - toFlagAssignment = map (\ f -> (flagName f, flagDefault f)) - - nonDefaultFlags :: ConfiguredPackage -> FlagAssignment - nonDefaultFlags (ConfiguredPackage spkg fa _ _) = - let defaultAssignment = - toFlagAssignment - (genPackageFlags (Source.packageDescription spkg)) - in fa \\ defaultAssignment - - stanzas :: ConfiguredPackage -> [OptionalStanza] - stanzas (ConfiguredPackage _ _ sts _) = sts - - showStanzas :: [OptionalStanza] -> String - showStanzas = concatMap ((' ' :) . 
showStanza) - showStanza TestStanzas = "*test" - showStanza BenchStanzas = "*bench" - - showFlagAssignment :: FlagAssignment -> String - showFlagAssignment = concatMap ((' ' :) . showFlagValue) - showFlagValue (f, True) = '+' : showFlagName f - showFlagValue (f, False) = '-' : showFlagName f - showFlagName (FlagName f) = f - - change (OnlyInLeft pkgid) = display pkgid ++ " removed" - change (InBoth pkgid pkgid') = display pkgid ++ " -> " - ++ display (packageVersion pkgid') - change (OnlyInRight pkgid') = display pkgid' ++ " added" - - showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps - = " (via: " ++ unwords (map display rdeps) ++ ")" - | otherwise = "" - - revDepGraphEdges :: [(PackageId, PackageId)] - revDepGraphEdges = [ (rpid, packageId pkg) - | (pkg@(ReadyPackage _ deps), _) <- plan - , rpid <- Installed.sourcePackageId <$> CD.flatDeps deps ] - - revDeps :: Map.Map PackageId [PackageId] - revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges) - --- ------------------------------------------------------------ --- * Post installation stuff --- ------------------------------------------------------------ - --- | Report a solver failure. This works slightly differently to --- 'postInstallActions', as (by definition) we don't have an install plan. -reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String - -> IO () -reportPlanningFailure verbosity - (_, _, comp, platform, _, _, _ - ,_, configFlags, _, installFlags, _) - (_, sourcePkgDb, _, _, pkgSpecifiers, _) - message = do - - when reportFailure $ do - - -- Only create reports for explicitly named packages - let pkgids = filter - (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $ - mapMaybe theSpecifiedPackage pkgSpecifiers - - buildReports = BuildReports.fromPlanningFailure platform - (compilerId comp) pkgids - (configConfigurationsFlags configFlags) - - when (not (null buildReports)) $ - info verbosity $ - "Solver failure will be reported for " - ++ intercalate "," (map display pkgids) - - -- Save reports - BuildReports.storeLocal (compilerInfo comp) - (fromNubList $ installSummaryFile installFlags) - buildReports platform - - -- Save solver log - case logFile of - Nothing -> return () - Just template -> forM_ pkgids $ \pkgid -> - let env = initialPathTemplateEnv pkgid dummyIpid - (compilerInfo comp) platform - path = fromPathTemplate $ substPathTemplate env template - in writeFile path message - - where - reportFailure = fromFlag (installReportPlanningFailure installFlags) - logFile = flagToMaybe (installLogFile installFlags) - - -- A IPID is calculated from the transitive closure of - -- dependencies, but when the solver fails we don't have that. - -- So we fail. - dummyIpid = error "reportPlanningFailure: installed package ID not available" - --- | If a 'PackageSpecifier' refers to a single package, return Just that --- package. -theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId -theSpecifiedPackage pkgSpec = - case pkgSpec of - NamedPackage name [PackageConstraintVersion name' version] - | name == name' -> PackageIdentifier name <$> trivialRange version - NamedPackage _ _ -> Nothing - SpecificSourcePackage pkg -> Just $ packageId pkg - where - -- | If a range includes only a single version, return Just that version. 
- trivialRange :: VersionRange -> Maybe Version - trivialRange = foldVersionRange - Nothing - Just -- "== v" - (\_ -> Nothing) - (\_ -> Nothing) - (\_ _ -> Nothing) - (\_ _ -> Nothing) - --- | Various stuff we do after successful or unsuccessfully installing a bunch --- of packages. This includes: --- --- * build reporting, local and remote --- * symlinking binaries --- * updating indexes --- * updating world file --- * error reporting --- -postInstallActions :: Verbosity - -> InstallArgs - -> [UserTarget] - -> InstallPlan - -> IO () -postInstallActions verbosity - (packageDBs, _, comp, platform, conf, useSandbox, mSandboxPkgInfo - ,globalFlags, configFlags, _, installFlags, _) - targets installPlan = do - - unless oneShot $ - World.insert verbosity worldFile - --FIXME: does not handle flags - [ World.WorldPkgInfo dep [] - | UserTargetNamed dep <- targets ] - - let buildReports = BuildReports.fromInstallPlan platform (compilerId comp) - installPlan - BuildReports.storeLocal (compilerInfo comp) - (fromNubList $ installSummaryFile installFlags) - buildReports - platform - when (reportingLevel >= AnonymousReports) $ - BuildReports.storeAnonymous buildReports - when (reportingLevel == DetailedReports) $ - storeDetailedBuildReports verbosity logsDir buildReports - - regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox - configFlags installFlags installPlan - - symlinkBinaries verbosity platform comp configFlags installFlags installPlan - - printBuildFailures installPlan - - updateSandboxTimestampsFile useSandbox mSandboxPkgInfo - comp platform installPlan - - where - reportingLevel = fromFlag (installBuildReports installFlags) - logsDir = fromFlag (globalLogsDir globalFlags) - oneShot = fromFlag (installOneShot installFlags) - worldFile = fromFlag $ globalWorldFile globalFlags - -storeDetailedBuildReports :: Verbosity -> FilePath - -> [(BuildReports.BuildReport, Maybe Repo)] -> IO () -storeDetailedBuildReports verbosity logsDir reports = sequence_ - [ do dotCabal <- defaultCabalDir - let logFileName = display (BuildReports.package report) <.> "log" - logFile = logsDir logFileName - reportsDir = dotCabal "reports" remoteRepoName remoteRepo - reportFile = reportsDir logFileName - - handleMissingLogFile $ do - buildLog <- readFile logFile - createDirectoryIfMissing True reportsDir -- FIXME - writeFile reportFile (show (BuildReports.show report, buildLog)) - - | (report, Just repo) <- reports - , Just remoteRepo <- [maybeRepoRemote repo] - , isLikelyToHaveLogFile (BuildReports.installOutcome report) ] - - where - isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True - isLikelyToHaveLogFile BuildReports.BuildFailed {} = True - isLikelyToHaveLogFile BuildReports.InstallFailed {} = True - isLikelyToHaveLogFile BuildReports.InstallOk {} = True - isLikelyToHaveLogFile _ = False - - handleMissingLogFile = Exception.handleJust missingFile $ \ioe -> - warn verbosity $ "Missing log file for build report: " - ++ fromMaybe "" (ioeGetFileName ioe) - - missingFile ioe - | isDoesNotExistError ioe = Just ioe - missingFile _ = Nothing - - -regenerateHaddockIndex :: Verbosity - -> [PackageDB] - -> Compiler - -> Platform - -> ProgramConfiguration - -> UseSandbox - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> IO () -regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox - configFlags installFlags installPlan - | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do - - defaultDirs <- InstallDirs.defaultInstallDirs - (compilerFlavor comp) - (fromFlag 
(configUserInstall configFlags)) - True - let indexFileTemplate = fromFlag (installHaddockIndex installFlags) - indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate - - notice verbosity $ - "Updating documentation index " ++ indexFile - - --TODO: might be nice if the install plan gave us the new InstalledPackageInfo - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - Haddock.regenerateHaddockIndex verbosity installedPkgIndex conf indexFile - - | otherwise = return () - where - haddockIndexFileIsRequested = - fromFlag (installDocumentation installFlags) - && isJust (flagToMaybe (installHaddockIndex installFlags)) - - -- We want to regenerate the index if some new documentation was actually - -- installed. Since the index can be only per-user or per-sandbox (see - -- #1337), we don't do it for global installs or special cases where we're - -- installing into a specific db. - shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall) - && someDocsWereInstalled installPlan - where - someDocsWereInstalled = any installedDocs . InstallPlan.toList - normalUserInstall = (UserPackageDB `elem` packageDBs) - && all (not . isSpecificPackageDB) packageDBs - - installedDocs (InstallPlan.Installed _ _ (BuildOk DocsOk _ _)) = True - installedDocs _ = False - isSpecificPackageDB (SpecificPackageDB _) = True - isSpecificPackageDB _ = False - - substHaddockIndexFileName defaultDirs = fromPathTemplate - . substPathTemplate env - where - env = env0 ++ installDirsTemplateEnv absoluteDirs - env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp) - ++ InstallDirs.platformTemplateEnv platform - ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform - absoluteDirs = InstallDirs.substituteInstallDirTemplates - env0 templateDirs - templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - - -symlinkBinaries :: Verbosity - -> Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> IO () -symlinkBinaries verbosity platform comp configFlags installFlags plan = do - failed <- InstallSymlink.symlinkBinaries platform comp - configFlags installFlags - plan - case failed of - [] -> return () - [(_, exe, path)] -> - warn verbosity $ - "could not create a symlink in " ++ bindir ++ " for " - ++ exe ++ " because the file exists there already but is not " - ++ "managed by cabal. You can create a symlink for this executable " - ++ "manually if you wish. The executable file has been installed at " - ++ path - exes -> - warn verbosity $ - "could not create symlinks in " ++ bindir ++ " for " - ++ intercalate ", " [ exe | (_, exe, _) <- exes ] - ++ " because the files exist there already and are not " - ++ "managed by cabal. You can create symlinks for these executables " - ++ "manually if you wish. The executable files have been installed at " - ++ intercalate ", " [ path | (_, _, path) <- exes ] - where - bindir = fromFlag (installSymlinkBinDir installFlags) - - -printBuildFailures :: InstallPlan - -> IO () -printBuildFailures plan = - case [ (pkg, reason) - | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of - [] -> return () - failed -> die . unlines - $ "Error: some packages failed to install:" - : [ display (packageId pkg) ++ printFailureReason reason - | (pkg, reason) <- failed ] - where - printFailureReason reason = case reason of - DependentFailed pkgid -> " depends on " ++ display pkgid - ++ " which failed to install." 
- DownloadFailed e -> " failed while downloading the package." - ++ showException e - UnpackFailed e -> " failed while unpacking the package." - ++ showException e - ConfigureFailed e -> " failed during the configure step." - ++ showException e - BuildFailed e -> " failed during the building phase." - ++ showException e - TestsFailed e -> " failed during the tests phase." - ++ showException e - InstallFailed e -> " failed during the final install step." - ++ showException e - - -- This will never happen, but we include it for completeness - PlanningFailed -> " failed during the planning phase." - - showException e = " The exception was:\n " ++ show e ++ maybeOOM e -#ifdef mingw32_HOST_OS - maybeOOM _ = "" -#else - maybeOOM e = maybe "" onExitFailure (fromException e) - onExitFailure (ExitFailure n) - | n == 9 || n == -9 = - "\nThis may be due to an out-of-memory condition." - onExitFailure _ = "" -#endif - - --- | If we're working inside a sandbox and some add-source deps were installed, --- update the timestamps of those deps. -updateSandboxTimestampsFile :: UseSandbox -> Maybe SandboxPackageInfo - -> Compiler -> Platform - -> InstallPlan - -> IO () -updateSandboxTimestampsFile (UseSandbox sandboxDir) - (Just (SandboxPackageInfo _ _ _ allAddSourceDeps)) - comp platform installPlan = - withUpdateTimestamps sandboxDir (compilerId comp) platform $ \_ -> do - let allInstalled = [ pkg | InstallPlan.Installed pkg _ _ - <- InstallPlan.toList installPlan ] - allSrcPkgs = [ pkg | ReadyPackage (ConfiguredPackage pkg _ _ _) _ - <- allInstalled ] - allPaths = [ pth | LocalUnpackedPackage pth - <- map packageSource allSrcPkgs] - allPathsCanonical <- mapM tryCanonicalizePath allPaths - return $! filter (`S.member` allAddSourceDeps) allPathsCanonical - -updateSandboxTimestampsFile _ _ _ _ _ = return () - --- ------------------------------------------------------------ --- * Actually do the installations --- ------------------------------------------------------------ - -data InstallMisc = InstallMisc { - rootCmd :: Maybe FilePath, - libVersion :: Maybe Version - } - --- | If logging is enabled, contains location of the log file and the verbosity --- level for logging. -type UseLogFile = Maybe (PackageIdentifier -> UnitId -> FilePath, Verbosity) - -performInstallations :: Verbosity - -> InstallArgs - -> InstalledPackageIndex - -> InstallPlan - -> IO InstallPlan -performInstallations verbosity - (packageDBs, repoCtxt, comp, platform, conf, useSandbox, _, - globalFlags, configFlags, configExFlags, installFlags, haddockFlags) - installedPkgIndex installPlan = do - - -- With 'install -j' it can be a bit hard to tell whether a sandbox is used. 
- whenUsingSandbox useSandbox $ \sandboxDir -> - when parallelInstall $ - notice verbosity $ "Notice: installing into a sandbox located at " - ++ sandboxDir - - jobControl <- if parallelInstall then newParallelJobControl - else newSerialJobControl - buildLimit <- newJobLimit numJobs - fetchLimit <- newJobLimit (min numJobs numFetchJobs) - installLock <- newLock -- serialise installation - cacheLock <- newLock -- serialise access to setup exe cache - - executeInstallPlan verbosity comp jobControl useLogFile installPlan $ \rpkg -> - installReadyPackage platform cinfo configFlags - rpkg $ \configFlags' src pkg pkgoverride -> - fetchSourcePackage verbosity repoCtxt fetchLimit src $ \src' -> - installLocalPackage verbosity buildLimit - (packageId pkg) src' distPref $ \mpath -> - installUnpackedPackage verbosity buildLimit installLock numJobs - (setupScriptOptions installedPkgIndex - cacheLock rpkg) - miscOptions configFlags' - installFlags haddockFlags - cinfo platform pkg rpkg pkgoverride mpath useLogFile - - where - cinfo = compilerInfo comp - - numJobs = determineNumJobs (installNumJobs installFlags) - numFetchJobs = 2 - parallelInstall = numJobs >= 2 - distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) - (configDistPref configFlags) - - setupScriptOptions index lock rpkg = - configureSetupScript - packageDBs - comp - platform - conf - distPref - (chooseCabalVersion configFlags (libVersion miscOptions)) - (Just lock) - parallelInstall - index - (Just rpkg) - - reportingLevel = fromFlag (installBuildReports installFlags) - logsDir = fromFlag (globalLogsDir globalFlags) - - -- Should the build output be written to a log file instead of stdout? - useLogFile :: UseLogFile - useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName) - logFileTemplate - where - installLogFile' = flagToMaybe $ installLogFile installFlags - defaultTemplate = toPathTemplate $ logsDir "$pkgid" <.> "log" - - -- If the user has specified --remote-build-reporting=detailed, use the - -- default log file location. If the --build-log option is set, use the - -- provided location. Otherwise don't use logging, unless building in - -- parallel (in which case the default location is used). - logFileTemplate :: Maybe PathTemplate - logFileTemplate - | useDefaultTemplate = Just defaultTemplate - | otherwise = installLogFile' - - -- If the user has specified --remote-build-reporting=detailed or - -- --build-log, use more verbose logging. - loggingVerbosity :: Verbosity - loggingVerbosity | overrideVerbosity = max Verbosity.verbose verbosity - | otherwise = verbosity - - useDefaultTemplate :: Bool - useDefaultTemplate - | reportingLevel == DetailedReports = True - | isJust installLogFile' = False - | parallelInstall = True - | otherwise = False - - overrideVerbosity :: Bool - overrideVerbosity - | reportingLevel == DetailedReports = True - | isJust installLogFile' = True - | parallelInstall = False - | otherwise = False - - substLogFileName :: PathTemplate -> PackageIdentifier -> UnitId -> FilePath - substLogFileName template pkg ipid = fromPathTemplate - . substPathTemplate env - $ template - where env = initialPathTemplateEnv (packageId pkg) - ipid - (compilerInfo comp) platform - - miscOptions = InstallMisc { - rootCmd = if fromFlag (configUserInstall configFlags) - || (isUseSandbox useSandbox) - then Nothing -- ignore --root-cmd if --user - -- or working inside a sandbox. 
- else flagToMaybe (installRootCmd installFlags), - libVersion = flagToMaybe (configCabalVersion configExFlags) - } - - -executeInstallPlan :: Verbosity - -> Compiler - -> JobControl IO (PackageId, UnitId, BuildResult) - -> UseLogFile - -> InstallPlan - -> (ReadyPackage -> IO BuildResult) - -> IO InstallPlan -executeInstallPlan verbosity _comp jobCtl useLogFile plan0 installPkg = - tryNewTasks 0 plan0 - where - tryNewTasks taskCount plan = do - case InstallPlan.ready plan of - [] | taskCount == 0 -> return plan - | otherwise -> waitForTasks taskCount plan - pkgs -> do - sequence_ - [ do info verbosity $ "Ready to install " ++ display pkgid - spawnJob jobCtl $ do - buildResult <- installPkg pkg - let ipid = case buildResult of - Right (BuildOk _ _ (Just ipi)) -> - Installed.installedUnitId ipi - _ -> mkUnitId (display (packageId pkg)) - return (packageId pkg, ipid, buildResult) - | pkg <- pkgs - , let pkgid = packageId pkg ] - - let taskCount' = taskCount + length pkgs - plan' = InstallPlan.processing pkgs plan - waitForTasks taskCount' plan' - - waitForTasks taskCount plan = do - info verbosity $ "Waiting for install task to finish..." - (pkgid, ipid, buildResult) <- collectJob jobCtl - printBuildResult pkgid ipid buildResult - let taskCount' = taskCount-1 - plan' = updatePlan pkgid buildResult plan - tryNewTasks taskCount' plan' - - updatePlan :: PackageIdentifier -> BuildResult -> InstallPlan - -> InstallPlan - updatePlan pkgid (Right buildSuccess@(BuildOk _ _ mipkg)) = - InstallPlan.completed (Source.fakeUnitId pkgid) - mipkg buildSuccess - - updatePlan pkgid (Left buildFailure) = - InstallPlan.failed (Source.fakeUnitId pkgid) - buildFailure depsFailure - where - depsFailure = DependentFailed pkgid - -- So this first pkgid failed for whatever reason (buildFailure). - -- All the other packages that depended on this pkgid, which we - -- now cannot build, we mark as failing due to 'DependentFailed' - -- which kind of means it was not their fault. - - -- Print build log if something went wrong, and 'Installed $PKGID' - -- otherwise. - printBuildResult :: PackageId -> UnitId -> BuildResult -> IO () - printBuildResult pkgid ipid buildResult = case buildResult of - (Right _) -> notice verbosity $ "Installed " ++ display pkgid - (Left _) -> do - notice verbosity $ "Failed to install " ++ display pkgid - when (verbosity >= normal) $ - case useLogFile of - Nothing -> return () - Just (mkLogFileName, _) -> do - let logName = mkLogFileName pkgid ipid - putStr $ "Build log ( " ++ logName ++ " ):\n" - printFile logName - - printFile :: FilePath -> IO () - printFile path = readFile path >>= putStr - --- | Call an installer for an 'SourcePackage' but override the configure --- flags with the ones given by the 'ReadyPackage'. In particular the --- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly --- versioned package dependencies. So we ignore any previous partial flag --- assignment or dependency constraints and use the new ones. --- --- NB: when updating this function, don't forget to also update --- 'configurePackage' in D.C.Configure. 
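-- Illustrative sketch (not from the cabal-install sources): the spawn/collect
-- shape of the 'executeInstallPlan' loop above, reduced to a toy list of
-- independent jobs.  'Job' and 'runJobs' are made-up names; the real
-- JobControl additionally serialises or limits jobs and handles per-job
-- exceptions, which this sketch omits.
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (newChan, writeChan, readChan)
import Control.Monad (forM_, replicateM)

data Job = Job { jobName :: String, jobAction :: IO Bool }

runJobs :: [Job] -> IO [(String, Bool)]
runJobs jobs = do
  results <- newChan
  forM_ jobs $ \j -> forkIO $ do       -- spawn: one thread per ready job
    ok <- jobAction j
    writeChan results (jobName j, ok)  -- report the result back
  replicateM (length jobs) (readChan results)  -- collect: one result per spawn

main :: IO ()
main = runJobs [ Job "pkg-a" (return True)
               , Job "pkg-b" (return False) ] >>= print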
-installReadyPackage :: Platform -> CompilerInfo - -> ConfigFlags - -> ReadyPackage - -> (ConfigFlags -> PackageLocation (Maybe FilePath) - -> PackageDescription - -> PackageDescriptionOverride - -> a) - -> a -installReadyPackage platform cinfo configFlags - (ReadyPackage (ConfiguredPackage - (SourcePackage _ gpkg source pkgoverride) - flags stanzas _) - deps) - installPkg = - installPkg configFlags { - configConfigurationsFlags = flags, - -- We generate the legacy constraints as well as the new style precise deps. - -- In the end only one set gets passed to Setup.hs configure, depending on - -- the Cabal version we are talking to. - configConstraints = [ thisPackageVersion (packageId deppkg) - | deppkg <- CD.nonSetupDeps deps ], - configDependencies = [ (packageName (Installed.sourcePackageId deppkg), - Installed.installedUnitId deppkg) - | deppkg <- CD.nonSetupDeps deps ], - -- Use '--exact-configuration' if supported. - configExactConfiguration = toFlag True, - configBenchmarks = toFlag False, - configTests = toFlag (TestStanzas `elem` stanzas) - } source pkg pkgoverride - where - pkg = case finalizePackageDescription flags - (const True) - platform cinfo [] (enableStanzas stanzas gpkg) of - Left _ -> error "finalizePackageDescription ReadyPackage failed" - Right (desc, _) -> desc - -fetchSourcePackage - :: Verbosity - -> RepoContext - -> JobLimit - -> PackageLocation (Maybe FilePath) - -> (PackageLocation FilePath -> IO BuildResult) - -> IO BuildResult -fetchSourcePackage verbosity repoCtxt fetchLimit src installPkg = do - fetched <- checkFetched src - case fetched of - Just src' -> installPkg src' - Nothing -> onFailure DownloadFailed $ do - loc <- withJobLimit fetchLimit $ - fetchPackage verbosity repoCtxt src - installPkg loc - - -installLocalPackage - :: Verbosity - -> JobLimit - -> PackageIdentifier -> PackageLocation FilePath -> FilePath - -> (Maybe FilePath -> IO BuildResult) - -> IO BuildResult -installLocalPackage verbosity jobLimit pkgid location distPref installPkg = - - case location of - - LocalUnpackedPackage dir -> - installPkg (Just dir) - - LocalTarballPackage tarballPath -> - installLocalTarballPackage verbosity jobLimit - pkgid tarballPath distPref installPkg - - RemoteTarballPackage _ tarballPath -> - installLocalTarballPackage verbosity jobLimit - pkgid tarballPath distPref installPkg - - RepoTarballPackage _ _ tarballPath -> - installLocalTarballPackage verbosity jobLimit - pkgid tarballPath distPref installPkg - - -installLocalTarballPackage - :: Verbosity - -> JobLimit - -> PackageIdentifier -> FilePath -> FilePath - -> (Maybe FilePath -> IO BuildResult) - -> IO BuildResult -installLocalTarballPackage verbosity jobLimit pkgid - tarballPath distPref installPkg = do - tmp <- getTemporaryDirectory - withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath -> - onFailure UnpackFailed $ do - let relUnpackedPath = display pkgid - absUnpackedPath = tmpDirPath relUnpackedPath - descFilePath = absUnpackedPath - display (packageName pkgid) <.> "cabal" - withJobLimit jobLimit $ do - info verbosity $ "Extracting " ++ tarballPath - ++ " to " ++ tmpDirPath ++ "..." - extractTarGzFile tmpDirPath relUnpackedPath tarballPath - exists <- doesFileExist descFilePath - when (not exists) $ - die $ "Package .cabal file not found: " ++ show descFilePath - maybeRenameDistDir absUnpackedPath - - installPkg (Just absUnpackedPath) - - where - -- 'cabal sdist' puts pre-generated files in the 'dist' - -- directory. 
This fails when a nonstandard build directory name - -- is used (as is the case with sandboxes), so we need to rename - -- the 'dist' dir here. - -- - -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still - -- fails even with this workaround. We probably can live with that. - maybeRenameDistDir :: FilePath -> IO () - maybeRenameDistDir absUnpackedPath = do - let distDirPath = absUnpackedPath defaultDistPref - distDirPathTmp = absUnpackedPath (defaultDistPref ++ "-tmp") - distDirPathNew = absUnpackedPath distPref - distDirExists <- doesDirectoryExist distDirPath - when (distDirExists - && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do - -- NB: we need to handle the case when 'distDirPathNew' is a - -- subdirectory of 'distDirPath' (e.g. the former is - -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist'). - debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '" - ++ distDirPathTmp ++ "'." - renameDirectory distDirPath distDirPathTmp - when (distDirPath `isPrefixOf` distDirPathNew) $ - createDirectoryIfMissingVerbose verbosity False distDirPath - debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '" - ++ distDirPathNew ++ "'." - renameDirectory distDirPathTmp distDirPathNew - -installUnpackedPackage - :: Verbosity - -> JobLimit - -> Lock - -> Int - -> SetupScriptOptions - -> InstallMisc - -> ConfigFlags - -> InstallFlags - -> HaddockFlags - -> CompilerInfo - -> Platform - -> PackageDescription - -> ReadyPackage - -> PackageDescriptionOverride - -> Maybe FilePath -- ^ Directory to change to before starting the installation. - -> UseLogFile -- ^ File to log output to (if any) - -> IO BuildResult -installUnpackedPackage verbosity buildLimit installLock numJobs - scriptOptions miscOptions - configFlags installFlags haddockFlags - cinfo platform pkg rpkg pkgoverride workingDir useLogFile = do - - -- Override the .cabal file if necessary - case pkgoverride of - Nothing -> return () - Just pkgtxt -> do - let descFilePath = fromMaybe "." workingDir - display (packageName pkgid) <.> "cabal" - info verbosity $ - "Updating " ++ display (packageName pkgid) <.> "cabal" - ++ " with the latest revision from the index." - writeFileAtomic descFilePath pkgtxt - - -- Compute the IPID - let flags (ReadyPackage (ConfiguredPackage _ x _ _) _) = x - cid = Configure.computeComponentId (PackageDescription.package pkg) CLibName - (map (\(SimpleUnitId cid0) -> cid0) (CD.libraryDeps (depends rpkg))) (flags rpkg) - ipid = SimpleUnitId cid - - -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if - -- the setup script was compiled against an old version of the Cabal lib). - configFlags' <- addDefaultInstallDirs ipid configFlags - -- Filter out flags not supported by the old versions of the Cabal lib. - let configureFlags :: Version -> ConfigFlags - configureFlags = filterConfigureFlags configFlags' { - configVerbosity = toFlag verbosity' - } - - -- Path to the optional log file. - mLogPath <- maybeLogPath ipid - - logDirChange (maybe putStr appendFile mLogPath) workingDir $ do - -- Configure phase - onFailure ConfigureFailed $ withJobLimit buildLimit $ do - when (numJobs > 1) $ notice verbosity $ - "Configuring " ++ display pkgid ++ "..." - setup configureCommand configureFlags mLogPath - - -- Build phase - onFailure BuildFailed $ do - when (numJobs > 1) $ notice verbosity $ - "Building " ++ display pkgid ++ "..." 
- setup buildCommand' buildFlags mLogPath - - -- Doc generation phase - docsResult <- if shouldHaddock - then (do setup haddockCommand haddockFlags' mLogPath - return DocsOk) - `catchIO` (\_ -> return DocsFailed) - `catchExit` (\_ -> return DocsFailed) - else return DocsNotTried - - -- Tests phase - onFailure TestsFailed $ do - when (testsEnabled && PackageDescription.hasTests pkg) $ - setup Cabal.testCommand testFlags mLogPath - - let testsResult | testsEnabled = TestsOk - | otherwise = TestsNotTried - - -- Install phase - onFailure InstallFailed $ criticalSection installLock $ do - -- Capture installed package configuration file - maybePkgConf <- maybeGenPkgConf mLogPath - - -- Actual installation - withWin32SelfUpgrade verbosity ipid configFlags - cinfo platform pkg $ do - case rootCmd miscOptions of - (Just cmd) -> reexec cmd - Nothing -> do - setup Cabal.copyCommand copyFlags mLogPath - when shouldRegister $ do - setup Cabal.registerCommand registerFlags mLogPath - return (Right (BuildOk docsResult testsResult maybePkgConf)) - - where - pkgid = packageId pkg - buildCommand' = buildCommand defaultProgramConfiguration - buildFlags _ = emptyBuildFlags { - buildDistPref = configDistPref configFlags, - buildVerbosity = toFlag verbosity' - } - shouldHaddock = fromFlag (installDocumentation installFlags) - haddockFlags' _ = haddockFlags { - haddockVerbosity = toFlag verbosity', - haddockDistPref = configDistPref configFlags - } - testsEnabled = fromFlag (configTests configFlags) - && fromFlagOrDefault False (installRunTests installFlags) - testFlags _ = Cabal.emptyTestFlags { - Cabal.testDistPref = configDistPref configFlags - } - copyFlags _ = Cabal.emptyCopyFlags { - Cabal.copyDistPref = configDistPref configFlags, - Cabal.copyDest = toFlag InstallDirs.NoCopyDest, - Cabal.copyVerbosity = toFlag verbosity' - } - shouldRegister = PackageDescription.hasLibs pkg - registerFlags _ = Cabal.emptyRegisterFlags { - Cabal.regDistPref = configDistPref configFlags, - Cabal.regVerbosity = toFlag verbosity' - } - verbosity' = maybe verbosity snd useLogFile - tempTemplate name = name ++ "-" ++ display pkgid - - addDefaultInstallDirs :: UnitId -> ConfigFlags -> IO ConfigFlags - addDefaultInstallDirs ipid configFlags' = do - defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False - return $ configFlags' { - configInstallDirs = fmap Cabal.Flag . 
- InstallDirs.substituteInstallDirTemplates env $ - InstallDirs.combineInstallDirs fromFlagOrDefault - defInstallDirs (configInstallDirs configFlags) - } - where - CompilerId flavor _ = compilerInfoId cinfo - env = initialPathTemplateEnv pkgid ipid cinfo platform - userInstall = fromFlagOrDefault defaultUserInstall - (configUserInstall configFlags') - - maybeGenPkgConf :: Maybe FilePath - -> IO (Maybe Installed.InstalledPackageInfo) - maybeGenPkgConf mLogPath = - if shouldRegister then do - tmp <- getTemporaryDirectory - withTempFile tmp (tempTemplate "pkgConf") $ \pkgConfFile handle -> do - hClose handle - let registerFlags' version = (registerFlags version) { - Cabal.regGenPkgConf = toFlag (Just pkgConfFile) - } - setup Cabal.registerCommand registerFlags' mLogPath - withUTF8FileContents pkgConfFile $ \pkgConfText -> - case Installed.parseInstalledPackageInfo pkgConfText of - Installed.ParseFailed perror -> pkgConfParseFailed perror - Installed.ParseOk warns pkgConf -> do - unless (null warns) $ - warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) - return (Just pkgConf) - else return Nothing - - pkgConfParseFailed :: Installed.PError -> IO a - pkgConfParseFailed perror = - die $ "Couldn't parse the output of 'setup register --gen-pkg-config':" - ++ show perror - - maybeLogPath :: UnitId -> IO (Maybe FilePath) - maybeLogPath ipid = - case useLogFile of - Nothing -> return Nothing - Just (mkLogFileName, _) -> do - let logFileName = mkLogFileName (packageId pkg) ipid - logDir = takeDirectory logFileName - unless (null logDir) $ createDirectoryIfMissing True logDir - logFileExists <- doesFileExist logFileName - when logFileExists $ removeFile logFileName - return (Just logFileName) - - setup cmd flags mLogPath = - Exception.bracket - (traverse (\path -> openFile path AppendMode) mLogPath) - (traverse_ hClose) - (\logFileHandle -> - setupWrapper verbosity - scriptOptions { useLoggingHandle = logFileHandle - , useWorkingDir = workingDir } - (Just pkg) - cmd flags []) - - reexec cmd = do - -- look for our own executable file and re-exec ourselves using a helper - -- program like sudo to elevate privileges: - self <- getExecutablePath - weExist <- doesFileExist self - if weExist - then inDir workingDir $ - rawSystemExit verbosity cmd - [self, "install", "--only" - ,"--verbose=" ++ showForCabal verbosity] - else die $ "Unable to find cabal executable at: " ++ self - - --- helper -onFailure :: (SomeException -> BuildFailure) -> IO BuildResult -> IO BuildResult -onFailure result action = - action `catches` - [ Handler $ \ioe -> handler (ioe :: IOException) - , Handler $ \exit -> handler (exit :: ExitCode) - ] - where - handler :: Exception e => e -> IO BuildResult - handler = return . Left . result . 
toException - - --- ------------------------------------------------------------ --- * Weird windows hacks --- ------------------------------------------------------------ - -withWin32SelfUpgrade :: Verbosity - -> UnitId - -> ConfigFlags - -> CompilerInfo - -> Platform - -> PackageDescription - -> IO a -> IO a -withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action -withWin32SelfUpgrade verbosity ipid configFlags cinfo platform pkg action = do - - defaultDirs <- InstallDirs.defaultInstallDirs - compFlavor - (fromFlag (configUserInstall configFlags)) - (PackageDescription.hasLibs pkg) - - Win32SelfUpgrade.possibleSelfUpgrade verbosity - (exeInstallPaths defaultDirs) action - - where - pkgid = packageId pkg - (CompilerId compFlavor _) = compilerInfoId cinfo - - exeInstallPaths defaultDirs = - [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension - | exe <- PackageDescription.executables pkg - , PackageDescription.buildable (PackageDescription.buildInfo exe) - , let exeName = prefix ++ PackageDescription.exeName exe ++ suffix - prefix = substTemplate prefixTemplate - suffix = substTemplate suffixTemplate ] - where - fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") - prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) - suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) - templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - absoluteDirs = InstallDirs.absoluteInstallDirs - pkgid ipid - cinfo InstallDirs.NoCopyDest - platform templateDirs - substTemplate = InstallDirs.fromPathTemplate - . InstallDirs.substPathTemplate env - where env = InstallDirs.initialPathTemplateEnv pkgid ipid - cinfo platform diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/InstallPlan.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/InstallPlan.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/InstallPlan.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/InstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,788 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.InstallPlan --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- Package installation plan --- ------------------------------------------------------------------------------ -module Distribution.Client.InstallPlan ( - InstallPlan, - GenericInstallPlan, - PlanPackage, - GenericPlanPackage(..), - - -- * Operations on 'InstallPlan's - new, - toList, - mapPreservingGraph, - - ready, - processing, - completed, - failed, - remove, - preexisting, - preinstalled, - - showPlanIndex, - showInstallPlan, - - -- * Checking validity of plans - valid, - closed, - consistent, - acyclic, - - -- ** Details on invalid plans - PlanProblem(..), - showPlanProblem, - problems, - - -- ** Querying the install plan - dependencyClosure, - reverseDependencyClosure, - topologicalOrder, - reverseTopologicalOrder, - ) where - -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo ) -import Distribution.Package - ( PackageIdentifier(..), 
PackageName(..), Package(..) - , HasUnitId(..), UnitId(..) ) -import Distribution.Client.Types - ( BuildSuccess, BuildFailure - , PackageFixedDeps(..), ConfiguredPackage - , GenericReadyPackage(..), fakeUnitId ) -import Distribution.Version - ( Version ) -import Distribution.Client.ComponentDeps (ComponentDeps) -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Simple.PackageIndex - ( PackageIndex ) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Client.PlanIndex - ( FakeMap ) -import qualified Distribution.Client.PlanIndex as PlanIndex -import Distribution.Text - ( display ) - -import Data.List - ( foldl', intercalate ) -import Data.Maybe - ( fromMaybe, catMaybes ) -import qualified Data.Graph as Graph -import Data.Graph (Graph) -import qualified Data.Tree as Tree -import Distribution.Compat.Binary (Binary(..)) -import GHC.Generics -import Control.Exception - ( assert ) -import qualified Data.Map as Map -import qualified Data.Traversable as T - - --- When cabal tries to install a number of packages, including all their --- dependencies it has a non-trivial problem to solve. --- --- The Problem: --- --- In general we start with a set of installed packages and a set of source --- packages. --- --- Installed packages have fixed dependencies. They have already been built and --- we know exactly what packages they were built against, including their exact --- versions. --- --- Source package have somewhat flexible dependencies. They are specified as --- version ranges, though really they're predicates. To make matters worse they --- have conditional flexible dependencies. Configuration flags can affect which --- packages are required and can place additional constraints on their --- versions. --- --- These two sets of package can and usually do overlap. There can be installed --- packages that are also available as source packages which means they could --- be re-installed if required, though there will also be packages which are --- not available as source and cannot be re-installed. Very often there will be --- extra versions available than are installed. Sometimes we may like to prefer --- installed packages over source ones or perhaps always prefer the latest --- available version whether installed or not. --- --- The goal is to calculate an installation plan that is closed, acyclic and --- consistent and where every configured package is valid. --- --- An installation plan is a set of packages that are going to be used --- together. It will consist of a mixture of installed packages and source --- packages along with their exact version dependencies. An installation plan --- is closed if for every package in the set, all of its dependencies are --- also in the set. It is consistent if for every package in the set, all --- dependencies which target that package have the same version. - --- Note that plans do not necessarily compose. You might have a valid plan for --- package A and a valid plan for package B. That does not mean the composition --- is simultaneously valid for A and B. In particular you're most likely to --- have problems with inconsistent dependencies. --- On the other hand it is true that every closed sub plan is valid. - --- | Packages in an install plan --- --- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage' --- intentionally have no 'PackageInstalled' instance. 
`This is important: --- PackageInstalled returns only library dependencies, but for package that --- aren't yet installed we know many more kinds of dependencies (setup --- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on --- dependencies in cabal-install should consider what to do with these --- dependencies; if we give a 'PackageInstalled' instance it would be too easy --- to get this wrong (and, for instance, call graph traversal functions from --- Cabal rather than from cabal-install). Instead, see 'PackageFixedDeps'. -data GenericPlanPackage ipkg srcpkg iresult ifailure - = PreExisting ipkg - | Configured srcpkg - | Processing (GenericReadyPackage srcpkg ipkg) - | Installed (GenericReadyPackage srcpkg ipkg) (Maybe ipkg) iresult - | Failed srcpkg ifailure - deriving (Eq, Show, Generic) - -instance (Binary ipkg, Binary srcpkg, Binary iresult, Binary ifailure) - => Binary (GenericPlanPackage ipkg srcpkg iresult ifailure) - -type PlanPackage = GenericPlanPackage - InstalledPackageInfo ConfiguredPackage - BuildSuccess BuildFailure - -instance (Package ipkg, Package srcpkg) => - Package (GenericPlanPackage ipkg srcpkg iresult ifailure) where - packageId (PreExisting ipkg) = packageId ipkg - packageId (Configured spkg) = packageId spkg - packageId (Processing rpkg) = packageId rpkg - packageId (Installed rpkg _ _) = packageId rpkg - packageId (Failed spkg _) = packageId spkg - -instance (PackageFixedDeps srcpkg, - PackageFixedDeps ipkg, HasUnitId ipkg) => - PackageFixedDeps (GenericPlanPackage ipkg srcpkg iresult ifailure) where - depends (PreExisting pkg) = depends pkg - depends (Configured pkg) = depends pkg - depends (Processing pkg) = depends pkg - depends (Installed pkg _ _) = depends pkg - depends (Failed pkg _) = depends pkg - -instance (HasUnitId ipkg, HasUnitId srcpkg) => - HasUnitId - (GenericPlanPackage ipkg srcpkg iresult ifailure) where - installedUnitId (PreExisting ipkg ) = installedUnitId ipkg - installedUnitId (Configured spkg) = installedUnitId spkg - installedUnitId (Processing rpkg) = installedUnitId rpkg - -- NB: defer to the actual installed package info in this case - installedUnitId (Installed _ (Just ipkg) _) = installedUnitId ipkg - installedUnitId (Installed rpkg _ _) = installedUnitId rpkg - installedUnitId (Failed spkg _) = installedUnitId spkg - - -data GenericInstallPlan ipkg srcpkg iresult ifailure = GenericInstallPlan { - planIndex :: !(PlanIndex ipkg srcpkg iresult ifailure), - planFakeMap :: !FakeMap, - planIndepGoals :: !Bool, - - -- | Cached (lazily) graph - -- - -- The 'Graph' representaion works in terms of integer node ids, so we - -- have to keep mapping to and from our meaningful nodes, which of course - -- are package ids. - -- - planGraph :: Graph, - planGraphRev :: Graph, -- ^ Reverse deps, transposed - planPkgIdOf :: Graph.Vertex -> UnitId, -- ^ mapping back to package ids - planVertexOf :: UnitId -> Graph.Vertex -- ^ mapping into node ids - } - --- | Much like 'planPkgIdOf', but mapping back to full packages. -planPkgOf :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> Graph.Vertex - -> GenericPlanPackage ipkg srcpkg iresult ifailure -planPkgOf plan v = - case PackageIndex.lookupUnitId (planIndex plan) - (planPkgIdOf plan v) of - Just pkg -> pkg - Nothing -> error "InstallPlan: internal error: planPkgOf lookup failed" - - --- | 'GenericInstallPlan' specialised to most commonly used types. 
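-- Illustrative sketch (not from the cabal-install sources): the cached
-- Data.Graph representation described above, over a made-up 'Pkg' type.
-- 'depGraph' plays the role of the planGraph / planPkgIdOf / planVertexOf
-- trio; the transposed graph answers "what depends on this?" queries.
import Data.Graph (Graph, Vertex, graphFromEdges, transposeG, reachable)

data Pkg = Pkg { pkgId :: String, pkgDeps :: [String] }

depGraph :: [Pkg] -> (Graph, Vertex -> String, String -> Maybe Vertex)
depGraph pkgs = (g, \v -> let (_, k, _) = nodeOf v in k, vertexOf)
  where
    (g, nodeOf, vertexOf) =
      graphFromEdges [ (p, pkgId p, pkgDeps p) | p <- pkgs ]

-- Everything that transitively depends on the given package; 'drop 1'
-- discards the package itself, which comes first in the preorder.
revDependents :: [Pkg] -> String -> [String]
revDependents pkgs root = case vertexOf root of
    Nothing -> []
    Just v  -> map toId (drop 1 (reachable (transposeG g) v))
  where
    (g, toId, vertexOf) = depGraph pkgs

main :: IO ()
main = print (revDependents
    [ Pkg "base" [], Pkg "text" ["base"], Pkg "aeson" ["text", "base"] ]
    "text")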
-type InstallPlan = GenericInstallPlan - InstalledPackageInfo ConfiguredPackage - BuildSuccess BuildFailure - -type PlanIndex ipkg srcpkg iresult ifailure = - PackageIndex (GenericPlanPackage ipkg srcpkg iresult ifailure) - -invariant :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => GenericInstallPlan ipkg srcpkg iresult ifailure -> Bool -invariant plan = - valid (planFakeMap plan) - (planIndepGoals plan) - (planIndex plan) - --- | Smart constructor that deals with caching the 'Graph' representation. --- -mkInstallPlan :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => PlanIndex ipkg srcpkg iresult ifailure - -> FakeMap - -> Bool - -> GenericInstallPlan ipkg srcpkg iresult ifailure -mkInstallPlan index fakeMap indepGoals = - GenericInstallPlan { - planIndex = index, - planFakeMap = fakeMap, - planIndepGoals = indepGoals, - - -- lazily cache the graph stuff: - planGraph = graph, - planGraphRev = Graph.transposeG graph, - planPkgIdOf = vertexToPkgId, - planVertexOf = fromMaybe noSuchPkgId . pkgIdToVertex - } - where - (graph, vertexToPkgId, pkgIdToVertex) = - PlanIndex.dependencyGraph fakeMap index - noSuchPkgId = internalError "package is not in the graph" - -internalError :: String -> a -internalError msg = error $ "InstallPlan: internal error: " ++ msg - -instance (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg, - Binary ipkg, Binary srcpkg, Binary iresult, Binary ifailure) - => Binary (GenericInstallPlan ipkg srcpkg iresult ifailure) where - put GenericInstallPlan { - planIndex = index, - planFakeMap = fakeMap, - planIndepGoals = indepGoals - } = put (index, fakeMap, indepGoals) - - get = do - (index, fakeMap, indepGoals) <- get - return $! mkInstallPlan index fakeMap indepGoals - -showPlanIndex :: (HasUnitId ipkg, HasUnitId srcpkg) - => PlanIndex ipkg srcpkg iresult ifailure -> String -showPlanIndex index = - intercalate "\n" (map showPlanPackage (PackageIndex.allPackages index)) - where showPlanPackage p = - showPlanPackageTag p ++ " " - ++ display (packageId p) ++ " (" - ++ display (installedUnitId p) ++ ")" - -showInstallPlan :: (HasUnitId ipkg, HasUnitId srcpkg) - => GenericInstallPlan ipkg srcpkg iresult ifailure -> String -showInstallPlan plan = - showPlanIndex (planIndex plan) ++ "\n" ++ - "fake map:\n " ++ - intercalate "\n " (map showKV (Map.toList (planFakeMap plan))) - where showKV (k,v) = display k ++ " -> " ++ display v - -showPlanPackageTag :: GenericPlanPackage ipkg srcpkg iresult ifailure -> String -showPlanPackageTag (PreExisting _) = "PreExisting" -showPlanPackageTag (Configured _) = "Configured" -showPlanPackageTag (Processing _) = "Processing" -showPlanPackageTag (Installed _ _ _) = "Installed" -showPlanPackageTag (Failed _ _) = "Failed" - --- | Build an installation plan from a valid set of resolved packages. --- -new :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => Bool - -> PlanIndex ipkg srcpkg iresult ifailure - -> Either [PlanProblem ipkg srcpkg iresult ifailure] - (GenericInstallPlan ipkg srcpkg iresult ifailure) -new indepGoals index = - -- NB: Need to pre-initialize the fake-map with pre-existing - -- packages - let isPreExisting (PreExisting _) = True - isPreExisting _ = False - fakeMap = Map.fromList - . map (\p -> (fakeUnitId (packageId p) - ,installedUnitId p)) - . 
filter isPreExisting - $ PackageIndex.allPackages index in - case problems fakeMap indepGoals index of - [] -> Right (mkInstallPlan index fakeMap indepGoals) - probs -> Left probs - -toList :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -toList = PackageIndex.allPackages . planIndex - --- | Remove packages from the install plan. This will result in an --- error if there are remaining packages that depend on any matching --- package. This is primarily useful for obtaining an install plan for --- the dependencies of a package or set of packages without actually --- installing the package itself, as when doing development. --- -remove :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => (GenericPlanPackage ipkg srcpkg iresult ifailure -> Bool) - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> Either [PlanProblem ipkg srcpkg iresult ifailure] - (GenericInstallPlan ipkg srcpkg iresult ifailure) -remove shouldRemove plan = - new (planIndepGoals plan) newIndex - where - newIndex = PackageIndex.fromList $ - filter (not . shouldRemove) (toList plan) - --- | The packages that are ready to be installed. That is they are in the --- configured state and have all their dependencies installed already. --- The plan is complete if the result is @[]@. --- -ready :: forall ipkg srcpkg iresult ifailure. PackageFixedDeps srcpkg - => GenericInstallPlan ipkg srcpkg iresult ifailure - -> [GenericReadyPackage srcpkg ipkg] -ready plan = assert check readyPackages - where - check = if null readyPackages && null processingPackages - then null configuredPackages - else True - configuredPackages = [ pkg | Configured pkg <- toList plan ] - processingPackages = [ pkg | Processing pkg <- toList plan] - - readyPackages :: [GenericReadyPackage srcpkg ipkg] - readyPackages = catMaybes (map (lookupReadyPackage plan) configuredPackages) - -lookupReadyPackage :: forall ipkg srcpkg iresult ifailure. - PackageFixedDeps srcpkg - => GenericInstallPlan ipkg srcpkg iresult ifailure - -> srcpkg - -> Maybe (GenericReadyPackage srcpkg ipkg) -lookupReadyPackage plan pkg = do - deps <- hasAllInstalledDeps pkg - return (ReadyPackage pkg deps) - where - - hasAllInstalledDeps :: srcpkg -> Maybe (ComponentDeps [ipkg]) - hasAllInstalledDeps = T.mapM (mapM isInstalledDep) . depends - - isInstalledDep :: UnitId -> Maybe ipkg - isInstalledDep pkgid = - -- NB: Need to check if the ID has been updated in planFakeMap, in which - -- case we might be dealing with an old pointer - case PlanIndex.fakeLookupUnitId - (planFakeMap plan) (planIndex plan) pkgid - of - Just (PreExisting ipkg) -> Just ipkg - Just (Configured _) -> Nothing - Just (Processing _) -> Nothing - Just (Installed _ (Just ipkg) _) -> Just ipkg - Just (Installed _ Nothing _) -> internalError depOnNonLib - Just (Failed _ _) -> internalError depOnFailed - Nothing -> internalError incomplete - incomplete = "install plan is not closed" - depOnFailed = "configured package depends on failed package" - depOnNonLib = "configured package depends on a non-library package" - --- | Marks packages in the graph as currently processing (e.g. building). --- --- * The package must exist in the graph and be in the configured state. 
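-- Illustrative sketch (not from the cabal-install sources): the readiness
-- rule that 'ready' above implements, over a made-up map of package states
-- that mirrors the five plan-package constructors (dependencies are attached
-- to 'Configured' here purely for simplicity).
import qualified Data.Map as Map
import Data.Map (Map)

data State = PreExisting | Configured [String] | Processing | Installed | Failed
  deriving Show

-- A configured package is ready once every dependency is already present
-- (pre-existing or successfully installed).
readyPkgs :: Map String State -> [String]
readyPkgs plan =
  [ name | (name, Configured deps) <- Map.toList plan
         , all installed deps ]
  where
    installed dep = case Map.lookup dep plan of
                      Just PreExisting -> True
                      Just Installed   -> True
                      _                -> False

main :: IO ()
main = print (readyPkgs (Map.fromList
    [ ("base",  PreExisting)
    , ("text",  Configured ["base"])
    , ("aeson", Configured ["text", "base"]) ]))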
--- -processing :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => [GenericReadyPackage srcpkg ipkg] - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg srcpkg iresult ifailure -processing pkgs plan = assert (invariant plan') plan' - where - plan' = plan { - planIndex = PackageIndex.merge (planIndex plan) processingPkgs - } - processingPkgs = PackageIndex.fromList [Processing pkg | pkg <- pkgs] - --- | Marks a package in the graph as completed. Also saves the build result for --- the completed package in the plan. --- --- * The package must exist in the graph and be in the processing state. --- * The package must have had no uninstalled dependent packages. --- -completed :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => UnitId - -> Maybe ipkg -> iresult - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg srcpkg iresult ifailure -completed pkgid mipkg buildResult plan = assert (invariant plan') plan' - where - plan' = plan { - -- NB: installation can change the IPID, so better - -- record it in the fake mapping... - planFakeMap = insert_fake_mapping mipkg - $ planFakeMap plan, - planIndex = PackageIndex.insert installed - . PackageIndex.deleteUnitId pkgid - $ planIndex plan - } - -- ...but be sure to use the *old* IPID for the lookup for the - -- preexisting record - installed = Installed (lookupProcessingPackage plan pkgid) mipkg buildResult - insert_fake_mapping (Just ipkg) = Map.insert pkgid (installedUnitId ipkg) - insert_fake_mapping _ = id - --- | Marks a package in the graph as having failed. It also marks all the --- packages that depended on it as having failed. --- --- * The package must exist in the graph and be in the processing --- state. --- -failed :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => UnitId -- ^ The id of the package that failed to install - -> ifailure -- ^ The build result to use for the failed package - -> ifailure -- ^ The build result to use for its dependencies - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg srcpkg iresult ifailure -failed pkgid buildResult buildResult' plan = assert (invariant plan') plan' - where - -- NB: failures don't update IPIDs - plan' = plan { - planIndex = PackageIndex.merge (planIndex plan) failures - } - ReadyPackage srcpkg _deps = lookupProcessingPackage plan pkgid - failures = PackageIndex.fromList - $ Failed srcpkg buildResult - : [ Failed pkg' buildResult' - | Just pkg' <- map checkConfiguredPackage - $ packagesThatDependOn plan pkgid ] - --- | Lookup the reachable packages in the reverse dependency graph. --- -packagesThatDependOn :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> UnitId - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -packagesThatDependOn plan pkgid = map (planPkgOf plan) - . tail - . Graph.reachable (planGraphRev plan) - . planVertexOf plan - $ Map.findWithDefault pkgid pkgid (planFakeMap plan) - --- | Lookup a package that we expect to be in the processing state. 
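-- Illustrative sketch (not from the cabal-install sources): the 'completed'
-- and 'failed' transitions above, reduced to updates on a map of states.
-- 'dependentsOf' is an assumed reverse-dependency query (in the real plan it
-- comes from the transposed dependency graph).
import qualified Data.Map as Map
import Data.Map (Map)

data St = Todo | Building | Ok | FailedHere | FailedDep
  deriving Show

complete :: String -> Map String St -> Map String St
complete pkg = Map.insert pkg Ok

-- Mark the package itself as failed, and every transitive dependent as
-- failed because of a dependency ('DependentFailed' in the real code).
failPkg :: (String -> [String]) -> String -> Map String St -> Map String St
failPkg dependentsOf pkg plan =
  foldr (\d -> Map.insert d FailedDep)
        (Map.insert pkg FailedHere plan)
        (dependentsOf pkg)

main :: IO ()
main = do
  let plan0 = Map.fromList [ ("text", Building), ("aeson", Todo), ("lens", Todo) ]
      deps p = if p == "text" then ["aeson", "lens"] else []
  print (failPkg deps "text" plan0)
  print (complete "text" plan0)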
--- -lookupProcessingPackage :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> UnitId - -> GenericReadyPackage srcpkg ipkg -lookupProcessingPackage plan pkgid = - -- NB: processing packages are guaranteed to not indirect through - -- planFakeMap - case PackageIndex.lookupUnitId (planIndex plan) pkgid of - Just (Processing pkg) -> pkg - _ -> internalError $ "not in processing state or no such pkg " ++ - display pkgid - --- | Check a package that we expect to be in the configured or failed state. --- -checkConfiguredPackage :: (Package srcpkg, Package ipkg) - => GenericPlanPackage ipkg srcpkg iresult ifailure - -> Maybe srcpkg -checkConfiguredPackage (Configured pkg) = Just pkg -checkConfiguredPackage (Failed _ _) = Nothing -checkConfiguredPackage pkg = - internalError $ "not configured or no such pkg " ++ display (packageId pkg) - --- | Replace a ready package with a pre-existing one. The pre-existing one --- must have exactly the same dependencies as the source one was configured --- with. --- -preexisting :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => UnitId - -> ipkg - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg srcpkg iresult ifailure -preexisting pkgid ipkg plan = assert (invariant plan') plan' - where - plan' = plan { - -- NB: installation can change the IPID, so better - -- record it in the fake mapping... - planFakeMap = Map.insert pkgid - (installedUnitId ipkg) - (planFakeMap plan), - planIndex = PackageIndex.insert (PreExisting ipkg) - -- ...but be sure to use the *old* IPID for the lookup for - -- the preexisting record - . PackageIndex.deleteUnitId pkgid - $ planIndex plan - } - --- | Replace a ready package with an installed one. The installed one --- must have exactly the same dependencies as the source one was configured --- with. --- -preinstalled :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => UnitId - -> Maybe ipkg -> iresult - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg srcpkg iresult ifailure -preinstalled pkgid mipkg buildResult plan = assert (invariant plan') plan' - where - plan' = plan { planIndex = PackageIndex.insert installed (planIndex plan) } - Just installed = do - Configured pkg <- PackageIndex.lookupUnitId (planIndex plan) pkgid - rpkg <- lookupReadyPackage plan pkg - return (Installed rpkg mipkg buildResult) - --- | Transform an install plan by mapping a function over all the packages in --- the plan. It can consistently change the 'UnitId' of all the packages, --- while preserving the same overall graph structure. --- --- The mapping function has a few constraints on it for correct operation. --- The mapping function /may/ change the 'UnitId' of the package, but it --- /must/ also remap the 'UnitId's of its dependencies using ths supplied --- remapping function. Apart from this consistent remapping it /may not/ --- change the structure of the dependencies. 
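-- Illustrative sketch (not from the cabal-install sources): the fake-map
-- indirection used by 'completed' and 'preexisting' above.  Before a package
-- is installed only a fake id derived from its package id is known; once the
-- real installed id appears it is recorded, and every lookup goes through
-- the map.  'recordInstall' and 'resolve' are hypothetical helper names.
import qualified Data.Map as Map
import Data.Map (Map)

type FakeId = String
type RealId = String

recordInstall :: FakeId -> RealId -> Map FakeId RealId -> Map FakeId RealId
recordInstall = Map.insert

-- Resolve an id: follow the fake map if there is an entry, otherwise the id
-- is already the one to use (exactly the Map.findWithDefault k k m pattern).
resolve :: Map FakeId RealId -> FakeId -> RealId
resolve m k = Map.findWithDefault k k m

main :: IO ()
main = do
  let m = recordInstall "text-1.2.2.1" "text-1.2.2.1-7aa0b6d6a1c6" Map.empty
  putStrLn (resolve m "text-1.2.2.1")   -- rewritten to the installed id
  putStrLn (resolve m "base-4.9.0.0")   -- unchanged, no fake entry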
--- -mapPreservingGraph :: (HasUnitId ipkg, - HasUnitId srcpkg, - HasUnitId ipkg', PackageFixedDeps ipkg', - HasUnitId srcpkg', PackageFixedDeps srcpkg') - => ( (UnitId -> UnitId) - -> GenericPlanPackage ipkg srcpkg iresult ifailure - -> GenericPlanPackage ipkg' srcpkg' iresult' ifailure') - -> GenericInstallPlan ipkg srcpkg iresult ifailure - -> GenericInstallPlan ipkg' srcpkg' iresult' ifailure' -mapPreservingGraph f plan = - mkInstallPlan (PackageIndex.fromList pkgs') - Map.empty -- empty fakeMap - (planIndepGoals plan) - where - -- The package mapping function may change the UnitId. So we - -- walk over the packages in dependency order keeping track of these - -- package id changes and use it to supply the correct set of package - -- dependencies as an extra input to the package mapping function. - -- - -- Having fully remapped all the deps this also means we can use an empty - -- FakeMap for the resulting install plan. - - (_, pkgs') = foldl' f' (Map.empty, []) (reverseTopologicalOrder plan) - - f' (ipkgidMap, pkgs) pkg = (ipkgidMap', pkg' : pkgs) - where - pkg' = f (mapDep ipkgidMap) pkg - - ipkgidMap' - | ipkgid /= ipkgid' = Map.insert ipkgid ipkgid' ipkgidMap - | otherwise = ipkgidMap - where - ipkgid = installedUnitId pkg - ipkgid' = installedUnitId pkg' - - mapDep ipkgidMap ipkgid = Map.findWithDefault ipkgid ipkgid ipkgidMap - - --- ------------------------------------------------------------ --- * Checking validity of plans --- ------------------------------------------------------------ - --- | A valid installation plan is a set of packages that is 'acyclic', --- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the --- plan has to have a valid configuration (see 'configuredPackageValid'). --- --- * if the result is @False@ use 'problems' to get a detailed list. 
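-- Illustrative sketch (not from the cabal-install sources): the "closed" and
-- "consistent" properties described above, checked over a toy index of
-- (package, version, dependencies).  The consistency check here is a
-- deliberately stricter approximation: at most one version per package name.
import qualified Data.Map as Map
import qualified Data.Set as Set

type Name = String
type Ver  = String

-- Closed: every dependency names a package that is present in the index.
closedIndex :: [(Name, Ver, [Name])] -> Bool
closedIndex pkgs = all (`Set.member` present) (concat [ ds | (_, _, ds) <- pkgs ])
  where present = Set.fromList [ n | (n, _, _) <- pkgs ]

-- Consistent (approximation): no name occurs with two different versions.
consistentIndex :: [(Name, Ver, [Name])] -> Bool
consistentIndex pkgs = all ((== 1) . Set.size) (Map.elems versionsByName)
  where versionsByName = Map.fromListWith Set.union
                           [ (n, Set.singleton v) | (n, v, _) <- pkgs ]

main :: IO ()
main = do
  let idx = [ ("base", "4.9.0.0", [])
            , ("text", "1.2.2.1", ["base"])
            , ("aeson", "0.11.2.1", ["text", "base"]) ]
  print (closedIndex idx, consistentIndex idx)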
--- -valid :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => FakeMap -> Bool - -> PlanIndex ipkg srcpkg iresult ifailure - -> Bool -valid fakeMap indepGoals index = - null $ problems fakeMap indepGoals index - -data PlanProblem ipkg srcpkg iresult ifailure = - PackageMissingDeps (GenericPlanPackage ipkg srcpkg iresult ifailure) - [PackageIdentifier] - | PackageCycle [GenericPlanPackage ipkg srcpkg iresult ifailure] - | PackageInconsistency PackageName [(PackageIdentifier, Version)] - | PackageStateInvalid (GenericPlanPackage ipkg srcpkg iresult ifailure) - (GenericPlanPackage ipkg srcpkg iresult ifailure) - -showPlanProblem :: (Package ipkg, Package srcpkg) - => PlanProblem ipkg srcpkg iresult ifailure -> String -showPlanProblem (PackageMissingDeps pkg missingDeps) = - "Package " ++ display (packageId pkg) - ++ " depends on the following packages which are missing from the plan: " - ++ intercalate ", " (map display missingDeps) - -showPlanProblem (PackageCycle cycleGroup) = - "The following packages are involved in a dependency cycle " - ++ intercalate ", " (map (display.packageId) cycleGroup) - -showPlanProblem (PackageInconsistency name inconsistencies) = - "Package " ++ display name - ++ " is required by several packages," - ++ " but they require inconsistent versions:\n" - ++ unlines [ " package " ++ display pkg ++ " requires " - ++ display (PackageIdentifier name ver) - | (pkg, ver) <- inconsistencies ] - -showPlanProblem (PackageStateInvalid pkg pkg') = - "Package " ++ display (packageId pkg) - ++ " is in the " ++ showPlanState pkg - ++ " state but it depends on package " ++ display (packageId pkg') - ++ " which is in the " ++ showPlanState pkg' - ++ " state" - where - showPlanState (PreExisting _) = "pre-existing" - showPlanState (Configured _) = "configured" - showPlanState (Processing _) = "processing" - showPlanState (Installed _ _ _) = "installed" - showPlanState (Failed _ _) = "failed" - --- | For an invalid plan, produce a detailed list of problems as human readable --- error messages. This is mainly intended for debugging purposes. --- Use 'showPlanProblem' for a human readable explanation. --- -problems :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => FakeMap -> Bool - -> PlanIndex ipkg srcpkg iresult ifailure - -> [PlanProblem ipkg srcpkg iresult ifailure] -problems fakeMap indepGoals index = - - [ PackageMissingDeps pkg - (catMaybes - (map - (fmap packageId . PlanIndex.fakeLookupUnitId fakeMap index) - missingDeps)) - | (pkg, missingDeps) <- PlanIndex.brokenPackages fakeMap index ] - - ++ [ PackageCycle cycleGroup - | cycleGroup <- PlanIndex.dependencyCycles fakeMap index ] - - ++ [ PackageInconsistency name inconsistencies - | (name, inconsistencies) <- - PlanIndex.dependencyInconsistencies fakeMap indepGoals index ] - - ++ [ PackageStateInvalid pkg pkg' - | pkg <- PackageIndex.allPackages index - , Just pkg' <- map (PlanIndex.fakeLookupUnitId fakeMap index) - (CD.flatDeps (depends pkg)) - , not (stateDependencyRelation pkg pkg') ] - --- | The graph of packages (nodes) and dependencies (edges) must be acyclic. --- --- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out --- which packages are involved in dependency cycles. --- -acyclic :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool -acyclic fakeMap = null . 
PlanIndex.dependencyCycles fakeMap - --- | An installation plan is closed if for every package in the set, all of --- its dependencies are also in the set. That is, the set is closed under the --- dependency relation. --- --- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out --- which packages depend on packages not in the index. --- -closed :: (HasUnitId ipkg, PackageFixedDeps ipkg, - PackageFixedDeps srcpkg) - => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool -closed fakeMap = null . PlanIndex.brokenPackages fakeMap - --- | An installation plan is consistent if all dependencies that target a --- single package name, target the same version. --- --- This is slightly subtle. It is not the same as requiring that there be at --- most one version of any package in the set. It only requires that of --- packages which have more than one other package depending on them. We could --- actually make the condition even more precise and say that different --- versions are OK so long as they are not both in the transitive closure of --- any other package (or equivalently that their inverse closures do not --- intersect). The point is we do not want to have any packages depending --- directly or indirectly on two different versions of the same package. The --- current definition is just a safe approximation of that. --- --- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to --- find out which packages are. --- -consistent :: (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool -consistent fakeMap = null . PlanIndex.dependencyInconsistencies fakeMap False - --- | The states of packages have that depend on each other must respect --- this relation. That is for very case where package @a@ depends on --- package @b@ we require that @dependencyStatesOk a b = True@. --- -stateDependencyRelation :: GenericPlanPackage ipkg srcpkg iresult ifailure - -> GenericPlanPackage ipkg srcpkg iresult ifailure - -> Bool -stateDependencyRelation (PreExisting _) (PreExisting _) = True - -stateDependencyRelation (Configured _) (PreExisting _) = True -stateDependencyRelation (Configured _) (Configured _) = True -stateDependencyRelation (Configured _) (Processing _) = True -stateDependencyRelation (Configured _) (Installed _ _ _) = True - -stateDependencyRelation (Processing _) (PreExisting _) = True -stateDependencyRelation (Processing _) (Installed _ _ _) = True - -stateDependencyRelation (Installed _ _ _) (PreExisting _) = True -stateDependencyRelation (Installed _ _ _) (Installed _ _ _) = True - -stateDependencyRelation (Failed _ _) (PreExisting _) = True --- failed can depends on configured because a package can depend on --- several other packages and if one of the deps fail then we fail --- but we still depend on the other ones that did not fail: -stateDependencyRelation (Failed _ _) (Configured _) = True -stateDependencyRelation (Failed _ _) (Processing _) = True -stateDependencyRelation (Failed _ _) (Installed _ _ _) = True -stateDependencyRelation (Failed _ _) (Failed _ _) = True - -stateDependencyRelation _ _ = False - - --- | Compute the dependency closure of a package in a install plan --- -dependencyClosure :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> [UnitId] - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -dependencyClosure plan = - map (planPkgOf plan) - . concatMap Tree.flatten - . Graph.dfs (planGraph plan) - . 
map (planVertexOf plan) - - -reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> [UnitId] - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -reverseDependencyClosure plan = - map (planPkgOf plan) - . concatMap Tree.flatten - . Graph.dfs (planGraphRev plan) - . map (planVertexOf plan) - - -topologicalOrder :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -topologicalOrder plan = - map (planPkgOf plan) - . Graph.topSort - $ planGraph plan - - -reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg iresult ifailure - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] -reverseTopologicalOrder plan = - map (planPkgOf plan) - . Graph.topSort - $ planGraphRev plan diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/InstallSymlink.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/InstallSymlink.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/InstallSymlink.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/InstallSymlink.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,251 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.InstallSymlink --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Managing installing binaries with symlinks. ------------------------------------------------------------------------------ -module Distribution.Client.InstallSymlink ( - symlinkBinaries, - symlinkBinary, - ) where - -#if mingw32_HOST_OS - -import Distribution.Package (PackageIdentifier) -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.Setup (InstallFlags) -import Distribution.Simple.Setup (ConfigFlags) -import Distribution.Simple.Compiler -import Distribution.System - -symlinkBinaries :: Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> IO [(PackageIdentifier, String, FilePath)] -symlinkBinaries _ _ _ _ _ = return [] - -symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool -symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows" - -#else - -import Distribution.Client.Types - ( SourcePackage(..) - , GenericReadyPackage(..), ReadyPackage, enableStanzas - , ConfiguredPackage(..) , fakeUnitId) -import Distribution.Client.Setup - ( InstallFlags(installSymlinkBinDir) ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan (InstallPlan) - -import Distribution.Package - ( PackageIdentifier, Package(packageId), UnitId(..) ) -import Distribution.Compiler - ( CompilerId(..) ) -import qualified Distribution.PackageDescription as PackageDescription -import Distribution.PackageDescription - ( PackageDescription ) -import Distribution.PackageDescription.Configuration - ( finalizePackageDescription ) -import Distribution.Simple.Setup - ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, CompilerInfo(..) 
) -import Distribution.System - ( Platform ) - -import System.Posix.Files - ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink - , removeLink ) -import System.Directory - ( canonicalizePath ) -import System.FilePath - ( (), splitPath, joinPath, isAbsolute ) - -import Prelude hiding (ioError) -import System.IO.Error - ( isDoesNotExistError, ioError ) -import Distribution.Compat.Exception ( catchIO ) -import Control.Exception - ( assert ) -import Data.Maybe - ( catMaybes ) - --- | We would like by default to install binaries into some location that is on --- the user's PATH. For per-user installations on Unix systems that basically --- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ --- directory will be on the user's PATH. However some people are a bit nervous --- about letting a package manager install programs into @~/bin/@. --- --- A compromise solution is that instead of installing binaries directly into --- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ --- and then create symlinks in @~/bin/@. We can be careful when setting up the --- symlinks that we do not overwrite any binary that the user installed. We can --- check if it was a symlink we made because it would point to the private dir --- where we install our binaries. This means we can install normally without --- worrying and in a later phase set up symlinks, and if that fails then we --- report it to the user, but even in this case the package is still in an OK --- installed state. --- --- This is an optional feature that users can choose to use or not. It is --- controlled from the config file. Of course it only works on POSIX systems --- with symlinks so is not available to Windows users. --- -symlinkBinaries :: Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> IO [(PackageIdentifier, String, FilePath)] -symlinkBinaries platform comp configFlags installFlags plan = - case flagToMaybe (installSymlinkBinDir installFlags) of - Nothing -> return [] - Just symlinkBinDir - | null exes -> return [] - | otherwise -> do - publicBinDir <- canonicalizePath symlinkBinDir --- TODO: do we want to do this here? : --- createDirectoryIfMissing True publicBinDir - fmap catMaybes $ sequence - [ do privateBinDir <- pkgBinDir pkg ipid - ok <- symlinkBinary - publicBinDir privateBinDir - publicExeName privateExeName - if ok - then return Nothing - else return (Just (pkgid, publicExeName, - privateBinDir privateExeName)) - | (ReadyPackage (ConfiguredPackage _ _flags _ _) _, pkg, exe) <- exes - , let pkgid = packageId pkg - -- This is a bit dodgy; probably won't work for Backpack packages - ipid = fakeUnitId pkgid - publicExeName = PackageDescription.exeName exe - privateExeName = prefix ++ publicExeName ++ suffix - prefix = substTemplate pkgid ipid prefixTemplate - suffix = substTemplate pkgid ipid suffixTemplate ] - where - exes = - [ (cpkg, pkg, exe) - | InstallPlan.Installed cpkg _ _ <- InstallPlan.toList plan - , let pkg = pkgDescription cpkg - , exe <- PackageDescription.executables pkg - , PackageDescription.buildable (PackageDescription.buildInfo exe) ] - - pkgDescription :: ReadyPackage -> PackageDescription - pkgDescription (ReadyPackage (ConfiguredPackage - (SourcePackage _ pkg _ _) - flags stanzas _) - _) = - case finalizePackageDescription flags - (const True) - platform cinfo [] (enableStanzas stanzas pkg) of - Left _ -> error "finalizePackageDescription ReadyPackage failed" - Right (desc, _) -> desc - - -- This is sadly rather complicated. 
We're kind of re-doing part of the - -- configuration for the package. :-( - pkgBinDir :: PackageDescription -> UnitId -> IO FilePath - pkgBinDir pkg ipid = do - defaultDirs <- InstallDirs.defaultInstallDirs - compilerFlavor - (fromFlag (configUserInstall configFlags)) - (PackageDescription.hasLibs pkg) - let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - absoluteDirs = InstallDirs.absoluteInstallDirs - (packageId pkg) ipid - cinfo InstallDirs.NoCopyDest - platform templateDirs - canonicalizePath (InstallDirs.bindir absoluteDirs) - - substTemplate pkgid ipid = InstallDirs.fromPathTemplate - . InstallDirs.substPathTemplate env - where env = InstallDirs.initialPathTemplateEnv pkgid ipid - cinfo platform - - fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") - prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) - suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) - cinfo = compilerInfo comp - (CompilerId compilerFlavor _) = compilerInfoId cinfo - -symlinkBinary :: FilePath -- ^ The canonical path of the public bin dir - -- eg @/home/user/bin@ - -> FilePath -- ^ The canonical path of the private bin dir - -- eg @/home/user/.cabal/bin@ - -> String -- ^ The name of the executable to go in the public - -- bin dir, eg @foo@ - -> String -- ^ The name of the executable to in the private bin - -- dir, eg @foo-1.0@ - -> IO Bool -- ^ If creating the symlink was successful. @False@ - -- if there was another file there already that we - -- did not own. Other errors like permission errors - -- just propagate as exceptions. -symlinkBinary publicBindir privateBindir publicName privateName = do - ok <- targetOkToOverwrite (publicBindir publicName) - (privateBindir privateName) - case ok of - NotOurFile -> return False - NotExists -> mkLink >> return True - OkToOverwrite -> rmLink >> mkLink >> return True - where - relativeBindir = makeRelative publicBindir privateBindir - mkLink = createSymbolicLink (relativeBindir privateName) - (publicBindir publicName) - rmLink = removeLink (publicBindir publicName) - --- | Check a file path of a symlink that we would like to create to see if it --- is OK. For it to be OK to overwrite it must either not already exist yet or --- be a symlink to our target (in which case we can assume ownership). --- -targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private - -- binary that we would like to create - -> FilePath -- ^ The canonical path of the private binary. - -- Use 'canonicalizePath' to make this. - -> IO SymlinkStatus -targetOkToOverwrite symlink target = handleNotExist $ do - status <- getSymbolicLinkStatus symlink - if not (isSymbolicLink status) - then return NotOurFile - else do target' <- canonicalizePath symlink - -- This relies on canonicalizePath handling symlinks - if target == target' - then return OkToOverwrite - else return NotOurFile - - where - handleNotExist action = catchIO action $ \ioexception -> - -- If the target doesn't exist then there's no problem overwriting it! - if isDoesNotExistError ioexception - then return NotExists - else ioError ioexception - -data SymlinkStatus - = NotExists -- ^ The file doesn't exist so we can make a symlink. - | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll - -- have to delete it first before we make a new symlink. 
- | NotOurFile -- ^ A file already exists and it is not one of our existing - -- symlinks (either because it is not a symlink or because - -- it points somewhere other than our managed space). - deriving Show - --- | Take two canonical paths and produce a relative path to get from the first --- to the second, even if it means adding @..@ path components. --- -makeRelative :: FilePath -> FilePath -> FilePath -makeRelative a b = assert (isAbsolute a && isAbsolute b) $ - let as = splitPath a - bs = splitPath b - commonLen = length $ takeWhile id $ zipWith (==) as bs - in joinPath $ [ ".." | _ <- drop commonLen as ] - ++ drop commonLen bs - -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/JobControl.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/JobControl.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/JobControl.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/JobControl.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.JobControl --- Copyright : (c) Duncan Coutts 2012 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- A job control concurrency abstraction ------------------------------------------------------------------------------ -module Distribution.Client.JobControl ( - JobControl, - newSerialJobControl, - newParallelJobControl, - spawnJob, - collectJob, - - JobLimit, - newJobLimit, - withJobLimit, - - Lock, - newLock, - criticalSection - ) where - -import Control.Monad -import Control.Concurrent hiding (QSem, newQSem, waitQSem, signalQSem) -import Control.Exception (SomeException, bracket_, mask, throw, try) -import Distribution.Client.Compat.Semaphore - -data JobControl m a = JobControl { - spawnJob :: m a -> m (), - collectJob :: m a - } - - -newSerialJobControl :: IO (JobControl IO a) -newSerialJobControl = do - queue <- newChan - return JobControl { - spawnJob = spawn queue, - collectJob = collect queue - } - where - spawn :: Chan (IO a) -> IO a -> IO () - spawn = writeChan - - collect :: Chan (IO a) -> IO a - collect = join . 
readChan - -newParallelJobControl :: IO (JobControl IO a) -newParallelJobControl = do - resultVar <- newEmptyMVar - return JobControl { - spawnJob = spawn resultVar, - collectJob = collect resultVar - } - where - spawn :: MVar (Either SomeException a) -> IO a -> IO () - spawn resultVar job = - mask $ \restore -> - forkIO (do res <- try (restore job) - putMVar resultVar res) - >> return () - - collect :: MVar (Either SomeException a) -> IO a - collect resultVar = - takeMVar resultVar >>= either throw return - -data JobLimit = JobLimit QSem - -newJobLimit :: Int -> IO JobLimit -newJobLimit n = - fmap JobLimit (newQSem n) - -withJobLimit :: JobLimit -> IO a -> IO a -withJobLimit (JobLimit sem) = - bracket_ (waitQSem sem) (signalQSem sem) - -newtype Lock = Lock (MVar ()) - -newLock :: IO Lock -newLock = fmap Lock $ newMVar () - -criticalSection :: Lock -> IO a -> IO a -criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/List.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/List.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/List.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/List.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,599 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.List --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2008-2011 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- --- Search for and print information about packages ------------------------------------------------------------------------------ -module Distribution.Client.List ( - list, info - ) where - -import Distribution.Package - ( PackageName(..), Package(..), packageName, packageVersion - , Dependency(..), simplifyDependency - , UnitId ) -import Distribution.ModuleName (ModuleName) -import Distribution.License (License) -import qualified Distribution.InstalledPackageInfo as Installed -import qualified Distribution.PackageDescription as Source -import Distribution.PackageDescription - ( Flag(..), FlagName(..) ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) - -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack ) -import Distribution.Simple.Program (ProgramConfiguration) -import Distribution.Simple.Utils - ( equating, comparing, die, notice ) -import Distribution.Simple.Setup (fromFlag) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import qualified Distribution.Client.PackageIndex as PackageIndex -import Distribution.Version - ( Version(..), VersionRange, withinRange, anyVersion - , intersectVersionRanges, simplifyVersionRange ) -import Distribution.Verbosity (Verbosity) -import Distribution.Text - ( Text(disp), display ) - -import Distribution.Client.Types - ( SourcePackage(..), SourcePackageDb(..) ) -import Distribution.Client.Dependency.Types - ( PackageConstraint(..) ) -import Distribution.Client.Targets - ( UserTarget, resolveUserTargets, PackageSpecifier(..) ) -import Distribution.Client.Setup - ( GlobalFlags(..), ListFlags(..), InfoFlags(..) - , RepoContext(..) ) -import Distribution.Client.Utils - ( mergeBy, MergeResult(..) 
) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.FetchUtils - ( isFetched ) - -import Data.List - ( sortBy, groupBy, sort, nub, intersperse, maximumBy, partition ) -import Data.Maybe - ( listToMaybe, fromJust, fromMaybe, isJust ) -import qualified Data.Map as Map -import Data.Tree as Tree -import Control.Monad - ( MonadPlus(mplus), join ) -import Control.Exception - ( assert ) -import Text.PrettyPrint as Disp -import System.Directory - ( doesDirectoryExist ) - - --- | Return a list of packages matching given search strings. -getPkgList :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramConfiguration - -> ListFlags - -> [String] - -> IO [PackageDisplayInfo] -getPkgList verbosity packageDBs repoCtxt comp conf listFlags pats = do - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - let sourcePkgIndex = packageIndex sourcePkgDb - prefs name = fromMaybe anyVersion - (Map.lookup name (packagePreferences sourcePkgDb)) - - pkgsInfo :: - [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] - pkgsInfo - -- gather info for all packages - | null pats = mergePackages - (InstalledPackageIndex.allPackages installedPkgIndex) - ( PackageIndex.allPackages sourcePkgIndex) - - -- gather info for packages matching search term - | otherwise = pkgsInfoMatching - - pkgsInfoMatching :: - [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] - pkgsInfoMatching = - let matchingInstalled = matchingPackages - InstalledPackageIndex.searchByNameSubstring - installedPkgIndex - matchingSource = matchingPackages - (\ idx n -> - concatMap snd - (PackageIndex.searchByNameSubstring idx n)) - sourcePkgIndex - in mergePackages matchingInstalled matchingSource - - matches :: [PackageDisplayInfo] - matches = [ mergePackageInfo pref - installedPkgs sourcePkgs selectedPkg False - | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo - , not onlyInstalled || not (null installedPkgs) - , let pref = prefs pkgname - selectedPkg = latestWithPref pref sourcePkgs ] - return matches - where - onlyInstalled = fromFlag (listInstalled listFlags) - matchingPackages search index = - [ pkg - | pat <- pats - , pkg <- search index pat ] - - --- | Show information about packages. -list :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramConfiguration - -> ListFlags - -> [String] - -> IO () -list verbosity packageDBs repos comp conf listFlags pats = do - matches <- getPkgList verbosity packageDBs repos comp conf listFlags pats - - if simpleOutput - then putStr $ unlines - [ display (pkgName pkg) ++ " " ++ display version - | pkg <- matches - , version <- if onlyInstalled - then installedVersions pkg - else nub . sort $ installedVersions pkg - ++ sourceVersions pkg ] - -- Note: this only works because for 'list', one cannot currently - -- specify any version constraints, so listing all installed - -- and source ones works. - else - if null matches - then notice verbosity "No matches found." - else putStr $ unlines (map showPackageSummaryInfo matches) - where - onlyInstalled = fromFlag (listInstalled listFlags) - simpleOutput = fromFlag (listSimpleOutput listFlags) - -info :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramConfiguration - -> GlobalFlags - -> InfoFlags - -> [UserTarget] - -> IO () -info verbosity _ _ _ _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." 
- -info verbosity packageDBs repoCtxt comp conf - globalFlags _listFlags userTargets = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf - sourcePkgDb <- getSourcePackages verbosity repoCtxt - let sourcePkgIndex = packageIndex sourcePkgDb - prefs name = fromMaybe anyVersion - (Map.lookup name (packagePreferences sourcePkgDb)) - - -- Users may specify names of packages that are only installed, not - -- just available source packages, so we must resolve targets using - -- the combination of installed and source packages. - let sourcePkgs' = PackageIndex.fromList - $ map packageId - (InstalledPackageIndex.allPackages installedPkgIndex) - ++ map packageId - (PackageIndex.allPackages sourcePkgIndex) - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - sourcePkgs' userTargets - - pkgsinfo <- sequence - [ do pkginfo <- either die return $ - gatherPkgInfo prefs - installedPkgIndex sourcePkgIndex - pkgSpecifier - updateFileSystemPackageDetails pkginfo - | pkgSpecifier <- pkgSpecifiers ] - - putStr $ unlines (map showPackageDetailedInfo pkgsinfo) - - where - gatherPkgInfo :: (PackageName -> VersionRange) -> - InstalledPackageIndex -> - PackageIndex.PackageIndex SourcePackage -> - PackageSpecifier SourcePackage -> - Either String PackageDisplayInfo - gatherPkgInfo prefs installedPkgIndex sourcePkgIndex - (NamedPackage name constraints) - | null (selectedInstalledPkgs) && null (selectedSourcePkgs) - = Left $ "There is no available version of " ++ display name - ++ " that satisfies " - ++ display (simplifyVersionRange verConstraint) - - | otherwise - = Right $ mergePackageInfo pref installedPkgs - sourcePkgs selectedSourcePkg' - showPkgVersion - where - (pref, installedPkgs, sourcePkgs) = - sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex - - selectedInstalledPkgs = InstalledPackageIndex.lookupDependency - installedPkgIndex - (Dependency name verConstraint) - selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex - (Dependency name verConstraint) - selectedSourcePkg' = latestWithPref pref selectedSourcePkgs - - -- display a specific package version if the user - -- supplied a non-trivial version constraint - showPkgVersion = not (null verConstraints) - verConstraint = foldr intersectVersionRanges anyVersion verConstraints - verConstraints = [ vr | PackageConstraintVersion _ vr <- constraints ] - - gatherPkgInfo prefs installedPkgIndex sourcePkgIndex - (SpecificSourcePackage pkg) = - Right $ mergePackageInfo pref installedPkgs sourcePkgs - selectedPkg True - where - name = packageName pkg - selectedPkg = Just pkg - (pref, installedPkgs, sourcePkgs) = - sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex - -sourcePkgsInfo :: - (PackageName -> VersionRange) - -> PackageName - -> InstalledPackageIndex - -> PackageIndex.PackageIndex SourcePackage - -> (VersionRange, [Installed.InstalledPackageInfo], [SourcePackage]) -sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = - (pref, installedPkgs, sourcePkgs) - where - pref = prefs name - installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName - installedPkgIndex name) - sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name - - --- | The info that we can display for each package. It is information per --- package name and covers all installed and available versions. 
--- -data PackageDisplayInfo = PackageDisplayInfo { - pkgName :: PackageName, - selectedVersion :: Maybe Version, - selectedSourcePkg :: Maybe SourcePackage, - installedVersions :: [Version], - sourceVersions :: [Version], - preferredVersions :: VersionRange, - homepage :: String, - bugReports :: String, - sourceRepo :: String, - synopsis :: String, - description :: String, - category :: String, - license :: License, - author :: String, - maintainer :: String, - dependencies :: [ExtDependency], - flags :: [Flag], - hasLib :: Bool, - hasExe :: Bool, - executables :: [String], - modules :: [ModuleName], - haddockHtml :: FilePath, - haveTarball :: Bool - } - --- | Covers source dependencies and installed dependencies in --- one type. -data ExtDependency = SourceDependency Dependency - | InstalledDependency UnitId - -showPackageSummaryInfo :: PackageDisplayInfo -> String -showPackageSummaryInfo pkginfo = - renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ - char '*' <+> disp (pkgName pkginfo) - $+$ - (nest 4 $ vcat [ - maybeShow (synopsis pkginfo) "Synopsis:" reflowParagraphs - , text "Default available version:" <+> - case selectedSourcePkg pkginfo of - Nothing -> text "[ Not available from any configured repository ]" - Just pkg -> disp (packageVersion pkg) - , text "Installed versions:" <+> - case installedVersions pkginfo of - [] | hasLib pkginfo -> text "[ Not installed ]" - | otherwise -> text "[ Unknown ]" - versions -> dispTopVersions 4 - (preferredVersions pkginfo) versions - , maybeShow (homepage pkginfo) "Homepage:" text - , text "License: " <+> text (display (license pkginfo)) - ]) - $+$ text "" - where - maybeShow [] _ _ = empty - maybeShow l s f = text s <+> (f l) - -showPackageDetailedInfo :: PackageDisplayInfo -> String -showPackageDetailedInfo pkginfo = - renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ - char '*' <+> disp (pkgName pkginfo) - <> maybe empty (\v -> char '-' <> disp v) (selectedVersion pkginfo) - <+> text (replicate (16 - length (display (pkgName pkginfo))) ' ') - <> parens pkgkind - $+$ - (nest 4 $ vcat [ - entry "Synopsis" synopsis hideIfNull reflowParagraphs - , entry "Versions available" sourceVersions - (altText null "[ Not available from server ]") - (dispTopVersions 9 (preferredVersions pkginfo)) - , entry "Versions installed" installedVersions - (altText null (if hasLib pkginfo then "[ Not installed ]" - else "[ Unknown ]")) - (dispTopVersions 4 (preferredVersions pkginfo)) - , entry "Homepage" homepage orNotSpecified text - , entry "Bug reports" bugReports orNotSpecified text - , entry "Description" description hideIfNull reflowParagraphs - , entry "Category" category hideIfNull text - , entry "License" license alwaysShow disp - , entry "Author" author hideIfNull reflowLines - , entry "Maintainer" maintainer hideIfNull reflowLines - , entry "Source repo" sourceRepo orNotSpecified text - , entry "Executables" executables hideIfNull (commaSep text) - , entry "Flags" flags hideIfNull (commaSep dispFlag) - , entry "Dependencies" dependencies hideIfNull (commaSep dispExtDep) - , entry "Documentation" haddockHtml showIfInstalled text - , entry "Cached" haveTarball alwaysShow dispYesNo - , if not (hasLib pkginfo) then empty else - text "Modules:" $+$ nest 4 (vcat (map disp . sort . 
modules $ pkginfo)) - ]) - $+$ text "" - where - entry fname field cond format = case cond (field pkginfo) of - Nothing -> label <+> format (field pkginfo) - Just Nothing -> empty - Just (Just other) -> label <+> text other - where - label = text fname <> char ':' <> padding - padding = text (replicate (13 - length fname ) ' ') - - normal = Nothing - hide = Just Nothing - replace msg = Just (Just msg) - - alwaysShow = const normal - hideIfNull v = if null v then hide else normal - showIfInstalled v - | not isInstalled = hide - | null v = replace "[ Not installed ]" - | otherwise = normal - altText nul msg v = if nul v then replace msg else normal - orNotSpecified = altText null "[ Not specified ]" - - commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f - dispFlag f = case flagName f of FlagName n -> text n - dispYesNo True = text "Yes" - dispYesNo False = text "No" - - dispExtDep (SourceDependency dep) = disp dep - dispExtDep (InstalledDependency dep) = disp dep - - isInstalled = not (null (installedVersions pkginfo)) - hasExes = length (executables pkginfo) >= 2 - --TODO: exclude non-buildable exes - pkgkind | hasLib pkginfo && hasExes = text "programs and library" - | hasLib pkginfo && hasExe pkginfo = text "program and library" - | hasLib pkginfo = text "library" - | hasExes = text "programs" - | hasExe pkginfo = text "program" - | otherwise = empty - - -reflowParagraphs :: String -> Doc -reflowParagraphs = - vcat - . intersperse (text "") -- re-insert blank lines - . map (fsep . map text . concatMap words) -- reflow paragraphs - . filter (/= [""]) - . groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines - . lines - -reflowLines :: String -> Doc -reflowLines = vcat . map text . lines - --- | We get the 'PackageDisplayInfo' by combining the info for the installed --- and available versions of a package. --- --- * We're building info about a various versions of a single named package so --- the input package info records are all supposed to refer to the same --- package name. --- -mergePackageInfo :: VersionRange - -> [Installed.InstalledPackageInfo] - -> [SourcePackage] - -> Maybe SourcePackage - -> Bool - -> PackageDisplayInfo -mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = - assert (length installedPkgs + length sourcePkgs > 0) $ - PackageDisplayInfo { - pkgName = combine packageName source - packageName installed, - selectedVersion = if showVer then fmap packageVersion selectedPkg - else Nothing, - selectedSourcePkg = sourceSelected, - installedVersions = map packageVersion installedPkgs, - sourceVersions = map packageVersion sourcePkgs, - preferredVersions = versionPref, - - license = combine Source.license source - Installed.license installed, - maintainer = combine Source.maintainer source - Installed.maintainer installed, - author = combine Source.author source - Installed.author installed, - homepage = combine Source.homepage source - Installed.homepage installed, - bugReports = maybe "" Source.bugReports source, - sourceRepo = fromMaybe "" . join - . fmap (uncons Nothing Source.repoLocation - . sortBy (comparing Source.repoKind) - . 
Source.sourceRepos) - $ source, - --TODO: installed package info is missing synopsis - synopsis = maybe "" Source.synopsis source, - description = combine Source.description source - Installed.description installed, - category = combine Source.category source - Installed.category installed, - flags = maybe [] Source.genPackageFlags sourceGeneric, - hasLib = isJust installed - || fromMaybe False - (fmap (isJust . Source.condLibrary) sourceGeneric), - hasExe = fromMaybe False - (fmap (not . null . Source.condExecutables) sourceGeneric), - executables = map fst (maybe [] Source.condExecutables sourceGeneric), - modules = combine (map Installed.exposedName . Installed.exposedModules) - installed - (maybe [] getListOfExposedModules . Source.library) - source, - dependencies = - combine (map (SourceDependency . simplifyDependency) - . Source.buildDepends) source - (map InstalledDependency . Installed.depends) installed, - haddockHtml = fromMaybe "" . join - . fmap (listToMaybe . Installed.haddockHTMLs) - $ installed, - haveTarball = False - } - where - combine f x g y = fromJust (fmap f x `mplus` fmap g y) - installed :: Maybe Installed.InstalledPackageInfo - installed = latestWithPref versionPref installedPkgs - - getListOfExposedModules lib = Source.exposedModules lib - ++ map Source.moduleReexportName - (Source.reexportedModules lib) - - sourceSelected - | isJust selectedPkg = selectedPkg - | otherwise = latestWithPref versionPref sourcePkgs - sourceGeneric = fmap packageDescription sourceSelected - source = fmap flattenPackageDescription sourceGeneric - - uncons :: b -> (a -> b) -> [a] -> b - uncons z _ [] = z - uncons _ f (x:_) = f x - - --- | Not all the info is pure. We have to check if the docs really are --- installed, because the registered package info lies. Similarly we have to --- check if the tarball has indeed been fetched. --- -updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo -updateFileSystemPackageDetails pkginfo = do - fetched <- maybe (return False) (isFetched . packageSource) - (selectedSourcePkg pkginfo) - docsExist <- doesDirectoryExist (haddockHtml pkginfo) - return pkginfo { - haveTarball = fetched, - haddockHtml = if docsExist then haddockHtml pkginfo else "" - } - -latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg -latestWithPref _ [] = Nothing -latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) - where - prefThenVersion pkg = let ver = packageVersion pkg - in (withinRange ver pref, ver) - - --- | Rearrange installed and source packages into groups referring to the --- same package by name. In the result pairs, the lists are guaranteed to not --- both be empty. --- -mergePackages :: [Installed.InstalledPackageInfo] - -> [SourcePackage] - -> [( PackageName - , [Installed.InstalledPackageInfo] - , [SourcePackage] )] -mergePackages installedPkgs sourcePkgs = - map collect - $ mergeBy (\i a -> fst i `compare` fst a) - (groupOn packageName installedPkgs) - (groupOn packageName sourcePkgs) - where - collect (OnlyInLeft (name,is) ) = (name, is, []) - collect ( InBoth (_,is) (name,as)) = (name, is, as) - collect (OnlyInRight (name,as)) = (name, [], as) - -groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] -groupOn key = map (\xs -> (key (head xs), xs)) - . groupBy (equating key) - . sortBy (comparing key) - -dispTopVersions :: Int -> VersionRange -> [Version] -> Doc -dispTopVersions n pref vs = - (Disp.fsep . Disp.punctuate (Disp.char ',') - . 
map (\ver -> if ispref ver then disp ver else parens (disp ver)) - . sort . take n . interestingVersions ispref - $ vs) - <+> trailingMessage - - where - ispref ver = withinRange ver pref - extra = length vs - n - trailingMessage - | extra <= 0 = Disp.empty - | otherwise = Disp.parens $ Disp.text "and" - <+> Disp.int (length vs - n) - <+> if extra == 1 then Disp.text "other" - else Disp.text "others" - --- | Reorder a bunch of versions to put the most interesting / significant --- versions first. A preferred version range is taken into account. --- --- This may be used in a user interface to select a small number of versions --- to present to the user, e.g. --- --- > let selectVersions = sort . take 5 . interestingVersions pref --- -interestingVersions :: (Version -> Bool) -> [Version] -> [Version] -interestingVersions pref = - map ((\ns -> Version ns []) . fst) . filter snd - . concat . Tree.levels - . swizzleTree - . reorderTree (\(Node (v,_) _) -> pref (Version v [])) - . reverseTree - . mkTree - . map versionBranch - - where - swizzleTree = unfoldTree (spine []) - where - spine ts' (Node x []) = (x, ts') - spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t - - reorderTree _ (Node x []) = Node x [] - reorderTree p (Node x ts) = Node x (ts' ++ ts'') - where - (ts',ts'') = partition p (map (reorderTree p) ts) - - reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) - - mkTree xs = unfoldTree step (False, [], xs) - where - step (node,ns,vs) = - ( (reverse ns, node) - , [ (any null vs', n:ns, filter (not . null) vs') - | (n, vs') <- groups vs ] - ) - groups = map (\g -> (head (head g), map tail g)) - . groupBy (equating head) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Manpage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Manpage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Manpage.hs 2016-11-07 10:02:41.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Manpage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Manpage --- Copyright : (c) Maciek Makowski 2015 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Functions for building the manual page. - -module Distribution.Client.Manpage - ( -- * Manual page generation - manpage - ) where - -import Distribution.Simple.Command -import Distribution.Client.Setup (globalCommand) - -import Data.Char (toUpper) -import Data.List (intercalate) - -data FileInfo = FileInfo String String -- ^ path, description - --- | A list of files that should be documented in the manual page. -files :: [FileInfo] -files = - [ (FileInfo "~/.cabal/config" "The defaults that can be overridden with command-line options.") - , (FileInfo "~/.cabal/world" "A list of all packages whose installation has been explicitly requested.") - ] - --- | Produces a manual page with @troff@ markup. -manpage :: String -> [CommandSpec a] -> String -manpage pname commands = unlines $ - [ ".TH " ++ map toUpper pname ++ " 1" - , ".SH NAME" - , pname ++ " \\- a system for building and packaging Haskell libraries and programs" - , ".SH SYNOPSIS" - , ".B " ++ pname - , ".I command" - , ".RI < arguments |[ options ]>..." 
- , "" - , "Where the" - , ".I commands" - , "are" - , "" - ] ++ - concatMap (commandSynopsisLines pname) commands ++ - [ ".SH DESCRIPTION" - , "Cabal is the standard package system for Haskell software. It helps people to configure, " - , "build and install Haskell software and to distribute it easily to other users and developers." - , "" - , "The command line " ++ pname ++ " tool (also referred to as cabal-install) helps with " - , "installing existing packages and developing new packages. " - , "It can be used to work with local packages or to install packages from online package archives, " - , "including automatically installing dependencies. By default it is configured to use Hackage, " - , "which is Haskell’s central package archive that contains thousands of libraries and applications " - , "in the Cabal package format." - , ".SH OPTIONS" - , "Global options:" - , "" - ] ++ - optionsLines (globalCommand []) ++ - [ ".SH COMMANDS" - ] ++ - concatMap (commandDetailsLines pname) commands ++ - [ ".SH FILES" - ] ++ - concatMap fileLines files ++ - [ ".SH BUGS" - , "To browse the list of known issues or report a new one please see " - , "https://github.com/haskell/cabal/labels/cabal-install." - ] - -commandSynopsisLines :: String -> CommandSpec action -> [String] -commandSynopsisLines pname (CommandSpec ui _ NormalCommand) = - [ ".B " ++ pname ++ " " ++ (commandName ui) - , ".R - " ++ commandSynopsis ui - , ".br" - ] -commandSynopsisLines _ (CommandSpec _ _ HiddenCommand) = [] - -commandDetailsLines :: String -> CommandSpec action -> [String] -commandDetailsLines pname (CommandSpec ui _ NormalCommand) = - [ ".B " ++ pname ++ " " ++ (commandName ui) - , "" - , commandUsage ui pname - , "" - ] ++ - optional commandDescription ++ - optional commandNotes ++ - [ "Flags:" - , ".RS" - ] ++ - optionsLines ui ++ - [ ".RE" - , "" - ] - where - optional field = - case field ui of - Just text -> [text pname, ""] - Nothing -> [] -commandDetailsLines _ (CommandSpec _ _ HiddenCommand) = [] - -optionsLines :: CommandUI flags -> [String] -optionsLines command = concatMap optionLines (concatMap optionDescr (commandOptions command ParseArgs)) - -data ArgumentRequired = Optional | Required -type OptionArg = (ArgumentRequired, ArgPlaceHolder) - -optionLines :: OptDescr flags -> [String] -optionLines (ReqArg description (optionChars, optionStrings) placeHolder _ _) = - argOptionLines description optionChars optionStrings (Required, placeHolder) -optionLines (OptArg description (optionChars, optionStrings) placeHolder _ _ _) = - argOptionLines description optionChars optionStrings (Optional, placeHolder) -optionLines (BoolOpt description (trueChars, trueStrings) (falseChars, falseStrings) _ _) = - optionLinesIfPresent trueChars trueStrings ++ - optionLinesIfPresent falseChars falseStrings ++ - optionDescriptionLines description -optionLines (ChoiceOpt options) = - concatMap choiceLines options - where - choiceLines (description, (optionChars, optionStrings), _, _) = - [ optionsLine optionChars optionStrings ] ++ - optionDescriptionLines description - -argOptionLines :: String -> [Char] -> [String] -> OptionArg -> [String] -argOptionLines description optionChars optionStrings arg = - [ optionsLine optionChars optionStrings - , optionArgLine arg - ] ++ - optionDescriptionLines description - -optionLinesIfPresent :: [Char] -> [String] -> [String] -optionLinesIfPresent optionChars optionStrings = - if null optionChars && null optionStrings then [] - else [ optionsLine optionChars optionStrings, ".br" ] - 
-optionDescriptionLines :: String -> [String] -optionDescriptionLines description = - [ ".RS" - , description - , ".RE" - , "" - ] - -optionsLine :: [Char] -> [String] -> String -optionsLine optionChars optionStrings = - intercalate ", " (shortOptions optionChars ++ longOptions optionStrings) - -shortOptions :: [Char] -> [String] -shortOptions = map (\c -> "\\-" ++ [c]) - -longOptions :: [String] -> [String] -longOptions = map (\s -> "\\-\\-" ++ s) - -optionArgLine :: OptionArg -> String -optionArgLine (Required, placeHolder) = ".I " ++ placeHolder -optionArgLine (Optional, placeHolder) = ".RI [ " ++ placeHolder ++ " ]" - -fileLines :: FileInfo -> [String] -fileLines (FileInfo path description) = - [ path - , ".RS" - , description - , ".RE" - , "" - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageHash.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageHash.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageHash.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageHash.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,305 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns, GeneralizedNewtypeDeriving #-} - --- | Functions to calculate nix-style hashes for package ids. --- --- The basic idea is simple, hash the combination of: --- --- * the package tarball --- * the ids of all the direct dependencies --- * other local configuration (flags, profiling, etc) --- -module Distribution.Client.PackageHash ( - -- * Calculating package hashes - PackageHashInputs(..), - PackageHashConfigInputs(..), - PackageSourceHash, - hashedInstalledPackageId, - hashPackageHashInputs, - renderPackageHashInputs, - -- ** Platform-specific variations - hashedInstalledPackageIdLong, - hashedInstalledPackageIdShort, - - -- * Low level hash choice - HashValue, - hashValue, - showHashValue, - readFileHashValue, - hashFromTUF, - ) where - -import Distribution.Package - ( PackageId, PackageIdentifier(..), mkUnitId ) -import Distribution.System - ( Platform, OS(Windows), buildOS ) -import Distribution.PackageDescription - ( FlagName(..), FlagAssignment ) -import Distribution.Simple.Compiler - ( CompilerId, OptimisationLevel(..), DebugInfoLevel(..) - , ProfDetailLevel(..), showProfDetailLevel ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate ) -import Distribution.Text - ( display ) -import Distribution.Client.Types - ( InstalledPackageId ) - -import qualified Hackage.Security.Client as Sec - -import qualified Crypto.Hash.SHA256 as SHA256 -import qualified Data.ByteString.Base16 as Base16 -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Lazy.Char8 as LBS -import qualified Data.Set as Set -import Data.Set (Set) - -import Data.Maybe (catMaybes) -import Data.List (sortBy, intercalate) -import Data.Function (on) -import Distribution.Compat.Binary (Binary(..)) -import Control.Exception (evaluate) -import System.IO (withBinaryFile, IOMode(..)) - - -------------------------------- --- Calculating package hashes --- - --- | Calculate a 'InstalledPackageId' for a package using our nix-style --- inputs hashing method. --- --- Note that due to path length limitations on Windows, this function uses --- a different method on Windows that produces shorted package ids. 
--- See 'hashedInstalledPackageIdLong' vs 'hashedInstalledPackageIdShort'. --- -hashedInstalledPackageId :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageId - | buildOS == Windows = hashedInstalledPackageIdShort - | otherwise = hashedInstalledPackageIdLong - --- | Calculate a 'InstalledPackageId' for a package using our nix-style --- inputs hashing method. --- --- This produces large ids with big hashes. It is only suitable for systems --- without significant path length limitations (ie not Windows). --- -hashedInstalledPackageIdLong :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageIdLong pkghashinputs@PackageHashInputs{pkgHashPkgId} = - mkUnitId $ - display pkgHashPkgId -- to be a bit user friendly - ++ "-" - ++ showHashValue (hashPackageHashInputs pkghashinputs) - --- | On Windows we have serious problems with path lengths. Windows imposes a --- maximum path length of 260 chars, and even if we can use the windows long --- path APIs ourselves, we cannot guarantee that ghc, gcc, ld, ar, etc etc all --- do so too. --- --- So our only choice is to limit the lengths of the paths, and the only real --- way to do that is to limit the size of the 'InstalledPackageId's that we --- generate. We do this by truncating the package names and versions and also --- by truncating the hash sizes. --- --- Truncating the package names and versions is technically ok because they are --- just included for human convenience, the full source package id is included --- in the hash. --- --- Truncating the hash size is disappointing but also technically ok. We --- rely on the hash primarily for collision avoidance not for any securty --- properties (at least for now). --- -hashedInstalledPackageIdShort :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = - mkUnitId $ - intercalate "-" - -- max length now 64 - [ truncateStr 14 (display name) - , truncateStr 8 (display version) - , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) - ] - where - PackageIdentifier name version = pkgHashPkgId - - -- Truncate a 32 byte SHA256 hash to 160bits, 20 bytes :-( - -- It'll render as 40 hex chars. - truncateHash (HashValue h) = HashValue (BS.take 20 h) - - -- Truncate a string, with a visual indication that it is truncated. - truncateStr n s | length s <= n = s - | otherwise = take (n-1) s ++ "_" - --- | All the information that contribues to a package's hash, and thus its --- 'InstalledPackageId'. --- -data PackageHashInputs = PackageHashInputs { - pkgHashPkgId :: PackageId, - pkgHashSourceHash :: PackageSourceHash, - pkgHashDirectDeps :: Set InstalledPackageId, - pkgHashOtherConfig :: PackageHashConfigInputs - } - -type PackageSourceHash = HashValue - --- | Those parts of the package configuration that contribute to the --- package hash. 
--- -data PackageHashConfigInputs = PackageHashConfigInputs { - pkgHashCompilerId :: CompilerId, - pkgHashPlatform :: Platform, - pkgHashFlagAssignment :: FlagAssignment, -- complete not partial - pkgHashConfigureScriptArgs :: [String], -- just ./configure for build-type Configure - pkgHashVanillaLib :: Bool, - pkgHashSharedLib :: Bool, - pkgHashDynExe :: Bool, - pkgHashGHCiLib :: Bool, - pkgHashProfLib :: Bool, - pkgHashProfExe :: Bool, - pkgHashProfLibDetail :: ProfDetailLevel, - pkgHashProfExeDetail :: ProfDetailLevel, - pkgHashCoverage :: Bool, - pkgHashOptimization :: OptimisationLevel, - pkgHashSplitObjs :: Bool, - pkgHashStripLibs :: Bool, - pkgHashStripExes :: Bool, - pkgHashDebugInfo :: DebugInfoLevel, - pkgHashExtraLibDirs :: [FilePath], - pkgHashExtraFrameworkDirs :: [FilePath], - pkgHashExtraIncludeDirs :: [FilePath], - pkgHashProgPrefix :: Maybe PathTemplate, - pkgHashProgSuffix :: Maybe PathTemplate - --- TODO: [required eventually] extra program options --- TODO: [required eventually] pkgHashToolsVersions ? --- TODO: [required eventually] pkgHashToolsExtraOptions ? --- TODO: [research required] and what about docs? - } - deriving Show - - --- | Calculate the overall hash to be used for an 'InstalledPackageId'. --- -hashPackageHashInputs :: PackageHashInputs -> HashValue -hashPackageHashInputs = hashValue . renderPackageHashInputs - --- | Render a textual representation of the 'PackageHashInputs'. --- --- The 'hashValue' of this text is the overall package hash. --- -renderPackageHashInputs :: PackageHashInputs -> LBS.ByteString -renderPackageHashInputs PackageHashInputs{ - pkgHashPkgId, - pkgHashSourceHash, - pkgHashDirectDeps, - pkgHashOtherConfig = - PackageHashConfigInputs{..} - } = - -- The purpose of this somewhat laboured rendering (e.g. why not just - -- use show?) is so that existing package hashes do not change - -- unnecessarily when new configuration inputs are added into the hash. - - -- In particular, the assumption is that when a new configuration input - -- is included into the hash, that existing packages will typically get - -- the default value for that feature. So if we avoid adding entries with - -- the default value then most of the time adding new features will not - -- change the hashes of existing packages and so fewer packages will need - -- to be rebuilt. - - --TODO: [nice to have] ultimately we probably want to put this config info - -- into the ghc-pkg db. At that point this should probably be changed to - -- use the config file infrastructure so it can be read back in again. - LBS.pack $ unlines $ catMaybes - [ entry "pkgid" display pkgHashPkgId - , entry "src" showHashValue pkgHashSourceHash - , entry "deps" (intercalate ", " . map display - . 
Set.toList) pkgHashDirectDeps - -- and then all the config - , entry "compilerid" display pkgHashCompilerId - , entry "platform" display pkgHashPlatform - , opt "flags" [] showFlagAssignment pkgHashFlagAssignment - , opt "configure-script" [] unwords pkgHashConfigureScriptArgs - , opt "vanilla-lib" True display pkgHashVanillaLib - , opt "shared-lib" False display pkgHashSharedLib - , opt "dynamic-exe" False display pkgHashDynExe - , opt "ghci-lib" False display pkgHashGHCiLib - , opt "prof-lib" False display pkgHashProfLib - , opt "prof-exe" False display pkgHashProfExe - , opt "prof-lib-detail" ProfDetailDefault showProfDetailLevel pkgHashProfLibDetail - , opt "prof-exe-detail" ProfDetailDefault showProfDetailLevel pkgHashProfExeDetail - , opt "hpc" False display pkgHashCoverage - , opt "optimisation" NormalOptimisation (show . fromEnum) pkgHashOptimization - , opt "split-objs" False display pkgHashSplitObjs - , opt "stripped-lib" False display pkgHashStripLibs - , opt "stripped-exe" True display pkgHashStripExes - , opt "debug-info" NormalDebugInfo (show . fromEnum) pkgHashDebugInfo - , opt "extra-lib-dirs" [] unwords pkgHashExtraLibDirs - , opt "extra-framework-dirs" [] unwords pkgHashExtraFrameworkDirs - , opt "extra-include-dirs" [] unwords pkgHashExtraIncludeDirs - , opt "prog-prefix" Nothing (maybe "" fromPathTemplate) pkgHashProgPrefix - , opt "prog-suffix" Nothing (maybe "" fromPathTemplate) pkgHashProgSuffix - ] - where - entry key format value = Just (key ++ ": " ++ format value) - opt key def format value - | value == def = Nothing - | otherwise = entry key format value - - showFlagAssignment = unwords . map showEntry . sortBy (compare `on` fst) - where - showEntry (FlagName name, False) = '-' : name - showEntry (FlagName name, True) = '+' : name - ------------------------------------------------ --- The specific choice of hash implementation --- - --- Is a crypto hash necessary here? One thing to consider is who controls the --- inputs and what's the result of a hash collision. Obviously we should not --- install packages we don't trust because they can run all sorts of code, but --- if I've checked there's no TH, no custom Setup etc, is there still a --- problem? If someone provided us a tarball that hashed to the same value as --- some other package and we installed it, we could end up re-using that --- installed package in place of another one we wanted. So yes, in general --- there is some value in preventing intentional hash collisions in installed --- package ids. - -newtype HashValue = HashValue BS.ByteString - deriving (Eq, Show) - -instance Binary HashValue where - put (HashValue digest) = put digest - get = do - digest <- get - -- Cannot do any sensible validation here. Although we use SHA256 - -- for stuff we hash ourselves, we can also get hashes from TUF - -- and that can in principle use different hash functions in future. - return (HashValue digest) - --- | Hash some data. Currently uses SHA256. --- -hashValue :: LBS.ByteString -> HashValue -hashValue = HashValue . SHA256.hashlazy - -showHashValue :: HashValue -> String -showHashValue (HashValue digest) = BS.unpack (Base16.encode digest) - --- | Hash the content of a file. Uses SHA256. --- -readFileHashValue :: FilePath -> IO HashValue -readFileHashValue tarball = - withBinaryFile tarball ReadMode $ \hnd -> - evaluate . hashValue =<< LBS.hGetContents hnd - --- | Convert a hash from TUF metadata into a 'PackageSourceHash'. 
--- --- Note that TUF hashes don't neessarily have to be SHA256, since it can --- support new algorithms in future. --- -hashFromTUF :: Sec.Hash -> HashValue -hashFromTUF (Sec.Hash hashstr) = - --TODO: [code cleanup] either we should get TUF to use raw bytestrings or - -- perhaps we should also just use a base16 string as the internal rep. - case Base16.decode (BS.pack hashstr) of - (hash, trailing) | not (BS.null hash) && BS.null trailing - -> HashValue hash - _ -> error "hashFromTUF: cannot decode base16 hash" - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageIndex.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,318 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.PackageIndex --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007, --- Duncan Coutts 2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- An index of packages. --- -module Distribution.Client.PackageIndex ( - -- * Package index data type - PackageIndex, - - -- * Creating an index - fromList, - - -- * Updates - merge, - insert, - deletePackageName, - deletePackageId, - deleteDependency, - - -- * Queries - - -- ** Precise lookups - elemByPackageId, - elemByPackageName, - lookupPackageName, - lookupPackageId, - lookupDependency, - - -- ** Case-insensitive searches - searchByName, - SearchResult(..), - searchByNameSubstring, - - -- ** Bulk queries - allPackages, - allPackagesByName, - ) where - -import Prelude hiding (lookup) -import Control.Exception (assert) -import qualified Data.Map as Map -import Data.Map (Map) -import Data.List (groupBy, sortBy, isInfixOf) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -#endif -import Data.Maybe (isJust, fromMaybe) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup (Semigroup((<>))) - -import Distribution.Package - ( PackageName(..), PackageIdentifier(..) - , Package(..), packageName, packageVersion - , Dependency(Dependency) ) -import Distribution.Version - ( withinRange ) -import Distribution.Simple.Utils - ( lowercase, comparing ) - - --- | The collection of information about packages from one or more 'PackageDB's. --- --- It can be searched efficiently by package name and version. --- -newtype PackageIndex pkg = PackageIndex - -- This index package names to all the package records matching that package - -- name case-sensitively. It includes all versions. - -- - -- This allows us to find all versions satisfying a dependency. - -- Most queries are a map lookup followed by a linear scan of the bucket. 
- -- - (Map PackageName [pkg]) - - deriving (Eq, Show, Read, Functor, Generic) ---FIXME: the Functor instance here relies on no package id changes - -instance Package pkg => Semigroup (PackageIndex pkg) where - (<>) = merge - -instance Package pkg => Monoid (PackageIndex pkg) where - mempty = PackageIndex Map.empty - mappend = (<>) - --save one mappend with empty in the common case: - mconcat [] = mempty - mconcat xs = foldr1 mappend xs - -instance Binary pkg => Binary (PackageIndex pkg) - -invariant :: Package pkg => PackageIndex pkg -> Bool -invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) - where - goodBucket _ [] = False - goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 - where - check pkgid [] = packageName pkgid == name - check pkgid (pkg':pkgs) = packageName pkgid == name - && pkgid < pkgid' - && check pkgid' pkgs - where pkgid' = packageId pkg' - --- --- * Internal helpers --- - -mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg -mkPackageIndex index = assert (invariant (PackageIndex index)) - (PackageIndex index) - -internalError :: String -> a -internalError name = error ("PackageIndex." ++ name ++ ": internal error") - --- | Lookup a name in the index to get all packages that match that name --- case-sensitively. --- -lookup :: PackageIndex pkg -> PackageName -> [pkg] -lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m - --- --- * Construction --- - --- | Build an index out of a bunch of packages. --- --- If there are duplicates, later ones mask earlier ones. --- -fromList :: Package pkg => [pkg] -> PackageIndex pkg -fromList pkgs = mkPackageIndex - . Map.map fixBucket - . Map.fromListWith (++) - $ [ (packageName pkg, [pkg]) - | pkg <- pkgs ] - where - fixBucket = -- out of groups of duplicates, later ones mask earlier ones - -- but Map.fromListWith (++) constructs groups in reverse order - map head - -- Eq instance for PackageIdentifier is wrong, so use Ord: - . groupBy (\a b -> EQ == comparing packageId a b) - -- relies on sortBy being a stable sort so we - -- can pick consistently among duplicates - . sortBy (comparing packageId) - --- --- * Updates --- - --- | Merge two indexes. --- --- Packages from the second mask packages of the same exact name --- (case-sensitively) from the first. --- -merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg -merge i1@(PackageIndex m1) i2@(PackageIndex m2) = - assert (invariant i1 && invariant i2) $ - mkPackageIndex (Map.unionWith mergeBuckets m1 m2) - --- | Elements in the second list mask those in the first. -mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg] -mergeBuckets [] ys = ys -mergeBuckets xs [] = xs -mergeBuckets xs@(x:xs') ys@(y:ys') = - case packageId x `compare` packageId y of - GT -> y : mergeBuckets xs ys' - EQ -> y : mergeBuckets xs' ys' - LT -> x : mergeBuckets xs' ys - --- | Inserts a single package into the index. --- --- This is equivalent to (but slightly quicker than) using 'mappend' or --- 'merge' with a singleton index. --- -insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg -insert pkg (PackageIndex index) = mkPackageIndex $ - Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index - where - pkgid = packageId pkg - insertNoDup [] = [pkg] - insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of - LT -> pkg : pkgs - EQ -> pkg : pkgs' - GT -> pkg' : insertNoDup pkgs' - --- | Internal delete helper. 
--- -delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg -delete name p (PackageIndex index) = mkPackageIndex $ - Map.update filterBucket name index - where - filterBucket = deleteEmptyBucket - . filter (not . p) - deleteEmptyBucket [] = Nothing - deleteEmptyBucket remaining = Just remaining - --- | Removes a single package from the index. --- -deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg -deletePackageId pkgid = - delete (packageName pkgid) (\pkg -> packageId pkg == pkgid) - --- | Removes all packages with this (case-sensitive) name from the index. --- -deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg -deletePackageName name = - delete name (\pkg -> packageName pkg == name) - --- | Removes all packages satisfying this dependency from the index. --- -deleteDependency :: Package pkg => Dependency -> PackageIndex pkg -> PackageIndex pkg -deleteDependency (Dependency name verstionRange) = - delete name (\pkg -> packageVersion pkg `withinRange` verstionRange) - --- --- * Bulk queries --- - --- | Get all the packages from the index. --- -allPackages :: PackageIndex pkg -> [pkg] -allPackages (PackageIndex m) = concat (Map.elems m) - --- | Get all the packages from the index. --- --- They are grouped by package name, case-sensitively. --- -allPackagesByName :: PackageIndex pkg -> [[pkg]] -allPackagesByName (PackageIndex m) = Map.elems m - --- --- * Lookups --- - -elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool -elemByPackageId index = isJust . lookupPackageId index - -elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool -elemByPackageName index = not . null . lookupPackageName index - - --- | Does a lookup by package id (name & version). --- --- Since multiple package DBs mask each other case-sensitively by package name, --- then we get back at most one package. --- -lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg -lookupPackageId index pkgid = - case [ pkg | pkg <- lookup index (packageName pkgid) - , packageId pkg == pkgid ] of - [] -> Nothing - [pkg] -> Just pkg - _ -> internalError "lookupPackageIdentifier" - --- | Does a case-sensitive search by package name. --- -lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] -lookupPackageName index name = - [ pkg | pkg <- lookup index name - , packageName pkg == name ] - --- | Does a case-sensitive search by package name and a range of versions. --- --- We get back any number of versions of the specified package name, all --- satisfying the version range constraint. --- -lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg] -lookupDependency index (Dependency name versionRange) = - [ pkg | pkg <- lookup index name - , packageName pkg == name - , packageVersion pkg `withinRange` versionRange ] - --- --- * Case insensitive name lookups --- - --- | Does a case-insensitive search by package name. --- --- If there is only one package that compares case-insensitively to this name --- then the search is unambiguous and we get back all versions of that package. --- If several match case-insensitively but one matches exactly then it is also --- unambiguous. --- --- If however several match case-insensitively and none match exactly then we --- have an ambiguous result, and we get back all the versions of all the --- packages. The list of ambiguous results is split by exact package name. 
So --- it is a non-empty list of non-empty lists. --- -searchByName :: PackageIndex pkg - -> String -> [(PackageName, [pkg])] -searchByName (PackageIndex m) name = - [ pkgs - | pkgs@(PackageName name',_) <- Map.toList m - , lowercase name' == lname ] - where - lname = lowercase name - -data SearchResult a = None | Unambiguous a | Ambiguous [a] - --- | Does a case-insensitive substring search by package name. --- --- That is, all packages that contain the given string in their name. --- -searchByNameSubstring :: PackageIndex pkg - -> String -> [(PackageName, [pkg])] -searchByNameSubstring (PackageIndex m) searchterm = - [ pkgs - | pkgs@(PackageName name, _) <- Map.toList m - , lsearchterm `isInfixOf` lowercase name ] - where - lsearchterm = lowercase searchterm diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PackageUtils.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PackageUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.PackageUtils --- Copyright : (c) Duncan Coutts 2010 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Various package description utils that should be in the Cabal lib ------------------------------------------------------------------------------ -module Distribution.Client.PackageUtils ( - externalBuildDepends, - ) where - -import Distribution.Package - ( packageVersion, packageName, Dependency(..) ) -import Distribution.PackageDescription - ( PackageDescription(..) ) -import Distribution.Version - ( withinRange ) - --- | The list of dependencies that refer to external packages --- rather than internal package components. --- -externalBuildDepends :: PackageDescription -> [Dependency] -externalBuildDepends pkg = filter (not . internal) (buildDepends pkg) - where - -- True if this dependency is an internal one (depends on a library - -- defined in the same package). - internal (Dependency depName versionRange) = - depName == packageName pkg && - packageVersion pkg `withinRange` versionRange diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ParseUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ParseUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ParseUtils.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,279 +0,0 @@ -{-# LANGUAGE ExistentialQuantification, NamedFieldPuns #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.ParseUtils --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Parsing utilities. 
------------------------------------------------------------------------------ - -module Distribution.Client.ParseUtils ( - - -- * Fields and field utilities - FieldDescr(..), - liftField, - liftFields, - filterFields, - mapFieldNames, - commandOptionToField, - commandOptionsToFields, - - -- * Sections and utilities - SectionDescr(..), - liftSection, - - -- * Parsing and printing flat config - parseFields, - ppFields, - ppSection, - - -- * Parsing and printing config with sections and subsections - parseFieldsAndSections, - ppFieldsAndSections, - - -- ** Top level of config files - parseConfig, - showConfig, - ) - where - -import Distribution.ParseUtils - ( FieldDescr(..), ParseResult(..), warning, LineNo, lineNo - , Field(..), liftField, readFieldsFlat ) -import Distribution.Simple.Command - ( OptionField, viewAsFieldDescr ) - -import Control.Monad ( foldM ) -import Text.PrettyPrint ( (<>), (<+>), ($+$) ) -import qualified Data.Map as Map -import qualified Text.PrettyPrint as Disp - ( Doc, text, colon, vcat, empty, isEmpty, nest ) - - -------------------------- --- FieldDescr utilities --- - -liftFields :: (b -> a) - -> (a -> b -> b) - -> [FieldDescr a] - -> [FieldDescr b] -liftFields get set = map (liftField get set) - - --- | Given a collection of field descriptions, keep only a given list of them, --- identified by name. --- -filterFields :: [String] -> [FieldDescr a] -> [FieldDescr a] -filterFields includeFields = filter ((`elem` includeFields) . fieldName) - --- | Apply a name mangling function to the field names of all the field --- descriptions. The typical use case is to apply some prefix. --- -mapFieldNames :: (String -> String) -> [FieldDescr a] -> [FieldDescr a] -mapFieldNames mangleName = - map (\descr -> descr { fieldName = mangleName (fieldName descr) }) - - --- | Reuse a command line 'OptionField' as a config file 'FieldDescr'. --- -commandOptionToField :: OptionField a -> FieldDescr a -commandOptionToField = viewAsFieldDescr - --- | Reuse a bunch of command line 'OptionField's as config file 'FieldDescr's. --- -commandOptionsToFields :: [OptionField a] -> [FieldDescr a] -commandOptionsToFields = map viewAsFieldDescr - - ------------------------------------------- --- SectionDescr definition and utilities --- - --- | The description of a section in a config file. It can contain both --- fields and optionally further subsections. See also 'FieldDescr'. --- -data SectionDescr a = forall b. SectionDescr { - sectionName :: String, - sectionFields :: [FieldDescr b], - sectionSubsections :: [SectionDescr b], - sectionGet :: a -> [(String, b)], - sectionSet :: LineNo -> String -> b -> a -> ParseResult a, - sectionEmpty :: b - } - --- | To help construction of config file descriptions in a modular way it is --- useful to define fields and sections on local types and then hoist them --- into the parent types when combining them in bigger descriptions. --- --- This is essentially a lens operation for 'SectionDescr' to help embedding --- one inside another. --- -liftSection :: (b -> a) - -> (a -> b -> b) - -> SectionDescr a - -> SectionDescr b -liftSection get' set' (SectionDescr name fields sections get set empty) = - let sectionGet' = get . get' - sectionSet' lineno param x y = do - x' <- set lineno param x (get' y) - return (set' x' y) - in SectionDescr name fields sections sectionGet' sectionSet' empty - - -------------------------------------- --- Parsing and printing flat config --- - --- | Parse a bunch of semi-parsed 'Field's according to a set of field --- descriptions. 
It accumulates the result on top of a given initial value. --- --- This only covers the case of flat configuration without subsections. See --- also 'parseFieldsAndSections'. --- -parseFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a -parseFields fieldDescrs = - foldM setField - where - fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] - - setField accum (F line name value) = - case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value accum - Nothing -> do - warning $ "Unrecognized field " ++ name ++ " on line " ++ show line - return accum - - setField accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - --- | This is a customised version of the functions from Distribution.ParseUtils --- that also optionally print default values for empty fields as comments. --- -ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc -ppFields fields def cur = - Disp.vcat [ ppField name (fmap getter def) (getter cur) - | FieldDescr name getter _ <- fields] - -ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc -ppField name mdef cur - | Disp.isEmpty cur = maybe Disp.empty - (\def -> Disp.text "--" <+> Disp.text name - <> Disp.colon <+> def) mdef - | otherwise = Disp.text name <> Disp.colon <+> cur - --- | Pretty print a section. --- --- Since 'ppFields' does not cover subsections you can use this to add them. --- Or alternatively use a 'SectionDescr' and use 'ppFieldsAndSections'. --- -ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc -ppSection name arg fields def cur - | Disp.isEmpty fieldsDoc = Disp.empty - | otherwise = Disp.text name <+> argDoc - $+$ (Disp.nest 2 fieldsDoc) - where - fieldsDoc = ppFields fields def cur - argDoc | arg == "" = Disp.empty - | otherwise = Disp.text arg - - ------------------------------------------ --- Parsing and printing non-flat config --- - --- | Much like 'parseFields' but it also allows subsections. The permitted --- subsections are given by a list of 'SectionDescr's. --- -parseFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a - -> [Field] -> ParseResult a -parseFieldsAndSections fieldDescrs sectionDescrs = - foldM setField - where - fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] - sectionMap = Map.fromList [ (sectionName s, s) | s <- sectionDescrs ] - - setField a (F line name value) = - case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value a - Nothing -> do - warning $ "Unrecognized field '" ++ name - ++ "' on line " ++ show line - return a - - setField a (Section line name param fields) = - case Map.lookup name sectionMap of - Just (SectionDescr _ fieldDescrs' sectionDescrs' _ set sectionEmpty) -> do - b <- parseFieldsAndSections fieldDescrs' sectionDescrs' sectionEmpty fields - set line param b a - Nothing -> do - warning $ "Unrecognized section '" ++ name - ++ "' on line " ++ show line - return a - - setField accum (block@IfBlock {}) = do - warning $ "Unrecognized stanza on line " ++ show (lineNo block) - return accum - --- | Much like 'ppFields' but also pretty prints any subsections. Subsection --- are only shown if they are non-empty. --- --- Note that unlike 'ppFields', at present it does not support printing --- default values. If needed, adding such support would be quite reasonable. 
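To make the FieldDescr/parseFields pattern above concrete, here is a small stand-alone analogue: each field description pairs a name with a setter, and parsing a flat configuration is a monadic fold over (name, value) pairs. The Field and Cfg types below are simplified stand-ins, not the real Distribution.ParseUtils API, and unknown fields are silently skipped where the real code emits a warning:

import           Control.Monad (foldM)
import qualified Data.Map as Map

-- A field description: a name plus a setter that updates the config.
data Field cfg = Field
  { fName :: String
  , fSet  :: String -> cfg -> Either String cfg
  }

data Cfg = Cfg { verbosity :: Int, prefix :: FilePath } deriving Show

fields :: [Field Cfg]
fields =
  [ Field "verbosity" $ \v c -> case reads v of
      [(n, "")] -> Right c { verbosity = n }
      _         -> Left ("bad verbosity: " ++ v)
  , Field "prefix" $ \v c -> Right c { prefix = v }
  ]

-- Fold the parsed (name, value) pairs over an initial config value;
-- unknown fields are skipped here (the real code warns instead).
parseFlat :: [Field cfg] -> cfg -> [(String, String)] -> Either String cfg
parseFlat fs = foldM set
  where
    byName = Map.fromList [ (fName f, f) | f <- fs ]
    set c (k, v) = maybe (Right c) (\f -> fSet f v c) (Map.lookup k byName)

main :: IO ()
main = print (parseFlat fields (Cfg 0 "/usr/local")
                [("verbosity", "2"), ("unknown", "x")])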
--- -ppFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -ppFieldsAndSections fieldDescrs sectionDescrs val = - ppFields fieldDescrs Nothing val - $+$ - Disp.vcat - [ Disp.text "" $+$ sectionDoc - | SectionDescr { - sectionName, sectionGet, - sectionFields, sectionSubsections - } <- sectionDescrs - , (param, x) <- sectionGet val - , let sectionDoc = ppSectionAndSubsections - sectionName param - sectionFields sectionSubsections x - , not (Disp.isEmpty sectionDoc) - ] - --- | Unlike 'ppSection' which has to be called directly, this gets used via --- 'ppFieldsAndSections' and so does not need to be exported. --- -ppSectionAndSubsections :: String -> String - -> [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -ppSectionAndSubsections name arg fields sections cur - | Disp.isEmpty fieldsDoc = Disp.empty - | otherwise = Disp.text name <+> argDoc - $+$ (Disp.nest 2 fieldsDoc) - where - fieldsDoc = showConfig fields sections cur - argDoc | arg == "" = Disp.empty - | otherwise = Disp.text arg - - ------------------------------------------------ --- Top level config file parsing and printing --- - --- | Parse a string in the config file syntax into a value, based on a --- description of the configuration file in terms of its fields and sections. --- --- It accumulates the result on top of a given initial (typically empty) value. --- -parseConfig :: [FieldDescr a] -> [SectionDescr a] -> a - -> String -> ParseResult a -parseConfig fieldDescrs sectionDescrs empty str = - parseFieldsAndSections fieldDescrs sectionDescrs empty - =<< readFieldsFlat str - --- | Render a value in the config file syntax, based on a description of the --- configuration file in terms of its fields and sections. --- -showConfig :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -showConfig = ppFieldsAndSections - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PkgConfigDb.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PkgConfigDb.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PkgConfigDb.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PkgConfigDb.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,146 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.PkgConfigDb --- Copyright : (c) Iñaki García Etxebarria 2016 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Read the list of packages available to pkg-config. ------------------------------------------------------------------------------ -module Distribution.Client.PkgConfigDb - ( PkgConfigDb - , readPkgConfigDb - , pkgConfigDbFromList - , pkgConfigPkgIsPresent - , getPkgConfigDbDirs - ) where - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative ((<$>), (<*>)) -#endif - -import Control.Exception (IOException, handle) -import Data.Char (isSpace) -import qualified Data.Map as M -import Data.Version (parseVersion) -import Text.ParserCombinators.ReadP (readP_to_S) -import System.FilePath (splitSearchPath) - -import Distribution.Package - ( PackageName(..) 
) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Version - ( Version, VersionRange, withinRange ) - -import Distribution.Compat.Environment - ( lookupEnv ) -import Distribution.Simple.Program - ( ProgramConfiguration, pkgConfigProgram, getProgramOutput, - requireProgram ) -import Distribution.Simple.Utils - ( info ) - --- | The list of packages installed in the system visible to --- @pkg-config@. This is an opaque datatype, to be constructed with --- `readPkgConfigDb` and queried with `pkgConfigPkgPresent`. -data PkgConfigDb = PkgConfigDb (M.Map PackageName (Maybe Version)) - -- ^ If an entry is `Nothing`, this means that the - -- package seems to be present, but we don't know the - -- exact version (because parsing of the version - -- number failed). - | NoPkgConfigDb - -- ^ For when we could not run pkg-config successfully. - deriving (Show) - --- | Query pkg-config for the list of installed packages, together --- with their versions. Return a `PkgConfigDb` encapsulating this --- information. -readPkgConfigDb :: Verbosity -> ProgramConfiguration -> IO PkgConfigDb -readPkgConfigDb verbosity conf = handle ioErrorHandler $ do - (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram conf - pkgList <- lines <$> getProgramOutput verbosity pkgConfig ["--list-all"] - -- The output of @pkg-config --list-all@ also includes a description - -- for each package, which we do not need. - let pkgNames = map (takeWhile (not . isSpace)) pkgList - pkgVersions <- lines <$> getProgramOutput verbosity pkgConfig - ("--modversion" : pkgNames) - (return . pkgConfigDbFromList . zip pkgNames) pkgVersions - where - -- For when pkg-config invocation fails (possibly because of a - -- too long command line). - ioErrorHandler :: IOException -> IO PkgConfigDb - ioErrorHandler e = do - info verbosity ("Failed to query pkg-config, Cabal will continue" - ++ " without solving for pkg-config constraints: " - ++ show e) - return NoPkgConfigDb - --- | Create a `PkgConfigDb` from a list of @(packageName, version)@ pairs. -pkgConfigDbFromList :: [(String, String)] -> PkgConfigDb -pkgConfigDbFromList pairs = (PkgConfigDb . M.fromList . map convert) pairs - where - convert :: (String, String) -> (PackageName, Maybe Version) - convert (n,vs) = (PackageName n, - case (reverse . readP_to_S parseVersion) vs of - (v, "") : _ -> Just v - _ -> Nothing -- Version not (fully) - -- understood. - ) - --- | Check whether a given package range is satisfiable in the given --- @pkg-config@ database. -pkgConfigPkgIsPresent :: PkgConfigDb -> PackageName -> VersionRange -> Bool -pkgConfigPkgIsPresent (PkgConfigDb db) pn vr = - case M.lookup pn db of - Nothing -> False -- Package not present in the DB. - Just Nothing -> True -- Package present, but version unknown. - Just (Just v) -> withinRange v vr --- If we could not read the pkg-config database successfully we allow --- the check to succeed. The plan found by the solver may fail to be --- executed later on, but we have no grounds for rejecting the plan at --- this stage. -pkgConfigPkgIsPresent NoPkgConfigDb _ _ = True - - --- | Query pkg-config for the locations of pkg-config's package files. Use this --- to monitor for changes in the pkg-config DB. 
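The three-way logic of pkgConfigPkgIsPresent above (no usable database: assume present; database known but entry missing: absent; entry present with or without a parsed version) can be sketched with plain types. The Version alias and the range-check function below are illustrative stand-ins for Cabal's Version and VersionRange:

import qualified Data.Map as Map

type Version = [Int]   -- stand-in for Cabal's Version

-- Same shape as the real database: Nothing means "installed but version
-- unknown"; NoPkgConfigDb means pkg-config could not be queried at all.
data PkgConfigDb
  = PkgConfigDb (Map.Map String (Maybe Version))
  | NoPkgConfigDb

isPresent :: PkgConfigDb -> String -> (Version -> Bool) -> Bool
isPresent NoPkgConfigDb    _  _       = True   -- no info: don't reject the plan
isPresent (PkgConfigDb db) pn inRange =
  case Map.lookup pn db of
    Nothing       -> False   -- not installed at all
    Just Nothing  -> True    -- installed, version could not be parsed
    Just (Just v) -> inRange v

main :: IO ()
main = print (isPresent db "zlib" (>= [1, 2]))
  where db = PkgConfigDb (Map.fromList [("zlib", Just [1, 2, 11])])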
--- -getPkgConfigDbDirs :: Verbosity -> ProgramConfiguration -> IO [FilePath] -getPkgConfigDbDirs verbosity conf = - (++) <$> getEnvPath <*> getDefPath - where - -- According to @man pkg-config@: - -- - -- PKG_CONFIG_PATH - -- A colon-separated (on Windows, semicolon-separated) list of directories - -- to search for .pc files. The default directory will always be searched - -- after searching the path - -- - getEnvPath = maybe [] parseSearchPath - <$> lookupEnv "PKG_CONFIG_PATH" - - -- Again according to @man pkg-config@: - -- - -- pkg-config can be used to query itself for the default search path, - -- version number and other information, for instance using: - -- - -- > pkg-config --variable pc_path pkg-config - -- - getDefPath = handle ioErrorHandler $ do - (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram conf - parseSearchPath <$> - getProgramOutput verbosity pkgConfig - ["--variable", "pc_path", "pkg-config"] - - parseSearchPath str = - case lines str of - [p] | not (null p) -> splitSearchPath p - _ -> [] - - ioErrorHandler :: IOException -> IO [FilePath] - ioErrorHandler _e = return [] - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PlanIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PlanIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/PlanIndex.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/PlanIndex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,289 +0,0 @@ --- | These graph traversal functions mirror the ones in Cabal, but work with --- the more complete (and fine-grained) set of dependencies provided by --- PackageFixedDeps rather than only the library dependencies provided by --- PackageInstalled. -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE CPP #-} -module Distribution.Client.PlanIndex ( - -- * FakeMap and related operations - FakeMap - , fakeDepends - , fakeLookupUnitId - -- * Graph traversal functions - , brokenPackages - , dependencyCycles - , dependencyGraph - , dependencyInconsistencies - ) where - -import Prelude hiding (lookup) -import qualified Data.Map as Map -import qualified Data.Graph as Graph -import Data.Array ((!)) -import Data.Map (Map) -import Data.Maybe (isNothing) -import Data.Either (rights) - -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -#endif - -import Distribution.Package - ( PackageName(..), PackageIdentifier(..), UnitId(..) - , Package(..), packageName, packageVersion - ) -import Distribution.Version - ( Version ) - -import Distribution.Client.ComponentDeps (ComponentDeps) -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Client.Types - ( PackageFixedDeps(..) ) -import Distribution.Simple.PackageIndex - ( PackageIndex, allPackages, insert, lookupUnitId ) -import Distribution.Package - ( HasUnitId(..), PackageId ) - --- Note [FakeMap] ------------------ --- We'd like to use the PackageIndex defined in this module for cabal-install's --- InstallPlan. However, at the moment, this data structure is indexed by --- UnitId, which we don't know until after we've compiled a package --- (whereas InstallPlan needs to store not-compiled packages in the index.) 
--- Eventually, an UnitId will be calculatable prior to actually building --- the package, but at the moment, the "fake installed package ID map" is a --- workaround to solve this problem while reusing PackageIndex. The basic idea --- is that, since we don't know what an UnitId is beforehand, we just fake --- up one based on the package ID (it only needs to be unique for the particular --- install plan), and fill it out with the actual generated UnitId after --- the package is successfully compiled. --- --- However, there is a problem: in the index there may be references using the --- old package ID, which are now dangling if we update the UnitId. We --- could map over the entire index to update these pointers as well (a costly --- operation), but instead, we've chosen to parametrize a variety of important --- functions by a FakeMap, which records what a fake installed package ID was --- actually resolved to post-compilation. If we do a lookup, we first check and --- see if it's a fake ID in the FakeMap. --- --- It's a bit grungy, but we expect this to only be temporary anyway. (Another --- possible workaround would have been to *not* update the installed package ID, --- but I decided this would be hard to understand.) - --- | Map from fake package keys to real ones. See Note [FakeMap] -type FakeMap = Map UnitId UnitId - --- | Variant of `depends` which accepts a `FakeMap` --- --- Analogous to `fakeInstalledDepends`. See Note [FakeMap]. -fakeDepends :: PackageFixedDeps pkg => FakeMap -> pkg -> ComponentDeps [UnitId] -fakeDepends fakeMap = fmap (map resolveFakeId) . depends - where - resolveFakeId :: UnitId -> UnitId - resolveFakeId ipid = Map.findWithDefault ipid ipid fakeMap - ---- | Variant of 'lookupUnitId' which accepts a 'FakeMap'. See Note ---- [FakeMap]. -fakeLookupUnitId :: FakeMap -> PackageIndex a -> UnitId - -> Maybe a -fakeLookupUnitId fakeMap index pkg = - lookupUnitId index (Map.findWithDefault pkg pkg fakeMap) - --- | All packages that have dependencies that are not in the index. --- --- Returns such packages along with the dependencies that they're missing. --- -brokenPackages :: (PackageFixedDeps pkg) - => FakeMap - -> PackageIndex pkg - -> [(pkg, [UnitId])] -brokenPackages fakeMap index = - [ (pkg, missing) - | pkg <- allPackages index - , let missing = - [ pkg' | pkg' <- CD.flatDeps (depends pkg) - , isNothing (fakeLookupUnitId fakeMap index pkg') ] - , not (null missing) ] - --- | Compute all roots of the install plan, and verify that the transitive --- plans from those roots are all consistent. --- --- NOTE: This does not check for dependency cycles. Moreover, dependency cycles --- may be absent from the subplans even if the larger plan contains a dependency --- cycle. Such cycles may or may not be an issue; either way, we don't check --- for them here. -dependencyInconsistencies :: forall pkg. (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap - -> Bool - -> PackageIndex pkg - -> [(PackageName, [(PackageIdentifier, Version)])] -dependencyInconsistencies fakeMap indepGoals index = - concatMap (dependencyInconsistencies' fakeMap) subplans - where - subplans :: [PackageIndex pkg] - subplans = rights $ - map (dependencyClosure fakeMap index) - (rootSets fakeMap indepGoals index) - --- | Compute the root sets of a plan --- --- A root set is a set of packages whose dependency closure must be consistent. 
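The FakeMap machinery above ultimately boils down to one idiom: every UnitId lookup goes through Map.findWithDefault, so a provisional ("fake") id is rewritten to the real one once it is known, and ids without an entry pass through unchanged. A tiny self-contained illustration, with String ids standing in for real UnitIds:

import qualified Data.Map as Map

type UnitId  = String              -- stand-in for the real UnitId
type FakeMap = Map.Map UnitId UnitId

-- Rewrite a provisional id to the real one if we know it, otherwise
-- pass it through unchanged.
resolve :: FakeMap -> UnitId -> UnitId
resolve fakeMap uid = Map.findWithDefault uid uid fakeMap

main :: IO ()
main = do
  let fm = Map.fromList [("fake-foo-1.0", "foo-1.0-abc123")]
  print (map (resolve fm) ["fake-foo-1.0", "bar-2.0-def456"])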
--- This is the set of all top-level library roots (taken together normally, or --- as singletons sets if we are considering them as independent goals), along --- with all setup dependencies of all packages. -rootSets :: (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap -> Bool -> PackageIndex pkg -> [[UnitId]] -rootSets fakeMap indepGoals index = - if indepGoals then map (:[]) libRoots else [libRoots] - ++ setupRoots index - where - libRoots = libraryRoots fakeMap index - --- | Compute the library roots of a plan --- --- The library roots are the set of packages with no reverse dependencies --- (no reverse library dependencies but also no reverse setup dependencies). -libraryRoots :: (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap -> PackageIndex pkg -> [UnitId] -libraryRoots fakeMap index = - map toPkgId roots - where - (graph, toPkgId, _) = dependencyGraph fakeMap index - indegree = Graph.indegree graph - roots = filter isRoot (Graph.vertices graph) - isRoot v = indegree ! v == 0 - --- | The setup dependencies of each package in the plan -setupRoots :: PackageFixedDeps pkg => PackageIndex pkg -> [[UnitId]] -setupRoots = filter (not . null) - . map (CD.setupDeps . depends) - . allPackages - --- | Given a package index where we assume we want to use all the packages --- (use 'dependencyClosure' if you need to get such a index subset) find out --- if the dependencies within it use consistent versions of each package. --- Return all cases where multiple packages depend on different versions of --- some other package. --- --- Each element in the result is a package name along with the packages that --- depend on it and the versions they require. These are guaranteed to be --- distinct. --- -dependencyInconsistencies' :: forall pkg. - (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap - -> PackageIndex pkg - -> [(PackageName, [(PackageIdentifier, Version)])] -dependencyInconsistencies' fakeMap index = - [ (name, [ (pid,packageVersion dep) | (dep,pids) <- uses, pid <- pids]) - | (name, ipid_map) <- Map.toList inverseIndex - , let uses = Map.elems ipid_map - , reallyIsInconsistent (map fst uses) - ] - where - -- For each package name (of a dependency, somewhere) - -- and each installed ID of that that package - -- the associated package instance - -- and a list of reverse dependencies (as source IDs) - inverseIndex :: Map PackageName (Map UnitId (pkg, [PackageId])) - inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) - [ (packageName dep, Map.fromList [(ipid,(dep,[packageId pkg]))]) - | -- For each package @pkg@ - pkg <- allPackages index - -- Find out which @ipid@ @pkg@ depends on - , ipid <- CD.nonSetupDeps (fakeDepends fakeMap pkg) - -- And look up those @ipid@ (i.e., @ipid@ is the ID of @dep@) - , Just dep <- [fakeLookupUnitId fakeMap index ipid] - ] - - -- If, in a single install plan, we depend on more than one version of a - -- package, then this is ONLY okay in the (rather special) case that we - -- depend on precisely two versions of that package, and one of them - -- depends on the other. This is necessary for example for the base where - -- we have base-3 depending on base-4. 
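libraryRoots above identifies the plan's roots as the vertices with in-degree zero in the dependency graph. The same computation in miniature, applying Data.Graph directly to plain String ids (an assumption for brevity: the real code builds its graph via dependencyGraph and resolves ids through a FakeMap first):

import           Data.Array ((!))
import qualified Data.Graph as Graph

-- Roots are the vertices nothing depends on, i.e. in-degree zero.
roots :: [(String, [String])] -> [String]
roots deps = map name (filter isRoot (Graph.vertices graph))
  where
    (graph, fromVertex, _) =
      Graph.graphFromEdges [ (n, n, ds) | (n, ds) <- deps ]
    indeg    = Graph.indegree graph
    isRoot v = indeg ! v == 0
    name v   = case fromVertex v of (n, _, _) -> n

main :: IO ()
main = print (roots [("app", ["lib"]), ("lib", ["base"]), ("base", [])])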
- reallyIsInconsistent :: [pkg] -> Bool - reallyIsInconsistent [] = False - reallyIsInconsistent [_p] = False - reallyIsInconsistent [p1, p2] = - let pid1 = installedUnitId p1 - pid2 = installedUnitId p2 - in Map.findWithDefault pid1 pid1 fakeMap `notElem` CD.nonSetupDeps (fakeDepends fakeMap p2) - && Map.findWithDefault pid2 pid2 fakeMap `notElem` CD.nonSetupDeps (fakeDepends fakeMap p1) - reallyIsInconsistent _ = True - - - --- | Find if there are any cycles in the dependency graph. If there are no --- cycles the result is @[]@. --- --- This actually computes the strongly connected components. So it gives us a --- list of groups of packages where within each group they all depend on each --- other, directly or indirectly. --- -dependencyCycles :: (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap - -> PackageIndex pkg - -> [[pkg]] -dependencyCycles fakeMap index = - [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] - where - adjacencyList = [ (pkg, installedUnitId pkg, - CD.flatDeps (fakeDepends fakeMap pkg)) - | pkg <- allPackages index ] - - --- | Tries to take the transitive closure of the package dependencies. --- --- If the transitive closure is complete then it returns that subset of the --- index. Otherwise it returns the broken packages as in 'brokenPackages'. --- --- * Note that if the result is @Right []@ it is because at least one of --- the original given 'PackageIdentifier's do not occur in the index. -dependencyClosure :: (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap - -> PackageIndex pkg - -> [UnitId] - -> Either [(pkg, [UnitId])] - (PackageIndex pkg) -dependencyClosure fakeMap index pkgids0 = case closure mempty [] pkgids0 of - (completed, []) -> Right completed - (completed, _) -> Left (brokenPackages fakeMap completed) - where - closure completed failed [] = (completed, failed) - closure completed failed (pkgid:pkgids) = - case fakeLookupUnitId fakeMap index pkgid of - Nothing -> closure completed (pkgid:failed) pkgids - Just pkg -> - case fakeLookupUnitId fakeMap completed - (installedUnitId pkg) of - Just _ -> closure completed failed pkgids - Nothing -> closure completed' failed pkgids' - where completed' = insert pkg completed - pkgids' = CD.nonSetupDeps (depends pkg) ++ pkgids - - --- | Builds a graph of the package dependencies. --- --- Dependencies on other packages that are not in the index are discarded. --- You can check if there are any such dependencies with 'brokenPackages'. 
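dependencyCycles above reports cycles by computing strongly connected components and keeping only the cyclic ones. Here is that idea stripped to its core, with plain String ids standing in for packages:

import qualified Data.Graph as Graph

-- Strongly connected components; only the cyclic ones are reported.
cycles :: [(String, [String])] -> [[String]]
cycles deps =
  [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp
           [ (n, n, ds) | (n, ds) <- deps ] ]

main :: IO ()
main = print (cycles [("a", ["b"]), ("b", ["a"]), ("c", [])])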
--- -dependencyGraph :: (PackageFixedDeps pkg, HasUnitId pkg) - => FakeMap - -> PackageIndex pkg - -> (Graph.Graph, - Graph.Vertex -> UnitId, - UnitId -> Maybe Graph.Vertex) -dependencyGraph fakeMap index = (graph, vertexToPkg, idToVertex) - where - (graph, vertexToPkg', idToVertex) = Graph.graphFromEdges edges - vertexToPkg v = case vertexToPkg' v of - ((), pkgid, _targets) -> pkgid - - pkgs = allPackages index - edges = map edgesFrom pkgs - - resolve pid = Map.findWithDefault pid pid fakeMap - edgesFrom pkg = ( () - , resolve (installedUnitId pkg) - , CD.flatDeps (fakeDepends fakeMap pkg) - ) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectBuilding.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectBuilding.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectBuilding.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectBuilding.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1292 +0,0 @@ -{-# LANGUAGE CPP, BangPatterns, RecordWildCards, NamedFieldPuns, - DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, - ScopedTypeVariables #-} - --- | --- -module Distribution.Client.ProjectBuilding ( - BuildStatus(..), - BuildStatusMap, - BuildStatusRebuild(..), - BuildReason(..), - MonitorChangedReason(..), - rebuildTargetsDryRun, - rebuildTargets - ) where - -import Distribution.Client.PackageHash (renderPackageHashInputs) -import Distribution.Client.RebuildMonad -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanning - -import Distribution.Client.Types - ( PackageLocation(..), GenericReadyPackage(..) - , PackageFixedDeps(..) 
- , InstalledPackageId, installedPackageId ) -import Distribution.Client.InstallPlan - ( GenericInstallPlan, GenericPlanPackage ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Client.ComponentDeps (ComponentDeps) -import Distribution.Client.DistDirLayout -import Distribution.Client.FileMonitor -import Distribution.Client.SetupWrapper -import Distribution.Client.JobControl -import Distribution.Client.FetchUtils -import Distribution.Client.GlobalFlags (RepoContext) -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.Setup (filterConfigureFlags) -import Distribution.Client.SrcDist (allPackageSourceFiles) -import Distribution.Client.Utils (removeExistingFile) - -import Distribution.Package hiding (InstalledPackageId, installedPackageId) -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Simple.Program -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Command (CommandUI) -import qualified Distribution.Simple.Register as Cabal -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.LocalBuildInfo (ComponentName) - -import Distribution.Simple.Utils hiding (matchFileGlob) -import Distribution.Version -import Distribution.Verbosity -import Distribution.Text -import Distribution.ParseUtils ( showPWarning ) - -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.ByteString.Lazy as LBS - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import Control.Exception -import Control.Concurrent.Async -import Control.Concurrent.MVar -import Data.List -import Data.Maybe - -import System.FilePath -import System.IO -import System.Directory -import System.Exit (ExitCode) - - ------------------------------------------------------------------------------- --- * Overall building strategy. ------------------------------------------------------------------------------- --- --- We start with an 'ElaboratedInstallPlan' that has already been improved by --- reusing packages from the store. So the remaining packages in the --- 'InstallPlan.Configured' state are ones we either need to build or rebuild. --- --- First, we do a preliminary dry run phase where we work out which packages --- we really need to (re)build, and for the ones we do need to build which --- build phase to start at. - - ------------------------------------------------------------------------------- --- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute? ------------------------------------------------------------------------------- - --- We split things like this for a couple reasons. Firstly we need to be able --- to do dry runs, and these need to be reasonably accurate in terms of --- letting users know what (and why) things are going to be (re)built. --- --- Given that we need to be able to do dry runs, it would not be great if --- we had to repeat all the same work when we do it for real. Not only is --- it duplicate work, but it's duplicate code which is likely to get out of --- sync. So we do things only once. We preserve info we discover in the dry --- run phase and rely on it later when we build things for real. This also --- somewhat simplifies the build phase. 
So this way the dry run can't so --- easily drift out of sync with the real thing since we're relying on the --- info it produces. --- --- An additional advantage is that it makes it easier to debug rebuild --- errors (ie rebuilding too much or too little), since all the rebuild --- decisions are made without making any state changes at the same time --- (that would make it harder to reproduce the problem sitation). - - --- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan' --- -type BuildStatusMap = Map InstalledPackageId BuildStatus - --- | The build status for an individual package. That is, the state that the --- package is in prior to initiating a (re)build. --- --- It serves two purposes: --- --- * For dry-run output, it lets us explain to the user if and why a package --- is going to be (re)built. --- --- * It tell us what step to start or resume building from, and carries --- enough information for us to be able to do so. --- -data BuildStatus = - - -- | The package is in the 'InstallPlan.PreExisting' state, so does not - -- need building. - BuildStatusPreExisting - - -- | The package has not been downloaded yet, so it will have to be - -- downloaded, unpacked and built. - | BuildStatusDownload - - -- | The package has not been unpacked yet, so it will have to be - -- unpacked and built. - | BuildStatusUnpack FilePath - - -- | The package exists in a local dir already, and just needs building - -- or rebuilding. So this can only happen for 'BuildInplaceOnly' style - -- packages. - | BuildStatusRebuild FilePath BuildStatusRebuild - - -- | The package exists in a local dir already, and is fully up to date. - -- So this package can be put into the 'InstallPlan.Installed' state - -- and it does not need to be built. - | BuildStatusUpToDate (Maybe InstalledPackageInfo) BuildSuccess - --- | For a package that is going to be built or rebuilt, the state it's in now. --- --- So again, this tells us why a package needs to be rebuilt and what build --- phases need to be run. The 'MonitorChangedReason' gives us details like --- which file changed, which is mainly for high verbosity debug output. --- -data BuildStatusRebuild = - - -- | The package configuration changed, so the configure and build phases - -- needs to be (re)run. - BuildStatusConfigure (MonitorChangedReason ()) - - -- | The configuration has not changed but the build phase needs to be - -- rerun. We record the reason the (re)build is needed. - -- - -- The optional registration info here tells us if we've registered the - -- package already, or if we stil need to do that after building. - -- - | BuildStatusBuild (Maybe (Maybe InstalledPackageInfo)) BuildReason - -data BuildReason = - -- | The depencencies of this package have been (re)built so the build - -- phase needs to be rerun. - -- - -- The optional registration info here tells us if we've registered the - -- package already, or if we stil need to do that after building. - -- - BuildReasonDepsRebuilt - - -- | Changes in files within the package (or first run or corrupt cache) - | BuildReasonFilesChanged (MonitorChangedReason ()) - - -- | An important special case is that no files have changed but the - -- set of components the /user asked to build/ has changed. We track the - -- set of components /we have built/, which of course only grows (until - -- some other change resets it). - -- - -- The @Set 'ComponentName'@ is the set of components we have built - -- previously. 
When we update the monitor we take the union of the ones - -- we have built previously with the ones the user has asked for this - -- time and save those. See 'updatePackageBuildFileMonitor'. - -- - | BuildReasonExtraTargets (Set ComponentName) - - -- | Although we're not going to build any additional targets as a whole, - -- we're going to build some part of a component or run a repl or any - -- other action that does not result in additional persistent artifacts. - -- - | BuildReasonEphemeralTargets - --- | Which 'BuildStatus' values indicate we'll have to do some build work of --- some sort. In particular we use this as part of checking if any of a --- package's deps have changed. --- -buildStatusRequiresBuild :: BuildStatus -> Bool -buildStatusRequiresBuild BuildStatusPreExisting = False -buildStatusRequiresBuild BuildStatusUpToDate {} = False -buildStatusRequiresBuild _ = True - --- | Do the dry run pass. This is a prerequisite of 'rebuildTargets'. --- --- It gives us the 'BuildStatusMap' and also gives us an improved version of --- the 'ElaboratedInstallPlan' with packages switched to the --- 'InstallPlan.Installed' state when we find that they're already up to date. --- -rebuildTargetsDryRun :: DistDirLayout - -> ElaboratedInstallPlan - -> IO (ElaboratedInstallPlan, BuildStatusMap) -rebuildTargetsDryRun distDirLayout@DistDirLayout{..} = \installPlan -> do - - -- Do the various checks to work out the 'BuildStatus' of each package - pkgsBuildStatus <- foldMInstallPlanDepOrder installPlan dryRunPkg - - -- For 'BuildStatusUpToDate' packages, improve the plan by marking them as - -- 'InstallPlan.Installed'. - let installPlan' = improveInstallPlanWithUpToDatePackages - installPlan pkgsBuildStatus - - return (installPlan', pkgsBuildStatus) - where - dryRunPkg :: ElaboratedPlanPackage - -> ComponentDeps [BuildStatus] - -> IO BuildStatus - dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus = - return BuildStatusPreExisting - - dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do - mloc <- checkFetched (pkgSourceLocation pkg) - case mloc of - Nothing -> return BuildStatusDownload - - Just (LocalUnpackedPackage srcdir) -> - -- For the case of a user-managed local dir, irrespective of the - -- build style, we build from that directory and put build - -- artifacts under the shared dist directory. - dryRunLocalPkg pkg depsBuildStatus srcdir - - -- The three tarball cases are handled the same as each other, - -- though depending on the build style. 
- Just (LocalTarballPackage tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - Just (RemoteTarballPackage _ tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - Just (RepoTarballPackage _ _ tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - dryRunPkg (InstallPlan.Processing {}) _ = unexpectedState - dryRunPkg (InstallPlan.Installed {}) _ = unexpectedState - dryRunPkg (InstallPlan.Failed {}) _ = unexpectedState - - unexpectedState = error "rebuildTargetsDryRun: unexpected package state" - - dryRunTarballPkg :: ElaboratedConfiguredPackage - -> ComponentDeps [BuildStatus] - -> FilePath - -> IO BuildStatus - dryRunTarballPkg pkg depsBuildStatus tarball = - case pkgBuildStyle pkg of - BuildAndInstall -> return (BuildStatusUnpack tarball) - BuildInplaceOnly -> do - -- TODO: [nice to have] use a proper file monitor rather than this dir exists test - exists <- doesDirectoryExist srcdir - if exists - then dryRunLocalPkg pkg depsBuildStatus srcdir - else return (BuildStatusUnpack tarball) - where - srcdir = distUnpackedSrcDirectory (packageId pkg) - - dryRunLocalPkg :: ElaboratedConfiguredPackage - -> ComponentDeps [BuildStatus] - -> FilePath - -> IO BuildStatus - dryRunLocalPkg pkg depsBuildStatus srcdir = do - -- Go and do lots of I/O, reading caches and probing files to work out - -- if anything has changed - change <- checkPackageFileMonitorChanged - packageFileMonitor pkg srcdir depsBuildStatus - case change of - -- It did change, giving us 'BuildStatusRebuild' info on why - Left rebuild -> - return (BuildStatusRebuild srcdir rebuild) - - -- No changes, the package is up to date. Use the saved build results. - Right (mipkg, buildSuccess) -> - return (BuildStatusUpToDate mipkg buildSuccess) - where - packageFileMonitor = - newPackageFileMonitor distDirLayout (packageId pkg) - - --- | A specialised traversal over the packages in an install plan. --- --- The packages are visited in dependency order, starting with packages with no --- depencencies. The result for each package is accumulated into a 'Map' and --- returned as the final result. In addition, when visting a package, the --- visiting function is passed the results for all the immediate package --- depencencies. This can be used to propagate information from depencencies. --- -foldMInstallPlanDepOrder - :: forall m ipkg srcpkg iresult ifailure b. 
- (Monad m, - HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => GenericInstallPlan ipkg srcpkg iresult ifailure - -> (GenericPlanPackage ipkg srcpkg iresult ifailure -> - ComponentDeps [b] -> m b) - -> m (Map InstalledPackageId b) -foldMInstallPlanDepOrder plan0 visit = - go Map.empty (InstallPlan.reverseTopologicalOrder plan0) - where - go :: Map InstalledPackageId b - -> [GenericPlanPackage ipkg srcpkg iresult ifailure] - -> m (Map InstalledPackageId b) - go !results [] = return results - - go !results (pkg : pkgs) = do - -- we go in the right order so the results map has entries for all deps - let depresults :: ComponentDeps [b] - depresults = - fmap (map (\ipkgid -> let Just result = Map.lookup ipkgid results - in result)) - (depends pkg) - result <- visit pkg depresults - let results' = Map.insert (installedPackageId pkg) result results - go results' pkgs - -improveInstallPlanWithUpToDatePackages :: ElaboratedInstallPlan - -> BuildStatusMap - -> ElaboratedInstallPlan -improveInstallPlanWithUpToDatePackages installPlan pkgsBuildStatus = - replaceWithPreInstalled installPlan - [ (installedPackageId pkg, mipkg, buildSuccess) - | InstallPlan.Configured pkg - <- InstallPlan.reverseTopologicalOrder installPlan - , let ipkgid = installedPackageId pkg - Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus - , BuildStatusUpToDate mipkg buildSuccess <- [pkgBuildStatus] - ] - where - replaceWithPreInstalled = - foldl' (\plan (ipkgid, mipkg, buildSuccess) -> - InstallPlan.preinstalled ipkgid mipkg buildSuccess plan) - - ------------------------------ --- Package change detection --- - --- | As part of the dry run for local unpacked packages we have to check if the --- package config or files have changed. That is the purpose of --- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'. --- --- When a package is (re)built, the monitor must be updated to reflect the new --- state of the package. Because we sometimes build without reconfiguring the --- state updates are split into two, one for package config changes and one --- for other changes. This is the purpose of 'updatePackageConfigFileMonitor' --- and 'updatePackageBuildFileMonitor'. --- -data PackageFileMonitor = PackageFileMonitor { - pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (), - pkgFileMonitorBuild :: FileMonitor (Set ComponentName) BuildSuccess, - pkgFileMonitorReg :: FileMonitor () (Maybe InstalledPackageInfo) - } - -newPackageFileMonitor :: DistDirLayout -> PackageId -> PackageFileMonitor -newPackageFileMonitor DistDirLayout{distPackageCacheFile} pkgid = - PackageFileMonitor { - pkgFileMonitorConfig = - newFileMonitor (distPackageCacheFile pkgid "config"), - - pkgFileMonitorBuild = - FileMonitor { - fileMonitorCacheFile = distPackageCacheFile pkgid "build", - fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt -> - componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt, - fileMonitorCheckIfOnlyValueChanged = True - }, - - pkgFileMonitorReg = - newFileMonitor (distPackageCacheFile pkgid "registration") - } - --- | Helper function for 'checkPackageFileMonitorChanged', --- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'. --- --- It selects the info from a 'ElaboratedConfiguredPackage' that are used by --- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes. 
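foldMInstallPlanDepOrder above visits packages in an order where dependencies come first, so that by the time a package is visited the results for all of its dependencies are already in the accumulating Map. A simplified, self-contained version of that traversal (here the node names and the dependencies-first ordering are simply given as a list, whereas the real code derives them from the install plan):

import           Control.Monad (foldM)
import qualified Data.Map as Map

-- Visit nodes dependencies-first, handing each visit the results
-- already computed for its dependencies.
foldDepOrder :: Monad m
             => [(String, [String])]        -- nodes listed dependencies-first
             -> (String -> [b] -> m b)      -- visit a node given its deps' results
             -> m (Map.Map String b)
foldDepOrder nodes visit = foldM step Map.empty nodes
  where
    step results (name, deps) = do
      let depResults = [ results Map.! d | d <- deps ]
      r <- visit name depResults
      return (Map.insert name r results)

main :: IO ()
main = do
  m <- foldDepOrder [("base", []), ("lib", ["base"]), ("app", ["lib"])]
         (\n ds -> do putStrLn ("visiting " ++ n)
                      return (1 + sum ds :: Int))
  print m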
--- -packageFileMonitorKeyValues :: ElaboratedConfiguredPackage - -> (ElaboratedConfiguredPackage, Set ComponentName) -packageFileMonitorKeyValues pkg = - (pkgconfig, buildComponents) - where - -- The first part is the value used to guard (re)configuring the package. - -- That is, if this value changes then we will reconfigure. - -- The ElaboratedConfiguredPackage consists mostly (but not entirely) of - -- information that affects the (re)configure step. But those parts that - -- do not affect the configure step need to be nulled out. Those parts are - -- the specific targets that we're going to build. - -- - pkgconfig = pkg { - pkgBuildTargets = [], - pkgReplTarget = Nothing, - pkgBuildHaddocks = False - } - - -- The second part is the value used to guard the build step. So this is - -- more or less the opposite of the first part, as it's just the info about - -- what targets we're going to build. - -- - buildComponents = pkgBuildTargetWholeComponents pkg - --- | Do all the checks on whether a package has changed and thus needs either --- rebuilding or reconfiguring and rebuilding. --- -checkPackageFileMonitorChanged :: PackageFileMonitor - -> ElaboratedConfiguredPackage - -> FilePath - -> ComponentDeps [BuildStatus] - -> IO (Either BuildStatusRebuild - (Maybe InstalledPackageInfo, - BuildSuccess)) -checkPackageFileMonitorChanged PackageFileMonitor{..} - pkg srcdir depsBuildStatus = do - --TODO: [nice to have] some debug-level message about file changes, like rerunIfChanged - configChanged <- checkFileMonitorChanged - pkgFileMonitorConfig srcdir pkgconfig - case configChanged of - MonitorChanged monitorReason -> - return (Left (BuildStatusConfigure monitorReason')) - where - monitorReason' = fmap (const ()) monitorReason - - MonitorUnchanged () _ - -- The configChanged here includes the identity of the dependencies, - -- so depsBuildStatus is just needed for the changes in the content - -- of depencencies. - | any buildStatusRequiresBuild (CD.flatDeps depsBuildStatus) -> do - regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir () - let mreg = changedToMaybe regChanged - return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt)) - - | otherwise -> do - buildChanged <- checkFileMonitorChanged - pkgFileMonitorBuild srcdir buildComponents - regChanged <- checkFileMonitorChanged - pkgFileMonitorReg srcdir () - let mreg = changedToMaybe regChanged - case (buildChanged, regChanged) of - (MonitorChanged (MonitoredValueChanged prevBuildComponents), _) -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonExtraTargets prevBuildComponents - - (MonitorChanged monitorReason, _) -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonFilesChanged monitorReason' - monitorReason' = fmap (const ()) monitorReason - - (MonitorUnchanged _ _, MonitorChanged monitorReason) -> - -- this should only happen if the file is corrupt or been - -- manually deleted. We don't want to bother with another - -- phase just for this, so we'll reregister by doing a build. 
- return (Left (BuildStatusBuild Nothing buildReason)) - where - buildReason = BuildReasonFilesChanged monitorReason' - monitorReason' = fmap (const ()) monitorReason - - (MonitorUnchanged _ _, MonitorUnchanged _ _) - | pkgHasEphemeralBuildTargets pkg -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonEphemeralTargets - - (MonitorUnchanged buildSuccess _, MonitorUnchanged mipkg _) -> - return (Right (mipkg, buildSuccess)) - where - (pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg - changedToMaybe (MonitorChanged _) = Nothing - changedToMaybe (MonitorUnchanged x _) = Just x - - -updatePackageConfigFileMonitor :: PackageFileMonitor - -> FilePath - -> ElaboratedConfiguredPackage - -> IO () -updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig} - srcdir pkg = - updateFileMonitor pkgFileMonitorConfig srcdir Nothing - [] pkgconfig () - where - (pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg - -updatePackageBuildFileMonitor :: PackageFileMonitor - -> FilePath - -> MonitorTimestamp - -> ElaboratedConfiguredPackage - -> BuildStatusRebuild - -> [FilePath] - -> BuildSuccess - -> IO () -updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild} - srcdir timestamp pkg pkgBuildStatus - allSrcFiles buildSuccess = - updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp) - (map monitorFileHashed allSrcFiles) - buildComponents' buildSuccess - where - (_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg - - -- If the only thing that's changed is that we're now building extra - -- components, then we can avoid later unnecessary rebuilds by saving the - -- total set of components that have been built, namely the union of the - -- existing ones plus the new ones. If files also changed this would be - -- the wrong thing to do. Note that we rely on the - -- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee - -- that it's /only/ the value that changed not any files that changed. - buildComponents' = - case pkgBuildStatus of - BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents) - -> buildComponents `Set.union` prevBuildComponents - _ -> buildComponents - -updatePackageRegFileMonitor :: PackageFileMonitor - -> FilePath - -> Maybe InstalledPackageInfo - -> IO () -updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} - srcdir mipkg = - updateFileMonitor pkgFileMonitorReg srcdir Nothing - [] () mipkg - -invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO () -invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} = - removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg) - - ------------------------------------------------------------------------------- --- * Doing it: executing an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------- - - --- | Build things for real. --- --- It requires the 'BuildStatusMap' gatthered by 'rebuildTargetsDryRun'. --- -rebuildTargets :: Verbosity - -> DistDirLayout - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> BuildStatusMap - -> BuildTimeSettings - -> IO ElaboratedInstallPlan -rebuildTargets verbosity - distDirLayout@DistDirLayout{..} - installPlan - sharedPackageConfig - pkgsBuildStatus - buildSettings@BuildTimeSettings{buildSettingNumJobs} = do - - -- Concurrency control: create the job controller and concurrency limits - -- for downloading, building and installing. 
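rebuildTargets above switches between a parallel and a serial job controller depending on the requested number of jobs. A rough sketch of that decision, using the async package's mapConcurrently rather than cabal-install's own JobControl/JobLimit machinery (which additionally caps how many jobs run at once):

import Control.Concurrent.Async (mapConcurrently)

-- Parallel when at least two jobs were requested, serial otherwise.
runJobs :: Int -> (a -> IO b) -> [a] -> IO [b]
runJobs numJobs action xs
  | numJobs >= 2 = mapConcurrently action xs
  | otherwise    = mapM action xs

main :: IO ()
main = do
  rs <- runJobs 2 (\n -> return (n * n)) [1 .. 5 :: Int]
  print rs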
- jobControl <- if isParallelBuild then newParallelJobControl - else newSerialJobControl - buildLimit <- newJobLimit buildSettingNumJobs - installLock <- newLock -- serialise installation - cacheLock <- newLock -- serialise access to setup exe cache - --TODO: [code cleanup] eliminate setup exe cache - - createDirectoryIfMissingVerbose verbosity False distBuildRootDirectory - createDirectoryIfMissingVerbose verbosity False distTempDirectory - - -- Before traversing the install plan, pre-emptively find all packages that - -- will need to be downloaded and start downloading them. - asyncDownloadPackages verbosity withRepoCtx - installPlan pkgsBuildStatus $ \downloadMap -> - - -- For each package in the plan, in dependency order, but in parallel... - executeInstallPlan verbosity jobControl installPlan $ \pkg -> - handle (return . BuildFailure) $ --TODO: review exception handling - - let ipkgid = installedPackageId pkg - Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus in - - rebuildTarget - verbosity - distDirLayout - buildSettings downloadMap - buildLimit installLock cacheLock - sharedPackageConfig - pkg - pkgBuildStatus - where - isParallelBuild = buildSettingNumJobs >= 2 - withRepoCtx = projectConfigWithBuilderRepoContext verbosity - buildSettings - --- | Given all the context and resources, (re)build an individual package. --- -rebuildTarget :: Verbosity - -> DistDirLayout - -> BuildTimeSettings - -> AsyncDownloadMap - -> JobLimit -> Lock -> Lock - -> ElaboratedSharedConfig - -> ElaboratedReadyPackage - -> BuildStatus - -> IO BuildResult -rebuildTarget verbosity - distDirLayout@DistDirLayout{distBuildDirectory} - buildSettings downloadMap - buildLimit installLock cacheLock - sharedPackageConfig - rpkg@(ReadyPackage pkg _) - pkgBuildStatus = - - -- We rely on the 'BuildStatus' to decide which phase to start from: - case pkgBuildStatus of - BuildStatusDownload -> downloadPhase - BuildStatusUnpack tarball -> unpackTarballPhase tarball - BuildStatusRebuild srcdir status -> rebuildPhase status srcdir - - -- TODO: perhaps re-nest the types to make these impossible - BuildStatusPreExisting {} -> unexpectedState - BuildStatusUpToDate {} -> unexpectedState - where - unexpectedState = error "rebuildTarget: unexpected package status" - - downloadPhase = do - downsrcloc <- waitAsyncPackageDownload verbosity downloadMap pkg - case downsrcloc of - DownloadedTarball tarball -> unpackTarballPhase tarball - --TODO: [nice to have] git/darcs repos etc - - - unpackTarballPhase tarball = - withJobLimit buildLimit $ - withTarballLocalDirectory - verbosity distDirLayout tarball - (packageId pkg) (pkgBuildStyle pkg) - (pkgDescriptionOverride pkg) $ - - case pkgBuildStyle pkg of - BuildAndInstall -> buildAndInstall - BuildInplaceOnly -> buildInplace buildStatus - where - buildStatus = BuildStatusConfigure MonitorFirstRun - - -- Note that this really is rebuild, not build. It can only happen for - -- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages - -- would only start from download or unpack phases. 
- -- - rebuildPhase buildStatus srcdir = - assert (pkgBuildStyle pkg == BuildInplaceOnly) $ - - withJobLimit buildLimit $ - buildInplace buildStatus srcdir builddir - where - builddir = distBuildDirectory (packageId pkg) - - buildAndInstall srcdir builddir = - buildAndInstallUnpackedPackage - verbosity distDirLayout - buildSettings installLock cacheLock - sharedPackageConfig - rpkg - srcdir builddir' - where - builddir' = makeRelative srcdir builddir - --TODO: [nice to have] ^^ do this relative stuff better - - buildInplace buildStatus srcdir builddir = - --TODO: [nice to have] use a relative build dir rather than absolute - buildInplaceUnpackedPackage - verbosity distDirLayout - buildSettings cacheLock - sharedPackageConfig - rpkg - buildStatus - srcdir builddir - ---TODO: [nice to have] do we need to use a with-style for the temp files for downloading http --- packages, or are we going to cache them persistently? - -type AsyncDownloadMap = Map (PackageLocation (Maybe FilePath)) - (MVar DownloadedSourceLocation) - -data DownloadedSourceLocation = DownloadedTarball FilePath - --TODO: [nice to have] git/darcs repos etc - -downloadedSourceLocation :: PackageLocation FilePath - -> Maybe DownloadedSourceLocation -downloadedSourceLocation pkgloc = - case pkgloc of - RemoteTarballPackage _ tarball -> Just (DownloadedTarball tarball) - RepoTarballPackage _ _ tarball -> Just (DownloadedTarball tarball) - _ -> Nothing - --- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the --- packages we have to download and fork off an async action to download them. --- We download them in dependency order so that the one's we'll need --- first are the ones we will start downloading first. --- --- The body action is passed a map from those packages (identified by their --- location) to a completion var for that package. So the body action should --- lookup the location and use 'waitAsyncPackageDownload' to get the result. --- -asyncDownloadPackages :: Verbosity - -> ((RepoContext -> IO ()) -> IO ()) - -> ElaboratedInstallPlan - -> BuildStatusMap - -> (AsyncDownloadMap -> IO a) - -> IO a -asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body - | null pkgsToDownload = body Map.empty - | otherwise = do - --TODO: [research required] use parallel downloads? if so, use the fetchLimit - - asyncDownloadVars <- mapM (\loc -> (,) loc <$> newEmptyMVar) pkgsToDownload - - let downloadAction :: IO () - downloadAction = - withRepoCtx $ \repoctx -> - forM_ asyncDownloadVars $ \(pkgloc, var) -> do - Just scrloc <- downloadedSourceLocation <$> - fetchPackage verbosity repoctx pkgloc - putMVar var scrloc - - withAsync downloadAction $ \_ -> - body (Map.fromList asyncDownloadVars) - where - pkgsToDownload = - [ pkgSourceLocation pkg - | InstallPlan.Configured pkg - <- InstallPlan.reverseTopologicalOrder installPlan - , let ipkgid = installedPackageId pkg - Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus - , BuildStatusDownload <- [pkgBuildStatus] - ] - - --- | Check if a package needs downloading, and if so expect to find a download --- in progress in the given 'AsyncDownloadMap' and wait on it to finish. 
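asyncDownloadPackages and waitAsyncPackageDownload above coordinate through a map from package locations to MVars: a background thread fills each MVar as its download completes, and a builder blocks on takeMVar for the package it needs. A minimal model of that handshake, with pretend downloads that merely produce a path (all names here are illustrative):

import           Control.Concurrent.Async (withAsync)
import           Control.Concurrent.MVar
import qualified Data.Map as Map

main :: IO ()
main = do
  let locations = ["pkg-a.tar.gz", "pkg-b.tar.gz"]
  vars <- mapM (\loc -> (,) loc <$> newEmptyMVar) locations
  let downloadMap = Map.fromList vars
      -- Pretend download: fill each MVar with the fetched path.
      downloadAll = mapM_ (\(loc, var) -> putMVar var ("/tmp/" ++ loc)) vars
  withAsync downloadAll $ \_ -> do
    -- A builder that needs pkg-b blocks here until its download is done.
    path <- takeMVar (downloadMap Map.! "pkg-b.tar.gz")
    putStrLn ("downloaded to " ++ path)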
--- -waitAsyncPackageDownload :: Verbosity - -> AsyncDownloadMap - -> ElaboratedConfiguredPackage - -> IO DownloadedSourceLocation -waitAsyncPackageDownload verbosity downloadMap pkg = - case Map.lookup (pkgSourceLocation pkg) downloadMap of - Just hnd -> do - debug verbosity $ - "Waiting for download of " ++ display (packageId pkg) ++ " to finish" - --TODO: [required eventually] do the exception handling on download stuff - takeMVar hnd - Nothing -> - fail "waitAsyncPackageDownload: package not being download" - - -executeInstallPlan - :: forall ipkg srcpkg iresult. - (HasUnitId ipkg, PackageFixedDeps ipkg, - HasUnitId srcpkg, PackageFixedDeps srcpkg) - => Verbosity - -> JobControl IO ( GenericReadyPackage srcpkg ipkg - , GenericBuildResult ipkg iresult BuildFailure ) - -> GenericInstallPlan ipkg srcpkg iresult BuildFailure - -> ( GenericReadyPackage srcpkg ipkg - -> IO (GenericBuildResult ipkg iresult BuildFailure)) - -> IO (GenericInstallPlan ipkg srcpkg iresult BuildFailure) -executeInstallPlan verbosity jobCtl plan0 installPkg = - tryNewTasks 0 plan0 - where - tryNewTasks taskCount plan = do - case InstallPlan.ready plan of - [] | taskCount == 0 -> return plan - | otherwise -> waitForTasks taskCount plan - pkgs -> do - sequence_ - [ do debug verbosity $ "Ready to install " ++ display pkgid - spawnJob jobCtl $ do - buildResult <- installPkg pkg - return (pkg, buildResult) - | pkg <- pkgs - , let pkgid = packageId pkg - ] - - let taskCount' = taskCount + length pkgs - plan' = InstallPlan.processing pkgs plan - waitForTasks taskCount' plan' - - waitForTasks taskCount plan = do - debug verbosity $ "Waiting for install task to finish..." - (pkg, buildResult) <- collectJob jobCtl - let taskCount' = taskCount-1 - plan' = updatePlan pkg buildResult plan - tryNewTasks taskCount' plan' - - updatePlan :: GenericReadyPackage srcpkg ipkg - -> GenericBuildResult ipkg iresult BuildFailure - -> GenericInstallPlan ipkg srcpkg iresult BuildFailure - -> GenericInstallPlan ipkg srcpkg iresult BuildFailure - updatePlan pkg (BuildSuccess mipkg buildSuccess) = - InstallPlan.completed (installedPackageId pkg) mipkg buildSuccess - - updatePlan pkg (BuildFailure buildFailure) = - InstallPlan.failed (installedPackageId pkg) buildFailure depsFailure - where - depsFailure = DependentFailed (packageId pkg) - -- So this first pkgid failed for whatever reason (buildFailure). - -- All the other packages that depended on this pkgid, which we - -- now cannot build, we mark as failing due to 'DependentFailed' - -- which kind of means it was not their fault. - - --- | Ensure that the package is unpacked in an appropriate directory, either --- a temporary one or a persistent one under the shared dist directory. --- -withTarballLocalDirectory - :: Verbosity - -> DistDirLayout - -> FilePath - -> PackageId - -> BuildStyle - -> Maybe CabalFileText - -> (FilePath -> FilePath -> IO a) - -> IO a -withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..} - tarball pkgid buildstyle pkgTextOverride - buildPkg = - case buildstyle of - -- In this case we make a temp dir, unpack the tarball to there and - -- build and install it from that temp dir. 
- BuildAndInstall -> - withTempDirectory verbosity distTempDirectory - (display (packageName pkgid)) $ \tmpdir -> do - unpackPackageTarball verbosity tarball tmpdir - pkgid pkgTextOverride - let srcdir = tmpdir display pkgid - builddir = srcdir "dist" - buildPkg srcdir builddir - - -- In this case we make sure the tarball has been unpacked to the - -- appropriate location under the shared dist dir, and then build it - -- inplace there - BuildInplaceOnly -> do - let srcrootdir = distUnpackedSrcRootDirectory - srcdir = distUnpackedSrcDirectory pkgid - builddir = distBuildDirectory pkgid - -- TODO: [nice to have] use a proper file monitor rather than this dir exists test - exists <- doesDirectoryExist srcdir - unless exists $ do - createDirectoryIfMissingVerbose verbosity False srcrootdir - unpackPackageTarball verbosity tarball srcrootdir - pkgid pkgTextOverride - moveTarballShippedDistDirectory verbosity distDirLayout - srcrootdir pkgid - buildPkg srcdir builddir - - -unpackPackageTarball :: Verbosity -> FilePath -> FilePath - -> PackageId -> Maybe CabalFileText - -> IO () -unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride = - --TODO: [nice to have] switch to tar package and catch tar exceptions - annotateFailure UnpackFailed $ do - - -- Unpack the tarball - -- - info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..." - Tar.extractTarGzFile parentdir pkgsubdir tarball - - -- Sanity check - -- - exists <- doesFileExist cabalFile - when (not exists) $ - die $ "Package .cabal file not found in the tarball: " ++ cabalFile - - -- Overwrite the .cabal with the one from the index, when appropriate - -- - case pkgTextOverride of - Nothing -> return () - Just pkgtxt -> do - info verbosity $ "Updating " ++ display pkgname <.> "cabal" - ++ " with the latest revision from the index." - writeFileAtomic cabalFile pkgtxt - - where - cabalFile = parentdir pkgsubdir - display pkgname <.> "cabal" - pkgsubdir = display pkgid - pkgname = packageName pkgid - - --- | This is a bit of a hacky workaround. A number of packages ship --- pre-processed .hs files in a dist directory inside the tarball. We don't --- use the standard 'dist' location so unless we move this dist dir to the --- right place then we'll miss the shipped pre-procssed files. This hacky --- approach to shipped pre-procssed files ought to be replaced by a proper --- system, though we'll still need to keep this hack for older packages. 
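The unpack step above assembles srcdir, builddir and cabalFile from a parent directory, the package subdirectory and the package name; in System.FilePath terms those joins are written with the </> and <.> combinators. A tiny illustration with made-up paths and a hypothetical helper:

import System.FilePath ((</>), (<.>))

-- Hypothetical helper showing how the path pieces combine.
cabalFileFor :: FilePath -> FilePath -> String -> FilePath
cabalFileFor parentdir pkgsubdir pkgname =
  parentdir </> pkgsubdir </> pkgname <.> "cabal"

main :: IO ()
main = putStrLn (cabalFileFor "/tmp/extract" "async-2.1.0" "async")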
--- -moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout - -> FilePath -> PackageId -> IO () -moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory} - parentdir pkgid = do - distDirExists <- doesDirectoryExist tarballDistDir - when distDirExists $ do - debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '" - ++ targetDistDir ++ "'" - --TODO: [nice to have] or perhaps better to copy, and use a file monitor - renameDirectory tarballDistDir targetDistDir - where - tarballDistDir = parentdir </> display pkgid </> "dist" - targetDistDir = distBuildDirectory pkgid - - -buildAndInstallUnpackedPackage :: Verbosity - -> DistDirLayout - -> BuildTimeSettings -> Lock -> Lock - -> ElaboratedSharedConfig - -> ElaboratedReadyPackage - -> FilePath -> FilePath - -> IO BuildResult -buildAndInstallUnpackedPackage verbosity - DistDirLayout{distTempDirectory} - BuildTimeSettings { - buildSettingNumJobs, - buildSettingLogFile - } - installLock cacheLock - pkgshared@ElaboratedSharedConfig { - pkgConfigPlatform = platform, - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } - rpkg@(ReadyPackage pkg _deps) - srcdir builddir = do - - createDirectoryIfMissingVerbose verbosity False builddir - initLogFile - - --TODO: [code cleanup] deal consistently with talking to older Setup.hs versions, much like - -- we do for ghc, with a proper options type and rendering step - -- which will also let us call directly into the lib, rather than always - -- going via the lib's command line interface, which would also allow - -- passing data like installed packages, compiler, and program db for a - -- quicker configure. - - --TODO: [required feature] docs and tests - --TODO: [required feature] sudo re-exec - - -- Configure phase - when isParallelBuild $ - notice verbosity $ "Configuring " ++ display pkgid ++ "..." - annotateFailure ConfigureFailed $ - setup configureCommand configureFlags - - -- Build phase - when isParallelBuild $ - notice verbosity $ "Building " ++ display pkgid ++ "..." - annotateFailure BuildFailed $ - setup buildCommand buildFlags - - -- Install phase - mipkg <- - criticalSection installLock $ - annotateFailure InstallFailed $ do - --TODO: [research required] do we need the installLock for copying? can we not do that in - -- parallel? Isn't it just registering that we have to lock for? - - --TODO: [required eventually] need to lock installing this ipkg so other processes don't - -- stomp on our files, since we don't have ABI compat, not safe to replace - - -- TODO: [required eventually] note that for nix-style installations it is not necessary to do - -- the 'withWin32SelfUpgrade' dance, but it would be necessary for a - -- shared bin dir. - - -- Actual installation - setup Cabal.copyCommand copyFlags - - LBS.writeFile - (InstallDirs.prefix (pkgInstallDirs pkg) </> "cabal-hash.txt") $ - (renderPackageHashInputs (packageHashInputs pkgshared pkg)) - - -- here's where we could keep track of the installed files ourselves if - -- we wanted by calling copy to an image dir and then we would make a - -- manifest and move it to its final location - - --TODO: [nice to have] we should actually have it make an image in store/incoming and - -- then when it's done, move it to its final location, to reduce problems - -- with installs failing half-way. Could also register and then move. 
- - -- For libraries, grab the package configuration file - -- and register it ourselves - if pkgRequiresRegistration pkg - then do - ipkg <- generateInstalledPackageInfo - -- We register ourselves rather than via Setup.hs. We need to - -- grab and modify the InstalledPackageInfo. We decide what - -- the installed package id is, not the build system. - let ipkg' = ipkg { Installed.installedUnitId = ipkgid } - Cabal.registerPackage verbosity compiler progdb - True -- multi-instance, nix style - (pkgRegisterPackageDBStack pkg) ipkg' - return (Just ipkg') - else return Nothing - - --TODO: [required feature] docs and test phases - let docsResult = DocsNotTried - testsResult = TestsNotTried - - return (BuildSuccess mipkg (BuildOk docsResult testsResult)) - - where - pkgid = packageId rpkg - ipkgid = installedPackageId rpkg - - isParallelBuild = buildSettingNumJobs >= 2 - - configureCommand = Cabal.configureCommand defaultProgramConfiguration - configureFlags v = flip filterConfigureFlags v $ - setupHsConfigureFlags rpkg pkgshared - verbosity builddir - - buildCommand = Cabal.buildCommand defaultProgramConfiguration - buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir - - generateInstalledPackageInfo :: IO InstalledPackageInfo - generateInstalledPackageInfo = - withTempInstalledPackageInfoFile - verbosity distTempDirectory $ \pkgConfFile -> do - -- make absolute since setup changes dir - pkgConfFile' <- canonicalizePath pkgConfFile - let registerFlags _ = setupHsRegisterFlags - pkg pkgshared - verbosity builddir - pkgConfFile' - setup Cabal.registerCommand registerFlags - - copyFlags _ = setupHsCopyFlags pkg pkgshared verbosity builddir - - scriptOptions = setupHsScriptOptions rpkg pkgshared srcdir builddir - isParallelBuild cacheLock - - setup :: CommandUI flags -> (Version -> flags) -> IO () - setup cmd flags = - withLogging $ \mLogFileHandle -> - setupWrapper - verbosity - scriptOptions { useLoggingHandle = mLogFileHandle } - (Just (pkgDescription pkg)) - cmd flags [] - - mlogFile = - case buildSettingLogFile of - Nothing -> Nothing - Just mkLogFile -> Just (mkLogFile compiler platform pkgid ipkgid) - - initLogFile = - case mlogFile of - Nothing -> return () - Just logFile -> do - createDirectoryIfMissing True (takeDirectory logFile) - exists <- doesFileExist logFile - when exists $ removeFile logFile - - withLogging action = - case mlogFile of - Nothing -> action Nothing - Just logFile -> withFile logFile AppendMode (action . 
Just) - - -buildInplaceUnpackedPackage :: Verbosity - -> DistDirLayout - -> BuildTimeSettings -> Lock - -> ElaboratedSharedConfig - -> ElaboratedReadyPackage - -> BuildStatusRebuild - -> FilePath -> FilePath - -> IO BuildResult -buildInplaceUnpackedPackage verbosity - distDirLayout@DistDirLayout { - distTempDirectory, - distPackageCacheDirectory - } - BuildTimeSettings{buildSettingNumJobs} - cacheLock - pkgshared@ElaboratedSharedConfig { - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } - rpkg@(ReadyPackage pkg _deps) - buildStatus - srcdir builddir = do - - --TODO: [code cleanup] there is duplication between the distdirlayout and the builddir here - -- builddir is not enough, we also need the per-package cachedir - createDirectoryIfMissingVerbose verbosity False builddir - createDirectoryIfMissingVerbose verbosity False (distPackageCacheDirectory pkgid) - createPackageDBIfMissing verbosity compiler progdb (pkgBuildPackageDBStack pkg) - - -- Configure phase - -- - whenReConfigure $ do - annotateFailure ConfigureFailed $ - setup configureCommand configureFlags [] - invalidatePackageRegFileMonitor packageFileMonitor - updatePackageConfigFileMonitor packageFileMonitor srcdir pkg - - -- Build phase - -- - let docsResult = DocsNotTried - testsResult = TestsNotTried - - buildSuccess :: BuildSuccess - buildSuccess = BuildOk docsResult testsResult - - whenRebuild $ do - timestamp <- beginUpdateFileMonitor - annotateFailure BuildFailed $ - setup buildCommand buildFlags buildArgs - - --TODO: [required eventually] this doesn't track file - --non-existence, so we could fail to rebuild if someone - --adds a new file which changes behavior. - allSrcFiles <- allPackageSourceFiles verbosity srcdir - - updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp - pkg buildStatus - allSrcFiles buildSuccess - - mipkg <- whenReRegister $ annotateFailure InstallFailed $ do - -- Register locally - mipkg <- if pkgRequiresRegistration pkg - then do - ipkg <- generateInstalledPackageInfo - -- We register ourselves rather than via Setup.hs. We need to - -- grab and modify the InstalledPackageInfo. We decide what - -- the installed package id is, not the build system. 
- let ipkg' = ipkg { Installed.installedUnitId = ipkgid } - Cabal.registerPackage verbosity compiler progdb False - (pkgRegisterPackageDBStack pkg) - ipkg' - return (Just ipkg') - - else return Nothing - - updatePackageRegFileMonitor packageFileMonitor srcdir mipkg - return mipkg - - -- Repl phase - -- - whenRepl $ - annotateFailure BuildFailed $ - setup replCommand replFlags replArgs - - -- Haddock phase - whenHaddock $ - annotateFailure BuildFailed $ - setup haddockCommand haddockFlags [] - - return (BuildSuccess mipkg buildSuccess) - - where - pkgid = packageId rpkg - ipkgid = installedPackageId rpkg - - isParallelBuild = buildSettingNumJobs >= 2 - - packageFileMonitor = newPackageFileMonitor distDirLayout pkgid - - whenReConfigure action = case buildStatus of - BuildStatusConfigure _ -> action - _ -> return () - - whenRebuild action - | null (pkgBuildTargets pkg) = return () - | otherwise = action - - whenRepl action - | isNothing (pkgReplTarget pkg) = return () - | otherwise = action - - whenHaddock action - | pkgBuildHaddocks pkg = action - | otherwise = return () - - whenReRegister action = case buildStatus of - BuildStatusConfigure _ -> action - BuildStatusBuild Nothing _ -> action - BuildStatusBuild (Just mipkg) _ -> return mipkg - - configureCommand = Cabal.configureCommand defaultProgramConfiguration - configureFlags v = flip filterConfigureFlags v $ - setupHsConfigureFlags rpkg pkgshared - verbosity builddir - - buildCommand = Cabal.buildCommand defaultProgramConfiguration - buildFlags _ = setupHsBuildFlags pkg pkgshared - verbosity builddir - buildArgs = setupHsBuildArgs pkg - - replCommand = Cabal.replCommand defaultProgramConfiguration - replFlags _ = setupHsReplFlags pkg pkgshared - verbosity builddir - replArgs = setupHsReplArgs pkg - - haddockCommand = Cabal.haddockCommand - haddockFlags _ = setupHsHaddockFlags pkg pkgshared - verbosity builddir - - scriptOptions = setupHsScriptOptions rpkg pkgshared - srcdir builddir - isParallelBuild cacheLock - - setup :: CommandUI flags -> (Version -> flags) -> [String] -> IO () - setup cmd flags args = - setupWrapper verbosity - scriptOptions - (Just (pkgDescription pkg)) - cmd flags args - - generateInstalledPackageInfo :: IO InstalledPackageInfo - generateInstalledPackageInfo = - withTempInstalledPackageInfoFile - verbosity distTempDirectory $ \pkgConfFile -> do - -- make absolute since setup changes dir - pkgConfFile' <- canonicalizePath pkgConfFile - let registerFlags _ = setupHsRegisterFlags - pkg pkgshared - verbosity builddir - pkgConfFile' - setup Cabal.registerCommand registerFlags [] - - --- helper -annotateFailure :: (String -> BuildFailure) -> IO a -> IO a -annotateFailure annotate action = - action `catches` - [ Handler $ \ioe -> handler (ioe :: IOException) - , Handler $ \exit -> handler (exit :: ExitCode) - ] - where - handler :: Exception e => e -> IO a - handler = throwIO . annotate -#if MIN_VERSION_base(4,8,0) - . displayException -#else - . 
show -#endif - - -withTempInstalledPackageInfoFile :: Verbosity -> FilePath - -> (FilePath -> IO ()) - -> IO InstalledPackageInfo -withTempInstalledPackageInfoFile verbosity tempdir action = - withTempFile tempdir "package-registration-" $ \pkgConfFile hnd -> do - hClose hnd - action pkgConfFile - - (warns, ipkg) <- withUTF8FileContents pkgConfFile $ \pkgConfStr -> - case Installed.parseInstalledPackageInfo pkgConfStr of - Installed.ParseFailed perror -> pkgConfParseFailed perror - Installed.ParseOk warns ipkg -> return (warns, ipkg) - - unless (null warns) $ - warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) - - return ipkg - where - pkgConfParseFailed :: Installed.PError -> IO a - pkgConfParseFailed perror = - die $ "Couldn't parse the output of 'setup register --gen-pkg-config':" - ++ show perror - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Legacy.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Legacy.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Legacy.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Legacy.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1259 +0,0 @@ -{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveGeneric #-} - --- | Project configuration, implementation in terms of legacy types. --- -module Distribution.Client.ProjectConfig.Legacy ( - - -- * Project config in terms of legacy types - LegacyProjectConfig, - parseLegacyProjectConfig, - showLegacyProjectConfig, - - -- * Conversion to and from legacy config types - commandLineFlagsToProjectConfig, - convertLegacyProjectConfig, - convertLegacyGlobalConfig, - convertToLegacyProjectConfig, - - -- * Internals, just for tests - parsePackageLocationTokenQ, - renderPackageLocationToken, - ) where - -import Distribution.Client.ProjectConfig.Types -import Distribution.Client.Types - ( RemoteRepo(..), emptyRemoteRepo ) -import Distribution.Client.Dependency.Types - ( ConstraintSource(..) ) -import Distribution.Client.Config - ( SavedConfig(..), remoteRepoFields ) - -import Distribution.Package -import Distribution.PackageDescription - ( SourceRepo(..), RepoKind(..) ) -import Distribution.PackageDescription.Parse - ( sourceRepoFieldDescrs ) -import Distribution.Simple.Compiler - ( OptimisationLevel(..), DebugInfoLevel(..) ) -import Distribution.Simple.Setup - ( Flag(Flag), toFlag, fromFlagOrDefault - , ConfigFlags(..), configureOptions - , HaddockFlags(..), haddockOptions, defaultHaddockFlags - , programConfigurationPaths', splitArgs - , AllowNewer(..) 
) -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand - , ConfigExFlags(..), configureExOptions, defaultConfigExFlags - , InstallFlags(..), installOptions, defaultInstallFlags ) -import Distribution.Simple.Program - ( programName, knownPrograms ) -import Distribution.Simple.Program.Db - ( ProgramDb, defaultProgramDb ) -import Distribution.Client.Targets - ( dispFlagAssignment, parseFlagAssignment ) -import Distribution.Simple.Utils - ( lowercase ) -import Distribution.Utils.NubList - ( toNubList, fromNubList, overNubList ) -import Distribution.Simple.LocalBuildInfo - ( toPathTemplate, fromPathTemplate ) - -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( ReadP, (+++), (<++) ) -import qualified Text.Read as Read -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint - ( Doc, ($+$) ) -import qualified Distribution.ParseUtils as ParseUtils (field) -import Distribution.ParseUtils - ( ParseResult(..), PError(..), syntaxError, PWarning(..), warning - , simpleField, commaNewLineListField - , showToken ) -import Distribution.Client.ParseUtils -import Distribution.Simple.Command - ( CommandUI(commandOptions), ShowOrParseArgs(..) - , OptionField, option, reqArg' ) - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import qualified Data.Map as Map -import Data.Char (isSpace) -import Distribution.Compat.Semigroup -import GHC.Generics (Generic) - ------------------------------------------------------------------- --- Representing the project config file in terms of legacy types --- - --- | We already have parsers\/pretty-printers for almost all the fields in the --- project config file, but they're in terms of the types used for the command --- line flags for Setup.hs or cabal commands. We don't want to redefine them --- all, at least not yet so for the moment we use the parsers at the old types --- and use conversion functions. --- --- Ultimately if\/when this project-based approach becomes the default then we --- can redefine the parsers directly for the new types. 
--- -data LegacyProjectConfig = LegacyProjectConfig { - legacyPackages :: [String], - legacyPackagesOptional :: [String], - legacyPackagesRepo :: [SourceRepo], - legacyPackagesNamed :: [Dependency], - - legacySharedConfig :: LegacySharedConfig, - legacyLocalConfig :: LegacyPackageConfig, - legacySpecificConfig :: MapMappend PackageName LegacyPackageConfig - } deriving Generic - -instance Monoid LegacyProjectConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacyProjectConfig where - (<>) = gmappend - -data LegacyPackageConfig = LegacyPackageConfig { - legacyConfigureFlags :: ConfigFlags, - legacyInstallPkgFlags :: InstallFlags, - legacyHaddockFlags :: HaddockFlags - } deriving Generic - -instance Monoid LegacyPackageConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacyPackageConfig where - (<>) = gmappend - -data LegacySharedConfig = LegacySharedConfig { - legacyGlobalFlags :: GlobalFlags, - legacyConfigureShFlags :: ConfigFlags, - legacyConfigureExFlags :: ConfigExFlags, - legacyInstallFlags :: InstallFlags - } deriving Generic - -instance Monoid LegacySharedConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacySharedConfig where - (<>) = gmappend - - ------------------------------------------------------------------- --- Converting from and to the legacy types --- - --- | Convert configuration from the @cabal configure@ or @cabal build@ command --- line into a 'ProjectConfig' value that can combined with configuration from --- other sources. --- --- At the moment this uses the legacy command line flag types. See --- 'LegacyProjectConfig' for an explanation. --- -commandLineFlagsToProjectConfig :: GlobalFlags - -> ConfigFlags -> ConfigExFlags - -> InstallFlags -> HaddockFlags - -> ProjectConfig -commandLineFlagsToProjectConfig globalFlags configFlags configExFlags - installFlags haddockFlags = - mempty { - projectConfigBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configFlags - installFlags haddockFlags, - projectConfigShared = convertLegacyAllPackageFlags - globalFlags configFlags - configExFlags installFlags, - projectConfigLocalPackages = convertLegacyPerPackageFlags - configFlags installFlags haddockFlags - } - - --- | Convert from the types currently used for the user-wide @~/.cabal/config@ --- file into the 'ProjectConfig' type. --- --- Only a subset of the 'ProjectConfig' can be represented in the user-wide --- config. In particular it does not include packages that are in the project, --- and it also doesn't support package-specific configuration (only --- configuration that applies to all packages). --- -convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig -convertLegacyGlobalConfig - SavedConfig { - savedGlobalFlags = globalFlags, - savedInstallFlags = installFlags, - savedConfigureFlags = configFlags, - savedConfigureExFlags = configExFlags, - savedUserInstallDirs = _, - savedGlobalInstallDirs = _, - savedUploadFlags = _, - savedReportFlags = _, - savedHaddockFlags = haddockFlags - } = - mempty { - projectConfigShared = configAllPackages, - projectConfigLocalPackages = configLocalPackages, - projectConfigBuildOnly = configBuildOnly - } - where - --TODO: [code cleanup] eliminate use of default*Flags here and specify the - -- defaults in the various resolve functions in terms of the new types. 
- configExFlags' = defaultConfigExFlags <> configExFlags - installFlags' = defaultInstallFlags <> installFlags - haddockFlags' = defaultHaddockFlags <> haddockFlags - - configLocalPackages = convertLegacyPerPackageFlags - configFlags installFlags' haddockFlags' - configAllPackages = convertLegacyAllPackageFlags - globalFlags configFlags - configExFlags' installFlags' - configBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configFlags - installFlags' haddockFlags' - - --- | Convert the project config from the legacy types to the 'ProjectConfig' --- and associated types. See 'LegacyProjectConfig' for an explanation of the --- approach. --- -convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig -convertLegacyProjectConfig - LegacyProjectConfig { - legacyPackages, - legacyPackagesOptional, - legacyPackagesRepo, - legacyPackagesNamed, - legacySharedConfig = LegacySharedConfig globalFlags configShFlags - configExFlags installSharedFlags, - legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags - haddockFlags, - legacySpecificConfig - } = - - ProjectConfig { - projectPackages = legacyPackages, - projectPackagesOptional = legacyPackagesOptional, - projectPackagesRepo = legacyPackagesRepo, - projectPackagesNamed = legacyPackagesNamed, - - projectConfigBuildOnly = configBuildOnly, - projectConfigShared = configAllPackages, - projectConfigLocalPackages = configLocalPackages, - projectConfigSpecificPackage = fmap perPackage legacySpecificConfig - } - where - configLocalPackages = convertLegacyPerPackageFlags - configFlags installPerPkgFlags haddockFlags - configAllPackages = convertLegacyAllPackageFlags - globalFlags (configFlags <> configShFlags) - configExFlags installSharedFlags - configBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configShFlags - installSharedFlags haddockFlags - - perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags - perPkgHaddockFlags) = - convertLegacyPerPackageFlags - perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags - - --- | Helper used by other conversion functions that returns the --- 'ProjectConfigShared' subset of the 'ProjectConfig'. --- -convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags - -> ConfigExFlags -> InstallFlags - -> ProjectConfigShared -convertLegacyAllPackageFlags globalFlags configFlags - configExFlags installFlags = - ProjectConfigShared{..} - where - GlobalFlags { - globalConfigFile = _, -- TODO: [required feature] - globalSandboxConfigFile = _, -- ?? 
- globalRemoteRepos = projectConfigRemoteRepos, - globalLocalRepos = projectConfigLocalRepos - } = globalFlags - - ConfigFlags { - configHcFlavor = projectConfigHcFlavor, - configHcPath = projectConfigHcPath, - configHcPkg = projectConfigHcPkg, - --configInstallDirs = projectConfigInstallDirs, - --configUserInstall = projectConfigUserInstall, - --configPackageDBs = projectConfigPackageDBs, - configAllowNewer = projectConfigAllowNewer - } = configFlags - - ConfigExFlags { - configCabalVersion = projectConfigCabalVersion, - configExConstraints = projectConfigConstraints, - configPreferences = projectConfigPreferences, - configSolver = projectConfigSolver - } = configExFlags - - InstallFlags { - installHaddockIndex = projectConfigHaddockIndex, - --installReinstall = projectConfigReinstall, - --installAvoidReinstalls = projectConfigAvoidReinstalls, - --installOverrideReinstall = projectConfigOverrideReinstall, - installMaxBackjumps = projectConfigMaxBackjumps, - --installUpgradeDeps = projectConfigUpgradeDeps, - installReorderGoals = projectConfigReorderGoals, - --installIndependentGoals = projectConfigIndependentGoals, - --installShadowPkgs = projectConfigShadowPkgs, - installStrongFlags = projectConfigStrongFlags - } = installFlags - - - --- | Helper used by other conversion functions that returns the --- 'PackageConfig' subset of the 'ProjectConfig'. --- -convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags - -> PackageConfig -convertLegacyPerPackageFlags configFlags installFlags haddockFlags = - PackageConfig{..} - where - ConfigFlags { - configProgramPaths, - configProgramArgs, - configProgramPathExtra = packageConfigProgramPathExtra, - configVanillaLib = packageConfigVanillaLib, - configProfLib = packageConfigProfLib, - configSharedLib = packageConfigSharedLib, - configDynExe = packageConfigDynExe, - configProfExe = packageConfigProfExe, - configProf = packageConfigProf, - configProfDetail = packageConfigProfDetail, - configProfLibDetail = packageConfigProfLibDetail, - configConfigureArgs = packageConfigConfigureArgs, - configOptimization = packageConfigOptimization, - configProgPrefix = packageConfigProgPrefix, - configProgSuffix = packageConfigProgSuffix, - configGHCiLib = packageConfigGHCiLib, - configSplitObjs = packageConfigSplitObjs, - configStripExes = packageConfigStripExes, - configStripLibs = packageConfigStripLibs, - configExtraLibDirs = packageConfigExtraLibDirs, - configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, - configExtraIncludeDirs = packageConfigExtraIncludeDirs, - configConfigurationsFlags = packageConfigFlagAssignment, - configTests = packageConfigTests, - configBenchmarks = packageConfigBenchmarks, - configCoverage = coverage, - configLibCoverage = libcoverage, --deprecated - configDebugInfo = packageConfigDebugInfo, - configRelocatable = packageConfigRelocatable - } = configFlags - packageConfigProgramPaths = MapLast (Map.fromList configProgramPaths) - packageConfigProgramArgs = MapMappend (Map.fromList configProgramArgs) - - packageConfigCoverage = coverage <> libcoverage - --TODO: defer this merging to the resolve phase - - InstallFlags { - installDocumentation = packageConfigDocumentation, - installRunTests = packageConfigRunTests - } = installFlags - - HaddockFlags { - haddockHoogle = packageConfigHaddockHoogle, - haddockHtml = packageConfigHaddockHtml, - haddockHtmlLocation = packageConfigHaddockHtmlLocation, - haddockExecutables = packageConfigHaddockExecutables, - haddockTestSuites = packageConfigHaddockTestSuites, - 
haddockBenchmarks = packageConfigHaddockBenchmarks, - haddockInternal = packageConfigHaddockInternal, - haddockCss = packageConfigHaddockCss, - haddockHscolour = packageConfigHaddockHscolour, - haddockHscolourCss = packageConfigHaddockHscolourCss, - haddockContents = packageConfigHaddockContents - } = haddockFlags - - - --- | Helper used by other conversion functions that returns the --- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'. --- -convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags - -> InstallFlags -> HaddockFlags - -> ProjectConfigBuildOnly -convertLegacyBuildOnlyFlags globalFlags configFlags - installFlags haddockFlags = - ProjectConfigBuildOnly{..} - where - GlobalFlags { - globalCacheDir = projectConfigCacheDir, - globalLogsDir = projectConfigLogsDir, - globalWorldFile = projectConfigWorldFile, - globalHttpTransport = projectConfigHttpTransport, - globalIgnoreExpiry = projectConfigIgnoreExpiry - } = globalFlags - - ConfigFlags { - configVerbosity = projectConfigVerbosity - } = configFlags - - InstallFlags { - installDryRun = projectConfigDryRun, - installOnly = _, - installOnlyDeps = projectConfigOnlyDeps, - installRootCmd = projectConfigRootCmd, - installSummaryFile = projectConfigSummaryFile, - installLogFile = projectConfigLogFile, - installBuildReports = projectConfigBuildReports, - installReportPlanningFailure = projectConfigReportPlanningFailure, - installSymlinkBinDir = projectConfigSymlinkBinDir, - installOneShot = projectConfigOneShot, - installNumJobs = projectConfigNumJobs, - installOfflineMode = projectConfigOfflineMode - } = installFlags - - HaddockFlags { - haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere - } = haddockFlags - - -convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig -convertToLegacyProjectConfig - projectConfig@ProjectConfig { - projectPackages, - projectPackagesOptional, - projectPackagesRepo, - projectPackagesNamed, - projectConfigLocalPackages, - projectConfigSpecificPackage - } = - LegacyProjectConfig { - legacyPackages = projectPackages, - legacyPackagesOptional = projectPackagesOptional, - legacyPackagesRepo = projectPackagesRepo, - legacyPackagesNamed = projectPackagesNamed, - legacySharedConfig = convertToLegacySharedConfig projectConfig, - legacyLocalConfig = convertToLegacyAllPackageConfig projectConfig - <> convertToLegacyPerPackageConfig - projectConfigLocalPackages, - legacySpecificConfig = fmap convertToLegacyPerPackageConfig - projectConfigSpecificPackage - } - -convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig -convertToLegacySharedConfig - ProjectConfig { - projectConfigBuildOnly = ProjectConfigBuildOnly {..}, - projectConfigShared = ProjectConfigShared {..} - } = - - LegacySharedConfig { - legacyGlobalFlags = globalFlags, - legacyConfigureShFlags = configFlags, - legacyConfigureExFlags = configExFlags, - legacyInstallFlags = installFlags - } - where - globalFlags = GlobalFlags { - globalVersion = mempty, - globalNumericVersion = mempty, - globalConfigFile = mempty, - globalSandboxConfigFile = mempty, - globalConstraintsFile = mempty, - globalRemoteRepos = projectConfigRemoteRepos, - globalCacheDir = projectConfigCacheDir, - globalLocalRepos = projectConfigLocalRepos, - globalLogsDir = projectConfigLogsDir, - globalWorldFile = projectConfigWorldFile, - globalRequireSandbox = mempty, - globalIgnoreSandbox = mempty, - globalIgnoreExpiry = projectConfigIgnoreExpiry, - globalHttpTransport = projectConfigHttpTransport - } - - configFlags = mempty { - 
configVerbosity = projectConfigVerbosity, - configAllowNewer = projectConfigAllowNewer - } - - configExFlags = ConfigExFlags { - configCabalVersion = projectConfigCabalVersion, - configExConstraints = projectConfigConstraints, - configPreferences = projectConfigPreferences, - configSolver = projectConfigSolver - } - - installFlags = InstallFlags { - installDocumentation = mempty, - installHaddockIndex = projectConfigHaddockIndex, - installDryRun = projectConfigDryRun, - installReinstall = mempty, --projectConfigReinstall, - installAvoidReinstalls = mempty, --projectConfigAvoidReinstalls, - installOverrideReinstall = mempty, --projectConfigOverrideReinstall, - installMaxBackjumps = projectConfigMaxBackjumps, - installUpgradeDeps = mempty, --projectConfigUpgradeDeps, - installReorderGoals = projectConfigReorderGoals, - installIndependentGoals = mempty, --projectConfigIndependentGoals, - installShadowPkgs = mempty, --projectConfigShadowPkgs, - installStrongFlags = projectConfigStrongFlags, - installOnly = mempty, - installOnlyDeps = projectConfigOnlyDeps, - installRootCmd = projectConfigRootCmd, - installSummaryFile = projectConfigSummaryFile, - installLogFile = projectConfigLogFile, - installBuildReports = projectConfigBuildReports, - installReportPlanningFailure = projectConfigReportPlanningFailure, - installSymlinkBinDir = projectConfigSymlinkBinDir, - installOneShot = projectConfigOneShot, - installNumJobs = projectConfigNumJobs, - installRunTests = mempty, - installOfflineMode = projectConfigOfflineMode - } - - -convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig -convertToLegacyAllPackageConfig - ProjectConfig { - projectConfigBuildOnly = ProjectConfigBuildOnly {..}, - projectConfigShared = ProjectConfigShared {..} - } = - - LegacyPackageConfig { - legacyConfigureFlags = configFlags, - legacyInstallPkgFlags= mempty, - legacyHaddockFlags = haddockFlags - } - where - configFlags = ConfigFlags { - configPrograms_ = mempty, - configProgramPaths = mempty, - configProgramArgs = mempty, - configProgramPathExtra = mempty, - configHcFlavor = projectConfigHcFlavor, - configHcPath = projectConfigHcPath, - configHcPkg = projectConfigHcPkg, - configVanillaLib = mempty, - configProfLib = mempty, - configSharedLib = mempty, - configDynExe = mempty, - configProfExe = mempty, - configProf = mempty, - configProfDetail = mempty, - configProfLibDetail = mempty, - configConfigureArgs = mempty, - configOptimization = mempty, - configProgPrefix = mempty, - configProgSuffix = mempty, - configInstallDirs = mempty, - configScratchDir = mempty, - configDistPref = mempty, - configVerbosity = mempty, - configUserInstall = mempty, --projectConfigUserInstall, - configPackageDBs = mempty, --projectConfigPackageDBs, - configGHCiLib = mempty, - configSplitObjs = mempty, - configStripExes = mempty, - configStripLibs = mempty, - configExtraLibDirs = mempty, - configExtraFrameworkDirs = mempty, - configConstraints = mempty, - configDependencies = mempty, - configExtraIncludeDirs = mempty, - configIPID = mempty, - configConfigurationsFlags = mempty, - configTests = mempty, - configCoverage = mempty, --TODO: don't merge - configLibCoverage = mempty, --TODO: don't merge - configExactConfiguration = mempty, - configBenchmarks = mempty, - configFlagError = mempty, --TODO: ??? 
- configRelocatable = mempty, - configDebugInfo = mempty, - configAllowNewer = mempty - } - - haddockFlags = mempty { - haddockKeepTempFiles = projectConfigKeepTempFiles - } - - -convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig -convertToLegacyPerPackageConfig PackageConfig {..} = - LegacyPackageConfig { - legacyConfigureFlags = configFlags, - legacyInstallPkgFlags = installFlags, - legacyHaddockFlags = haddockFlags - } - where - configFlags = ConfigFlags { - configPrograms_ = configPrograms_ mempty, - configProgramPaths = Map.toList (getMapLast packageConfigProgramPaths), - configProgramArgs = Map.toList (getMapMappend packageConfigProgramArgs), - configProgramPathExtra = packageConfigProgramPathExtra, - configHcFlavor = mempty, - configHcPath = mempty, - configHcPkg = mempty, - configVanillaLib = packageConfigVanillaLib, - configProfLib = packageConfigProfLib, - configSharedLib = packageConfigSharedLib, - configDynExe = packageConfigDynExe, - configProfExe = packageConfigProfExe, - configProf = packageConfigProf, - configProfDetail = packageConfigProfDetail, - configProfLibDetail = packageConfigProfLibDetail, - configConfigureArgs = packageConfigConfigureArgs, - configOptimization = packageConfigOptimization, - configProgPrefix = packageConfigProgPrefix, - configProgSuffix = packageConfigProgSuffix, - configInstallDirs = mempty, - configScratchDir = mempty, - configDistPref = mempty, - configVerbosity = mempty, - configUserInstall = mempty, - configPackageDBs = mempty, - configGHCiLib = packageConfigGHCiLib, - configSplitObjs = packageConfigSplitObjs, - configStripExes = packageConfigStripExes, - configStripLibs = packageConfigStripLibs, - configExtraLibDirs = packageConfigExtraLibDirs, - configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, - configConstraints = mempty, - configDependencies = mempty, - configExtraIncludeDirs = packageConfigExtraIncludeDirs, - configIPID = mempty, - configConfigurationsFlags = packageConfigFlagAssignment, - configTests = packageConfigTests, - configCoverage = packageConfigCoverage, --TODO: don't merge - configLibCoverage = packageConfigCoverage, --TODO: don't merge - configExactConfiguration = mempty, - configBenchmarks = packageConfigBenchmarks, - configFlagError = mempty, --TODO: ??? 
- configRelocatable = packageConfigRelocatable, - configDebugInfo = packageConfigDebugInfo, - configAllowNewer = mempty - } - - installFlags = mempty { - installDocumentation = packageConfigDocumentation, - installRunTests = packageConfigRunTests - } - - haddockFlags = HaddockFlags { - haddockProgramPaths = mempty, - haddockProgramArgs = mempty, - haddockHoogle = packageConfigHaddockHoogle, - haddockHtml = packageConfigHaddockHtml, - haddockHtmlLocation = packageConfigHaddockHtmlLocation, - haddockForHackage = mempty, --TODO: added recently - haddockExecutables = packageConfigHaddockExecutables, - haddockTestSuites = packageConfigHaddockTestSuites, - haddockBenchmarks = packageConfigHaddockBenchmarks, - haddockInternal = packageConfigHaddockInternal, - haddockCss = packageConfigHaddockCss, - haddockHscolour = packageConfigHaddockHscolour, - haddockHscolourCss = packageConfigHaddockHscolourCss, - haddockContents = packageConfigHaddockContents, - haddockDistPref = mempty, - haddockKeepTempFiles = mempty, - haddockVerbosity = mempty - } - - ------------------------------------------------- --- Parsing and showing the project config file --- - -parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig -parseLegacyProjectConfig = - parseConfig legacyProjectConfigFieldDescrs - legacyPackageConfigSectionDescrs - mempty - -showLegacyProjectConfig :: LegacyProjectConfig -> String -showLegacyProjectConfig config = - Disp.render $ - showConfig legacyProjectConfigFieldDescrs - legacyPackageConfigSectionDescrs - config - $+$ - Disp.text "" - - -legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig] -legacyProjectConfigFieldDescrs = - - [ newLineListField "packages" - (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ - legacyPackages - (\v flags -> flags { legacyPackages = v }) - , newLineListField "optional-packages" - (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ - legacyPackagesOptional - (\v flags -> flags { legacyPackagesOptional = v }) - , commaNewLineListField "extra-packages" - disp parse - legacyPackagesNamed - (\v flags -> flags { legacyPackagesNamed = v }) - ] - - ++ map (liftField - legacySharedConfig - (\flags conf -> conf { legacySharedConfig = flags })) - legacySharedConfigFieldDescrs - - ++ map (liftField - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags })) - legacyPackageConfigFieldDescrs - --- | This is a bit tricky since it has to cover globs which have embedded @,@ --- chars. But we don't just want to parse strictly as a glob since we want to --- allow http urls which don't parse as globs, and possibly some --- system-dependent file paths. So we parse fairly liberally as a token, but --- we allow @,@ inside matched @{}@ braces. 
--- -parsePackageLocationTokenQ :: ReadP r String -parsePackageLocationTokenQ = parseHaskellString - Parse.<++ parsePackageLocationToken - where - parsePackageLocationToken :: ReadP r String - parsePackageLocationToken = fmap fst (Parse.gather outerTerm) - where - outerTerm = alternateEither1 outerToken (braces innerTerm) - innerTerm = alternateEither innerToken (braces innerTerm) - outerToken = Parse.munch1 outerChar >> return () - innerToken = Parse.munch1 innerChar >> return () - outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',') - innerChar c = not (isSpace c || c == '{' || c == '}') - braces = Parse.between (Parse.char '{') (Parse.char '}') - - alternateEither, alternateEither1, - alternatePQs, alternate1PQs, alternateQsP, alternate1QsP - :: ReadP r () -> ReadP r () -> ReadP r () - - alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p - alternateEither p q = alternateEither1 p q +++ return () - alternate1PQs p q = p >> alternateQsP q p - alternatePQs p q = alternate1PQs p q +++ return () - alternate1QsP q p = Parse.many1 q >> alternatePQs p q - alternateQsP q p = alternate1QsP q p +++ return () - -renderPackageLocationToken :: String -> String -renderPackageLocationToken s | needsQuoting = show s - | otherwise = s - where - needsQuoting = not (ok 0 s) - || s == "." -- . on its own on a line has special meaning - || take 2 s == "--" -- on its own line is comment syntax - --TODO: [code cleanup] these "." and "--" escaping issues - -- ought to be dealt with systematically in ParseUtils. - ok :: Int -> String -> Bool - ok n [] = n == 0 - ok _ ('"':_) = False - ok n ('{':cs) = ok (n+1) cs - ok n ('}':cs) = ok (n-1) cs - ok n (',':cs) = (n > 0) && ok n cs - ok _ (c:_) - | isSpace c = False - ok n (_ :cs) = ok n cs - - -legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig] -legacySharedConfigFieldDescrs = - - ( liftFields - legacyGlobalFlags - (\flags conf -> conf { legacyGlobalFlags = flags }) - . addFields - [ newLineListField "local-repo" - showTokenQ parseTokenQ - (fromNubList . globalLocalRepos) - (\v conf -> conf { globalLocalRepos = toNubList v }) - ] - . filterFields - [ "remote-repo-cache" - , "logs-dir", "world-file", "ignore-expiry", "http-transport" - ] - . commandOptionsToFields - ) (commandOptions (globalCommand []) ParseArgs) - ++ - ( liftFields - legacyConfigureShFlags - (\flags conf -> conf { legacyConfigureShFlags = flags }) - . addFields - [ simpleField "allow-newer" - (maybe mempty dispAllowNewer) (fmap Just parseAllowNewer) - configAllowNewer (\v conf -> conf { configAllowNewer = v }) - ] - . filterFields ["verbose"] - . commandOptionsToFields - ) (configureOptions ParseArgs) - ++ - ( liftFields - legacyConfigureExFlags - (\flags conf -> conf { legacyConfigureExFlags = flags }) - . addFields - [ commaNewLineListField "constraints" - (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse) - configExConstraints (\v conf -> conf { configExConstraints = v }) - - , commaNewLineListField "preferences" - disp parse - configPreferences (\v conf -> conf { configPreferences = v }) - ] - . filterFields - [ "cabal-lib-version", "solver" - -- not "constraint" or "preference", we use our own plural ones above - ] - . commandOptionsToFields - ) (configureExOptions ParseArgs constraintSrc) - ++ - ( liftFields - legacyInstallFlags - (\flags conf -> conf { legacyInstallFlags = flags }) - . addFields - [ newLineListField "build-summary" - (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) - (fromNubList . 
installSummaryFile) - (\v conf -> conf { installSummaryFile = toNubList v }) - ] - . filterFields - [ "doc-index-file" - , "root-cmd", "symlink-bindir" - , "build-log" - , "remote-build-reporting", "report-planning-failure" - , "one-shot", "jobs", "offline" - -- solver flags: - , "max-backjumps", "reorder-goals", "strong-flags" - ] - . commandOptionsToFields - ) (installOptions ParseArgs) - where - constraintSrc = ConstraintSourceProjectConfig "TODO" - -parseAllowNewer :: ReadP r AllowNewer -parseAllowNewer = - ((const AllowNewerNone <$> (Parse.string "none" +++ Parse.string "None")) - +++ (const AllowNewerAll <$> (Parse.string "all" +++ Parse.string "All"))) - <++ ( AllowNewerSome <$> parseOptCommaList parse) - -dispAllowNewer :: AllowNewer -> Doc -dispAllowNewer AllowNewerNone = Disp.text "None" -dispAllowNewer (AllowNewerSome pkgs) = Disp.fsep . Disp.punctuate Disp.comma - . map disp $ pkgs -dispAllowNewer AllowNewerAll = Disp.text "All" - - -legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig] -legacyPackageConfigFieldDescrs = - ( liftFields - legacyConfigureFlags - (\flags conf -> conf { legacyConfigureFlags = flags }) - . addFields - [ newLineListField "extra-include-dirs" - showTokenQ parseTokenQ - configExtraIncludeDirs - (\v conf -> conf { configExtraIncludeDirs = v }) - , newLineListField "extra-lib-dirs" - showTokenQ parseTokenQ - configExtraLibDirs - (\v conf -> conf { configExtraLibDirs = v }) - , newLineListField "extra-framework-dirs" - showTokenQ parseTokenQ - configExtraFrameworkDirs - (\v conf -> conf { configExtraFrameworkDirs = v }) - , newLineListField "extra-prog-path" - showTokenQ parseTokenQ - (fromNubList . configProgramPathExtra) - (\v conf -> conf { configProgramPathExtra = toNubList v }) - , newLineListField "configure-options" - showTokenQ parseTokenQ - configConfigureArgs - (\v conf -> conf { configConfigureArgs = v }) - , simpleField "flags" - dispFlagAssignment parseFlagAssignment - configConfigurationsFlags - (\v conf -> conf { configConfigurationsFlags = v }) - ] - . filterFields - [ "compiler", "with-compiler", "with-hc-pkg" - , "program-prefix", "program-suffix" - , "library-vanilla", "library-profiling" - , "shared", "executable-dynamic" - , "profiling", "executable-profiling" - , "profiling-detail", "library-profiling-detail" - , "optimization", "debug-info", "library-for-ghci", "split-objs" - , "executable-stripping", "library-stripping" - , "tests", "benchmarks" - , "coverage", "library-coverage" - , "relocatable" - -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs" - -- or "extra-prog-path". We use corrected ones above that parse - -- as list fields. - ] - . commandOptionsToFields - ) (configureOptions ParseArgs) - ++ - liftFields - legacyConfigureFlags - (\flags conf -> conf { legacyConfigureFlags = flags }) - [ overrideFieldCompiler - , overrideFieldOptimization - , overrideFieldDebugInfo - ] - ++ - ( liftFields - legacyInstallPkgFlags - (\flags conf -> conf { legacyInstallPkgFlags = flags }) - . filterFields - [ "documentation", "run-tests" - ] - . commandOptionsToFields - ) (installOptions ParseArgs) - ++ - ( liftFields - legacyHaddockFlags - (\flags conf -> conf { legacyHaddockFlags = flags }) - . mapFieldNames - ("haddock-"++) - . filterFields - [ "hoogle", "html", "html-location" - , "executables", "tests", "benchmarks", "all", "internal", "css" - , "hyperlink-source", "hscolour-css" - , "contents-location", "keep-temp-files" - ] - . 
commandOptionsToFields - ) (haddockOptions ParseArgs) - - where - overrideFieldCompiler = - simpleField "compiler" - (fromFlagOrDefault Disp.empty . fmap disp) - (Parse.option mempty (fmap toFlag parse)) - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - - - -- TODO: [code cleanup] The following is a hack. The "optimization" and - -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that. - -- Instead of a hand-written parser and printer, we should handle this case - -- properly in the library. - - overrideFieldOptimization = - liftField configOptimization - (\v flags -> flags { configOptimization = v }) $ - let name = "optimization" in - FieldDescr name - (\f -> case f of - Flag NoOptimisation -> Disp.text "False" - Flag NormalOptimisation -> Disp.text "True" - Flag MaximumOptimisation -> Disp.text "2" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoOptimisation) - | str == "True" -> ParseOk [] (Flag NormalOptimisation) - | str == "0" -> ParseOk [] (Flag NoOptimisation) - | str == "1" -> ParseOk [] (Flag NormalOptimisation) - | str == "2" -> ParseOk [] (Flag MaximumOptimisation) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") - - overrideFieldDebugInfo = - liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ - let name = "debug-info" in - FieldDescr name - (\f -> case f of - Flag NoDebugInfo -> Disp.text "False" - Flag MinimalDebugInfo -> Disp.text "1" - Flag NormalDebugInfo -> Disp.text "True" - Flag MaximalDebugInfo -> Disp.text "3" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) - | str == "True" -> ParseOk [] (Flag NormalDebugInfo) - | str == "0" -> ParseOk [] (Flag NoDebugInfo) - | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) - | str == "2" -> ParseOk [] (Flag NormalDebugInfo) - | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") - - -legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig] -legacyPackageConfigSectionDescrs = - [ packageRepoSectionDescr - , packageSpecificOptionsSectionDescr - , liftSection - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags }) - programOptionsSectionDescr - , liftSection - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags }) - programLocationsSectionDescr - , liftSection - legacySharedConfig - (\flags conf -> conf { legacySharedConfig = flags }) $ - liftSection - legacyGlobalFlags - (\flags conf -> conf { legacyGlobalFlags = flags }) - remoteRepoSectionDescr - ] - -packageRepoSectionDescr :: SectionDescr LegacyProjectConfig -packageRepoSectionDescr = - SectionDescr { - sectionName = "source-repository-package", - sectionFields = sourceRepoFieldDescrs, - sectionSubsections = [], - sectionGet = map (\x->("", x)) - . 
legacyPackagesRepo, - sectionSet = - \lineno unused pkgrepo projconf -> do - unless (null unused) $ - syntaxError lineno "the section 'source-repository-package' takes no arguments" - return projconf { - legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo] - }, - sectionEmpty = SourceRepo { - repoKind = RepoThis, -- hopefully unused - repoType = Nothing, - repoLocation = Nothing, - repoModule = Nothing, - repoBranch = Nothing, - repoTag = Nothing, - repoSubdir = Nothing - } - } - -packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig -packageSpecificOptionsSectionDescr = - SectionDescr { - sectionName = "package", - sectionFields = legacyPackageConfigFieldDescrs - ++ programOptionsFieldDescrs - (configProgramArgs . legacyConfigureFlags) - (\args pkgconf -> pkgconf { - legacyConfigureFlags = (legacyConfigureFlags pkgconf) { - configProgramArgs = args - } - } - ) - ++ liftFields - legacyConfigureFlags - (\flags pkgconf -> pkgconf { - legacyConfigureFlags = flags - } - ) - programLocationsFieldDescrs, - sectionSubsections = [], - sectionGet = \projconf -> - [ (display pkgname, pkgconf) - | (pkgname, pkgconf) <- - Map.toList . getMapMappend - . legacySpecificConfig $ projconf ], - sectionSet = - \lineno pkgnamestr pkgconf projconf -> do - pkgname <- case simpleParse pkgnamestr of - Just pkgname -> return pkgname - Nothing -> syntaxError lineno $ - "a 'package' section requires a package name " - ++ "as an argument" - return projconf { - legacySpecificConfig = - MapMappend $ - Map.insertWith mappend pkgname pkgconf - (getMapMappend $ legacySpecificConfig projconf) - }, - sectionEmpty = mempty - } - -programOptionsFieldDescrs :: (a -> [(String, [String])]) - -> ([(String, [String])] -> a -> a) - -> [FieldDescr a] -programOptionsFieldDescrs get set = - commandOptionsToFields - $ programConfigurationOptions - defaultProgramDb - ParseArgs get set - -programOptionsSectionDescr :: SectionDescr LegacyPackageConfig -programOptionsSectionDescr = - SectionDescr { - sectionName = "program-options", - sectionFields = programOptionsFieldDescrs - configProgramArgs - (\args conf -> conf { configProgramArgs = args }), - sectionSubsections = [], - sectionGet = (\x->[("", x)]) - . legacyConfigureFlags, - sectionSet = - \lineno unused confflags pkgconf -> do - unless (null unused) $ - syntaxError lineno "the section 'program-options' takes no arguments" - return pkgconf { - legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags - }, - sectionEmpty = mempty - } - -programLocationsFieldDescrs :: [FieldDescr ConfigFlags] -programLocationsFieldDescrs = - commandOptionsToFields - $ programConfigurationPaths' - (++ "-location") - defaultProgramDb - ParseArgs - configProgramPaths - (\paths conf -> conf { configProgramPaths = paths }) - -programLocationsSectionDescr :: SectionDescr LegacyPackageConfig -programLocationsSectionDescr = - SectionDescr { - sectionName = "program-locations", - sectionFields = programLocationsFieldDescrs, - sectionSubsections = [], - sectionGet = (\x->[("", x)]) - . legacyConfigureFlags, - sectionSet = - \lineno unused confflags pkgconf -> do - unless (null unused) $ - syntaxError lineno "the section 'program-locations' takes no arguments" - return pkgconf { - legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags - }, - sectionEmpty = mempty - } - - --- | For each known program @PROG@ in 'progConf', produce a @PROG-options@ --- 'OptionField'. 
-programConfigurationOptions - :: ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] -programConfigurationOptions progConf showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOptions "PROG"] - ParseArgs -> map (programOptions . programName . fst) - (knownPrograms progConf) - where - programOptions prog = - option "" [prog ++ "-options"] - ("give extra options to " ++ prog) - get set - (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) - (\progArgs -> [ joinsArgs args - | (prog', args) <- progArgs, prog==prog' ])) - - - joinsArgs = unwords . map escape - escape arg | any isSpace arg = "\"" ++ arg ++ "\"" - | otherwise = arg - - -remoteRepoSectionDescr :: SectionDescr GlobalFlags -remoteRepoSectionDescr = - SectionDescr { - sectionName = "repository", - sectionFields = remoteRepoFields, - sectionSubsections = [], - sectionGet = map (\x->(remoteRepoName x, x)) . fromNubList - . globalRemoteRepos, - sectionSet = - \lineno reponame repo0 conf -> do - when (null reponame) $ - syntaxError lineno $ "a 'repository' section requires the " - ++ "repository name as an argument" - let repo = repo0 { remoteRepoName = reponame } - when (remoteRepoKeyThreshold repo - > length (remoteRepoRootKeys repo)) $ - warning $ "'key-threshold' for repository " - ++ show (remoteRepoName repo) - ++ " higher than number of keys" - when (not (null (remoteRepoRootKeys repo)) - && remoteRepoSecure repo /= Just True) $ - warning $ "'root-keys' for repository " - ++ show (remoteRepoName repo) - ++ " non-empty, but 'secure' not set to True." - return conf { - globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf) - }, - sectionEmpty = emptyRemoteRepo "" - } - - -------------------------------- --- Local field utils --- - ---TODO: [code cleanup] all these utils should move to Distribution.ParseUtils --- either augmenting or replacing the ones there - ---TODO: [code cleanup] this is a different definition from listField, like --- commaNewLineListField it pretty prints on multiple lines -newLineListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -newLineListField = listFieldWithSep Disp.sep - ---TODO: [code cleanup] local copy purely so we can use the fixed version --- of parseOptCommaList below -listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listFieldWithSep separator name showF readF get set = - liftField get set' $ - ParseUtils.field name showF' (parseOptCommaList readF) - where - set' xs b = set (get b ++ xs) b - showF' = separator . map showF - ---TODO: [code cleanup] local redefinition that should replace the version in --- D.ParseUtils. This version avoid parse ambiguity for list element parsers --- that have multiple valid parses of prefixes. -parseOptCommaList :: ReadP r a -> ReadP r [a] -parseOptCommaList p = Parse.sepBy p sep - where - -- The separator must not be empty or it introduces ambiguity - sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces) - +++ (Parse.satisfy isSpace >> Parse.skipSpaces) - ---TODO: [code cleanup] local redefinition that should replace the version in --- D.ParseUtils called showFilePath. This version escapes "." and "--" which --- otherwise are special syntax. 
-showTokenQ :: String -> Doc -showTokenQ "" = Disp.empty -showTokenQ x@('-':'-':_) = Disp.text (show x) -showTokenQ x@('.':[]) = Disp.text (show x) -showTokenQ x = showToken x - --- This is just a copy of parseTokenQ, using the fixed parseHaskellString -parseTokenQ :: ReadP r String -parseTokenQ = parseHaskellString - <++ Parse.munch1 (\x -> not (isSpace x) && x /= ',') - ---TODO: [code cleanup] use this to replace the parseHaskellString in --- Distribution.ParseUtils. It turns out Read instance for String accepts --- the ['a', 'b'] syntax, which we do not want. In particular it messes --- up any token starting with []. -parseHaskellString :: ReadP r String -parseHaskellString = - Parse.readS_to_P $ - Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0 - --- Handy util -addFields :: [FieldDescr a] - -> ([FieldDescr a] -> [FieldDescr a]) -addFields = (++) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Types.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,366 +0,0 @@ -{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} - --- | Handling project configuration, types. --- -module Distribution.Client.ProjectConfig.Types ( - - -- * Types for project config - ProjectConfig(..), - ProjectConfigBuildOnly(..), - ProjectConfigShared(..), - PackageConfig(..), - - -- * Resolving configuration - SolverSettings(..), - BuildTimeSettings(..), - - -- * Extra useful Monoids - MapLast(..), - MapMappend(..), - ) where - -import Distribution.Client.Types - ( RemoteRepo ) -import Distribution.Client.Dependency.Types - ( PreSolver, ConstraintSource ) -import Distribution.Client.Targets - ( UserConstraint ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) - -import Distribution.Package - ( PackageName, PackageId, UnitId, Dependency ) -import Distribution.Version - ( Version ) -import Distribution.System - ( Platform ) -import Distribution.PackageDescription - ( FlagAssignment, SourceRepo(..) ) -import Distribution.Simple.Compiler - ( Compiler, CompilerFlavor - , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) -import Distribution.Simple.Setup - ( Flag, AllowNewer(..) ) -import Distribution.Simple.InstallDirs - ( PathTemplate ) -import Distribution.Utils.NubList - ( NubList ) -import Distribution.Verbosity - ( Verbosity ) - -import Data.Map (Map) -import qualified Data.Map as Map -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup -import GHC.Generics (Generic) - - -------------------------------- --- Project config types --- - --- | This type corresponds directly to what can be written in the --- @cabal.project@ file. Other sources of configuration can also be injected --- into this type, such as the user-wide @~/.cabal/config@ file and the --- command line of @cabal configure@ or @cabal build@. --- --- Since it corresponds to the external project file it is an instance of --- 'Monoid' and all the fields can be empty. This also means there has to --- be a step where we resolve configuration. 
At a minimum resolving means --- applying defaults but it can also mean merging information from multiple --- sources. For example for package-specific configuration the project file --- can specify configuration that applies to all local packages, and then --- additional configuration for a specific package. --- --- Future directions: multiple profiles, conditionals. If we add these --- features then the gap between configuration as written in the config file --- and resolved settings we actually use will become even bigger. --- -data ProjectConfig - = ProjectConfig { - - -- | Packages in this project, including local dirs, local .cabal files - -- local and remote tarballs. Where these are file globs, they must - -- match something. - projectPackages :: [String], - - -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that - -- file globs are allowed to match nothing. The primary use case for - -- this is to be able to say @optional-packages: */@ to automagically - -- pick up deps that we unpack locally. - projectPackagesOptional :: [String], - - -- | Packages in this project from remote source repositories. - projectPackagesRepo :: [SourceRepo], - - -- | Packages in this project from hackage repositories. - projectPackagesNamed :: [Dependency], - - projectConfigBuildOnly :: ProjectConfigBuildOnly, - projectConfigShared :: ProjectConfigShared, - projectConfigLocalPackages :: PackageConfig, - projectConfigSpecificPackage :: MapMappend PackageName PackageConfig - } - deriving (Eq, Show, Generic) - --- | That part of the project configuration that only affects /how/ we build --- and not the /value/ of the things we build. This means this information --- does not need to be tracked for changes since it does not affect the --- outcome. --- -data ProjectConfigBuildOnly - = ProjectConfigBuildOnly { - projectConfigVerbosity :: Flag Verbosity, - projectConfigDryRun :: Flag Bool, - projectConfigOnlyDeps :: Flag Bool, - projectConfigSummaryFile :: NubList PathTemplate, - projectConfigLogFile :: Flag PathTemplate, - projectConfigBuildReports :: Flag ReportLevel, - projectConfigReportPlanningFailure :: Flag Bool, - projectConfigSymlinkBinDir :: Flag FilePath, - projectConfigOneShot :: Flag Bool, - projectConfigNumJobs :: Flag (Maybe Int), - projectConfigOfflineMode :: Flag Bool, - projectConfigKeepTempFiles :: Flag Bool, - projectConfigHttpTransport :: Flag String, - projectConfigIgnoreExpiry :: Flag Bool, - projectConfigCacheDir :: Flag FilePath, - projectConfigLogsDir :: Flag FilePath, - projectConfigWorldFile :: Flag FilePath, - projectConfigRootCmd :: Flag String - } - deriving (Eq, Show, Generic) - - --- | Project configuration that is shared between all packages in the project. --- In particular this includes configuration that affects the solver. --- -data ProjectConfigShared - = ProjectConfigShared { - projectConfigHcFlavor :: Flag CompilerFlavor, - projectConfigHcPath :: Flag FilePath, - projectConfigHcPkg :: Flag FilePath, - projectConfigHaddockIndex :: Flag PathTemplate, - - -- Things that only make sense for manual mode, not --local mode - -- too much control! 
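How that resolve step works in miniature: the optional fields merge right-biased, much like Data.Monoid.Last, so in defaults <> userConfig whatever the user set wins and everything else falls back to the default. A self-contained sketch of the pattern (Settings and its fields are invented stand-ins for the Flag-valued fields):

import Data.Monoid (Last(..))

-- Stand-ins for two Flag-valued configuration fields.
data Settings = Settings
  { maxBackjumps :: Last Int
  , reorderGoals :: Last Bool
  } deriving Show

instance Semigroup Settings where
  a <> b = Settings (maxBackjumps a <> maxBackjumps b)
                    (reorderGoals a <> reorderGoals b)

instance Monoid Settings where
  mempty = Settings mempty mempty

main :: IO ()
main = do
  let defaults = Settings (Last (Just 2000)) (Last (Just False))
      user     = Settings (Last Nothing)     (Last (Just True))
  -- The user did not set maxBackjumps, so the default survives;
  -- reorderGoals was set, so the user's value wins.
  print (defaults <> user)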
- --projectConfigUserInstall :: Flag Bool, - --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), - --TODO: [required eventually] decide what to do with InstallDirs - -- currently we don't allow it to be specified in the config file - --projectConfigPackageDBs :: [Maybe PackageDB], - - -- configuration used both by the solver and other phases - projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. - projectConfigLocalRepos :: NubList FilePath, - - -- solver configuration - projectConfigConstraints :: [(UserConstraint, ConstraintSource)], - projectConfigPreferences :: [Dependency], - projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused - projectConfigSolver :: Flag PreSolver, - projectConfigAllowNewer :: Maybe AllowNewer, - projectConfigMaxBackjumps :: Flag Int, - projectConfigReorderGoals :: Flag Bool, - projectConfigStrongFlags :: Flag Bool - - -- More things that only make sense for manual mode, not --local mode - -- too much control! - --projectConfigIndependentGoals :: Flag Bool, - --projectConfigShadowPkgs :: Flag Bool, - --projectConfigReinstall :: Flag Bool, - --projectConfigAvoidReinstalls :: Flag Bool, - --projectConfigOverrideReinstall :: Flag Bool, - --projectConfigUpgradeDeps :: Flag Bool - } - deriving (Eq, Show, Generic) - - --- | Project configuration that is specific to each package, that is where we --- can in principle have different values for different packages in the same --- project. --- -data PackageConfig - = PackageConfig { - packageConfigProgramPaths :: MapLast String FilePath, - packageConfigProgramArgs :: MapMappend String [String], - packageConfigProgramPathExtra :: NubList FilePath, - packageConfigFlagAssignment :: FlagAssignment, - packageConfigVanillaLib :: Flag Bool, - packageConfigSharedLib :: Flag Bool, - packageConfigDynExe :: Flag Bool, - packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out - packageConfigProfLib :: Flag Bool, -- this duplication - packageConfigProfExe :: Flag Bool, -- and consistency - packageConfigProfDetail :: Flag ProfDetailLevel, - packageConfigProfLibDetail :: Flag ProfDetailLevel, - packageConfigConfigureArgs :: [String], - packageConfigOptimization :: Flag OptimisationLevel, - packageConfigProgPrefix :: Flag PathTemplate, - packageConfigProgSuffix :: Flag PathTemplate, - packageConfigExtraLibDirs :: [FilePath], - packageConfigExtraFrameworkDirs :: [FilePath], - packageConfigExtraIncludeDirs :: [FilePath], - packageConfigGHCiLib :: Flag Bool, - packageConfigSplitObjs :: Flag Bool, - packageConfigStripExes :: Flag Bool, - packageConfigStripLibs :: Flag Bool, - packageConfigTests :: Flag Bool, - packageConfigBenchmarks :: Flag Bool, - packageConfigCoverage :: Flag Bool, - packageConfigRelocatable :: Flag Bool, - packageConfigDebugInfo :: Flag DebugInfoLevel, - packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this - packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this - packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockInternal :: Flag Bool, 
--TODO: [required eventually] use this - packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this - packageConfigHaddockHscolour :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this - packageConfigHaddockContents :: Flag PathTemplate --TODO: [required eventually] use this - } - deriving (Eq, Show, Generic) - -instance Binary ProjectConfig -instance Binary ProjectConfigBuildOnly -instance Binary ProjectConfigShared -instance Binary PackageConfig - - --- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that takes --- the last value rather than the first value for overlapping keys. -newtype MapLast k v = MapLast { getMapLast :: Map k v } - deriving (Eq, Show, Functor, Generic, Binary) - -instance Ord k => Monoid (MapLast k v) where - mempty = MapLast Map.empty - mappend = (<>) - -instance Ord k => Semigroup (MapLast k v) where - MapLast a <> MapLast b = MapLast (flip Map.union a b) - -- rather than Map.union which is the normal Map monoid instance - - --- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that --- 'mappend's values of overlapping keys rather than taking the first. -newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } - deriving (Eq, Show, Functor, Generic, Binary) - -instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where - mempty = MapMappend Map.empty - mappend = (<>) - -instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where - MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) - -- rather than Map.union which is the normal Map monoid instance - - -instance Monoid ProjectConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfig where - (<>) = gmappend - - -instance Monoid ProjectConfigBuildOnly where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfigBuildOnly where - (<>) = gmappend - - -instance Monoid ProjectConfigShared where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfigShared where - (<>) = gmappend - - -instance Monoid PackageConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup PackageConfig where - (<>) = gmappend - ----------------------------------------- --- Resolving configuration to settings --- - --- | Resolved configuration for the solver. The idea is that this is easier to --- use than the raw configuration because in the raw configuration everything --- is optional (monoidial). In the 'BuildTimeSettings' every field is filled --- in, if only with the defaults. --- --- Use 'resolveSolverSettings' to make one from the project config (by --- applying defaults etc). --- -data SolverSettings - = SolverSettings { - solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. - solverSettingLocalRepos :: [FilePath], - solverSettingConstraints :: [(UserConstraint, ConstraintSource)], - solverSettingPreferences :: [Dependency], - solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages - solverSettingFlagAssignments :: Map PackageName FlagAssignment, - solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused - solverSettingSolver :: PreSolver, - solverSettingAllowNewer :: AllowNewer, - solverSettingMaxBackjumps :: Maybe Int, - solverSettingReorderGoals :: Bool, - solverSettingStrongFlags :: Bool - -- Things that only make sense for manual mode, not --local mode - -- too much control! 
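A quick illustration of the two Map monoids defined above, using Data.Map directly; the keys and values are made up:

import qualified Data.Map as Map

main :: IO ()
main = do
  -- MapLast: for overlapping keys the right-hand ("later") value wins,
  -- which is why the instance above uses flip Map.union (Map.union is left-biased).
  let earlier = Map.fromList [("ghc", "/old/ghc")]
      later   = Map.fromList [("ghc", "/new/ghc")]
  print (Map.union later earlier)          -- fromList [("ghc","/new/ghc")]

  -- MapMappend: overlapping values are combined with (<>) instead.
  let xs = Map.fromList [("ghc", ["-O2"])]
      ys = Map.fromList [("ghc", ["-Wall"])]
  print (Map.unionWith (<>) xs ys)         -- fromList [("ghc",["-O2","-Wall"])]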
- --solverSettingIndependentGoals :: Bool, - --solverSettingShadowPkgs :: Bool, - --solverSettingReinstall :: Bool, - --solverSettingAvoidReinstalls :: Bool, - --solverSettingOverrideReinstall :: Bool, - --solverSettingUpgradeDeps :: Bool - } - deriving (Eq, Show, Generic) - -instance Binary SolverSettings - - --- | Resolved configuration for things that affect how we build and not the --- value of the things we build. The idea is that this is easier to use than --- the raw configuration because in the raw configuration everything is --- optional (monoidial). In the 'BuildTimeSettings' every field is filled in, --- if only with the defaults. --- --- Use 'resolveBuildTimeSettings' to make one from the project config (by --- applying defaults etc). --- -data BuildTimeSettings - = BuildTimeSettings { - buildSettingDryRun :: Bool, - buildSettingOnlyDeps :: Bool, - buildSettingSummaryFile :: [PathTemplate], - buildSettingLogFile :: Maybe (Compiler -> Platform - -> PackageId -> UnitId - -> FilePath), - buildSettingLogVerbosity :: Verbosity, - buildSettingBuildReports :: ReportLevel, - buildSettingReportPlanningFailure :: Bool, - buildSettingSymlinkBinDir :: [FilePath], - buildSettingOneShot :: Bool, - buildSettingNumJobs :: Int, - buildSettingOfflineMode :: Bool, - buildSettingKeepTempFiles :: Bool, - buildSettingRemoteRepos :: [RemoteRepo], - buildSettingLocalRepos :: [FilePath], - buildSettingCacheDir :: FilePath, - buildSettingHttpTransport :: Maybe String, - buildSettingIgnoreExpiry :: Bool, - buildSettingRootCmd :: Maybe String - } - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,747 +0,0 @@ -{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveDataTypeable #-} - --- | Handling project configuration. --- -module Distribution.Client.ProjectConfig ( - - -- * Types for project config - ProjectConfig(..), - ProjectConfigBuildOnly(..), - ProjectConfigShared(..), - PackageConfig(..), - MapLast(..), - MapMappend(..), - - -- * Project config files - findProjectRoot, - readProjectConfig, - writeProjectLocalExtraConfig, - writeProjectConfigFile, - commandLineFlagsToProjectConfig, - - -- * Packages within projects - ProjectPackageLocation(..), - BadPackageLocations(..), - BadPackageLocation(..), - BadPackageLocationMatch(..), - findProjectPackages, - readSourcePackage, - - -- * Resolving configuration - lookupLocalPackageConfig, - projectConfigWithBuilderRepoContext, - projectConfigWithSolverRepoContext, - SolverSettings(..), - resolveSolverSettings, - BuildTimeSettings(..), - resolveBuildTimeSettings, - - -- * Checking configuration - checkBadPerPackageCompilerPaths, - BadPerPackageCompilerPaths(..) - ) where - -import Distribution.Client.ProjectConfig.Types -import Distribution.Client.ProjectConfig.Legacy -import Distribution.Client.RebuildMonad -import Distribution.Client.Glob - ( isTrivialFilePathGlob ) - -import Distribution.Client.Types -import Distribution.Client.DistDirLayout - ( CabalDirLayout(..) 
) -import Distribution.Client.GlobalFlags - ( RepoContext(..), withRepoContext' ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) -import Distribution.Client.Config - ( loadConfig, defaultConfigFile ) - -import Distribution.Package - ( PackageName, PackageId, packageId, UnitId, Dependency ) -import Distribution.System - ( Platform ) -import Distribution.PackageDescription - ( SourceRepo(..) ) -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo ) -import Distribution.Simple.Program - ( ConfiguredProgram(..) ) -import Distribution.Simple.Setup - ( Flag(Flag), toFlag, flagToMaybe, flagToList - , fromFlag, AllowNewer(..) ) -import Distribution.Client.Setup - ( defaultSolver, defaultMaxBackjumps, ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate - , toPathTemplate, substPathTemplate, initialPathTemplateEnv ) -import Distribution.Simple.Utils - ( die, warn ) -import Distribution.Client.Utils - ( determineNumJobs ) -import Distribution.Utils.NubList - ( fromNubList ) -import Distribution.Verbosity - ( Verbosity, verbose ) -import Distribution.Text -import Distribution.ParseUtils - ( ParseResult(..), locatedErrorMsg, showPWarning ) - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import Control.Monad.Trans (liftIO) -import Control.Exception -import Data.Typeable -import Data.Maybe -import Data.Either -import qualified Data.Map as Map -import Data.Map (Map) -import qualified Data.Set as Set -import Distribution.Compat.Semigroup -import System.FilePath hiding (combine) -import System.Directory -import Network.URI (URI(..), URIAuth(..), parseAbsoluteURI) - - ----------------------------------------- --- Resolving configuration to settings --- - --- | Look up a 'PackageConfig' field in the 'ProjectConfig' for a specific --- 'PackageName'. This returns the configuration that applies to all local --- packages plus any package-specific configuration for this package. --- -lookupLocalPackageConfig :: (Semigroup a, Monoid a) - => (PackageConfig -> a) - -> ProjectConfig - -> PackageName -> a -lookupLocalPackageConfig field ProjectConfig { - projectConfigLocalPackages, - projectConfigSpecificPackage - } pkgname = - field projectConfigLocalPackages - <> maybe mempty field - (Map.lookup pkgname (getMapMappend projectConfigSpecificPackage)) - - --- | Use a 'RepoContext' based on the 'BuildTimeSettings'. --- -projectConfigWithBuilderRepoContext :: Verbosity - -> BuildTimeSettings - -> (RepoContext -> IO a) -> IO a -projectConfigWithBuilderRepoContext verbosity BuildTimeSettings{..} = - withRepoContext' - verbosity - buildSettingRemoteRepos - buildSettingLocalRepos - buildSettingCacheDir - buildSettingHttpTransport - (Just buildSettingIgnoreExpiry) - - --- | Use a 'RepoContext', but only for the solver. The solver does not use the --- full facilities of the 'RepoContext' so we can get away with making one --- that doesn't have an http transport. 
And that avoids having to have access --- to the 'BuildTimeSettings' --- -projectConfigWithSolverRepoContext :: Verbosity - -> FilePath - -> ProjectConfigShared - -> ProjectConfigBuildOnly - -> (RepoContext -> IO a) -> IO a -projectConfigWithSolverRepoContext verbosity downloadCacheRootDir - ProjectConfigShared{..} - ProjectConfigBuildOnly{..} = - withRepoContext' - verbosity - (fromNubList projectConfigRemoteRepos) - (fromNubList projectConfigLocalRepos) - downloadCacheRootDir - (flagToMaybe projectConfigHttpTransport) - (flagToMaybe projectConfigIgnoreExpiry) - - --- | Resolve the project configuration, with all its optional fields, into --- 'SolverSettings' with no optional fields (by applying defaults). --- -resolveSolverSettings :: ProjectConfig -> SolverSettings -resolveSolverSettings ProjectConfig{ - projectConfigShared, - projectConfigLocalPackages, - projectConfigSpecificPackage - } = - SolverSettings {..} - where - --TODO: [required eventually] some of these settings need validation, e.g. - -- the flag assignments need checking. - solverSettingRemoteRepos = fromNubList projectConfigRemoteRepos - solverSettingLocalRepos = fromNubList projectConfigLocalRepos - solverSettingConstraints = projectConfigConstraints - solverSettingPreferences = projectConfigPreferences - solverSettingFlagAssignment = packageConfigFlagAssignment projectConfigLocalPackages - solverSettingFlagAssignments = fmap packageConfigFlagAssignment - (getMapMappend projectConfigSpecificPackage) - solverSettingCabalVersion = flagToMaybe projectConfigCabalVersion - solverSettingSolver = fromFlag projectConfigSolver - solverSettingAllowNewer = fromJust projectConfigAllowNewer - solverSettingMaxBackjumps = case fromFlag projectConfigMaxBackjumps of - n | n < 0 -> Nothing - | otherwise -> Just n - solverSettingReorderGoals = fromFlag projectConfigReorderGoals - solverSettingStrongFlags = fromFlag projectConfigStrongFlags - --solverSettingIndependentGoals = fromFlag projectConfigIndependentGoals - --solverSettingShadowPkgs = fromFlag projectConfigShadowPkgs - --solverSettingReinstall = fromFlag projectConfigReinstall - --solverSettingAvoidReinstalls = fromFlag projectConfigAvoidReinstalls - --solverSettingOverrideReinstall = fromFlag projectConfigOverrideReinstall - --solverSettingUpgradeDeps = fromFlag projectConfigUpgradeDeps - - ProjectConfigShared {..} = defaults <> projectConfigShared - - defaults = mempty { - projectConfigSolver = Flag defaultSolver, - projectConfigAllowNewer = Just AllowNewerNone, - projectConfigMaxBackjumps = Flag defaultMaxBackjumps, - projectConfigReorderGoals = Flag False, - projectConfigStrongFlags = Flag False - --projectConfigIndependentGoals = Flag False, - --projectConfigShadowPkgs = Flag False, - --projectConfigReinstall = Flag False, - --projectConfigAvoidReinstalls = Flag False, - --projectConfigOverrideReinstall = Flag False, - --projectConfigUpgradeDeps = Flag False - } - - --- | Resolve the project configuration, with all its optional fields, into --- 'BuildTimeSettings' with no optional fields (by applying defaults). 
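The per-package lookup described for lookupLocalPackageConfig above has the same shape throughout: the configuration for all local packages, mappend'ed with any package-specific configuration. A reduced sketch using plain Data.Map and an invented ExtraArgs monoid in place of a PackageConfig field:

import qualified Data.Map as Map
import Data.Map (Map)
import Data.Maybe (fromMaybe)

-- Invented stand-in for a monoidal PackageConfig field.
newtype ExtraArgs = ExtraArgs [String] deriving Show

instance Semigroup ExtraArgs where
  ExtraArgs a <> ExtraArgs b = ExtraArgs (a ++ b)

instance Monoid ExtraArgs where
  mempty = ExtraArgs []

-- Settings for all local packages, plus any package-specific settings,
-- combined with (<>) just as lookupLocalPackageConfig does.
lookupFor :: String -> ExtraArgs -> Map String ExtraArgs -> ExtraArgs
lookupFor pkg allLocal specific =
  allLocal <> fromMaybe mempty (Map.lookup pkg specific)

main :: IO ()
main = print (lookupFor "foo"
                        (ExtraArgs ["-Wall"])
                        (Map.fromList [("foo", ExtraArgs ["-O2"])]))
-- ExtraArgs ["-Wall","-O2"]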
--- -resolveBuildTimeSettings :: Verbosity - -> CabalDirLayout - -> ProjectConfigShared - -> ProjectConfigBuildOnly - -> ProjectConfigBuildOnly - -> BuildTimeSettings -resolveBuildTimeSettings verbosity - CabalDirLayout { - cabalLogsDirectory, - cabalPackageCacheDirectory - } - ProjectConfigShared { - projectConfigRemoteRepos, - projectConfigLocalRepos - } - fromProjectFile - fromCommandLine = - BuildTimeSettings {..} - where - buildSettingDryRun = fromFlag projectConfigDryRun - buildSettingOnlyDeps = fromFlag projectConfigOnlyDeps - buildSettingSummaryFile = fromNubList projectConfigSummaryFile - --buildSettingLogFile -- defined below, more complicated - --buildSettingLogVerbosity -- defined below, more complicated - buildSettingBuildReports = fromFlag projectConfigBuildReports - buildSettingSymlinkBinDir = flagToList projectConfigSymlinkBinDir - buildSettingOneShot = fromFlag projectConfigOneShot - buildSettingNumJobs = determineNumJobs projectConfigNumJobs - buildSettingOfflineMode = fromFlag projectConfigOfflineMode - buildSettingKeepTempFiles = fromFlag projectConfigKeepTempFiles - buildSettingRemoteRepos = fromNubList projectConfigRemoteRepos - buildSettingLocalRepos = fromNubList projectConfigLocalRepos - buildSettingCacheDir = cabalPackageCacheDirectory - buildSettingHttpTransport = flagToMaybe projectConfigHttpTransport - buildSettingIgnoreExpiry = fromFlag projectConfigIgnoreExpiry - buildSettingReportPlanningFailure - = fromFlag projectConfigReportPlanningFailure - buildSettingRootCmd = flagToMaybe projectConfigRootCmd - - ProjectConfigBuildOnly{..} = defaults - <> fromProjectFile - <> fromCommandLine - - defaults = mempty { - projectConfigDryRun = toFlag False, - projectConfigOnlyDeps = toFlag False, - projectConfigBuildReports = toFlag NoReports, - projectConfigReportPlanningFailure = toFlag False, - projectConfigOneShot = toFlag False, - projectConfigOfflineMode = toFlag False, - projectConfigKeepTempFiles = toFlag False, - projectConfigIgnoreExpiry = toFlag False - } - - -- The logging logic: what log file to use and what verbosity. - -- - -- If the user has specified --remote-build-reporting=detailed, use the - -- default log file location. If the --build-log option is set, use the - -- provided location. Otherwise don't use logging, unless building in - -- parallel (in which case the default location is used). - -- - buildSettingLogFile :: Maybe (Compiler -> Platform - -> PackageId -> UnitId -> FilePath) - buildSettingLogFile - | useDefaultTemplate = Just (substLogFileName defaultTemplate) - | otherwise = fmap substLogFileName givenTemplate - - defaultTemplate = toPathTemplate $ - cabalLogsDirectory "$pkgid" <.> "log" - givenTemplate = flagToMaybe projectConfigLogFile - - useDefaultTemplate - | buildSettingBuildReports == DetailedReports = True - | isJust givenTemplate = False - | isParallelBuild = True - | otherwise = False - - isParallelBuild = buildSettingNumJobs >= 2 - - substLogFileName :: PathTemplate - -> Compiler -> Platform - -> PackageId -> UnitId -> FilePath - substLogFileName template compiler platform pkgid uid = - fromPathTemplate (substPathTemplate env template) - where - env = initialPathTemplateEnv - pkgid uid (compilerInfo compiler) platform - - -- If the user has specified --remote-build-reporting=detailed or - -- --build-log, use more verbose logging. 
- -- - buildSettingLogVerbosity - | overrideVerbosity = max verbose verbosity - | otherwise = verbosity - - overrideVerbosity - | buildSettingBuildReports == DetailedReports = True - | isJust givenTemplate = True - | isParallelBuild = False - | otherwise = False - - ---------------------------------------------- --- Reading and writing project config files --- - --- | Find the root of this project. --- --- Searches for an explicit @cabal.project@ file, in the current directory or --- parent directories. If no project file is found then the current dir is the --- project root (and the project will use an implicit config). --- -findProjectRoot :: IO FilePath -findProjectRoot = do - - curdir <- getCurrentDirectory - homedir <- getHomeDirectory - - -- Search upwards. If we get to the users home dir or the filesystem root, - -- then use the current dir - let probe dir | isDrive dir || dir == homedir - = return curdir -- implicit project root - probe dir = do - exists <- doesFileExist (dir "cabal.project") - if exists - then return dir -- explicit project root - else probe (takeDirectory dir) - - probe curdir - --TODO: [nice to have] add compat support for old style sandboxes - - --- | Read all the config relevant for a project. This includes the project --- file if any, plus other global config. --- -readProjectConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig -readProjectConfig verbosity projectRootDir = do - global <- readGlobalConfig verbosity - local <- readProjectLocalConfig verbosity projectRootDir - extra <- readProjectLocalExtraConfig verbosity projectRootDir - return (global <> local <> extra) - - --- | Reads an explicit @cabal.project@ file in the given project root dir, --- or returns the default project config for an implicitly defined project. --- -readProjectLocalConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig -readProjectLocalConfig verbosity projectRootDir = do - usesExplicitProjectRoot <- liftIO $ doesFileExist projectFile - if usesExplicitProjectRoot - then do - monitorFiles [monitorFileHashed projectFile] - liftIO readProjectFile - else do - monitorFiles [monitorNonExistentFile projectFile] - return defaultImplicitProjectConfig - - where - projectFile = projectRootDir "cabal.project" - readProjectFile = - reportParseResult verbosity "project file" projectFile - . parseProjectConfig - =<< readFile projectFile - - defaultImplicitProjectConfig :: ProjectConfig - defaultImplicitProjectConfig = - mempty { - -- We expect a package in the current directory. - projectPackages = [ "./*.cabal" ], - - -- This is to automatically pick up deps that we unpack locally. - projectPackagesOptional = [ "./*/*.cabal" ] - } - - --- | Reads a @cabal.project.extra@ file in the given project root dir, --- or returns empty. This file gets written by @cabal configure@, or in --- principle can be edited manually or by other tools. --- -readProjectLocalExtraConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig -readProjectLocalExtraConfig verbosity projectRootDir = do - hasExtraConfig <- liftIO $ doesFileExist projectExtraConfigFile - if hasExtraConfig - then do monitorFiles [monitorFileHashed projectExtraConfigFile] - liftIO readProjectExtraConfigFile - else do monitorFiles [monitorNonExistentFile projectExtraConfigFile] - return mempty - where - projectExtraConfigFile = projectRootDir "cabal.project.local" - - readProjectExtraConfigFile = - reportParseResult verbosity "project local configuration file" - projectExtraConfigFile - . 
parseProjectConfig - =<< readFile projectExtraConfigFile - - --- | Parse the 'ProjectConfig' format. --- --- For the moment this is implemented in terms of parsers for legacy --- configuration types, plus a conversion. --- -parseProjectConfig :: String -> ParseResult ProjectConfig -parseProjectConfig content = - convertLegacyProjectConfig <$> - parseLegacyProjectConfig content - - --- | Render the 'ProjectConfig' format. --- --- For the moment this is implemented in terms of a pretty printer for the --- legacy configuration types, plus a conversion. --- -showProjectConfig :: ProjectConfig -> String -showProjectConfig = - showLegacyProjectConfig . convertToLegacyProjectConfig - - --- | Write a @cabal.project.extra@ file in the given project root dir. --- -writeProjectLocalExtraConfig :: FilePath -> ProjectConfig -> IO () -writeProjectLocalExtraConfig projectRootDir = - writeProjectConfigFile projectExtraConfigFile - where - projectExtraConfigFile = projectRootDir "cabal.project.local" - - --- | Write in the @cabal.project@ format to the given file. --- -writeProjectConfigFile :: FilePath -> ProjectConfig -> IO () -writeProjectConfigFile file = - writeFile file . showProjectConfig - - --- | Read the user's @~/.cabal/config@ file. --- -readGlobalConfig :: Verbosity -> Rebuild ProjectConfig -readGlobalConfig verbosity = do - config <- liftIO (loadConfig verbosity mempty) - configFile <- liftIO defaultConfigFile - monitorFiles [monitorFileHashed configFile] - return (convertLegacyGlobalConfig config) - --TODO: do this properly, there's several possible locations - -- and env vars, and flags for selecting the global config - - -reportParseResult :: Verbosity -> String -> FilePath -> ParseResult a -> IO a -reportParseResult verbosity _filetype filename (ParseOk warnings x) = do - unless (null warnings) $ - let msg = unlines (map (showPWarning filename) warnings) - in warn verbosity msg - return x -reportParseResult _verbosity filetype filename (ParseFailed err) = - let (line, msg) = locatedErrorMsg err - in die $ "Error parsing " ++ filetype ++ " " ++ filename - ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg - - ---------------------------------------------- --- Reading packages in the project --- - --- | The location of a package as part of a project. Local file paths are --- either absolute (if the user specified it as such) or they are relative --- to the project root. --- -data ProjectPackageLocation = - ProjectPackageLocalCabalFile FilePath - | ProjectPackageLocalDirectory FilePath FilePath -- dir and .cabal file - | ProjectPackageLocalTarball FilePath - | ProjectPackageRemoteTarball URI - | ProjectPackageRemoteRepo SourceRepo - | ProjectPackageNamed Dependency - deriving Show - - --- | Exception thrown by 'findProjectPackages'. 
--- -newtype BadPackageLocations = BadPackageLocations [BadPackageLocation] - deriving (Show, Typeable) - -instance Exception BadPackageLocations ---TODO: [required eventually] displayException for nice rendering ---TODO: [nice to have] custom exception subclass for Doc rendering, colour etc - -data BadPackageLocation - = BadPackageLocationFile BadPackageLocationMatch - | BadLocGlobEmptyMatch String - | BadLocGlobBadMatches String [BadPackageLocationMatch] - | BadLocUnexpectedUriScheme String - | BadLocUnrecognisedUri String - | BadLocUnrecognised String - deriving Show - -data BadPackageLocationMatch - = BadLocUnexpectedFile String - | BadLocNonexistantFile String - | BadLocDirNoCabalFile String - | BadLocDirManyCabalFiles String - deriving Show - - --- | Given the project config, --- --- Throws 'BadPackageLocations'. --- -findProjectPackages :: FilePath -> ProjectConfig - -> Rebuild [ProjectPackageLocation] -findProjectPackages projectRootDir ProjectConfig{..} = do - - requiredPkgs <- findPackageLocations True projectPackages - optionalPkgs <- findPackageLocations False projectPackagesOptional - let repoPkgs = map ProjectPackageRemoteRepo projectPackagesRepo - namedPkgs = map ProjectPackageNamed projectPackagesNamed - - return (concat [requiredPkgs, optionalPkgs, repoPkgs, namedPkgs]) - where - findPackageLocations required pkglocstr = do - (problems, pkglocs) <- - partitionEithers <$> mapM (findPackageLocation required) pkglocstr - unless (null problems) $ - liftIO $ throwIO $ BadPackageLocations problems - return (concat pkglocs) - - - findPackageLocation :: Bool -> String - -> Rebuild (Either BadPackageLocation - [ProjectPackageLocation]) - findPackageLocation _required@True pkglocstr = - -- strategy: try first as a file:// or http(s):// URL. - -- then as a file glob (usually encompassing single file) - -- finally as a single file, for files that fail to parse as globs - checkIsUriPackage pkglocstr - `mplusMaybeT` checkIsFileGlobPackage pkglocstr - `mplusMaybeT` checkIsSingleFilePackage pkglocstr - >>= maybe (return (Left (BadLocUnrecognised pkglocstr))) return - - - findPackageLocation _required@False pkglocstr = do - -- just globs for optional case - res <- checkIsFileGlobPackage pkglocstr - case res of - Nothing -> return (Left (BadLocUnrecognised pkglocstr)) - Just (Left _) -> return (Right []) -- it's optional - Just (Right pkglocs) -> return (Right pkglocs) - - - checkIsUriPackage, checkIsFileGlobPackage, checkIsSingleFilePackage - :: String -> Rebuild (Maybe (Either BadPackageLocation - [ProjectPackageLocation])) - checkIsUriPackage pkglocstr = - return $! 
- case parseAbsoluteURI pkglocstr of - Just uri@URI { - uriScheme = scheme, - uriAuthority = Just URIAuth { uriRegName = host } - } - | recognisedScheme && not (null host) -> - Just (Right [ProjectPackageRemoteTarball uri]) - - | not recognisedScheme && not (null host) -> - Just (Left (BadLocUnexpectedUriScheme pkglocstr)) - - | recognisedScheme && null host -> - Just (Left (BadLocUnrecognisedUri pkglocstr)) - where - recognisedScheme = scheme == "http:" || scheme == "https:" - || scheme == "file:" - - _ -> Nothing - - - checkIsFileGlobPackage pkglocstr = - case simpleParse pkglocstr of - Nothing -> return Nothing - Just glob -> liftM Just $ do - matches <- matchFileGlob glob - case matches of - [] | isJust (isTrivialFilePathGlob glob) - -> return (Left (BadPackageLocationFile - (BadLocNonexistantFile pkglocstr))) - - [] -> return (Left (BadLocGlobEmptyMatch pkglocstr)) - - _ -> do - (failures, pkglocs) <- partitionEithers <$> - mapM checkFilePackageMatch matches - if null pkglocs - then return (Left (BadLocGlobBadMatches pkglocstr failures)) - else return (Right pkglocs) - - - checkIsSingleFilePackage pkglocstr = do - let filename = projectRootDir pkglocstr - isFile <- liftIO $ doesFileExist filename - isDir <- liftIO $ doesDirectoryExist filename - if isFile || isDir - then checkFilePackageMatch pkglocstr - >>= either (return . Just . Left . BadPackageLocationFile) - (return . Just . Right . (\x->[x])) - else return Nothing - - - checkFilePackageMatch :: String -> Rebuild (Either BadPackageLocationMatch - ProjectPackageLocation) - checkFilePackageMatch pkglocstr = do - -- The pkglocstr may be absolute or may be relative to the project root. - -- Either way, does the right thing here. We return relative paths if - -- they were relative in the first place. - let abspath = projectRootDir pkglocstr - isDir <- liftIO $ doesDirectoryExist abspath - parentDirExists <- case takeDirectory abspath of - [] -> return False - dir -> liftIO $ doesDirectoryExist dir - case () of - _ | isDir - -> do matches <- matchFileGlob (globStarDotCabal pkglocstr) - case matches of - [cabalFile] - -> return (Right (ProjectPackageLocalDirectory - pkglocstr cabalFile)) - [] -> return (Left (BadLocDirNoCabalFile pkglocstr)) - _ -> return (Left (BadLocDirManyCabalFiles pkglocstr)) - - | extensionIsTarGz pkglocstr - -> return (Right (ProjectPackageLocalTarball pkglocstr)) - - | takeExtension pkglocstr == ".cabal" - -> return (Right (ProjectPackageLocalCabalFile pkglocstr)) - - | parentDirExists - -> return (Left (BadLocNonexistantFile pkglocstr)) - - | otherwise - -> return (Left (BadLocUnexpectedFile pkglocstr)) - - - extensionIsTarGz f = takeExtension f == ".gz" - && takeExtension (dropExtension f) == ".tar" - - --- | A glob to find all the cabal files in a directory. --- --- For a directory @some/dir/@, this is a glob of the form @some/dir/\*.cabal@. --- The directory part can be either absolute or relative. --- -globStarDotCabal :: FilePath -> FilePathGlob -globStarDotCabal dir = - FilePathGlob - (if isAbsolute dir then FilePathRoot root else FilePathRelative) - (foldr (\d -> GlobDir [Literal d]) - (GlobFile [WildCard, Literal ".cabal"]) dirComponents) - where - (root, dirComponents) = fmap splitDirectories (splitDrive dir) - - ---TODO: [code cleanup] use sufficiently recent transformers package -mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) -mplusMaybeT ma mb = do - mx <- ma - case mx of - Nothing -> mb - Just x -> return (Just x) - - --- | Read the @.cabal@ file of the given package. 
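A standalone usage sketch for the mplusMaybeT helper above; the environment variables are just an invented example of probing one source and falling back to another:

import System.Environment (lookupEnv)

-- Same definition as the local helper above.
mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a)
mplusMaybeT ma mb = do
  mx <- ma
  case mx of
    Nothing -> mb
    Just x  -> return (Just x)

main :: IO ()
main = do
  -- Prefer the first probe; only run the second if the first yields Nothing.
  editor <- lookupEnv "VISUAL" `mplusMaybeT` lookupEnv "EDITOR"
  print editor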
--- --- Note here is where we convert from project-root relative paths to absolute --- paths. --- -readSourcePackage :: Verbosity -> ProjectPackageLocation - -> Rebuild SourcePackage -readSourcePackage verbosity (ProjectPackageLocalCabalFile cabalFile) = - readSourcePackage verbosity (ProjectPackageLocalDirectory dir cabalFile) - where - dir = takeDirectory cabalFile - -readSourcePackage verbosity (ProjectPackageLocalDirectory dir cabalFile) = do - monitorFiles [monitorFileHashed cabalFile] - root <- askRoot - pkgdesc <- liftIO $ readPackageDescription verbosity (root cabalFile) - return SourcePackage { - packageInfoId = packageId pkgdesc, - packageDescription = pkgdesc, - packageSource = LocalUnpackedPackage (root dir), - packageDescrOverride = Nothing - } -readSourcePackage _verbosity _ = - fail $ "TODO: add support for fetching and reading local tarballs, remote " - ++ "tarballs, remote repos and passing named packages through" - - ---------------------------------------------- --- Checking configuration sanity --- - -data BadPerPackageCompilerPaths - = BadPerPackageCompilerPaths [(PackageName, String)] - deriving (Show, Typeable) - -instance Exception BadPerPackageCompilerPaths ---TODO: [required eventually] displayException for nice rendering ---TODO: [nice to have] custom exception subclass for Doc rendering, colour etc - --- | The project configuration is not allowed to specify program locations for --- programs used by the compiler as these have to be the same for each set of --- packages. --- --- We cannot check this until we know which programs the compiler uses, which --- in principle is not until we've configured the compiler. --- --- Throws 'BadPerPackageCompilerPaths' --- -checkBadPerPackageCompilerPaths :: [ConfiguredProgram] - -> Map PackageName PackageConfig - -> IO () -checkBadPerPackageCompilerPaths compilerPrograms packagesConfig = - case [ (pkgname, progname) - | let compProgNames = Set.fromList (map programId compilerPrograms) - , (pkgname, pkgconf) <- Map.toList packagesConfig - , progname <- Map.keys (getMapLast (packageConfigProgramPaths pkgconf)) - , progname `Set.member` compProgNames ] of - [] -> return () - ps -> throwIO (BadPerPackageCompilerPaths ps) - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectOrchestration.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectOrchestration.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectOrchestration.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectOrchestration.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,493 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} - --- | This module deals with building and incrementally rebuilding a collection --- of packages. It is what backs the @cabal build@ and @configure@ commands, --- as well as being a core part of @run@, @test@, @bench@ and others. --- --- The primary thing is in fact rebuilding (and trying to make that quick by --- not redoing unnecessary work), so building from scratch is just a special --- case. 
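The compiler-program check above is essentially a membership test between per-package maps and a set of compiler program names. A reduced sketch with invented program and package names, using plain Data.Map and Data.Set:

import qualified Data.Map as Map
import qualified Data.Set as Set

-- Flag any per-package program-path override that names one of the
-- compiler's own programs, mirroring the list comprehension above.
badOverrides :: [String]
             -> Map.Map String (Map.Map String FilePath)
             -> [(String, String)]
badOverrides compilerProgs perPkgPaths =
  [ (pkg, prog)
  | let compProgs = Set.fromList compilerProgs
  , (pkg, paths) <- Map.toList perPkgPaths
  , prog <- Map.keys paths
  , prog `Set.member` compProgs
  ]

main :: IO ()
main = print (badOverrides ["ghc", "ghc-pkg"]
                (Map.fromList
                  [ ("some-pkg", Map.fromList
                      [ ("ghc",  "/usr/bin/ghc")
                      , ("alex", "/usr/bin/alex") ]) ]))
-- [("some-pkg","ghc")]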
--- --- The build process and the code can be understood by breaking it down into --- three major parts: --- --- * The 'ElaboratedInstallPlan' type --- --- * The \"what to do\" phase, where we look at the all input configuration --- (project files, .cabal files, command line etc) and produce a detailed --- plan of what to do -- the 'ElaboratedInstallPlan'. --- --- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we --- re-execute it. --- --- As far as possible, the \"what to do\" phase embodies all the policy, leaving --- the \"do it\" phase policy free. The first phase contains more of the --- complicated logic, but it is contained in code that is either pure or just --- has read effects (except cache updates). Then the second phase does all the --- actions to build packages, but as far as possible it just follows the --- instructions and avoids any logic for deciding what to do (apart from --- recompilation avoidance in executing the plan). --- --- This division helps us keep the code under control, making it easier to --- understand, test and debug. So when you are extending these modules, please --- think about which parts of your change belong in which part. It is --- perfectly ok to extend the description of what to do (i.e. the --- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the --- first phase. Also, the second phase does not have direct access to any of --- the input configuration anyway; all the information has to flow via the --- 'ElaboratedInstallPlan'. --- -module Distribution.Client.ProjectOrchestration ( - -- * Pre-build phase: decide what to do. - runProjectPreBuildPhase, - CliConfigFlags, - PreBuildHooks(..), - ProjectBuildContext(..), - - -- ** Adjusting the plan - selectTargets, - printPlan, - - -- * Build phase: now do it. - runProjectBuildPhase, - - -- * Post build actions - reportBuildFailures, - ) where - -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanning -import Distribution.Client.ProjectBuilding - -import Distribution.Client.Types - hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) - , DocsResult(..), TestsResult(..) ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.BuildTarget - ( UserBuildTarget, resolveUserBuildTargets - , BuildTarget(..), buildTargetPackage ) -import Distribution.Client.DistDirLayout -import Distribution.Client.Config (defaultCabalDir) -import Distribution.Client.Setup hiding (packageName) - -import Distribution.Package - hiding (InstalledPackageId, installedPackageId) -import qualified Distribution.PackageDescription as PD -import Distribution.PackageDescription (FlagAssignment) -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Simple.Setup (HaddockFlags) - -import Distribution.Simple.Utils (die, notice) -import Distribution.Verbosity -import Distribution.Text - -import qualified Data.Set as Set -import qualified Data.Map as Map -import Data.Map (Map) -import Data.List -import Data.Either -import System.Exit (exitFailure) - - --- | Command line configuration flags. These are used to extend\/override the --- project configuration. --- -type CliConfigFlags = ( GlobalFlags - , ConfigFlags, ConfigExFlags - , InstallFlags, HaddockFlags ) - --- | Hooks to alter the behaviour of 'runProjectPreBuildPhase'. --- --- For example the @configure@, @build@ and @repl@ commands use this to get --- their different behaviour. 
--- -data PreBuildHooks = PreBuildHooks { - hookPrePlanning :: FilePath - -> DistDirLayout - -> ProjectConfig - -> IO (), - hookSelectPlanSubset :: ElaboratedInstallPlan - -> IO ElaboratedInstallPlan - } - --- | This holds the context between the pre-build and build phases. --- -data ProjectBuildContext = ProjectBuildContext { - distDirLayout :: DistDirLayout, - elaboratedPlan :: ElaboratedInstallPlan, - elaboratedShared :: ElaboratedSharedConfig, - pkgsBuildStatus :: BuildStatusMap, - buildSettings :: BuildTimeSettings - } - - --- | Pre-build phase: decide what to do. --- -runProjectPreBuildPhase :: Verbosity - -> CliConfigFlags - -> PreBuildHooks - -> IO ProjectBuildContext -runProjectPreBuildPhase - verbosity - ( globalFlags - , configFlags, configExFlags - , installFlags, haddockFlags ) - PreBuildHooks{..} = do - - cabalDir <- defaultCabalDir - let cabalDirLayout = defaultCabalDirLayout cabalDir - - projectRootDir <- findProjectRoot - let distDirLayout = defaultDistDirLayout projectRootDir - - let cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - - hookPrePlanning - projectRootDir - distDirLayout - cliConfig - - -- Take the project configuration and make a plan for how to build - -- everything in the project. This is independent of any specific targets - -- the user has asked for. - -- - (elaboratedPlan, elaboratedShared, projectConfig) <- - rebuildInstallPlan verbosity - projectRootDir distDirLayout cabalDirLayout - cliConfig - - let buildSettings = resolveBuildTimeSettings - verbosity cabalDirLayout - (projectConfigShared projectConfig) - (projectConfigBuildOnly projectConfig) - (projectConfigBuildOnly cliConfig) - - -- The plan for what to do is represented by an 'ElaboratedInstallPlan' - - -- Now given the specific targets the user has asked for, decide - -- which bits of the plan we will want to execute. - -- - elaboratedPlan' <- hookSelectPlanSubset elaboratedPlan - - -- Check if any packages don't need rebuilding, and improve the plan. - -- This also gives us more accurate reasons for the --dry-run output. - -- - (elaboratedPlan'', pkgsBuildStatus) <- - rebuildTargetsDryRun distDirLayout - elaboratedPlan' - - return ProjectBuildContext { - distDirLayout, - elaboratedPlan = elaboratedPlan'', - elaboratedShared, - pkgsBuildStatus, - buildSettings - } - - --- | Build phase: now do it. --- --- Execute all or parts of the description of what to do to build or --- rebuild the various packages needed. --- -runProjectBuildPhase :: Verbosity - -> ProjectBuildContext - -> IO ElaboratedInstallPlan -runProjectBuildPhase verbosity ProjectBuildContext {..} = - rebuildTargets verbosity - distDirLayout - elaboratedPlan - elaboratedShared - pkgsBuildStatus - buildSettings - - -- Note that it is a deliberate design choice that the 'buildTargets' is - -- not passed to phase 1, and the various bits of input config is not - -- passed to phase 2. - -- - -- We make the install plan without looking at the particular targets the - -- user asks us to build. The set of available things we can build is - -- discovered from the env and config and is used to make the install plan. - -- The targets just tell us which parts of the install plan to execute. - -- - -- Conversely, executing the plan does not directly depend on any of the - -- input config. The bits that are needed (or better, the decisions based - -- on it) all go into the install plan. 
- - -- Notionally, the 'BuildFlags' should be things that do not affect what - -- we build, just how we do it. These ones of course do - - ------------------------------------------------------------------------------- --- Taking targets into account, selecting what to build --- - --- | Adjust an 'ElaboratedInstallPlan' by selecting just those parts of it --- required to build the given user targets. --- --- How to get the 'PackageTarget's from the 'UserBuildTarget' is customisable. --- -selectTargets :: PackageTarget - -> (ComponentTarget -> PackageTarget) - -> [UserBuildTarget] - -> ElaboratedInstallPlan - -> IO ElaboratedInstallPlan -selectTargets targetDefaultComponents targetSpecificComponent - userBuildTargets installPlan = do - - -- Match the user targets against the available targets. If no targets are - -- given this uses the package in the current directory, if any. - -- - buildTargets <- resolveUserBuildTargets localPackages userBuildTargets - --TODO: [required eventually] report something if there are no targets - - --TODO: [required eventually] - -- we cannot resolve names of packages other than those that are - -- directly in the current plan. We ought to keep a set of the known - -- hackage packages so we can resolve names to those. Though we don't - -- really need that until we can do something sensible with packages - -- outside of the project. - - -- Now check if those targets belong to the current project or not. - -- Ultimately we want to do something sensible for targets not in this - -- project, but for now we just bail. This gives us back the ipkgid from - -- the plan. - -- - buildTargets' <- either reportBuildTargetProblems return - $ resolveAndCheckTargets - targetDefaultComponents - targetSpecificComponent - installPlan - buildTargets - - -- Finally, prune the install plan to cover just those target packages - -- and their deps. - -- - return (pruneInstallPlanToTargets buildTargets' installPlan) - where - localPackages = - [ (pkgDescription pkg, pkgSourceLocation pkg) - | InstallPlan.Configured pkg <- InstallPlan.toList installPlan ] - --TODO: [code cleanup] is there a better way to identify local packages? - - - -resolveAndCheckTargets :: PackageTarget - -> (ComponentTarget -> PackageTarget) - -> ElaboratedInstallPlan - -> [BuildTarget PackageName] - -> Either [BuildTargetProblem] - (Map InstalledPackageId [PackageTarget]) -resolveAndCheckTargets targetDefaultComponents - targetSpecificComponent - installPlan targets = - case partitionEithers (map checkTarget targets) of - ([], targets') -> Right $ Map.fromListWith (++) - [ (ipkgid, [t]) | (ipkgid, t) <- targets' ] - (problems, _) -> Left problems - where - -- TODO [required eventually] currently all build targets refer to packages - -- inside the project. Ultimately this has to be generalised to allow - -- referring to other packages and targets. - - -- We can ask to build any whole package, project-local or a dependency - checkTarget (BuildTargetPackage pn) - | Just ipkgid <- Map.lookup pn projAllPkgs - = Right (ipkgid, targetDefaultComponents) - - -- But if we ask to build an individual component, then that component - -- had better be in a package that is local to the project. 
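resolveAndCheckTargets gathers every problem with partitionEithers before failing, rather than stopping at the first bad target. The same validate-then-report pattern in a self-contained sketch (the checker and inputs are invented):

import Data.Either (partitionEithers)

-- Validate everything, and only succeed if there were no problems at all.
checkAll :: (a -> Either String b) -> [a] -> Either [String] [b]
checkAll check xs =
  case partitionEithers (map check xs) of
    ([], oks)     -> Right oks
    (problems, _) -> Left problems

main :: IO ()
main = do
  let positive n
        | n > 0     = Right n
        | otherwise = Left (show n ++ " is not positive")
  print (checkAll positive [1, 2, 3 :: Int])    -- Right [1,2,3]
  print (checkAll positive [1, -2, -3 :: Int])  -- Left ["-2 is not positive","-3 is not positive"]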
- -- TODO: and if it's an optional stanza, then that stanza must be available - checkTarget t@(BuildTargetComponent pn cn) - | Just ipkgid <- Map.lookup pn projLocalPkgs - = Right (ipkgid, targetSpecificComponent - (ComponentTarget cn WholeComponent)) - - | Map.member pn projAllPkgs - = Left (BuildTargetComponentNotProjectLocal t) - - checkTarget t@(BuildTargetModule pn cn mn) - | Just ipkgid <- Map.lookup pn projLocalPkgs - = Right (ipkgid, BuildSpecificComponent (ComponentTarget cn (ModuleTarget mn))) - - | Map.member pn projAllPkgs - = Left (BuildTargetComponentNotProjectLocal t) - - checkTarget t@(BuildTargetFile pn cn fn) - | Just ipkgid <- Map.lookup pn projLocalPkgs - = Right (ipkgid, BuildSpecificComponent (ComponentTarget cn (FileTarget fn))) - - | Map.member pn projAllPkgs - = Left (BuildTargetComponentNotProjectLocal t) - - checkTarget t - = Left (BuildTargetNotInProject (buildTargetPackage t)) - - - projAllPkgs, projLocalPkgs :: Map PackageName InstalledPackageId - projAllPkgs = - Map.fromList - [ (packageName pkg, installedPackageId pkg) - | pkg <- InstallPlan.toList installPlan ] - - projLocalPkgs = - Map.fromList - [ (packageName pkg, installedPackageId pkg) - | InstallPlan.Configured pkg <- InstallPlan.toList installPlan - , case pkgSourceLocation pkg of - LocalUnpackedPackage _ -> True; _ -> False - --TODO: [code cleanup] is there a better way to identify local packages? - ] - - --TODO: [research required] what if the solution has multiple versions of this package? - -- e.g. due to setup deps or due to multiple independent sets of - -- packages being built (e.g. ghc + ghcjs in a project) - -data BuildTargetProblem - = BuildTargetNotInProject PackageName - | BuildTargetComponentNotProjectLocal (BuildTarget PackageName) - | BuildTargetOptionalStanzaDisabled Bool - -- ^ @True@: explicitly disabled by user - -- @False@: disabled by solver - -reportBuildTargetProblems :: [BuildTargetProblem] -> IO a -reportBuildTargetProblems = die . unlines . map reportBuildTargetProblem - -reportBuildTargetProblem :: BuildTargetProblem -> String -reportBuildTargetProblem (BuildTargetNotInProject pn) = - "Cannot build the package " ++ display pn ++ ", it is not in this project." - ++ "(either directly or indirectly). If you want to add it to the " - ++ "project then edit the cabal.project file." - -reportBuildTargetProblem (BuildTargetComponentNotProjectLocal t) = - "The package " ++ display (buildTargetPackage t) ++ " is in the " - ++ "project but it is not a locally unpacked package, so " - -reportBuildTargetProblem (BuildTargetOptionalStanzaDisabled _) = undefined - - ------------------------------------------------------------------------------- --- Displaying what we plan to do --- - --- | Print a user-oriented presentation of the install plan, indicating what --- will be built. 
--- -printPlan :: Verbosity -> ProjectBuildContext -> IO () -printPlan verbosity - ProjectBuildContext { - elaboratedPlan, - pkgsBuildStatus, - buildSettings = BuildTimeSettings{buildSettingDryRun} - } - - | null pkgs - = notice verbosity "Up to date" - - | verbosity >= verbose - = notice verbosity $ unlines $ - ("In order, the following " ++ wouldWill ++ " be built:") - : map showPkgAndReason pkgs - - | otherwise - = notice verbosity $ unlines $ - ("In order, the following " ++ wouldWill - ++ " be built (use -v for more details):") - : map showPkg pkgs - where - pkgs = linearizeInstallPlan elaboratedPlan - - wouldWill | buildSettingDryRun = "would" - | otherwise = "will" - - showPkg pkg = display (packageId pkg) - - showPkgAndReason :: ElaboratedReadyPackage -> String - showPkgAndReason (ReadyPackage pkg _) = - display (packageId pkg) ++ - showTargets pkg ++ - showFlagAssignment (nonDefaultFlags pkg) ++ - showStanzas pkg ++ - let buildStatus = pkgsBuildStatus Map.! installedPackageId pkg in - " (" ++ showBuildStatus buildStatus ++ ")" - - nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment - nonDefaultFlags pkg = pkgFlagAssignment pkg \\ pkgFlagDefaults pkg - - showStanzas pkg = concat - $ [ " *test" - | TestStanzas `Set.member` pkgStanzasEnabled pkg ] - ++ [ " *bench" - | BenchStanzas `Set.member` pkgStanzasEnabled pkg ] - - showTargets pkg - | null (pkgBuildTargets pkg) = "" - | otherwise - = " (" ++ unwords [ showComponentTarget pkg t | t <- pkgBuildTargets pkg ] - ++ ")" - - -- TODO: [code cleanup] this should be a proper function in a proper place - showFlagAssignment :: FlagAssignment -> String - showFlagAssignment = concatMap ((' ' :) . showFlagValue) - showFlagValue (f, True) = '+' : showFlagName f - showFlagValue (f, False) = '-' : showFlagName f - showFlagName (PD.FlagName f) = f - - showBuildStatus status = case status of - BuildStatusPreExisting -> "already installed" - BuildStatusDownload {} -> "requires download & build" - BuildStatusUnpack {} -> "requires build" - BuildStatusRebuild _ rebuild -> case rebuild of - BuildStatusConfigure - (MonitoredValueChanged _) -> "configuration changed" - BuildStatusConfigure mreason -> showMonitorChangedReason mreason - BuildStatusBuild _ buildreason -> case buildreason of - BuildReasonDepsRebuilt -> "dependency rebuilt" - BuildReasonFilesChanged - (MonitoredFileChanged _) -> "files changed" - BuildReasonFilesChanged - mreason -> showMonitorChangedReason mreason - BuildReasonExtraTargets _ -> "additional components to build" - BuildReasonEphemeralTargets -> "ephemeral targets" - BuildStatusUpToDate {} -> "up to date" -- doesn't happen - - showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file - showMonitorChangedReason (MonitoredValueChanged _) = "value changed" - showMonitorChangedReason MonitorFirstRun = "first run" - showMonitorChangedReason MonitorCorruptCache = "cannot read state cache" - -linearizeInstallPlan :: ElaboratedInstallPlan -> [ElaboratedReadyPackage] -linearizeInstallPlan = - unfoldr next - where - next plan = case InstallPlan.ready plan of - [] -> Nothing - (pkg:_) -> Just (pkg, plan') - where - ipkgid = installedPackageId pkg - ipkg = Installed.emptyInstalledPackageInfo { - Installed.sourcePackageId = packageId pkg, - Installed.installedUnitId = ipkgid - } - plan' = InstallPlan.completed ipkgid (Just ipkg) - (BuildOk DocsNotTried TestsNotTried) - (InstallPlan.processing [pkg] plan) - --TODO: [code cleanup] This is a bit of a hack, pretending that each package is installed - -- could we use 
InstallPlan.topologicalOrder? - - -reportBuildFailures :: ElaboratedInstallPlan -> IO () -reportBuildFailures plan = - - case [ (pkg, reason) - | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of - [] -> return () - _failed -> exitFailure - --TODO: [required eventually] see the old printBuildFailures for an example - -- of the kind of things we could report, but we want to handle the special - -- case of the current package better, since if you do "cabal build" then - -- you don't need a lot of context to explain where the ghc error message - -- comes from, and indeed extra noise would just be annoying. - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning/Types.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,369 +0,0 @@ -{-# LANGUAGE DeriveGeneric, DeriveDataTypeable #-} - --- | Types used while planning how to build everything in a project. --- --- Primarily this is the 'ElaboratedInstallPlan'. --- -module Distribution.Client.ProjectPlanning.Types ( - SolverInstallPlan, - - -- * Elaborated install plan types - ElaboratedInstallPlan, - ElaboratedConfiguredPackage(..), - ElaboratedPlanPackage, - ElaboratedSharedConfig(..), - ElaboratedReadyPackage, - BuildStyle(..), - CabalFileText, - - -- * Types used in executing an install plan - --TODO: [code cleanup] these types should live with execution, not with - -- plan definition. Need to better separate InstallPlan definition. - GenericBuildResult(..), - BuildResult, - BuildSuccess(..), - BuildFailure(..), - DocsResult(..), - TestsResult(..), - - -- * Build targets - PackageTarget(..), - ComponentTarget(..), - SubComponentTarget(..), - - -- * Setup script - SetupScriptStyle(..), - ) where - -import Distribution.Client.PackageHash - -import Distribution.Client.Types - hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) - , DocsResult(..), TestsResult(..) ) -import Distribution.Client.InstallPlan - ( GenericInstallPlan, InstallPlan, GenericPlanPackage ) -import Distribution.Client.ComponentDeps (ComponentDeps) - -import Distribution.Package - hiding (InstalledPackageId, installedPackageId) -import Distribution.System -import qualified Distribution.PackageDescription as Cabal -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import Distribution.Simple.Compiler -import Distribution.Simple.Program.Db -import Distribution.ModuleName (ModuleName) -import Distribution.Simple.LocalBuildInfo (ComponentName(..)) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.InstallDirs (PathTemplate) -import Distribution.Version - -import Data.Map (Map) -import Data.Set (Set) -import qualified Data.ByteString.Lazy as LBS -import Distribution.Compat.Binary -import GHC.Generics (Generic) -import Data.Typeable (Typeable) -import Control.Exception - - - --- | The type of install plan produced by the solver and used as the starting --- point for the 'ElaboratedInstallPlan'. 
--- -type SolverInstallPlan - = InstallPlan --TODO: [code cleanup] redefine locally or move def to solver interface - - --- | The combination of an elaborated install plan plus a --- 'ElaboratedSharedConfig' contains all the details necessary to be able --- to execute the plan without having to make further policy decisions. --- --- It does not include dynamic elements such as resources (such as http --- connections). --- -type ElaboratedInstallPlan - = GenericInstallPlan InstalledPackageInfo - ElaboratedConfiguredPackage - BuildSuccess BuildFailure - -type ElaboratedPlanPackage - = GenericPlanPackage InstalledPackageInfo - ElaboratedConfiguredPackage - BuildSuccess BuildFailure - ---TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle --- even platform and compiler could be different if we're building things --- like a server + client with ghc + ghcjs -data ElaboratedSharedConfig - = ElaboratedSharedConfig { - - pkgConfigPlatform :: Platform, - pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo - -- | The programs that the compiler configured (e.g. for GHC, the progs - -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are - -- used. - pkgConfigCompilerProgs :: ProgramDb - } - deriving (Show, Generic) - --TODO: [code cleanup] no Eq instance - -instance Binary ElaboratedSharedConfig - -data ElaboratedConfiguredPackage - = ElaboratedConfiguredPackage { - - pkgInstalledId :: InstalledPackageId, - pkgSourceId :: PackageId, - - -- | TODO: [code cleanup] we don't need this, just a few bits from it: - -- build type, spec version - pkgDescription :: Cabal.PackageDescription, - - -- | A total flag assignment for the package - pkgFlagAssignment :: Cabal.FlagAssignment, - - -- | The original default flag assignment, used only for reporting. - pkgFlagDefaults :: Cabal.FlagAssignment, - - -- | The exact dependencies (on other plan packages) - -- - pkgDependencies :: ComponentDeps [ConfiguredId], - - -- | Which optional stanzas (ie testsuites, benchmarks) can be built. - -- This means the solver produced a plan that has them available. - -- This doesn't necessary mean we build them by default. - pkgStanzasAvailable :: Set OptionalStanza, - - -- | Which optional stanzas the user explicitly asked to enable or - -- to disable. This tells us which ones we build by default, and - -- helps with error messages when the user asks to build something - -- they explicitly disabled. - pkgStanzasRequested :: Map OptionalStanza Bool, - - -- | Which optional stanzas (ie testsuites, benchmarks) will actually - -- be enabled during the package configure step. - pkgStanzasEnabled :: Set OptionalStanza, - - -- | Where the package comes from, e.g. tarball, local dir etc. This - -- is not the same as where it may be unpacked to for the build. - pkgSourceLocation :: PackageLocation (Maybe FilePath), - - -- | The hash of the source, e.g. the tarball. We don't have this for - -- local source dir packages. - pkgSourceHash :: Maybe PackageSourceHash, - - --pkgSourceDir ? -- currently passed in later because they can use temp locations - --pkgBuildDir ? 
-- but could in principle still have it here, with optional instr to use temp loc - - pkgBuildStyle :: BuildStyle, - - pkgSetupPackageDBStack :: PackageDBStack, - pkgBuildPackageDBStack :: PackageDBStack, - pkgRegisterPackageDBStack :: PackageDBStack, - - -- | The package contains a library and so must be registered - pkgRequiresRegistration :: Bool, - pkgDescriptionOverride :: Maybe CabalFileText, - - pkgVanillaLib :: Bool, - pkgSharedLib :: Bool, - pkgDynExe :: Bool, - pkgGHCiLib :: Bool, - pkgProfLib :: Bool, - pkgProfExe :: Bool, - pkgProfLibDetail :: ProfDetailLevel, - pkgProfExeDetail :: ProfDetailLevel, - pkgCoverage :: Bool, - pkgOptimization :: OptimisationLevel, - pkgSplitObjs :: Bool, - pkgStripLibs :: Bool, - pkgStripExes :: Bool, - pkgDebugInfo :: DebugInfoLevel, - - pkgProgramPaths :: Map String FilePath, - pkgProgramArgs :: Map String [String], - pkgProgramPathExtra :: [FilePath], - pkgConfigureScriptArgs :: [String], - pkgExtraLibDirs :: [FilePath], - pkgExtraFrameworkDirs :: [FilePath], - pkgExtraIncludeDirs :: [FilePath], - pkgProgPrefix :: Maybe PathTemplate, - pkgProgSuffix :: Maybe PathTemplate, - - pkgInstallDirs :: InstallDirs.InstallDirs FilePath, - - pkgHaddockHoogle :: Bool, - pkgHaddockHtml :: Bool, - pkgHaddockHtmlLocation :: Maybe String, - pkgHaddockExecutables :: Bool, - pkgHaddockTestSuites :: Bool, - pkgHaddockBenchmarks :: Bool, - pkgHaddockInternal :: Bool, - pkgHaddockCss :: Maybe FilePath, - pkgHaddockHscolour :: Bool, - pkgHaddockHscolourCss :: Maybe FilePath, - pkgHaddockContents :: Maybe PathTemplate, - - -- Setup.hs related things: - - -- | One of four modes for how we build and interact with the Setup.hs - -- script, based on whether it's a build-type Custom, with or without - -- explicit deps and the cabal spec version the .cabal file needs. - pkgSetupScriptStyle :: SetupScriptStyle, - - -- | The version of the Cabal command line interface that we are using - -- for this package. This is typically the version of the Cabal lib - -- that the Setup.hs is built against. - pkgSetupScriptCliVersion :: Version, - - -- Build time related: - pkgBuildTargets :: [ComponentTarget], - pkgReplTarget :: Maybe ComponentTarget, - pkgBuildHaddocks :: Bool - } - deriving (Eq, Show, Generic) - -instance Binary ElaboratedConfiguredPackage - -instance Package ElaboratedConfiguredPackage where - packageId = pkgSourceId - -instance HasUnitId ElaboratedConfiguredPackage where - installedUnitId = pkgInstalledId - -instance PackageFixedDeps ElaboratedConfiguredPackage where - depends = fmap (map installedPackageId) . pkgDependencies - --- | This is used in the install plan to indicate how the package will be --- built. --- -data BuildStyle = - -- | The classic approach where the package is built, then the files - -- installed into some location and the result registered in a package db. - -- - -- If the package came from a tarball then it's built in a temp dir and - -- the results discarded. - BuildAndInstall - - -- | The package is built, but the files are not installed anywhere, - -- rather the build dir is kept and the package is registered inplace. - -- - -- Such packages can still subsequently be installed. - -- - -- Typically 'BuildAndInstall' packages will only depend on other - -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones. 
- -- - | BuildInplaceOnly - deriving (Eq, Show, Generic) - -instance Binary BuildStyle - -type CabalFileText = LBS.ByteString - -type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage - InstalledPackageInfo - ---TODO: [code cleanup] this duplicates the InstalledPackageInfo quite a bit in an install plan --- because the same ipkg is used by many packages. So the binary file will be big. --- Could we keep just (ipkgid, deps) instead of the whole InstalledPackageInfo? --- or transform to a shared form when serialising / deserialising - -data GenericBuildResult ipkg iresult ifailure - = BuildFailure ifailure - | BuildSuccess (Maybe ipkg) iresult - deriving (Eq, Show, Generic) - -instance (Binary ipkg, Binary iresult, Binary ifailure) => - Binary (GenericBuildResult ipkg iresult ifailure) - -type BuildResult = GenericBuildResult InstalledPackageInfo - BuildSuccess BuildFailure - -data BuildSuccess = BuildOk DocsResult TestsResult - deriving (Eq, Show, Generic) - -data DocsResult = DocsNotTried | DocsFailed | DocsOk - deriving (Eq, Show, Generic) - -data TestsResult = TestsNotTried | TestsOk - deriving (Eq, Show, Generic) - -data BuildFailure = PlanningFailed --TODO: [required eventually] not yet used - | DependentFailed PackageId - | DownloadFailed String --TODO: [required eventually] not yet used - | UnpackFailed String --TODO: [required eventually] not yet used - | ConfigureFailed String - | BuildFailed String - | TestsFailed String --TODO: [required eventually] not yet used - | InstallFailed String - deriving (Eq, Show, Typeable, Generic) - -instance Exception BuildFailure - -instance Binary BuildFailure -instance Binary BuildSuccess -instance Binary DocsResult -instance Binary TestsResult - - ---------------------------- --- Build targets --- - --- | The various targets within a package. This is more of a high level --- specification than a elaborated prescription. --- -data PackageTarget = - -- | Build the default components in this package. This usually means - -- just the lib and exes, but it can also mean the testsuites and - -- benchmarks if the user explicitly requested them. - BuildDefaultComponents - -- | Build a specific component in this package. - | BuildSpecificComponent ComponentTarget - | ReplDefaultComponent - | ReplSpecificComponent ComponentTarget - | HaddockDefaultComponents - deriving (Eq, Show, Generic) - -data ComponentTarget = ComponentTarget ComponentName SubComponentTarget - deriving (Eq, Show, Generic) - -data SubComponentTarget = WholeComponent - | ModuleTarget ModuleName - | FileTarget FilePath - deriving (Eq, Show, Generic) - -instance Binary PackageTarget -instance Binary ComponentTarget -instance Binary SubComponentTarget - - ---------------------------- --- Setup.hs script policy --- - --- | There are four major cases for Setup.hs handling: --- --- 1. @build-type@ Custom with a @custom-setup@ section --- 2. @build-type@ Custom without a @custom-setup@ section --- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ --- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ --- --- It's also worth noting that packages specifying @cabal-version: >= 1.23@ --- or later that have @build-type@ Custom will always have a @custom-setup@ --- section. Therefore in case 2, the specified @cabal-version@ will always be --- less than 1.23. --- --- In cases 1 and 2 we obviously have to build an external Setup.hs script, --- while in case 4 we can use the internal library API. 
In case 3 we also have --- to build an external Setup.hs script because the package needs a later --- Cabal lib version than we can support internally. --- -data SetupScriptStyle = SetupCustomExplicitDeps - | SetupCustomImplicitDeps - | SetupNonCustomExternalLib - | SetupNonCustomInternalLib - deriving (Eq, Show, Generic) - -instance Binary SetupScriptStyle - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanning.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2283 +0,0 @@ -{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} - --- | Planning how to build everything in a project. --- -module Distribution.Client.ProjectPlanning ( - -- * elaborated install plan types - ElaboratedInstallPlan, - ElaboratedConfiguredPackage(..), - ElaboratedPlanPackage, - ElaboratedSharedConfig(..), - ElaboratedReadyPackage, - BuildStyle(..), - CabalFileText, - - --TODO: [code cleanup] these types should live with execution, not with - -- plan definition. Need to better separate InstallPlan definition. - GenericBuildResult(..), - BuildResult, - BuildSuccess(..), - BuildFailure(..), - DocsResult(..), - TestsResult(..), - - -- * Producing the elaborated install plan - rebuildInstallPlan, - - -- * Build targets - PackageTarget(..), - ComponentTarget(..), - SubComponentTarget(..), - showComponentTarget, - - -- * Selecting a plan subset - pruneInstallPlanToTargets, - - -- * Utils required for building - pkgHasEphemeralBuildTargets, - pkgBuildTargetWholeComponents, - - -- * Setup.hs CLI flags for building - setupHsScriptOptions, - setupHsConfigureFlags, - setupHsBuildFlags, - setupHsBuildArgs, - setupHsReplFlags, - setupHsReplArgs, - setupHsCopyFlags, - setupHsRegisterFlags, - setupHsHaddockFlags, - - packageHashInputs, - - -- TODO: [code cleanup] utils that should live in some shared place? - createPackageDBIfMissing - ) where - -import Distribution.Client.ProjectPlanning.Types -import Distribution.Client.PackageHash -import Distribution.Client.RebuildMonad -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanOutput - -import Distribution.Client.Types - hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) - , DocsResult(..), TestsResult(..) 
) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Client.ComponentDeps (ComponentDeps) -import qualified Distribution.Client.IndexUtils as IndexUtils -import qualified Distribution.Client.PackageIndex as SourcePackageIndex -import Distribution.Client.Targets (userToPackageConstraint) -import Distribution.Client.DistDirLayout -import Distribution.Client.SetupWrapper -import Distribution.Client.JobControl -import Distribution.Client.FetchUtils -import qualified Hackage.Security.Client as Sec -import Distribution.Client.PkgConfigDb -import Distribution.Client.Setup hiding (packageName, cabalVersion) -import Distribution.Utils.NubList - -import Distribution.Package hiding - (InstalledPackageId, installedPackageId) -import Distribution.System -import qualified Distribution.PackageDescription as Cabal -import qualified Distribution.PackageDescription as PD -import qualified Distribution.PackageDescription.Configuration as PD -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.Compiler hiding (Flag) -import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate -import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate -import Distribution.Simple.Program -import Distribution.Simple.Program.Db -import Distribution.Simple.Program.Find -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Setup - (Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault) -import qualified Distribution.Simple.Configure as Cabal -import qualified Distribution.Simple.LocalBuildInfo as Cabal -import Distribution.Simple.LocalBuildInfo (ComponentName(..)) -import qualified Distribution.Simple.Register as Cabal -import qualified Distribution.Simple.InstallDirs as InstallDirs -import qualified Distribution.Simple.BuildTarget as Cabal - -import Distribution.Simple.Utils hiding (matchFileGlob) -import Distribution.Version -import Distribution.Verbosity -import Distribution.Text - -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.Graph as Graph -import qualified Data.Tree as Tree -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad -import Control.Monad.State as State -import Control.Exception -import Data.List -import Data.Maybe -import Data.Either -import Data.Monoid -import Data.Function -import System.FilePath -import System.Directory (doesDirectoryExist) - - ------------------------------------------------------------------------------- --- * Elaborated install plan ------------------------------------------------------------------------------- - --- "Elaborated" -- worked out with great care and nicety of detail; --- executed with great minuteness: elaborate preparations; --- elaborate care. --- --- So here's the idea: --- --- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc --- all passed in as separate args and which are then further selected, --- transformed etc during the execution of the build. 
Instead we construct --- an elaborated install plan that includes everything we will need, and then --- during the execution of the plan we do as little transformation of this --- info as possible. --- --- So we're trying to split the work into two phases: construction of the --- elaborated install plan (which as far as possible should be pure) and --- then simple execution of that plan without any smarts, just doing what the --- plan says to do. --- --- So that means we need a representation of this fully elaborated install --- plan. The representation consists of two parts: --- --- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a --- representation of source packages that includes a lot more detail about --- that package's individual configuration --- --- * A 'ElaboratedSharedConfig'. Some package configuration is the same for --- every package in a plan. Rather than duplicate that info every entry in --- the 'GenericInstallPlan' we keep that separately. --- --- The division between the shared and per-package config is /not set in stone --- for all time/. For example if we wanted to generalise the install plan to --- describe a situation where we want to build some packages with GHC and some --- with GHCJS then the platform and compiler would no longer be shared between --- all packages but would have to be per-package (probably with some sanity --- condition on the graph structure). --- - --- Refer to ProjectPlanning.Types for details of these important types: - --- type ElaboratedInstallPlan = ... --- type ElaboratedPlanPackage = ... --- data ElaboratedSharedConfig = ... --- data ElaboratedConfiguredPackage = ... --- data BuildStyle = - - -sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> Bool -sanityCheckElaboratedConfiguredPackage sharedConfig - pkg@ElaboratedConfiguredPackage{..} = - - pkgStanzasEnabled `Set.isSubsetOf` pkgStanzasAvailable - - -- the stanzas explicitly enabled should be available and enabled - && Map.keysSet (Map.filter id pkgStanzasRequested) - `Set.isSubsetOf` pkgStanzasEnabled - - -- the stanzas explicitly disabled should not be available - && Set.null (Map.keysSet (Map.filter not pkgStanzasRequested) - `Set.intersection` pkgStanzasAvailable) - - && (pkgBuildStyle == BuildInplaceOnly || - installedPackageId pkg == hashedInstalledPackageId - (packageHashInputs sharedConfig pkg)) - - && (pkgBuildStyle == BuildInplaceOnly || - Set.null pkgStanzasAvailable) - - ------------------------------------------------------------------------------- --- * Deciding what to do: making an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------- - -rebuildInstallPlan :: Verbosity - -> FilePath -> DistDirLayout -> CabalDirLayout - -> ProjectConfig - -> IO ( ElaboratedInstallPlan - , ElaboratedSharedConfig - , ProjectConfig ) -rebuildInstallPlan verbosity - projectRootDir - distDirLayout@DistDirLayout { - distDirectory, - distProjectCacheFile, - distProjectCacheDirectory - } - cabalDirLayout@CabalDirLayout { - cabalPackageCacheDirectory, - cabalStoreDirectory, - cabalStorePackageDB - } - cliConfig = - runRebuild projectRootDir $ do - progsearchpath <- liftIO $ getSystemSearchPath - let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty } - - -- The overall improved plan is cached - rerunIfChanged verbosity fileMonitorImprovedPlan - -- react to changes in command line args and the path - (cliConfigPersistent, progsearchpath) $ do - - -- And so is the 
elaborated plan that the improved plan is based on - (elaboratedPlan, elaboratedShared, - projectConfig) <- - rerunIfChanged verbosity fileMonitorElaboratedPlan - (cliConfigPersistent, progsearchpath) $ do - - (projectConfig, projectConfigTransient) <- phaseReadProjectConfig - localPackages <- phaseReadLocalPackages projectConfig - compilerEtc <- phaseConfigureCompiler projectConfig - _ <- phaseConfigurePrograms projectConfig compilerEtc - solverPlan <- phaseRunSolver projectConfigTransient - compilerEtc localPackages - (elaboratedPlan, - elaboratedShared) <- phaseElaboratePlan projectConfigTransient - compilerEtc - solverPlan localPackages - phaseMaintainPlanOutputs elaboratedPlan elaboratedShared - - return (elaboratedPlan, elaboratedShared, - projectConfig) - - -- The improved plan changes each time we install something, whereas - -- the underlying elaborated plan only changes when input config - -- changes, so it's worth caching them separately. - improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared - return (improvedPlan, elaboratedShared, projectConfig) - - where - fileMonitorCompiler = newFileMonitorInCacheDir "compiler" - fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" - fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" - fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" - fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" - - newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b - newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile - - -- Read the cabal.project (or implicit config) and combine it with - -- arguments from the command line - -- - phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig) - phaseReadProjectConfig = do - liftIO $ do - info verbosity "Project settings changed, reconfiguring..." - createDirectoryIfMissingVerbose verbosity False distDirectory - createDirectoryIfMissingVerbose verbosity False distProjectCacheDirectory - - projectConfig <- readProjectConfig verbosity projectRootDir - - -- The project config coming from the command line includes "build only" - -- flags that we don't cache persistently (because like all "build only" - -- flags they do not affect the value of the outcome) but that we do - -- sometimes use during planning (in particular the http transport) - let projectConfigTransient = projectConfig <> cliConfig - projectConfigPersistent = projectConfig - <> cliConfig { - projectConfigBuildOnly = mempty - } - liftIO $ writeProjectConfigFile (distProjectCacheFile "config") - projectConfigPersistent - return (projectConfigPersistent, projectConfigTransient) - - -- Look for all the cabal packages in the project - -- some of which may be local src dirs, tarballs etc - -- - phaseReadLocalPackages :: ProjectConfig - -> Rebuild [SourcePackage] - phaseReadLocalPackages projectConfig = do - - localCabalFiles <- findProjectPackages projectRootDir projectConfig - mapM (readSourcePackage verbosity) localCabalFiles - - - -- Configure the compiler we're using. - -- - -- This is moderately expensive and doesn't change that often so we cache - it independently.
- -- - phaseConfigureCompiler :: ProjectConfig - -> Rebuild (Compiler, Platform, ProgramDb) - phaseConfigureCompiler ProjectConfig { - projectConfigShared = ProjectConfigShared { - projectConfigHcFlavor, - projectConfigHcPath, - projectConfigHcPkg - }, - projectConfigLocalPackages = PackageConfig { - packageConfigProgramPaths, - packageConfigProgramArgs, - packageConfigProgramPathExtra - } - } = do - progsearchpath <- liftIO $ getSystemSearchPath - rerunIfChanged verbosity fileMonitorCompiler - (hcFlavor, hcPath, hcPkg, progsearchpath, - packageConfigProgramPaths, - packageConfigProgramArgs, - packageConfigProgramPathExtra) $ do - - liftIO $ info verbosity "Compiler settings changed, reconfiguring..." - result@(_, _, progdb') <- liftIO $ - Cabal.configCompilerEx - hcFlavor hcPath hcPkg - progdb verbosity - - -- Note that we added the user-supplied program locations and args - -- for /all/ programs, not just those for the compiler prog and - -- compiler-related utils. In principle we don't know which programs - -- the compiler will configure (and it does vary between compilers). - -- We do know however that the compiler will only configure the - -- programs it cares about, and those are the ones we monitor here. - monitorFiles (programsMonitorFiles progdb') - - return result - where - hcFlavor = flagToMaybe projectConfigHcFlavor - hcPath = flagToMaybe projectConfigHcPath - hcPkg = flagToMaybe projectConfigHcPkg - progdb = - userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) - . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) - . modifyProgramSearchPath - (++ [ ProgramSearchPathDir dir - | dir <- fromNubList packageConfigProgramPathExtra ]) - $ defaultProgramDb - - - -- Configuring other programs. - -- - -- Having configred the compiler, now we configure all the remaining - -- programs. This is to check we can find them, and to monitor them for - -- changes. - -- - -- TODO: [required eventually] we don't actually do this yet. - -- - -- We rely on the fact that the previous phase added the program config for - -- all local packages, but that all the programs configured so far are the - -- compiler program or related util programs. - -- - phaseConfigurePrograms :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> Rebuild () - phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do - -- Users are allowed to specify program locations independently for - -- each package (e.g. to use a particular version of a pre-processor - -- for some packages). However they cannot do this for the compiler - -- itself as that's just not going to work. So we check for this. - liftIO $ checkBadPerPackageCompilerPaths - (configuredPrograms compilerprogdb) - (getMapMappend (projectConfigSpecificPackage projectConfig)) - - --TODO: [required eventually] find/configure other programs that the - -- user specifies. - - --TODO: [required eventually] find/configure all build-tools - -- but note that some of them may be built as part of the plan. - - - -- Run the solver to get the initial install plan. - -- This is expensive so we cache it independently. 
- -- - phaseRunSolver :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> [SourcePackage] - -> Rebuild (SolverInstallPlan, PackagesImplicitSetupDeps) - phaseRunSolver projectConfig@ProjectConfig { - projectConfigShared, - projectConfigBuildOnly - } - (compiler, platform, progdb) - localPackages = - rerunIfChanged verbosity fileMonitorSolverPlan - (solverSettings, cabalPackageCacheDirectory, - localPackages, localPackagesEnabledStanzas, - compiler, platform, programsDbSignature progdb) $ do - - installedPkgIndex <- getInstalledPackages verbosity - compiler progdb platform - corePackageDbs - sourcePkgDb <- getSourcePackages verbosity withRepoCtx - pkgConfigDB <- getPkgConfigDb verbosity progdb - - --TODO: [code cleanup] it'd be better if the Compiler contained the - -- ConfiguredPrograms that it needs, rather than relying on the progdb - -- since we don't need to depend on all the programs here, just the - -- ones relevant for the compiler. - - liftIO $ do - solver <- chooseSolver verbosity - (solverSettingSolver solverSettings) - (compilerInfo compiler) - - notice verbosity "Resolving dependencies..." - foldProgress logMsg die return $ - planPackages compiler platform solver solverSettings - installedPkgIndex sourcePkgDb pkgConfigDB - localPackages localPackagesEnabledStanzas - where - corePackageDbs = [GlobalPackageDB] - withRepoCtx = projectConfigWithSolverRepoContext verbosity - cabalPackageCacheDirectory - projectConfigShared - projectConfigBuildOnly - solverSettings = resolveSolverSettings projectConfig - logMsg message rest = debugNoWrap verbosity message >> rest - - localPackagesEnabledStanzas = - Map.fromList - [ (pkgname, stanzas) - | pkg <- localPackages - , let pkgname = packageName pkg - testsEnabled = lookupLocalPackageConfig - packageConfigTests - projectConfig pkgname - benchmarksEnabled = lookupLocalPackageConfig - packageConfigBenchmarks - projectConfig pkgname - stanzas = - Map.fromList $ - [ (TestStanzas, enabled) - | enabled <- flagToList testsEnabled ] - ++ [ (BenchStanzas , enabled) - | enabled <- flagToList benchmarksEnabled ] - ] - - -- Elaborate the solver's install plan to get a fully detailed plan. This - -- version of the plan has the final nix-style hashed ids. - -- - phaseElaboratePlan :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> (SolverInstallPlan, PackagesImplicitSetupDeps) - -> [SourcePackage] - -> Rebuild ( ElaboratedInstallPlan - , ElaboratedSharedConfig ) - phaseElaboratePlan ProjectConfig { - projectConfigShared, - projectConfigLocalPackages, - projectConfigSpecificPackage, - projectConfigBuildOnly - } - (compiler, platform, progdb) - (solverPlan, pkgsImplicitSetupDeps) - localPackages = do - - liftIO $ debug verbosity "Elaborating the install plan..." - - sourcePackageHashes <- - rerunIfChanged verbosity fileMonitorSourceHashes - (packageLocationsSignature solverPlan) $ - getPackageSourceHashes verbosity withRepoCtx solverPlan - - defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler - return $ - elaborateInstallPlan - platform compiler progdb - distDirLayout - cabalDirLayout - solverPlan - pkgsImplicitSetupDeps - localPackages - sourcePackageHashes - defaultInstallDirs - projectConfigShared - projectConfigLocalPackages - (getMapMappend projectConfigSpecificPackage) - where - withRepoCtx = projectConfigWithSolverRepoContext verbosity - cabalPackageCacheDirectory - projectConfigShared - projectConfigBuildOnly - - - -- Update the files we maintain that reflect our current build environment. 
- -- In particular we maintain a JSON representation of the elaborated - -- install plan. - -- - -- TODO: [required eventually] maintain the ghc environment file reflecting - -- the libs available. This will need to be after plan improvement phase. - -- - phaseMaintainPlanOutputs :: ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> Rebuild () - phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = do - liftIO $ debug verbosity "Updating plan.json" - liftIO $ writePlanExternalRepresentation - distDirLayout - elaboratedPlan - elaboratedShared - - - -- Improve the elaborated install plan. The elaborated plan consists - -- mostly of source packages (with full nix-style hashed ids). Where - -- corresponding installed packages already exist in the store, replace - -- them in the plan. - -- - -- Note that we do monitor the store's package db here, so we will redo - -- this improvement phase when the db changes -- including as a result of - -- executing a plan and installing things. - -- - phaseImprovePlan :: ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> Rebuild ElaboratedInstallPlan - phaseImprovePlan elaboratedPlan elaboratedShared = do - - liftIO $ debug verbosity "Improving the install plan..." - recreateDirectory verbosity True storeDirectory - storePkgIndex <- getPackageDBContents verbosity - compiler progdb platform - storePackageDb - let improvedPlan = improveInstallPlanWithPreExistingPackages - storePkgIndex - elaboratedPlan - return improvedPlan - - where - storeDirectory = cabalStoreDirectory (compilerId compiler) - storePackageDb = cabalStorePackageDB (compilerId compiler) - ElaboratedSharedConfig { - pkgConfigPlatform = platform, - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } = elaboratedShared - - -programsMonitorFiles :: ProgramDb -> [MonitorFilePath] -programsMonitorFiles progdb = - [ monitor - | prog <- configuredPrograms progdb - , monitor <- monitorFileSearchPath (programMonitorFiles prog) - (programPath prog) - ] - --- | Select the bits of a 'ProgramDb' to monitor for value changes. --- Use 'programsMonitorFiles' for the files to monitor. --- -programsDbSignature :: ProgramDb -> [ConfiguredProgram] -programsDbSignature progdb = - [ prog { programMonitorFiles = [] - , programOverrideEnv = filter ((/="PATH") . fst) - (programOverrideEnv prog) } - | prog <- configuredPrograms progdb ] - -getInstalledPackages :: Verbosity - -> Compiler -> ProgramDb -> Platform - -> PackageDBStack - -> Rebuild InstalledPackageIndex -getInstalledPackages verbosity compiler progdb platform packagedbs = do - monitorFiles . map monitorFileOrDirectory - =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles - verbosity compiler - packagedbs progdb platform) - liftIO $ IndexUtils.getInstalledPackages - verbosity compiler - packagedbs progdb - -getPackageDBContents :: Verbosity - -> Compiler -> ProgramDb -> Platform - -> PackageDB - -> Rebuild InstalledPackageIndex -getPackageDBContents verbosity compiler progdb platform packagedb = do - monitorFiles . map monitorFileOrDirectory - =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles - verbosity compiler - [packagedb] progdb platform) - liftIO $ do - createPackageDBIfMissing verbosity compiler - progdb [packagedb] - Cabal.getPackageDBContents verbosity compiler - packagedb progdb - -getSourcePackages :: Verbosity -> (forall a. 
(RepoContext -> IO a) -> IO a) - -> Rebuild SourcePackageDb -getSourcePackages verbosity withRepoCtx = do - (sourcePkgDb, repos) <- - liftIO $ - withRepoCtx $ \repoctx -> do - sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoctx - return (sourcePkgDb, repoContextRepos repoctx) - - monitorFiles . map monitorFile - . IndexUtils.getSourcePackagesMonitorFiles - $ repos - return sourcePkgDb - -createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb - -> PackageDBStack -> IO () -createPackageDBIfMissing verbosity compiler progdb packageDbs = - case reverse packageDbs of - SpecificPackageDB dbPath : _ -> do - exists <- liftIO $ Cabal.doesPackageDBExist dbPath - unless exists $ do - createDirectoryIfMissingVerbose verbosity False (takeDirectory dbPath) - Cabal.createPackageDB verbosity compiler progdb False dbPath - _ -> return () - - -getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb -getPkgConfigDb verbosity progdb = do - dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb - -- Just monitor the dirs so we'll notice new .pc files. - -- Alternatively we could monitor all the .pc files too. - forM_ dirs $ \dir -> do - dirExists <- liftIO $ doesDirectoryExist dir - -- TODO: turn this into a utility function - monitorFiles [if dirExists - then monitorDirectory dir - else monitorNonExistentDirectory dir] - - liftIO $ readPkgConfigDb verbosity progdb - - -recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild () -recreateDirectory verbosity createParents dir = do - liftIO $ createDirectoryIfMissingVerbose verbosity createParents dir - monitorFiles [monitorDirectoryExistence dir] - - --- | Select the config values to monitor for changes package source hashes. -packageLocationsSignature :: SolverInstallPlan - -> [(PackageId, PackageLocation (Maybe FilePath))] -packageLocationsSignature solverPlan = - [ (packageId pkg, packageSource pkg) - | InstallPlan.Configured - (ConfiguredPackage pkg _ _ _) <- InstallPlan.toList solverPlan - ] - - --- | Get the 'HashValue' for all the source packages where we use hashes, --- and download any packages required to do so. --- --- Note that we don't get hashes for local unpacked packages. --- -getPackageSourceHashes :: Verbosity - -> (forall a. (RepoContext -> IO a) -> IO a) - -> SolverInstallPlan - -> Rebuild (Map PackageId PackageSourceHash) -getPackageSourceHashes verbosity withRepoCtx solverPlan = do - - -- Determine if and where to get the package's source hash from. - -- - let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))] - allPkgLocations = - [ (packageId pkg, packageSource pkg) - | InstallPlan.Configured - (ConfiguredPackage pkg _ _ _) <- InstallPlan.toList solverPlan ] - - -- Tarballs that were local in the first place. - -- We'll hash these tarball files directly. - localTarballPkgs :: [(PackageId, FilePath)] - localTarballPkgs = - [ (pkgid, tarball) - | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ] - - -- Tarballs from remote URLs. We must have downloaded these already - -- (since we extracted the .cabal file earlier) - --TODO: [required eventually] finish remote tarball functionality --- allRemoteTarballPkgs = --- [ (pkgid, ) --- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ] - - -- Tarballs from repositories, either where the repository provides - -- hashes as part of the repo metadata, or where we will have to - -- download and hash the tarball. 
- repoTarballPkgsWithMetadata :: [(PackageId, Repo)] - repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)] - (repoTarballPkgsWithMetadata, - repoTarballPkgsWithoutMetadata) = - partitionEithers - [ case repo of - RepoSecure{} -> Left (pkgid, repo) - _ -> Right (pkgid, repo) - | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ] - - -- For tarballs from repos that do not have hashes available we now have - -- to check if the packages were downloaded already. - -- - (repoTarballPkgsToDownload, - repoTarballPkgsDownloaded) - <- fmap partitionEithers $ - liftIO $ sequence - [ do mtarball <- checkRepoTarballFetched repo pkgid - case mtarball of - Nothing -> return (Left (pkgid, repo)) - Just tarball -> return (Right (pkgid, tarball)) - | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ] - - (hashesFromRepoMetadata, - repoTarballPkgsNewlyDownloaded) <- - -- Avoid having to initialise the repository (ie 'withRepoCtx') if we - -- don't have to. (The main cost is configuring the http client.) - if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata - then return (Map.empty, []) - else liftIO $ withRepoCtx $ \repoctx -> do - - -- For tarballs from repos that do have hashes available as part of the - -- repo metadata we now load up the index for each repo and retrieve - -- the hashes for the packages - -- - hashesFromRepoMetadata <- - Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions - fmap (Map.fromList . concat) $ - sequence - -- Reading the repo index is expensive so we group the packages by repo - [ repoContextWithSecureRepo repoctx repo $ \secureRepo -> - Sec.withIndex secureRepo $ \repoIndex -> - sequence - [ do hash <- Sec.trusted <$> -- strip off Trusted tag - Sec.indexLookupHash repoIndex pkgid - -- Note that hackage-security currently uses SHA256 - -- but this API could in principle give us some other - -- choice in future. - return (pkgid, hashFromTUF hash) - | pkgid <- pkgids ] - | (repo, pkgids) <- - map (\grp@((_,repo):_) -> (repo, map fst grp)) - . groupBy ((==) `on` (remoteRepoName . repoRemote . snd)) - . sortBy (compare `on` (remoteRepoName . repoRemote . snd)) - $ repoTarballPkgsWithMetadata - ] - - -- For tarballs from repos that do not have hashes available, download - -- the ones we previously determined we need. - -- - repoTarballPkgsNewlyDownloaded <- - sequence - [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid - return (pkgid, tarball) - | (pkgid, repo) <- repoTarballPkgsToDownload ] - - return (hashesFromRepoMetadata, - repoTarballPkgsNewlyDownloaded) - - -- Hash tarball files for packages where we have to do that. This includes - -- tarballs that were local in the first place, plus tarballs from repos, - -- either previously cached or freshly downloaded. - -- - let allTarballFilePkgs :: [(PackageId, FilePath)] - allTarballFilePkgs = localTarballPkgs - ++ repoTarballPkgsDownloaded - ++ repoTarballPkgsNewlyDownloaded - hashesFromTarballFiles <- liftIO $ - fmap Map.fromList $ - sequence - [ do srchash <- readFileHashValue tarball - return (pkgid, srchash) - | (pkgid, tarball) <- allTarballFilePkgs - ] - monitorFiles [ monitorFile tarball - | (_pkgid, tarball) <- allTarballFilePkgs ] - - -- Return the combination - return $! 
hashesFromRepoMetadata - <> hashesFromTarballFiles - - --- ------------------------------------------------------------ --- * Installation planning --- ------------------------------------------------------------ - -planPackages :: Compiler - -> Platform - -> Solver -> SolverSettings - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [SourcePackage] - -> Map PackageName (Map OptionalStanza Bool) - -> Progress String String - (SolverInstallPlan, PackagesImplicitSetupDeps) -planPackages comp platform solver SolverSettings{..} - installedPkgIndex sourcePkgDb pkgConfigDB - localPackages pkgStanzasEnable = - - rememberImplicitSetupDeps (depResolverSourcePkgIndex stdResolverParams) <$> - - resolveDependencies - platform (compilerInfo comp) - pkgConfigDB solver - resolverParams - - where - - --TODO: [nice to have] disable multiple instances restriction in the solver, but then - -- make sure we can cope with that in the output. - resolverParams = - - setMaxBackjumps solverSettingMaxBackjumps - - --TODO: [required eventually] should only be configurable for custom installs - -- . setIndependentGoals solverSettingIndependentGoals - - . setReorderGoals solverSettingReorderGoals - - --TODO: [required eventually] should only be configurable for custom installs - -- . setAvoidReinstalls solverSettingAvoidReinstalls - - --TODO: [required eventually] should only be configurable for custom installs - -- . setShadowPkgs solverSettingShadowPkgs - - . setStrongFlags solverSettingStrongFlags - - --TODO: [required eventually] decide if we need to prefer installed for - -- global packages, or prefer latest even for global packages. Perhaps - -- should be configurable but with a different name than "upgrade-dependencies". - . setPreferenceDefault PreferLatestForSelected - {-(if solverSettingUpgradeDeps - then PreferAllLatest - else PreferLatestForSelected)-} - - . removeUpperBounds solverSettingAllowNewer - - . addDefaultSetupDependencies (defaultSetupDeps comp platform - . PD.packageDescription - . packageDescription) - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- solverSettingPreferences ] - - . addConstraints - -- version constraints from the config file or command line - [ LabeledPackageConstraint (userToPackageConstraint pc) src - | (pc, src) <- solverSettingConstraints ] - - . addPreferences - -- enable stanza preference where the user did not specify - [ PackageStanzasPreference pkgname stanzas - | pkg <- localPackages - , let pkgname = packageName pkg - stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable - stanzas = [ stanza | stanza <- [minBound..maxBound] - , Map.lookup stanza stanzaM == Nothing ] - , not (null stanzas) - ] - - . addConstraints - -- enable stanza constraints where the user asked to enable - [ LabeledPackageConstraint - (PackageConstraintStanzas pkgname stanzas) - ConstraintSourceConfigFlagOrTarget - | pkg <- localPackages - , let pkgname = packageName pkg - stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable - stanzas = [ stanza | stanza <- [minBound..maxBound] - , Map.lookup stanza stanzaM == Just True ] - , not (null stanzas) - ] - - . addConstraints - --TODO: [nice to have] should have checked at some point that the - -- package in question actually has these flags. - [ LabeledPackageConstraint - (PackageConstraintFlags pkgname flags) - ConstraintSourceConfigFlagOrTarget - | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ] - - . 
addConstraints - --TODO: [nice to have] we have user-supplied flags for unspecified - -- local packages (as well as specific per-package flags). For the - -- former we just apply all these flags to all local targets which - -- is silly. We should check if the flags are appropriate. - [ LabeledPackageConstraint - (PackageConstraintFlags pkgname flags) - ConstraintSourceConfigFlagOrTarget - | let flags = solverSettingFlagAssignment - , not (null flags) - , pkg <- localPackages - , let pkgname = packageName pkg ] - - $ stdResolverParams - - stdResolverParams = - standardInstallPolicy - installedPkgIndex sourcePkgDb - (map SpecificSourcePackage localPackages) - - ------------------------------------------------------------------------------- --- * Install plan post-processing ------------------------------------------------------------------------------- - --- This phase goes from the InstallPlan we get from the solver and has to --- make an elaborated install plan. --- --- We go in two steps: --- --- 1. elaborate all the source packages that the solver has chosen. --- 2. swap source packages for pre-existing installed packages wherever --- possible. --- --- We do it in this order, elaborating and then replacing, because the easiest --- way to calculate the installed package ids used for the replacement step is --- from the elaborated configuration for each package. - - - - ------------------------------------------------------------------------------- --- * Install plan elaboration ------------------------------------------------------------------------------- - --- | Produce an elaborated install plan using the policy for local builds with --- a nix-style shared store. --- --- In theory should be able to make an elaborated install plan with a policy --- matching that of the classic @cabal install --user@ or @--global@ --- -elaborateInstallPlan - :: Platform -> Compiler -> ProgramDb - -> DistDirLayout - -> CabalDirLayout - -> SolverInstallPlan - -> PackagesImplicitSetupDeps - -> [SourcePackage] - -> Map PackageId PackageSourceHash - -> InstallDirs.InstallDirTemplates - -> ProjectConfigShared - -> PackageConfig - -> Map PackageName PackageConfig - -> (ElaboratedInstallPlan, ElaboratedSharedConfig) -elaborateInstallPlan platform compiler compilerprogdb - DistDirLayout{..} - cabalDirLayout@CabalDirLayout{cabalStorePackageDB} - solverPlan pkgsImplicitSetupDeps localPackages - sourcePackageHashes - defaultInstallDirs - _sharedPackageConfig - localPackagesConfig - perPackageConfig = - (elaboratedInstallPlan, elaboratedSharedConfig) - where - elaboratedSharedConfig = - ElaboratedSharedConfig { - pkgConfigPlatform = platform, - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = compilerprogdb - } - - elaboratedInstallPlan = - flip InstallPlan.mapPreservingGraph solverPlan $ \mapDep planpkg -> - case planpkg of - InstallPlan.PreExisting pkg -> - InstallPlan.PreExisting pkg - - InstallPlan.Configured pkg -> - InstallPlan.Configured - (elaborateConfiguredPackage (fixupDependencies mapDep pkg)) - - _ -> error "elaborateInstallPlan: unexpected package state" - - -- remap the installed package ids of the direct deps, since we're - -- changing the installed package ids of all the packages to use the - -- final nix-style hashed ids. 
- fixupDependencies mapDep - (ConfiguredPackage pkg flags stanzas deps) = - ConfiguredPackage pkg flags stanzas deps' - where - deps' = fmap (map (\d -> d { confInstId = mapDep (confInstId d) })) deps - - elaborateConfiguredPackage :: ConfiguredPackage - -> ElaboratedConfiguredPackage - elaborateConfiguredPackage - pkg@(ConfiguredPackage (SourcePackage pkgid gdesc srcloc descOverride) - flags stanzas deps) = - elaboratedPackage - where - -- Knot tying: the final elaboratedPackage includes the - -- pkgInstalledId, which is calculated by hashing many - -- of the other fields of the elaboratedPackage. - -- - elaboratedPackage = ElaboratedConfiguredPackage {..} - - pkgInstalledId - | shouldBuildInplaceOnly pkg - = mkUnitId (display pkgid ++ "-inplace") - - | otherwise - = assert (isJust pkgSourceHash) $ - hashedInstalledPackageId - (packageHashInputs - elaboratedSharedConfig - elaboratedPackage) -- recursive use of elaboratedPackage - - | otherwise - = error $ "elaborateInstallPlan: non-inplace package " - ++ " is missing a source hash: " ++ display pkgid - - -- All the other fields of the ElaboratedConfiguredPackage - -- - pkgSourceId = pkgid - pkgDescription = let Right (desc, _) = - PD.finalizePackageDescription - flags (const True) - platform (compilerInfo compiler) - [] gdesc - in desc - pkgFlagAssignment = flags - pkgFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag) - | flag <- PD.genPackageFlags gdesc ] - pkgDependencies = deps - pkgStanzasAvailable = Set.fromList stanzas - pkgStanzasRequested = - Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests ] - ++ [ (BenchStanzas, v) | v <- maybeToList benchmarks ] - where - tests, benchmarks :: Maybe Bool - tests = perPkgOptionMaybe pkgid packageConfigTests - benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks - - -- These sometimes get adjusted later - pkgStanzasEnabled = Set.empty - pkgBuildTargets = [] - pkgReplTarget = Nothing - pkgBuildHaddocks = False - - pkgSourceLocation = srcloc - pkgSourceHash = Map.lookup pkgid sourcePackageHashes - pkgBuildStyle = if shouldBuildInplaceOnly pkg - then BuildInplaceOnly else BuildAndInstall - pkgBuildPackageDBStack = buildAndRegisterDbs - pkgRegisterPackageDBStack = buildAndRegisterDbs - pkgRequiresRegistration = isJust (Cabal.condLibrary gdesc) - - pkgSetupScriptStyle = packageSetupScriptStylePostSolver - pkgsImplicitSetupDeps pkg pkgDescription - pkgSetupScriptCliVersion = packageSetupScriptSpecVersion - pkgSetupScriptStyle pkgDescription deps - pkgSetupPackageDBStack = buildAndRegisterDbs - - buildAndRegisterDbs - | shouldBuildInplaceOnly pkg = inplacePackageDbs - | otherwise = storePackageDbs - - pkgDescriptionOverride = descOverride - - pkgVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively - pkgSharedLib = pkgid `Set.member` pkgsUseSharedLibrary - pkgDynExe = perPkgOptionFlag pkgid False packageConfigDynExe - pkgGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still - - pkgProfExe = perPkgOptionFlag pkgid False packageConfigProf - pkgProfLib = pkgid `Set.member` pkgsUseProfilingLibrary - - (pkgProfExeDetail, - pkgProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault - packageConfigProfDetail - packageConfigProfLibDetail - pkgCoverage = perPkgOptionFlag pkgid False packageConfigCoverage - - pkgOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization - pkgSplitObjs = 
perPkgOptionFlag pkgid False packageConfigSplitObjs - pkgStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs - pkgStripExes = perPkgOptionFlag pkgid False packageConfigStripExes - pkgDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo - - -- Combine the configured compiler prog settings with the user-supplied - -- config. For the compiler progs any user-supplied config was taken - -- into account earlier when configuring the compiler so its ok that - -- our configured settings for the compiler override the user-supplied - -- config here. - pkgProgramPaths = Map.fromList - [ (programId prog, programPath prog) - | prog <- configuredPrograms compilerprogdb ] - <> perPkgOptionMapLast pkgid packageConfigProgramPaths - pkgProgramArgs = Map.fromList - [ (programId prog, args) - | prog <- configuredPrograms compilerprogdb - , let args = programOverrideArgs prog - , not (null args) - ] - <> perPkgOptionMapMappend pkgid packageConfigProgramArgs - pkgProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra - pkgConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs - pkgExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs - pkgExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs - pkgExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs - pkgProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix - pkgProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix - - pkgInstallDirs - | shouldBuildInplaceOnly pkg - -- use the ordinary default install dirs - = (InstallDirs.absoluteInstallDirs - pkgid - (installedUnitId pkg) - (compilerInfo compiler) - InstallDirs.NoCopyDest - platform - defaultInstallDirs) { - - InstallDirs.libsubdir = "", -- absoluteInstallDirs sets these as - InstallDirs.datasubdir = "" -- 'undefined' but we have to use - } -- them as "Setup.hs configure" args - - | otherwise - -- use special simplified install dirs - = storePackageInstallDirs - cabalDirLayout - (compilerId compiler) - pkgInstalledId - - pkgHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle - pkgHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml - pkgHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation - pkgHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables - pkgHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites - pkgHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks - pkgHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal - pkgHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss - pkgHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour - pkgHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss - pkgHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents - - perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a - perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a - perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] - - perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) - perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) - perPkgOptionList pkgid f = lookupPerPkgOption pkgid f - perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f) - perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) - perPkgOptionMapMappend pkgid f = 
getMapMappend (lookupPerPkgOption pkgid f) - - perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) - where - exe = fromFlagOrDefault def bothflag - lib = fromFlagOrDefault def (bothflag <> libflag) - - bothflag = lookupPerPkgOption pkgid fboth - libflag = lookupPerPkgOption pkgid flib - - lookupPerPkgOption :: (Package pkg, Monoid m) - => pkg -> (PackageConfig -> m) -> m - lookupPerPkgOption pkg f - -- the project config specifies values that apply to packages local to - -- the project, and can specify per-package values for any package, - -- but by default non-local packages get all default config values - | isLocalToProject pkg = local <> perpkg - | otherwise = perpkg - where - local = f localPackagesConfig - perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig) - - inplacePackageDbs = storePackageDbs - ++ [ distPackageDB (compilerId compiler) ] - - storePackageDbs = [ GlobalPackageDB - , cabalStorePackageDB (compilerId compiler) ] - - -- For this local build policy, every package that lives in a local source - -- dir (as opposed to a tarball), or depends on such a package, will be - -- built inplace into a shared dist dir. Tarball packages that depend on - -- source dir packages will also get unpacked locally. - shouldBuildInplaceOnly :: HasUnitId pkg => pkg -> Bool - shouldBuildInplaceOnly pkg = Set.member (installedPackageId pkg) - pkgsToBuildInplaceOnly - - pkgsToBuildInplaceOnly :: Set InstalledPackageId - pkgsToBuildInplaceOnly = - Set.fromList - $ map installedPackageId - $ InstallPlan.reverseDependencyClosure - solverPlan - [ fakeUnitId (packageId pkg) - | pkg <- localPackages ] - - isLocalToProject :: Package pkg => pkg -> Bool - isLocalToProject pkg = Set.member (packageId pkg) - pkgsLocalToProject - - pkgsLocalToProject :: Set PackageId - pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ] - - pkgsUseSharedLibrary :: Set PackageId - pkgsUseSharedLibrary = - packagesWithDownwardClosedProperty needsSharedLib - where - needsSharedLib pkg = - fromMaybe compilerShouldUseSharedLibByDefault - (liftM2 (||) pkgSharedLib pkgDynExe) - where - pkgid = packageId pkg - pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib - pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe - - --TODO: [code cleanup] move this into the Cabal lib. It's currently open - -- coded in Distribution.Simple.Configure, but should be made a proper - -- function of the Compiler or CompilerInfo. - compilerShouldUseSharedLibByDefault = - case compilerFlavor compiler of - GHC -> GHC.isDynamic compiler - GHCJS -> GHCJS.isDynamic compiler - _ -> False - - pkgsUseProfilingLibrary :: Set PackageId - pkgsUseProfilingLibrary = - packagesWithDownwardClosedProperty needsProfilingLib - where - needsProfilingLib pkg = - fromFlagOrDefault False (profBothFlag <> profLibFlag) - where - pkgid = packageId pkg - profBothFlag = lookupPerPkgOption pkgid packageConfigProf - profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib - --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe - - packagesWithDownwardClosedProperty property = - Set.fromList - $ map packageId - $ InstallPlan.dependencyClosure - solverPlan - [ installedPackageId pkg - | pkg <- InstallPlan.toList solverPlan - , property pkg ] -- just the packages that satisfy the property - --TODO: [nice to have] this does not check the config consistency, - -- e.g. a package explicitly turning off profiling, but something - -- depending on it that needs profiling.
This really needs a separate - -- package config validation/resolution pass. - - --TODO: [nice to have] config consistency checking: - -- * profiling libs & exes, exe needs lib, recursive - -- * shared libs & exes, exe needs lib, recursive - -- * vanilla libs & exes, exe needs lib, recursive - -- * ghci or shared lib needed by TH, recursive, ghc version dependent - - ---------------------------- --- Build targets --- - --- Refer to ProjectPlanning.Types for details of these important types: - --- data PackageTarget = ... --- data ComponentTarget = ... --- data SubComponentTarget = ... - - ---TODO: this needs to report some user target/config errors -elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget] - -> ([ComponentTarget], Maybe ComponentTarget, Bool) -elaboratePackageTargets ElaboratedConfiguredPackage{..} targets = - let buildTargets = nubComponentTargets - . map compatSubComponentTargets - . concatMap elaborateBuildTarget - $ targets - --TODO: instead of listToMaybe we should be reporting an error here - replTargets = listToMaybe - . nubComponentTargets - . map compatSubComponentTargets - . concatMap elaborateReplTarget - $ targets - buildHaddocks = HaddockDefaultComponents `elem` targets - - in (buildTargets, replTargets, buildHaddocks) - where - --TODO: need to report an error here if defaultComponents is empty - elaborateBuildTarget BuildDefaultComponents = pkgDefaultComponents - elaborateBuildTarget (BuildSpecificComponent t) = [t] - elaborateBuildTarget _ = [] - - --TODO: need to report an error here if defaultComponents is empty - elaborateReplTarget ReplDefaultComponent = take 1 pkgDefaultComponents - elaborateReplTarget (ReplSpecificComponent t) = [t] - elaborateReplTarget _ = [] - - pkgDefaultComponents = - [ ComponentTarget cname WholeComponent - | c <- Cabal.pkgComponents pkgDescription - , PD.buildable (Cabal.componentBuildInfo c) - , let cname = Cabal.componentName c - , enabledOptionalStanza cname - ] - where - enabledOptionalStanza cname = - case componentOptionalStanza cname of - Nothing -> True - Just stanza -> Map.lookup stanza pkgStanzasRequested - == Just True - - -- Not all Cabal Setup.hs versions support sub-component targets, so switch - -- them over to the whole component - compatSubComponentTargets :: ComponentTarget -> ComponentTarget - compatSubComponentTargets target@(ComponentTarget cname _subtarget) - | not setupHsSupportsSubComponentTargets - = ComponentTarget cname WholeComponent - | otherwise = target - - -- Actually the reality is that no current version of Cabal's Setup.hs - -- build command actually support building specific files or modules. - setupHsSupportsSubComponentTargets = False - -- TODO: when that changes, adjust this test, e.g. - -- | pkgSetupScriptCliVersion >= Version [x,y] [] - - nubComponentTargets :: [ComponentTarget] -> [ComponentTarget] - nubComponentTargets = - concatMap (wholeComponentOverrides . map snd) - . groupBy ((==) `on` fst) - . sortBy (compare `on` fst) - . map (\t@(ComponentTarget cname _) -> (cname, t)) - - -- If we're building the whole component then that the only target all we - -- need, otherwise we can have several targets within the component. - wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget] - wholeComponentOverrides ts = - case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of - (t:_) -> [t] - [] -> ts - - -pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool -pkgHasEphemeralBuildTargets pkg = - isJust (pkgReplTarget pkg) - || (not . 
null) [ () | ComponentTarget _ subtarget <- pkgBuildTargets pkg - , subtarget /= WholeComponent ] - --- | The components that we'll build all of, meaning that after they're built --- we can skip building them again (unlike with building just some modules or --- other files within a component). --- -pkgBuildTargetWholeComponents :: ElaboratedConfiguredPackage - -> Set ComponentName -pkgBuildTargetWholeComponents pkg = - Set.fromList - [ cname | ComponentTarget cname WholeComponent <- pkgBuildTargets pkg ] - - ------------------------------------------------------------------------------- --- * Install plan pruning ------------------------------------------------------------------------------- - --- | Given a set of package targets (and optionally component targets within --- those packages), take the subset of the install plan needed to build those --- targets. Also, update the package config to specify which optional stanzas --- to enable, and which targets within each package to build. --- -pruneInstallPlanToTargets :: Map InstalledPackageId [PackageTarget] - -> ElaboratedInstallPlan -> ElaboratedInstallPlan -pruneInstallPlanToTargets perPkgTargetsMap = - either (\_ -> assert False undefined) id - . InstallPlan.new False - . PackageIndex.fromList - -- We have to do this in two passes - . pruneInstallPlanPass2 - . pruneInstallPlanPass1 perPkgTargetsMap - . InstallPlan.toList - --- The first pass does three things: --- --- * Set the build targets based on the user targets (but not rev deps yet). --- * A first go at determining which optional stanzas (testsuites, benchmarks) --- are needed. We have a second go in the next pass. --- * Take the dependency closure using pruned dependencies. We prune deps that --- are used only by unneeded optional stanzas. These pruned deps are only --- used for the dependency closure and are not persisted in this pass. --- -pruneInstallPlanPass1 :: Map InstalledPackageId [PackageTarget] - -> [ElaboratedPlanPackage] - -> [ElaboratedPlanPackage] -pruneInstallPlanPass1 perPkgTargetsMap pkgs = - map fst $ - dependencyClosure - (installedPackageId . fst) -- the pkg id - snd -- the pruned deps - [ (pkg', pruneOptionalDependencies pkg') - | pkg <- pkgs - , let pkg' = mapConfiguredPackage - (pruneOptionalStanzas . setBuildTargets) pkg - ] - (Map.keys perPkgTargetsMap) - where - -- Elaborate and set the targets we'll build for this package. This is just - -- based on the targets from the user, not targets implied by reverse - -- depencencies. Those comes in the second pass once we know the rev deps. - -- - setBuildTargets pkg = - pkg { - pkgBuildTargets = buildTargets, - pkgReplTarget = replTarget, - pkgBuildHaddocks = buildHaddocks - } - where - (buildTargets, replTarget, buildHaddocks) - = elaboratePackageTargets pkg targets - targets = fromMaybe [] - $ Map.lookup (installedPackageId pkg) perPkgTargetsMap - - -- Decide whether or not to enable testsuites and benchmarks - -- - -- The testsuite and benchmark targets are somewhat special in that we need - -- to configure the packages with them enabled, and we need to do that even - -- if we only want to build one of several testsuites. - -- - -- There are two cases in which we will enable the testsuites (or - -- benchmarks): if one of the targets is a testsuite, or if all of the - -- testsuite depencencies are already cached in the store. The rationale - -- for the latter is to minimise how often we have to reconfigure due to - -- the particular targets we choose to build. 
Otherwise choosing to build - -- a testsuite target, and then later choosing to build an exe target - -- would involve unnecessarily reconfiguring the package with testsuites - -- disabled. Technically this introduces a little bit of stateful - -- behaviour to make this "sticky", but it should be benign. - -- - pruneOptionalStanzas pkg = pkg { pkgStanzasEnabled = stanzas } - where - stanzas :: Set OptionalStanza - stanzas = optionalStanzasRequiredByTargets pkg - <> optionalStanzasRequestedByDefault pkg - <> optionalStanzasWithDepsAvailable availablePkgs pkg - - -- Calculate package depencencies but cut out those needed only by - -- optional stanzas that we've determined we will not enable. - -- These pruned deps are not persisted in this pass since they're based on - -- the optional stanzas and we'll make further tweaks to the optional - -- stanzas in the next pass. - -- - pruneOptionalDependencies :: ElaboratedPlanPackage -> [InstalledPackageId] - pruneOptionalDependencies (InstallPlan.Configured pkg) = - (CD.flatDeps . CD.filterDeps keepNeeded) (depends pkg) - where - keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas - keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas - keepNeeded _ _ = True - stanzas = pkgStanzasEnabled pkg - pruneOptionalDependencies pkg = - CD.flatDeps (depends pkg) - - optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage - -> Set OptionalStanza - optionalStanzasRequiredByTargets pkg = - Set.fromList - [ stanza - | ComponentTarget cname _ <- pkgBuildTargets pkg - ++ maybeToList (pkgReplTarget pkg) - , stanza <- maybeToList (componentOptionalStanza cname) - ] - - optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage - -> Set OptionalStanza - optionalStanzasRequestedByDefault = - Map.keysSet - . Map.filter (id :: Bool -> Bool) - . pkgStanzasRequested - - availablePkgs = - Set.fromList - [ installedPackageId pkg - | InstallPlan.PreExisting pkg <- pkgs ] - -optionalStanzasWithDepsAvailable :: Set InstalledPackageId - -> ElaboratedConfiguredPackage - -> Set OptionalStanza -optionalStanzasWithDepsAvailable availablePkgs pkg = - Set.fromList - [ stanza - | stanza <- Set.toList (pkgStanzasAvailable pkg) - , let deps :: [InstalledPackageId] - deps = map installedPackageId - $ CD.select (optionalStanzaDeps stanza) - (pkgDependencies pkg) - , all (`Set.member` availablePkgs) deps - ] - where - optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True - optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True - optionalStanzaDeps _ _ = False - - --- The second pass does three things: --- --- * A second go at deciding which optional stanzas to enable. --- * Prune the depencencies based on the final choice of optional stanzas. --- * Extend the targets within each package to build, now we know the reverse --- depencencies, ie we know which libs are needed as deps by other packages. --- --- Achieving sticky behaviour with enabling\/disabling optional stanzas is --- tricky. The first approximation was handled by the first pass above, but --- it's not quite enough. That pass will enable stanzas if all of the deps --- of the optional stanza are already instaled /in the store/. That's important --- but it does not account for depencencies that get built inplace as part of --- the project. We cannot take those inplace build deps into account in the --- pruning pass however because we don't yet know which ones we're going to --- build. 
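-- Sketch of the "enable testsuites/benchmarks when all their deps are already
-- available" rule used by optionalStanzasWithDepsAvailable above, over
-- simplified types: a stanza is paired directly with the set of package ids
-- it needs, instead of going through ComponentDeps and InstalledPackageIds as
-- the real code does. Illustration only, not code from cabal-install.
import qualified Data.Set as Set

data StanzaKind = TestStanza | BenchStanza
  deriving (Eq, Ord, Show)

stanzasWithDepsAvailableSketch
  :: Set.Set String                    -- ^ package ids already available
  -> [(StanzaKind, Set.Set String)]    -- ^ each stanza with the deps it needs
  -> Set.Set StanzaKind
stanzasWithDepsAvailableSketch available stanzaDeps =
  Set.fromList
    [ stanza
    | (stanza, deps) <- stanzaDeps
    , deps `Set.isSubsetOf` available ]

-- Example: the test stanza is enabled only because both of its deps are in
-- the available set; the benchmark stanza is not.
exampleStanzas :: Set.Set StanzaKind
exampleStanzas =
  stanzasWithDepsAvailableSketch
    (Set.fromList ["HUnit-1.3", "base-4.9"])
    [ (TestStanza,  Set.fromList ["HUnit-1.3", "base-4.9"])
    , (BenchStanza, Set.fromList ["criterion-1.1", "base-4.9"]) ]
  -- == Set.fromList [TestStanza]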
Once we do know, we can have another go and enable stanzas that have --- all their deps available. Now we can consider all packages in the pruned --- plan to be available, including ones we already decided to build from --- source. --- --- Deciding which targets to build depends on knowing which packages have --- reverse dependencies (ie are needed). This requires the result of first --- pass, which is another reason we have to split it into two passes. --- --- Note that just because we might enable testsuites or benchmarks (in the --- first or second pass) doesn't mean that we build all (or even any) of them. --- That depends on which targets we picked in the first pass. --- -pruneInstallPlanPass2 :: [ElaboratedPlanPackage] - -> [ElaboratedPlanPackage] -pruneInstallPlanPass2 pkgs = - map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs - where - setStanzasDepsAndTargets pkg = - pkg { - pkgStanzasEnabled = stanzas, - pkgDependencies = CD.filterDeps keepNeeded (pkgDependencies pkg), - pkgBuildTargets = pkgBuildTargets pkg ++ targetsRequiredForRevDeps - } - where - stanzas :: Set OptionalStanza - stanzas = pkgStanzasEnabled pkg - <> optionalStanzasWithDepsAvailable availablePkgs pkg - - keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas - keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas - keepNeeded _ _ = True - - targetsRequiredForRevDeps = - [ ComponentTarget CLibName WholeComponent - -- if anything needs this pkg, build the library component - | installedPackageId pkg `Set.member` hasReverseLibDeps - ] - --TODO: also need to track build-tool rev-deps for exes - - availablePkgs :: Set InstalledPackageId - availablePkgs = Set.fromList (map installedPackageId pkgs) - - hasReverseLibDeps :: Set InstalledPackageId - hasReverseLibDeps = - Set.fromList [ depid | pkg <- pkgs - , depid <- CD.flatDeps (depends pkg) ] - - -mapConfiguredPackage :: (ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage) - -> ElaboratedPlanPackage - -> ElaboratedPlanPackage -mapConfiguredPackage f (InstallPlan.Configured pkg) = - InstallPlan.Configured (f pkg) -mapConfiguredPackage _ pkg = pkg - -componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza -componentOptionalStanza (Cabal.CTestName _) = Just TestStanzas -componentOptionalStanza (Cabal.CBenchName _) = Just BenchStanzas -componentOptionalStanza _ = Nothing - - -dependencyClosure :: (pkg -> InstalledPackageId) - -> (pkg -> [InstalledPackageId]) - -> [pkg] - -> [InstalledPackageId] - -> [pkg] -dependencyClosure pkgid deps allpkgs = - map vertexToPkg - . concatMap Tree.flatten - . Graph.dfs graph - . map pkgidToVertex - where - (graph, vertexToPkg, pkgidToVertex) = dependencyGraph pkgid deps allpkgs - -dependencyGraph :: (pkg -> InstalledPackageId) - -> (pkg -> [InstalledPackageId]) - -> [pkg] - -> (Graph.Graph, - Graph.Vertex -> pkg, - InstalledPackageId -> Graph.Vertex) -dependencyGraph pkgid deps pkgs = - (graph, vertexToPkg', pkgidToVertex') - where - (graph, vertexToPkg, pkgidToVertex) = - Graph.graphFromEdges [ ( pkg, pkgid pkg, deps pkg ) - | pkg <- pkgs ] - vertexToPkg' = (\(pkg,_,_) -> pkg) - . vertexToPkg - pkgidToVertex' = fromMaybe (error "dependencyGraph: lookup failure") - . pkgidToVertex - - ---------------------------- --- Setup.hs script policy --- - --- Handling for Setup.hs scripts is a bit tricky, part of it lives in the --- solver phase, and part in the elaboration phase. 
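-- A standalone version of the dependencyClosure/dependencyGraph helpers
-- above, using the same containers machinery (Data.Graph.graphFromEdges plus
-- a depth-first search). Keys are plain Strings here instead of
-- InstalledPackageIds, and unknown roots are silently dropped instead of
-- raising a lookup error. A sketch of the technique, not the original code.
import qualified Data.Graph as Graph
import qualified Data.Tree  as Tree
import Data.Maybe (mapMaybe)

dependencyClosureSketch
  :: [(pkg, String, [String])]   -- ^ (package, its key, keys of its deps)
  -> [String]                    -- ^ keys to start the closure from
  -> [pkg]
dependencyClosureSketch nodes roots =
    map vertexToPkg
  . concatMap Tree.flatten
  . Graph.dfs graph
  $ mapMaybe keyToVertex roots
  where
    (graph, fromVertex, keyToVertex) = Graph.graphFromEdges nodes
    vertexToPkg v = case fromVertex v of (pkg, _key, _deps) -> pkg

-- Example: starting from "app" reaches "app", "lib" and "base" but not the
-- unrelated "other" package.
exampleClosure :: [String]
exampleClosure =
  dependencyClosureSketch
    [ ("app",   "app",   ["lib"])
    , ("lib",   "lib",   ["base"])
    , ("base",  "base",  [])
    , ("other", "other", []) ]
    ["app"]
  -- == ["app","lib","base"]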
We keep the helper --- functions for both phases together here so at least you can see all of it --- in one place. --- --- There are four major cases for Setup.hs handling: --- --- 1. @build-type@ Custom with a @custom-setup@ section --- 2. @build-type@ Custom without a @custom-setup@ section --- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ --- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ --- --- It's also worth noting that packages specifying @cabal-version: >= 1.23@ --- or later that have @build-type@ Custom will always have a @custom-setup@ --- section. Therefore in case 2, the specified @cabal-version@ will always be --- less than 1.23. --- --- In cases 1 and 2 we obviously have to build an external Setup.hs script, --- while in case 4 we can use the internal library API. In case 3 we also have --- to build an external Setup.hs script because the package needs a later --- Cabal lib version than we can support internally. --- --- data SetupScriptStyle = ... -- see ProjectPlanning.Types - --- | Work out the 'SetupScriptStyle' given the package description. --- --- This only works on original packages before we give them to the solver, --- since after the solver some implicit setup deps are made explicit. --- --- See 'rememberImplicitSetupDeps' and 'packageSetupScriptStylePostSolver'. --- -packageSetupScriptStylePreSolver :: PD.PackageDescription -> SetupScriptStyle -packageSetupScriptStylePreSolver pkg - | buildType == PD.Custom - , isJust (PD.setupBuildInfo pkg) - = SetupCustomExplicitDeps - - | buildType == PD.Custom - = SetupCustomImplicitDeps - - | PD.specVersion pkg > cabalVersion -- one cabal-install is built against - = SetupNonCustomExternalLib - - | otherwise - = SetupNonCustomInternalLib - where - buildType = fromMaybe PD.Custom (PD.buildType pkg) - - --- | Part of our Setup.hs handling policy is implemented by getting the solver --- to work out setup dependencies for packages. The solver already handles --- packages that explicitly specify setup dependencies, but we can also tell --- the solver to treat other packages as if they had setup dependencies. --- That's what this function does, it gets called by the solver for all --- packages that don't already have setup dependencies. --- --- The dependencies we want to add is different for each 'SetupScriptStyle'. --- --- Note that adding default deps means these deps are actually /added/ to the --- packages that we get out of the solver in the 'SolverInstallPlan'. Making --- implicit setup deps explicit is a problem in the post-solver stages because --- we still need to distinguish the case of explicit and implict setup deps. --- See 'rememberImplicitSetupDeps'. --- -defaultSetupDeps :: Compiler -> Platform - -> PD.PackageDescription - -> Maybe [Dependency] -defaultSetupDeps compiler platform pkg = - case packageSetupScriptStylePreSolver pkg of - - -- For packages with build type custom that do not specify explicit - -- setup dependencies, we add a dependency on Cabal and a number - -- of other packages. - SetupCustomImplicitDeps -> - Just $ - [ Dependency depPkgname anyVersion - | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++ - [ Dependency cabalPkgname cabalConstraint - | packageName pkg /= cabalPkgname ] - where - -- The Cabal dep is slightly special: - -- * We omit the dep for the Cabal lib itself, since it bootstraps. 
- -- * We constrain it to be >= 1.18 < 2 - -- - cabalConstraint = orLaterVersion cabalCompatMinVer - `intersectVersionRanges` - orLaterVersion (PD.specVersion pkg) - `intersectVersionRanges` - earlierVersion cabalCompatMaxVer - -- The idea here is that at some point we will make significant - -- breaking changes to the Cabal API that Setup.hs scripts use. - -- So for old custom Setup scripts that do not specify explicit - -- constraints, we constrain them to use a compatible Cabal version. - cabalCompatMaxVer = Version [1,25] [] - -- In principle we can talk to any old Cabal version, and we need to - -- be able to do that for custom Setup scripts that require older - -- Cabal lib versions. However in practice we have currently have - -- problems with Cabal-1.16. (1.16 does not know about build targets) - -- If this is fixed we can relax this constraint. - cabalCompatMinVer = Version [1,18] [] - - -- For other build types (like Simple) if we still need to compile an - -- external Setup.hs, it'll be one of the simple ones that only depends - -- on Cabal and base. - SetupNonCustomExternalLib -> - Just [ Dependency cabalPkgname cabalConstraint - , Dependency basePkgname anyVersion ] - where - cabalConstraint = orLaterVersion (PD.specVersion pkg) - - -- The internal setup wrapper method has no deps at all. - SetupNonCustomInternalLib -> Just [] - - SetupCustomExplicitDeps -> - error $ "defaultSetupDeps: called for a package with explicit " - ++ "setup deps: " ++ display (packageId pkg) - - --- | See 'rememberImplicitSetupDeps' for details. -type PackagesImplicitSetupDeps = Set InstalledPackageId - --- | A consequence of using 'defaultSetupDeps' in 'planPackages' is that by --- making implicit setup deps explicit we loose track of which packages --- originally had implicit setup deps. That's important because we do still --- have different behaviour based on the setup style (in particular whether to --- compile a Setup.hs script with version macros). --- --- So we remember the necessary information in an auxilliary set and use it --- in 'packageSetupScriptStylePreSolver' to recover the full info. --- -rememberImplicitSetupDeps :: SourcePackageIndex.PackageIndex SourcePackage - -> SolverInstallPlan - -> (SolverInstallPlan, PackagesImplicitSetupDeps) -rememberImplicitSetupDeps sourcePkgIndex plan = - (plan, pkgsImplicitSetupDeps) - where - pkgsImplicitSetupDeps = - Set.fromList - [ installedPackageId pkg - | InstallPlan.Configured - pkg@(ConfiguredPackage newpkg _ _ _) <- InstallPlan.toList plan - -- has explicit setup deps now - , hasExplicitSetupDeps newpkg - -- but originally had no setup deps - , let Just origpkg = SourcePackageIndex.lookupPackageId - sourcePkgIndex (packageId pkg) - , not (hasExplicitSetupDeps origpkg) - ] - - hasExplicitSetupDeps = - (SetupCustomExplicitDeps==) - . packageSetupScriptStylePreSolver - . PD.packageDescription . packageDescription - - --- | Use the extra info saved by 'rememberImplicitSetupDeps' to let us work --- out the correct 'SetupScriptStyle'. This should give the same result as --- 'packageSetupScriptStylePreSolver' gave prior to munging the package info --- through the solver. 
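-- Worked example of the constraint that defaultSetupDeps builds above for the
-- implicit Cabal setup dependency, using the same combinators that appear in
-- this hunk; the assumption that they are importable from Distribution.Version
-- (as in the Cabal library of this era) is mine. For a hypothetical package
-- declaring cabal-version: >= 1.20 the constraint works out to
-- >= 1.18 && >= 1.20 && < 1.25, i.e. effectively >= 1.20 && < 1.25.
import Distribution.Version
         ( Version(..), VersionRange
         , orLaterVersion, earlierVersion, intersectVersionRanges )

exampleCabalSetupConstraint :: VersionRange
exampleCabalSetupConstraint =
      orLaterVersion (Version [1,18] [])     -- cabalCompatMinVer
  `intersectVersionRanges`
      orLaterVersion (Version [1,20] [])     -- the package's specVersion
  `intersectVersionRanges`
      earlierVersion (Version [1,25] [])     -- cabalCompatMaxVer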
--- -packageSetupScriptStylePostSolver :: Set InstalledPackageId - -> ConfiguredPackage - -> PD.PackageDescription - -> SetupScriptStyle -packageSetupScriptStylePostSolver pkgsImplicitSetupDeps pkg pkgDescription = - case packageSetupScriptStylePreSolver pkgDescription of - SetupCustomExplicitDeps - | Set.member (installedPackageId pkg) pkgsImplicitSetupDeps - -> SetupCustomImplicitDeps - other -> other - - --- | Work out which version of the Cabal spec we will be using to talk to the --- Setup.hs interface for this package. --- --- This depends somewhat on the 'SetupScriptStyle' but most cases are a result --- of what the solver picked for us, based on the explicit setup deps or the --- ones added implicitly by 'defaultSetupDeps'. --- -packageSetupScriptSpecVersion :: Package pkg - => SetupScriptStyle - -> PD.PackageDescription - -> ComponentDeps [pkg] - -> Version - --- We're going to be using the internal Cabal library, so the spec version of --- that is simply the version of the Cabal library that cabal-install has been --- built with. -packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ = - cabalVersion - --- If we happen to be building the Cabal lib itself then because that --- bootstraps itself then we use the version of the lib we're building. -packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ - | packageName pkg == cabalPkgname - = packageVersion pkg - --- In all other cases we have a look at what version of the Cabal lib the --- solver picked. Or if it didn't depend on Cabal at all (which is very rare) --- then we look at the .cabal file to see what spec version it declares. -packageSetupScriptSpecVersion _ pkg deps = - case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of - Just dep -> packageVersion dep - Nothing -> PD.specVersion pkg - - -cabalPkgname, basePkgname :: PackageName -cabalPkgname = PackageName "Cabal" -basePkgname = PackageName "base" - - -legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName] -legacyCustomSetupPkgs compiler (Platform _ os) = - map PackageName $ - [ "array", "base", "binary", "bytestring", "containers" - , "deepseq", "directory", "filepath", "old-time", "pretty" - , "process", "time", "transformers" ] - ++ [ "Win32" | os == Windows ] - ++ [ "unix" | os /= Windows ] - ++ [ "ghc-prim" | isGHC ] - ++ [ "template-haskell" | isGHC ] - where - isGHC = compilerCompatFlavor GHC compiler - - -- This util is copied here just in this branch to avoid requiring a new - -- Cabal version. The master branch already does the right thing. - compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool - compilerCompatFlavor flavor comp = - flavor == compilerFlavor comp - || flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ] - --- The other aspects of our Setup.hs policy lives here where we decide on --- the 'SetupScriptOptions'. --- --- Our current policy for the 'SetupCustomImplicitDeps' case is that we --- try to make the implicit deps cover everything, and we don't allow the --- compiler to pick up other deps. This may or may not be sustainable, and --- we might have to allow the deps to be non-exclusive, but that itself would --- be tricky since we would have to allow the Setup access to all the packages --- in the store and local dbs. 
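-- The four Setup.hs cases described in the comment block above, restated as a
-- tiny standalone decision function over simplified inputs. The real
-- packageSetupScriptStylePreSolver inspects a full PackageDescription; the
-- types and version lists here are stand-ins for illustration only.
data BuildTypeSketch = CustomBuild | SimpleBuild
  deriving (Eq, Show)

data SetupStyleSketch
  = CustomExplicitDeps    -- ^ build-type Custom with a custom-setup section
  | CustomImplicitDeps    -- ^ build-type Custom without one
  | NonCustomExternalLib  -- ^ needs a newer Cabal than we link against
  | NonCustomInternalLib  -- ^ can use the internal Cabal library API
  deriving (Eq, Show)

setupStyleSketch
  :: BuildTypeSketch
  -> Bool     -- ^ does the .cabal file have a custom-setup section?
  -> [Int]    -- ^ cabal-version the package requires
  -> [Int]    -- ^ Cabal library version cabal-install is built against
  -> SetupStyleSketch
setupStyleSketch buildType hasCustomSetupSection specVersion ourCabalVersion
  | buildType == CustomBuild, hasCustomSetupSection = CustomExplicitDeps
  | buildType == CustomBuild                        = CustomImplicitDeps
  | specVersion > ourCabalVersion                   = NonCustomExternalLib
  | otherwise                                       = NonCustomInternalLib

-- Example: a Simple package requiring cabal-version >= 1.25 while we are
-- built against Cabal 1.24 needs an external Setup.hs.
exampleStyle :: SetupStyleSketch
exampleStyle = setupStyleSketch SimpleBuild False [1,25] [1,24]
  -- == NonCustomExternalLib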
- -setupHsScriptOptions :: ElaboratedReadyPackage - -> ElaboratedSharedConfig - -> FilePath - -> FilePath - -> Bool - -> Lock - -> SetupScriptOptions -setupHsScriptOptions (ReadyPackage ElaboratedConfiguredPackage{..} deps) - ElaboratedSharedConfig{..} srcdir builddir - isParallelBuild cacheLock = - SetupScriptOptions { - useCabalVersion = thisVersion pkgSetupScriptCliVersion, - useCabalSpecVersion = Just pkgSetupScriptCliVersion, - useCompiler = Just pkgConfigCompiler, - usePlatform = Just pkgConfigPlatform, - usePackageDB = pkgSetupPackageDBStack, - usePackageIndex = Nothing, - useDependencies = [ (installedPackageId ipkg, packageId ipkg) - | ipkg <- CD.setupDeps deps ], - useDependenciesExclusive = True, - useVersionMacros = pkgSetupScriptStyle == SetupCustomExplicitDeps, - useProgramConfig = pkgConfigCompilerProgs, - useDistPref = builddir, - useLoggingHandle = Nothing, -- this gets set later - useWorkingDir = Just srcdir, - useWin32CleanHack = False, --TODO: [required eventually] - forceExternalSetupMethod = isParallelBuild, - setupCacheLock = Just cacheLock - } - - --- | To be used for the input for elaborateInstallPlan. --- --- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure. --- -userInstallDirTemplates :: Compiler - -> IO InstallDirs.InstallDirTemplates -userInstallDirTemplates compiler = do - InstallDirs.defaultInstallDirs - (compilerFlavor compiler) - True -- user install - False -- unused - -storePackageInstallDirs :: CabalDirLayout - -> CompilerId - -> InstalledPackageId - -> InstallDirs.InstallDirs FilePath -storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory} - compid ipkgid = - InstallDirs.InstallDirs {..} - where - prefix = cabalStorePackageDirectory compid ipkgid - bindir = prefix "bin" - libdir = prefix "lib" - libsubdir = "" - dynlibdir = libdir - libexecdir = prefix "libexec" - includedir = libdir "include" - datadir = prefix "share" - datasubdir = "" - docdir = datadir "doc" - mandir = datadir "man" - htmldir = docdir "html" - haddockdir = htmldir - sysconfdir = prefix "etc" - - ---TODO: [code cleanup] perhaps reorder this code --- based on the ElaboratedInstallPlan + ElaboratedSharedConfig, --- make the various Setup.hs {configure,build,copy} flags - - -setupHsConfigureFlags :: ElaboratedReadyPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.ConfigFlags -setupHsConfigureFlags (ReadyPackage - pkg@ElaboratedConfiguredPackage{..} - pkgdeps) - sharedConfig@ElaboratedSharedConfig{..} - verbosity builddir = - assert (sanityCheckElaboratedConfiguredPackage sharedConfig pkg) - Cabal.ConfigFlags {..} - where - configDistPref = toFlag builddir - configVerbosity = toFlag verbosity - - configIPID = toFlag (display (installedUnitId pkg)) - - configProgramPaths = Map.toList pkgProgramPaths - configProgramArgs = Map.toList pkgProgramArgs - configProgramPathExtra = toNubList pkgProgramPathExtra - configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler) - configHcPath = mempty -- we use configProgramPaths instead - configHcPkg = mempty -- we use configProgramPaths instead - - configVanillaLib = toFlag pkgVanillaLib - configSharedLib = toFlag pkgSharedLib - configDynExe = toFlag pkgDynExe - configGHCiLib = toFlag pkgGHCiLib - configProfExe = mempty - configProfLib = toFlag pkgProfLib - configProf = toFlag pkgProfExe - - -- configProfDetail is for exe+lib, but overridden by configProfLibDetail - -- so we specify both so we can specify independently - configProfDetail = toFlag pkgProfExeDetail - configProfLibDetail = toFlag 
pkgProfLibDetail - - configCoverage = toFlag pkgCoverage - configLibCoverage = mempty - - configOptimization = toFlag pkgOptimization - configSplitObjs = toFlag pkgSplitObjs - configStripExes = toFlag pkgStripExes - configStripLibs = toFlag pkgStripLibs - configDebugInfo = toFlag pkgDebugInfo - configAllowNewer = mempty -- we use configExactConfiguration True - - configConfigurationsFlags = pkgFlagAssignment - configConfigureArgs = pkgConfigureScriptArgs - configExtraLibDirs = pkgExtraLibDirs - configExtraFrameworkDirs = pkgExtraFrameworkDirs - configExtraIncludeDirs = pkgExtraIncludeDirs - configProgPrefix = maybe mempty toFlag pkgProgPrefix - configProgSuffix = maybe mempty toFlag pkgProgSuffix - - configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate) - pkgInstallDirs - - -- we only use configDependencies, unless we're talking to an old Cabal - -- in which case we use configConstraints - configDependencies = [ (packageName (Installed.sourcePackageId deppkg), - Installed.installedUnitId deppkg) - | deppkg <- CD.nonSetupDeps pkgdeps ] - configConstraints = [ thisPackageVersion (packageId deppkg) - | deppkg <- CD.nonSetupDeps pkgdeps ] - - -- explicitly clear, then our package db stack - -- TODO: [required eventually] have to do this differently for older Cabal versions - configPackageDBs = Nothing : map Just pkgBuildPackageDBStack - - configTests = toFlag (TestStanzas `Set.member` pkgStanzasEnabled) - configBenchmarks = toFlag (BenchStanzas `Set.member` pkgStanzasEnabled) - - configExactConfiguration = toFlag True - configFlagError = mempty --TODO: [research required] appears not to be implemented - configRelocatable = mempty --TODO: [research required] ??? - configScratchDir = mempty -- never use - configUserInstall = mempty -- don't rely on defaults - configPrograms_ = mempty -- never use, shouldn't exist - - -setupHsBuildFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.BuildFlags -setupHsBuildFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = - Cabal.BuildFlags { - buildProgramPaths = mempty, --unused, set at configure time - buildProgramArgs = mempty, --unused, set at configure time - buildVerbosity = toFlag verbosity, - buildDistPref = toFlag builddir, - buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs), - buildArgs = mempty -- unused, passed via args not flags - } - - -setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String] -setupHsBuildArgs pkg = - map (showComponentTarget pkg) (pkgBuildTargets pkg) - - -showComponentTarget :: ElaboratedConfiguredPackage -> ComponentTarget -> String -showComponentTarget pkg = - showBuildTarget . 
toBuildTarget - where - showBuildTarget t = - Cabal.showBuildTarget (qlBuildTarget t) (packageId pkg) t - - qlBuildTarget Cabal.BuildTargetComponent{} = Cabal.QL2 - qlBuildTarget _ = Cabal.QL3 - - toBuildTarget :: ComponentTarget -> Cabal.BuildTarget - toBuildTarget (ComponentTarget cname subtarget) = - case subtarget of - WholeComponent -> Cabal.BuildTargetComponent cname - ModuleTarget mname -> Cabal.BuildTargetModule cname mname - FileTarget fname -> Cabal.BuildTargetFile cname fname - - -setupHsReplFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.ReplFlags -setupHsReplFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = - Cabal.ReplFlags { - replProgramPaths = mempty, --unused, set at configure time - replProgramArgs = mempty, --unused, set at configure time - replVerbosity = toFlag verbosity, - replDistPref = toFlag builddir, - replReload = mempty --only used as callback from repl - } - - -setupHsReplArgs :: ElaboratedConfiguredPackage -> [String] -setupHsReplArgs pkg = - maybe [] (\t -> [showComponentTarget pkg t]) (pkgReplTarget pkg) - --TODO: should be able to give multiple modules in one component - - -setupHsCopyFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.CopyFlags -setupHsCopyFlags _ _ verbosity builddir = - Cabal.CopyFlags { - --TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right - -- thing, but perhaps we ought really to copy into an image dir and do - -- some sanity checks and move into the final location ourselves - copyDest = toFlag InstallDirs.NoCopyDest, - copyDistPref = toFlag builddir, - copyVerbosity = toFlag verbosity - } - -setupHsRegisterFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> FilePath - -> Cabal.RegisterFlags -setupHsRegisterFlags ElaboratedConfiguredPackage {pkgBuildStyle} _ - verbosity builddir pkgConfFile = - Cabal.RegisterFlags { - regPackageDB = mempty, -- misfeature - regGenScript = mempty, -- never use - regGenPkgConf = toFlag (Just pkgConfFile), - regInPlace = case pkgBuildStyle of - BuildInplaceOnly -> toFlag True - _ -> toFlag False, - regPrintId = mempty, -- never use - regDistPref = toFlag builddir, - regVerbosity = toFlag verbosity - } - -setupHsHaddockFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.HaddockFlags -setupHsHaddockFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = - Cabal.HaddockFlags { - haddockProgramPaths = mempty, --unused, set at configure time - haddockProgramArgs = mempty, --unused, set at configure time - haddockHoogle = toFlag pkgHaddockHoogle, - haddockHtml = toFlag pkgHaddockHtml, - haddockHtmlLocation = maybe mempty toFlag pkgHaddockHtmlLocation, - haddockForHackage = mempty, --TODO: new flag - haddockExecutables = toFlag pkgHaddockExecutables, - haddockTestSuites = toFlag pkgHaddockTestSuites, - haddockBenchmarks = toFlag pkgHaddockBenchmarks, - haddockInternal = toFlag pkgHaddockInternal, - haddockCss = maybe mempty toFlag pkgHaddockCss, - haddockHscolour = toFlag pkgHaddockHscolour, - haddockHscolourCss = maybe mempty toFlag pkgHaddockHscolourCss, - haddockContents = maybe mempty toFlag pkgHaddockContents, - haddockDistPref = toFlag builddir, - haddockKeepTempFiles = mempty, --TODO: from build settings - haddockVerbosity = toFlag verbosity - } - -{- -setupHsTestFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> 
FilePath - -> Cabal.TestFlags -setupHsTestFlags _ _ verbosity builddir = - Cabal.TestFlags { - } --} - ------------------------------------------------------------------------------- --- * Sharing installed packages ------------------------------------------------------------------------------- - --- --- Nix style store management for tarball packages --- --- So here's our strategy: --- --- We use a per-user nix-style hashed store, but /only/ for tarball packages. --- So that includes packages from hackage repos (and other http and local --- tarballs). For packages in local directories we do not register them into --- the shared store by default, we just build them locally inplace. --- --- The reason we do it like this is that it's easy to make stable hashes for --- tarball packages, and these packages benefit most from sharing. By contrast --- unpacked dir packages are harder to hash and they tend to change more --- frequently so there's less benefit to sharing them. --- --- When using the nix store approach we have to run the solver *without* --- looking at the packages installed in the store, just at the source packages --- (plus core\/global installed packages). Then we do a post-processing pass --- to replace configured packages in the plan with pre-existing ones, where --- possible. Where possible of course means where the nix-style package hash --- equals one that's already in the store. --- --- One extra wrinkle is that unless we know package tarball hashes upfront, we --- will have to download the tarballs to find their hashes. So we have two --- options: delay replacing source with pre-existing installed packages until --- the point during the execution of the install plan where we have the --- tarball, or try to do as much up-front as possible and then check again --- during plan execution. The former isn't great because we would end up --- telling users we're going to re-install loads of packages when in fact we --- would just share them. It'd be better to give as accurate a prediction as --- we can. The latter is better for users, but we do still have to check --- during plan execution because it's important that we don't replace existing --- installed packages even if they have the same package hash, because we --- don't guarantee ABI stability. - --- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but --- not replace installed packages with ghc-pkg. - -packageHashInputs :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> PackageHashInputs -packageHashInputs - pkgshared - pkg@ElaboratedConfiguredPackage{ - pkgSourceId, - pkgSourceHash = Just srchash, - pkgDependencies - } = - PackageHashInputs { - pkgHashPkgId = pkgSourceId, - pkgHashSourceHash = srchash, - pkgHashDirectDeps = Set.fromList - [ installedPackageId dep - | dep <- CD.select relevantDeps pkgDependencies ], - pkgHashOtherConfig = packageHashConfigInputs pkgshared pkg - } - where - -- Obviously the main deps are relevant - relevantDeps CD.ComponentLib = True - relevantDeps (CD.ComponentExe _) = True - -- Setup deps can affect the Setup.hs behaviour and thus what is built - relevantDeps CD.ComponentSetup = True - -- However testsuites and benchmarks do not get installed and should not - -- affect the result, so we do not include them. - relevantDeps (CD.ComponentTest _) = False - relevantDeps (CD.ComponentBench _) = False - -packageHashInputs _ pkg = - error $ "packageHashInputs: only for packages with source hashes. 
" - ++ display (packageId pkg) - -packageHashConfigInputs :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> PackageHashConfigInputs -packageHashConfigInputs - ElaboratedSharedConfig{..} - ElaboratedConfiguredPackage{..} = - - PackageHashConfigInputs { - pkgHashCompilerId = compilerId pkgConfigCompiler, - pkgHashPlatform = pkgConfigPlatform, - pkgHashFlagAssignment = pkgFlagAssignment, - pkgHashConfigureScriptArgs = pkgConfigureScriptArgs, - pkgHashVanillaLib = pkgVanillaLib, - pkgHashSharedLib = pkgSharedLib, - pkgHashDynExe = pkgDynExe, - pkgHashGHCiLib = pkgGHCiLib, - pkgHashProfLib = pkgProfLib, - pkgHashProfExe = pkgProfExe, - pkgHashProfLibDetail = pkgProfLibDetail, - pkgHashProfExeDetail = pkgProfExeDetail, - pkgHashCoverage = pkgCoverage, - pkgHashOptimization = pkgOptimization, - pkgHashSplitObjs = pkgSplitObjs, - pkgHashStripLibs = pkgStripLibs, - pkgHashStripExes = pkgStripExes, - pkgHashDebugInfo = pkgDebugInfo, - pkgHashExtraLibDirs = pkgExtraLibDirs, - pkgHashExtraFrameworkDirs = pkgExtraFrameworkDirs, - pkgHashExtraIncludeDirs = pkgExtraIncludeDirs, - pkgHashProgPrefix = pkgProgPrefix, - pkgHashProgSuffix = pkgProgSuffix - } - - --- | Given the 'InstalledPackageIndex' for a nix-style package store, and an --- 'ElaboratedInstallPlan', replace configured source packages by pre-existing --- installed packages whenever they exist. --- -improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex - -> ElaboratedInstallPlan - -> ElaboratedInstallPlan -improveInstallPlanWithPreExistingPackages installedPkgIndex installPlan = - replaceWithPreExisting installPlan - [ ipkg - | InstallPlan.Configured pkg - <- InstallPlan.reverseTopologicalOrder installPlan - , ipkg <- maybeToList (canPackageBeImproved pkg) ] - where - --TODO: sanity checks: - -- * the installed package must have the expected deps etc - -- * the installed package must not be broken, valid dep closure - - --TODO: decide what to do if we encounter broken installed packages, - -- since overwriting is never safe. - - canPackageBeImproved pkg = - PackageIndex.lookupUnitId - installedPkgIndex (installedPackageId pkg) - - replaceWithPreExisting = - foldl' (\plan ipkg -> InstallPlan.preexisting - (installedPackageId ipkg) ipkg plan) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanOutput.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanOutput.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanOutput.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/ProjectPlanOutput.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,100 +0,0 @@ -{-# LANGUAGE BangPatterns, RecordWildCards, NamedFieldPuns, - DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, - ScopedTypeVariables #-} - --- | An experimental new UI for cabal for working with multiple packages ------------------------------------------------------------------------------ -module Distribution.Client.ProjectPlanOutput ( - writePlanExternalRepresentation, - ) where - -import Distribution.Client.ProjectPlanning.Types - ( ElaboratedInstallPlan, ElaboratedConfiguredPackage(..) - , ElaboratedSharedConfig(..) 
) -import Distribution.Client.DistDirLayout - -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.Utils.Json as J -import qualified Distribution.Client.ComponentDeps as ComponentDeps - -import Distribution.Package -import qualified Distribution.PackageDescription as PD -import Distribution.Text -import Distribution.Simple.Utils -import qualified Paths_cabal_install as Our (version) - -import Data.Monoid -import qualified Data.ByteString.Builder as BB - - --- | Write out a representation of the elaborated install plan. --- --- This is for the benefit of debugging and external tools like editors. --- -writePlanExternalRepresentation :: DistDirLayout - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> IO () -writePlanExternalRepresentation distDirLayout elaboratedInstallPlan - elaboratedSharedConfig = - writeFileAtomic (distProjectCacheFile distDirLayout "plan.json") $ - BB.toLazyByteString - . J.encodeToBuilder - $ encodePlanAsJson elaboratedInstallPlan elaboratedSharedConfig - --- | Renders a subset of the elaborated install plan in a semi-stable JSON --- format. --- -encodePlanAsJson :: ElaboratedInstallPlan -> ElaboratedSharedConfig -> J.Value -encodePlanAsJson elaboratedInstallPlan _elaboratedSharedConfig = - --TODO: [nice to have] include all of the sharedPackageConfig and all of - -- the parts of the elaboratedInstallPlan - J.object [ "cabal-version" J..= jdisplay Our.version - , "cabal-lib-version" J..= jdisplay cabalVersion - , "install-plan" J..= jsonIPlan - ] - where - jsonIPlan = map toJ (InstallPlan.toList elaboratedInstallPlan) - - -- ipi :: InstalledPackageInfo - toJ (InstallPlan.PreExisting ipi) = - -- installed packages currently lack configuration information - -- such as their flag settings or non-lib components. - -- - -- TODO: how to find out whether package is "local"? - J.object - [ "type" J..= J.String "pre-existing" - , "id" J..= jdisplay (installedUnitId ipi) - , "components" J..= J.object - [ "lib" J..= J.object [ "depends" J..= map jdisplay (installedDepends ipi) ] ] - ] - - -- ecp :: ElaboratedConfiguredPackage - toJ (InstallPlan.Configured ecp) = - J.object - [ "type" J..= J.String "configured" - , "id" J..= (jdisplay . installedUnitId) ecp - , "components" J..= components - , "flags" J..= J.object [ fn J..= v - | (PD.FlagName fn,v) <- pkgFlagAssignment ecp ] - ] - where - components = J.object - [ comp2str c J..= J.object - [ "depends" J..= map (jdisplay . installedUnitId) v ] - | (c,v) <- ComponentDeps.toList (pkgDependencies ecp) ] - - toJ _ = error "encodePlanToJson: only expecting PreExisting and Configured" - - -- TODO: maybe move this helper to "ComponentDeps" module? - -- Or maybe define a 'Text' instance? - comp2str c = case c of - ComponentDeps.ComponentLib -> "lib" - ComponentDeps.ComponentExe s -> "exe:" <> s - ComponentDeps.ComponentTest s -> "test:" <> s - ComponentDeps.ComponentBench s -> "bench:" <> s - ComponentDeps.ComponentSetup -> "setup" - - jdisplay :: Text a => a -> J.Value - jdisplay = J.String . 
display - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/RebuildMonad.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/RebuildMonad.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/RebuildMonad.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/RebuildMonad.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - --- | An abstraction for re-running actions if values or files have changed. --- --- This is not a full-blown make-style incremental build system, it's a bit --- more ad-hoc than that, but it's easier to integrate with existing code. --- --- It's a convenient interface to the "Distribution.Client.FileMonitor" --- functions. --- -module Distribution.Client.RebuildMonad ( - -- * Rebuild monad - Rebuild, - runRebuild, - askRoot, - - -- * Setting up file monitoring - monitorFiles, - MonitorFilePath, - monitorFile, - monitorFileHashed, - monitorNonExistentFile, - monitorDirectory, - monitorNonExistentDirectory, - monitorDirectoryExistence, - monitorFileOrDirectory, - monitorFileSearchPath, - monitorFileHashedSearchPath, - -- ** Monitoring file globs - monitorFileGlob, - monitorFileGlobExistence, - FilePathGlob(..), - FilePathRoot(..), - FilePathGlobRel(..), - GlobPiece(..), - - -- * Using a file monitor - FileMonitor(..), - newFileMonitor, - rerunIfChanged, - - -- * Utils - matchFileGlob, - ) where - -import Distribution.Client.FileMonitor -import Distribution.Client.Glob hiding (matchFileGlob) -import qualified Distribution.Client.Glob as Glob (matchFileGlob) - -import Distribution.Simple.Utils (debug) -import Distribution.Verbosity (Verbosity) - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad.State as State -import Control.Monad.Reader as Reader -import Distribution.Compat.Binary (Binary) -import System.FilePath (takeFileName) - - --- | A monad layered on top of 'IO' to help with re-running actions when the --- input files and values they depend on change. The crucial operations are --- 'rerunIfChanged' and 'monitorFiles'. --- -newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a) - deriving (Functor, Applicative, Monad, MonadIO) - --- | Use this wihin the body action of 'rerunIfChanged' to declare that the --- action depends on the given files. This can be based on what the action --- actually did. It is these files that will be checked for changes next --- time 'rerunIfChanged' is called for that 'FileMonitor'. --- --- Relative paths are interpreted as relative to an implicit root, ultimately --- passed in to 'runRebuild'. --- -monitorFiles :: [MonitorFilePath] -> Rebuild () -monitorFiles filespecs = Rebuild (State.modify (filespecs++)) - --- | Run a 'Rebuild' IO action. -unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath]) -unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) [] - --- | Run a 'Rebuild' IO action. -runRebuild :: FilePath -> Rebuild a -> IO a -runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) [] - --- | The root that relative paths are interpreted as being relative to. 
-askRoot :: Rebuild FilePath -askRoot = Rebuild Reader.ask - --- | This captures the standard use pattern for a 'FileMonitor': given a --- monitor, an action and the input value the action depends on, either --- re-run the action to get its output, or if the value and files the action --- depends on have not changed then return a previously cached action result. --- --- The result is still in the 'Rebuild' monad, so these can be nested. --- --- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'. --- -rerunIfChanged :: (Binary a, Binary b) - => Verbosity - -> FileMonitor a b - -> a - -> Rebuild b - -> Rebuild b -rerunIfChanged verbosity monitor key action = do - rootDir <- askRoot - changed <- liftIO $ checkFileMonitorChanged monitor rootDir key - case changed of - MonitorUnchanged result files -> do - liftIO $ debug verbosity $ "File monitor '" ++ monitorName - ++ "' unchanged." - monitorFiles files - return result - - MonitorChanged reason -> do - liftIO $ debug verbosity $ "File monitor '" ++ monitorName - ++ "' changed: " ++ showReason reason - startTime <- liftIO $ beginUpdateFileMonitor - (result, files) <- liftIO $ unRebuild rootDir action - liftIO $ updateFileMonitor monitor rootDir - (Just startTime) files key result - monitorFiles files - return result - where - monitorName = takeFileName (fileMonitorCacheFile monitor) - - showReason (MonitoredFileChanged file) = "file " ++ file - showReason (MonitoredValueChanged _) = "monitor value changed" - showReason MonitorFirstRun = "first run" - showReason MonitorCorruptCache = "invalid cache file" - - --- | Utility to match a file glob against the file system, starting from a --- given root directory. The results are all relative to the given root. --- --- Since this operates in the 'Rebuild' monad, it also monitors the given glob --- for changes. --- -matchFileGlob :: FilePathGlob -> Rebuild [FilePath] -matchFileGlob glob = do - root <- askRoot - monitorFiles [monitorFileGlobExistence glob] - liftIO $ Glob.matchFileGlob root glob - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Run.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Run.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Run.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Run.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,139 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Run --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of the 'run' command. 
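-- Hypothetical usage of the Rebuild monad removed above, based on the
-- signatures visible in this hunk (runRebuild, monitorFiles, rerunIfChanged).
-- newFileMonitor and monitorFileHashed come from the module's export list but
-- their exact argument types are assumed; the cache path, file names and the
-- word-counting body are made up. This shows the calling pattern only, it is
-- not code from cabal-install.
import Distribution.Client.RebuildMonad
import Distribution.Verbosity (Verbosity)
import Control.Monad.IO.Class (liftIO)
import System.FilePath ((</>))

wordCountRebuild :: Verbosity -> FilePath -> IO Int
wordCountRebuild verbosity rootDir =
    runRebuild rootDir $
      rerunIfChanged verbosity monitor inputFile $ do
        -- declare what this action depends on, so the cached result is
        -- invalidated when the input file's contents change
        monitorFiles [monitorFileHashed inputFile]
        liftIO (length . words <$> readFile inputFile)
  where
    inputFile = "input.txt"
    monitor :: FileMonitor FilePath Int
    monitor = newFileMonitor (rootDir </> "cache" </> "wordcount")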
------------------------------------------------------------------------------ - -module Distribution.Client.Run ( run, splitRunArgs ) - where - -import Distribution.Client.Utils (tryCanonicalizePath) - -import Distribution.PackageDescription (Executable (..), - TestSuite(..), - Benchmark(..), - PackageDescription (..), - BuildInfo(buildable)) -import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) -import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) -import Distribution.Simple.BuildPaths (exeExtension) -import Distribution.Simple.LocalBuildInfo (ComponentName (..), - LocalBuildInfo (..), - getComponentLocalBuildInfo, - depLibraryPaths) -import Distribution.Simple.Utils (die, notice, warn, - rawSystemExitWithEnv, - addLibraryPath) -import Distribution.System (Platform (..)) -import Distribution.Verbosity (Verbosity) - -import qualified Distribution.Simple.GHCJS as GHCJS - -#if !MIN_VERSION_base(4,8,0) -import Data.Functor ((<$>)) -#endif -import Data.List (find) -import Data.Foldable (traverse_) -import System.Directory (getCurrentDirectory) -import Distribution.Compat.Environment (getEnvironment) -import System.FilePath ((<.>), ()) - - --- | Return the executable to run and any extra arguments that should be --- forwarded to it. Die in case of error. -splitRunArgs :: Verbosity -> LocalBuildInfo -> [String] - -> IO (Executable, [String]) -splitRunArgs verbosity lbi args = - case whichExecutable of -- Either err (wasManuallyChosen, exe, paramsRest) - Left err -> do - warn verbosity `traverse_` maybeWarning -- If there is a warning, print it. - die err - Right (True, exe, xs) -> return (exe, xs) - Right (False, exe, xs) -> do - let addition = " Interpreting all parameters to `run` as a parameter to" - ++ " the default executable." - -- If there is a warning, print it together with the addition. - warn verbosity `traverse_` fmap (++addition) maybeWarning - return (exe, xs) - where - pkg_descr = localPkgDescr lbi - whichExecutable :: Either String -- Error string. - ( Bool -- If it was manually chosen. - , Executable -- The executable. - , [String] -- The remaining parameters. - ) - whichExecutable = case (enabledExes, args) of - ([] , _) -> Left "Couldn't find any enabled executables." - ([exe], []) -> return (False, exe, []) - ([exe], (x:xs)) - | x == exeName exe -> return (True, exe, xs) - | otherwise -> return (False, exe, args) - (_ , []) -> Left - $ "This package contains multiple executables. " - ++ "You must pass the executable name as the first argument " - ++ "to 'cabal run'." - (_ , (x:xs)) -> - case find (\exe -> exeName exe == x) enabledExes of - Nothing -> Left $ "No executable named '" ++ x ++ "'." - Just exe -> return (True, exe, xs) - where - enabledExes = filter (buildable . buildInfo) (executables pkg_descr) - - maybeWarning :: Maybe String - maybeWarning = case args of - [] -> Nothing - (x:_) -> lookup x components - where - components :: [(String, String)] -- Component name, message. - components = - [ (name, "The executable '" ++ name ++ "' is disabled.") - | e <- executables pkg_descr - , not . buildable . buildInfo $ e, let name = exeName e] - - ++ [ (name, "There is a test-suite '" ++ name ++ "'," - ++ " but the `run` command is only for executables.") - | t <- testSuites pkg_descr - , let name = testName t] - - ++ [ (name, "There is a benchmark '" ++ name ++ "'," - ++ " but the `run` command is only for executables.") - | b <- benchmarks pkg_descr - , let name = benchmarkName b] - --- | Run a given executable. 
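-- Condensed restatement of the executable-selection rules implemented by
-- splitRunArgs above, over plain executable names. The real function also
-- reports warnings about disabled executables, test-suites and benchmarks;
-- only the selection logic is reproduced here, as an illustrative sketch.
pickExecutableSketch
  :: [String]                          -- ^ names of enabled executables
  -> [String]                          -- ^ arguments given to `cabal run`
  -> Either String (String, [String])  -- ^ error, or (exe, forwarded args)
pickExecutableSketch enabledExes args =
  case (enabledExes, args) of
    ([],    _)     -> Left "Couldn't find any enabled executables."
    ([exe], [])    -> Right (exe, [])
    ([exe], x:xs)
      | x == exe   -> Right (exe, xs)   -- the exe was named explicitly
      | otherwise  -> Right (exe, args) -- all args go to the only exe
    (_,     [])    -> Left "Multiple executables: pass the executable name as the first argument to 'cabal run'."
    (_,     x:xs)  ->
      case filter (== x) enabledExes of
        (exe:_) -> Right (exe, xs)
        []      -> Left ("No executable named '" ++ x ++ "'.")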
-run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () -run verbosity lbi exe exeArgs = do - curDir <- getCurrentDirectory - let buildPref = buildDir lbi - pkg_descr = localPkgDescr lbi - dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", - curDir dataDir pkg_descr) - - (path, runArgs) <- - case compilerFlavor (compiler lbi) of - GHCJS -> do - let (script, cmd, cmdArgs) = - GHCJS.runCmd (withPrograms lbi) - (buildPref exeName exe exeName exe) - script' <- tryCanonicalizePath script - return (cmd, cmdArgs ++ [script']) - _ -> do - p <- tryCanonicalizePath $ - buildPref exeName exe (exeName exe <.> exeExtension) - return (p, []) - - env <- (dataDirEnvVar:) <$> getEnvironment - -- Add (DY)LD_LIBRARY_PATH if needed - env' <- if withDynExe lbi - then do let (Platform _ os) = hostPlatform lbi - clbi = getComponentLocalBuildInfo lbi - (CExeName (exeName exe)) - paths <- depLibraryPaths True False lbi clbi - return (addLibraryPath os paths env) - else return env - notice verbosity $ "Running " ++ exeName exe ++ "..." - rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Index.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Index.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Index.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Index.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,281 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Index --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Querying and modifying local build tree references in the package index. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Index ( - createEmpty, - addBuildTreeRefs, - removeBuildTreeRefs, - ListIgnoredBuildTreeRefs(..), RefTypesToList(..), - DeleteSourceError(..), - listBuildTreeRefs, - validateIndexPath, - - defaultIndexFileName - ) where - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Index as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.IndexUtils ( BuildTreeRefType(..) - , refTypeFromTypeCode - , typeCodeFromRefType - , updatePackageIndexCacheFile - , readCacheStrict - , Index(..) 
) -import qualified Distribution.Client.IndexUtils as IndexUtils -import Distribution.Client.Utils ( byteStringToFilePath, filePathToByteString - , makeAbsoluteToCwd, tryCanonicalizePath - , tryFindAddSourcePackageDesc ) - -import Distribution.Simple.Utils ( die, debug ) -import Distribution.Compat.Exception ( tryIO ) -import Distribution.Verbosity ( Verbosity ) - -import qualified Data.ByteString.Lazy as BS -import Control.Exception ( evaluate, throw, Exception ) -import Control.Monad ( liftM, unless ) -import Control.Monad.Writer.Lazy (WriterT(..), runWriterT, tell) -import Data.List ( (\\), intersect, nub, find ) -import Data.Maybe ( catMaybes, fromMaybe ) -import Data.Either (partitionEithers) -import System.Directory ( createDirectoryIfMissing, - doesDirectoryExist, doesFileExist, - renameFile, canonicalizePath) -import System.FilePath ( (), (<.>), takeDirectory, takeExtension ) -import System.IO ( IOMode(..), withBinaryFile ) - --- | A reference to a local build tree. -data BuildTreeRef = BuildTreeRef { - buildTreeRefType :: !BuildTreeRefType, - buildTreePath :: !FilePath - } - -defaultIndexFileName :: FilePath -defaultIndexFileName = "00-index.tar" - --- | Given a path, ensure that it refers to a local build tree. -buildTreeRefFromPath :: BuildTreeRefType -> FilePath -> IO (Maybe BuildTreeRef) -buildTreeRefFromPath refType dir = do - dirExists <- doesDirectoryExist dir - unless dirExists $ - die $ "directory '" ++ dir ++ "' does not exist" - _ <- tryFindAddSourcePackageDesc dir "Error adding source reference." - return . Just $ BuildTreeRef refType dir - --- | Given a tar archive entry, try to parse it as a local build tree reference. -readBuildTreeRef :: Tar.Entry -> Maybe BuildTreeRef -readBuildTreeRef entry = case Tar.entryContent entry of - (Tar.OtherEntryType typeCode bs size) - | (Tar.isBuildTreeRefTypeCode typeCode) - && (size == BS.length bs) -> Just $! BuildTreeRef - (refTypeFromTypeCode typeCode) - (byteStringToFilePath bs) - | otherwise -> Nothing - _ -> Nothing - --- | Given a sequence of tar archive entries, extract all references to local --- build trees. -readBuildTreeRefs :: Exception e => Tar.Entries e -> [BuildTreeRef] -readBuildTreeRefs = - catMaybes - . Tar.foldEntries (\e r -> readBuildTreeRef e : r) - [] throw - --- | Given a path to a tar archive, extract all references to local build trees. -readBuildTreeRefsFromFile :: FilePath -> IO [BuildTreeRef] -readBuildTreeRefsFromFile = liftM (readBuildTreeRefs . Tar.read) . BS.readFile - --- | Read build tree references from an index cache -readBuildTreeRefsFromCache :: Verbosity -> FilePath -> IO [BuildTreeRef] -readBuildTreeRefsFromCache verbosity indexPath = do - (mRefs, _prefs) <- readCacheStrict verbosity (SandboxIndex indexPath) buildTreeRef - return (catMaybes mRefs) - where - buildTreeRef pkgEntry = - case pkgEntry of - IndexUtils.NormalPackage _ _ _ _ -> Nothing - IndexUtils.BuildTreeRef typ _ _ path _ -> Just $ BuildTreeRef typ path - --- | Given a local build tree ref, serialise it to a tar archive entry. -writeBuildTreeRef :: BuildTreeRef -> Tar.Entry -writeBuildTreeRef (BuildTreeRef refType path) = Tar.simpleEntry tarPath content - where - bs = filePathToByteString path - -- Provide a filename for tools that treat custom entries as ordinary files. - tarPath' = "local-build-tree-reference" - -- fromRight can't fail because the path is shorter than 255 characters. 
- tarPath = fromRight $ Tar.toTarPath True tarPath' - content = Tar.OtherEntryType (typeCodeFromRefType refType) bs (BS.length bs) - - -- TODO: Move this to D.C.Utils? - fromRight (Left err) = error err - fromRight (Right a) = a - --- | Check that the provided path is either an existing directory, or a tar --- archive in an existing directory. -validateIndexPath :: FilePath -> IO FilePath -validateIndexPath path' = do - path <- makeAbsoluteToCwd path' - if (== ".tar") . takeExtension $ path - then return path - else do dirExists <- doesDirectoryExist path - unless dirExists $ - die $ "directory does not exist: '" ++ path ++ "'" - return $ path defaultIndexFileName - --- | Create an empty index file. -createEmpty :: Verbosity -> FilePath -> IO () -createEmpty verbosity path = do - indexExists <- doesFileExist path - if indexExists - then debug verbosity $ "Package index already exists: " ++ path - else do - debug verbosity $ "Creating the index file '" ++ path ++ "'" - createDirectoryIfMissing True (takeDirectory path) - -- Equivalent to 'tar cvf empty.tar --files-from /dev/null'. - let zeros = BS.replicate (512*20) 0 - BS.writeFile path zeros - --- | Add given local build tree references to the index. -addBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> BuildTreeRefType - -> IO () -addBuildTreeRefs _ _ [] _ = - error "Distribution.Client.Sandbox.Index.addBuildTreeRefs: unexpected" -addBuildTreeRefs verbosity path l' refType = do - checkIndexExists path - l <- liftM nub . mapM tryCanonicalizePath $ l' - treesInIndex <- fmap (map buildTreePath) (readBuildTreeRefsFromFile path) - -- Add only those paths that aren't already in the index. - treesToAdd <- mapM (buildTreeRefFromPath refType) (l \\ treesInIndex) - let entries = map writeBuildTreeRef (catMaybes treesToAdd) - unless (null entries) $ do - withBinaryFile path ReadWriteMode $ \h -> do - block <- Tar.hSeekEndEntryOffset h Nothing - debug verbosity $ "Writing at tar block: " ++ show block - BS.hPut h (Tar.write entries) - debug verbosity $ "Successfully appended to '" ++ path ++ "'" - updatePackageIndexCacheFile verbosity $ SandboxIndex path - -data DeleteSourceError = ErrNonregisteredSource { nrPath :: FilePath } - | ErrNonexistentSource { nePath :: FilePath } deriving Show - --- | Remove given local build tree references from the index. --- --- Returns a tuple with either removed build tree refs or errors and a function --- that converts from a provided build tree ref to corresponding full directory path. -removeBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] - -> IO ([Either DeleteSourceError FilePath], - (FilePath -> FilePath)) -removeBuildTreeRefs _ _ [] = - error "Distribution.Client.Sandbox.Index.removeBuildTreeRefs: unexpected" -removeBuildTreeRefs verbosity indexPath l = do - checkIndexExists indexPath - let tmpFile = indexPath <.> "tmp" - - canonRes <- mapM (\btr -> do res <- tryIO $ canonicalizePath btr - return $ case res of - Right pth -> Right (btr, pth) - Left _ -> Left $ ErrNonexistentSource btr) l - let (failures, convDict) = partitionEithers canonRes - allRefs = fmap snd convDict - - -- Performance note: on my system, it takes 'index --remove-source' - -- approx. 3,5s to filter a 65M file. Real-life indices are expected to be - -- much smaller. 
- removedRefs <- doRemove convDict tmpFile - - renameFile tmpFile indexPath - debug verbosity $ "Successfully renamed '" ++ tmpFile - ++ "' to '" ++ indexPath ++ "'" - - unless (null removedRefs) $ - updatePackageIndexCacheFile verbosity $ SandboxIndex indexPath - - let results = fmap Right removedRefs - ++ fmap Left failures - ++ fmap (Left . ErrNonregisteredSource) - (fmap (convertWith convDict) (allRefs \\ removedRefs)) - - return (results, convertWith convDict) - - where - doRemove :: [(FilePath, FilePath)] -> FilePath -> IO [FilePath] - doRemove srcRefs tmpFile = do - (newIdx, changedPaths) <- - Tar.read `fmap` BS.readFile indexPath - >>= runWriterT . Tar.filterEntriesM (p $ fmap snd srcRefs) - BS.writeFile tmpFile . Tar.write . Tar.entriesToList $ newIdx - return changedPaths - - p :: [FilePath] -> Tar.Entry -> WriterT [FilePath] IO Bool - p refs entry = case readBuildTreeRef entry of - Nothing -> return True - -- FIXME: removing snapshot deps is done with `delete-source - -- .cabal-sandbox/snapshots/$SNAPSHOT_NAME`. Perhaps we also want to - -- support removing snapshots by providing the original path. - (Just (BuildTreeRef _ pth)) -> if pth `elem` refs - then tell [pth] >> return False - else return True - - convertWith dict pth = fromMaybe pth $ fmap fst $ find ((==pth) . snd) dict - --- | A build tree ref can become ignored if the user later adds a build tree ref --- with the same package ID. We display ignored build tree refs when the user --- runs 'cabal sandbox list-sources', but do not look at their timestamps in --- 'reinstallAddSourceDeps'. -data ListIgnoredBuildTreeRefs = ListIgnored | DontListIgnored - --- | Which types of build tree refs should be listed? -data RefTypesToList = OnlySnapshots | OnlyLinks | LinksAndSnapshots - --- | List the local build trees that are referred to from the index. -listBuildTreeRefs :: Verbosity -> ListIgnoredBuildTreeRefs -> RefTypesToList - -> FilePath - -> IO [FilePath] -listBuildTreeRefs verbosity listIgnored refTypesToList path = do - checkIndexExists path - buildTreeRefs <- - case listIgnored of - DontListIgnored -> do - paths <- listWithoutIgnored - case refTypesToList of - LinksAndSnapshots -> return paths - _ -> do - allPathsFiltered <- fmap (map buildTreePath . filter predicate) - listWithIgnored - _ <- evaluate (length allPathsFiltered) - return (paths `intersect` allPathsFiltered) - - ListIgnored -> fmap (map buildTreePath . filter predicate) listWithIgnored - - _ <- evaluate (length buildTreeRefs) - return buildTreeRefs - - where - predicate :: BuildTreeRef -> Bool - predicate = case refTypesToList of - OnlySnapshots -> (==) SnapshotRef . buildTreeRefType - OnlyLinks -> (==) LinkRef . buildTreeRefType - LinksAndSnapshots -> const True - - listWithIgnored :: IO [BuildTreeRef] - listWithIgnored = readBuildTreeRefsFromFile path - - listWithoutIgnored :: IO [FilePath] - listWithoutIgnored = fmap (map buildTreePath) - $ readBuildTreeRefsFromCache verbosity path - - --- | Check that the package index file exists and exit with error if it does not. 
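For reference, a minimal standalone sketch of the round trip the index code above performs for build-tree references: each add-source path is stored as a custom tar entry whose content is the path itself, and reading the index keeps only entries carrying the build-tree type code. This is a sketch, not Cabal's implementation: the type code 'C' is a hypothetical stand-in, and the real module goes through Distribution.Client.Tar plus UTF-8-aware path encoding helpers rather than Char8 pack/unpack.

    import qualified Codec.Archive.Tar as Tar
    import qualified Codec.Archive.Tar.Entry as Tar
    import qualified Data.ByteString.Lazy.Char8 as BS

    -- Hypothetical type code marking "this entry is a build tree reference".
    buildTreeRefCode :: Char
    buildTreeRefCode = 'C'

    -- Serialise a local build tree path as a custom tar entry.
    writeRef :: FilePath -> Tar.Entry
    writeRef path =
      case Tar.toTarPath False "local-build-tree-reference" of
        Left err      -> error err  -- the fixed name is well under 255 characters
        Right tarPath -> Tar.simpleEntry tarPath content
      where
        bs      = BS.pack path
        content = Tar.OtherEntryType buildTreeRefCode bs (BS.length bs)

    -- Recover the paths of all build tree references from an index archive.
    readRefs :: BS.ByteString -> [FilePath]
    readRefs = Tar.foldEntries keep [] (const []) . Tar.read
      where
        keep entry rest = case Tar.entryContent entry of
          Tar.OtherEntryType code bs _
            | code == buildTreeRefCode -> BS.unpack bs : rest
          _                            -> rest

    main :: IO ()
    main = mapM_ putStrLn (readRefs (Tar.write [writeRef "/home/user/some-pkg"]))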
-checkIndexExists :: FilePath -> IO () -checkIndexExists path = do - indexExists <- doesFileExist path - unless indexExists $ - die $ "index does not exist: '" ++ path ++ "'" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/PackageEnvironment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/PackageEnvironment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/PackageEnvironment.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/PackageEnvironment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,557 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.PackageEnvironment --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Utilities for working with the package environment file. Patterned after --- Distribution.Client.Config. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.PackageEnvironment ( - PackageEnvironment(..) - , PackageEnvironmentType(..) - , classifyPackageEnvironment - , createPackageEnvironmentFile - , tryLoadSandboxPackageEnvironmentFile - , readPackageEnvironmentFile - , showPackageEnvironment - , showPackageEnvironmentWithComments - , setPackageDB - , sandboxPackageDBPath - , loadUserConfig - - , basePackageEnvironment - , initialPackageEnvironment - , commentPackageEnvironment - , sandboxPackageEnvironmentFile - , userPackageEnvironmentFile - ) where - -import Distribution.Client.Config ( SavedConfig(..), commentSavedConfig - , loadConfig, configFieldDescriptions - , haddockFlagsFields - , installDirsFields, withProgramsFields - , withProgramOptionsFields - , defaultCompiler ) -import Distribution.Client.Dependency.Types ( ConstraintSource (..) ) -import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) -import Distribution.Client.Setup ( GlobalFlags(..), ConfigExFlags(..) - , InstallFlags(..) - , defaultSandboxLocation ) -import Distribution.Utils.NubList ( toNubList ) -import Distribution.Simple.Compiler ( Compiler, PackageDB(..) - , compilerFlavor, showCompilerIdWithAbi ) -import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate - , defaultInstallDirs, combineInstallDirs - , fromPathTemplate, toPathTemplate ) -import Distribution.Simple.Setup ( Flag(..) - , ConfigFlags(..), HaddockFlags(..) - , fromFlagOrDefault, toFlag, flagToMaybe ) -import Distribution.Simple.Utils ( die, info, notice, warn ) -import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..) 
- , commaListField, commaNewLineListField - , liftField, lineNo, locatedErrorMsg - , parseFilePathQ, readFields - , showPWarning, simpleField - , syntaxError, warning ) -import Distribution.System ( Platform ) -import Distribution.Verbosity ( Verbosity, normal ) -import Control.Monad ( foldM, liftM2, when, unless ) -import Data.List ( partition ) -import Data.Maybe ( isJust ) -import Distribution.Compat.Exception ( catchIO ) -import Distribution.Compat.Semigroup -import System.Directory ( doesDirectoryExist, doesFileExist - , renameFile ) -import System.FilePath ( (<.>), (), takeDirectory ) -import System.IO.Error ( isDoesNotExistError ) -import Text.PrettyPrint ( ($+$) ) - -import qualified Text.PrettyPrint as Disp -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) -import qualified Distribution.Text as Text -import GHC.Generics ( Generic ) - - --- --- * Configuration saved in the package environment file --- - --- TODO: would be nice to remove duplication between --- D.C.Sandbox.PackageEnvironment and D.C.Config. -data PackageEnvironment = PackageEnvironment { - -- The 'inherit' feature is not used ATM, but could be useful in the future - -- for constructing nested sandboxes (see discussion in #1196). - pkgEnvInherit :: Flag FilePath, - pkgEnvSavedConfig :: SavedConfig -} deriving Generic - -instance Monoid PackageEnvironment where - mempty = gmempty - mappend = (<>) - -instance Semigroup PackageEnvironment where - (<>) = gmappend - --- | The automatically-created package environment file that should not be --- touched by the user. -sandboxPackageEnvironmentFile :: FilePath -sandboxPackageEnvironmentFile = "cabal.sandbox.config" - --- | Optional package environment file that can be used to customize the default --- settings. Created by the user. -userPackageEnvironmentFile :: FilePath -userPackageEnvironmentFile = "cabal.config" - --- | Type of the current package environment. -data PackageEnvironmentType = - SandboxPackageEnvironment -- ^ './cabal.sandbox.config' - | UserPackageEnvironment -- ^ './cabal.config' - | AmbientPackageEnvironment -- ^ '~/.cabal/config' - --- | Is there a 'cabal.sandbox.config' or 'cabal.config' in this --- directory? -classifyPackageEnvironment :: FilePath -> Flag FilePath -> Flag Bool - -> IO PackageEnvironmentType -classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag = - do isSandbox <- liftM2 (||) (return forceSandboxConfig) - (configExists sandboxPackageEnvironmentFile) - isUser <- configExists userPackageEnvironmentFile - return (classify isSandbox isUser) - where - configExists fname = doesFileExist (pkgEnvDir fname) - ignoreSandbox = fromFlagOrDefault False ignoreSandboxFlag - forceSandboxConfig = isJust . flagToMaybe $ sandboxConfigFileFlag - - classify :: Bool -> Bool -> PackageEnvironmentType - classify True _ - | not ignoreSandbox = SandboxPackageEnvironment - classify _ True = UserPackageEnvironment - classify _ False = AmbientPackageEnvironment - --- | Defaults common to 'initialPackageEnvironment' and --- 'commentPackageEnvironment'. -commonPackageEnvironmentConfig :: FilePath -> SavedConfig -commonPackageEnvironmentConfig sandboxDir = - mempty { - savedConfigureFlags = mempty { - -- TODO: Currently, we follow cabal-dev and set 'user-install: False' in - -- the config file. In the future we may want to distinguish between - -- global, sandbox and user install types. 
- configUserInstall = toFlag False, - configInstallDirs = installDirs - }, - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs, - savedGlobalFlags = mempty { - globalLogsDir = toFlag $ sandboxDir "logs", - -- Is this right? cabal-dev uses the global world file. - globalWorldFile = toFlag $ sandboxDir "world" - } - } - where - installDirs = sandboxInstallDirs sandboxDir - --- | 'commonPackageEnvironmentConfig' wrapped inside a 'PackageEnvironment'. -commonPackageEnvironment :: FilePath -> PackageEnvironment -commonPackageEnvironment sandboxDir = mempty { - pkgEnvSavedConfig = commonPackageEnvironmentConfig sandboxDir - } - --- | Given a path to a sandbox, return the corresponding InstallDirs record. -sandboxInstallDirs :: FilePath -> InstallDirs (Flag PathTemplate) -sandboxInstallDirs sandboxDir = mempty { - prefix = toFlag (toPathTemplate sandboxDir) - } - --- | These are the absolute basic defaults, the fields that must be --- initialised. When we load the package environment from the file we layer the --- loaded values over these ones. -basePackageEnvironment :: PackageEnvironment -basePackageEnvironment = - mempty { - pkgEnvSavedConfig = mempty { - savedConfigureFlags = mempty { - configHcFlavor = toFlag defaultCompiler, - configVerbosity = toFlag normal - } - } - } - --- | Initial configuration that we write out to the package environment file if --- it does not exist. When the package environment gets loaded this --- configuration gets layered on top of 'basePackageEnvironment'. -initialPackageEnvironment :: FilePath -> Compiler -> Platform - -> IO PackageEnvironment -initialPackageEnvironment sandboxDir compiler platform = do - defInstallDirs <- defaultInstallDirs (compilerFlavor compiler) - {- userInstall= -} False {- _hasLibs= -} False - let initialConfig = commonPackageEnvironmentConfig sandboxDir - installDirs = combineInstallDirs (\d f -> Flag $ fromFlagOrDefault d f) - defInstallDirs (savedUserInstallDirs initialConfig) - return $ mempty { - pkgEnvSavedConfig = initialConfig { - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs, - savedGlobalFlags = (savedGlobalFlags initialConfig) { - globalLocalRepos = toNubList [sandboxDir "packages"] - }, - savedConfigureFlags = setPackageDB sandboxDir compiler platform - (savedConfigureFlags initialConfig), - savedInstallFlags = (savedInstallFlags initialConfig) { - installSummaryFile = toNubList [toPathTemplate (sandboxDir - "logs" "build.log")] - } - } - } - --- | Return the path to the sandbox package database. -sandboxPackageDBPath :: FilePath -> Compiler -> Platform -> String -sandboxPackageDBPath sandboxDir compiler platform = - sandboxDir - (Text.display platform ++ "-" - ++ showCompilerIdWithAbi compiler - ++ "-packages.conf.d") --- The path in sandboxPackageDBPath should be kept in sync with the --- path in the bootstrap.sh which is used to bootstrap cabal-install --- into a sandbox. - --- | Use the package DB location specific for this compiler. -setPackageDB :: FilePath -> Compiler -> Platform -> ConfigFlags -> ConfigFlags -setPackageDB sandboxDir compiler platform configFlags = - configFlags { - configPackageDBs = [Just (SpecificPackageDB $ sandboxPackageDBPath - sandboxDir - compiler - platform)] - } - --- | Almost the same as 'savedConf `mappend` pkgEnv', but some settings are --- overridden instead of mappend'ed. 
-overrideSandboxSettings :: PackageEnvironment -> PackageEnvironment -> - PackageEnvironment -overrideSandboxSettings pkgEnv0 pkgEnv = - pkgEnv { - pkgEnvSavedConfig = mappendedConf { - savedConfigureFlags = (savedConfigureFlags mappendedConf) { - configPackageDBs = configPackageDBs pkgEnvConfigureFlags - } - , savedInstallFlags = (savedInstallFlags mappendedConf) { - installSummaryFile = installSummaryFile pkgEnvInstallFlags - } - }, - pkgEnvInherit = pkgEnvInherit pkgEnv0 - } - where - pkgEnvConf = pkgEnvSavedConfig pkgEnv - mappendedConf = (pkgEnvSavedConfig pkgEnv0) `mappend` pkgEnvConf - pkgEnvConfigureFlags = savedConfigureFlags pkgEnvConf - pkgEnvInstallFlags = savedInstallFlags pkgEnvConf - --- | Default values that get used if no value is given. Used here to include in --- comments when we write out the initial package environment. -commentPackageEnvironment :: FilePath -> IO PackageEnvironment -commentPackageEnvironment sandboxDir = do - commentConf <- commentSavedConfig - let baseConf = commonPackageEnvironmentConfig sandboxDir - return $ mempty { - pkgEnvSavedConfig = commentConf `mappend` baseConf - } - --- | If this package environment inherits from some other package environment, --- return that package environment; otherwise return mempty. -inheritedPackageEnvironment :: Verbosity -> PackageEnvironment - -> IO PackageEnvironment -inheritedPackageEnvironment verbosity pkgEnv = do - case (pkgEnvInherit pkgEnv) of - NoFlag -> return mempty - confPathFlag@(Flag _) -> do - conf <- loadConfig verbosity confPathFlag - return $ mempty { pkgEnvSavedConfig = conf } - --- | Load the user package environment if it exists (the optional "cabal.config" --- file). If it does not exist locally, attempt to load an optional global one. -userPackageEnvironment :: Verbosity -> FilePath -> Maybe FilePath -> IO PackageEnvironment -userPackageEnvironment verbosity pkgEnvDir globalConfigLocation = do - let path = pkgEnvDir userPackageEnvironmentFile - minp <- readPackageEnvironmentFile (ConstraintSourceUserConfig path) mempty path - case (minp, globalConfigLocation) of - (Just parseRes, _) -> processConfigParse path parseRes - (_, Just globalLoc) -> maybe (warn verbosity ("no constraints file found at " ++ globalLoc) >> return mempty) (processConfigParse globalLoc) =<< readPackageEnvironmentFile (ConstraintSourceUserConfig globalLoc) mempty globalLoc - _ -> return mempty - where - processConfigParse path (ParseOk warns parseResult) = do - when (not $ null warns) $ warn verbosity $ - unlines (map (showPWarning path) warns) - return parseResult - processConfigParse path (ParseFailed err) = do - let (line, msg) = locatedErrorMsg err - warn verbosity $ "Error parsing package environment file " ++ path - ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg - return mempty - --- | Same as @userPackageEnvironmentFile@, but returns a SavedConfig. -loadUserConfig :: Verbosity -> FilePath -> Maybe FilePath -> IO SavedConfig -loadUserConfig verbosity pkgEnvDir globalConfigLocation = - fmap pkgEnvSavedConfig $ userPackageEnvironment verbosity pkgEnvDir globalConfigLocation - --- | Common error handling code used by 'tryLoadSandboxPackageEnvironment' and --- 'updatePackageEnvironment'. 
-handleParseResult :: Verbosity -> FilePath - -> Maybe (ParseResult PackageEnvironment) - -> IO PackageEnvironment -handleParseResult verbosity path minp = - case minp of - Nothing -> die $ - "The package environment file '" ++ path ++ "' doesn't exist" - Just (ParseOk warns parseResult) -> do - when (not $ null warns) $ warn verbosity $ - unlines (map (showPWarning path) warns) - return parseResult - Just (ParseFailed err) -> do - let (line, msg) = locatedErrorMsg err - die $ "Error parsing package environment file " ++ path - ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg - --- | Try to load the given package environment file, exiting with error if it --- doesn't exist. Also returns the path to the sandbox directory. The path --- parameter should refer to an existing file. -tryLoadSandboxPackageEnvironmentFile :: Verbosity -> FilePath -> (Flag FilePath) - -> IO (FilePath, PackageEnvironment) -tryLoadSandboxPackageEnvironmentFile verbosity pkgEnvFile configFileFlag = do - let pkgEnvDir = takeDirectory pkgEnvFile - minp <- readPackageEnvironmentFile - (ConstraintSourceSandboxConfig pkgEnvFile) mempty pkgEnvFile - pkgEnv <- handleParseResult verbosity pkgEnvFile minp - - -- Get the saved sandbox directory. - -- TODO: Use substPathTemplate with - -- compilerTemplateEnv ++ platformTemplateEnv ++ abiTemplateEnv. - let sandboxDir = fromFlagOrDefault defaultSandboxLocation - . fmap fromPathTemplate . prefix . savedUserInstallDirs - . pkgEnvSavedConfig $ pkgEnv - - -- Do some sanity checks - dirExists <- doesDirectoryExist sandboxDir - -- TODO: Also check for an initialised package DB? - unless dirExists $ - die ("No sandbox exists at " ++ sandboxDir) - info verbosity $ "Using a sandbox located at " ++ sandboxDir - - let base = basePackageEnvironment - let common = commonPackageEnvironment sandboxDir - user <- userPackageEnvironment verbosity pkgEnvDir Nothing --TODO - inherited <- inheritedPackageEnvironment verbosity user - - -- Layer the package environment settings over settings from ~/.cabal/config. - cabalConfig <- fmap unsetSymlinkBinDir $ loadConfig verbosity configFileFlag - return (sandboxDir, - updateInstallDirs $ - (base `mappend` (toPkgEnv cabalConfig) `mappend` - common `mappend` inherited `mappend` user) - `overrideSandboxSettings` pkgEnv) - where - toPkgEnv config = mempty { pkgEnvSavedConfig = config } - - updateInstallDirs pkgEnv = - let config = pkgEnvSavedConfig pkgEnv - configureFlags = savedConfigureFlags config - installDirs = savedUserInstallDirs config - in pkgEnv { - pkgEnvSavedConfig = config { - savedConfigureFlags = configureFlags { - configInstallDirs = installDirs - } - } - } - - -- We don't want to inherit the value of 'symlink-bindir' from - -- '~/.cabal/config'. See #1514. - unsetSymlinkBinDir config = - let installFlags = savedInstallFlags config - in config { - savedInstallFlags = installFlags { - installSymlinkBinDir = NoFlag - } - } - --- | Create a new package environment file, replacing the existing one if it --- exists. Note that the path parameters should point to existing directories. 
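The layering done in tryLoadSandboxPackageEnvironmentFile relies on the generically derived Monoid for PackageEnvironment/SavedConfig, in which later layers win field by field. A small self-contained sketch of that semantics, using Data.Monoid.Last in place of Cabal's Flag; the field names and paths below are illustrative only.

    import Data.Monoid (Last(..))

    data MiniConfig = MiniConfig
      { withCompiler :: Last FilePath  -- roughly like configHcPath
      , verbosity    :: Last Int       -- roughly like configVerbosity
      } deriving Show

    instance Semigroup MiniConfig where
      a <> b = MiniConfig (withCompiler a <> withCompiler b)
                          (verbosity a    <> verbosity b)

    instance Monoid MiniConfig where
      mempty = MiniConfig mempty mempty

    base, userCabalConfig, sandboxFile :: MiniConfig
    base            = MiniConfig (Last Nothing)                   (Last (Just 1))
    userCabalConfig = MiniConfig (Last (Just "/usr/bin/ghc"))     (Last Nothing)
    sandboxFile     = MiniConfig (Last (Just "/opt/ghc/bin/ghc")) (Last Nothing)

    -- Later layers win field by field, so the sandbox file overrides the
    -- compiler from ~/.cabal/config while the base verbosity survives.
    main :: IO ()
    main = print (base <> userCabalConfig <> sandboxFile)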
-createPackageEnvironmentFile :: Verbosity -> FilePath -> FilePath - -> Compiler - -> Platform - -> IO () -createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile compiler platform = do - notice verbosity $ "Writing a default package environment file to " ++ pkgEnvFile - initialPkgEnv <- initialPackageEnvironment sandboxDir compiler platform - writePackageEnvironmentFile pkgEnvFile initialPkgEnv - --- | Descriptions of all fields in the package environment file. -pkgEnvFieldDescrs :: ConstraintSource -> [FieldDescr PackageEnvironment] -pkgEnvFieldDescrs src = [ - simpleField "inherit" - (fromFlagOrDefault Disp.empty . fmap Disp.text) (optional parseFilePathQ) - pkgEnvInherit (\v pkgEnv -> pkgEnv { pkgEnvInherit = v }) - - , commaNewLineListField "constraints" - (Text.disp . fst) ((\pc -> (pc, src)) `fmap` Text.parse) - (configExConstraints . savedConfigureExFlags . pkgEnvSavedConfig) - (\v pkgEnv -> updateConfigureExFlags pkgEnv - (\flags -> flags { configExConstraints = v })) - - , commaListField "preferences" - Text.disp Text.parse - (configPreferences . savedConfigureExFlags . pkgEnvSavedConfig) - (\v pkgEnv -> updateConfigureExFlags pkgEnv - (\flags -> flags { configPreferences = v })) - ] - ++ map toPkgEnv configFieldDescriptions' - where - optional = Parse.option mempty . fmap toFlag - - configFieldDescriptions' :: [FieldDescr SavedConfig] - configFieldDescriptions' = filter - (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") - (configFieldDescriptions src) - - toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment - toPkgEnv fieldDescr = - liftField pkgEnvSavedConfig - (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) - fieldDescr - - updateConfigureExFlags :: PackageEnvironment - -> (ConfigExFlags -> ConfigExFlags) - -> PackageEnvironment - updateConfigureExFlags pkgEnv f = pkgEnv { - pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { - savedConfigureExFlags = f . savedConfigureExFlags . pkgEnvSavedConfig - $ pkgEnv - } - } - --- | Read the package environment file. -readPackageEnvironmentFile :: ConstraintSource -> PackageEnvironment -> FilePath - -> IO (Maybe (ParseResult PackageEnvironment)) -readPackageEnvironmentFile src initial file = - handleNotExists $ - fmap (Just . parsePackageEnvironment src initial) (readFile file) - where - handleNotExists action = catchIO action $ \ioe -> - if isDoesNotExistError ioe - then return Nothing - else ioError ioe - --- | Parse the package environment file. 
-parsePackageEnvironment :: ConstraintSource -> PackageEnvironment -> String - -> ParseResult PackageEnvironment -parsePackageEnvironment src initial str = do - fields <- readFields str - let (knownSections, others) = partition isKnownSection fields - pkgEnv <- parse others - let config = pkgEnvSavedConfig pkgEnv - installDirs0 = savedUserInstallDirs config - (haddockFlags, installDirs, paths, args) <- - foldM parseSections - (savedHaddockFlags config, installDirs0, [], []) - knownSections - return pkgEnv { - pkgEnvSavedConfig = config { - savedConfigureFlags = (savedConfigureFlags config) { - configProgramPaths = paths, - configProgramArgs = args - }, - savedHaddockFlags = haddockFlags, - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs - } - } - - where - isKnownSection :: ParseUtils.Field -> Bool - isKnownSection (ParseUtils.Section _ "haddock" _ _) = True - isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True - isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True - isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True - isKnownSection _ = False - - parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment - parse = parseFields (pkgEnvFieldDescrs src) initial - - parseSections :: SectionsAccum -> ParseUtils.Field - -> ParseResult SectionsAccum - parseSections accum@(h,d,p,a) - (ParseUtils.Section _ "haddock" name fs) - | name == "" = do h' <- parseFields haddockFlagsFields h fs - return (h', d, p, a) - | otherwise = do - warning "The 'haddock' section should be unnamed" - return accum - parseSections (h,d,p,a) - (ParseUtils.Section line "install-dirs" name fs) - | name == "" = do d' <- parseFields installDirsFields d fs - return (h, d',p,a) - | otherwise = - syntaxError line $ - "Named 'install-dirs' section: '" ++ name - ++ "'. Note that named 'install-dirs' sections are not allowed in the '" - ++ userPackageEnvironmentFile ++ "' file." - parseSections accum@(h, d,p,a) - (ParseUtils.Section _ "program-locations" name fs) - | name == "" = do p' <- parseFields withProgramsFields p fs - return (h, d, p', a) - | otherwise = do - warning "The 'program-locations' section should be unnamed" - return accum - parseSections accum@(h, d, p, a) - (ParseUtils.Section _ "program-default-options" name fs) - | name == "" = do a' <- parseFields withProgramOptionsFields a fs - return (h, d, p, a') - | otherwise = do - warning "The 'program-default-options' section should be unnamed" - return accum - parseSections accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - --- | Accumulator type for 'parseSections'. -type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) - , [(String, FilePath)], [(String, [String])]) - --- | Write out the package environment file. -writePackageEnvironmentFile :: FilePath -> PackageEnvironment -> IO () -writePackageEnvironmentFile path pkgEnv = do - let tmpPath = (path <.> "tmp") - writeFile tmpPath $ explanation ++ pkgEnvStr ++ "\n" - renameFile tmpPath path - where - pkgEnvStr = showPackageEnvironment pkgEnv - explanation = unlines - ["-- This is a Cabal package environment file." - ,"-- THIS FILE IS AUTO-GENERATED. DO NOT EDIT DIRECTLY." - ,"-- Please create a 'cabal.config' file in the same directory" - ,"-- if you want to change the default settings for this sandbox." - ,"","" - ] - --- | Pretty-print the package environment. 
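writePackageEnvironmentFile above uses the usual write-to-temp-then-rename pattern, so a partially written file never replaces a good one. A minimal sketch of the pattern in isolation; the helper name atomicWriteFile is ours, and renameFile is only an atomic replacement while the temporary file stays on the same filesystem.

    import System.Directory (renameFile)
    import System.FilePath ((<.>))

    atomicWriteFile :: FilePath -> String -> IO ()
    atomicWriteFile path contents = do
      let tmp = path <.> "tmp"
      writeFile tmp contents   -- write the complete new contents first
      renameFile tmp path      -- then replace the old file in one step

    main :: IO ()
    main = atomicWriteFile "example.config" "-- example contents\n"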
-showPackageEnvironment :: PackageEnvironment -> String -showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv - --- | Pretty-print the package environment with default values for empty fields --- commented out (just like the default ~/.cabal/config). -showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) - -> PackageEnvironment - -> String -showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ - ppFields (pkgEnvFieldDescrs ConstraintSourceUnknown) - mdefPkgEnv pkgEnv - $+$ Disp.text "" - $+$ ppSection "install-dirs" "" installDirsFields - (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) - where - installDirsSection = savedUserInstallDirs . pkgEnvSavedConfig diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Timestamp.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Timestamp.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,268 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Timestamp --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Timestamp file handling (for add-source dependencies). ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Timestamp ( - AddSourceTimestamp, - withAddTimestamps, - withUpdateTimestamps, - maybeAddCompilerTimestampRecord, - listModifiedDeps, - removeTimestamps, - - -- * For testing - TimestampFileRecord, - readTimestampFile, - writeTimestampFile - ) where - -import Control.Monad (filterM, forM, when) -import Data.Char (isSpace) -import Data.List (partition) -import System.Directory (renameFile) -import System.FilePath ((<.>), ()) -import qualified Data.Map as M - -import Distribution.Compiler (CompilerId) -import Distribution.Simple.Utils (debug, die, warn) -import Distribution.System (Platform) -import Distribution.Text (display) -import Distribution.Verbosity (Verbosity) - -import Distribution.Client.SrcDist (allPackageSourceFiles) -import Distribution.Client.Sandbox.Index - (ListIgnoredBuildTreeRefs (ListIgnored), RefTypesToList(OnlyLinks) - ,listBuildTreeRefs) - -import Distribution.Compat.Exception (catchIO) -import Distribution.Client.Compat.Time (ModTime, getCurTime, - getModTime, - posixSecondsToModTime) - - --- | Timestamp of an add-source dependency. -type AddSourceTimestamp = (FilePath, ModTime) --- | Timestamp file record - a string identifying the compiler & platform plus a --- list of add-source timestamps. -type TimestampFileRecord = (String, [AddSourceTimestamp]) - -timestampRecordKey :: CompilerId -> Platform -> String -timestampRecordKey compId platform = display platform ++ "-" ++ display compId - --- | The 'add-source-timestamps' file keeps the timestamps of all add-source --- dependencies. It is initially populated by 'sandbox add-source' and kept --- current by 'reinstallAddSourceDeps' and 'configure -w'. The user can install --- add-source deps manually with 'cabal install' after having edited them, so we --- can err on the side of caution sometimes. 
--- FIXME: We should keep this info in the index file, together with build tree --- refs. -timestampFileName :: FilePath -timestampFileName = "add-source-timestamps" - --- | Read the timestamp file. Exits with error if the timestamp file is --- corrupted. Returns an empty list if the file doesn't exist. -readTimestampFile :: FilePath -> IO [TimestampFileRecord] -readTimestampFile timestampFile = do - timestampString <- readFile timestampFile `catchIO` \_ -> return "[]" - case reads timestampString of - [(version, s)] - | version == (2::Int) -> - case reads s of - [(timestamps, s')] | all isSpace s' -> return timestamps - _ -> dieCorrupted - | otherwise -> dieWrongFormat - - -- Old format (timestamps are POSIX seconds). Convert to new format. - [] -> - case reads timestampString of - [(timestamps, s)] | all isSpace s -> do - let timestamps' = map (\(i, ts) -> - (i, map (\(p, t) -> - (p, posixSecondsToModTime t)) ts)) - timestamps - writeTimestampFile timestampFile timestamps' - return timestamps' - _ -> dieCorrupted - _ -> dieCorrupted - where - dieWrongFormat = die $ wrongFormat ++ deleteAndRecreate - dieCorrupted = die $ corrupted ++ deleteAndRecreate - wrongFormat = "The timestamps file is in the wrong format." - corrupted = "The timestamps file is corrupted." - deleteAndRecreate = " Please delete and recreate the sandbox." - --- | Write the timestamp file, atomically. -writeTimestampFile :: FilePath -> [TimestampFileRecord] -> IO () -writeTimestampFile timestampFile timestamps = do - writeFile timestampTmpFile "2\n" -- version - appendFile timestampTmpFile (show timestamps ++ "\n") - renameFile timestampTmpFile timestampFile - where - timestampTmpFile = timestampFile <.> "tmp" - --- | Read, process and write the timestamp file in one go. -withTimestampFile :: FilePath - -> ([TimestampFileRecord] -> IO [TimestampFileRecord]) - -> IO () -withTimestampFile sandboxDir process = do - let timestampFile = sandboxDir timestampFileName - timestampRecords <- readTimestampFile timestampFile >>= process - writeTimestampFile timestampFile timestampRecords - --- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps --- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list --- for each path. If a timestamp for a given path already exists in the list, --- update it. -addTimestamps :: ModTime -> [AddSourceTimestamp] -> [FilePath] - -> [AddSourceTimestamp] -addTimestamps initial timestamps newPaths = - [ (p, initial) | p <- newPaths ] ++ oldTimestamps - where - (oldTimestamps, _toBeUpdated) = - partition (\(path, _) -> path `notElem` newPaths) timestamps - --- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps --- we've reinstalled and a new timestamp value, update the timestamp value for --- the deps in the list. If there are new paths in the list, ignore them. -updateTimestamps :: [AddSourceTimestamp] -> [FilePath] -> ModTime - -> [AddSourceTimestamp] -updateTimestamps timestamps pathsToUpdate newTimestamp = - foldr updateTimestamp [] timestamps - where - updateTimestamp t@(path, _oldTimestamp) rest - | path `elem` pathsToUpdate = (path, newTimestamp) : rest - | otherwise = t : rest - --- | Given a list of 'TimestampFileRecord's and a list of paths to add-source --- deps we've removed, remove those deps from the list. 
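readTimestampFile and writeTimestampFile below use a simple versioned text format: a version number on the first line, then the show'ed list of records, read back with 'reads' and rejected unless only whitespace trails. A simplified, runnable sketch of that format; Integer stands in for Cabal's ModTime, and the platform/compiler key used in main is only an example.

    import Data.Char (isSpace)

    -- Integer stands in for ModTime here.
    type Timestamps = [(String, [(FilePath, Integer)])]

    writeStamps :: FilePath -> Timestamps -> IO ()
    writeStamps file ts = writeFile file ("2\n" ++ show ts ++ "\n")

    readStamps :: FilePath -> IO (Maybe Timestamps)
    readStamps file = do
      str <- readFile file
      return $ case reads str of
        [(version, rest)] | version == (2 :: Int) ->
          case reads rest of
            [(ts, trailing)] | all isSpace trailing -> Just ts
            _                                       -> Nothing
        _ -> Nothing  -- unknown version, old format, or a corrupted file

    main :: IO ()
    main = do
      writeStamps "add-source-timestamps"
        [("x86_64-linux-ghc-8.0.1", [("/home/user/some-dep", 0)])]
      readStamps "add-source-timestamps" >>= print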
-removeTimestamps' :: [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] -removeTimestamps' l pathsToRemove = foldr removeTimestamp [] l - where - removeTimestamp t@(path, _oldTimestamp) rest = - if path `elem` pathsToRemove - then rest - else t : rest - --- | If a timestamp record for this compiler doesn't exist, add a new one. -maybeAddCompilerTimestampRecord :: Verbosity -> FilePath -> FilePath - -> CompilerId -> Platform - -> IO () -maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - compId platform = do - let key = timestampRecordKey compId platform - withTimestampFile sandboxDir $ \timestampRecords -> do - case lookup key timestampRecords of - Just _ -> return timestampRecords - Nothing -> do - buildTreeRefs <- listBuildTreeRefs verbosity ListIgnored OnlyLinks - indexFile - now <- getCurTime - let timestamps = map (\p -> (p, now)) buildTreeRefs - return $ (key, timestamps):timestampRecords - --- | Given an IO action that returns a list of build tree refs, add those --- build tree refs to the timestamps file (for all compilers). -withAddTimestamps :: FilePath -> IO [FilePath] -> IO () -withAddTimestamps sandboxDir act = do - let initialTimestamp = minBound - withActionOnAllTimestamps (addTimestamps initialTimestamp) sandboxDir act - --- | Given a list of build tree refs, remove those --- build tree refs from the timestamps file (for all compilers). -removeTimestamps :: FilePath -> [FilePath] -> IO () -removeTimestamps idxFile = - withActionOnAllTimestamps removeTimestamps' idxFile . return - --- | Given an IO action that returns a list of build tree refs, update the --- timestamps of the returned build tree refs to the current time (only for the --- given compiler & platform). -withUpdateTimestamps :: FilePath -> CompilerId -> Platform - ->([AddSourceTimestamp] -> IO [FilePath]) - -> IO () -withUpdateTimestamps = - withActionOnCompilerTimestamps updateTimestamps - --- | Helper for implementing 'withAddTimestamps' and --- 'withRemoveTimestamps'. Runs a given action on the list of --- 'AddSourceTimestamp's for all compilers, applies 'f' to the result and then --- updates the timestamp file. The IO action is run only once. -withActionOnAllTimestamps :: ([AddSourceTimestamp] -> [FilePath] - -> [AddSourceTimestamp]) - -> FilePath - -> IO [FilePath] - -> IO () -withActionOnAllTimestamps f sandboxDir act = - withTimestampFile sandboxDir $ \timestampRecords -> do - paths <- act - return [(key, f timestamps paths) | (key, timestamps) <- timestampRecords] - --- | Helper for implementing 'withUpdateTimestamps'. Runs a given action on the --- list of 'AddSourceTimestamp's for this compiler, applies 'f' to the result --- and then updates the timestamp file record. The IO action is run only once. -withActionOnCompilerTimestamps :: ([AddSourceTimestamp] - -> [FilePath] -> ModTime - -> [AddSourceTimestamp]) - -> FilePath - -> CompilerId - -> Platform - -> ([AddSourceTimestamp] -> IO [FilePath]) - -> IO () -withActionOnCompilerTimestamps f sandboxDir compId platform act = do - let needle = timestampRecordKey compId platform - withTimestampFile sandboxDir $ \timestampRecords -> do - timestampRecords' <- forM timestampRecords $ \r@(key, timestamps) -> - if key == needle - then do paths <- act timestamps - now <- getCurTime - return (key, f timestamps paths now) - else return r - return timestampRecords' - --- | Has this dependency been modified since we have last looked at it? 
-isDepModified :: Verbosity -> ModTime -> AddSourceTimestamp -> IO Bool -isDepModified verbosity now (packageDir, timestamp) = do - debug verbosity ("Checking whether the dependency is modified: " ++ packageDir) - depSources <- allPackageSourceFiles verbosity packageDir - go depSources - - where - go [] = return False - go (dep0:rest) = do - -- FIXME: What if the clock jumps backwards at any point? For now we only - -- print a warning. - let dep = packageDir dep0 - modTime <- getModTime dep - when (modTime > now) $ - warn verbosity $ "File '" ++ dep - ++ "' has a modification time that is in the future." - if modTime >= timestamp - then do - debug verbosity ("Dependency has a modified source file: " ++ dep) - return True - else go rest - --- | List all modified dependencies. -listModifiedDeps :: Verbosity -> FilePath -> CompilerId -> Platform - -> M.Map FilePath a - -- ^ The set of all installed add-source deps. - -> IO [FilePath] -listModifiedDeps verbosity sandboxDir compId platform installedDepsMap = do - timestampRecords <- readTimestampFile (sandboxDir timestampFileName) - let needle = timestampRecordKey compId platform - timestamps <- maybe noTimestampRecord return - (lookup needle timestampRecords) - now <- getCurTime - fmap (map fst) . filterM (isDepModified verbosity now) - . filter (\ts -> fst ts `M.member` installedDepsMap) - $ timestamps - - where - noTimestampRecord = die $ "Сouldn't find a timestamp record for the given " - ++ "compiler/platform pair. " - ++ "Please report this on the Cabal bug tracker: " - ++ "https://github.com/haskell/cabal/issues/new ." diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Types.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Types --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Helpers for writing code that works both inside and outside a sandbox. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Types ( - UseSandbox(..), isUseSandbox, whenUsingSandbox, - SandboxPackageInfo(..) - ) where - -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import Distribution.Client.Types (SourcePackage) -import Distribution.Compat.Semigroup (Semigroup((<>))) - -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -#endif -import qualified Data.Set as S - --- | Are we using a sandbox? -data UseSandbox = UseSandbox FilePath | NoSandbox - -instance Monoid UseSandbox where - mempty = NoSandbox - mappend = (<>) - -instance Semigroup UseSandbox where - NoSandbox <> s = s - u0@(UseSandbox _) <> NoSandbox = u0 - (UseSandbox _) <> u1@(UseSandbox _) = u1 - --- | Convert a @UseSandbox@ value to a boolean. Useful in conjunction with --- @when@. 
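The Semigroup instance for UseSandbox above is right-biased: combining two sandbox values keeps the sandbox mentioned last. A tiny standalone copy for experimentation, with the expected results in comments.

    data UseSandbox = UseSandbox FilePath | NoSandbox deriving Show

    instance Semigroup UseSandbox where
      NoSandbox        <> s         = s
      u@(UseSandbox _) <> NoSandbox = u
      UseSandbox _     <> u         = u

    instance Monoid UseSandbox where
      mempty = NoSandbox

    main :: IO ()
    main = do
      print (NoSandbox <> UseSandbox ".cabal-sandbox")        -- UseSandbox ".cabal-sandbox"
      print (UseSandbox "/tmp/sb1" <> UseSandbox "/tmp/sb2")  -- UseSandbox "/tmp/sb2"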
-isUseSandbox :: UseSandbox -> Bool -isUseSandbox (UseSandbox _) = True -isUseSandbox NoSandbox = False - --- | Execute an action only if we're in a sandbox, feeding to it the path to the --- sandbox directory. -whenUsingSandbox :: UseSandbox -> (FilePath -> IO ()) -> IO () -whenUsingSandbox NoSandbox _ = return () -whenUsingSandbox (UseSandbox sandboxDir) act = act sandboxDir - --- | Data about the packages installed in the sandbox that is passed from --- 'reinstallAddSourceDeps' to the solver. -data SandboxPackageInfo = SandboxPackageInfo { - modifiedAddSourceDependencies :: ![SourcePackage], - -- ^ Modified add-source deps that we want to reinstall. These are guaranteed - -- to be already installed in the sandbox. - - otherAddSourceDependencies :: ![SourcePackage], - -- ^ Remaining add-source deps. Some of these may be not installed in the - -- sandbox. - - otherInstalledSandboxPackages :: !InstalledPackageIndex.InstalledPackageIndex, - -- ^ All packages installed in the sandbox. Intersection with - -- 'modifiedAddSourceDependencies' and/or 'otherAddSourceDependencies' can be - -- non-empty. - - allAddSourceDependencies :: !(S.Set FilePath) - -- ^ A set of paths to all add-source dependencies, for convenience. - } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,886 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- UI for the sandboxing functionality. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox ( - sandboxInit, - sandboxDelete, - sandboxAddSource, - sandboxAddSourceSnapshot, - sandboxDeleteSource, - sandboxListSources, - sandboxHcPkg, - dumpPackageEnvironment, - withSandboxBinDirOnSearchPath, - - getSandboxConfigFilePath, - loadConfigOrSandboxConfig, - findSavedDistPref, - initPackageDBIfNeeded, - maybeWithSandboxDirOnSearchPath, - - WereDepsReinstalled(..), - reinstallAddSourceDeps, - maybeReinstallAddSourceDeps, - - SandboxPackageInfo(..), - maybeWithSandboxPackageInfo, - - tryGetIndexFilePath, - sandboxBuildDir, - getInstalledPackagesInSandbox, - updateSandboxConfigFileFlag, - updateInstallDirs, - - configPackageDB', configCompilerAux', getPersistOrConfigCompiler - ) where - -import Distribution.Client.Setup - ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) - , GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags - , defaultSandboxLocation, withRepoContext ) -import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps - , maybeAddCompilerTimestampRecord - , withAddTimestamps - , removeTimestamps ) -import Distribution.Client.Config - ( SavedConfig(..), defaultUserInstall, loadConfig ) -import Distribution.Client.Dependency ( foldProgress ) -import Distribution.Client.IndexUtils ( BuildTreeRefType(..) 
) -import Distribution.Client.Install ( InstallArgs, - makeInstallContext, - makeInstallPlan, - processInstallPlan ) -import Distribution.Utils.NubList ( fromNubList ) - -import Distribution.Client.Sandbox.PackageEnvironment - ( PackageEnvironment(..), PackageEnvironmentType(..) - , createPackageEnvironmentFile, classifyPackageEnvironment - , tryLoadSandboxPackageEnvironmentFile, loadUserConfig - , commentPackageEnvironment, showPackageEnvironmentWithComments - , sandboxPackageEnvironmentFile, userPackageEnvironmentFile - , sandboxPackageDBPath ) -import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) - , UseSandbox(..) ) -import Distribution.Client.SetupWrapper - ( SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Types ( PackageLocation(..) - , SourcePackage(..) ) -import Distribution.Client.Utils ( inDir, tryCanonicalizePath - , tryFindAddSourcePackageDesc) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.PackageDescription.Parse ( readPackageDescription ) -import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) - , PackageDBStack ) -import Distribution.Simple.Configure ( configCompilerAuxEx - , interpretPackageDbFlags - , getPackageDBContents - , maybeGetPersistBuildConfig - , findDistPrefOrDefault - , findDistPref ) -import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo -import Distribution.Simple.PreProcess ( knownSuffixHandlers ) -import Distribution.Simple.Program ( ProgramConfiguration ) -import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) - , fromFlagOrDefault, flagToMaybe ) -import Distribution.Simple.SrcDist ( prepareTree ) -import Distribution.Simple.Utils ( die, debug, notice, info, warn - , debugNoWrap, defaultPackageDesc - , intercalate, topHandlerWith - , createDirectoryIfMissingVerbose ) -import Distribution.Package ( Package(..) ) -import Distribution.System ( Platform ) -import Distribution.Text ( display ) -import Distribution.Verbosity ( Verbosity, lessVerbose ) -import Distribution.Compat.Environment ( lookupEnv, setEnv ) -import Distribution.Client.Compat.FilePerms ( setFileHidden ) -import qualified Distribution.Client.Sandbox.Index as Index -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import qualified Distribution.Simple.Register as Register -import qualified Data.Map as M -import qualified Data.Set as S -import Data.Either (partitionEithers) -import Control.Exception ( assert, bracket_ ) -import Control.Monad ( forM, liftM, liftM2, unless, when ) -import Data.Bits ( shiftL, shiftR, xor ) -import Data.Char ( ord ) -import Data.IORef ( newIORef, writeIORef, readIORef ) -import Data.List ( delete - , foldl' - , intersperse - , isPrefixOf - , groupBy ) -import Data.Maybe ( fromJust ) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid ( mempty, mappend ) -#endif -import Data.Word ( Word32 ) -import Numeric ( showHex ) -import System.Directory ( canonicalizePath - , createDirectory - , doesDirectoryExist - , doesFileExist - , getCurrentDirectory - , removeDirectoryRecursive - , removeFile - , renameDirectory ) -import System.FilePath ( (), equalFilePath - , getSearchPath - , searchPathSeparator - , splitSearchPath - , takeDirectory ) - --- --- * Constants --- - --- | The name of the sandbox subdirectory where we keep snapshots of add-source --- dependencies. 
-snapshotDirectoryName :: FilePath -snapshotDirectoryName = "snapshots" - --- | Non-standard build dir that is used for building add-source deps instead of --- "dist". Fixes surprising behaviour in some cases (see issue #1281). -sandboxBuildDir :: FilePath -> FilePath -sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" - where - sandboxDirHash = jenkins sandboxDir - - -- See http://en.wikipedia.org/wiki/Jenkins_hash_function - jenkins :: String -> Word32 - jenkins str = loop_finish $ foldl' loop 0 str - where - loop :: Word32 -> Char -> Word32 - loop hash key_i' = hash''' - where - key_i = toEnum . ord $ key_i' - hash' = hash + key_i - hash'' = hash' + (shiftL hash' 10) - hash''' = hash'' `xor` (shiftR hash'' 6) - - loop_finish :: Word32 -> Word32 - loop_finish hash = hash''' - where - hash' = hash + (shiftL hash 3) - hash'' = hash' `xor` (shiftR hash' 11) - hash''' = hash'' + (shiftL hash'' 15) - --- --- * Basic sandbox functions. --- - --- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the --- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to --- 'NoFlag'. -updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags -updateSandboxConfigFileFlag globalFlags = - case globalSandboxConfigFile globalFlags of - Flag _ -> return globalFlags - NoFlag -> do - f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" - return globalFlags { globalSandboxConfigFile = f' } - --- | Return the path to the sandbox config file - either the default or the one --- specified with @--sandbox-config-file@. -getSandboxConfigFilePath :: GlobalFlags -> IO FilePath -getSandboxConfigFilePath globalFlags = do - let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags - case sandboxConfigFileFlag of - NoFlag -> do pkgEnvDir <- getCurrentDirectory - return (pkgEnvDir sandboxPackageEnvironmentFile) - Flag path -> return path - --- | Load the @cabal.sandbox.config@ file (and possibly the optional --- @cabal.config@). In addition to a @PackageEnvironment@, also return a --- canonical path to the sandbox. Exit with error if the sandbox directory or --- the package environment file do not exist. -tryLoadSandboxConfig :: Verbosity -> GlobalFlags - -> IO (FilePath, PackageEnvironment) -tryLoadSandboxConfig verbosity globalFlags = do - path <- getSandboxConfigFilePath globalFlags - tryLoadSandboxPackageEnvironmentFile verbosity path - (globalConfigFile globalFlags) - --- | Return the name of the package index file for this package environment. -tryGetIndexFilePath :: SavedConfig -> IO FilePath -tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config) - --- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of --- 'SavedConfig'. -tryGetIndexFilePath' :: GlobalFlags -> IO FilePath -tryGetIndexFilePath' globalFlags = do - let paths = fromNubList $ globalLocalRepos globalFlags - case paths of - [] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ - "no local repos found. " ++ checkConfiguration - _ -> return $ (last paths) Index.defaultIndexFileName - where - checkConfiguration = "Please check your configuration ('" - ++ userPackageEnvironmentFile ++ "')." - --- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error --- message than just pattern-matching. 
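The per-sandbox build directory name above is derived from a Jenkins one-at-a-time hash of the sandbox path, rendered in hex. A standalone, runnable copy of that computation; the example path in main is ours.

    import Data.Bits (shiftL, shiftR, xor)
    import Data.Char (ord)
    import Data.List (foldl')
    import Data.Word (Word32)
    import Numeric (showHex)

    -- Jenkins one-at-a-time hash over the characters of a string.
    jenkins :: String -> Word32
    jenkins str = loopFinish (foldl' loop 0 str)
      where
        loop hash c = h3
          where
            k  = fromIntegral (ord c)
            h1 = hash + k
            h2 = h1 + shiftL h1 10
            h3 = h2 `xor` shiftR h2 6
        loopFinish hash = h3
          where
            h1 = hash + shiftL hash 3
            h2 = h1 `xor` shiftR h1 11
            h3 = h2 + shiftL h2 15

    -- Non-standard build dir used for add-source deps, keyed on the sandbox path.
    sandboxBuildDir :: FilePath -> FilePath
    sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex (jenkins sandboxDir) ""

    main :: IO ()
    main = putStrLn (sandboxBuildDir "/home/user/project/.cabal-sandbox")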
-getSandboxPackageDB :: ConfigFlags -> IO PackageDB -getSandboxPackageDB configFlags = do - case configPackageDBs configFlags of - [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB - -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? - - [] -> - die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt - [_] -> - die $ "Unexpected contents of the 'package-db' field. " - ++ sandboxConfigCorrupt - _ -> - die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt - - where - sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." - - --- | Which packages are installed in the sandbox package DB? -getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags - -> Compiler -> ProgramConfiguration - -> IO InstalledPackageIndex -getInstalledPackagesInSandbox verbosity configFlags comp conf = do - sandboxDB <- getSandboxPackageDB configFlags - getPackageDBContents verbosity comp sandboxDB conf - --- | Temporarily add $SANDBOX_DIR/bin to $PATH. -withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a -withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir - where - -- TODO: Instead of modifying the global process state, it'd be better to - -- set the environment individually for each subprocess invocation. This - -- will have to wait until the Shell monad is implemented; without it the - -- required changes are too intrusive. - addBinDir :: IO () - addBinDir = do - mbOldPath <- lookupEnv "PATH" - let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) - mbOldPath - setEnv "PATH" newPath - - rmBinDir :: IO () - rmBinDir = do - oldPath <- getSearchPath - let newPath = intercalate [searchPathSeparator] - (delete sandboxBin oldPath) - setEnv "PATH" newPath - - sandboxBin = sandboxDir "bin" - --- | Initialise a package DB for this compiler if it doesn't exist. -initPackageDBIfNeeded :: Verbosity -> ConfigFlags - -> Compiler -> ProgramConfiguration - -> IO () -initPackageDBIfNeeded verbosity configFlags comp conf = do - SpecificPackageDB dbPath <- getSandboxPackageDB configFlags - packageDBExists <- doesDirectoryExist dbPath - unless packageDBExists $ - Register.initPackageDB verbosity comp conf dbPath - when packageDBExists $ - debug verbosity $ "The package database already exists: " ++ dbPath - --- | Entry point for the 'cabal sandbox dump-pkgenv' command. -dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - commentPkgEnv <- commentPackageEnvironment sandboxDir - putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv - --- | Entry point for the 'cabal sandbox init' command. -sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -sandboxInit verbosity sandboxFlags globalFlags = do - -- Warn if there's a 'cabal-dev' sandbox. - isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") - (doesFileExist $ "cabal-dev" "cabal.config") - when isCabalDevSandbox $ - warn verbosity $ - "You are apparently using a legacy (cabal-dev) sandbox. " - ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " - ++ "You may want to delete the 'cabal-dev' directory to prevent issues." - - -- Create the sandbox directory. 
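withSandboxBinDirOnSearchPath above brackets an action, prepending $SANDBOX_DIR/bin to PATH and undoing the change afterwards. A minimal sketch of the same idea, using bracket to carry the saved value and restoring it directly rather than deleting the entry as the code above does; the helper name and example path are ours.

    import Control.Exception (bracket)
    import System.Environment (lookupEnv, setEnv, unsetEnv)
    import System.FilePath (searchPathSeparator)

    -- Run an action with an extra directory at the front of PATH, then restore
    -- the previous PATH (or unset it if it was not set before).
    withDirOnPath :: FilePath -> IO a -> IO a
    withDirOnPath dir act =
        bracket save restore (const act)
      where
        save = do
          old <- lookupEnv "PATH"
          setEnv "PATH" (maybe dir (\p -> dir ++ searchPathSeparator : p) old)
          return old
        restore = maybe (unsetEnv "PATH") (setEnv "PATH")

    main :: IO ()
    main = withDirOnPath "/home/user/project/.cabal-sandbox/bin" $
      lookupEnv "PATH" >>= print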
- let sandboxDir' = fromFlagOrDefault defaultSandboxLocation - (sandboxLocation sandboxFlags) - createDirectoryIfMissingVerbose verbosity True sandboxDir' - sandboxDir <- tryCanonicalizePath sandboxDir' - setFileHidden sandboxDir - - -- Determine which compiler to use (using the value from ~/.cabal/config). - userConfig <- loadConfig verbosity (globalConfigFile globalFlags) - (comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig) - - -- Create the package environment file. - pkgEnvFile <- getSandboxConfigFilePath globalFlags - createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform - (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - let config = pkgEnvSavedConfig pkgEnv - configFlags = savedConfigureFlags config - - -- Create the index file if it doesn't exist. - indexFile <- tryGetIndexFilePath config - indexFileExists <- doesFileExist indexFile - if indexFileExists - then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir - else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir - Index.createEmpty verbosity indexFile - - -- Create the package DB for the default compiler. - initPackageDBIfNeeded verbosity configFlags comp conf - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - --- | Entry point for the 'cabal sandbox delete' command. -sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -sandboxDelete verbosity _sandboxFlags globalFlags = do - (useSandbox, _) <- loadConfigOrSandboxConfig - verbosity - globalFlags { globalRequireSandbox = Flag False } - case useSandbox of - NoSandbox -> warn verbosity "Not in a sandbox." - UseSandbox sandboxDir -> do - curDir <- getCurrentDirectory - pkgEnvFile <- getSandboxConfigFilePath globalFlags - - -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard - -- location. - let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $ - curDir sandboxPackageEnvironmentFile - - if isNonDefaultConfigLocation - then warn verbosity $ "Sandbox config file is in non-default location: '" - ++ pkgEnvFile ++ "'.\n Please delete manually." - else removeFile pkgEnvFile - - -- Remove the sandbox directory, unless we're using a shared sandbox. - let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $ - curDir defaultSandboxLocation - - when isNonDefaultSandboxLocation $ - die $ "Non-default sandbox location used: '" ++ sandboxDir - ++ "'.\nAssuming a shared sandbox. Please delete '" - ++ sandboxDir ++ "' manually." - - absSandboxDir <- canonicalizePath sandboxDir - notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir - removeDirectoryRecursive absSandboxDir - - let - pathInsideSandbox = isPrefixOf absSandboxDir - - -- Warn the user if deleting the sandbox deleted a package database - -- referenced in the current environment. - checkPackagePaths var = do - let - checkPath path = do - absPath <- canonicalizePath path - (when (pathInsideSandbox absPath) . warn verbosity) - (var ++ " refers to package database " ++ path - ++ " inside the deleted sandbox.") - liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath - - checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH" - checkPackagePaths "GHC_PACKAGE_PATH" - checkPackagePaths "GHCJS_PACKAGE_PATH" - --- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. 
-doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment - -> BuildTreeRefType - -> IO () -doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do - let savedConfig = pkgEnvSavedConfig pkgEnv - indexFile <- tryGetIndexFilePath savedConfig - - -- If we're running 'sandbox add-source' for the first time for this compiler, - -- we need to create an initial timestamp record. - (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - - withAddTimestamps sandboxDir $ do - -- Path canonicalisation is done in addBuildTreeRefs, but we do it - -- twice because of the timestamps file. - buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs - Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType - return buildTreeRefs' - --- | Entry point for the 'cabal sandbox add-source' command. -sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags - -> IO () -sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - - if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) - then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv - else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef - --- | Entry point for the 'cabal sandbox add-source --snapshot' command. -sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath - -> PackageEnvironment - -> IO () -sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do - let snapshotDir = sandboxDir snapshotDirectoryName - - -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private - -- location. - createDirectoryIfMissingVerbose verbosity True snapshotDir - - -- Collect the package descriptions first, so that if some path does not refer - -- to a cabal package, we fail immediately. - pkgs <- forM buildTreeRefs $ \buildTreeRef -> - inDir (Just buildTreeRef) $ - return . flattenPackageDescription - =<< readPackageDescription verbosity - =<< defaultPackageDesc verbosity - - -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If - -- 'prepareTree' throws an error at any point, the old snapshots will still be - -- in consistent state. - tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> - inDir (Just buildTreeRef) $ do - let targetDir = snapshotDir (display . packageId $ pkg) - targetTmpDir = targetDir ++ "-tmp" - dirExists <- doesDirectoryExist targetTmpDir - when dirExists $ - removeDirectoryRecursive targetDir - createDirectory targetTmpDir - prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers - return (targetTmpDir, targetDir) - - -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to - -- "snapshots/$PKGNAME-$VERSION". - snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do - dirExists <- doesDirectoryExist targetDir - when dirExists $ - removeDirectoryRecursive targetDir - renameDirectory targetTmpDir targetDir - return targetDir - - -- Once the packages are copied, just 'add-source' them as usual. - doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef - --- | Entry point for the 'cabal sandbox delete-source' command. 
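sandboxAddSourceSnapshot above copies each source tree into "snapshots/$PKGNAME-$VERSION-tmp" and only renames it over the final name once copying has finished, so an interrupted copy never corrupts an existing snapshot. A standalone sketch of that stage-then-rename idea; snapshotInto and copyTree are invented names, and the real code uses prepareTree with the package description rather than a raw recursive copy.

import Control.Monad (forM_, when)
import System.Directory
         ( copyFile, createDirectoryIfMissing, doesDirectoryExist
         , listDirectory, removeDirectoryRecursive, renameDirectory )
import System.FilePath ((</>))

-- Copy @src@ to @destDir </> name@ via a "-tmp" staging directory and only
-- replace any existing snapshot once the copy has fully succeeded.
snapshotInto :: FilePath -> String -> FilePath -> IO ()
snapshotInto destDir name src = do
  let target  = destDir </> name
      staging = target ++ "-tmp"
  stale <- doesDirectoryExist staging
  when stale $ removeDirectoryRecursive staging
  copyTree src staging
  old <- doesDirectoryExist target
  when old $ removeDirectoryRecursive target
  renameDirectory staging target

-- Naive recursive copy; ignores symlinks and permissions.
copyTree :: FilePath -> FilePath -> IO ()
copyTree src dest = do
  createDirectoryIfMissing True dest
  entries <- listDirectory src
  forM_ entries $ \entry -> do
    let s = src  </> entry
        d = dest </> entry
    isDir <- doesDirectoryExist s
    if isDir then copyTree s d else copyFile s d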
-sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags - -> IO () -sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) - - (results, convDict) <- - Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs - - let (failedPaths, removedPaths) = partitionEithers results - removedRefs = fmap convDict removedPaths - - unless (null removedPaths) $ do - removeTimestamps sandboxDir removedPaths - - notice verbosity $ "Success deleting sources: " ++ - showL removedRefs ++ "\n\n" - - unless (null failedPaths) $ do - let groupedFailures = groupBy errorType failedPaths - mapM_ handleErrors groupedFailures - die $ "The sources with the above errors were skipped. (" ++ - showL (fmap getPath failedPaths) ++ ")" - - notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ - "source dependency, but does not remove the package " ++ - "from the sandbox package DB.\n\n" ++ - "Use 'sandbox hc-pkg -- unregister' to do that." - where - getPath (Index.ErrNonregisteredSource p) = p - getPath (Index.ErrNonexistentSource p) = p - - showPaths f = concat . intersperse " " . fmap (show . f) - - showL = showPaths id - - showE [] = return ' ' - showE errs = showPaths getPath errs - - errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} = - True - errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True - errorType _ _ = False - - handleErrors [] = return () - handleErrors errs@(Index.ErrNonregisteredSource{}:_) = - warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n") - handleErrors errs@(Index.ErrNonexistentSource{}:_) = - warn verbosity - ("Source directory not found for paths: " ++ showE errs ++ "\n" - ++ "If you are trying to delete a reference to a removed directory, " - ++ "please provide the full absolute path " - ++ "(as given by `sandbox list-sources`).\n\n") - --- | Entry point for the 'cabal sandbox list-sources' command. -sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags - -> IO () -sandboxListSources verbosity _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) - - refs <- Index.listBuildTreeRefs verbosity - Index.ListIgnored Index.LinksAndSnapshots indexFile - when (null refs) $ - notice verbosity $ "Index file '" ++ indexFile - ++ "' has no references to local build trees." - when (not . null $ refs) $ do - notice verbosity $ "Source dependencies registered " - ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" - mapM_ putStrLn refs - notice verbosity $ "\nTo unregister source dependencies, " - ++ "use the 'sandbox delete-source' command." - --- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ --- tool with provided arguments, restricted to the sandbox. -sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () -sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv - -- Invoke hc-pkg for the most recently configured compiler (if any), - -- using the right package-db for the compiler (see #1935). 
- (comp, platform, conf) <- getPersistOrConfigCompiler configFlags - let dir = sandboxPackageDBPath sandboxDir comp platform - dbStack = [GlobalPackageDB, SpecificPackageDB dir] - Register.invokeHcPkg verbosity comp conf dbStack extraArgs - -updateInstallDirs :: Flag Bool - -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) -updateInstallDirs userInstallFlag (useSandbox, savedConfig) = - case useSandbox of - NoSandbox -> - let savedConfig' = savedConfig { - savedConfigureFlags = configureFlags { - configInstallDirs = installDirs - } - } - in (useSandbox, savedConfig') - _ -> (useSandbox, savedConfig) - where - configureFlags = savedConfigureFlags savedConfig - userInstallDirs = savedUserInstallDirs savedConfig - globalInstallDirs = savedGlobalInstallDirs savedConfig - installDirs | userInstall = userInstallDirs - | otherwise = globalInstallDirs - userInstall = fromFlagOrDefault defaultUserInstall - (configUserInstall configureFlags `mappend` userInstallFlag) - --- | Check which type of package environment we're in and return a --- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates --- whether we're working in a sandbox. -loadConfigOrSandboxConfig :: Verbosity - -> GlobalFlags -- ^ For @--config-file@ and - -- @--sandbox-config-file@. - -> IO (UseSandbox, SavedConfig) -loadConfigOrSandboxConfig verbosity globalFlags = do - let configFileFlag = globalConfigFile globalFlags - sandboxConfigFileFlag = globalSandboxConfigFile globalFlags - ignoreSandboxFlag = globalIgnoreSandbox globalFlags - - pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag - pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag - ignoreSandboxFlag - case pkgEnvType of - -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. - SandboxPackageEnvironment -> do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - -- ^ Prints an error message and exits on error. - let config = pkgEnvSavedConfig pkgEnv - return (UseSandbox sandboxDir, config) - - -- Only @cabal.config@ is present. - UserPackageEnvironment -> do - config <- loadConfig verbosity configFileFlag - userConfig <- loadUserConfig verbosity pkgEnvDir Nothing - let config' = config `mappend` userConfig - dieIfSandboxRequired config' - return (NoSandbox, config') - - -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. - AmbientPackageEnvironment -> do - config <- loadConfig verbosity configFileFlag - let globalConstraintsOpt = - flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config - globalConstraintConfig <- - loadUserConfig verbosity pkgEnvDir globalConstraintsOpt - let config' = config `mappend` globalConstraintConfig - dieIfSandboxRequired config - return (NoSandbox, config') - - where - -- Return the path to the package environment directory - either the - -- current directory or the one that @--sandbox-config-file@ resides in. - getPkgEnvDir :: (Flag FilePath) -> IO FilePath - getPkgEnvDir sandboxConfigFileFlag = do - case sandboxConfigFileFlag of - NoFlag -> getCurrentDirectory - Flag path -> tryCanonicalizePath . takeDirectory $ path - - -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. - dieIfSandboxRequired :: SavedConfig -> IO () - dieIfSandboxRequired config = checkFlag flag - where - flag = (globalRequireSandbox . savedGlobalFlags $ config) - `mappend` (globalRequireSandbox globalFlags) - checkFlag (Flag True) = - die $ "'require-sandbox' is set to True, but no sandbox is present. 
" - ++ "Use '--no-require-sandbox' if you want to override " - ++ "'require-sandbox' temporarily." - checkFlag (Flag False) = return () - checkFlag (NoFlag) = return () - --- | Return the saved \"dist/\" prefix, or the default prefix. -findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath -findSavedDistPref config flagDistPref = do - let defDistPref = useDistPref defaultSetupScriptOptions - flagDistPref' = configDistPref (savedConfigureFlags config) - `mappend` flagDistPref - findDistPref defDistPref flagDistPref' - --- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do --- nothing. -maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a -maybeWithSandboxDirOnSearchPath NoSandbox act = act -maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = - withSandboxBinDirOnSearchPath sandboxDir $ act - --- | Had reinstallAddSourceDeps actually reinstalled any dependencies? -data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled - --- | Reinstall those add-source dependencies that have been modified since --- we've last installed them. Assumes that we're working inside a sandbox. -reinstallAddSourceDeps :: Verbosity - -> ConfigFlags -> ConfigExFlags - -> InstallFlags -> GlobalFlags - -> FilePath - -> IO WereDepsReinstalled -reinstallAddSourceDeps verbosity configFlags' configExFlags - installFlags globalFlags sandboxDir = topHandler' $ do - let sandboxDistPref = sandboxBuildDir sandboxDir - configFlags = configFlags' - { configDistPref = Flag sandboxDistPref } - haddockFlags = mempty - { haddockDistPref = Flag sandboxDistPref } - (comp, platform, conf) <- configCompilerAux' configFlags - retVal <- newIORef NoDepsReinstalled - - withSandboxPackageInfo verbosity configFlags globalFlags - comp platform conf sandboxDir $ \sandboxPkgInfo -> - unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do - - withRepoContext verbosity globalFlags $ \repoContext -> do - let args :: InstallArgs - args = ((configPackageDB' configFlags) - ,repoContext - ,comp, platform, conf - ,UseSandbox sandboxDir, Just sandboxPkgInfo - ,globalFlags, configFlags, configExFlags, installFlags - ,haddockFlags) - - -- This can actually be replaced by a call to 'install', but we use a - -- lower-level API because of layer separation reasons. Additionally, we - -- might want to use some lower-level features this in the future. - withSandboxBinDirOnSearchPath sandboxDir $ do - installContext <- makeInstallContext verbosity args Nothing - installPlan <- foldProgress logMsg die' return =<< - makeInstallPlan verbosity args installContext - - processInstallPlan verbosity args installContext installPlan - writeIORef retVal ReinstalledSomeDeps - - readIORef retVal - - where - die' message = die (message ++ installFailedInSandbox) - -- TODO: use a better error message, remove duplication. - installFailedInSandbox = - "Note: when using a sandbox, all packages are required to have " - ++ "consistent dependencies. Try reinstalling/unregistering the " - ++ "offending packages or recreating the sandbox." - logMsg message rest = debugNoWrap verbosity message >> rest - - topHandler' = topHandlerWith $ \_ -> do - warn verbosity "Couldn't reinstall some add-source dependencies." - -- Here we can't know whether any deps have been reinstalled, so we have - -- to be conservative. - return ReinstalledSomeDeps - --- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that --- we don't update the timestamp file here - this is done in --- 'postInstallActions'. 
-withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags - -> Compiler -> Platform -> ProgramConfiguration - -> FilePath - -> (SandboxPackageInfo -> IO ()) - -> IO () -withSandboxPackageInfo verbosity configFlags globalFlags - comp platform conf sandboxDir cont = do - -- List all add-source deps. - indexFile <- tryGetIndexFilePath' globalFlags - buildTreeRefs <- Index.listBuildTreeRefs verbosity - Index.DontListIgnored Index.OnlyLinks indexFile - let allAddSourceDepsSet = S.fromList buildTreeRefs - - -- List all packages installed in the sandbox. - installedPkgIndex <- getInstalledPackagesInSandbox verbosity - configFlags comp conf - let err = "Error reading sandbox package information." - -- Get the package descriptions for all add-source deps. - depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs - depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles - let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) - isInstalled pkgid = not . null - . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid - installedDepsMap = M.filter (isInstalled . packageId) depsMap - - -- Get the package ids of modified (and installed) add-source deps. - modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir - (compilerId comp) platform installedDepsMap - -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to - -- be a subset of the keys of 'depsMap'. - let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) - | modDepPath <- modifiedAddSourceDeps ] - modifiedDepsMap = M.fromList modifiedDeps - - assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) - if (null modifiedDeps) - then info verbosity $ "Found no modified add-source deps." - else notice verbosity $ "Some add-source dependencies have been modified. " - ++ "They will be reinstalled..." - - -- Get the package ids of the remaining add-source deps (some are possibly not - -- installed). - let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) - - -- Finally, assemble a 'SandboxPackageInfo'. - cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) - (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet - - where - toSourcePackage (path, pkgDesc) = SourcePackage - (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing - --- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the --- identity otherwise. -maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags - -> Compiler -> Platform -> ProgramConfiguration - -> UseSandbox - -> (Maybe SandboxPackageInfo -> IO ()) - -> IO () -maybeWithSandboxPackageInfo verbosity configFlags globalFlags - comp platform conf useSandbox cont = - case useSandbox of - NoSandbox -> cont Nothing - UseSandbox sandboxDir -> withSandboxPackageInfo verbosity - configFlags globalFlags - comp platform conf sandboxDir - (\spi -> cont (Just spi)) - --- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that --- case. -maybeReinstallAddSourceDeps :: Verbosity - -> Flag (Maybe Int) -- ^ The '-j' flag - -> ConfigFlags -- ^ Saved configure flags - -- (from dist/setup-config) - -> GlobalFlags - -> (UseSandbox, SavedConfig) - -> IO WereDepsReinstalled -maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' - globalFlags' (useSandbox, config) = do - case useSandbox of - NoSandbox -> return NoDepsReinstalled - UseSandbox sandboxDir -> do - -- Reinstall the modified add-source deps. 
- let configFlags = savedConfigureFlags config - `mappendSomeSavedFlags` - configFlags' - configExFlags = defaultConfigExFlags - `mappend` savedConfigureExFlags config - installFlags' = defaultInstallFlags - `mappend` savedInstallFlags config - installFlags = installFlags' { - installNumJobs = installNumJobs installFlags' - `mappend` numJobsFlag - } - globalFlags = savedGlobalFlags config - -- This makes it possible to override things like 'remote-repo-cache' - -- from the command line. These options are hidden, and are only - -- useful for debugging, so this should be fine. - `mappend` globalFlags' - reinstallAddSourceDeps - verbosity configFlags configExFlags - installFlags globalFlags sandboxDir - - where - - -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ - -- because we don't want to auto-enable things like 'library-profiling' for - -- all add-source dependencies even if the user has passed - -- '--enable-library-profiling' to 'cabal configure'. These options are - -- supposed to be set in 'cabal.config'. - mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags - mappendSomeSavedFlags sandboxConfigFlags savedFlags = - sandboxConfigFlags { - configHcFlavor = configHcFlavor sandboxConfigFlags - `mappend` configHcFlavor savedFlags, - configHcPath = configHcPath sandboxConfigFlags - `mappend` configHcPath savedFlags, - configHcPkg = configHcPkg sandboxConfigFlags - `mappend` configHcPkg savedFlags, - configProgramPaths = configProgramPaths sandboxConfigFlags - `mappend` configProgramPaths savedFlags, - configProgramArgs = configProgramArgs sandboxConfigFlags - `mappend` configProgramArgs savedFlags, - -- NOTE: Unconditionally choosing the value from - -- 'dist/setup-config'. Sandbox package DB location may have been - -- changed by 'configure -w'. - configPackageDBs = configPackageDBs savedFlags - -- FIXME: Is this compatible with the 'inherit' feature? - } - --- --- Utils (transitionary) --- --- FIXME: configPackageDB' and configCompilerAux' don't really belong in this --- module --- - -configPackageDB' :: ConfigFlags -> PackageDBStack -configPackageDB' cfg = - interpretPackageDbFlags userInstall (configPackageDBs cfg) - where - userInstall = fromFlagOrDefault True (configUserInstall cfg) - -configCompilerAux' :: ConfigFlags - -> IO (Compiler, Platform, ProgramConfiguration) -configCompilerAux' configFlags = - configCompilerAuxEx configFlags - --FIXME: make configCompilerAux use a sensible verbosity - { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } - --- | Try to read the most recently configured compiler from the --- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it --- cannot be read. 
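The NOTE above explains why mappendSomeSavedFlags merges only a handful of fields (compiler choice, program paths and arguments) instead of mappend-ing the whole saved ConfigFlags record: blanket merging would silently enable options such as library profiling for every add-source dependency. A self-contained sketch of the same idea, with a minimal stand-in for Cabal's Flag type and invented field names (cfgHcPath, cfgProfiling).

-- Minimal stand-in for Cabal's Flag: the rightmost value that was actually
-- set wins, and NoFlag is the identity.
data Flag a = NoFlag | Flag a
  deriving (Show, Eq)

instance Semigroup (Flag a) where
  f <> NoFlag = f
  _ <> g      = g

instance Monoid (Flag a) where
  mempty  = NoFlag
  mappend = (<>)

data Cfg = Cfg
  { cfgHcPath    :: Flag FilePath
  , cfgProfiling :: Flag Bool
  } deriving Show

-- Merge only the compiler path; profiling keeps the sandbox-side value, so a
-- user's --enable-library-profiling does not leak into add-source deps.
mergeSome :: Cfg -> Cfg -> Cfg
mergeSome sandbox saved =
  sandbox { cfgHcPath = cfgHcPath sandbox <> cfgHcPath saved }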
-getPersistOrConfigCompiler :: ConfigFlags - -> IO (Compiler, Platform, ProgramConfiguration) -getPersistOrConfigCompiler configFlags = do - distPref <- findDistPrefOrDefault (configDistPref configFlags) - mlbi <- maybeGetPersistBuildConfig distPref - case mlbi of - Nothing -> do configCompilerAux' configFlags - Just lbi -> return ( LocalBuildInfo.compiler lbi - , LocalBuildInfo.hostPlatform lbi - , LocalBuildInfo.withPrograms lbi - ) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Security/HTTP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Security/HTTP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Security/HTTP.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Security/HTTP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE StandaloneDeriving #-} --- | Implementation of 'HttpLib' using cabal-install's own 'HttpTransport' -module Distribution.Client.Security.HTTP (HttpLib, transportAdapter) where - --- stdlibs -import Control.Exception - ( Exception(..), IOException ) -import Data.List - ( intercalate ) -import Data.Typeable - ( Typeable ) -import System.Directory - ( getTemporaryDirectory ) -import Network.URI - ( URI ) -import qualified Data.ByteString.Lazy as BS.L -import qualified Network.HTTP as HTTP - --- Cabal/cabal-install -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Client.HttpUtils - ( HttpTransport(..), HttpCode ) -import Distribution.Client.Utils - ( withTempFileName ) - --- hackage-security -import Hackage.Security.Client -import Hackage.Security.Client.Repository.HttpLib -import Hackage.Security.Util.Checked -import Hackage.Security.Util.Pretty -import qualified Hackage.Security.Util.Lens as Lens - -{------------------------------------------------------------------------------- - 'HttpLib' implementation --------------------------------------------------------------------------------} - --- | Translate from hackage-security's 'HttpLib' to cabal-install's 'HttpTransport' --- --- NOTE: The match between these two APIs is currently not perfect: --- --- * We don't get any response headers back from the 'HttpTransport', so we --- don't know if the server supports range requests. For now we optimistically --- assume that it does. --- * The 'HttpTransport' wants to know where to place the resulting file, --- whereas the 'HttpLib' expects an 'IO' action which streams the download; --- the security library then makes sure that the file gets written to a --- location which is suitable (in particular, to a temporary file in the --- directory where the file needs to end up, so that it can "finalize" the --- file simply by doing 'renameFile'). Right now we write the file to a --- temporary file in the system temp directory here and then read it again --- to pass it to the security library; this is a problem for two reasons: it --- is a source of inefficiency; and it means that the security library cannot --- insist on a minimum download rate (potential security attack). --- Fixing it however would require changing the 'HttpTransport'. 
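The NOTE above describes the adapter's central workaround: the HttpTransport wants a destination file, while hackage-security's HttpLib wants to stream the body, so the body is buffered through a temporary file first. A standalone sketch of that bridge; withDownloadedBody is an invented name, the downloader argument stands in for the transport's getHttp, and a strict read keeps the temp file safely deletable on all platforms.

import Control.Exception (bracket)
import qualified Data.ByteString as BS
import System.Directory (getTemporaryDirectory, removeFile)
import System.IO (hClose, openTempFile)

-- Run a downloader that insists on writing to a file, then hand the body to
-- a consumer that wants it in memory; the temp file is always cleaned up.
withDownloadedBody :: (FilePath -> IO ()) -> (BS.ByteString -> IO a) -> IO a
withDownloadedBody download consume = do
  tmpDir <- getTemporaryDirectory
  bracket
    (do (path, h) <- openTempFile tmpDir "download.tmp"
        hClose h                       -- only the path is needed
        return path)
    removeFile
    (\path -> do
        download path                  -- e.g. the transport writing its result
        body <- BS.readFile path       -- strict: safe to delete afterwards
        consume body)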
-transportAdapter :: Verbosity -> IO HttpTransport -> HttpLib -transportAdapter verbosity getTransport = HttpLib{ - httpGet = \headers uri callback -> do - transport <- getTransport - get verbosity transport headers uri callback - , httpGetRange = \headers uri range callback -> do - transport <- getTransport - getRange verbosity transport headers uri range callback - } - -get :: Throws SomeRemoteError - => Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI - -> ([HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -get verbosity transport reqHeaders uri callback = wrapCustomEx $ do - get' verbosity transport reqHeaders uri Nothing $ \code respHeaders br -> - case code of - 200 -> callback respHeaders br - _ -> throwChecked $ UnexpectedResponse uri code - -getRange :: Throws SomeRemoteError - => Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI -> (Int, Int) - -> (HttpStatus -> [HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -getRange verbosity transport reqHeaders uri range callback = wrapCustomEx $ do - get' verbosity transport reqHeaders uri (Just range) $ \code respHeaders br -> - case code of - 200 -> callback HttpStatus200OK respHeaders br - 206 -> callback HttpStatus206PartialContent respHeaders br - _ -> throwChecked $ UnexpectedResponse uri code - --- | Internal generalization of 'get' and 'getRange' -get' :: Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI -> Maybe (Int, Int) - -> (HttpCode -> [HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -get' verbosity transport reqHeaders uri mRange callback = do - tempDir <- getTemporaryDirectory - withTempFileName tempDir "transportAdapterGet" $ \temp -> do - (code, _etag) <- getHttp transport verbosity uri Nothing temp reqHeaders' - br <- bodyReaderFromBS =<< BS.L.readFile temp - callback code [HttpResponseAcceptRangesBytes] br - where - reqHeaders' = mkReqHeaders reqHeaders mRange - -{------------------------------------------------------------------------------- - Request headers --------------------------------------------------------------------------------} - -mkRangeHeader :: Int -> Int -> HTTP.Header -mkRangeHeader from to = HTTP.Header HTTP.HdrRange rangeHeader - where - -- Content-Range header uses inclusive rather than exclusive bounds - -- See - rangeHeader = "bytes=" ++ show from ++ "-" ++ show (to - 1) - -mkReqHeaders :: [HttpRequestHeader] -> Maybe (Int, Int) -> [HTTP.Header] -mkReqHeaders reqHeaders mRange = concat [ - tr [] reqHeaders - , [mkRangeHeader fr to | Just (fr, to) <- [mRange]] - ] - where - tr :: [(HTTP.HeaderName, [String])] -> [HttpRequestHeader] -> [HTTP.Header] - tr acc [] = - concatMap finalize acc - tr acc (HttpRequestMaxAge0:os) = - tr (insert HTTP.HdrCacheControl ["max-age=0"] acc) os - tr acc (HttpRequestNoTransform:os) = - tr (insert HTTP.HdrCacheControl ["no-transform"] acc) os - - -- Some headers are comma-separated, others need multiple headers for - -- multiple options. - -- - -- TODO: Right we we just comma-separate all of them. 
- finalize :: (HTTP.HeaderName, [String]) -> [HTTP.Header] - finalize (name, strs) = [HTTP.Header name (intercalate ", " (reverse strs))] - - insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] - insert x y = Lens.modify (Lens.lookupM x) (++ y) - -{------------------------------------------------------------------------------- - Custom exceptions --------------------------------------------------------------------------------} - -data UnexpectedResponse = UnexpectedResponse URI Int - deriving (Typeable) - -instance Pretty UnexpectedResponse where - pretty (UnexpectedResponse uri code) = "Unexpected response " ++ show code - ++ "for " ++ show uri - -#if MIN_VERSION_base(4,8,0) -deriving instance Show UnexpectedResponse -instance Exception UnexpectedResponse where displayException = pretty -#else -instance Show UnexpectedResponse where show = pretty -instance Exception UnexpectedResponse -#endif - -wrapCustomEx :: ( ( Throws UnexpectedResponse - , Throws IOException - ) => IO a) - -> (Throws SomeRemoteError => IO a) -wrapCustomEx act = handleChecked (\(ex :: UnexpectedResponse) -> go ex) - $ handleChecked (\(ex :: IOException) -> go ex) - $ act - where - go ex = throwChecked (SomeRemoteError ex) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2192 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Setup --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- ------------------------------------------------------------------------------ -module Distribution.Client.Setup - ( globalCommand, GlobalFlags(..), defaultGlobalFlags - , RepoContext(..), withRepoContext - , configureCommand, ConfigFlags(..), filterConfigureFlags - , configureExCommand, ConfigExFlags(..), defaultConfigExFlags - , configureExOptions - , buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) - , replCommand, testCommand, benchmarkCommand - , installCommand, InstallFlags(..), installOptions, defaultInstallFlags - , defaultSolver, defaultMaxBackjumps - , listCommand, ListFlags(..) - , updateCommand - , upgradeCommand - , uninstallCommand - , infoCommand, InfoFlags(..) - , fetchCommand, FetchFlags(..) - , freezeCommand, FreezeFlags(..) - , genBoundsCommand - , getCommand, unpackCommand, GetFlags(..) - , checkCommand - , formatCommand - , uploadCommand, UploadFlags(..) - , reportCommand, ReportFlags(..) - , runCommand - , initCommand, IT.InitFlags(..) - , sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) - , win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..) - , actAsSetupCommand, ActAsSetupFlags(..) - , sandboxCommand, defaultSandboxLocation, SandboxFlags(..) - , execCommand, ExecFlags(..) - , userConfigCommand, UserConfigFlags(..) 
- , manpageCommand - - , parsePackageArgs - --TODO: stop exporting these: - , showRepo - , parseRepo - , readRepo - ) where - -import Distribution.Client.Types - ( Username(..), Password(..), RemoteRepo(..) ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) -import Distribution.Client.Dependency.Types - ( PreSolver(..), ConstraintSource(..) ) -import qualified Distribution.Client.Init.Types as IT - ( InitFlags(..), PackageType(..) ) -import Distribution.Client.Targets - ( UserConstraint, readUserConstraint ) -import Distribution.Utils.NubList - ( NubList, toNubList, fromNubList) - - -import Distribution.Simple.Compiler (PackageDB) -import Distribution.Simple.Program - ( defaultProgramConfiguration ) -import Distribution.Simple.Command hiding (boolOpt, boolOpt') -import qualified Distribution.Simple.Command as Command -import Distribution.Simple.Configure ( computeEffectiveProfiling ) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Setup - ( ConfigFlags(..), BuildFlags(..), ReplFlags - , TestFlags(..), BenchmarkFlags(..) - , SDistFlags(..), HaddockFlags(..) - , readPackageDbList, showPackageDbList - , Flag(..), toFlag, flagToMaybe, flagToList - , optionVerbosity, boolOpt, boolOpt', trueArg, falseArg - , readPToMaybe, optionNumJobs ) -import Distribution.Simple.InstallDirs - ( PathTemplate, InstallDirs(dynlibdir, sysconfdir) - , toPathTemplate, fromPathTemplate ) -import Distribution.Version - ( Version(Version), anyVersion, thisVersion ) -import Distribution.Package - ( PackageIdentifier, packageName, packageVersion, Dependency(..) ) -import Distribution.PackageDescription - ( BuildType(..), RepoKind(..) ) -import Distribution.Text - ( Text(..), display ) -import Distribution.ReadE - ( ReadE(..), readP_to_E, succeedReadE ) -import qualified Distribution.Compat.ReadP as Parse - ( ReadP, char, munch1, pfail, (+++) ) -import Distribution.Compat.Semigroup -import Distribution.Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Utils - ( wrapText, wrapLine ) -import Distribution.Client.GlobalFlags - ( GlobalFlags(..), defaultGlobalFlags - , RepoContext(..), withRepoContext - ) - -import Data.Char - ( isAlphaNum ) -import Data.List - ( intercalate, deleteFirstsBy ) -import Data.Maybe - ( maybeToList, fromMaybe ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary) -import Control.Monad - ( liftM ) -import System.FilePath - ( () ) -import Network.URI - ( parseAbsoluteURI, uriToString ) - -globalCommand :: [Command action] -> CommandUI GlobalFlags -globalCommand commands = CommandUI { - commandName = "", - commandSynopsis = - "Command line interface to the Haskell Cabal infrastructure.", - commandUsage = \pname -> - "See http://www.haskell.org/cabal/ for more information.\n" - ++ "\n" - ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", - commandDescription = Just $ \pname -> - let - commands' = commands ++ [commandAddAction helpCommandUI undefined] - cmdDescs = getNormalCommandDescriptions commands' - -- if new commands are added, we want them to appear even if they - -- are not included in the custom listing below. Thus, we calculate - -- the `otherCmds` list and append it under the `other` category. - -- Alternatively, a new testcase could be added that ensures that - -- the set of commands listed here is equal to the set of commands - -- that are actually available. 
- otherCmds = deleteFirstsBy (==) (map fst cmdDescs) - [ "help" - , "update" - , "install" - , "fetch" - , "list" - , "info" - , "user-config" - , "get" - , "init" - , "configure" - , "build" - , "clean" - , "run" - , "repl" - , "test" - , "bench" - , "check" - , "sdist" - , "upload" - , "report" - , "freeze" - , "gen-bounds" - , "haddock" - , "hscolour" - , "copy" - , "register" - , "sandbox" - , "exec" - ] - maxlen = maximum $ [length name | (name, _) <- cmdDescs] - align str = str ++ replicate (maxlen - length str) ' ' - startGroup n = " ["++n++"]" - par = "" - addCmd n = case lookup n cmdDescs of - Nothing -> "" - Just d -> " " ++ align n ++ " " ++ d - addCmdCustom n d = case lookup n cmdDescs of -- make sure that the - -- command still exists. - Nothing -> "" - Just _ -> " " ++ align n ++ " " ++ d - in - "Commands:\n" - ++ unlines ( - [ startGroup "global" - , addCmd "update" - , addCmd "install" - , par - , addCmd "help" - , addCmd "info" - , addCmd "list" - , addCmd "fetch" - , addCmd "user-config" - , par - , startGroup "package" - , addCmd "get" - , addCmd "init" - , par - , addCmd "configure" - , addCmd "build" - , addCmd "clean" - , par - , addCmd "run" - , addCmd "repl" - , addCmd "test" - , addCmd "bench" - , par - , addCmd "check" - , addCmd "sdist" - , addCmd "upload" - , addCmd "report" - , par - , addCmd "freeze" - , addCmd "gen-bounds" - , addCmd "haddock" - , addCmd "hscolour" - , addCmd "copy" - , addCmd "register" - , par - , startGroup "sandbox" - , addCmd "sandbox" - , addCmd "exec" - , addCmdCustom "repl" "Open interpreter with access to sandbox packages." - ] ++ if null otherCmds then [] else par - :startGroup "other" - :[addCmd n | n <- otherCmds]) - ++ "\n" - ++ "For more information about a command use:\n" - ++ " " ++ pname ++ " COMMAND --help\n" - ++ "or " ++ pname ++ " help COMMAND\n" - ++ "\n" - ++ "To install Cabal packages from hackage use:\n" - ++ " " ++ pname ++ " install foo [--dry-run]\n" - ++ "\n" - ++ "Occasionally you need to update the list of available packages:\n" - ++ " " ++ pname ++ " update\n", - commandNotes = Nothing, - commandDefaultFlags = mempty, - commandOptions = args - } - where - args :: ShowOrParseArgs -> [OptionField GlobalFlags] - args ShowArgs = argsShown - args ParseArgs = argsShown ++ argsNotShown - - -- arguments we want to show in the help - argsShown :: [OptionField GlobalFlags] - argsShown = [ - option ['V'] ["version"] - "Print version information" - globalVersion (\v flags -> flags { globalVersion = v }) - trueArg - - ,option [] ["numeric-version"] - "Print just the version number" - globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) - trueArg - - ,option [] ["config-file"] - "Set an alternate location for the config file" - globalConfigFile (\v flags -> flags { globalConfigFile = v }) - (reqArgFlag "FILE") - - ,option [] ["sandbox-config-file"] - "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')" - globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v }) - (reqArgFlag "FILE") - - ,option [] ["default-user-config"] - "Set a location for a cabal.config file for projects without their own cabal.config freeze file." 
- globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v}) - (reqArgFlag "FILE") - - ,option [] ["require-sandbox"] - "requiring the presence of a sandbox for sandbox-aware commands" - globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v }) - (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"])) - - ,option [] ["ignore-sandbox"] - "Ignore any existing sandbox" - globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v }) - trueArg - - ,option [] ["ignore-expiry"] - "Ignore expiry dates on signed metadata (use only in exceptional circumstances)" - globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v }) - trueArg - - ,option [] ["http-transport"] - "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')" - globalHttpTransport (\v flags -> flags { globalHttpTransport = v }) - (reqArgFlag "HttpTransport") - ] - - -- arguments we don't want shown in the help - argsNotShown :: [OptionField GlobalFlags] - argsNotShown = [ - option [] ["remote-repo"] - "The name and url for a remote repository" - globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) - (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . fromNubList)) - - ,option [] ["remote-repo-cache"] - "The location where downloads from all remote repos are cached" - globalCacheDir (\v flags -> flags { globalCacheDir = v }) - (reqArgFlag "DIR") - - ,option [] ["local-repo"] - "The location of a local repository" - globalLocalRepos (\v flags -> flags { globalLocalRepos = v }) - (reqArg' "DIR" (\x -> toNubList [x]) fromNubList) - - ,option [] ["logs-dir"] - "The location to put log files" - globalLogsDir (\v flags -> flags { globalLogsDir = v }) - (reqArgFlag "DIR") - - ,option [] ["world-file"] - "The location of the world file" - globalWorldFile (\v flags -> flags { globalWorldFile = v }) - (reqArgFlag "FILE") - ] - --- ------------------------------------------------------------ --- * Config flags --- ------------------------------------------------------------ - -configureCommand :: CommandUI ConfigFlags -configureCommand = c - { commandDefaultFlags = mempty - , commandNotes = Just $ \pname -> (case commandNotes c of - Nothing -> "" - Just n -> n pname ++ "\n") - ++ "Examples:\n" - ++ " " ++ pname ++ " configure\n" - ++ " Configure with defaults;\n" - ++ " " ++ pname ++ " configure --enable-tests -fcustomflag\n" - ++ " Configure building package including tests,\n" - ++ " with some package-specific flag.\n" - } - where - c = Cabal.configureCommand defaultProgramConfiguration - -configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] -configureOptions = commandOptions configureCommand - -filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags -filterConfigureFlags flags cabalLibVersion - | cabalLibVersion >= Version [1,24,1] [] = flags_latest - -- ^ NB: we expect the latest version to be the most common case. 
- | cabalLibVersion < Version [1,3,10] [] = flags_1_3_10 - | cabalLibVersion < Version [1,10,0] [] = flags_1_10_0 - | cabalLibVersion < Version [1,12,0] [] = flags_1_12_0 - | cabalLibVersion < Version [1,14,0] [] = flags_1_14_0 - | cabalLibVersion < Version [1,18,0] [] = flags_1_18_0 - | cabalLibVersion < Version [1,19,1] [] = flags_1_19_0 - | cabalLibVersion < Version [1,19,2] [] = flags_1_19_1 - | cabalLibVersion < Version [1,21,1] [] = flags_1_20_0 - | cabalLibVersion < Version [1,22,0] [] = flags_1_21_0 - | cabalLibVersion < Version [1,23,0] [] = flags_1_22_0 - | cabalLibVersion < Version [1,24,1] [] = flags_1_24_0 - | otherwise = flags_latest - where - (profEnabledLib, profEnabledExe) = computeEffectiveProfiling flags - flags_latest = flags { - -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. - configConstraints = [], - -- Passing '--allow-newer' to Setup.hs is unnecessary, we use - -- '--exact-configuration' instead. - configAllowNewer = Just Cabal.AllowNewerNone - } - - -- Cabal < 1.24.1 doesn't know about --dynlibdir. - flags_1_24_0 = flags_latest { configInstallDirs = configInstallDirs_1_24_0} - configInstallDirs_1_24_0 = (configInstallDirs flags) { dynlibdir = NoFlag } - - -- Cabal < 1.23 doesn't know about '--profiling-detail'. - -- Cabal < 1.23 has a hacked up version of 'enable-profiling' - -- which we shouldn't use. - flags_1_22_0 = flags_1_24_0 { configProfDetail = NoFlag - , configProfLibDetail = NoFlag - , configIPID = NoFlag - , configProf = NoFlag - , configProfExe = Flag profEnabledExe - , configProfLib = Flag profEnabledLib - } - - -- Cabal < 1.22 doesn't know about '--disable-debug-info'. - flags_1_21_0 = flags_1_22_0 { configDebugInfo = NoFlag } - - -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' - -- Cabal < 1.21.1 doesn't know about 'enable-profiling' - -- (but we already dealt with it in flags_1_22_0) - flags_1_20_0 = - flags_1_21_0 { configRelocatable = NoFlag - , configCoverage = NoFlag - , configLibCoverage = configCoverage flags - } - -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and - -- '--enable-library-stripping'. - flags_1_19_1 = flags_1_20_0 { configExactConfiguration = NoFlag - , configStripLibs = NoFlag } - -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. - flags_1_19_0 = flags_1_19_1 { configDependencies = [] - , configConstraints = configConstraints flags } - -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. - flags_1_18_0 = flags_1_19_0 { configProgramPathExtra = toNubList [] - , configInstallDirs = configInstallDirs_1_18_0} - configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_0) { sysconfdir = NoFlag } - -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. - flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } - -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic' - -- and '--enable/disable-library-coverage'. - flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag - , configDynExe = NoFlag } - -- Cabal < 1.10.0 doesn't know about '--disable-tests'. - flags_1_10_0 = flags_1_12_0 { configTests = NoFlag } - -- Cabal < 1.3.10 does not grok the '--constraints' flag. 
- flags_1_3_10 = flags_1_10_0 { configConstraints = [] } - --- ------------------------------------------------------------ --- * Config extra flags --- ------------------------------------------------------------ - --- | cabal configure takes some extra flags beyond runghc Setup configure --- -data ConfigExFlags = ConfigExFlags { - configCabalVersion :: Flag Version, - configExConstraints:: [(UserConstraint, ConstraintSource)], - configPreferences :: [Dependency], - configSolver :: Flag PreSolver - } - deriving (Eq, Generic) - -defaultConfigExFlags :: ConfigExFlags -defaultConfigExFlags = mempty { configSolver = Flag defaultSolver } - -configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) -configureExCommand = configureCommand { - commandDefaultFlags = (mempty, defaultConfigExFlags), - commandOptions = \showOrParseArgs -> - liftOptions fst setFst - (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) - . optionName) $ configureOptions showOrParseArgs) - ++ liftOptions snd setSnd - (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - -configureExOptions :: ShowOrParseArgs - -> ConstraintSource - -> [OptionField ConfigExFlags] -configureExOptions _showOrParseArgs src = - [ option [] ["cabal-lib-version"] - ("Select which version of the Cabal lib to use to build packages " - ++ "(useful for testing).") - configCabalVersion (\v flags -> flags { configCabalVersion = v }) - (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++) - (fmap toFlag parse)) - (map display . flagToList)) - , option [] ["constraint"] - "Specify constraints on a package (version, installed/source, flags)" - configExConstraints (\v flags -> flags { configExConstraints = v }) - (reqArg "CONSTRAINT" - ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint) - (map $ display . 
fst)) - - , option [] ["preference"] - "Specify preferences (soft constraints) on the version of a package" - configPreferences (\v flags -> flags { configPreferences = v }) - (reqArg "CONSTRAINT" - (readP_to_E (const "dependency expected") - (fmap (\x -> [x]) parse)) - (map display)) - - , optionSolver configSolver (\v flags -> flags { configSolver = v }) - - ] - -instance Monoid ConfigExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ConfigExFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Build flags --- ------------------------------------------------------------ - -data SkipAddSourceDepsCheck = - SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck - deriving Eq - -data BuildExFlags = BuildExFlags { - buildOnly :: Flag SkipAddSourceDepsCheck -} deriving Generic - -buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags] -buildExOptions _showOrParseArgs = - option [] ["only"] - "Don't reinstall add-source dependencies (sandbox-only)" - buildOnly (\v flags -> flags { buildOnly = v }) - (noArg (Flag SkipAddSourceDepsCheck)) - - : [] - -buildCommand :: CommandUI (BuildFlags, BuildExFlags) -buildCommand = parent { - commandDefaultFlags = (commandDefaultFlags parent, mempty), - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd (buildExOptions showOrParseArgs) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.buildCommand defaultProgramConfiguration - -instance Monoid BuildExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup BuildExFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Repl command --- ------------------------------------------------------------ - -replCommand :: CommandUI (ReplFlags, BuildExFlags) -replCommand = parent { - commandDefaultFlags = (commandDefaultFlags parent, mempty), - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd (buildExOptions showOrParseArgs) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.replCommand defaultProgramConfiguration - --- ------------------------------------------------------------ --- * Test command --- ------------------------------------------------------------ - -testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags) -testCommand = parent { - commandDefaultFlags = (commandDefaultFlags parent, - Cabal.defaultBuildFlags, mempty), - commandOptions = - \showOrParseArgs -> liftOptions get1 set1 - (commandOptions parent showOrParseArgs) - ++ - liftOptions get2 set2 - (Cabal.buildOptions progConf showOrParseArgs) - ++ - liftOptions get3 set3 (buildExOptions showOrParseArgs) - } - where - get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) - get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) - get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) - - parent = Cabal.testCommand - progConf = defaultProgramConfiguration - --- ------------------------------------------------------------ --- * Bench command --- ------------------------------------------------------------ - -benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags) -benchmarkCommand = parent { - commandDefaultFlags = (commandDefaultFlags parent, - Cabal.defaultBuildFlags, mempty), - commandOptions = - \showOrParseArgs -> liftOptions get1 set1 - (commandOptions parent showOrParseArgs) - ++ - liftOptions get2 set2 
- (Cabal.buildOptions progConf showOrParseArgs) - ++ - liftOptions get3 set3 (buildExOptions showOrParseArgs) - } - where - get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) - get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) - get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) - - parent = Cabal.benchmarkCommand - progConf = defaultProgramConfiguration - --- ------------------------------------------------------------ --- * Fetch command --- ------------------------------------------------------------ - -data FetchFlags = FetchFlags { --- fetchOutput :: Flag FilePath, - fetchDeps :: Flag Bool, - fetchDryRun :: Flag Bool, - fetchSolver :: Flag PreSolver, - fetchMaxBackjumps :: Flag Int, - fetchReorderGoals :: Flag Bool, - fetchIndependentGoals :: Flag Bool, - fetchShadowPkgs :: Flag Bool, - fetchStrongFlags :: Flag Bool, - fetchVerbosity :: Flag Verbosity - } - -defaultFetchFlags :: FetchFlags -defaultFetchFlags = FetchFlags { --- fetchOutput = mempty, - fetchDeps = toFlag True, - fetchDryRun = toFlag False, - fetchSolver = Flag defaultSolver, - fetchMaxBackjumps = Flag defaultMaxBackjumps, - fetchReorderGoals = Flag False, - fetchIndependentGoals = Flag False, - fetchShadowPkgs = Flag False, - fetchStrongFlags = Flag False, - fetchVerbosity = toFlag normal - } - -fetchCommand :: CommandUI FetchFlags -fetchCommand = CommandUI { - commandName = "fetch", - commandSynopsis = "Downloads packages for later installation.", - commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" - ], - commandDescription = Just $ \_ -> - "Note that it currently is not possible to fetch the dependencies for a\n" - ++ "package in the current directory.\n", - commandNotes = Nothing, - commandDefaultFlags = defaultFetchFlags, - commandOptions = \ showOrParseArgs -> [ - optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) - --- , option "o" ["output"] --- "Put the package(s) somewhere specific rather than the usual cache." --- fetchOutput (\v flags -> flags { fetchOutput = v }) --- (reqArgFlag "PATH") - - , option [] ["dependencies", "deps"] - "Resolve and fetch dependencies (default)" - fetchDeps (\v flags -> flags { fetchDeps = v }) - trueArg - - , option [] ["no-dependencies", "no-deps"] - "Ignore dependencies" - fetchDeps (\v flags -> flags { fetchDeps = v }) - falseArg - - , option [] ["dry-run"] - "Do not install anything, only print what would be installed." 
- fetchDryRun (\v flags -> flags { fetchDryRun = v }) - trueArg - - ] ++ - - optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : - optionSolverFlags showOrParseArgs - fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) - fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) - fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) - fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) - fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) - - } - --- ------------------------------------------------------------ --- * Freeze command --- ------------------------------------------------------------ - -data FreezeFlags = FreezeFlags { - freezeDryRun :: Flag Bool, - freezeTests :: Flag Bool, - freezeBenchmarks :: Flag Bool, - freezeSolver :: Flag PreSolver, - freezeMaxBackjumps :: Flag Int, - freezeReorderGoals :: Flag Bool, - freezeIndependentGoals :: Flag Bool, - freezeShadowPkgs :: Flag Bool, - freezeStrongFlags :: Flag Bool, - freezeVerbosity :: Flag Verbosity - } - -defaultFreezeFlags :: FreezeFlags -defaultFreezeFlags = FreezeFlags { - freezeDryRun = toFlag False, - freezeTests = toFlag False, - freezeBenchmarks = toFlag False, - freezeSolver = Flag defaultSolver, - freezeMaxBackjumps = Flag defaultMaxBackjumps, - freezeReorderGoals = Flag False, - freezeIndependentGoals = Flag False, - freezeShadowPkgs = Flag False, - freezeStrongFlags = Flag False, - freezeVerbosity = toFlag normal - } - -freezeCommand :: CommandUI FreezeFlags -freezeCommand = CommandUI { - commandName = "freeze", - commandSynopsis = "Freeze dependencies.", - commandDescription = Just $ \_ -> wrapText $ - "Calculates a valid set of dependencies and their exact versions. " - ++ "If successful, saves the result to the file `cabal.config`.\n" - ++ "\n" - ++ "The package versions specified in `cabal.config` will be used for " - ++ "any future installs.\n" - ++ "\n" - ++ "An existing `cabal.config` is ignored and overwritten.\n", - commandNotes = Nothing, - commandUsage = usageFlags "freeze", - commandDefaultFlags = defaultFreezeFlags, - commandOptions = \ showOrParseArgs -> [ - optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) - - , option [] ["dry-run"] - "Do not freeze anything, only print what would be frozen" - freezeDryRun (\v flags -> flags { freezeDryRun = v }) - trueArg - - , option [] ["tests"] - "freezing of the dependencies of any tests suites in the package description file." - freezeTests (\v flags -> flags { freezeTests = v }) - (boolOpt [] []) - - , option [] ["benchmarks"] - "freezing of the dependencies of any benchmarks suites in the package description file." - freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) - (boolOpt [] []) - - ] ++ - - optionSolver freezeSolver (\v flags -> flags { freezeSolver = v }) : - optionSolverFlags showOrParseArgs - freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) - freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) - freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v }) - freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v }) - freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v }) - - } - -genBoundsCommand :: CommandUI FreezeFlags -genBoundsCommand = CommandUI { - commandName = "gen-bounds", - commandSynopsis = "Generate dependency bounds.", - commandDescription = Just $ \_ -> wrapText $ - "Generates bounds for all dependencies that do not currently have them. 
" - ++ "Generated bounds are printed to stdout. You can then paste them into your .cabal file.\n" - ++ "\n", - commandNotes = Nothing, - commandUsage = usageFlags "gen-bounds", - commandDefaultFlags = defaultFreezeFlags, - commandOptions = \ _ -> [ - optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) - ] - } - --- ------------------------------------------------------------ --- * Other commands --- ------------------------------------------------------------ - -updateCommand :: CommandUI (Flag Verbosity) -updateCommand = CommandUI { - commandName = "update", - commandSynopsis = "Updates list of known packages.", - commandDescription = Just $ \_ -> - "For all known remote repositories, download the package list.\n", - commandNotes = Just $ \_ -> - relevantConfigValuesText ["remote-repo" - ,"remote-repo-cache" - ,"local-repo"], - commandUsage = usageFlags "update", - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [optionVerbosity id const] - } - -upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -upgradeCommand = configureCommand { - commandName = "upgrade", - commandSynopsis = "(command disabled, use install instead)", - commandDescription = Nothing, - commandUsage = usageFlagsOrPackages "upgrade", - commandDefaultFlags = (mempty, mempty, mempty, mempty), - commandOptions = commandOptions installCommand - } - -{- -cleanCommand :: CommandUI () -cleanCommand = makeCommand name shortDesc longDesc emptyFlags options - where - name = "clean" - shortDesc = "Removes downloaded files" - longDesc = Nothing - emptyFlags = () - options _ = [] --} - -checkCommand :: CommandUI (Flag Verbosity) -checkCommand = CommandUI { - commandName = "check", - commandSynopsis = "Check the package for common mistakes.", - commandDescription = Just $ \_ -> wrapText $ - "Expects a .cabal package file in the current directory.\n" - ++ "\n" - ++ "The checks correspond to the requirements to packages on Hackage. 
" - ++ "If no errors and warnings are reported, Hackage will accept this " - ++ "package.\n", - commandNotes = Nothing, - commandUsage = \pname -> "Usage: " ++ pname ++ " check\n", - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [] - } - -formatCommand :: CommandUI (Flag Verbosity) -formatCommand = CommandUI { - commandName = "format", - commandSynopsis = "Reformat the .cabal file using the standard style.", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = usageAlternatives "format" ["[FILE]"], - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [] - } - -uninstallCommand :: CommandUI (Flag Verbosity) -uninstallCommand = CommandUI { - commandName = "uninstall", - commandSynopsis = "Warn about 'uninstall' not being implemented.", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = usageAlternatives "uninstall" ["PACKAGES"], - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [] - } - -manpageCommand :: CommandUI (Flag Verbosity) -manpageCommand = CommandUI { - commandName = "manpage", - commandSynopsis = "Outputs manpage source.", - commandDescription = Just $ \_ -> - "Output manpage source to STDOUT.\n", - commandNotes = Nothing, - commandUsage = usageFlags "manpage", - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [optionVerbosity id const] - } - -runCommand :: CommandUI (BuildFlags, BuildExFlags) -runCommand = CommandUI { - commandName = "run", - commandSynopsis = "Builds and runs an executable.", - commandDescription = Just $ \pname -> wrapText $ - "Builds and then runs the specified executable. If no executable is " - ++ "specified, but the package contains just one executable, that one " - ++ "is built and executed.\n" - ++ "\n" - ++ "Use `" ++ pname ++ " test --show-details=streaming` to run a " - ++ "test-suite and get its full output.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " run\n" - ++ " Run the only executable in the current package;\n" - ++ " " ++ pname ++ " run foo -- --fooflag\n" - ++ " Works similar to `./foo --fooflag`.\n", - commandUsage = usageAlternatives "run" - ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], - commandDefaultFlags = mempty, - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd - (buildExOptions showOrParseArgs) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.buildCommand defaultProgramConfiguration - --- ------------------------------------------------------------ --- * Report flags --- ------------------------------------------------------------ - -data ReportFlags = ReportFlags { - reportUsername :: Flag Username, - reportPassword :: Flag Password, - reportVerbosity :: Flag Verbosity - } deriving Generic - -defaultReportFlags :: ReportFlags -defaultReportFlags = ReportFlags { - reportUsername = mempty, - reportPassword = mempty, - reportVerbosity = toFlag normal - } - -reportCommand :: CommandUI ReportFlags -reportCommand = CommandUI { - commandName = "report", - commandSynopsis = "Upload build reports to a remote server.", - commandDescription = Nothing, - commandNotes = Just $ \_ -> - "You can store your Hackage login in the ~/.cabal/config file\n", - commandUsage = usageAlternatives "report" ["[FLAGS]"], - commandDefaultFlags = defaultReportFlags, - commandOptions = \_ -> - [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) - - ,option ['u'] ["username"] - "Hackage 
username." - reportUsername (\v flags -> flags { reportUsername = v }) - (reqArg' "USERNAME" (toFlag . Username) - (flagToList . fmap unUsername)) - - ,option ['p'] ["password"] - "Hackage password." - reportPassword (\v flags -> flags { reportPassword = v }) - (reqArg' "PASSWORD" (toFlag . Password) - (flagToList . fmap unPassword)) - ] - } - -instance Monoid ReportFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ReportFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Get flags --- ------------------------------------------------------------ - -data GetFlags = GetFlags { - getDestDir :: Flag FilePath, - getPristine :: Flag Bool, - getSourceRepository :: Flag (Maybe RepoKind), - getVerbosity :: Flag Verbosity - } deriving Generic - -defaultGetFlags :: GetFlags -defaultGetFlags = GetFlags { - getDestDir = mempty, - getPristine = mempty, - getSourceRepository = mempty, - getVerbosity = toFlag normal - } - -getCommand :: CommandUI GetFlags -getCommand = CommandUI { - commandName = "get", - commandSynopsis = "Download/Extract a package's source code (repository).", - commandDescription = Just $ \_ -> wrapText $ - "Creates a local copy of a package's source code. By default it gets " - ++ "the source\ntarball and unpacks it in a local subdirectory. " - ++ "Alternatively, with -s it will\nget the code from the source " - ++ "repository specified by the package.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " get hlint\n" - ++ " Download the latest stable version of hlint;\n" - ++ " " ++ pname ++ " get lens --source-repository=head\n" - ++ " Download the source repository (i.e. git clone from github).\n", - commandUsage = usagePackages "get", - commandDefaultFlags = defaultGetFlags, - commandOptions = \_ -> [ - optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) - - ,option "d" ["destdir"] - "Where to place the package source, defaults to the current directory." - getDestDir (\v flags -> flags { getDestDir = v }) - (reqArgFlag "PATH") - - ,option "s" ["source-repository"] - "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." - getSourceRepository (\v flags -> flags { getSourceRepository = v }) - (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository") - (fmap (toFlag . Just) parse)) - (Flag Nothing) - (map (fmap show) . flagToList)) - - , option [] ["pristine"] - ("Unpack the original pristine tarball, rather than updating the " - ++ ".cabal file with the latest revision from the package archive.") - getPristine (\v flags -> flags { getPristine = v }) - trueArg - ] - } - --- 'cabal unpack' is a deprecated alias for 'cabal get'. 
-unpackCommand :: CommandUI GetFlags -unpackCommand = getCommand { - commandName = "unpack", - commandUsage = usagePackages "unpack" - } - -instance Monoid GetFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup GetFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * List flags --- ------------------------------------------------------------ - -data ListFlags = ListFlags { - listInstalled :: Flag Bool, - listSimpleOutput :: Flag Bool, - listVerbosity :: Flag Verbosity, - listPackageDBs :: [Maybe PackageDB] - } deriving Generic - -defaultListFlags :: ListFlags -defaultListFlags = ListFlags { - listInstalled = Flag False, - listSimpleOutput = Flag False, - listVerbosity = toFlag normal, - listPackageDBs = [] - } - -listCommand :: CommandUI ListFlags -listCommand = CommandUI { - commandName = "list", - commandSynopsis = "List packages matching a search string.", - commandDescription = Just $ \_ -> wrapText $ - "List all packages, or all packages matching one of the search" - ++ " strings.\n" - ++ "\n" - ++ "If there is a sandbox in the current directory and " - ++ "config:ignore-sandbox is False, use the sandbox package database. " - ++ "Otherwise, use the package database specified with --package-db. " - ++ "If not specified, use the user package database.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " list pandoc\n" - ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n", - commandUsage = usageAlternatives "list" [ "[FLAGS]" - , "[FLAGS] STRINGS"], - commandDefaultFlags = defaultListFlags, - commandOptions = \_ -> [ - optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) - - , option [] ["installed"] - "Only print installed packages" - listInstalled (\v flags -> flags { listInstalled = v }) - trueArg - - , option [] ["simple-output"] - "Print in a easy-to-parse format" - listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) - trueArg - - , option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." - ++ " See the user guide for details.") - listPackageDBs (\v flags -> flags { listPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ] - } - -instance Monoid ListFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ListFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Info flags --- ------------------------------------------------------------ - -data InfoFlags = InfoFlags { - infoVerbosity :: Flag Verbosity, - infoPackageDBs :: [Maybe PackageDB] - } deriving Generic - -defaultInfoFlags :: InfoFlags -defaultInfoFlags = InfoFlags { - infoVerbosity = toFlag normal, - infoPackageDBs = [] - } - -infoCommand :: CommandUI InfoFlags -infoCommand = CommandUI { - commandName = "info", - commandSynopsis = "Display detailed information about a particular package.", - commandDescription = Just $ \_ -> wrapText $ - "If there is a sandbox in the current directory and " - ++ "config:ignore-sandbox is False, use the sandbox package database. " - ++ "Otherwise, use the package database specified with --package-db. 
" - ++ "If not specified, use the user package database.\n", - commandNotes = Nothing, - commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"], - commandDefaultFlags = defaultInfoFlags, - commandOptions = \_ -> [ - optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v }) - - , option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." - ++ " See the user guide for details.") - infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ] - } - -instance Monoid InfoFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InfoFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Install flags --- ------------------------------------------------------------ - --- | Install takes the same flags as configure along with a few extras. --- -data InstallFlags = InstallFlags { - installDocumentation :: Flag Bool, - installHaddockIndex :: Flag PathTemplate, - installDryRun :: Flag Bool, - installMaxBackjumps :: Flag Int, - installReorderGoals :: Flag Bool, - installIndependentGoals :: Flag Bool, - installShadowPkgs :: Flag Bool, - installStrongFlags :: Flag Bool, - installReinstall :: Flag Bool, - installAvoidReinstalls :: Flag Bool, - installOverrideReinstall :: Flag Bool, - installUpgradeDeps :: Flag Bool, - installOnly :: Flag Bool, - installOnlyDeps :: Flag Bool, - installRootCmd :: Flag String, - installSummaryFile :: NubList PathTemplate, - installLogFile :: Flag PathTemplate, - installBuildReports :: Flag ReportLevel, - installReportPlanningFailure :: Flag Bool, - installSymlinkBinDir :: Flag FilePath, - installOneShot :: Flag Bool, - installNumJobs :: Flag (Maybe Int), - installRunTests :: Flag Bool, - installOfflineMode :: Flag Bool - } - deriving (Eq, Generic) - -instance Binary InstallFlags - -defaultInstallFlags :: InstallFlags -defaultInstallFlags = InstallFlags { - installDocumentation = Flag False, - installHaddockIndex = Flag docIndexFile, - installDryRun = Flag False, - installMaxBackjumps = Flag defaultMaxBackjumps, - installReorderGoals = Flag False, - installIndependentGoals= Flag False, - installShadowPkgs = Flag False, - installStrongFlags = Flag False, - installReinstall = Flag False, - installAvoidReinstalls = Flag False, - installOverrideReinstall = Flag False, - installUpgradeDeps = Flag False, - installOnly = Flag False, - installOnlyDeps = Flag False, - installRootCmd = mempty, - installSummaryFile = mempty, - installLogFile = mempty, - installBuildReports = Flag NoReports, - installReportPlanningFailure = Flag False, - installSymlinkBinDir = mempty, - installOneShot = Flag False, - installNumJobs = mempty, - installRunTests = mempty, - installOfflineMode = Flag False - } - where - docIndexFile = toPathTemplate ("$datadir" "doc" - "$arch-$os-$compiler" "index.html") - -defaultMaxBackjumps :: Int -defaultMaxBackjumps = 2000 - -defaultSolver :: PreSolver -defaultSolver = Choose - -allSolvers :: String -allSolvers = intercalate ", " (map display ([minBound .. 
maxBound] :: [PreSolver])) - -installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -installCommand = CommandUI { - commandName = "install", - commandSynopsis = "Install packages.", - commandUsage = usageAlternatives "install" [ "[FLAGS]" - , "[FLAGS] PACKAGES" - ], - commandDescription = Just $ \_ -> wrapText $ - "Installs one or more packages. By default, the installed package" - ++ " will be registered in the user's package database or, if a sandbox" - ++ " is present in the current directory, inside the sandbox.\n" - ++ "\n" - ++ "If PACKAGES are specified, downloads and installs those packages." - ++ " Otherwise, install the package in the current directory (and/or its" - ++ " dependencies) (there must be exactly one .cabal file in the current" - ++ " directory).\n" - ++ "\n" - ++ "When using a sandbox, the flags for `install` only affect the" - ++ " current command and have no effect on future commands. (To achieve" - ++ " that, `configure` must be used.)\n" - ++ " In contrast, without a sandbox, the flags to `install` are saved and" - ++ " affect future commands such as `build` and `repl`. See the help for" - ++ " `configure` for a list of commands being affected.\n" - ++ "\n" - ++ "Installed executables will by default (and without a sandbox)" - ++ " be put into `~/.cabal/bin/`." - ++ " If you want installed executable to be available globally, make" - ++ " sure that the PATH environment variable contains that directory.\n" - ++ "When using a sandbox, executables will be put into" - ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n" - ++ "\n" - ++ "When specifying --bindir, consider also specifying --datadir;" - ++ " this way the sandbox can be deleted and the executable should" - ++ " continue working as long as bindir and datadir are left untouched.", - commandNotes = Just $ \pname -> - ( case commandNotes - $ Cabal.configureCommand defaultProgramConfiguration - of Just desc -> desc pname ++ "\n" - Nothing -> "" - ) - ++ "Examples:\n" - ++ " " ++ pname ++ " install " - ++ " Package in the current directory\n" - ++ " " ++ pname ++ " install foo " - ++ " Package from the hackage server\n" - ++ " " ++ pname ++ " install foo-1.0 " - ++ " Specific version of a package\n" - ++ " " ++ pname ++ " install 'foo < 2' " - ++ " Constrained package version\n" - ++ " " ++ pname ++ " install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n" - ++ " " ++ (map (const ' ') pname) - ++ " " - ++ " Change installation destination\n", - commandDefaultFlags = (mempty, mempty, mempty, mempty), - commandOptions = \showOrParseArgs -> - liftOptions get1 set1 - (filter ((`notElem` ["constraint", "dependency" - , "exact-configuration"]) - . 
optionName) $ - configureOptions showOrParseArgs) - ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) - ++ liftOptions get3 set3 (installOptions showOrParseArgs) - ++ liftOptions get4 set4 (haddockOptions showOrParseArgs) - } - where - get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d) - get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d) - get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d) - get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d) - -haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] -haddockOptions showOrParseArgs - = [ opt { optionName = "haddock-" ++ name, - optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr - | descr <- optionDescr opt] } - | opt <- commandOptions Cabal.haddockCommand showOrParseArgs - , let name = optionName opt - , name `elem` ["hoogle", "html", "html-location" - ,"executables", "tests", "benchmarks", "all", "internal", "css" - ,"hyperlink-source", "hscolour-css" - ,"contents-location"] - ] - where - fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a - fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w - fmapOptFlags modify (OptArg d f p r i w) = OptArg d (modify f) p r i w - fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs] - fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w - -installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] -installOptions showOrParseArgs = - [ option "" ["documentation"] - "building of documentation" - installDocumentation (\v flags -> flags { installDocumentation = v }) - (boolOpt [] []) - - , option [] ["doc-index-file"] - "A central index of haddock API documentation (template cannot use $pkgid)" - installHaddockIndex (\v flags -> flags { installHaddockIndex = v }) - (reqArg' "TEMPLATE" (toFlag.toPathTemplate) - (flagToList . fmap fromPathTemplate)) - - , option [] ["dry-run"] - "Do not install anything, only print what would be installed." - installDryRun (\v flags -> flags { installDryRun = v }) - trueArg - ] ++ - - optionSolverFlags showOrParseArgs - installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) - installReorderGoals (\v flags -> flags { installReorderGoals = v }) - installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) - installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) - installStrongFlags (\v flags -> flags { installStrongFlags = v }) ++ - - [ option [] ["reinstall"] - "Install even if it means installing the same version again." - installReinstall (\v flags -> flags { installReinstall = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["avoid-reinstalls"] - "Do not select versions that would destructively overwrite installed packages." - installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["force-reinstalls"] - "Reinstall packages even if they will most likely break other installed packages." - installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["upgrade-dependencies"] - "Pick the latest version for all dependencies, rather than trying to pick an installed version." 
- installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["only-dependencies"] - "Install only the dependencies necessary to build the given packages" - installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["dependencies-only"] - "A synonym for --only-dependencies" - installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["root-cmd"] - "Command used to gain root privileges, when installing with --global." - installRootCmd (\v flags -> flags { installRootCmd = v }) - (reqArg' "COMMAND" toFlag flagToList) - - , option [] ["symlink-bindir"] - "Add symlinks to installed executables into this directory." - installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) - (reqArgFlag "DIR") - - , option [] ["build-summary"] - "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" - installSummaryFile (\v flags -> flags { installSummaryFile = v }) - (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList)) - - , option [] ["build-log"] - "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" - installLogFile (\v flags -> flags { installLogFile = v }) - (reqArg' "TEMPLATE" (toFlag.toPathTemplate) - (flagToList . fmap fromPathTemplate)) - - , option [] ["remote-build-reporting"] - "Generate build reports to send to a remote server (none, anonymous or detailed)." - installBuildReports (\v flags -> flags { installBuildReports = v }) - (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', " - ++ "'anonymous' or 'detailed'") - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["report-planning-failure"] - "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." - installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) - trueArg - - , option [] ["one-shot"] - "Do not record the packages in the world file." - installOneShot (\v flags -> flags { installOneShot = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["run-tests"] - "Run package test suites during installation." - installRunTests (\v flags -> flags { installRunTests = v }) - trueArg - - , optionNumJobs - installNumJobs (\v flags -> flags { installNumJobs = v }) - - , option [] ["offline"] - "Don't download packages from the Internet." - installOfflineMode (\v flags -> flags { installOfflineMode = v }) - (yesNoOpt showOrParseArgs) - ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" - -- avoids - ParseArgs -> - [ option [] ["only"] - "Only installs the package in the current directory." 
- installOnly (\v flags -> flags { installOnly = v }) - trueArg ] - _ -> [] - - -instance Monoid InstallFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InstallFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Upload flags --- ------------------------------------------------------------ - -data UploadFlags = UploadFlags { - uploadCheck :: Flag Bool, - uploadDoc :: Flag Bool, - uploadUsername :: Flag Username, - uploadPassword :: Flag Password, - uploadPasswordCmd :: Flag [String], - uploadVerbosity :: Flag Verbosity - } deriving Generic - -defaultUploadFlags :: UploadFlags -defaultUploadFlags = UploadFlags { - uploadCheck = toFlag False, - uploadDoc = toFlag False, - uploadUsername = mempty, - uploadPassword = mempty, - uploadPasswordCmd = mempty, - uploadVerbosity = toFlag normal - } - -uploadCommand :: CommandUI UploadFlags -uploadCommand = CommandUI { - commandName = "upload", - commandSynopsis = "Uploads source packages or documentation to Hackage.", - commandDescription = Nothing, - commandNotes = Just $ \_ -> - "You can store your Hackage login in the ~/.cabal/config file\n" - ++ relevantConfigValuesText ["username", "password"], - commandUsage = \pname -> - "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", - commandDefaultFlags = defaultUploadFlags, - commandOptions = \_ -> - [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v }) - - ,option ['c'] ["check"] - "Do not upload, just do QA checks." - uploadCheck (\v flags -> flags { uploadCheck = v }) - trueArg - - ,option ['d'] ["documentation"] - "Upload documentation instead of a source package. Cannot be used together with --check." - uploadDoc (\v flags -> flags { uploadDoc = v }) - trueArg - - ,option ['u'] ["username"] - "Hackage username." - uploadUsername (\v flags -> flags { uploadUsername = v }) - (reqArg' "USERNAME" (toFlag . Username) - (flagToList . fmap unUsername)) - - ,option ['p'] ["password"] - "Hackage password." - uploadPassword (\v flags -> flags { uploadPassword = v }) - (reqArg' "PASSWORD" (toFlag . Password) - (flagToList . fmap unPassword)) - - ,option ['P'] ["password-command"] - "Command to get Hackage password." - uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v }) - (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe)) - ] - } - -instance Monoid UploadFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup UploadFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Init flags --- ------------------------------------------------------------ - -emptyInitFlags :: IT.InitFlags -emptyInitFlags = mempty - -defaultInitFlags :: IT.InitFlags -defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal } - -initCommand :: CommandUI IT.InitFlags -initCommand = CommandUI { - commandName = "init", - commandSynopsis = "Create a new .cabal package file (interactively).", - commandDescription = Just $ \_ -> wrapText $ - "Cabalise a project by creating a .cabal, Setup.hs, and " - ++ "optionally a LICENSE file.\n" - ++ "\n" - ++ "Calling init with no arguments (recommended) uses an " - ++ "interactive mode, which will try to guess as much as " - ++ "possible and prompt you for the rest. Command-line " - ++ "arguments are provided for scripting purposes. 
" - ++ "If you don't want interactive mode, be sure to pass " - ++ "the -n flag.\n", - commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " init [FLAGS]\n", - commandDefaultFlags = defaultInitFlags, - commandOptions = \_ -> - [ option ['n'] ["non-interactive"] - "Non-interactive mode." - IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v }) - trueArg - - , option ['q'] ["quiet"] - "Do not generate log messages to stdout." - IT.quiet (\v flags -> flags { IT.quiet = v }) - trueArg - - , option [] ["no-comments"] - "Do not generate explanatory comments in the .cabal file." - IT.noComments (\v flags -> flags { IT.noComments = v }) - trueArg - - , option ['m'] ["minimal"] - "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." - IT.minimal (\v flags -> flags { IT.minimal = v }) - trueArg - - , option [] ["overwrite"] - "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." - IT.overwrite (\v flags -> flags { IT.overwrite = v }) - trueArg - - , option [] ["package-dir"] - "Root directory of the package (default = current directory)." - IT.packageDir (\v flags -> flags { IT.packageDir = v }) - (reqArgFlag "DIRECTORY") - - , option ['p'] ["package-name"] - "Name of the Cabal package to create." - IT.packageName (\v flags -> flags { IT.packageName = v }) - (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["version"] - "Initial version of the package." - IT.version (\v flags -> flags { IT.version = v }) - (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["cabal-version"] - "Required version of the Cabal library." - IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) - (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['l'] ["license"] - "Project license." - IT.license (\v flags -> flags { IT.license = v }) - (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['a'] ["author"] - "Name of the project's author." - IT.author (\v flags -> flags { IT.author = v }) - (reqArgFlag "NAME") - - , option ['e'] ["email"] - "Email address of the maintainer." - IT.email (\v flags -> flags { IT.email = v }) - (reqArgFlag "EMAIL") - - , option ['u'] ["homepage"] - "Project homepage and/or repository." - IT.homepage (\v flags -> flags { IT.homepage = v }) - (reqArgFlag "URL") - - , option ['s'] ["synopsis"] - "Short project synopsis." - IT.synopsis (\v flags -> flags { IT.synopsis = v }) - (reqArgFlag "TEXT") - - , option ['c'] ["category"] - "Project category." - IT.category (\v flags -> flags { IT.category = v }) - (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s)) - (flagToList . fmap (either id show))) - - , option ['x'] ["extra-source-file"] - "Extra source file to be distributed with tarball." - IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) - (reqArg' "FILE" (Just . (:[])) - (fromMaybe [])) - - , option [] ["is-library"] - "Build a library." - IT.packageType (\v flags -> flags { IT.packageType = v }) - (noArg (Flag IT.Library)) - - , option [] ["is-executable"] - "Build an executable." 
- IT.packageType - (\v flags -> flags { IT.packageType = v }) - (noArg (Flag IT.Executable)) - - , option [] ["main-is"] - "Specify the main module." - IT.mainIs - (\v flags -> flags { IT.mainIs = v }) - (reqArgFlag "FILE") - - , option [] ["language"] - "Specify the default language." - IT.language - (\v flags -> flags { IT.language = v }) - (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['o'] ["expose-module"] - "Export a module from the package." - IT.exposedModules - (\v flags -> flags { IT.exposedModules = v }) - (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++) - ((Just . (:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option [] ["extension"] - "Use a LANGUAGE extension (in the other-extensions field)." - IT.otherExts - (\v flags -> flags { IT.otherExts = v }) - (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++) - ((Just . (:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option ['d'] ["dependency"] - "Package dependency." - IT.dependencies (\v flags -> flags { IT.dependencies = v }) - (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++) - ((Just . (:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option [] ["source-dir"] - "Directory containing package source." - IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) - (reqArg' "DIR" (Just . (:[])) - (fromMaybe [])) - - , option [] ["build-tool"] - "Required external build tool." - IT.buildTools (\v flags -> flags { IT.buildTools = v }) - (reqArg' "TOOL" (Just . (:[])) - (fromMaybe [])) - - , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) - ] - } - where readMaybe s = case reads s of - [(x,"")] -> Just x - _ -> Nothing - --- ------------------------------------------------------------ --- * SDist flags --- ------------------------------------------------------------ - --- | Extra flags to @sdist@ beyond runghc Setup sdist --- -data SDistExFlags = SDistExFlags { - sDistFormat :: Flag ArchiveFormat - } - deriving (Show, Generic) - -data ArchiveFormat = TargzFormat | ZipFormat -- | ... 
- deriving (Show, Eq) - -defaultSDistExFlags :: SDistExFlags -defaultSDistExFlags = SDistExFlags { - sDistFormat = Flag TargzFormat - } - -sdistCommand :: CommandUI (SDistFlags, SDistExFlags) -sdistCommand = Cabal.sdistCommand { - commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags), - commandOptions = \showOrParseArgs -> - liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs) - ++ liftOptions snd setSnd sdistExOptions - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - sdistExOptions = - [option [] ["archive-format"] "archive-format" - sDistFormat (\v flags -> flags { sDistFormat = v }) - (choiceOpt - [ (Flag TargzFormat, ([], ["targz"]), - "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") - , (Flag ZipFormat, ([], ["zip"]), - "Produce a '.zip' format archive") - ]) - ] - -instance Monoid SDistExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup SDistExFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Win32SelfUpgrade flags --- ------------------------------------------------------------ - -data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags { - win32SelfUpgradeVerbosity :: Flag Verbosity -} deriving Generic - -defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags -defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags { - win32SelfUpgradeVerbosity = toFlag normal -} - -win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags -win32SelfUpgradeCommand = CommandUI { - commandName = "win32selfupgrade", - commandSynopsis = "Self-upgrade the executable on Windows", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n", - commandDefaultFlags = defaultWin32SelfUpgradeFlags, - commandOptions = \_ -> - [optionVerbosity win32SelfUpgradeVerbosity - (\v flags -> flags { win32SelfUpgradeVerbosity = v}) - ] -} - -instance Monoid Win32SelfUpgradeFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup Win32SelfUpgradeFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * ActAsSetup flags --- ------------------------------------------------------------ - -data ActAsSetupFlags = ActAsSetupFlags { - actAsSetupBuildType :: Flag BuildType -} deriving Generic - -defaultActAsSetupFlags :: ActAsSetupFlags -defaultActAsSetupFlags = ActAsSetupFlags { - actAsSetupBuildType = toFlag Simple -} - -actAsSetupCommand :: CommandUI ActAsSetupFlags -actAsSetupCommand = CommandUI { - commandName = "act-as-setup", - commandSynopsis = "Run as-if this was a Setup.hs", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " act-as-setup\n", - commandDefaultFlags = defaultActAsSetupFlags, - commandOptions = \_ -> - [option "" ["build-type"] - "Use the given build type." - actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v }) - (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++) - (fmap toFlag parse)) - (map display . 
flagToList)) - ] -} - -instance Monoid ActAsSetupFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ActAsSetupFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Sandbox-related flags --- ------------------------------------------------------------ - -data SandboxFlags = SandboxFlags { - sandboxVerbosity :: Flag Verbosity, - sandboxSnapshot :: Flag Bool, -- FIXME: this should be an 'add-source'-only - -- flag. - sandboxLocation :: Flag FilePath -} deriving Generic - -defaultSandboxLocation :: FilePath -defaultSandboxLocation = ".cabal-sandbox" - -defaultSandboxFlags :: SandboxFlags -defaultSandboxFlags = SandboxFlags { - sandboxVerbosity = toFlag normal, - sandboxSnapshot = toFlag False, - sandboxLocation = toFlag defaultSandboxLocation - } - -sandboxCommand :: CommandUI SandboxFlags -sandboxCommand = CommandUI { - commandName = "sandbox", - commandSynopsis = "Create/modify/delete a sandbox.", - commandDescription = Just $ \pname -> concat - [ paragraph $ "Sandboxes are isolated package databases that can be used" - ++ " to prevent dependency conflicts that arise when many different" - ++ " packages are installed in the same database (i.e. the user's" - ++ " database in the home directory)." - , paragraph $ "A sandbox in the current directory (created by" - ++ " `sandbox init`) will be used instead of the user's database for" - ++ " commands such as `install` and `build`. Note that (a directly" - ++ " invoked) GHC will not automatically be aware of sandboxes;" - ++ " only if called via appropriate " ++ pname - ++ " commands, e.g. `repl`, `build`, `exec`." - , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox" - ++ " in folders above the current one, so cabal will not see the sandbox" - ++ " if you are in a subfolder of a sandbox." - , paragraph "Subcommands:" - , headLine "init:" - , indentParagraph $ "Initialize a sandbox in the current directory." - ++ " An existing package database will not be modified, but settings" - ++ " (such as the location of the database) can be modified this way." - , headLine "delete:" - , indentParagraph $ "Remove the sandbox; deleting all the packages" - ++ " installed inside." - , headLine "add-source:" - , indentParagraph $ "Make one or more local packages available in the" - ++ " sandbox. PATHS may be relative or absolute." - ++ " Typical usecase is when you need" - ++ " to make a (temporary) modification to a dependency: You download" - ++ " the package into a different directory, make the modification," - ++ " and add that directory to the sandbox with `add-source`." - , indentParagraph $ "Unless given `--snapshot`, any add-source'd" - ++ " dependency that was modified since the last build will be" - ++ " re-installed automatically." - , headLine "delete-source:" - , indentParagraph $ "Remove an add-source dependency; however, this will" - ++ " not delete the package(s) that have been installed in the sandbox" - ++ " from this dependency. You can either unregister the package(s) via" - ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the" - ++ " sandbox (`sandbox delete; sandbox init`)." - , headLine "list-sources:" - , indentParagraph $ "List the directories of local packages made" - ++ " available via `" ++ pname ++ " add-source`." - , headLine "hc-pkg:" - , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package" - ++ " database. Can be used to list specific/all packages that are" - ++ " installed in the sandbox. 
For subcommands, see the help for" - ++ " ghc-pkg. Affected by the compiler version specified by `configure`." - ], - commandNotes = Just $ \pname -> - relevantConfigValuesText ["require-sandbox" - ,"ignore-sandbox"] - ++ "\n" - ++ "Examples:\n" - ++ " Set up a sandbox with one local dependency, located at ../foo:\n" - ++ " " ++ pname ++ " sandbox init\n" - ++ " " ++ pname ++ " sandbox add-source ../foo\n" - ++ " " ++ pname ++ " install --only-dependencies\n" - ++ " Reset the sandbox:\n" - ++ " " ++ pname ++ " sandbox delete\n" - ++ " " ++ pname ++ " sandbox init\n" - ++ " " ++ pname ++ " install --only-dependencies\n" - ++ " List the packages in the sandbox:\n" - ++ " " ++ pname ++ " sandbox hc-pkg list\n" - ++ " Unregister the `broken` package from the sandbox:\n" - ++ " " ++ pname ++ " sandbox hc-pkg -- --force unregister broken\n", - commandUsage = usageAlternatives "sandbox" - [ "init [FLAGS]" - , "delete [FLAGS]" - , "add-source [FLAGS] PATHS" - , "delete-source [FLAGS] PATHS" - , "list-sources [FLAGS]" - , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]" - ], - - commandDefaultFlags = defaultSandboxFlags, - commandOptions = \_ -> - [ optionVerbosity sandboxVerbosity - (\v flags -> flags { sandboxVerbosity = v }) - - , option [] ["snapshot"] - "Take a snapshot instead of creating a link (only applies to 'add-source')" - sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v }) - trueArg - - , option [] ["sandbox"] - "Sandbox location (default: './.cabal-sandbox')." - sandboxLocation (\v flags -> flags { sandboxLocation = v }) - (reqArgFlag "DIR") - ] - } - -instance Monoid SandboxFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup SandboxFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Exec Flags --- ------------------------------------------------------------ - -data ExecFlags = ExecFlags { - execVerbosity :: Flag Verbosity -} deriving Generic - -defaultExecFlags :: ExecFlags -defaultExecFlags = ExecFlags { - execVerbosity = toFlag normal - } - -execCommand :: CommandUI ExecFlags -execCommand = CommandUI { - commandName = "exec", - commandSynopsis = "Give a command access to the sandbox package repository.", - commandDescription = Just $ \pname -> wrapText $ - -- TODO: this is too GHC-focused for my liking.. - "A directly invoked GHC will not automatically be aware of any" - ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what" - ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:" - ++ " COMMAND will be executed in a modified environment and thereby uses" - ++ " the sandbox package database.\n" - ++ "\n" - ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n" - ++ "\n" - ++ "Note that other " ++ pname ++ " commands change the environment" - ++ " variable appropriately already, so there is no need to wrap those" - ++ " in `" ++ pname ++ " exec`. 
But with `" ++ pname ++ " exec`, the user" - ++ " has more control and can, for example, execute custom scripts which" - ++ " indirectly execute GHC.\n" - ++ "\n" - ++ "Note that `" ++ pname ++ " repl` is different from `" ++ pname - ++ " exec -- ghci` as the latter will not forward any additional flags" - ++ " being defined in the local package to ghci.\n" - ++ "\n" - ++ "See `" ++ pname ++ " sandbox`.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " exec -- ghci -Wall\n" - ++ " Start a repl session with sandbox packages and all warnings;\n" - ++ " " ++ pname ++ " exec gitit -- -f gitit.cnf\n" - ++ " Give gitit access to the sandbox packages, and pass it a flag;\n" - ++ " " ++ pname ++ " exec runghc Foo.hs\n" - ++ " Execute runghc on Foo.hs with runghc configured to use the\n" - ++ " sandbox package database (if a sandbox is being used).\n", - commandUsage = \pname -> - "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n", - - commandDefaultFlags = defaultExecFlags, - commandOptions = \_ -> - [ optionVerbosity execVerbosity - (\v flags -> flags { execVerbosity = v }) - ] - } - -instance Monoid ExecFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ExecFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * UserConfig flags --- ------------------------------------------------------------ - -data UserConfigFlags = UserConfigFlags { - userConfigVerbosity :: Flag Verbosity, - userConfigForce :: Flag Bool -} deriving Generic - -instance Monoid UserConfigFlags where - mempty = UserConfigFlags { - userConfigVerbosity = toFlag normal, - userConfigForce = toFlag False - } - mappend = (<>) - -instance Semigroup UserConfigFlags where - (<>) = gmappend - -userConfigCommand :: CommandUI UserConfigFlags -userConfigCommand = CommandUI { - commandName = "user-config", - commandSynopsis = "Display and update the user's global cabal configuration.", - commandDescription = Just $ \_ -> wrapText $ - "When upgrading cabal, the set of configuration keys and their default" - ++ " values may change. This command provides means to merge the existing" - ++ " config in ~/.cabal/config" - ++ " (i.e. all bindings that are actually defined and not commented out)" - ++ " and the default config of the new version.\n" - ++ "\n" - ++ "init: Creates a new config file at either ~/.cabal/config or as" - ++ " specified by --config-file, if given. An existing file won't be " - ++ " overwritten unless -f or --force is given.\n" - ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" - ++ " the default configuration that would be created by cabal if the" - ++ " config file did not exist.\n" - ++ "update: Applies the pseudo-diff to the configuration that would be" - ++ " created by default, and write the result back to ~/.cabal/config.", - - commandNotes = Nothing, - commandUsage = usageAlternatives "user-config" ["init", "diff", "update"], - commandDefaultFlags = mempty, - commandOptions = \ _ -> [ - optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) - , option ['f'] ["force"] - "Overwrite the config file if it already exists." 
- userConfigForce (\v flags -> flags { userConfigForce = v }) - trueArg - ] - } - --- ------------------------------------------------------------ --- * GetOpt Utils --- ------------------------------------------------------------ - -reqArgFlag :: ArgPlaceHolder -> - MkOptDescr (b -> Flag String) (Flag String -> b -> b) b -reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList - -liftOptions :: (b -> a) -> (a -> b -> b) - -> [OptionField a] -> [OptionField b] -liftOptions get set = map (liftOption get set) - -yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b -yesNoOpt ShowArgs sf lf = trueArg sf lf -yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf - -optionSolver :: (flags -> Flag PreSolver) - -> (Flag PreSolver -> flags -> flags) - -> OptionField flags -optionSolver get set = - option [] ["solver"] - ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ", where 'choose' chooses between 'topdown' and 'modular' based on compiler version.") - get set - (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - -optionSolverFlags :: ShowOrParseArgs - -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) - -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) - -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) - -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) - -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) - -> [OptionField flags] -optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg _getig _setig getsip setsip getstrfl setstrfl = - [ option [] ["max-backjumps"] - ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") - getmbj setmbj - (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse)) - (map show . flagToList)) - , option [] ["reorder-goals"] - "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." - getrg setrg - (yesNoOpt showOrParseArgs) - -- TODO: Disabled for now because it does not work as advertised (yet). -{- - , option [] ["independent-goals"] - "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." - getig setig - (yesNoOpt showOrParseArgs) --} - , option [] ["shadow-installed-packages"] - "If multiple package instances of the same version are installed, treat all but one as shadowed." - getsip setsip - (yesNoOpt showOrParseArgs) - , option [] ["strong-flags"] - "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." - getstrfl setstrfl - (yesNoOpt showOrParseArgs) - ] - -usageFlagsOrPackages :: String -> String -> String -usageFlagsOrPackages name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" - ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" - -usagePackages :: String -> String -> String -usagePackages name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" - -usageFlags :: String -> String -> String -usageFlags name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" - ---TODO: do we want to allow per-package flags? 
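The 'liftOptions' helper above is what lets commands such as 'install', 'run' and 'sdist' reuse option lists defined over their component flag types: each list is lifted into a product type with a projection and an update function, and the lifted lists are concatenated. A small sketch of the idea, with a hypothetical ExtraFlags record and combinedOptions function, assuming only definitions already in scope in this module:

    data ExtraFlags = ExtraFlags { extraVerbosity :: Flag Verbosity }

    extraOptions :: [OptionField ExtraFlags]
    extraOptions =
      [ optionVerbosity extraVerbosity (\v flags -> flags { extraVerbosity = v }) ]

    -- Combine the 'get' command's options with the extra ones into a single
    -- option list over the pair, the same way 'installCommand' and
    -- 'sdistCommand' combine their components above.
    combinedOptions :: ShowOrParseArgs -> [OptionField (GetFlags, ExtraFlags)]
    combinedOptions showOrParseArgs =
         liftOptions fst setFst (commandOptions getCommand showOrParseArgs)
      ++ liftOptions snd setSnd extraOptions
      where
        setFst a (_,b) = (a,b)
        setSnd b (a,_) = (a,b)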
-parsePackageArgs :: [String] -> Either String [Dependency] -parsePackageArgs = parsePkgArgs [] - where - parsePkgArgs ds [] = Right (reverse ds) - parsePkgArgs ds (arg:args) = - case readPToMaybe parseDependencyOrPackageId arg of - Just dep -> parsePkgArgs (dep:ds) args - Nothing -> Left $ - show arg ++ " is not valid syntax for a package name or" - ++ " package dependency." - -parseDependencyOrPackageId :: Parse.ReadP r Dependency -parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse - where - pkgidToDependency :: PackageIdentifier -> Dependency - pkgidToDependency p = case packageVersion p of - Version [] _ -> Dependency (packageName p) anyVersion - version -> Dependency (packageName p) (thisVersion version) - -showRepo :: RemoteRepo -> String -showRepo repo = remoteRepoName repo ++ ":" - ++ uriToString id (remoteRepoURI repo) [] - -readRepo :: String -> Maybe RemoteRepo -readRepo = readPToMaybe parseRepo - -parseRepo :: Parse.ReadP r RemoteRepo -parseRepo = do - name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.") - _ <- Parse.char ':' - uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~") - uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr) - return RemoteRepo { - remoteRepoName = name, - remoteRepoURI = uri, - remoteRepoSecure = Nothing, - remoteRepoRootKeys = [], - remoteRepoKeyThreshold = 0, - remoteRepoShouldTryHttps = False - } - --- ------------------------------------------------------------ --- * Helpers for Documentation --- ------------------------------------------------------------ - -headLine :: String -> String -headLine = unlines - . map unwords - . wrapLine 79 - . words - -paragraph :: String -> String -paragraph = (++"\n") - . unlines - . map unwords - . wrapLine 79 - . words - -indentParagraph :: String -> String -indentParagraph = unlines - . (flip (++)) [""] - . map ((" "++).unwords) - . wrapLine 77 - . words - -relevantConfigValuesText :: [String] -> String -relevantConfigValuesText vs = - "Relevant global configuration keys:\n" - ++ concat [" " ++ v ++ "\n" |v <- vs] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/SetupWrapper.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/SetupWrapper.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/SetupWrapper.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/SetupWrapper.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,742 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.SetupWrapper --- Copyright : (c) The University of Glasgow 2006, --- Duncan Coutts 2008 --- --- Maintainer : cabal-devel@haskell.org --- Stability : alpha --- Portability : portable --- --- An interface to building and installing Cabal packages. --- If the @Built-Type@ field is specified as something other than --- 'Custom', and the current version of Cabal is acceptable, this performs --- setup actions directly. Otherwise it builds the setup script and --- runs it with the given arguments. 
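Given the interface described in the header comment above, a caller hands 'setupWrapper' a Cabal CommandUI plus a function from the chosen Cabal library version to the flags for that command; the wrapper then decides whether to call into the Cabal library directly or to compile and run an external Setup script. A minimal, hypothetical sketch of driving a build through the wrapper (buildViaSetup is not part of the real API, and the flag values are illustrative):

    import Distribution.Client.SetupWrapper
             ( setupWrapper, defaultSetupScriptOptions )
    import qualified Distribution.Simple.Setup as Cabal
    import Distribution.Simple.Program ( defaultProgramConfiguration )
    import Distribution.Verbosity ( Verbosity )

    -- Run the equivalent of "runghc Setup.hs build" for the package in the
    -- current directory, letting the wrapper pick the setup method and the
    -- Cabal library version.
    buildViaSetup :: Verbosity -> IO ()
    buildViaSetup verbosity =
      setupWrapper verbosity
                   defaultSetupScriptOptions    -- no policy overrides
                   Nothing                      -- read the .cabal file from the working dir
                   (Cabal.buildCommand defaultProgramConfiguration)
                   (const Cabal.defaultBuildFlags)  -- same flags whichever Cabal version is chosen
                   []                           -- no extra Setup.hs arguments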
- -module Distribution.Client.SetupWrapper ( - setupWrapper, - SetupScriptOptions(..), - defaultSetupScriptOptions, - ) where - -import qualified Distribution.Make as Make -import qualified Distribution.Simple as Simple -import Distribution.Version - ( Version(..), VersionRange, anyVersion - , intersectVersionRanges, orLaterVersion - , withinRange ) -import Distribution.InstalledPackageInfo (installedUnitId) -import Distribution.Package - ( UnitId(..), PackageIdentifier(..), PackageId, - PackageName(..), Package(..), packageName - , packageVersion, Dependency(..) ) -import Distribution.PackageDescription - ( GenericPackageDescription(packageDescription) - , PackageDescription(..), specVersion - , BuildType(..), knownBuildTypes, defaultRenaming ) -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.Simple.Configure - ( configCompilerEx ) -import Distribution.Compiler - ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) -import Distribution.Simple.Compiler - ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) -import Distribution.Simple.PreProcess - ( runSimplePreProcessor, ppUnlit ) -import Distribution.Simple.Build.Macros - ( generatePackageVersionMacros ) -import Distribution.Simple.Program - ( ProgramConfiguration, emptyProgramConfiguration - , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram - , ghcjsProgram ) -import Distribution.Simple.Program.Find - ( programSearchPathAsPATHVar ) -import Distribution.Simple.Program.Run - ( getEffectiveEnvironment ) -import qualified Distribution.Simple.Program.Strip as Strip -import Distribution.Simple.BuildPaths - ( defaultDistPref, exeExtension ) - -import Distribution.Simple.Command - ( CommandUI(..), commandShowOptions ) -import Distribution.Simple.Program.GHC - ( GhcMode(..), GhcOptions(..), renderGhcOptions ) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Client.Config - ( defaultCabalDir ) -import Distribution.Client.IndexUtils - ( getInstalledPackages ) -import Distribution.Client.JobControl - ( Lock, criticalSection ) -import Distribution.Simple.Setup - ( Flag(..) 
) -import Distribution.Simple.Utils - ( die, debug, info, cabalVersion, tryFindPackageDesc, comparing - , createDirectoryIfMissingVerbose, installExecutableFile - , copyFileVerbose, rewriteFile, intercalate ) -import Distribution.Client.Utils - ( inDir, tryCanonicalizePath - , existsAndIsMoreRecentThan, moreRecentFile -#if mingw32_HOST_OS - , canonicalizePathNoThrow -#endif - ) -import Distribution.System ( Platform(..), buildPlatform ) -import Distribution.Text - ( display ) -import Distribution.Utils.NubList - ( toNubListR ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Compat.Exception - ( catchIO ) - -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>) ) -import System.IO ( Handle, hPutStr ) -import System.Exit ( ExitCode(..), exitWith ) -import System.Process ( runProcess, waitForProcess ) -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative ( (<$>), (<*>) ) -import Data.Monoid ( mempty ) -#endif -import Control.Monad ( when, unless ) -import Data.List ( find, foldl1' ) -import Data.Maybe ( fromMaybe, isJust ) -import Data.Char ( isSpace ) -import Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) - -#ifdef mingw32_HOST_OS -import Distribution.Simple.Utils - ( withTempDirectory ) - -import Control.Exception ( bracket ) -import System.FilePath ( equalFilePath, takeDirectory ) -import System.Directory ( doesDirectoryExist ) -import qualified System.Win32 as Win32 -#endif - ---TODO: The 'setupWrapper' and 'SetupScriptOptions' should be split into two --- parts: one that has no policy and just does as it's told with all the --- explicit options, and an optional initial part that applies certain --- policies (like if we should add the Cabal lib as a dep, and if so which --- version). This could be structured as an action that returns a fully --- elaborated 'SetupScriptOptions' containing no remaining policy choices. --- --- See also the discussion at https://github.com/haskell/cabal/pull/3094 - -data SetupScriptOptions = SetupScriptOptions { - -- | The version of the Cabal library to use (if 'useDependenciesExclusive' - -- is not set). A suitable version of the Cabal library must be installed - -- (or for some build-types be the one cabal-install was built with). - -- - -- The version found also determines the version of the Cabal specification - -- that we us for talking to the Setup.hs, unless overridden by - -- 'useCabalSpecVersion'. - -- - useCabalVersion :: VersionRange, - - -- | This is the version of the Cabal specification that we believe that - -- this package uses. This affects the semantics and in particular the - -- Setup command line interface. - -- - -- This is similar to 'useCabalVersion' but instead of probing the system - -- for a version of the /Cabal library/ you just say exactly which version - -- of the /spec/ we will use. Using this also avoid adding the Cabal - -- library as an additional dependency, so add it to 'useDependencies' - -- if needed. - -- - useCabalSpecVersion :: Maybe Version, - useCompiler :: Maybe Compiler, - usePlatform :: Maybe Platform, - usePackageDB :: PackageDBStack, - usePackageIndex :: Maybe InstalledPackageIndex, - useProgramConfig :: ProgramConfiguration, - useDistPref :: FilePath, - useLoggingHandle :: Maybe Handle, - useWorkingDir :: Maybe FilePath, - forceExternalSetupMethod :: Bool, - - -- | List of dependencies to use when building Setup.hs. - useDependencies :: [(UnitId, PackageId)], - - -- | Is the list of setup dependencies exclusive? 
- -- - -- When this is @False@, if we compile the Setup.hs script we do so with the - -- list in 'useDependencies' but all other packages in the environment are - -- also visible. A suitable version of @Cabal@ library (see - -- 'useCabalVersion') is also added to the list of dependencies, unless - -- 'useDependencies' already contains a Cabal dependency. - -- - -- When @True@, only the 'useDependencies' packages are used, with other - -- packages in the environment hidden. - -- - -- This feature is here to support the setup stanza in .cabal files that - -- specifies explicit (and exclusive) dependencies, as well as the old - -- style with no dependencies. - useDependenciesExclusive :: Bool, - - -- | Should we build the Setup.hs with CPP version macros available? - -- We turn this on when we have a setup stanza in .cabal that declares - -- explicit setup dependencies. - -- - useVersionMacros :: Bool, - - -- Used only by 'cabal clean' on Windows. - -- - -- Note: win32 clean hack - ------------------------- - -- On Windows, running './dist/setup/setup clean' doesn't work because the - -- setup script will try to delete itself (which causes it to fail horribly, - -- unlike on Linux). So we have to move the setup exe out of the way first - -- and then delete it manually. This applies only to the external setup - -- method. - useWin32CleanHack :: Bool, - - -- Used only when calling setupWrapper from parallel code to serialise - -- access to the setup cache; should be Nothing otherwise. - -- - -- Note: setup exe cache - ------------------------ - -- When we are installing in parallel, we always use the external setup - -- method. Since compiling the setup script each time adds noticeable - -- overhead, we use a shared setup script cache - -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal - -- version) combination the cache holds a compiled setup script - -- executable. This only affects the Simple build type; for the Custom, - -- Configure and Make build types we always compile the setup script anew. - setupCacheLock :: Maybe Lock - } - -defaultSetupScriptOptions :: SetupScriptOptions -defaultSetupScriptOptions = SetupScriptOptions { - useCabalVersion = anyVersion, - useCabalSpecVersion = Nothing, - useCompiler = Nothing, - usePlatform = Nothing, - usePackageDB = [GlobalPackageDB, UserPackageDB], - usePackageIndex = Nothing, - useDependencies = [], - useDependenciesExclusive = False, - useVersionMacros = False, - useProgramConfig = emptyProgramConfiguration, - useDistPref = defaultDistPref, - useLoggingHandle = Nothing, - useWorkingDir = Nothing, - useWin32CleanHack = False, - forceExternalSetupMethod = False, - setupCacheLock = Nothing - } - -setupWrapper :: Verbosity - -> SetupScriptOptions - -> Maybe PackageDescription - -> CommandUI flags - -> (Version -> flags) - -> [String] - -> IO () -setupWrapper verbosity options mpkg cmd flags extraArgs = do - pkg <- maybe getPkg return mpkg - let setupMethod = determineSetupMethod options' buildType' - options' = options { - useCabalVersion = intersectVersionRanges - (useCabalVersion options) - (orLaterVersion (specVersion pkg)) - } - buildType' = fromMaybe Custom (buildType pkg) - mkArgs cabalLibVersion = commandName cmd - : commandShowOptions cmd (flags cabalLibVersion) - ++ extraArgs - checkBuildType buildType' - setupMethod verbosity options' (packageId pkg) buildType' mkArgs - where - getPkg = tryFindPackageDesc (fromMaybe "." (useWorkingDir options)) - >>= readPackageDescription verbosity - >>= return . 
packageDescription - - checkBuildType (UnknownBuildType name) = - die $ "The build-type '" ++ name ++ "' is not known. Use one of: " - ++ intercalate ", " (map display knownBuildTypes) ++ "." - checkBuildType _ = return () - --- | Decide if we're going to be able to do a direct internal call to the --- entry point in the Cabal library or if we're going to have to compile --- and execute an external Setup.hs script. --- -determineSetupMethod :: SetupScriptOptions -> BuildType -> SetupMethod -determineSetupMethod options buildType' - -- This order is picked so that it's stable. The build type and - -- required cabal version are external info, coming from .cabal - -- files and the command line. Those do switch between the - -- external and self & internal methods, but that info itself can - -- be considered stable. The logging and force-external conditions - -- are internally generated choices but now these only switch - -- between the self and internal setup methods, which are - -- consistent with each other. - | buildType' == Custom = externalSetupMethod - | maybe False (cabalVersion /=) - (useCabalSpecVersion options) - || not (cabalVersion `withinRange` - useCabalVersion options) = externalSetupMethod - | isJust (useLoggingHandle options) - -- Forcing is done to use an external process e.g. due to parallel - -- build concerns. - || forceExternalSetupMethod options = selfExecSetupMethod - | otherwise = internalSetupMethod - -type SetupMethod = Verbosity - -> SetupScriptOptions - -> PackageIdentifier - -> BuildType - -> (Version -> [String]) -> IO () - --- ------------------------------------------------------------ --- * Internal SetupMethod --- ------------------------------------------------------------ - -internalSetupMethod :: SetupMethod -internalSetupMethod verbosity options _ bt mkargs = do - let args = mkargs cabalVersion - debug verbosity $ "Using internal setup method with build-type " ++ show bt - ++ " and args:\n " ++ show args - inDir (useWorkingDir options) $ - buildTypeAction bt args - -buildTypeAction :: BuildType -> ([String] -> IO ()) -buildTypeAction Simple = Simple.defaultMainArgs -buildTypeAction Configure = Simple.defaultMainWithHooksArgs - Simple.autoconfUserHooks -buildTypeAction Make = Make.defaultMainArgs -buildTypeAction Custom = error "buildTypeAction Custom" -buildTypeAction (UnknownBuildType _) = error "buildTypeAction UnknownBuildType" - --- ------------------------------------------------------------ --- * Self-Exec SetupMethod --- ------------------------------------------------------------ - -selfExecSetupMethod :: SetupMethod -selfExecSetupMethod verbosity options _pkg bt mkargs = do - let args = ["act-as-setup", - "--build-type=" ++ display bt, - "--"] ++ mkargs cabalVersion - debug verbosity $ "Using self-exec internal setup method with build-type " - ++ show bt ++ " and args:\n " ++ show args - path <- getExecutablePath - info verbosity $ unwords (path : args) - case useLoggingHandle options of - Nothing -> return () - Just logHandle -> info verbosity $ "Redirecting build log to " - ++ show logHandle - - searchpath <- programSearchPathAsPATHVar - (getProgramSearchPath (useProgramConfig options)) - env <- getEffectiveEnvironment [("PATH", Just searchpath)] - - process <- runProcess path args - (useWorkingDir options) env Nothing - (useLoggingHandle options) (useLoggingHandle options) - exitCode <- waitForProcess process - unless (exitCode == ExitSuccess) $ exitWith exitCode - --- ------------------------------------------------------------ --- * 
External SetupMethod --- ------------------------------------------------------------ - -externalSetupMethod :: SetupMethod -externalSetupMethod verbosity options pkg bt mkargs = do - debug verbosity $ "Using external setup method with build-type " ++ show bt - debug verbosity $ "Using explicit dependencies: " - ++ show (useDependenciesExclusive options) - createDirectoryIfMissingVerbose verbosity True setupDir - (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse - debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion - path <- if useCachedSetupExecutable - then getCachedSetupExecutable options' - cabalLibVersion mCabalLibInstalledPkgId - else compileSetupExecutable options' - cabalLibVersion mCabalLibInstalledPkgId False - invokeSetupScript options' path (mkargs cabalLibVersion) - - where - workingDir = case fromMaybe "" (useWorkingDir options) of - [] -> "." - dir -> dir - setupDir = workingDir useDistPref options "setup" - setupVersionFile = setupDir "setup" <.> "version" - setupHs = setupDir "setup" <.> "hs" - setupProgFile = setupDir "setup" <.> exeExtension - platform = fromMaybe buildPlatform (usePlatform options) - - useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) - - maybeGetInstalledPackages :: SetupScriptOptions -> Compiler - -> ProgramConfiguration -> IO InstalledPackageIndex - maybeGetInstalledPackages options' comp conf = - case usePackageIndex options' of - Just index -> return index - Nothing -> getInstalledPackages verbosity - comp (usePackageDB options') conf - - -- Choose the version of Cabal to use if the setup script has a dependency on - -- Cabal, and possibly update the setup script options. The version also - -- determines how to filter the flags to Setup. - -- - -- We first check whether the dependency solver has specified a Cabal version. - -- If it has, we use the solver's version without looking at the installed - -- package index (See issue #3436). Otherwise, we pick the Cabal version by - -- checking 'useCabalSpecVersion', then the saved version, and finally the - -- versions available in the index. - -- - -- The version chosen here must match the one used in 'compileSetupExecutable' - -- (See issue #3433). - cabalLibVersionToUse :: IO (Version, Maybe UnitId - ,SetupScriptOptions) - cabalLibVersionToUse = - case find (hasCabal . snd) (useDependencies options) of - Just (unitId, pkgId) -> do - let version = pkgVersion pkgId - updateSetupScript version bt - writeSetupVersionFile version - return (version, Just unitId, options) - Nothing -> - case useCabalSpecVersion options of - Just version -> do - updateSetupScript version bt - writeSetupVersionFile version - return (version, Nothing, options) - Nothing -> do - savedVer <- savedVersion - case savedVer of - Just version | version `withinRange` useCabalVersion options - -> do updateSetupScript version bt - -- Does the previously compiled setup executable still exist - -- and is it up-to date? - useExisting <- canUseExistingSetup version - if useExisting - then return (version, Nothing, options) - else installedVersion - _ -> installedVersion - where - -- This check duplicates the checks in 'getCachedSetupExecutable' / - -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice - -- because the selected Cabal version may change as a result of this - -- check. 
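
-- As an illustration of the staleness check described above, here is a
-- minimal, self-contained sketch using only System.Directory; the helper
-- names (isNewerThan, canReuseSetupExe) are illustrative and not part of
-- cabal-install, which uses its own existsAndIsMoreRecentThan utility.

import Control.Monad (liftM2)
import System.Directory (doesFileExist, getModificationTime)

-- | True when @target@ exists and is at least as new as @source@.
-- Assumes @source@ itself exists (Setup.hs and the version file do here).
isNewerThan :: FilePath -> FilePath -> IO Bool
isNewerThan target source = do
  exists <- doesFileExist target
  if not exists
    then return False
    else liftM2 (>=) (getModificationTime target) (getModificationTime source)

-- | A previously compiled setup executable is reusable only if it is newer
-- than both the Setup.hs it was built from and the recorded version file.
canReuseSetupExe :: FilePath -> FilePath -> FilePath -> IO Bool
canReuseSetupExe exe setupSrc versionFile =
  liftM2 (&&) (exe `isNewerThan` setupSrc) (exe `isNewerThan` versionFile)
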
- canUseExistingSetup :: Version -> IO Bool - canUseExistingSetup version = - if useCachedSetupExecutable - then do - (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version - doesFileExist cachedSetupProgFile - else - (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs - <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile - - writeSetupVersionFile :: Version -> IO () - writeSetupVersionFile version = - writeFile setupVersionFile (show version ++ "\n") - - hasCabal (PackageIdentifier (PackageName "Cabal") _) = True - hasCabal _ = False - - installedVersion :: IO (Version, Maybe UnitId - ,SetupScriptOptions) - installedVersion = do - (comp, conf, options') <- configureCompiler options - (version, mipkgid, options'') <- installedCabalVersion options' comp conf - updateSetupScript version bt - writeSetupVersionFile version - return (version, mipkgid, options'') - - savedVersion :: IO (Maybe Version) - savedVersion = do - versionString <- readFile setupVersionFile `catchIO` \_ -> return "" - case reads versionString of - [(version,s)] | all isSpace s -> return (Just version) - _ -> return Nothing - - -- | Update a Setup.hs script, creating it if necessary. - updateSetupScript :: Version -> BuildType -> IO () - updateSetupScript _ Custom = do - useHs <- doesFileExist customSetupHs - useLhs <- doesFileExist customSetupLhs - unless (useHs || useLhs) $ die - "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." - let src = (if useHs then customSetupHs else customSetupLhs) - srcNewer <- src `moreRecentFile` setupHs - when srcNewer $ if useHs - then copyFileVerbose verbosity src setupHs - else runSimplePreProcessor ppUnlit src setupHs verbosity - where - customSetupHs = workingDir "Setup.hs" - customSetupLhs = workingDir "Setup.lhs" - - updateSetupScript cabalLibVersion _ = - rewriteFile setupHs (buildTypeScript cabalLibVersion) - - buildTypeScript :: Version -> String - buildTypeScript cabalLibVersion = case bt of - Simple -> "import Distribution.Simple; main = defaultMain\n" - Configure -> "import Distribution.Simple; main = defaultMainWithHooks " - ++ if cabalLibVersion >= Version [1,3,10] [] - then "autoconfUserHooks\n" - else "defaultUserHooks\n" - Make -> "import Distribution.Make; main = defaultMain\n" - Custom -> error "buildTypeScript Custom" - UnknownBuildType _ -> error "buildTypeScript UnknownBuildType" - - installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramConfiguration - -> IO (Version, Maybe UnitId - ,SetupScriptOptions) - installedCabalVersion options' compiler conf = do - index <- maybeGetInstalledPackages options' compiler conf - let cabalDep = Dependency (PackageName "Cabal") (useCabalVersion options') - options'' = options' { usePackageIndex = Just index } - case PackageIndex.lookupDependency index cabalDep of - [] -> die $ "The package '" ++ display (packageName pkg) - ++ "' requires Cabal library version " - ++ display (useCabalVersion options) - ++ " but no suitable version is installed." - pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs - in return (packageVersion ipkginfo - ,Just . installedUnitId $ ipkginfo, options'') - - bestVersion :: (a -> Version) -> [a] -> a - bestVersion f = firstMaximumBy (comparing (preference . f)) - where - -- Like maximumBy, but picks the first maximum element instead of the - -- last. In general, we expect the preferred version to go first in the - -- list. 
For the default case, this has the effect of choosing the version - -- installed in the user package DB instead of the global one. See #1463. - -- - -- Note: firstMaximumBy could be written as just - -- `maximumBy cmp . reverse`, but the problem is that the behaviour of - -- maximumBy is not fully specified in the case when there is not a single - -- greatest element. - firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a - firstMaximumBy _ [] = - error "Distribution.Client.firstMaximumBy: empty list" - firstMaximumBy cmp xs = foldl1' maxBy xs - where - maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } - - preference version = (sameVersion, sameMajorVersion - ,stableVersion, latestVersion) - where - sameVersion = version == cabalVersion - sameMajorVersion = majorVersion version == majorVersion cabalVersion - majorVersion = take 2 . versionBranch - stableVersion = case versionBranch version of - (_:x:_) -> even x - _ -> False - latestVersion = version - - configureCompiler :: SetupScriptOptions - -> IO (Compiler, ProgramConfiguration, SetupScriptOptions) - configureCompiler options' = do - (comp, conf) <- case useCompiler options' of - Just comp -> return (comp, useProgramConfig options') - Nothing -> do (comp, _, conf) <- - configCompilerEx (Just GHC) Nothing Nothing - (useProgramConfig options') verbosity - return (comp, conf) - -- Whenever we need to call configureCompiler, we also need to access the - -- package index, so let's cache it in SetupScriptOptions. - index <- maybeGetInstalledPackages options' comp conf - return (comp, conf, options' { useCompiler = Just comp, - usePackageIndex = Just index, - useProgramConfig = conf }) - - -- | Path to the setup exe cache directory and path to the cached setup - -- executable. - cachedSetupDirAndProg :: SetupScriptOptions -> Version - -> IO (FilePath, FilePath) - cachedSetupDirAndProg options' cabalLibVersion = do - cabalDir <- defaultCabalDir - let setupCacheDir = cabalDir "setup-exe-cache" - cachedSetupProgFile = setupCacheDir - ("setup-" ++ buildTypeString ++ "-" - ++ cabalVersionString ++ "-" - ++ platformString ++ "-" - ++ compilerVersionString) - <.> exeExtension - return (setupCacheDir, cachedSetupProgFile) - where - buildTypeString = show bt - cabalVersionString = "Cabal-" ++ (display cabalLibVersion) - compilerVersionString = display $ - fromMaybe buildCompilerId - (fmap compilerId . useCompiler $ options') - platformString = display platform - - -- | Look up the setup executable in the cache; update the cache if the setup - -- executable is not found. - getCachedSetupExecutable :: SetupScriptOptions - -> Version -> Maybe UnitId - -> IO FilePath - getCachedSetupExecutable options' cabalLibVersion - maybeCabalLibInstalledPkgId = do - (setupCacheDir, cachedSetupProgFile) <- - cachedSetupDirAndProg options' cabalLibVersion - cachedSetupExists <- doesFileExist cachedSetupProgFile - if cachedSetupExists - then debug verbosity $ - "Found cached setup executable: " ++ cachedSetupProgFile - else criticalSection' $ do - -- The cache may have been populated while we were waiting. - cachedSetupExists' <- doesFileExist cachedSetupProgFile - if cachedSetupExists' - then debug verbosity $ - "Found cached setup executable: " ++ cachedSetupProgFile - else do - debug verbosity $ "Setup executable not found in the cache." 
- src <- compileSetupExecutable options' - cabalLibVersion maybeCabalLibInstalledPkgId True - createDirectoryIfMissingVerbose verbosity True setupCacheDir - installExecutableFile verbosity src cachedSetupProgFile - -- Do not strip if we're using GHCJS, since the result may be a script - when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ - Strip.stripExe verbosity platform (useProgramConfig options') - cachedSetupProgFile - return cachedSetupProgFile - where - criticalSection' = fromMaybe id - (fmap criticalSection $ setupCacheLock options') - - -- | If the Setup.hs is out of date wrt the executable then recompile it. - -- Currently this is GHC/GHCJS only. It should really be generalised. - -- - compileSetupExecutable :: SetupScriptOptions - -> Version -> Maybe UnitId -> Bool - -> IO FilePath - compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId - forceCompile = do - setupHsNewer <- setupHs `moreRecentFile` setupProgFile - cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile - let outOfDate = setupHsNewer || cabalVersionNewer - when (outOfDate || forceCompile) $ do - debug verbosity "Setup executable needs to be updated, compiling..." - (compiler, conf, options'') <- configureCompiler options' - let cabalPkgid = PackageIdentifier (PackageName "Cabal") cabalLibVersion - (program, extraOpts) - = case compilerFlavor compiler of - GHCJS -> (ghcjsProgram, ["-build-runner"]) - _ -> (ghcProgram, ["-threaded"]) - cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)]) - maybeCabalLibInstalledPkgId - - -- With 'useDependenciesExclusive' we enforce the deps specified, - -- so only the given ones can be used. Otherwise we allow the use - -- of packages in the ambient environment, and add on a dep on the - -- Cabal library (unless 'useDependencies' already contains one). - -- - -- With 'useVersionMacros' we use a version CPP macros .h file. - -- - -- Both of these options should be enabled for packages that have - -- opted-in and declared a custom-settup stanza. - -- - hasCabal (_, PackageIdentifier (PackageName "Cabal") _) = True - hasCabal _ = False - - selectedDeps | useDependenciesExclusive options' - = useDependencies options' - | otherwise = useDependencies options' ++ - if any hasCabal (useDependencies options') - then [] - else cabalDep - addRenaming (ipid, pid) = (ipid, pid, defaultRenaming) - cppMacrosFile = setupDir "setup_macros.h" - ghcOptions = mempty { - ghcOptVerbosity = Flag verbosity - , ghcOptMode = Flag GhcModeMake - , ghcOptInputFiles = toNubListR [setupHs] - , ghcOptOutputFile = Flag setupProgFile - , ghcOptObjDir = Flag setupDir - , ghcOptHiDir = Flag setupDir - , ghcOptSourcePathClear = Flag True - , ghcOptSourcePath = case bt of - Custom -> toNubListR [workingDir] - _ -> mempty - , ghcOptPackageDBs = usePackageDB options'' - , ghcOptHideAllPackages = Flag (useDependenciesExclusive options') - , ghcOptCabal = Flag (useDependenciesExclusive options') - , ghcOptPackages = toNubListR $ map addRenaming selectedDeps - , ghcOptCppIncludes = toNubListR [ cppMacrosFile - | useVersionMacros options' ] - , ghcOptExtra = toNubListR extraOpts - } - let ghcCmdLine = renderGhcOptions compiler platform ghcOptions - when (useVersionMacros options') $ - rewriteFile cppMacrosFile (generatePackageVersionMacros - [ pid | (_ipid, pid) <- selectedDeps ]) - case useLoggingHandle options of - Nothing -> runDbProgram verbosity program conf ghcCmdLine - - -- If build logging is enabled, redirect compiler output to the log file. 
- (Just logHandle) -> do output <- getDbProgramOutput verbosity program - conf ghcCmdLine - hPutStr logHandle output - return setupProgFile - - invokeSetupScript :: SetupScriptOptions -> FilePath -> [String] -> IO () - invokeSetupScript options' path args = do - info verbosity $ unwords (path : args) - case useLoggingHandle options' of - Nothing -> return () - Just logHandle -> info verbosity $ "Redirecting build log to " - ++ show logHandle - - -- Since useWorkingDir can change the relative path, the path argument must - -- be turned into an absolute path. On some systems, runProcess will take - -- path as relative to the new working directory instead of the current - -- working directory. - path' <- tryCanonicalizePath path - - -- See 'Note: win32 clean hack' above. -#if mingw32_HOST_OS - -- setupProgFile may not exist if we're using a cached program - setupProgFile' <- canonicalizePathNoThrow setupProgFile - let win32CleanHackNeeded = (useWin32CleanHack options') - -- Skip when a cached setup script is used. - && setupProgFile' `equalFilePath` path' - if win32CleanHackNeeded then doWin32CleanHack path' else doInvoke path' -#else - doInvoke path' -#endif - - where - doInvoke path' = do - searchpath <- programSearchPathAsPATHVar - (getProgramSearchPath (useProgramConfig options')) - env <- getEffectiveEnvironment [("PATH", Just searchpath)] - - process <- runProcess path' args - (useWorkingDir options') env Nothing - (useLoggingHandle options') (useLoggingHandle options') - exitCode <- waitForProcess process - unless (exitCode == ExitSuccess) $ exitWith exitCode - -#if mingw32_HOST_OS - doWin32CleanHack path' = do - info verbosity $ "Using the Win32 clean hack." - -- Recursively removes the temp dir on exit. - withTempDirectory verbosity workingDir "cabal-tmp" $ \tmpDir -> - bracket (moveOutOfTheWay tmpDir path') - (maybeRestore path') - doInvoke - - moveOutOfTheWay tmpDir path' = do - let newPath = tmpDir "setup" <.> exeExtension - Win32.moveFile path' newPath - return newPath - - maybeRestore oldPath path' = do - let oldPathDir = takeDirectory oldPath - oldPathDirExists <- doesDirectoryExist oldPathDir - -- 'setup clean' didn't complete, 'dist/setup' still exists. - when oldPathDirExists $ - Win32.moveFile path' oldPath -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/SrcDist.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/SrcDist.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/SrcDist.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,189 +0,0 @@ -{-# LANGUAGE NondecreasingIndentation #-} --- Implements the \"@.\/cabal sdist@\" command, which creates a source --- distribution for this package. That is, packs up the source code --- into a tarball, making use of the corresponding Cabal module. 
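
-- The flow is described further below: the package's Setup program is run
-- with "sdist --output-directory=DIR" to lay out the source tree, and the
-- resulting directory is then archived. A minimal sketch of that first
-- step, assuming an already-compiled setup executable (both paths are
-- illustrative; the real code goes through setupWrapper instead of calling
-- the executable directly):

import System.Process (callProcess)

-- | Ask a pre-built Setup program to stage the source tree into @stagingDir@.
stageSources :: FilePath -> FilePath -> IO ()
stageSources setupExe stagingDir =
  callProcess setupExe ["sdist", "--output-directory=" ++ stagingDir]
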
-module Distribution.Client.SrcDist ( - sdist, - allPackageSourceFiles - ) where - - -import Distribution.Client.SetupWrapper - ( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper ) -import Distribution.Client.Tar (createTarGzFile) - -import Distribution.Package - ( Package(..), packageName ) -import Distribution.PackageDescription - ( PackageDescription ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose, defaultPackageDesc - , warn, die, notice, withTempDirectory ) -import Distribution.Client.Setup - ( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) ) -import Distribution.Simple.Setup - ( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault - , defaultSDistFlags ) -import Distribution.Simple.BuildPaths ( srcPref) -import Distribution.Simple.Program (requireProgram, simpleProgram, programPath) -import Distribution.Simple.Program.Db (emptyProgramDb) -import Distribution.Text ( display ) -import Distribution.Verbosity (Verbosity, normal, lessVerbose) -import Distribution.Version (Version(..), orLaterVersion) - -import Distribution.Client.Utils - (tryFindAddSourcePackageDesc) -import Distribution.Compat.Exception (catchIO) - -import System.FilePath ((), (<.>)) -import Control.Monad (when, unless, liftM) -import System.Directory (doesFileExist, removeFile, canonicalizePath, getTemporaryDirectory) -import System.Process (runProcess, waitForProcess) -import System.Exit (ExitCode(..)) -import Control.Exception (IOException, evaluate) - --- |Create a source distribution. -sdist :: SDistFlags -> SDistExFlags -> IO () -sdist flags exflags = do - pkg <- liftM flattenPackageDescription - (readPackageDescription verbosity =<< defaultPackageDesc verbosity) - let withDir = if not needMakeArchive then (\f -> f tmpTargetDir) - else withTempDirectory verbosity tmpTargetDir "sdist." - -- 'withTempDir' fails if we don't create 'tmpTargetDir'... - when needMakeArchive $ - createDirectoryIfMissingVerbose verbosity True tmpTargetDir - withDir $ \tmpDir -> do - let outDir = if isOutDirectory then tmpDir else tmpDir tarBallName pkg - flags' = (if not needMakeArchive then flags - else flags { sDistDirectory = Flag outDir }) - unless isListSources $ - createDirectoryIfMissingVerbose verbosity True outDir - - -- Run 'setup sdist --output-directory=tmpDir' (or - -- '--list-source'/'--output-directory=someOtherDir') in case we were passed - -- those options. - setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') [] - - -- Unless we were given --list-sources or --output-directory ourselves, - -- create an archive. - when needMakeArchive $ - createArchive verbosity pkg tmpDir distPref - - when isOutDirectory $ - notice verbosity $ "Source directory created: " ++ tmpTargetDir - - when isListSources $ - notice verbosity $ "List of package sources written to file '" - ++ (fromFlag . sDistListSources $ flags) ++ "'" - - where - flagEnabled f = not . null . flagToList . 
f $ flags - - isListSources = flagEnabled sDistListSources - isOutDirectory = flagEnabled sDistDirectory - needMakeArchive = not (isListSources || isOutDirectory) - verbosity = fromFlag (sDistVerbosity flags) - distPref = fromFlag (sDistDistPref flags) - tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags) - setupOpts = defaultSetupScriptOptions { - useDistPref = distPref, - -- The '--output-directory' sdist flag was introduced in Cabal 1.12, and - -- '--list-sources' in 1.17. - useCabalVersion = if isListSources - then orLaterVersion $ Version [1,17,0] [] - else orLaterVersion $ Version [1,12,0] [] - } - format = fromFlag (sDistFormat exflags) - createArchive = case format of - TargzFormat -> createTarGzArchive - ZipFormat -> createZipArchive - -tarBallName :: PackageDescription -> String -tarBallName = display . packageId - --- | Create a tar.gz archive from a tree of source files. -createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath - -> IO () -createTarGzArchive verbosity pkg tmpDir targetPref = do - createTarGzFile tarBallFilePath tmpDir (tarBallName pkg) - notice verbosity $ "Source tarball created: " ++ tarBallFilePath - where - tarBallFilePath = targetPref tarBallName pkg <.> "tar.gz" - --- | Create a zip archive from a tree of source files. -createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath - -> IO () -createZipArchive verbosity pkg tmpDir targetPref = do - let dir = tarBallName pkg - zipfile = targetPref dir <.> "zip" - (zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb - - -- zip has an annoying habit of updating the target rather than creating - -- it from scratch. While that might sound like an optimisation, it doesn't - -- remove files already in the archive that are no longer present in the - -- uncompressed tree. - alreadyExists <- doesFileExist zipfile - when alreadyExists $ removeFile zipfile - - -- We call zip with a different CWD, so have to make the path - -- absolute. Can't just use 'canonicalizePath zipfile' since this function - -- requires its argument to refer to an existing file. - zipfileAbs <- fmap ( dir <.> "zip") . canonicalizePath $ targetPref - - --TODO: use runProgramInvocation, but has to be able to set CWD - hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir] - (Just tmpDir) - Nothing Nothing Nothing Nothing - exitCode <- waitForProcess hnd - unless (exitCode == ExitSuccess) $ - die $ "Generating the zip file failed " - ++ "(zip returned exit code " ++ show exitCode ++ ")" - notice verbosity $ "Source zip archive created: " ++ zipfile - where - zipProgram = simpleProgram "zip" - --- | List all source files of a given add-source dependency. Exits with error if --- something is wrong (e.g. there is no .cabal file in the given directory). -allPackageSourceFiles :: Verbosity -> FilePath -> IO [FilePath] -allPackageSourceFiles verbosity packageDir = do - pkg <- do - let err = "Error reading source files of package." - desc <- tryFindAddSourcePackageDesc packageDir err - flattenPackageDescription `fmap` readPackageDescription verbosity desc - globalTmp <- getTemporaryDirectory - withTempDirectory verbosity globalTmp "cabal-list-sources." 
$ \tempDir -> do - let file = tempDir "cabal-sdist-list-sources" - flags = defaultSDistFlags { - sDistVerbosity = Flag $ if verbosity == normal - then lessVerbose verbosity else verbosity, - sDistListSources = Flag file - } - setupOpts = defaultSetupScriptOptions { - -- 'sdist --list-sources' was introduced in Cabal 1.18. - useCabalVersion = orLaterVersion $ Version [1,18,0] [], - useWorkingDir = Just packageDir - } - - doListSources :: IO [FilePath] - doListSources = do - setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) [] - fmap lines . readFile $ file - - onFailedListSources :: IOException -> IO () - onFailedListSources e = do - warn verbosity $ - "Could not list sources of the package '" - ++ display (packageName pkg) ++ "'." - warn verbosity $ - "Exception was: " ++ show e - - -- Run setup sdist --list-sources=TMPFILE - r <- doListSources `catchIO` (\e -> onFailedListSources e >> return []) - -- Ensure that we've closed the 'readFile' handle before we exit the - -- temporary directory. - _ <- evaluate (length r) - return r diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Targets.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Targets.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Targets.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,818 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Targets --- Copyright : (c) Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified targets ------------------------------------------------------------------------------ -module Distribution.Client.Targets ( - -- * User targets - UserTarget(..), - readUserTargets, - - -- * Package specifiers - PackageSpecifier(..), - pkgSpecifierTarget, - pkgSpecifierConstraints, - - -- * Resolving user targets to package specifiers - resolveUserTargets, - - -- ** Detailed interface - UserTargetProblem(..), - readUserTarget, - reportUserTargetProblems, - expandUserTarget, - - PackageTarget(..), - fetchPackageTarget, - readPackageTarget, - - PackageTargetProblem(..), - reportPackageTargetProblems, - - disambiguatePackageTargets, - disambiguatePackageName, - - -- * User constraints - UserConstraint(..), - userConstraintPackageName, - readUserConstraint, - userToPackageConstraint, - dispFlagAssignment, - parseFlagAssignment, - - ) where - -import Distribution.Package - ( Package(..), PackageName(..) - , PackageIdentifier(..), packageName, packageVersion - , Dependency(Dependency) ) -import Distribution.Client.Types - ( SourcePackage(..), PackageLocation(..), OptionalStanza(..) ) -import Distribution.Client.Dependency.Types - ( PackageConstraint(..), ConstraintSource(..) - , LabeledPackageConstraint(..) 
) - -import qualified Distribution.Client.World as World -import Distribution.Client.PackageIndex (PackageIndex) -import qualified Distribution.Client.PackageIndex as PackageIndex -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.FetchUtils -import Distribution.Client.Utils ( tryFindPackageDesc ) -import Distribution.Client.GlobalFlags - ( RepoContext(..) ) - -import Distribution.PackageDescription - ( GenericPackageDescription, FlagName(..), FlagAssignment ) -import Distribution.PackageDescription.Parse - ( readPackageDescription, parsePackageDescription, ParseResult(..) ) -import Distribution.Version - ( Version(Version), thisVersion, anyVersion, isAnyVersion - , VersionRange ) -import Distribution.Text - ( Text(..), display ) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils - ( die, warn, intercalate, fromUTF8, lowercase, ignoreBOM ) - -import Data.List - ( find, nub ) -import Data.Maybe - ( listToMaybe ) -import Data.Either - ( partitionEithers ) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid - ( Monoid(..) ) -#endif -import qualified Data.Map as Map -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import qualified Distribution.Client.GZipUtils as GZipUtils -import Control.Monad (liftM) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( (+++), (<++) ) -import qualified Distribution.Compat.Semigroup as Semi - ( Semigroup((<>)) ) -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint - ( (<>), (<+>) ) -import Data.Char - ( isSpace, isAlphaNum ) -import System.FilePath - ( takeExtension, dropExtension, takeDirectory, splitPath ) -import System.Directory - ( doesFileExist, doesDirectoryExist ) -import Network.URI - ( URI(..), URIAuth(..), parseAbsoluteURI ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary) - --- ------------------------------------------------------------ --- * User targets --- ------------------------------------------------------------ - --- | Various ways that a user may specify a package or package collection. --- -data UserTarget = - - -- | A partially specified package, identified by name and possibly with - -- an exact version or a version constraint. - -- - -- > cabal install foo - -- > cabal install foo-1.0 - -- > cabal install 'foo < 2' - -- - UserTargetNamed Dependency - - -- | A special virtual package that refers to the collection of packages - -- recorded in the world file that the user specifically installed. - -- - -- > cabal install world - -- - | UserTargetWorld - - -- | A specific package that is unpacked in a local directory, often the - -- current directory. - -- - -- > cabal install . - -- > cabal install ../lib/other - -- - -- * Note: in future, if multiple @.cabal@ files are allowed in a single - -- directory then this will refer to the collection of packages. - -- - | UserTargetLocalDir FilePath - - -- | A specific local unpacked package, identified by its @.cabal@ file. 
- -- - -- > cabal install foo.cabal - -- > cabal install ../lib/other/bar.cabal - -- - | UserTargetLocalCabalFile FilePath - - -- | A specific package that is available as a local tarball file - -- - -- > cabal install dist/foo-1.0.tar.gz - -- > cabal install ../build/baz-1.0.tar.gz - -- - | UserTargetLocalTarball FilePath - - -- | A specific package that is available as a remote tarball file - -- - -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz - -- - | UserTargetRemoteTarball URI - deriving (Show,Eq) - - --- ------------------------------------------------------------ --- * Package specifier --- ------------------------------------------------------------ - --- | A fully or partially resolved reference to a package. --- -data PackageSpecifier pkg = - - -- | A partially specified reference to a package (either source or - -- installed). It is specified by package name and optionally some - -- additional constraints. Use a dependency resolver to pick a specific - -- package satisfying these constraints. - -- - NamedPackage PackageName [PackageConstraint] - - -- | A fully specified source package. - -- - | SpecificSourcePackage pkg - deriving (Eq, Show, Generic) - -instance Binary pkg => Binary (PackageSpecifier pkg) - -pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName -pkgSpecifierTarget (NamedPackage name _) = name -pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg - -pkgSpecifierConstraints :: Package pkg - => PackageSpecifier pkg -> [LabeledPackageConstraint] -pkgSpecifierConstraints (NamedPackage _ constraints) = map toLpc constraints - where - toLpc pc = LabeledPackageConstraint pc ConstraintSourceUserTarget -pkgSpecifierConstraints (SpecificSourcePackage pkg) = - [LabeledPackageConstraint pc ConstraintSourceUserTarget] - where - pc = PackageConstraintVersion (packageName pkg) - (thisVersion (packageVersion pkg)) - --- ------------------------------------------------------------ --- * Parsing and checking user targets --- ------------------------------------------------------------ - -readUserTargets :: Verbosity -> [String] -> IO [UserTarget] -readUserTargets _verbosity targetStrs = do - (problems, targets) <- liftM partitionEithers - (mapM readUserTarget targetStrs) - reportUserTargetProblems problems - return targets - - -data UserTargetProblem - = UserTargetUnexpectedFile String - | UserTargetNonexistantFile String - | UserTargetUnexpectedUriScheme String - | UserTargetUnrecognisedUri String - | UserTargetUnrecognised String - | UserTargetBadWorldPkg - deriving Show - -readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) -readUserTarget targetstr = - case testNamedTargets targetstr of - Just (Dependency (PackageName "world") verrange) - | verrange == anyVersion -> return (Right UserTargetWorld) - | otherwise -> return (Left UserTargetBadWorldPkg) - Just dep -> return (Right (UserTargetNamed dep)) - Nothing -> do - fileTarget <- testFileTargets targetstr - case fileTarget of - Just target -> return target - Nothing -> - case testUriTargets targetstr of - Just target -> return target - Nothing -> return (Left (UserTargetUnrecognised targetstr)) - where - testNamedTargets = readPToMaybe parseDependencyOrPackageId - - testFileTargets filename = do - isDir <- doesDirectoryExist filename - isFile <- doesFileExist filename - parentDirExists <- case takeDirectory filename of - [] -> return False - dir -> doesDirectoryExist dir - let result - | isDir - = Just (Right (UserTargetLocalDir filename)) - - | isFile && 
extensionIsTarGz filename - = Just (Right (UserTargetLocalTarball filename)) - - | isFile && takeExtension filename == ".cabal" - = Just (Right (UserTargetLocalCabalFile filename)) - - | isFile - = Just (Left (UserTargetUnexpectedFile filename)) - - | parentDirExists - = Just (Left (UserTargetNonexistantFile filename)) - - | otherwise - = Nothing - return result - - testUriTargets str = - case parseAbsoluteURI str of - Just uri@URI { - uriScheme = scheme, - uriAuthority = Just URIAuth { uriRegName = host } - } - | scheme /= "http:" && scheme /= "https:" -> - Just (Left (UserTargetUnexpectedUriScheme targetstr)) - - | null host -> - Just (Left (UserTargetUnrecognisedUri targetstr)) - - | otherwise -> - Just (Right (UserTargetRemoteTarball uri)) - _ -> Nothing - - extensionIsTarGz f = takeExtension f == ".gz" - && takeExtension (dropExtension f) == ".tar" - - parseDependencyOrPackageId :: Parse.ReadP r Dependency - parseDependencyOrPackageId = parse - +++ liftM pkgidToDependency parse - where - pkgidToDependency :: PackageIdentifier -> Dependency - pkgidToDependency p = case packageVersion p of - Version [] _ -> Dependency (packageName p) anyVersion - version -> Dependency (packageName p) (thisVersion version) - -readPToMaybe :: Parse.ReadP a a -> String -> Maybe a -readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str - , all isSpace s ] - - -reportUserTargetProblems :: [UserTargetProblem] -> IO () -reportUserTargetProblems problems = do - case [ target | UserTargetUnrecognised target <- problems ] of - [] -> return () - target -> die - $ unlines - [ "Unrecognised target '" ++ name ++ "'." - | name <- target ] - ++ "Targets can be:\n" - ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" - ++ " - the special 'world' target\n" - ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" - ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" - - case [ () | UserTargetBadWorldPkg <- problems ] of - [] -> return () - _ -> die "The special 'world' target does not take any version." - - case [ target | UserTargetNonexistantFile target <- problems ] of - [] -> return () - target -> die - $ unlines - [ "The file does not exist '" ++ name ++ "'." - | name <- target ] - - case [ target | UserTargetUnexpectedFile target <- problems ] of - [] -> return () - target -> die - $ unlines - [ "Unrecognised file target '" ++ name ++ "'." - | name <- target ] - ++ "File targets can be either package tarballs 'pkgname.tar.gz' " - ++ "or cabal files 'pkgname.cabal'." - - case [ target | UserTargetUnexpectedUriScheme target <- problems ] of - [] -> return () - target -> die - $ unlines - [ "URL target not supported '" ++ name ++ "'." - | name <- target ] - ++ "Only 'http://' and 'https://' URLs are supported." - - case [ target | UserTargetUnrecognisedUri target <- problems ] of - [] -> return () - target -> die - $ unlines - [ "Unrecognise URL target '" ++ name ++ "'." - | name <- target ] - - --- ------------------------------------------------------------ --- * Resolving user targets to package specifiers --- ------------------------------------------------------------ - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. They can either be specific packages (local dirs, tarballs etc) --- or they can be named packages (with or without version info). 
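
-- To make the classification above concrete, a purely syntactic sketch of
-- how a command-line target string is recognised. The TargetShape type and
-- targetShape function are illustrative only; the real readUserTarget above
-- additionally consults the filesystem and the dependency parser.

import Data.List (isPrefixOf)
import System.FilePath (takeExtension, dropExtension)

data TargetShape
  = ShapeUrl String         -- http:// or https:// tarball
  | ShapeCabalFile FilePath -- foo.cabal
  | ShapeTarball FilePath   -- foo-1.0.tar.gz
  | ShapeOther String       -- package name/dependency or directory
  deriving Show

targetShape :: String -> TargetShape
targetShape s
  | "http://" `isPrefixOf` s || "https://" `isPrefixOf` s = ShapeUrl s
  | takeExtension s == ".cabal"                           = ShapeCabalFile s
  | takeExtension s == ".gz"
  , takeExtension (dropExtension s) == ".tar"             = ShapeTarball s
  | otherwise                                             = ShapeOther s
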
--- -resolveUserTargets :: Package pkg - => Verbosity - -> RepoContext - -> FilePath - -> PackageIndex pkg - -> [UserTarget] - -> IO [PackageSpecifier SourcePackage] -resolveUserTargets verbosity repoCtxt worldFile available userTargets = do - - -- given the user targets, get a list of fully or partially resolved - -- package references - packageTargets <- mapM (readPackageTarget verbosity) - =<< mapM (fetchPackageTarget verbosity repoCtxt) . concat - =<< mapM (expandUserTarget worldFile) userTargets - - -- users are allowed to give package names case-insensitively, so we must - -- disambiguate named package references - let (problems, packageSpecifiers) = - disambiguatePackageTargets available availableExtra packageTargets - - -- use any extra specific available packages to help us disambiguate - availableExtra = [ packageName pkg - | PackageTargetLocation pkg <- packageTargets ] - - reportPackageTargetProblems verbosity problems - - return packageSpecifiers - - --- ------------------------------------------------------------ --- * Package targets --- ------------------------------------------------------------ - --- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. --- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. --- -data PackageTarget pkg = - PackageTargetNamed PackageName [PackageConstraint] UserTarget - - -- | A package identified by name, but case insensitively, so it needs - -- to be resolved to the right case-sensitive name. - | PackageTargetNamedFuzzy PackageName [PackageConstraint] UserTarget - | PackageTargetLocation pkg - deriving Show - - --- ------------------------------------------------------------ --- * Converting user targets to package targets --- ------------------------------------------------------------ - --- | Given a user-specified target, expand it to a bunch of package targets --- (each of which refers to only one package). --- -expandUserTarget :: FilePath - -> UserTarget - -> IO [PackageTarget (PackageLocation ())] -expandUserTarget worldFile userTarget = case userTarget of - - UserTargetNamed (Dependency name vrange) -> - let constraints = [ PackageConstraintVersion name vrange - | not (isAnyVersion vrange) ] - in return [PackageTargetNamedFuzzy name constraints userTarget] - - UserTargetWorld -> do - worldPkgs <- World.getContents worldFile - --TODO: should we warn if there are no world targets? 
- return [ PackageTargetNamed name constraints userTarget - | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs - , let constraints = [ PackageConstraintVersion name vrange - | not (isAnyVersion vrange) ] - ++ [ PackageConstraintFlags name flags - | not (null flags) ] ] - - UserTargetLocalDir dir -> - return [ PackageTargetLocation (LocalUnpackedPackage dir) ] - - UserTargetLocalCabalFile file -> do - let dir = takeDirectory file - _ <- tryFindPackageDesc dir (localPackageError dir) -- just as a check - return [ PackageTargetLocation (LocalUnpackedPackage dir) ] - - UserTargetLocalTarball tarballFile -> - return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] - - UserTargetRemoteTarball tarballURL -> - return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] - -localPackageError :: FilePath -> String -localPackageError dir = - "Error reading local package.\nCouldn't find .cabal file in: " ++ dir - --- ------------------------------------------------------------ --- * Fetching and reading package targets --- ------------------------------------------------------------ - - --- | Fetch any remote targets so that they can be read. --- -fetchPackageTarget :: Verbosity - -> RepoContext - -> PackageTarget (PackageLocation ()) - -> IO (PackageTarget (PackageLocation FilePath)) -fetchPackageTarget verbosity repoCtxt target = case target of - PackageTargetNamed n cs ut -> return (PackageTargetNamed n cs ut) - PackageTargetNamedFuzzy n cs ut -> return (PackageTargetNamedFuzzy n cs ut) - PackageTargetLocation location -> do - location' <- fetchPackage verbosity repoCtxt (fmap (const Nothing) location) - return (PackageTargetLocation location') - - --- | Given a package target that has been fetched, read the .cabal file. --- --- This only affects targets given by location, named targets are unaffected. --- -readPackageTarget :: Verbosity - -> PackageTarget (PackageLocation FilePath) - -> IO (PackageTarget SourcePackage) -readPackageTarget verbosity target = case target of - - PackageTargetNamed pkgname constraints userTarget -> - return (PackageTargetNamed pkgname constraints userTarget) - - PackageTargetNamedFuzzy pkgname constraints userTarget -> - return (PackageTargetNamedFuzzy pkgname constraints userTarget) - - PackageTargetLocation location -> case location of - - LocalUnpackedPackage dir -> do - pkg <- tryFindPackageDesc dir (localPackageError dir) >>= - readPackageDescription verbosity - return $ PackageTargetLocation $ - SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - packageSource = fmap Just location, - packageDescrOverride = Nothing - } - - LocalTarballPackage tarballFile -> - readTarballPackageTarget location tarballFile tarballFile - - RemoteTarballPackage tarballURL tarballFile -> - readTarballPackageTarget location tarballFile (show tarballURL) - - RepoTarballPackage _repo _pkgid _ -> - error "TODO: readPackageTarget RepoTarballPackage" - -- For repo tarballs this info should be obtained from the index. 
- - where - readTarballPackageTarget location tarballFile tarballOriginalLoc = do - (filename, content) <- extractTarballPackageCabalFile - tarballFile tarballOriginalLoc - case parsePackageDescription' content of - Nothing -> die $ "Could not parse the cabal file " - ++ filename ++ " in " ++ tarballFile - Just pkg -> - return $ PackageTargetLocation $ - SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - packageSource = fmap Just location, - packageDescrOverride = Nothing - } - - extractTarballPackageCabalFile :: FilePath -> String - -> IO (FilePath, BS.ByteString) - extractTarballPackageCabalFile tarballFile tarballOriginalLoc = - either (die . formatErr) return - . check - . accumEntryMap - . Tar.filterEntries isCabalFile - . Tar.read - . GZipUtils.maybeDecompress - =<< BS.readFile tarballFile - where - formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg - - accumEntryMap = Tar.foldlEntries - (\m e -> Map.insert (Tar.entryTarPath e) e m) - Map.empty - - check (Left e) = Left (show e) - check (Right m) = case Map.elems m of - [] -> Left noCabalFile - [file] -> case Tar.entryContent file of - Tar.NormalFile content _ -> Right (Tar.entryPath file, content) - _ -> Left noCabalFile - _files -> Left multipleCabalFiles - where - noCabalFile = "No cabal file found" - multipleCabalFiles = "Multiple cabal files found" - - isCabalFile e = case splitPath (Tar.entryPath e) of - [ _dir, file] -> takeExtension file == ".cabal" - [".", _dir, file] -> takeExtension file == ".cabal" - _ -> False - - parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription - parsePackageDescription' content = - case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of - ParseOk _ pkg -> Just pkg - _ -> Nothing - - --- ------------------------------------------------------------ --- * Checking package targets --- ------------------------------------------------------------ - -data PackageTargetProblem - = PackageNameUnknown PackageName UserTarget - | PackageNameAmbiguous PackageName [PackageName] UserTarget - deriving Show - - --- | Users are allowed to give package names case-insensitively, so we must --- disambiguate named package references. 
--- -disambiguatePackageTargets :: Package pkg' - => PackageIndex pkg' - -> [PackageName] - -> [PackageTarget pkg] - -> ( [PackageTargetProblem] - , [PackageSpecifier pkg] ) -disambiguatePackageTargets availablePkgIndex availableExtra targets = - partitionEithers (map disambiguatePackageTarget targets) - where - disambiguatePackageTarget packageTarget = case packageTarget of - PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) - - PackageTargetNamed pkgname constraints userTarget - | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) - -> Left (PackageNameUnknown pkgname userTarget) - | otherwise -> Right (NamedPackage pkgname constraints) - - PackageTargetNamedFuzzy pkgname constraints userTarget -> - case disambiguatePackageName packageNameEnv pkgname of - None -> Left (PackageNameUnknown - pkgname userTarget) - Ambiguous pkgnames -> Left (PackageNameAmbiguous - pkgname pkgnames userTarget) - Unambiguous pkgname' -> Right (NamedPackage pkgname' constraints') - where - constraints' = map (renamePackageConstraint pkgname') constraints - - -- use any extra specific available packages to help us disambiguate - packageNameEnv :: PackageNameEnv - packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) - (extraPackageNameEnv availableExtra) - - --- | Report problems to the user. That is, if there are any problems --- then raise an exception. -reportPackageTargetProblems :: Verbosity - -> [PackageTargetProblem] -> IO () -reportPackageTargetProblems verbosity problems = do - case [ pkg | PackageNameUnknown pkg originalTarget <- problems - , not (isUserTagetWorld originalTarget) ] of - [] -> return () - pkgs -> die $ unlines - [ "There is no package named '" ++ display name ++ "'. " - | name <- pkgs ] - ++ "You may need to run 'cabal update' to get the latest " - ++ "list of available packages." - - case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of - [] -> return () - ambiguities -> die $ unlines - [ "The package name '" ++ display name - ++ "' is ambiguous. It could be: " - ++ intercalate ", " (map display matches) - | (name, matches) <- ambiguities ] - - case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of - [] -> return () - pkgs -> warn verbosity $ - "The following 'world' packages will be ignored because " - ++ "they refer to packages that cannot be found: " - ++ intercalate ", " (map display pkgs) ++ "\n" - ++ "You can suppress this warning by correcting the world file." - where - isUserTagetWorld UserTargetWorld = True; isUserTagetWorld _ = False - - --- ------------------------------------------------------------ --- * Disambiguating package names --- ------------------------------------------------------------ - -data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a] - --- | Given a package name and a list of matching names, figure out which one it --- might be referring to. If there is an exact case-sensitive match then that's --- ok. If it matches just one package case-insensitively then that's also ok. --- The only problem is if it matches multiple packages case-insensitively, in --- that case it is ambiguous. 
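
-- A small, self-contained illustration of that rule over plain Strings
-- (resolveName and Match are illustrative; the real disambiguatePackageName
-- below works on PackageName and a PackageNameEnv lookup):

import Data.Char (toLower)
import Data.List (nub)

data Match = NoMatch | Exact String | OneOf [String]
  deriving Show

resolveName :: [String] -> String -> Match
resolveName known query =
  case nub [ k | k <- known, map toLower k == map toLower query ] of
    []                   -> NoMatch
    [k]                  -> Exact k
    ks | query `elem` ks -> Exact query   -- exact case-sensitive match wins
       | otherwise       -> OneOf ks      -- genuinely ambiguous

-- For example:
--   resolveName ["HTTP", "http-client"] "http"  ==>  Exact "HTTP"
--   resolveName ["HUnit", "hunit"]      "HUNIT" ==>  OneOf ["HUnit","hunit"]
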
--- -disambiguatePackageName :: PackageNameEnv - -> PackageName - -> MaybeAmbiguous PackageName -disambiguatePackageName (PackageNameEnv pkgNameLookup) name = - case nub (pkgNameLookup name) of - [] -> None - [name'] -> Unambiguous name' - names -> case find (name==) names of - Just name' -> Unambiguous name' - Nothing -> Ambiguous names - - -newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) - -instance Monoid PackageNameEnv where - mempty = PackageNameEnv (const []) - mappend = (Semi.<>) - -instance Semi.Semigroup PackageNameEnv where - PackageNameEnv lookupA <> PackageNameEnv lookupB = - PackageNameEnv (\name -> lookupA name ++ lookupB name) - -indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv -indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup - where - pkgNameLookup (PackageName name) = - map fst (PackageIndex.searchByName pkgIndex name) - -extraPackageNameEnv :: [PackageName] -> PackageNameEnv -extraPackageNameEnv names = PackageNameEnv pkgNameLookup - where - pkgNameLookup (PackageName name) = - [ PackageName name' - | let lname = lowercase name - , PackageName name' <- names - , lowercase name' == lname ] - - --- ------------------------------------------------------------ --- * Package constraints --- ------------------------------------------------------------ - -data UserConstraint = - UserConstraintVersion PackageName VersionRange - | UserConstraintInstalled PackageName - | UserConstraintSource PackageName - | UserConstraintFlags PackageName FlagAssignment - | UserConstraintStanzas PackageName [OptionalStanza] - deriving (Eq, Show, Generic) - -instance Binary UserConstraint - -userConstraintPackageName :: UserConstraint -> PackageName -userConstraintPackageName uc = case uc of - UserConstraintVersion name _ -> name - UserConstraintInstalled name -> name - UserConstraintSource name -> name - UserConstraintFlags name _ -> name - UserConstraintStanzas name _ -> name - -userToPackageConstraint :: UserConstraint -> PackageConstraint --- At the moment, the types happen to be directly equivalent -userToPackageConstraint uc = case uc of - UserConstraintVersion name ver -> PackageConstraintVersion name ver - UserConstraintInstalled name -> PackageConstraintInstalled name - UserConstraintSource name -> PackageConstraintSource name - UserConstraintFlags name flags -> PackageConstraintFlags name flags - UserConstraintStanzas name stanzas -> PackageConstraintStanzas name stanzas - -renamePackageConstraint :: PackageName -> PackageConstraint -> PackageConstraint -renamePackageConstraint name pc = case pc of - PackageConstraintVersion _ ver -> PackageConstraintVersion name ver - PackageConstraintInstalled _ -> PackageConstraintInstalled name - PackageConstraintSource _ -> PackageConstraintSource name - PackageConstraintFlags _ flags -> PackageConstraintFlags name flags - PackageConstraintStanzas _ stanzas -> PackageConstraintStanzas name stanzas - -readUserConstraint :: String -> Either String UserConstraint -readUserConstraint str = - case readPToMaybe parse str of - Nothing -> Left msgCannotParse - Just c -> Right c - where - msgCannotParse = - "expected a package name followed by a constraint, which is " - ++ "either a version range, 'installed', 'source' or flags" - -instance Text UserConstraint where - disp (UserConstraintVersion pkgname verrange) = disp pkgname - <+> disp verrange - disp (UserConstraintInstalled pkgname) = disp pkgname - <+> Disp.text "installed" - disp (UserConstraintSource pkgname) = disp pkgname - <+> Disp.text "source" - disp 
(UserConstraintFlags pkgname flags) = disp pkgname - <+> dispFlagAssignment flags - disp (UserConstraintStanzas pkgname stanzas) = disp pkgname - <+> dispStanzas stanzas - where - dispStanzas = Disp.hsep . map dispStanza - dispStanza TestStanzas = Disp.text "test" - dispStanza BenchStanzas = Disp.text "bench" - - parse = parse >>= parseConstraint - where - parseConstraint pkgname = - ((parse >>= return . UserConstraintVersion pkgname) - +++ (do skipSpaces1 - _ <- Parse.string "installed" - return (UserConstraintInstalled pkgname)) - +++ (do skipSpaces1 - _ <- Parse.string "source" - return (UserConstraintSource pkgname)) - +++ (do skipSpaces1 - _ <- Parse.string "test" - return (UserConstraintStanzas pkgname [TestStanzas])) - +++ (do skipSpaces1 - _ <- Parse.string "bench" - return (UserConstraintStanzas pkgname [BenchStanzas]))) - <++ (do skipSpaces1 - flags <- parseFlagAssignment - return (UserConstraintFlags pkgname flags)) - ---TODO: [code cleanup] move these somewhere else -dispFlagAssignment :: FlagAssignment -> Disp.Doc -dispFlagAssignment = Disp.hsep . map dispFlagValue - where - dispFlagValue (f, True) = Disp.char '+' <> dispFlagName f - dispFlagValue (f, False) = Disp.char '-' <> dispFlagName f - dispFlagName (FlagName f) = Disp.text f - -parseFlagAssignment :: Parse.ReadP r FlagAssignment -parseFlagAssignment = Parse.sepBy1 parseFlagValue skipSpaces1 - where - parseFlagValue = - (do Parse.optional (Parse.char '+') - f <- parseFlagName - return (f, True)) - +++ (do _ <- Parse.char '-' - f <- parseFlagName - return (f, False)) - parseFlagName = liftM (FlagName . lowercase) ident - - ident :: Parse.ReadP r String - ident = Parse.munch1 identChar >>= \s -> check s >> return s - where - identChar c = isAlphaNum c || c == '_' || c == '-' - check ('-':_) = Parse.pfail - check _ = return () - -skipSpaces1 :: Parse.ReadP r () -skipSpaces1 = Parse.satisfy isSpace >> Parse.skipSpaces - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Tar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Tar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Tar.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,110 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Tar --- Copyright : (c) 2007 Bjorn Bringert, --- 2008 Andrea Vezzosi, --- 2008-2009 Duncan Coutts --- License : BSD3 --- --- Maintainer : duncan@community.haskell.org --- Portability : portable --- --- Reading, writing and manipulating \"@.tar@\" archive files. 
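
-- Concretely, creating such an archive with the tar and zlib bindings comes
-- down to pack/write/compress; this sketch mirrors createTarGzFile below
-- (packTarGz and its arguments are illustrative, not part of the module):

import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import qualified Data.ByteString.Lazy as BS
import System.FilePath ((</>), (<.>))

-- | Archive @base/dir@ as @dir.tar.gz@ inside @targetDir@ and return the
-- path of the tarball that was written.
packTarGz :: FilePath -> FilePath -> FilePath -> IO FilePath
packTarGz base dir targetDir = do
  entries <- Tar.pack base [dir]
  let tarball = targetDir </> dir <.> "tar.gz"
  BS.writeFile tarball (GZip.compress (Tar.write entries))
  return tarball
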
--- ------------------------------------------------------------------------------ -module Distribution.Client.Tar ( - -- * @tar.gz@ operations - createTarGzFile, - extractTarGzFile, - - -- * Other local utils - buildTreeRefTypeCode, - buildTreeSnapshotTypeCode, - isBuildTreeRefTypeCode, - filterEntries, - filterEntriesM, - entriesToList, - ) where - -import qualified Data.ByteString.Lazy as BS -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Check as Tar -import qualified Codec.Compression.GZip as GZip -import qualified Distribution.Client.GZipUtils as GZipUtils - -import Control.Exception (Exception(..), throw) - --- --- * High level operations --- - -createTarGzFile :: FilePath -- ^ Full Tarball path - -> FilePath -- ^ Base directory - -> FilePath -- ^ Directory to archive, relative to base dir - -> IO () -createTarGzFile tar base dir = - BS.writeFile tar . GZip.compress . Tar.write =<< Tar.pack base [dir] - -extractTarGzFile :: FilePath -- ^ Destination directory - -> FilePath -- ^ Expected subdir (to check for tarbombs) - -> FilePath -- ^ Tarball - -> IO () -extractTarGzFile dir expected tar = - Tar.unpack dir . Tar.checkTarbomb expected . Tar.read - . GZipUtils.maybeDecompress =<< BS.readFile tar - -instance (Exception a, Exception b) => Exception (Either a b) where - toException (Left e) = toException e - toException (Right e) = toException e - - fromException e = - case fromException e of - Just e' -> Just (Left e') - Nothing -> case fromException e of - Just e' -> Just (Right e') - Nothing -> Nothing - - --- | Type code for the local build tree reference entry type. We don't use the --- symbolic link entry type because it allows only 100 ASCII characters for the --- path. -buildTreeRefTypeCode :: Tar.TypeCode -buildTreeRefTypeCode = 'C' - --- | Type code for the local build tree snapshot entry type. -buildTreeSnapshotTypeCode :: Tar.TypeCode -buildTreeSnapshotTypeCode = 'S' - --- | Is this a type code for a build tree reference? -isBuildTreeRefTypeCode :: Tar.TypeCode -> Bool -isBuildTreeRefTypeCode typeCode - | (typeCode == buildTreeRefTypeCode - || typeCode == buildTreeSnapshotTypeCode) = True - | otherwise = False - -filterEntries :: (Tar.Entry -> Bool) -> Tar.Entries e -> Tar.Entries e -filterEntries p = - Tar.foldEntries - (\e es -> if p e then Tar.Next e es else es) - Tar.Done - Tar.Fail - -filterEntriesM :: Monad m => (Tar.Entry -> m Bool) - -> Tar.Entries e -> m (Tar.Entries e) -filterEntriesM p = - Tar.foldEntries - (\entry rest -> do - keep <- p entry - xs <- rest - if keep - then return (Tar.Next entry xs) - else return xs) - (return Tar.Done) - (return . 
Tar.Fail) - -entriesToList :: Exception e => Tar.Entries e -> [Tar.Entry] -entriesToList = Tar.foldEntries (:) [] throw - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Types.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,371 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE BangPatterns #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Types --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Various common data types for the entire cabal-install system ------------------------------------------------------------------------------ -module Distribution.Client.Types where - -import Distribution.Package - ( PackageName, PackageId, Package(..) - , UnitId(..), mkUnitId - , HasUnitId(..), PackageInstalled(..) ) -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo ) -import Distribution.PackageDescription - ( Benchmark(..), GenericPackageDescription(..), FlagAssignment - , TestSuite(..) ) -import Distribution.PackageDescription.Configuration - ( mapTreeData ) -import Distribution.Client.PackageIndex - ( PackageIndex ) -import Distribution.Client.ComponentDeps - ( ComponentDeps ) -import qualified Distribution.Client.ComponentDeps as CD -import Distribution.Version - ( VersionRange ) -import Distribution.Text (display) - -import Data.Map (Map) -import Network.URI (URI(..), URIAuth(..), nullURI) -import Data.ByteString.Lazy (ByteString) -import Control.Exception - ( SomeException ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary(..)) - - -newtype Username = Username { unUsername :: String } -newtype Password = Password { unPassword :: String } - --- | This is the information we get from a @00-index.tar.gz@ hackage index. --- -data SourcePackageDb = SourcePackageDb { - packageIndex :: PackageIndex SourcePackage, - packagePreferences :: Map PackageName VersionRange -} - deriving (Eq, Generic) - -instance Binary SourcePackageDb - --- ------------------------------------------------------------ --- * Various kinds of information about packages --- ------------------------------------------------------------ - --- | Within Cabal the library we no longer have a @InstalledPackageId@ type. --- That's because it deals with the compilers' notion of a registered library, --- and those really are libraries not packages. Those are now named units. --- --- The package management layer does however deal with installed packages, as --- whole packages not just as libraries. So we do still need a type for --- installed package ids. At the moment however we track instaled packages via --- their primary library, which is a unit id. In future this may change --- slightly and we may distinguish these two types and have an explicit --- conversion when we register units with the compiler. 
--- -type InstalledPackageId = UnitId - -installedPackageId :: HasUnitId pkg => pkg -> InstalledPackageId -installedPackageId = installedUnitId - --- | Subclass of packages that have specific versioned dependencies. --- --- So for example a not-yet-configured package has dependencies on version --- ranges, not specific versions. A configured or an already installed package --- depends on exact versions. Some operations or data structures (like --- dependency graphs) only make sense on this subclass of package types. --- -class Package pkg => PackageFixedDeps pkg where - depends :: pkg -> ComponentDeps [UnitId] - -instance PackageFixedDeps InstalledPackageInfo where - depends = CD.fromInstalled . installedDepends - - --- | In order to reuse the implementation of PackageIndex which relies on --- 'UnitId', we need to be able to synthesize these IDs prior --- to installation. Eventually, we'll move to a representation of --- 'UnitId' which can be properly computed before compilation --- (of course, it's a bit of a misnomer since the packages are not actually --- installed yet.) In any case, we'll synthesize temporary installed package --- IDs to use as keys during install planning. These should never be written --- out! Additionally, they need to be guaranteed unique within the install --- plan. -fakeUnitId :: PackageId -> UnitId -fakeUnitId = mkUnitId . (".fake."++) . display - --- | A 'ConfiguredPackage' is a not-yet-installed package along with the --- total configuration information. The configuration information is total in --- the sense that it provides all the configuration information and so the --- final configure process will be independent of the environment. --- -data ConfiguredPackage = ConfiguredPackage - SourcePackage -- package info, including repo - FlagAssignment -- complete flag assignment for the package - [OptionalStanza] -- list of enabled optional stanzas for the package - (ComponentDeps [ConfiguredId]) - -- set of exact dependencies (installed or source). - -- These must be consistent with the 'buildDepends' - -- in the 'PackageDescription' that you'd get by - -- applying the flag assignment and optional stanzas. - deriving (Eq, Show, Generic) - -instance Binary ConfiguredPackage - --- | A ConfiguredId is a package ID for a configured package. --- --- Once we configure a source package we know it's UnitId --- (at least, in principle, even if we have to fake it currently). It is still --- however useful in lots of places to also know the source ID for the package. --- We therefore bundle the two. --- --- An already installed package of course is also "configured" (all it's --- configuration parameters and dependencies have been specified). --- --- TODO: I wonder if it would make sense to promote this datatype to Cabal --- and use it consistently instead of UnitIds? -data ConfiguredId = ConfiguredId { - confSrcId :: PackageId - , confInstId :: UnitId - } - deriving (Eq, Generic) - -instance Binary ConfiguredId - -instance Show ConfiguredId where - show = show . confSrcId - -instance Package ConfiguredId where - packageId = confSrcId - -instance HasUnitId ConfiguredId where - installedUnitId = confInstId - -instance Package ConfiguredPackage where - packageId (ConfiguredPackage pkg _ _ _) = packageId pkg - -instance PackageFixedDeps ConfiguredPackage where - depends (ConfiguredPackage _ _ _ deps) = fmap (map confInstId) deps - -instance HasUnitId ConfiguredPackage where - installedUnitId = fakeUnitId . 
packageId - --- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be --- installed already, hence itself ready to be installed. -data GenericReadyPackage srcpkg ipkg - = ReadyPackage - srcpkg -- see 'ConfiguredPackage'. - (ComponentDeps [ipkg]) -- Installed dependencies. - deriving (Eq, Show, Generic) - -type ReadyPackage = GenericReadyPackage ConfiguredPackage InstalledPackageInfo - -instance Package srcpkg => Package (GenericReadyPackage srcpkg ipkg) where - packageId (ReadyPackage srcpkg _deps) = packageId srcpkg - -instance (Package srcpkg, HasUnitId ipkg) => - PackageFixedDeps (GenericReadyPackage srcpkg ipkg) where - depends (ReadyPackage _ deps) = fmap (map installedUnitId) deps - -instance HasUnitId srcpkg => - HasUnitId (GenericReadyPackage srcpkg ipkg) where - installedUnitId (ReadyPackage pkg _) = installedUnitId pkg - -instance (Binary srcpkg, Binary ipkg) => Binary (GenericReadyPackage srcpkg ipkg) - - --- | A package description along with the location of the package sources. --- -data SourcePackage = SourcePackage { - packageInfoId :: PackageId, - packageDescription :: GenericPackageDescription, - packageSource :: PackageLocation (Maybe FilePath), - packageDescrOverride :: PackageDescriptionOverride - } - deriving (Eq, Show, Generic) - -instance Binary SourcePackage - --- | We sometimes need to override the .cabal file in the tarball with --- the newer one from the package index. -type PackageDescriptionOverride = Maybe ByteString - -instance Package SourcePackage where packageId = packageInfoId - -data OptionalStanza - = TestStanzas - | BenchStanzas - deriving (Eq, Ord, Enum, Bounded, Show, Generic) - -instance Binary OptionalStanza - -enableStanzas - :: [OptionalStanza] - -> GenericPackageDescription - -> GenericPackageDescription -enableStanzas stanzas gpkg = gpkg - { condBenchmarks = flagBenchmarks $ condBenchmarks gpkg - , condTestSuites = flagTests $ condTestSuites gpkg - } - where - enableTest t = t { testEnabled = TestStanzas `elem` stanzas } - enableBenchmark bm = bm { benchmarkEnabled = BenchStanzas `elem` stanzas } - flagBenchmarks = map (\(n, bm) -> (n, mapTreeData enableBenchmark bm)) - flagTests = map (\(n, t) -> (n, mapTreeData enableTest t)) - --- ------------------------------------------------------------ --- * Package locations and repositories --- ------------------------------------------------------------ - -data PackageLocation local = - - -- | An unpacked package in the given dir, or current dir - LocalUnpackedPackage FilePath - - -- | A package as a tarball that's available as a local tarball - | LocalTarballPackage FilePath - - -- | A package as a tarball from a remote URI - | RemoteTarballPackage URI local - - -- | A package available as a tarball from a repository. - -- - -- It may be from a local repository or from a remote repository, with a - -- locally cached copy. 
ie a package available from hackage - | RepoTarballPackage Repo PackageId local - ---TODO: --- * add support for darcs and other SCM style remote repos with a local cache --- | ScmPackage - deriving (Show, Functor, Eq, Ord, Generic) - -instance Binary local => Binary (PackageLocation local) - --- note, network-uri-2.6.0.3+ provide a Generic instance but earlier --- versions do not, so we use manual Binary instances here -instance Binary URI where - put (URI a b c d e) = do put a; put b; put c; put d; put e - get = do !a <- get; !b <- get; !c <- get; !d <- get; !e <- get - return (URI a b c d e) - -instance Binary URIAuth where - put (URIAuth a b c) = do put a; put b; put c - get = do !a <- get; !b <- get; !c <- get; return (URIAuth a b c) - -data RemoteRepo = - RemoteRepo { - remoteRepoName :: String, - remoteRepoURI :: URI, - - -- | Enable secure access? - -- - -- 'Nothing' here represents "whatever the default is"; this is important - -- to allow for a smooth transition from opt-in to opt-out security - -- (once we switch to opt-out, all access to the central Hackage - -- repository should be secure by default) - remoteRepoSecure :: Maybe Bool, - - -- | Root key IDs (for bootstrapping) - remoteRepoRootKeys :: [String], - - -- | Threshold for verification during bootstrapping - remoteRepoKeyThreshold :: Int, - - -- | Normally a repo just specifies an HTTP or HTTPS URI, but as a - -- special case we may know a repo supports both and want to try HTTPS - -- if we can, but still allow falling back to HTTP. - -- - -- This field is not currently stored in the config file, but is filled - -- in automagically for known repos. - remoteRepoShouldTryHttps :: Bool - } - - deriving (Show, Eq, Ord, Generic) - -instance Binary RemoteRepo - --- | Construct a partial 'RemoteRepo' value to fold the field parser list over. -emptyRemoteRepo :: String -> RemoteRepo -emptyRemoteRepo name = RemoteRepo name nullURI Nothing [] 0 False - --- | Different kinds of repositories --- --- NOTE: It is important that this type remains serializable. -data Repo = - -- | Local repositories - RepoLocal { - repoLocalDir :: FilePath - } - - -- | Standard (unsecured) remote repositores - | RepoRemote { - repoRemote :: RemoteRepo - , repoLocalDir :: FilePath - } - - -- | Secure repositories - -- - -- Although this contains the same fields as 'RepoRemote', we use a separate - -- constructor to avoid confusing the two. - -- - -- Not all access to a secure repo goes through the hackage-security - -- library currently; code paths that do not still make use of the - -- 'repoRemote' and 'repoLocalDir' fields directly. 
- | RepoSecure { - repoRemote :: RemoteRepo - , repoLocalDir :: FilePath - } - deriving (Show, Eq, Ord, Generic) - -instance Binary Repo - --- | Check if this is a remote repo -maybeRepoRemote :: Repo -> Maybe RemoteRepo -maybeRepoRemote (RepoLocal _localDir) = Nothing -maybeRepoRemote (RepoRemote r _localDir) = Just r -maybeRepoRemote (RepoSecure r _localDir) = Just r - --- ------------------------------------------------------------ --- * Build results --- ------------------------------------------------------------ - -type BuildResult = Either BuildFailure BuildSuccess -data BuildFailure = PlanningFailed - | DependentFailed PackageId - | DownloadFailed SomeException - | UnpackFailed SomeException - | ConfigureFailed SomeException - | BuildFailed SomeException - | TestsFailed SomeException - | InstallFailed SomeException - deriving (Show, Generic) -data BuildSuccess = BuildOk DocsResult TestsResult - (Maybe InstalledPackageInfo) - deriving (Show, Generic) - -data DocsResult = DocsNotTried | DocsFailed | DocsOk - deriving (Show, Generic) -data TestsResult = TestsNotTried | TestsOk - deriving (Show, Generic) - -instance Binary BuildFailure -instance Binary BuildSuccess -instance Binary DocsResult -instance Binary TestsResult - ---FIXME: this is a total cheat -instance Binary SomeException where - put _ = return () - get = fail "cannot serialise exceptions" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Update.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Update.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Update.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Update.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,88 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Update --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- ------------------------------------------------------------------------------ -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.Update - ( update - ) where - -import Distribution.Client.Types - ( Repo(..), RemoteRepo(..), maybeRepoRemote ) -import Distribution.Client.HttpUtils - ( DownloadResult(..) ) -import Distribution.Client.FetchUtils - ( downloadIndex ) -import Distribution.Client.IndexUtils - ( updateRepoIndexCache, Index(..) ) -import Distribution.Client.JobControl - ( newParallelJobControl, spawnJob, collectJob ) -import Distribution.Client.Setup - ( RepoContext(..) ) -import Distribution.Verbosity - ( Verbosity ) - -import Distribution.Simple.Utils - ( writeFileAtomic, warn, notice ) - -import qualified Data.ByteString.Lazy as BS -import Distribution.Client.GZipUtils (maybeDecompress) -import System.FilePath (dropExtension) -import Data.Maybe (catMaybes) -import Data.Time (getCurrentTime) - -import qualified Hackage.Security.Client as Sec - --- | 'update' downloads the package list from all known servers -update :: Verbosity -> RepoContext -> IO () -update verbosity repoCtxt | null (repoContextRepos repoCtxt) = do - warn verbosity $ "No remote package servers have been specified. Usually " - ++ "you would have one specified in the config file." 
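-- [Editor's illustrative sketch; not part of the patch above.]  The 'update'
-- clause that follows narrows the configured repos to the remote ones via
-- @catMaybes (map maybeRepoRemote repos)@, i.e. a 'Data.Maybe.mapMaybe'.  The
-- types here are hypothetical stand-ins for 'Repo'/'RemoteRepo', used only to
-- show the selection pattern.

import Data.Maybe (mapMaybe)

data DemoRepo
  = DemoLocal  FilePath         -- ^ local repository directory
  | DemoRemote String FilePath  -- ^ remote repository name plus local cache dir

-- | Analogue of 'maybeRepoRemote': keep only the remote entries.
demoRemoteName :: DemoRepo -> Maybe String
demoRemoteName (DemoLocal _)    = Nothing
demoRemoteName (DemoRemote n _) = Just n

-- | Analogue of @catMaybes (map maybeRepoRemote repos)@.
demoRemoteNames :: [DemoRepo] -> [String]
demoRemoteNames = mapMaybe demoRemoteName
-- demoRemoteNames [DemoLocal "/tmp/repo", DemoRemote "hackage" "/cache"]
--   == ["hackage"]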
-update verbosity repoCtxt = do - jobCtrl <- newParallelJobControl - let repos = repoContextRepos repoCtxt - remoteRepos = catMaybes (map maybeRepoRemote repos) - case remoteRepos of - [] -> return () - [remoteRepo] -> - notice verbosity $ "Downloading the latest package list from " - ++ remoteRepoName remoteRepo - _ -> notice verbosity . unlines - $ "Downloading the latest package lists from: " - : map (("- " ++) . remoteRepoName) remoteRepos - mapM_ (spawnJob jobCtrl . updateRepo verbosity repoCtxt) repos - mapM_ (\_ -> collectJob jobCtrl) repos - -updateRepo :: Verbosity -> RepoContext -> Repo -> IO () -updateRepo verbosity repoCtxt repo = do - transport <- repoContextGetTransport repoCtxt - case repo of - RepoLocal{..} -> return () - RepoRemote{..} -> do - downloadResult <- downloadIndex transport verbosity repoRemote repoLocalDir - case downloadResult of - FileAlreadyInCache -> return () - FileDownloaded indexPath -> do - writeFileAtomic (dropExtension indexPath) . maybeDecompress - =<< BS.readFile indexPath - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) - RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do - ce <- if repoContextIgnoreExpiry repoCtxt - then Just `fmap` getCurrentTime - else return Nothing - updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce - -- Update cabal's internal index as well so that it's not out of sync - -- (If all access to the cache goes through hackage-security this can go) - case updated of - Sec.NoUpdates -> - return () - Sec.HasUpdates -> - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Upload.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Upload.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Upload.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Upload.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,163 +0,0 @@ -module Distribution.Client.Upload (check, upload, uploadDoc, report) where - -import Distribution.Client.Types ( Username(..), Password(..) - , RemoteRepo(..), maybeRepoRemote ) -import Distribution.Client.HttpUtils - ( HttpTransport(..), remoteRepoTryUpgradeToHttps ) -import Distribution.Client.Setup - ( RepoContext(..) 
) - -import Distribution.Simple.Utils (notice, warn, info, die) -import Distribution.Verbosity (Verbosity) -import Distribution.Text (display) -import Distribution.Client.Config - -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import qualified Distribution.Client.BuildReports.Upload as BuildReport - -import Network.URI (URI(uriPath), parseURI) -import Network.HTTP (Header(..), HeaderName(..)) - -import System.IO (hFlush, stdin, stdout, hGetEcho, hSetEcho) -import System.Exit (exitFailure) -import Control.Exception (bracket) -import System.FilePath ((), takeExtension, takeFileName) -import qualified System.FilePath.Posix as FilePath.Posix (()) -import System.Directory -import Control.Monad (forM_, when) -import Data.Maybe (catMaybes) - -type Auth = Maybe (String, String) - -checkURI :: URI -Just checkURI = parseURI $ "http://hackage.haskell.org/cgi-bin/" - ++ "hackage-scripts/check-pkg" - -upload :: Verbosity -> RepoContext - -> Maybe Username -> Maybe Password -> [FilePath] - -> IO () -upload verbosity repoCtxt mUsername mPassword paths = do - let repos = repoContextRepos repoCtxt - transport <- repoContextGetTransport repoCtxt - targetRepo <- - case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of - [] -> die "Cannot upload. No remote repositories are configured." - rs -> remoteRepoTryUpgradeToHttps transport (last rs) - let targetRepoURI = remoteRepoURI targetRepo - rootIfEmpty x = if null x then "/" else x - uploadURI = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) - FilePath.Posix. "upload" - } - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - let auth = Just (username,password) - forM_ paths $ \path -> do - notice verbosity $ "Uploading " ++ path ++ "... " - handlePackage transport verbosity uploadURI auth path - -uploadDoc :: Verbosity -> RepoContext - -> Maybe Username -> Maybe Password -> FilePath - -> IO () -uploadDoc verbosity repoCtxt mUsername mPassword path = do - let repos = repoContextRepos repoCtxt - transport <- repoContextGetTransport repoCtxt - targetRepo <- - case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of - [] -> die $ "Cannot upload. No remote repositories are configured." - rs -> remoteRepoTryUpgradeToHttps transport (last rs) - let targetRepoURI = remoteRepoURI targetRepo - rootIfEmpty x = if null x then "/" else x - uploadURI = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) - FilePath.Posix. "package/" ++ pkgid ++ "/docs" - } - (reverseSuffix, reversePkgid) = break (== '-') - (reverse (takeFileName path)) - pkgid = reverse $ tail reversePkgid - when (reverse reverseSuffix /= "docs.tar.gz" - || null reversePkgid || head reversePkgid /= '-') $ - die "Expected a file name matching the pattern -docs.tar.gz" - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - - let auth = Just (username,password) - headers = - [ Header HdrContentType "application/x-tar" - , Header HdrContentEncoding "gzip" - ] - notice verbosity $ "Uploading documentation " ++ path ++ "... 
" - resp <- putHttpFile transport verbosity uploadURI path auth headers - case resp of - (200,_) -> - notice verbosity "Ok" - (code,err) -> do - notice verbosity $ "Error uploading documentation " - ++ path ++ ": " - ++ "http code " ++ show code ++ "\n" - ++ err - exitFailure - -promptUsername :: IO Username -promptUsername = do - putStr "Hackage username: " - hFlush stdout - fmap Username getLine - -promptPassword :: IO Password -promptPassword = do - putStr "Hackage password: " - hFlush stdout - -- save/restore the terminal echoing status - passwd <- bracket (hGetEcho stdin) (hSetEcho stdin) $ \_ -> do - hSetEcho stdin False -- no echoing for entering the password - fmap Password getLine - putStrLn "" - return passwd - -report :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IO () -report verbosity repoCtxt mUsername mPassword = do - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - let auth = (username, password) - repos = repoContextRepos repoCtxt - remoteRepos = catMaybes (map maybeRepoRemote repos) - forM_ remoteRepos $ \remoteRepo -> - do dotCabal <- defaultCabalDir - let srcDir = dotCabal "reports" remoteRepoName remoteRepo - -- We don't want to bomb out just because we haven't built any packages - -- from this repo yet. - srcExists <- doesDirectoryExist srcDir - when srcExists $ do - contents <- getDirectoryContents srcDir - forM_ (filter (\c -> takeExtension c ==".log") contents) $ \logFile -> - do inp <- readFile (srcDir logFile) - let (reportStr, buildLog) = read inp :: (String,String) - case BuildReport.parse reportStr of - Left errs -> warn verbosity $ "Errors: " ++ errs -- FIXME - Right report' -> - do info verbosity $ "Uploading report for " - ++ display (BuildReport.package report') - BuildReport.uploadReports verbosity repoCtxt auth - (remoteRepoURI remoteRepo) [(report', Just buildLog)] - return () - -check :: Verbosity -> RepoContext -> [FilePath] -> IO () -check verbosity repoCtxt paths = do - transport <- repoContextGetTransport repoCtxt - forM_ paths $ \path -> do - notice verbosity $ "Checking " ++ path ++ "... " - handlePackage transport verbosity checkURI Nothing path - -handlePackage :: HttpTransport -> Verbosity -> URI -> Auth - -> FilePath -> IO () -handlePackage transport verbosity uri auth path = - do resp <- postHttpFile transport verbosity uri path auth - case resp of - (200,_) -> - notice verbosity "Ok" - (code,err) -> do - notice verbosity $ "Error uploading " ++ path ++ ": " - ++ "http code " ++ show code ++ "\n" - ++ err - exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils/Json.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils/Json.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils/Json.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils/Json.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - --- | Minimal JSON / RFC 7159 support --- --- The API is heavily inspired by @aeson@'s API but puts emphasis on --- simplicity rather than performance. The 'ToJSON' instances are --- intended to have an encoding compatible with @aeson@'s encoding. --- -module Distribution.Client.Utils.Json - ( Value(..) 
- , Object, object, Pair, (.=) - , encodeToString - , encodeToBuilder - , ToJSON(toJSON) - ) - where - -import Data.Char -import Data.Int -import Data.String -import Data.Word -import Data.List -import Data.Monoid - -import Data.ByteString.Builder (Builder) -import qualified Data.ByteString.Builder as BB - --- TODO: We may want to replace 'String' with 'Text' or 'ByteString' - --- | A JSON value represented as a Haskell value. -data Value = Object !Object - | Array [Value] - | String String - | Number !Double - | Bool !Bool - | Null - deriving (Eq, Read, Show) - --- | A key\/value pair for an 'Object' -type Pair = (String, Value) - --- | A JSON \"object\" (key/value map). -type Object = [Pair] - -infixr 8 .= - --- | A key-value pair for encoding a JSON object. -(.=) :: ToJSON v => String -> v -> Pair -k .= v = (k, toJSON v) - --- | Create a 'Value' from a list of name\/value 'Pair's. -object :: [Pair] -> Value -object = Object - -instance IsString Value where - fromString = String - - --- | A type that can be converted to JSON. -class ToJSON a where - -- | Convert a Haskell value to a JSON-friendly intermediate type. - toJSON :: a -> Value - -instance ToJSON () where - toJSON () = Array [] - -instance ToJSON Value where - toJSON = id - -instance ToJSON Bool where - toJSON = Bool - -instance ToJSON a => ToJSON [a] where - toJSON = Array . map toJSON - -instance ToJSON a => ToJSON (Maybe a) where - toJSON Nothing = Null - toJSON (Just a) = toJSON a - -instance (ToJSON a,ToJSON b) => ToJSON (a,b) where - toJSON (a,b) = Array [toJSON a, toJSON b] - -instance (ToJSON a,ToJSON b,ToJSON c) => ToJSON (a,b,c) where - toJSON (a,b,c) = Array [toJSON a, toJSON b, toJSON c] - -instance (ToJSON a,ToJSON b,ToJSON c, ToJSON d) => ToJSON (a,b,c,d) where - toJSON (a,b,c,d) = Array [toJSON a, toJSON b, toJSON c, toJSON d] - -instance ToJSON Float where - toJSON = Number . realToFrac - -instance ToJSON Double where - toJSON = Number - -instance ToJSON Int where toJSON = Number . realToFrac -instance ToJSON Int8 where toJSON = Number . realToFrac -instance ToJSON Int16 where toJSON = Number . realToFrac -instance ToJSON Int32 where toJSON = Number . realToFrac - -instance ToJSON Word where toJSON = Number . realToFrac -instance ToJSON Word8 where toJSON = Number . realToFrac -instance ToJSON Word16 where toJSON = Number . realToFrac -instance ToJSON Word32 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Int64 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Word64 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Integer where toJSON = Number . fromInteger - ------------------------------------------------------------------------------- --- 'BB.Builder'-based encoding - --- | Serialise value as JSON/UTF8-encoded 'Builder' -encodeToBuilder :: ToJSON a => a -> Builder -encodeToBuilder = encodeValueBB . toJSON - -encodeValueBB :: Value -> Builder -encodeValueBB jv = case jv of - Bool True -> "true" - Bool False -> "false" - Null -> "null" - Number n - | isNaN n || isInfinite n -> encodeValueBB Null - | Just i <- doubleToInt64 n -> BB.int64Dec i - | otherwise -> BB.doubleDec n - Array a -> encodeArrayBB a - String s -> encodeStringBB s - Object o -> encodeObjectBB o - -encodeArrayBB :: [Value] -> Builder -encodeArrayBB [] = "[]" -encodeArrayBB jvs = BB.char8 '[' <> go jvs <> BB.char8 ']' - where - go = Data.Monoid.mconcat . 
intersperse (BB.char8 ',') . map encodeValueBB - -encodeObjectBB :: Object -> Builder -encodeObjectBB [] = "{}" -encodeObjectBB jvs = BB.char8 '{' <> go jvs <> BB.char8 '}' - where - go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encPair - encPair (l,x) = encodeStringBB l <> BB.char8 ':' <> encodeValueBB x - -encodeStringBB :: String -> Builder -encodeStringBB str = BB.char8 '"' <> go str <> BB.char8 '"' - where - go = BB.stringUtf8 . escapeString - ------------------------------------------------------------------------------- --- 'String'-based encoding - --- | Serialise value as JSON-encoded Unicode 'String' -encodeToString :: ToJSON a => a -> String -encodeToString jv = encodeValue (toJSON jv) [] - -encodeValue :: Value -> ShowS -encodeValue jv = case jv of - Bool b -> showString (if b then "true" else "false") - Null -> showString "null" - Number n - | isNaN n || isInfinite n -> encodeValue Null - | Just i <- doubleToInt64 n -> shows i - | otherwise -> shows n - Array a -> encodeArray a - String s -> encodeString s - Object o -> encodeObject o - -encodeArray :: [Value] -> ShowS -encodeArray [] = showString "[]" -encodeArray jvs = ('[':) . go jvs . (']':) - where - go [] = id - go [x] = encodeValue x - go (x:xs) = encodeValue x . (',':) . go xs - -encodeObject :: Object -> ShowS -encodeObject [] = showString "{}" -encodeObject jvs = ('{':) . go jvs . ('}':) - where - go [] = id - go [(l,x)] = encodeString l . (':':) . encodeValue x - go ((l,x):lxs) = encodeString l . (':':) . encodeValue x . (',':) . go lxs - -encodeString :: String -> ShowS -encodeString str = ('"':) . showString (escapeString str) . ('"':) - ------------------------------------------------------------------------------- --- helpers - --- | Try to convert 'Double' into 'Int64', return 'Nothing' if not --- representable loss-free as integral 'Int64' value. -doubleToInt64 :: Double -> Maybe Int64 -doubleToInt64 x - | fromInteger x' == x - , x' <= toInteger (maxBound :: Int64) - , x' >= toInteger (minBound :: Int64) - = Just (fromIntegral x') - | otherwise = Nothing - where - x' = round x - --- | Minimally escape a 'String' in accordance with RFC 7159, "7. 
Strings" -escapeString :: String -> String -escapeString s - | not (any needsEscape s) = s - | otherwise = escape s - where - escape [] = [] - escape (x:xs) = case x of - '\\' -> '\\':'\\':escape xs - '"' -> '\\':'"':escape xs - '\b' -> '\\':'b':escape xs - '\f' -> '\\':'f':escape xs - '\n' -> '\\':'n':escape xs - '\r' -> '\\':'r':escape xs - '\t' -> '\\':'t':escape xs - c | ord c < 0x10 -> '\\':'u':'0':'0':'0':intToDigit (ord c):escape xs - | ord c < 0x20 -> '\\':'u':'0':'0':'1':intToDigit (ord c - 0x10):escape xs - | otherwise -> c : escape xs - - -- unescaped = %x20-21 / %x23-5B / %x5D-10FFFF - needsEscape c = ord c < 0x20 || c `elem` ['\\','"'] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils/LabeledGraph.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils/LabeledGraph.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils/LabeledGraph.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils/LabeledGraph.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ --- | Wrapper around Data.Graph with support for edge labels -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Client.Utils.LabeledGraph ( - -- * Graphs - Graph - , Vertex - -- ** Building graphs - , graphFromEdges - , graphFromEdges' - , buildG - , transposeG - -- ** Graph properties - , vertices - , edges - -- ** Operations on the underlying unlabeled graph - , forgetLabels - , topSort - ) where - -import Data.Array -import Data.Graph (Vertex, Bounds) -import Data.List (sortBy) -import Data.Maybe (mapMaybe) -import qualified Data.Graph as G - -{------------------------------------------------------------------------------- - Types --------------------------------------------------------------------------------} - -type Graph e = Array Vertex [(e, Vertex)] -type Edge e = (Vertex, e, Vertex) - -{------------------------------------------------------------------------------- - Building graphs --------------------------------------------------------------------------------} - --- | Construct an edge-labeled graph --- --- This is a simple adaptation of the definition in Data.Graph -graphFromEdges :: forall key node edge. Ord key - => [ (node, key, [(edge, key)]) ] - -> ( Graph edge - , Vertex -> (node, key, [(edge, key)]) - , key -> Maybe Vertex - ) -graphFromEdges edges0 = - (graph, \v -> vertex_map ! v, key_vertex) - where - max_v = length edges0 - 1 - bounds0 = (0, max_v) :: (Vertex, Vertex) - sorted_edges = sortBy lt edges0 - edges1 = zipWith (,) [0..] sorted_edges - - graph = array bounds0 [(v, (mapMaybe mk_edge ks)) - | (v, (_, _, ks)) <- edges1] - key_map = array bounds0 [(v, k ) - | (v, (_, k, _ )) <- edges1] - vertex_map = array bounds0 edges1 - - (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2 - - mk_edge :: (edge, key) -> Maybe (edge, Vertex) - mk_edge (edge, key) = do v <- key_vertex key ; return (edge, v) - - -- returns Nothing for non-interesting vertices - key_vertex :: key -> Maybe Vertex - key_vertex k = findVertex 0 max_v - where - findVertex a b - | a > b = Nothing - | otherwise = case compare k (key_map ! 
mid) of - LT -> findVertex a (mid-1) - EQ -> Just mid - GT -> findVertex (mid+1) b - where - mid = a + (b - a) `div` 2 - -graphFromEdges' :: Ord key - => [ (node, key, [(edge, key)]) ] - -> ( Graph edge - , Vertex -> (node, key, [(edge, key)]) - ) -graphFromEdges' x = (a,b) - where - (a,b,_) = graphFromEdges x - -transposeG :: Graph e -> Graph e -transposeG g = buildG (bounds g) (reverseE g) - -buildG :: Bounds -> [Edge e] -> Graph e -buildG bounds0 edges0 = accumArray (flip (:)) [] bounds0 (map reassoc edges0) - where - reassoc (v, e, w) = (v, (e, w)) - -reverseE :: Graph e -> [Edge e] -reverseE g = [ (w, e, v) | (v, e, w) <- edges g ] - -{------------------------------------------------------------------------------- - Graph properties --------------------------------------------------------------------------------} - -vertices :: Graph e -> [Vertex] -vertices = indices - -edges :: Graph e -> [Edge e] -edges g = [ (v, e, w) | v <- vertices g, (e, w) <- g!v ] - -{------------------------------------------------------------------------------- - Operations on the underlying unlabelled graph --------------------------------------------------------------------------------} - -forgetLabels :: Graph e -> G.Graph -forgetLabels = fmap (map snd) - -topSort :: Graph e -> [Vertex] -topSort = G.topSort . forgetLabels diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Utils.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,301 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface, CPP #-} - -module Distribution.Client.Utils ( MergeResult(..) - , mergeBy, duplicates, duplicatesBy - , readMaybe - , inDir, logDirChange - , determineNumJobs, numberOfProcessors - , removeExistingFile - , withTempFileName - , makeAbsoluteToCwd - , makeRelativeToCwd, makeRelativeToDir - , filePathToByteString - , byteStringToFilePath, tryCanonicalizePath - , canonicalizePathNoThrow - , moreRecentFile, existsAndIsMoreRecentThan - , tryFindAddSourcePackageDesc - , tryFindPackageDesc - , relaxEncodingErrors) - where - -import Distribution.Compat.Exception ( catchIO ) -import Distribution.Client.Compat.Time ( getModTime ) -import Distribution.Simple.Setup ( Flag(..) ) -import Distribution.Simple.Utils ( die, findPackageDesc ) -import qualified Data.ByteString.Lazy as BS -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Control.Monad - ( when ) -import Data.Bits - ( (.|.), shiftL, shiftR ) -import Data.Char - ( ord, chr ) -#if MIN_VERSION_base(4,6,0) -import Text.Read - ( readMaybe ) -#endif -import Data.List - ( isPrefixOf, sortBy, groupBy ) -import Data.Word - ( Word8, Word32) -import Foreign.C.Types ( CInt(..) 
) -import qualified Control.Exception as Exception - ( finally, bracket ) -import System.Directory - ( canonicalizePath, doesFileExist, getCurrentDirectory - , removeFile, setCurrentDirectory ) -import System.FilePath - ( (), isAbsolute, takeDrive, splitPath, joinPath ) -import System.IO - ( Handle, hClose, openTempFile -#if MIN_VERSION_base(4,4,0) - , hGetEncoding, hSetEncoding -#endif - ) -import System.IO.Unsafe ( unsafePerformIO ) - -#if MIN_VERSION_base(4,4,0) -import GHC.IO.Encoding - ( recover, TextEncoding(TextEncoding) ) -import GHC.IO.Encoding.Failure - ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) ) -#endif - -#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) -import Prelude hiding (ioError) -import Control.Monad (liftM2, unless) -import System.Directory (doesDirectoryExist) -import System.IO.Error (ioError, mkIOError, doesNotExistErrorType) -#endif - --- | Generic merging utility. For sorted input lists this is a full outer join. --- --- * The result list never contains @(Nothing, Nothing)@. --- -mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] -mergeBy cmp = merge - where - merge [] ys = [ OnlyInRight y | y <- ys] - merge xs [] = [ OnlyInLeft x | x <- xs] - merge (x:xs) (y:ys) = - case x `cmp` y of - GT -> OnlyInRight y : merge (x:xs) ys - EQ -> InBoth x y : merge xs ys - LT -> OnlyInLeft x : merge xs (y:ys) - -data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b - -duplicates :: Ord a => [a] -> [[a]] -duplicates = duplicatesBy compare - -duplicatesBy :: (a -> a -> Ordering) -> [a] -> [[a]] -duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp - where - eq a b = case cmp a b of - EQ -> True - _ -> False - moreThanOne (_:_:_) = True - moreThanOne _ = False - -#if !MIN_VERSION_base(4,6,0) --- | An implementation of readMaybe, for compatability with older base versions. -readMaybe :: Read a => String -> Maybe a -readMaybe s = case reads s of - [(x,"")] -> Just x - _ -> Nothing -#endif - --- | Like 'removeFile', but does not throw an exception when the file does not --- exist. -removeExistingFile :: FilePath -> IO () -removeExistingFile path = do - exists <- doesFileExist path - when exists $ - removeFile path - --- | A variant of 'withTempFile' that only gives us the file name, and while --- it will clean up the file afterwards, it's lenient if the file is --- moved\/deleted. --- -withTempFileName :: FilePath - -> String - -> (FilePath -> IO a) -> IO a -withTempFileName tmpDir template action = - Exception.bracket - (openTempFile tmpDir template) - (\(name, _) -> removeExistingFile name) - (\(name, h) -> hClose h >> action name) - --- | Executes the action in the specified directory. -inDir :: Maybe FilePath -> IO a -> IO a -inDir Nothing m = m -inDir (Just d) m = do - old <- getCurrentDirectory - setCurrentDirectory d - m `Exception.finally` setCurrentDirectory old - --- | Log directory change in 'make' compatible syntax -logDirChange :: (String -> IO ()) -> Maybe FilePath -> IO a -> IO a -logDirChange _ Nothing m = m -logDirChange l (Just d) m = do - l $ "cabal: Entering directory '" ++ d ++ "'\n" - m `Exception.finally` - (l $ "cabal: Leaving directory '" ++ d ++ "'\n") - -foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt - --- The number of processors is not going to change during the duration of the --- program, so unsafePerformIO is safe here. 
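-- [Editor's illustrative sketch; not part of the patch above.]  The definitions
-- below obtain the processor count once through a small C shim and cache it via
-- 'unsafePerformIO'.  The same "query once, then treat as a constant" pattern
-- can be written with 'GHC.Conc.getNumProcessors' from base; this is an assumed
-- equivalent, not the code cabal-install ships.

import GHC.Conc         (getNumProcessors)
import System.IO.Unsafe (unsafePerformIO)

-- | Queried once and cached; the count cannot change while the program runs,
-- which is what makes the 'unsafePerformIO' acceptable.
cachedNumProcessors :: Int
cachedNumProcessors = unsafePerformIO getNumProcessors
{-# NOINLINE cachedNumProcessors #-}

-- | Mirror of 'determineNumJobs' with 'Flag (Maybe Int)' flattened to
-- 'Maybe (Maybe Int)': no @-j@ means 1 job, bare @-j@ means one per processor,
-- @-j N@ means exactly N.
demoNumJobs :: Maybe (Maybe Int) -> Int
demoNumJobs Nothing         = 1
demoNumJobs (Just Nothing)  = cachedNumProcessors
demoNumJobs (Just (Just n)) = n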
-numberOfProcessors :: Int -numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors - --- | Determine the number of jobs to use given the value of the '-j' flag. -determineNumJobs :: Flag (Maybe Int) -> Int -determineNumJobs numJobsFlag = - case numJobsFlag of - NoFlag -> 1 - Flag Nothing -> numberOfProcessors - Flag (Just n) -> n - --- | Given a relative path, make it absolute relative to the current --- directory. Absolute paths are returned unmodified. -makeAbsoluteToCwd :: FilePath -> IO FilePath -makeAbsoluteToCwd path | isAbsolute path = return path - | otherwise = do cwd <- getCurrentDirectory - return $! cwd path - --- | Given a path (relative or absolute), make it relative to the current --- directory, including using @../..@ if necessary. -makeRelativeToCwd :: FilePath -> IO FilePath -makeRelativeToCwd path = - makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory - --- | Given a path (relative or absolute), make it relative to the given --- directory, including using @../..@ if necessary. -makeRelativeToDir :: FilePath -> FilePath -> IO FilePath -makeRelativeToDir path dir = - makeRelativeCanonical <$> canonicalizePath path <*> canonicalizePath dir - --- | Given a canonical absolute path and canonical absolute dir, make the path --- relative to the directory, including using @../..@ if necessary. Returns --- the original absolute path if it is not on the same drive as the given dir. -makeRelativeCanonical :: FilePath -> FilePath -> FilePath -makeRelativeCanonical path dir - | takeDrive path /= takeDrive dir = path - | otherwise = go (splitPath path) (splitPath dir) - where - go (p:ps) (d:ds) | p == d = go ps ds - go [] [] = "./" - go ps ds = joinPath (replicate (length ds) ".." ++ ps) - --- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is --- encoded as a little-endian 'Word32'. -filePathToByteString :: FilePath -> BS.ByteString -filePathToByteString p = - BS.pack $ foldr conv [] codepts - where - codepts :: [Word32] - codepts = map (fromIntegral . ord) p - - conv :: Word32 -> [Word8] -> [Word8] - conv w32 rest = b0:b1:b2:b3:rest - where - b0 = fromIntegral $ w32 - b1 = fromIntegral $ w32 `shiftR` 8 - b2 = fromIntegral $ w32 `shiftR` 16 - b3 = fromIntegral $ w32 `shiftR` 24 - --- | Reverse operation to 'filePathToByteString'. -byteStringToFilePath :: BS.ByteString -> FilePath -byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected - | otherwise = go 0 - where - unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected" - bslen = BS.length bs - - go i | i == bslen = [] - | otherwise = (chr . fromIntegral $ w32) : go (i+4) - where - w32 :: Word32 - w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. (b3 `shiftL` 24) - b0 = fromIntegral $ BS.index bs i - b1 = fromIntegral $ BS.index bs (i + 1) - b2 = fromIntegral $ BS.index bs (i + 2) - b3 = fromIntegral $ BS.index bs (i + 3) - --- | Workaround for the inconsistent behaviour of 'canonicalizePath'. Always --- throws an error if the path refers to a non-existent file. -tryCanonicalizePath :: FilePath -> IO FilePath -tryCanonicalizePath path = do - ret <- canonicalizePath path -#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) - exists <- liftM2 (||) (doesFileExist ret) (doesDirectoryExist ret) - unless exists $ - ioError $ mkIOError doesNotExistErrorType "canonicalizePath" - Nothing (Just ret) -#endif - return ret - --- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws --- an exception, returns the path argument unmodified. 
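-- [Editor's illustrative sketch; not part of the patch above.]  The wrapper
-- defined next routes the exception through cabal's 'catchIO'; with plain base
-- the same behaviour can be expressed with 'Control.Exception.try', returning
-- the original path whenever canonicalisation throws an 'IOException'.  This is
-- an assumed equivalent, not the shipped implementation.

import Control.Exception (IOException, try)
import System.Directory  (canonicalizePath)

canonicalizeOrKeep :: FilePath -> IO FilePath
canonicalizeOrKeep path = do
  result <- try (canonicalizePath path) :: IO (Either IOException FilePath)
  return (either (const path) id result)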
-canonicalizePathNoThrow :: FilePath -> IO FilePath -canonicalizePathNoThrow path = do - canonicalizePath path `catchIO` (\_ -> return path) - --------------------- --- Modification time - --- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead --- of getModificationTime for higher precision. We can't merge the two because --- Distribution.Client.Time uses MIN_VERSION macros. -moreRecentFile :: FilePath -> FilePath -> IO Bool -moreRecentFile a b = do - exists <- doesFileExist b - if not exists - then return True - else do tb <- getModTime b - ta <- getModTime a - return (ta > tb) - --- | Like 'moreRecentFile', but also checks that the first file exists. -existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool -existsAndIsMoreRecentThan a b = do - exists <- doesFileExist a - if not exists - then return False - else a `moreRecentFile` b - --- | Sets the handler for encoding errors to one that transliterates invalid --- characters into one present in the encoding (i.e., \'?\'). --- This is opposed to the default behavior, which is to throw an exception on --- error. This function will ignore file handles that have a Unicode encoding --- set. It's a no-op for versions of `base` less than 4.4. -relaxEncodingErrors :: Handle -> IO () -relaxEncodingErrors handle = do -#if MIN_VERSION_base(4,4,0) - maybeEncoding <- hGetEncoding handle - case maybeEncoding of - Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> - let relax x = x { recover = recoverEncode TransliterateCodingFailure } - in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) - _ -> -#endif - return () - --- |Like 'tryFindPackageDesc', but with error specific to add-source deps. -tryFindAddSourcePackageDesc :: FilePath -> String -> IO FilePath -tryFindAddSourcePackageDesc depPath err = tryFindPackageDesc depPath $ - err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " - ++ depPath - --- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be --- found, with @err@ prefixing the error message. This function simply allows --- us to give a more descriptive error than that provided by @findPackageDesc@. -tryFindPackageDesc :: FilePath -> String -> IO FilePath -tryFindPackageDesc depPath err = do - errOrCabalFile <- findPackageDesc depPath - case errOrCabalFile of - Right file -> return file - Left _ -> die err diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Win32SelfUpgrade.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Win32SelfUpgrade.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/Win32SelfUpgrade.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/Win32SelfUpgrade.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Win32SelfUpgrade --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Support for self-upgrading executables on Windows platforms. 
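-- [Editor's illustrative sketch; not part of the patch above.]  The
-- 'tryFindPackageDesc' helper above defers to Cabal's 'findPackageDesc'; the
-- core of that lookup is just "list the directory and keep entries ending in
-- .cabal".  This simplified, assumed version ignores the zero- and
-- multiple-match error reporting the real helper adds.

import System.Directory (getDirectoryContents)
import System.FilePath  (takeExtension, (</>))

findCabalFiles :: FilePath -> IO [FilePath]
findCabalFiles dir = do
  entries <- getDirectoryContents dir
  return [ dir </> e | e <- entries, takeExtension e == ".cabal" ]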
------------------------------------------------------------------------------ -module Distribution.Client.Win32SelfUpgrade ( --- * Explanation --- --- | Windows inherited a design choice from DOS that while initially innocuous --- has rather unfortunate consequences. It maintains the invariant that every --- open file has a corresponding name on disk. One positive consequence of this --- is that an executable can always find it's own executable file. The downside --- is that a program cannot be deleted or upgraded while it is running without --- hideous workarounds. This module implements one such hideous workaround. --- --- The basic idea is: --- --- * Move our own exe file to a new name --- * Copy a new exe file to the previous name --- * Run the new exe file, passing our own PID and new path --- * Wait for the new process to start --- * Close the new exe file --- * Exit old process --- --- Then in the new process: --- --- * Inform the old process that we've started --- * Wait for the old process to die --- * Delete the old exe file --- * Exit new process --- - - possibleSelfUpgrade, - deleteOldExeFile, - ) where - -#if mingw32_HOST_OS - -import qualified System.Win32 as Win32 -import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) -import Foreign.Ptr (Ptr, nullPtr) -import System.Process (runProcess) -import System.Directory (canonicalizePath) -import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) - -import Distribution.Verbosity as Verbosity (Verbosity, showForCabal) -import Distribution.Simple.Utils (debug, info) - -import Prelude hiding (log) - --- | If one of the given files is our own exe file then we arrange things such --- that the nested action can replace our own exe file. --- --- We require that the new process accepts a command line invocation that --- calls 'deleteOldExeFile', passing in the PID and exe file. --- -possibleSelfUpgrade :: Verbosity - -> [FilePath] - -> IO a -> IO a -possibleSelfUpgrade verbosity newPaths action = do - dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE - - newPaths' <- mapM canonicalizePath newPaths - let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' - - if not doingSelfUpgrade - then action - else do - info verbosity $ "cabal-install does the replace-own-exe-file dance..." - tmpPath <- moveOurExeOutOfTheWay verbosity - result <- action - scheduleOurDemise verbosity dstPath tmpPath - (\pid path -> ["win32selfupgrade", pid, path - ,"--verbose=" ++ Verbosity.showForCabal verbosity]) - return result - --- | The name of a Win32 Event object that we use to synchronise between the --- old and new processes. We need to synchronise to make sure that the old --- process has not yet terminated by the time the new one starts up and looks --- for the old process. Otherwise the old one might have already terminated --- and we could not wait on it terminating reliably (eg the PID might get --- re-used). --- -syncEventName :: String -syncEventName = "Local\\cabal-install-upgrade" - --- | The first part of allowing our exe file to be replaced is to move the --- existing exe file out of the way. Although we cannot delete our exe file --- while we're still running, fortunately we can rename it, at least within --- the same directory. 
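-- [Editor's illustrative sketch; not part of the patch above.]  The first step
-- of the dance described above, written against portable base/directory APIs
-- rather than the Win32 calls used below: rename the running executable aside,
-- within the same directory, so its original name becomes free for the
-- replacement.  'moveOurselvesAside' and its tag argument are hypothetical.

import System.Directory   (renameFile)
import System.Environment (getExecutablePath)
import System.FilePath    (replaceBaseName, takeBaseName)

moveOurselvesAside :: String -> IO FilePath
moveOurselvesAside tag = do
  ourPath <- getExecutablePath
  let tmpPath = replaceBaseName ourPath (takeBaseName ourPath ++ tag)
  renameFile ourPath tmpPath  -- renaming an open file is allowed on Windows;
  return tmpPath              -- deleting it is not, hence the rename-aside trick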
--- -moveOurExeOutOfTheWay :: Verbosity -> IO FilePath -moveOurExeOutOfTheWay verbosity = do - ourPID <- getCurrentProcessId - dstPath <- Win32.getModuleFileName Win32.nullHANDLE - - let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) - - debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath - Win32.moveFile dstPath tmpPath - return tmpPath - --- | Assuming we've now installed the new exe file in the right place, we --- launch it and ask it to delete our exe file when we eventually terminate. --- -scheduleOurDemise :: Verbosity -> FilePath -> FilePath - -> (String -> FilePath -> [String]) -> IO () -scheduleOurDemise verbosity dstPath tmpPath mkArgs = do - ourPID <- getCurrentProcessId - event <- createEvent syncEventName - - let args = mkArgs (show ourPID) tmpPath - log $ "launching child " ++ unwords (dstPath : map show args) - _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing - - log $ "waiting for the child to start up" - waitForSingleObject event (10*1000) -- wait at most 10 sec - log $ "child started ok" - - where - log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) - --- | Assuming we're now in the new child process, we've been asked by the old --- process to wait for it to terminate and then we can remove the old exe file --- that it renamed itself to. --- -deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () -deleteOldExeFile verbosity oldPID tmpPath = do - log $ "process started. Will delete exe file of process " - ++ show oldPID ++ " at path " ++ tmpPath - - log $ "getting handle of parent process " ++ show oldPID - oldPHANDLE <- Win32.openProcess Win32.sYNCHORNIZE False (fromIntegral oldPID) - - log $ "synchronising with parent" - event <- openEvent syncEventName - setEvent event - - log $ "waiting for parent process to terminate" - waitForSingleObject oldPHANDLE Win32.iNFINITE - log $ "parent process terminated" - - log $ "deleting parent's old .exe file" - Win32.deleteFile tmpPath - - where - log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) - ------------------------- --- Win32 foreign imports --- - --- A bunch of functions sadly not provided by the Win32 package. 
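-- [Editor's illustrative sketch; not part of the patch above.]  The
-- declarations that follow bind Win32 entry points with the standard FFI
-- syntax.  As a generic illustration of that declaration shape (unrelated to
-- the Win32 calls themselves), a libc function is imported the same way:

{-# LANGUAGE ForeignFunctionInterface #-}

foreign import ccall unsafe "math.h sin"
  c_sin :: Double -> Double

-- c_sin is then used like any ordinary Haskell function, e.g. @c_sin 0 == 0@.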
- -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" - getCurrentProcessId :: IO DWORD - -foreign import CALLCONV unsafe "windows.h WaitForSingleObject" - waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD - -waitForSingleObject :: HANDLE -> DWORD -> IO () -waitForSingleObject handle timeout = - Win32.failIf_ bad "WaitForSingleObject" $ - waitForSingleObject_ handle timeout - where - bad result = not (result == 0 || result == wAIT_TIMEOUT) - wAIT_TIMEOUT = 0x00000102 - -foreign import CALLCONV unsafe "windows.h CreateEventW" - createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE - -createEvent :: String -> IO HANDLE -createEvent name = do - Win32.failIfNull "CreateEvent" $ - Win32.withTString name $ - createEvent_ nullPtr False False - -foreign import CALLCONV unsafe "windows.h OpenEventW" - openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE - -openEvent :: String -> IO HANDLE -openEvent name = do - Win32.failIfNull "OpenEvent" $ - Win32.withTString name $ - openEvent_ eVENT_MODIFY_STATE False - where - eVENT_MODIFY_STATE :: DWORD - eVENT_MODIFY_STATE = 0x0002 - -foreign import CALLCONV unsafe "windows.h SetEvent" - setEvent_ :: HANDLE -> IO BOOL - -setEvent :: HANDLE -> IO () -setEvent handle = - Win32.failIfFalse_ "SetEvent" $ - setEvent_ handle - -#else - -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils (die) - -possibleSelfUpgrade :: Verbosity - -> [FilePath] - -> IO a -> IO a -possibleSelfUpgrade _ _ action = action - -deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () -deleteOldExeFile _ _ _ = die "win32selfupgrade not needed except on win32" - -#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/World.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/World.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Distribution/Client/World.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Distribution/Client/World.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,172 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.World --- Copyright : (c) Peter Robinson 2009 --- License : BSD-like --- --- Maintainer : thaldyron@gmail.com --- Stability : provisional --- Portability : portable --- --- Interface to the world-file that contains a list of explicitly --- requested packages. Meant to be imported qualified. --- --- A world file entry stores the package-name, package-version, and --- user flags. --- For example, the entry generated by --- # cabal install stm-io-hooks --flags="-debug" --- looks like this: --- # stm-io-hooks -any --flags="-debug" --- To rebuild/upgrade the packages in world (e.g. when updating the compiler) --- use --- # cabal install world --- ------------------------------------------------------------------------------ -module Distribution.Client.World ( - WorldPkgInfo(..), - insert, - delete, - getContents, - ) where - -import Distribution.Package - ( Dependency(..) 
) -import Distribution.PackageDescription - ( FlagAssignment, FlagName(FlagName) ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Simple.Utils - ( die, info, chattyTry, writeFileAtomic ) -import Distribution.Text - ( Text(..), display, simpleParse ) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.Exception ( catchIO ) -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint ( (<>), (<+>) ) - - -import Data.Char as Char - -import Data.List - ( unionBy, deleteFirstsBy, nubBy ) -import System.IO.Error - ( isDoesNotExistError ) -import qualified Data.ByteString.Lazy.Char8 as B -import Prelude hiding (getContents) - - -data WorldPkgInfo = WorldPkgInfo Dependency FlagAssignment - deriving (Show,Eq) - --- | Adds packages to the world file; creates the file if it doesn't --- exist yet. Version constraints and flag assignments for a package are --- updated if already present. IO errors are non-fatal. -insert :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () -insert = modifyWorld $ unionBy equalUDep - --- | Removes packages from the world file. --- Note: Currently unused as there is no mechanism in Cabal (yet) to --- handle uninstalls. IO errors are non-fatal. -delete :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () -delete = modifyWorld $ flip (deleteFirstsBy equalUDep) - --- | WorldPkgInfo values are considered equal if they refer to --- the same package, i.e., we don't care about differing versions or flags. -equalUDep :: WorldPkgInfo -> WorldPkgInfo -> Bool -equalUDep (WorldPkgInfo (Dependency pkg1 _) _) - (WorldPkgInfo (Dependency pkg2 _) _) = pkg1 == pkg2 - --- | Modifies the world file by applying an update-function ('unionBy' --- for 'insert', 'deleteFirstsBy' for 'delete') to the given list of --- packages. IO errors are considered non-fatal. -modifyWorld :: ([WorldPkgInfo] -> [WorldPkgInfo] - -> [WorldPkgInfo]) - -- ^ Function that defines how - -- the list of user packages are merged with - -- existing world packages. - -> Verbosity - -> FilePath -- ^ Location of the world file - -> [WorldPkgInfo] -- ^ list of user supplied packages - -> IO () -modifyWorld _ _ _ [] = return () -modifyWorld f verbosity world pkgs = - chattyTry "Error while updating world-file. " $ do - pkgsOldWorld <- getContents world - -- Filter out packages that are not in the world file: - let pkgsNewWorld = nubBy equalUDep $ f pkgs pkgsOldWorld - -- 'Dependency' is not an Ord instance, so we need to check for - -- equivalence the awkward way: - if not (all (`elem` pkgsOldWorld) pkgsNewWorld && - all (`elem` pkgsNewWorld) pkgsOldWorld) - then do - info verbosity "Updating world file..." - writeFileAtomic world . B.pack $ unlines - [ (display pkg) | pkg <- pkgsNewWorld] - else - info verbosity "World file is already up to date." - - --- | Returns the content of the world file as a list -getContents :: FilePath -> IO [WorldPkgInfo] -getContents world = do - content <- safelyReadFile world - let result = map simpleParse (lines $ B.unpack content) - case sequence result of - Nothing -> die "Could not parse world file." 
- Just xs -> return xs - where - safelyReadFile :: FilePath -> IO B.ByteString - safelyReadFile file = B.readFile file `catchIO` handler - where - handler e | isDoesNotExistError e = return B.empty - | otherwise = ioError e - - -instance Text WorldPkgInfo where - disp (WorldPkgInfo dep flags) = disp dep <+> dispFlags flags - where - dispFlags [] = Disp.empty - dispFlags fs = Disp.text "--flags=" - <> Disp.doubleQuotes (flagAssToDoc fs) - flagAssToDoc = foldr (\(FlagName fname,val) flagAssDoc -> - (if not val then Disp.char '-' - else Disp.empty) - Disp.<> Disp.text fname - Disp.<+> flagAssDoc) - Disp.empty - parse = do - dep <- parse - Parse.skipSpaces - flagAss <- Parse.option [] parseFlagAssignment - return $ WorldPkgInfo dep flagAss - where - parseFlagAssignment :: Parse.ReadP r FlagAssignment - parseFlagAssignment = do - _ <- Parse.string "--flags" - Parse.skipSpaces - _ <- Parse.char '=' - Parse.skipSpaces - inDoubleQuotes $ Parse.many1 flag - where - inDoubleQuotes :: Parse.ReadP r a -> Parse.ReadP r a - inDoubleQuotes = Parse.between (Parse.char '"') (Parse.char '"') - - flag = do - Parse.skipSpaces - val <- negative Parse.+++ positive - name <- ident - Parse.skipSpaces - return (FlagName name,val) - negative = do - _ <- Parse.char '-' - return False - positive = return True - - ident :: Parse.ReadP r String - ident = do - -- First character must be a letter/digit to avoid flags - -- like "+-debug": - c <- Parse.satisfy Char.isAlphaNum - cs <- Parse.munch (\ch -> Char.isAlphaNum ch || ch == '_' - || ch == '-') - return (c:cs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/LICENSE 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -Copyright (c) 2003-2008, Isaac Jones, Simon Marlow, Martin Sjögren, - Bjorn Bringert, Krasimir Angelov, - Malcolm Wallace, Ross Patterson, - Lemmih, Paolo Martini, Don Stewart, - Duncan Coutts -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Main.hs 2016-11-07 10:02:40.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1326 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Main --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- Entry point to the default cabal-install front-end. ------------------------------------------------------------------------------ - -module Main (main) where - -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand, withRepoContext - , ConfigFlags(..) - , ConfigExFlags(..), defaultConfigExFlags, configureExCommand - , BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) - , buildCommand, replCommand, testCommand, benchmarkCommand - , InstallFlags(..), defaultInstallFlags - , installCommand, upgradeCommand, uninstallCommand - , FetchFlags(..), fetchCommand - , FreezeFlags(..), freezeCommand - , genBoundsCommand - , GetFlags(..), getCommand, unpackCommand - , checkCommand - , formatCommand - , updateCommand - , ListFlags(..), listCommand - , InfoFlags(..), infoCommand - , UploadFlags(..), uploadCommand - , ReportFlags(..), reportCommand - , runCommand - , InitFlags(initVerbosity), initCommand - , SDistFlags(..), SDistExFlags(..), sdistCommand - , Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand - , ActAsSetupFlags(..), actAsSetupCommand - , SandboxFlags(..), sandboxCommand - , ExecFlags(..), execCommand - , UserConfigFlags(..), userConfigCommand - , reportCommand - , manpageCommand - ) -import Distribution.Simple.Setup - ( HaddockTarget(..) - , HaddockFlags(..), haddockCommand, defaultHaddockFlags - , HscolourFlags(..), hscolourCommand - , ReplFlags(..) - , CopyFlags(..), copyCommand - , RegisterFlags(..), registerCommand - , CleanFlags(..), cleanCommand - , TestFlags(..), BenchmarkFlags(..) 
- , Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag - , configAbsolutePaths - ) - -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Config - ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff - , userConfigUpdate, createDefaultConfigFile, getConfigFilePath ) -import Distribution.Client.Targets - ( readUserTargets ) -import qualified Distribution.Client.List as List - ( list, info ) - -import qualified Distribution.Client.CmdConfigure as CmdConfigure -import qualified Distribution.Client.CmdBuild as CmdBuild -import qualified Distribution.Client.CmdRepl as CmdRepl - -import Distribution.Client.Install (install) -import Distribution.Client.Configure (configure) -import Distribution.Client.Update (update) -import Distribution.Client.Exec (exec) -import Distribution.Client.Fetch (fetch) -import Distribution.Client.Freeze (freeze) -import Distribution.Client.GenBounds (genBounds) -import Distribution.Client.Check as Check (check) ---import Distribution.Client.Clean (clean) -import qualified Distribution.Client.Upload as Upload -import Distribution.Client.Run (run, splitRunArgs) -import Distribution.Client.SrcDist (sdist) -import Distribution.Client.Get (get) -import Distribution.Client.Sandbox (sandboxInit - ,sandboxAddSource - ,sandboxDelete - ,sandboxDeleteSource - ,sandboxListSources - ,sandboxHcPkg - ,dumpPackageEnvironment - - ,getSandboxConfigFilePath - ,loadConfigOrSandboxConfig - ,findSavedDistPref - ,initPackageDBIfNeeded - ,maybeWithSandboxDirOnSearchPath - ,maybeWithSandboxPackageInfo - ,WereDepsReinstalled(..) - ,maybeReinstallAddSourceDeps - ,tryGetIndexFilePath - ,sandboxBuildDir - ,updateSandboxConfigFileFlag - ,updateInstallDirs - - ,configCompilerAux' - ,getPersistOrConfigCompiler - ,configPackageDB') -import Distribution.Client.Sandbox.PackageEnvironment - (setPackageDB - ,userPackageEnvironmentFile) -import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord) -import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox) -import Distribution.Client.Tar (createTarGzFile) -import Distribution.Client.Types (Password (..)) -import Distribution.Client.Init (initCabal) -import Distribution.Client.Manpage (manpage) -import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade -import Distribution.Client.Utils (determineNumJobs -#if defined(mingw32_HOST_OS) - ,relaxEncodingErrors -#endif - ,existsAndIsMoreRecentThan) - -import Distribution.Package (packageId) -import Distribution.PackageDescription - ( BuildType(..), Executable(..), buildable ) -import Distribution.PackageDescription.Parse - ( readPackageDescription ) -import Distribution.PackageDescription.PrettyPrint - ( writeGenericPackageDescription ) -import qualified Distribution.Simple as Simple -import qualified Distribution.Make as Make -import Distribution.Simple.Build - ( startInterpreter ) -import Distribution.Simple.Command - ( CommandParse(..), CommandUI(..), Command, CommandSpec(..) - , CommandType(..), commandsRun, commandAddAction, hiddenCommand - , commandFromSpec) -import Distribution.Simple.Compiler - ( Compiler(..) 
) -import Distribution.Simple.Configure - ( checkPersistBuildConfigOutdated, configCompilerAuxEx - , ConfigStateFileError(..), localBuildInfoFile - , getPersistBuildConfig, tryGetPersistBuildConfig ) -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Program (defaultProgramConfiguration - ,configureAllKnownPrograms - ,simpleProgramInvocation - ,getProgramInvocationOutput) -import Distribution.Simple.Program.Db (reconfigurePrograms) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Utils - ( cabalVersion, die, notice, info, topHandler - , findPackageDesc, tryFindPackageDesc ) -import Distribution.Text - ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity, normal ) -import Distribution.Version - ( Version(..), orLaterVersion ) -import qualified Paths_cabal_install (version) - -import System.Environment (getArgs, getProgName) -import System.Exit (exitFailure, exitSuccess) -import System.FilePath ( dropExtension, splitExtension - , takeExtension, (), (<.>)) -import System.IO ( BufferMode(LineBuffering), hSetBuffering -#ifdef mingw32_HOST_OS - , stderr -#endif - , stdout ) -import System.Directory (doesFileExist, getCurrentDirectory) -import Data.List (intercalate) -import Data.Maybe (listToMaybe) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid (Monoid(..)) -import Control.Applicative (pure, (<$>)) -#endif -import Control.Exception (SomeException(..), try) -import Control.Monad (when, unless, void) - --- | Entry point --- -main :: IO () -main = do - -- Enable line buffering so that we can get fast feedback even when piped. - -- This is especially important for CI and build systems. - hSetBuffering stdout LineBuffering - -- The default locale encoding for Windows CLI is not UTF-8 and printing - -- Unicode characters to it will fail unless we relax the handling of encoding - -- errors when writing to stderr and stdout. -#ifdef mingw32_HOST_OS - relaxEncodingErrors stdout - relaxEncodingErrors stderr -#endif - getArgs >>= mainWorker - -mainWorker :: [String] -> IO () -mainWorker args = topHandler $ - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printGlobalHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (globalFlags, commandParse) -> - case commandParse of - _ | fromFlagOrDefault False (globalVersion globalFlags) - -> printVersion - | fromFlagOrDefault False (globalNumericVersion globalFlags) - -> printNumericVersion - CommandHelp help -> printCommandHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo action -> do - globalFlags' <- updateSandboxConfigFileFlag globalFlags - action globalFlags' - - where - printCommandHelp help = do - pname <- getProgName - putStr (help pname) - printGlobalHelp help = do - pname <- getProgName - configFile <- defaultConfigFile - putStr (help pname) - putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" - ++ " " ++ configFile ++ "\n" - exists <- doesFileExist configFile - when (not exists) $ - putStrLn $ "This file will be generated with sensible " - ++ "defaults if you run 'cabal update'." - printOptionsList = putStr . 
unlines - printErrors errs = die $ intercalate "\n" errs - printNumericVersion = putStrLn $ display Paths_cabal_install.version - printVersion = putStrLn $ "cabal-install version " - ++ display Paths_cabal_install.version - ++ "\ncompiled using version " - ++ display cabalVersion - ++ " of the Cabal library " - - commands = map commandFromSpec commandSpecs - commandSpecs = - [ regularCmd installCommand installAction - , regularCmd updateCommand updateAction - , regularCmd listCommand listAction - , regularCmd infoCommand infoAction - , regularCmd fetchCommand fetchAction - , regularCmd freezeCommand freezeAction - , regularCmd getCommand getAction - , hiddenCmd unpackCommand unpackAction - , regularCmd checkCommand checkAction - , regularCmd sdistCommand sdistAction - , regularCmd uploadCommand uploadAction - , regularCmd reportCommand reportAction - , regularCmd runCommand runAction - , regularCmd initCommand initAction - , regularCmd configureExCommand configureAction - , regularCmd buildCommand buildAction - , regularCmd replCommand replAction - , regularCmd sandboxCommand sandboxAction - , regularCmd haddockCommand haddockAction - , regularCmd execCommand execAction - , regularCmd userConfigCommand userConfigAction - , regularCmd cleanCommand cleanAction - , regularCmd genBoundsCommand genBoundsAction - , wrapperCmd copyCommand copyVerbosity copyDistPref - , wrapperCmd hscolourCommand hscolourVerbosity hscolourDistPref - , wrapperCmd registerCommand regVerbosity regDistPref - , regularCmd testCommand testAction - , regularCmd benchmarkCommand benchmarkAction - , hiddenCmd uninstallCommand uninstallAction - , hiddenCmd formatCommand formatAction - , hiddenCmd upgradeCommand upgradeAction - , hiddenCmd win32SelfUpgradeCommand win32SelfUpgradeAction - , hiddenCmd actAsSetupCommand actAsSetupAction - , hiddenCmd manpageCommand (manpageAction commandSpecs) - - , hiddenCmd installCommand { commandName = "new-configure" } - CmdConfigure.configureAction - , hiddenCmd installCommand { commandName = "new-build" } - CmdBuild.buildAction - , hiddenCmd installCommand { commandName = "new-repl" } - CmdRepl.replAction - ] - -type Action = GlobalFlags -> IO () - -regularCmd :: CommandUI flags -> (flags -> [String] -> action) - -> CommandSpec action -regularCmd ui action = - CommandSpec ui ((flip commandAddAction) action) NormalCommand - -hiddenCmd :: CommandUI flags -> (flags -> [String] -> action) - -> CommandSpec action -hiddenCmd ui action = - CommandSpec ui (\ui' -> hiddenCommand (commandAddAction ui' action)) - HiddenCommand - -wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) - -> (flags -> Flag String) -> CommandSpec Action -wrapperCmd ui verbosity distPref = - CommandSpec ui (\ui' -> wrapperAction ui' verbosity distPref) NormalCommand - -wrapperAction :: Monoid flags - => CommandUI flags - -> (flags -> Flag Verbosity) - -> (flags -> Flag String) - -> Command Action -wrapperAction command verbosityFlag distPrefFlag = - commandAddAction command - { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do - let verbosity = fromFlagOrDefault normal (verbosityFlag flags) - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (distPrefFlag flags) - let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } - setupWrapper verbosity setupScriptOptions Nothing - command (const flags) extraArgs - -configureAction :: (ConfigFlags, 
ConfigExFlags) - -> [String] -> Action -configureAction (configFlags, configExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - - (useSandbox, config) <- fmap - (updateInstallDirs (configUserInstall configFlags)) - (loadConfigOrSandboxConfig verbosity globalFlags) - let configFlags' = savedConfigureFlags config `mappend` configFlags - configExFlags' = savedConfigureExFlags config `mappend` configExFlags - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, conf) <- configCompilerAuxEx configFlags' - - -- If we're working inside a sandbox and the user has set the -w option, we - -- may need to create a sandbox-local package DB for this compiler and add a - -- timestamp record for this compiler to the timestamp file. - let configFlags'' = case useSandbox of - NoSandbox -> configFlags' - (UseSandbox sandboxDir) -> setPackageDB sandboxDir - comp platform configFlags' - - whenUsingSandbox useSandbox $ \sandboxDir -> do - initPackageDBIfNeeded verbosity configFlags'' comp conf - -- NOTE: We do not write the new sandbox package DB location to - -- 'cabal.sandbox.config' here because 'configure -w' must not affect - -- subsequent 'install' (for UI compatibility with non-sandboxed mode). - - indexFile <- tryGetIndexFilePath config - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - configure verbosity - (configPackageDB' configFlags'') - repoContext - comp platform conf configFlags'' configExFlags' extraArgs - -buildAction :: (BuildFlags, BuildExFlags) -> [String] -> Action -buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) - noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - - -- Calls 'configureAction' to do the real work, so nothing special has to be - -- done to support sandboxes. - (useSandbox, config, distPref) <- reconfigure verbosity - (buildDistPref buildFlags) - mempty [] globalFlags noAddSource - (buildNumJobs buildFlags) (const Nothing) - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config distPref buildFlags extraArgs - - --- | Actually do the work of building the package. This is separate from --- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke --- 'reconfigure' twice. -build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () -build verbosity config distPref buildFlags extraArgs = - setupWrapper verbosity setupOptions Nothing - (Cabal.buildCommand progConf) mkBuildFlags extraArgs - where - progConf = defaultProgramConfiguration - setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - - mkBuildFlags version = filterBuildFlags version config buildFlags' - buildFlags' = buildFlags - { buildVerbosity = toFlag verbosity - , buildDistPref = toFlag distPref - } - --- | Make sure that we don't pass new flags to setup scripts compiled against --- old versions of Cabal. -filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags -filterBuildFlags version config buildFlags - | version >= Version [1,19,1] [] = buildFlags_latest - -- Cabal < 1.19.1 doesn't support 'build -j'. 
- | otherwise = buildFlags_pre_1_19_1 - where - buildFlags_pre_1_19_1 = buildFlags { - buildNumJobs = NoFlag - } - buildFlags_latest = buildFlags { - -- Take the 'jobs' setting '~/.cabal/config' into account. - buildNumJobs = Flag . Just . determineNumJobs $ - (numJobsConfigFlag `mappend` numJobsCmdLineFlag) - } - numJobsConfigFlag = installNumJobs . savedInstallFlags $ config - numJobsCmdLineFlag = buildNumJobs buildFlags - - -replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action -replAction (replFlags, buildExFlags) extraArgs globalFlags = do - cwd <- getCurrentDirectory - pkgDesc <- findPackageDesc cwd - either (const onNoPkgDesc) (const onPkgDesc) pkgDesc - where - verbosity = fromFlagOrDefault normal (replVerbosity replFlags) - - -- There is a .cabal file in the current directory: start a REPL and load - -- the project's modules. - onPkgDesc = do - let noAddSource = case replReload replFlags of - Flag True -> SkipAddSourceDepsCheck - _ -> fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - -- Calls 'configureAction' to do the real work, so nothing special has to - -- be done to support sandboxes. - (useSandbox, _config, distPref) <- - reconfigure verbosity (replDistPref replFlags) - mempty [] globalFlags noAddSource NoFlag - (const Nothing) - let progConf = defaultProgramConfiguration - setupOptions = defaultSetupScriptOptions - { useCabalVersion = orLaterVersion $ Version [1,18,0] [] - , useDistPref = distPref - } - replFlags' = replFlags - { replVerbosity = toFlag verbosity - , replDistPref = toFlag distPref - } - - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - (Cabal.replCommand progConf) (const replFlags') extraArgs - - -- No .cabal file in the current directory: just start the REPL (possibly - -- using the sandbox package DB). - onNoPkgDesc = do - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags config - (comp, platform, programDb) <- configCompilerAux' configFlags - programDb' <- reconfigurePrograms verbosity - (replProgramPaths replFlags) - (replProgramArgs replFlags) - programDb - startInterpreter verbosity programDb' comp platform - (configPackageDB' configFlags) - --- | Re-configure the package in the current directory if needed. Deciding --- when to reconfigure and with which options is convoluted: --- --- If we are reconfiguring, we must always run @configure@ with the --- verbosity option we are given; however, that a previous configuration --- uses a different verbosity setting is not reason enough to reconfigure. --- --- The package should be configured to use the same \"dist\" prefix as --- given to the @build@ command, otherwise the build will probably --- fail. Not only does this determine the \"dist\" prefix setting if we --- need to reconfigure anyway, but an existing configuration should be --- invalidated if its \"dist\" prefix differs. --- --- If the package has never been configured (i.e., there is no --- LocalBuildInfo), we must configure first, using the default options. --- --- If the package has been configured, there will be a 'LocalBuildInfo'. --- If there no package description file, we assume that the --- 'PackageDescription' is up to date, though the configuration may need --- to be updated for other reasons (see above). If there is a package --- description file, and it has been modified since the 'LocalBuildInfo' --- was generated, then we need to reconfigure. 
--- --- The caller of this function may also have specific requirements --- regarding the flags the last configuration used. For example, --- 'testAction' requires that the package be configured with test suites --- enabled. The caller may pass the required settings to this function --- along with a function to check the validity of the saved 'ConfigFlags'; --- these required settings will be checked first upon determining that --- a previous configuration exists. -reconfigure :: Verbosity -- ^ Verbosity setting - -> Flag FilePath -- ^ \"dist\" prefix - -> ConfigFlags -- ^ Additional config flags to set. These flags - -- will be 'mappend'ed to the last used or - -- default 'ConfigFlags' as appropriate, so - -- this value should be 'mempty' with only the - -- required flags set. The required verbosity - -- and \"dist\" prefix flags will be set - -- automatically because they are always - -- required; therefore, it is not necessary to - -- set them here. - -> [String] -- ^ Extra arguments - -> GlobalFlags -- ^ Global flags - -> SkipAddSourceDepsCheck - -- ^ Should we skip the timestamp check for modified - -- add-source dependencies? - -> Flag (Maybe Int) - -- ^ -j flag for reinstalling add-source deps. - -> (ConfigFlags -> Maybe String) - -- ^ Check that the required flags are set in - -- the last used 'ConfigFlags'. If the required - -- flags are not set, provide a message to the - -- user explaining the reason for - -- reconfiguration. Because the correct \"dist\" - -- prefix setting is always required, it is checked - -- automatically; this function need not check - -- for it. - -> IO (UseSandbox, SavedConfig, FilePath) -reconfigure verbosity flagDistPref addConfigFlags extraArgs globalFlags - skipAddSourceDepsCheck numJobsFlag checkFlags = do - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config flagDistPref - eLbi <- tryGetPersistBuildConfig distPref - config' <- case eLbi of - Left err -> onNoBuildConfig (useSandbox, config) distPref err - Right lbi -> onBuildConfig (useSandbox, config) distPref lbi - return (useSandbox, config', distPref) - - where - - -- We couldn't load the saved package config file. - -- - -- If we're in a sandbox: add-source deps don't have to be reinstalled - -- (since we don't know the compiler & platform). - onNoBuildConfig :: (UseSandbox, SavedConfig) -> FilePath - -> ConfigStateFileError -> IO SavedConfig - onNoBuildConfig (_, config) distPref err = do - let msg = case err of - ConfigStateFileMissing -> "Package has never been configured." - ConfigStateFileNoParse -> "Saved package config file seems " - ++ "to be corrupt." - _ -> show err - case err of - -- Note: the build config could have been generated by a custom setup - -- script built against a different Cabal version, so it's crucial that - -- we ignore the bad version error here. - ConfigStateFileBadVersion _ _ _ -> info verbosity msg - _ -> do - let distVerbFlags = mempty - { configVerbosity = toFlag verbosity - , configDistPref = toFlag distPref - } - defaultFlags = mappend addConfigFlags distVerbFlags - notice verbosity - $ msg ++ " Configuring with default flags." ++ configureManually - configureAction (defaultFlags, defaultConfigExFlags) - extraArgs globalFlags - return config - - -- Package has been configured, but the configuration may be out of - -- date or required flags may not be set. - -- - -- If we're in a sandbox: reinstall the modified add-source deps and - -- force reconfigure if we did. 
- onBuildConfig :: (UseSandbox, SavedConfig) -> FilePath - -> LBI.LocalBuildInfo -> IO SavedConfig - onBuildConfig (useSandbox, config) distPref lbi = do - let configFlags = LBI.configFlags lbi - distVerbFlags = mempty - { configVerbosity = toFlag verbosity - , configDistPref = toFlag distPref - } - flags = mconcat [configFlags, addConfigFlags, distVerbFlags] - - -- Was the sandbox created after the package was already configured? We - -- may need to skip reinstallation of add-source deps and force - -- reconfigure. - let buildConfig = localBuildInfoFile distPref - sandboxConfig <- getSandboxConfigFilePath globalFlags - isSandboxConfigNewer <- - sandboxConfig `existsAndIsMoreRecentThan` buildConfig - - let skipAddSourceDepsCheck' - | isSandboxConfigNewer = SkipAddSourceDepsCheck - | otherwise = skipAddSourceDepsCheck - - when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $ - info verbosity "Skipping add-source deps check..." - - let (_, config') = updateInstallDirs - (configUserInstall flags) - (useSandbox, config) - - depsReinstalled <- - case skipAddSourceDepsCheck' of - DontSkipAddSourceDepsCheck -> - maybeReinstallAddSourceDeps - verbosity numJobsFlag flags globalFlags - (useSandbox, config') - SkipAddSourceDepsCheck -> do - return NoDepsReinstalled - - -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need - -- to force reconfigure. Note that it's possible to use @cabal.config@ - -- even without sandboxes. - isUserPackageEnvironmentFileNewer <- - userPackageEnvironmentFile `existsAndIsMoreRecentThan` buildConfig - - -- Determine whether we need to reconfigure and which message to show to - -- the user if that is the case. - mMsg <- determineMessageToShow distPref lbi configFlags - depsReinstalled isSandboxConfigNewer - isUserPackageEnvironmentFileNewer - case mMsg of - - -- No message for the user indicates that reconfiguration - -- is not required. - Nothing -> return config' - - -- Show the message and reconfigure. - Just msg -> do - notice verbosity msg - configureAction (flags, defaultConfigExFlags) - extraArgs globalFlags - return config' - - -- Determine what message, if any, to display to the user if reconfiguration - -- is required. - determineMessageToShow :: FilePath -> LBI.LocalBuildInfo -> ConfigFlags - -> WereDepsReinstalled -> Bool -> Bool - -> IO (Maybe String) - determineMessageToShow _ _ _ _ True _ = - -- The sandbox was created after the package was already configured. - return $! Just $! sandboxConfigNewerMessage - - determineMessageToShow _ _ _ _ False True = - -- The user package environment file was modified. - return $! Just $! userPackageEnvironmentFileModifiedMessage - - determineMessageToShow distPref lbi configFlags depsReinstalled - False False = do - let savedDistPref = fromFlagOrDefault - (useDistPref defaultSetupScriptOptions) - (configDistPref configFlags) - case depsReinstalled of - ReinstalledSomeDeps -> - -- Some add-source deps were reinstalled. - return $! Just $! reinstalledDepsMessage - NoDepsReinstalled -> - case checkFlags configFlags of - -- Flag required by the caller is not set. - Just msg -> return $! Just $! msg ++ configureManually - - Nothing - -- Required "dist" prefix is not set. - | savedDistPref /= distPref -> - return $! Just distPrefMessage - - -- All required flags are set, but the configuration - -- may be outdated. - | otherwise -> case LBI.pkgDescrFile lbi of - Nothing -> return Nothing - Just pdFile -> do - outdated <- checkPersistBuildConfigOutdated - distPref pdFile - return $! 
if outdated - then Just $! outdatedMessage pdFile - else Nothing - - reconfiguringMostRecent = " Re-configuring with most recently used options." - configureManually = " If this fails, please run configure manually." - sandboxConfigNewerMessage = - "The sandbox was created after the package was already configured." - ++ reconfiguringMostRecent - ++ configureManually - userPackageEnvironmentFileModifiedMessage = - "The user package environment file ('" - ++ userPackageEnvironmentFile ++ "') was modified." - ++ reconfiguringMostRecent - ++ configureManually - distPrefMessage = - "Package previously configured with different \"dist\" prefix." - ++ reconfiguringMostRecent - ++ configureManually - outdatedMessage pdFile = - pdFile ++ " has been changed." - ++ reconfiguringMostRecent - ++ configureManually - reinstalledDepsMessage = - "Some add-source dependencies have been reinstalled." - ++ reconfiguringMostRecent - ++ configureManually - -installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> Action -installAction (configFlags, _, installFlags, _) _ globalFlags - | fromFlagOrDefault False (installOnly installFlags) = do - let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (configDistPref configFlags) - let setupOpts = defaultSetupScriptOptions { useDistPref = distPref } - setupWrapper verbosity setupOpts Nothing installCommand (const mempty) [] - -installAction (configFlags, configExFlags, installFlags, haddockFlags) - extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - (useSandbox, config) <- fmap - (updateInstallDirs (configUserInstall configFlags)) - (loadConfigOrSandboxConfig verbosity globalFlags) - targets <- readUserTargets verbosity extraArgs - - -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to - -- 'configure' when run inside a sandbox. Right now, running - -- - -- $ cabal sandbox init && cabal configure -w /path/to/ghc - -- && cabal build && cabal install - -- - -- performs the compilation twice unless you also pass -w to 'install'. - -- However, this is the same behaviour that 'cabal install' has in the normal - -- mode of operation, so we stick to it for consistency. - - let sandboxDistPref = case useSandbox of - NoSandbox -> NoFlag - UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir - distPref <- findSavedDistPref config - (configDistPref configFlags `mappend` sandboxDistPref) - - let configFlags' = maybeForceTests installFlags' $ - savedConfigureFlags config `mappend` - configFlags { configDistPref = toFlag distPref } - configExFlags' = defaultConfigExFlags `mappend` - savedConfigureExFlags config `mappend` configExFlags - installFlags' = defaultInstallFlags `mappend` - savedInstallFlags config `mappend` installFlags - haddockFlags' = defaultHaddockFlags `mappend` - savedHaddockFlags config `mappend` - haddockFlags { haddockDistPref = toFlag distPref } - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, conf) <- configCompilerAux' configFlags' - -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the - -- future. 
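The removed 'reconfigure' helper above re-runs 'configure' only when one of several conditions holds, checked in a fixed priority order by 'determineMessageToShow'. The following is a condensed, standalone sketch of that order; the record type and field names are hypothetical, not cabal-install's actual API:

    -- Condensed sketch of the reconfiguration checks, in the priority order
    -- used by the removed code. All names here are illustrative only.
    data ReconfigureCheck = ReconfigureCheck
      { sandboxConfigNewer   :: Bool          -- cabal.sandbox.config newer than the saved build config
      , userPkgEnvNewer      :: Bool          -- cabal.config newer than the saved build config
      , addSourceDepsChanged :: Bool          -- some add-source dependencies were reinstalled
      , missingRequiredFlag  :: Maybe String  -- e.g. "Re-configuring with test suites enabled."
      , distPrefChanged      :: Bool          -- saved "dist" prefix differs from the requested one
      , pkgDescrOutdated     :: Bool          -- the .cabal file changed since the last configure
      }

    -- Returns Nothing when the existing configuration can be reused, or else
    -- the reason shown to the user before reconfiguring.
    reconfigureReason :: ReconfigureCheck -> Maybe String
    reconfigureReason c
      | sandboxConfigNewer c              = Just "sandbox created after the package was configured"
      | userPkgEnvNewer c                 = Just "user package environment file was modified"
      | addSourceDepsChanged c            = Just "add-source dependencies were reinstalled"
      | Just msg <- missingRequiredFlag c = Just msg
      | distPrefChanged c                 = Just "package was configured with a different \"dist\" prefix"
      | pkgDescrOutdated c                = Just "package description file has changed"
      | otherwise                         = Nothing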
- conf' <- configureAllKnownPrograms verbosity conf - - -- If we're working inside a sandbox and the user has set the -w option, we - -- may need to create a sandbox-local package DB for this compiler and add a - -- timestamp record for this compiler to the timestamp file. - configFlags'' <- case useSandbox of - NoSandbox -> configAbsolutePaths $ configFlags' - (UseSandbox sandboxDir) -> return $ setPackageDB sandboxDir comp platform - configFlags' - - whenUsingSandbox useSandbox $ \sandboxDir -> do - initPackageDBIfNeeded verbosity configFlags'' comp conf' - - indexFile <- tryGetIndexFilePath config - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - - -- TODO: Passing 'SandboxPackageInfo' to install unconditionally here means - -- that 'cabal install some-package' inside a sandbox will sometimes reinstall - -- modified add-source deps, even if they are not among the dependencies of - -- 'some-package'. This can also prevent packages that depend on older - -- versions of add-source'd packages from building (see #1362). - maybeWithSandboxPackageInfo verbosity configFlags'' globalFlags' - comp platform conf useSandbox $ \mSandboxPkgInfo -> - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - install verbosity - (configPackageDB' configFlags'') - repoContext - comp platform conf' - useSandbox mSandboxPkgInfo - globalFlags' configFlags'' configExFlags' - installFlags' haddockFlags' - targets - - where - -- '--run-tests' implies '--enable-tests'. - maybeForceTests installFlags' configFlags' = - if fromFlagOrDefault False (installRunTests installFlags') - then configFlags' { configTests = toFlag True } - else configFlags' - -testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags - -> IO () -testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (testVerbosity testFlags) - addConfigFlags = mempty { configTests = toFlag True } - noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - buildFlags' = buildFlags - { buildVerbosity = testVerbosity testFlags } - checkFlags flags - | fromFlagOrDefault False (configTests flags) = Nothing - | otherwise = Just "Re-configuring with test suites enabled." - - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. - (useSandbox, config, distPref) <- - reconfigure verbosity (testDistPref testFlags) - addConfigFlags [] globalFlags noAddSource - (buildNumJobs buildFlags') checkFlags - let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - testFlags' = testFlags { testDistPref = toFlag distPref } - - -- The package was just configured, so the LBI must be available. - names <- componentNamesFromLBI verbosity distPref "test suites" - (\c -> case c of { LBI.CTest{} -> True; _ -> False }) - let extraArgs' - | null extraArgs = case names of - ComponentNamesUnknown -> [] - ComponentNames names' -> [ name | LBI.CTestName name <- names' ] - | otherwise = extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config distPref buildFlags' extraArgs' - - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - Cabal.testCommand (const testFlags') extraArgs' - -data ComponentNames = ComponentNamesUnknown - | ComponentNames [LBI.ComponentName] - --- | Return the names of all buildable components matching a given predicate. 
-componentNamesFromLBI :: Verbosity -> FilePath -> String - -> (LBI.Component -> Bool) - -> IO ComponentNames -componentNamesFromLBI verbosity distPref targetsDescr compPred = do - eLBI <- tryGetPersistBuildConfig distPref - case eLBI of - Left err -> case err of - -- Note: the build config could have been generated by a custom setup - -- script built against a different Cabal version, so it's crucial that - -- we ignore the bad version error here. - ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown - _ -> die (show err) - Right lbi -> do - let pkgDescr = LBI.localPkgDescr lbi - names = map LBI.componentName - . filter (buildable . LBI.componentBuildInfo) - . filter compPred $ - LBI.pkgComponents pkgDescr - if null names - then do notice verbosity $ "Package has no buildable " - ++ targetsDescr ++ "." - exitSuccess -- See #3215. - - else return $! (ComponentNames names) - -benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags) - -> [String] -> GlobalFlags - -> IO () -benchmarkAction (benchmarkFlags, buildFlags, buildExFlags) - extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal - (benchmarkVerbosity benchmarkFlags) - addConfigFlags = mempty { configBenchmarks = toFlag True } - buildFlags' = buildFlags - { buildVerbosity = benchmarkVerbosity benchmarkFlags } - checkFlags flags - | fromFlagOrDefault False (configBenchmarks flags) = Nothing - | otherwise = Just "Re-configuring with benchmarks enabled." - noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. - (useSandbox, config, distPref) <- - reconfigure verbosity (benchmarkDistPref benchmarkFlags) - addConfigFlags [] globalFlags noAddSource - (buildNumJobs buildFlags') checkFlags - let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref } - - -- The package was just configured, so the LBI must be available. 
- names <- componentNamesFromLBI verbosity distPref "benchmarks" - (\c -> case c of { LBI.CBench{} -> True; _ -> False; }) - let extraArgs' - | null extraArgs = case names of - ComponentNamesUnknown -> [] - ComponentNames names' -> [name | LBI.CBenchName name <- names'] - | otherwise = extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config distPref buildFlags' extraArgs' - - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - Cabal.benchmarkCommand (const benchmarkFlags') extraArgs' - -haddockAction :: HaddockFlags -> [String] -> Action -haddockAction haddockFlags extraArgs globalFlags = do - let verbosity = fromFlag (haddockVerbosity haddockFlags) - (_useSandbox, config, distPref) <- - reconfigure verbosity (haddockDistPref haddockFlags) - mempty [] globalFlags DontSkipAddSourceDepsCheck - NoFlag (const Nothing) - let haddockFlags' = defaultHaddockFlags `mappend` - savedHaddockFlags config `mappend` - haddockFlags { haddockDistPref = toFlag distPref } - setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } - setupWrapper verbosity setupScriptOptions Nothing - haddockCommand (const haddockFlags') extraArgs - when (haddockForHackage haddockFlags == Flag ForHackage) $ do - pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) - let dest = distPref name <.> "tar.gz" - name = display (packageId pkg) ++ "-docs" - docDir = distPref "doc" "html" - createTarGzFile dest docDir name - notice verbosity $ "Documentation tarball created: " ++ dest - -cleanAction :: CleanFlags -> [String] -> Action -cleanAction cleanFlags extraArgs globalFlags = do - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (cleanDistPref cleanFlags) - let setupScriptOptions = defaultSetupScriptOptions - { useDistPref = distPref - , useWin32CleanHack = True - } - cleanFlags' = cleanFlags { cleanDistPref = toFlag distPref } - setupWrapper verbosity setupScriptOptions Nothing - cleanCommand (const cleanFlags') extraArgs - where - verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags) - -listAction :: ListFlags -> [String] -> Action -listAction listFlags extraArgs globalFlags = do - let verbosity = fromFlag (listVerbosity listFlags) - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags' = savedConfigureFlags config - configFlags = configFlags' { - configPackageDBs = configPackageDBs configFlags' - `mappend` listPackageDBs listFlags - } - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, _, conf) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - List.list verbosity - (configPackageDB' configFlags) - repoContext - comp - conf - listFlags - extraArgs - -infoAction :: InfoFlags -> [String] -> Action -infoAction infoFlags extraArgs globalFlags = do - let verbosity = fromFlag (infoVerbosity infoFlags) - targets <- readUserTargets verbosity extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags' = savedConfigureFlags config - configFlags = configFlags' { - configPackageDBs = configPackageDBs configFlags' - `mappend` infoPackageDBs infoFlags - } - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, _, conf) <- configCompilerAuxEx configFlags - withRepoContext verbosity 
globalFlags' $ \repoContext -> - List.info verbosity - (configPackageDB' configFlags) - repoContext - comp - conf - globalFlags' - infoFlags - targets - -updateAction :: Flag Verbosity -> [String] -> Action -updateAction verbosityFlag extraArgs globalFlags = do - unless (null extraArgs) $ - die $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs - let verbosity = fromFlag verbosityFlag - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - update verbosity repoContext - -upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> Action -upgradeAction _ _ _ = die $ - "Use the 'cabal install' command instead of 'cabal upgrade'.\n" - ++ "You can install the latest version of a package using 'cabal install'. " - ++ "The 'cabal upgrade' command has been removed because people found it " - ++ "confusing and it often led to broken packages.\n" - ++ "If you want the old upgrade behaviour then use the install command " - ++ "with the --upgrade-dependencies flag (but check first with --dry-run " - ++ "to see what would happen). This will try to pick the latest versions " - ++ "of all dependencies, rather than the usual behaviour of trying to pick " - ++ "installed versions of all dependencies. If you do use " - ++ "--upgrade-dependencies, it is recommended that you do not upgrade core " - ++ "packages (e.g. by using appropriate --constraint= flags)." - -fetchAction :: FetchFlags -> [String] -> Action -fetchAction fetchFlags extraArgs globalFlags = do - let verbosity = fromFlag (fetchVerbosity fetchFlags) - targets <- readUserTargets verbosity extraArgs - config <- loadConfig verbosity (globalConfigFile globalFlags) - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, conf) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - fetch verbosity - (configPackageDB' configFlags) - repoContext - comp platform conf globalFlags' fetchFlags - targets - -freezeAction :: FreezeFlags -> [String] -> Action -freezeAction freezeFlags _extraArgs globalFlags = do - let verbosity = fromFlag (freezeVerbosity freezeFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, conf) <- configCompilerAux' configFlags - - maybeWithSandboxPackageInfo verbosity configFlags globalFlags' - comp platform conf useSandbox $ \mSandboxPkgInfo -> - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - freeze verbosity - (configPackageDB' configFlags) - repoContext - comp platform conf - mSandboxPkgInfo - globalFlags' freezeFlags - -genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () -genBoundsAction freezeFlags _extraArgs globalFlags = do - let verbosity = fromFlag (freezeVerbosity freezeFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, conf) <- configCompilerAux' configFlags - - maybeWithSandboxPackageInfo verbosity configFlags globalFlags' - comp platform conf useSandbox $ \mSandboxPkgInfo -> - 
maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - genBounds verbosity - (configPackageDB' configFlags) - repoContext - comp platform conf - mSandboxPkgInfo - globalFlags' freezeFlags - -uploadAction :: UploadFlags -> [String] -> Action -uploadAction uploadFlags extraArgs globalFlags = do - config <- loadConfig verbosity (globalConfigFile globalFlags) - let uploadFlags' = savedUploadFlags config `mappend` uploadFlags - globalFlags' = savedGlobalFlags config `mappend` globalFlags - tarfiles = extraArgs - when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $ - die "the 'upload' command expects at least one .tar.gz archive." - when (fromFlag (uploadCheck uploadFlags') - && fromFlag (uploadDoc uploadFlags')) $ - die "--check and --doc cannot be used together." - checkTarFiles extraArgs - maybe_password <- - case uploadPasswordCmd uploadFlags' - of Flag (xs:xss) -> Just . Password <$> - getProgramInvocationOutput verbosity - (simpleProgramInvocation xs xss) - _ -> pure $ flagToMaybe $ uploadPassword uploadFlags' - withRepoContext verbosity globalFlags' $ \repoContext -> do - if fromFlag (uploadCheck uploadFlags') - then do - Upload.check verbosity repoContext tarfiles - else if fromFlag (uploadDoc uploadFlags') - then do - when (length tarfiles > 1) $ - die $ "the 'upload' command can only upload documentation " - ++ "for one package at a time." - tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles - Upload.uploadDoc verbosity - repoContext - (flagToMaybe $ uploadUsername uploadFlags') - maybe_password - tarfile - else do - Upload.upload verbosity - repoContext - (flagToMaybe $ uploadUsername uploadFlags') - maybe_password - tarfiles - where - verbosity = fromFlag (uploadVerbosity uploadFlags) - checkTarFiles tarfiles - | not (null otherFiles) - = die $ "the 'upload' command expects only .tar.gz archives: " - ++ intercalate ", " otherFiles - | otherwise = sequence_ - [ do exists <- doesFileExist tarfile - unless exists $ die $ "file not found: " ++ tarfile - | tarfile <- tarfiles ] - - where otherFiles = filter (not . isTarGzFile) tarfiles - isTarGzFile file = case splitExtension file of - (file', ".gz") -> takeExtension file' == ".tar" - _ -> False - generateDocTarball config = do - notice verbosity $ - "No documentation tarball specified. " - ++ "Building a documentation tarball with default settings...\n" - ++ "If you need to customise Haddock options, " - ++ "run 'haddock --for-hackage' first " - ++ "to generate a documentation tarball." - haddockAction (defaultHaddockFlags { haddockForHackage = Flag ForHackage }) - [] globalFlags - distPref <- findSavedDistPref config NoFlag - pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) - return $ distPref display (packageId pkg) ++ "-docs" <.> "tar.gz" - -checkAction :: Flag Verbosity -> [String] -> Action -checkAction verbosityFlag extraArgs _globalFlags = do - unless (null extraArgs) $ - die $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs - allOk <- Check.check (fromFlag verbosityFlag) - unless allOk exitFailure - -formatAction :: Flag Verbosity -> [String] -> Action -formatAction verbosityFlag extraArgs _globalFlags = do - let verbosity = fromFlag verbosityFlag - path <- case extraArgs of - [] -> do cwd <- getCurrentDirectory - tryFindPackageDesc cwd - (p:_) -> return p - pkgDesc <- readPackageDescription verbosity path - -- Uses 'writeFileAtomic' under the hood. 
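The removed haddockAction/generateDocTarball code above names the Hackage documentation tarball after the package identifier under the "dist" prefix. A small illustrative sketch of that naming scheme; the helper name is hypothetical and the sketch assumes the standard System.FilePath operators:

    import System.FilePath ((</>), (<.>))

    -- Hypothetical helper illustrating the docs-tarball location used by the
    -- removed code: <dist prefix>/<package-id>-docs.tar.gz, built from the
    -- Haddock output under <dist prefix>/doc/html.
    docTarballPath :: FilePath -> String -> FilePath
    docTarballPath distPref pkgId = distPref </> (pkgId ++ "-docs") <.> "tar.gz"

    -- docTarballPath "dist" "foo-1.0"  ==  "dist/foo-1.0-docs.tar.gz"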
- writeGenericPackageDescription path pkgDesc - -uninstallAction :: Flag Verbosity -> [String] -> Action -uninstallAction _verbosityFlag extraArgs _globalFlags = do - let package = case extraArgs of - p:_ -> p - _ -> "PACKAGE_NAME" - die $ "This version of 'cabal-install' does not support the 'uninstall' " - ++ "operation. " - ++ "It will likely be implemented at some point in the future; " - ++ "in the meantime you're advised to use either 'ghc-pkg unregister " - ++ package ++ "' or 'cabal sandbox hc-pkg -- unregister " ++ package ++ "'." - - -sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> Action -sdistAction (sdistFlags, sdistExFlags) extraArgs globalFlags = do - unless (null extraArgs) $ - die $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs - let verbosity = fromFlag (sDistVerbosity sdistFlags) - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (sDistDistPref sdistFlags) - let sdistFlags' = sdistFlags { sDistDistPref = toFlag distPref } - sdist sdistFlags' sdistExFlags - -reportAction :: ReportFlags -> [String] -> Action -reportAction reportFlags extraArgs globalFlags = do - unless (null extraArgs) $ - die $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs - - let verbosity = fromFlag (reportVerbosity reportFlags) - config <- loadConfig verbosity (globalConfigFile globalFlags) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - reportFlags' = savedReportFlags config `mappend` reportFlags - - withRepoContext verbosity globalFlags' $ \repoContext -> - Upload.report verbosity repoContext - (flagToMaybe $ reportUsername reportFlags') - (flagToMaybe $ reportPassword reportFlags') - -runAction :: (BuildFlags, BuildExFlags) -> [String] -> Action -runAction (buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) - let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. 
- (useSandbox, config, distPref) <- - reconfigure verbosity (buildDistPref buildFlags) mempty [] - globalFlags noAddSource (buildNumJobs buildFlags) - (const Nothing) - - lbi <- getPersistBuildConfig distPref - (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config distPref buildFlags ["exe:" ++ exeName exe] - - maybeWithSandboxDirOnSearchPath useSandbox $ - run verbosity lbi exe exeArgs - -getAction :: GetFlags -> [String] -> Action -getAction getFlags extraArgs globalFlags = do - let verbosity = fromFlag (getVerbosity getFlags) - targets <- readUserTargets verbosity extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - withRepoContext verbosity (savedGlobalFlags config) $ \repoContext -> - get verbosity - repoContext - globalFlags' - getFlags - targets - -unpackAction :: GetFlags -> [String] -> Action -unpackAction getFlags extraArgs globalFlags = do - getAction getFlags extraArgs globalFlags - -initAction :: InitFlags -> [String] -> Action -initAction initFlags extraArgs globalFlags = do - when (extraArgs /= []) $ - die $ "'init' doesn't take any extra arguments: " ++ unwords extraArgs - let verbosity = fromFlag (initVerbosity initFlags) - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags = savedConfigureFlags config - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, _, conf) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - initCabal verbosity - (configPackageDB' configFlags) - repoContext - comp - conf - initFlags - -sandboxAction :: SandboxFlags -> [String] -> Action -sandboxAction sandboxFlags extraArgs globalFlags = do - let verbosity = fromFlag (sandboxVerbosity sandboxFlags) - case extraArgs of - -- Basic sandbox commands. - ["init"] -> sandboxInit verbosity sandboxFlags globalFlags - ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags - ("add-source":extra) -> do - when (noExtraArgs extra) $ - die "The 'sandbox add-source' command expects at least one argument" - sandboxAddSource verbosity extra sandboxFlags globalFlags - ("delete-source":extra) -> do - when (noExtraArgs extra) $ - die ("The 'sandbox delete-source' command expects " ++ - "at least one argument") - sandboxDeleteSource verbosity extra sandboxFlags globalFlags - ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags - - -- More advanced commands. - ("hc-pkg":extra) -> do - when (noExtraArgs extra) $ - die $ "The 'sandbox hc-pkg' command expects at least one argument" - sandboxHcPkg verbosity sandboxFlags globalFlags extra - ["buildopts"] -> die "Not implemented!" - - -- Hidden commands. - ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags - - -- Error handling. - [] -> die $ "Please specify a subcommand (see 'help sandbox')" - _ -> die $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs - - where - noExtraArgs = (<1) . 
length - -execAction :: ExecFlags -> [String] -> Action -execAction execFlags extraArgs globalFlags = do - let verbosity = fromFlag (execVerbosity execFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags config - (comp, platform, conf) <- getPersistOrConfigCompiler configFlags - exec verbosity useSandbox comp platform conf extraArgs - -userConfigAction :: UserConfigFlags -> [String] -> Action -userConfigAction ucflags extraArgs globalFlags = do - let verbosity = fromFlag (userConfigVerbosity ucflags) - force = fromFlag (userConfigForce ucflags) - case extraArgs of - ("init":_) -> do - path <- configFile - fileExists <- doesFileExist path - if (not fileExists || (fileExists && force)) - then void $ createDefaultConfigFile verbosity path - else die $ path ++ " already exists." - ("diff":_) -> mapM_ putStrLn =<< userConfigDiff globalFlags - ("update":_) -> userConfigUpdate verbosity globalFlags - -- Error handling. - [] -> die $ "Please specify a subcommand (see 'help user-config')" - _ -> die $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs - where configFile = getConfigFilePath (globalConfigFile globalFlags) - --- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details. --- -win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> Action -win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do - let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags) - Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path -win32SelfUpgradeAction _ _ _ = return () - --- | Used as an entry point when cabal-install needs to invoke itself --- as a setup script. This can happen e.g. when doing parallel builds. --- -actAsSetupAction :: ActAsSetupFlags -> [String] -> Action -actAsSetupAction actAsSetupFlags args _globalFlags = - let bt = fromFlag (actAsSetupBuildType actAsSetupFlags) - in case bt of - Simple -> Simple.defaultMainArgs args - Configure -> Simple.defaultMainWithHooksArgs - Simple.autoconfUserHooks args - Make -> Make.defaultMainArgs args - Custom -> error "actAsSetupAction Custom" - (UnknownBuildType _) -> error "actAsSetupAction UnknownBuildType" - -manpageAction :: [CommandSpec action] -> Flag Verbosity -> [String] -> Action -manpageAction commands _ extraArgs _ = do - unless (null extraArgs) $ - die $ "'manpage' doesn't take any extra arguments: " ++ unwords extraArgs - pname <- getProgName - let cabalCmd = if takeExtension pname == ".exe" - then dropExtension pname - else pname - putStrLn $ manpage cabalCmd commands diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/README.md 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -The cabal-install package -========================= - -See the [Cabal web site] for more information. - -The `cabal-install` package provides a command line tool named `cabal`. -It uses the [Cabal] library and provides a user interface to the -Cabal/[Hackage] build automation and package management system. It can -build and install both local and remote packages, including -dependencies. 
- -[Cabal web site]: http://www.haskell.org/cabal/ -[Cabal]: ../Cabal/README.md - -Installing the `cabal` command-line tool -======================================== - -The `cabal-install` package requires a number of other packages, most of -which come with a standard GHC installation. It requires the [network] -package, which is sometimes packaged separately by Linux distributions; -for example, on Debian or Ubuntu, it is located in the -"libghc6-network-dev" package. - -`cabal` requires a few other Haskell packages that are not always -installed. The exact list is specified in the [.cabal] file or in the -[bootstrap.sh] file. All these packages are available from [Hackage]. - -Note that on some Unix systems you may need to install an additional -zlib development package using your system package manager; for example, -on Debian or Ubuntu, it is located in the "zlib1g-dev" package; on -Fedora, it is located in the "zlib-devel" package. It is required -because the Haskell zlib package uses the system zlib C library and -header files. - -The `cabal-install` package is now part of the [Haskell Platform], so you -do not usually need to install it separately. However, if you are -starting from a minimal GHC installation, you need to install -`cabal-install` manually. Since it is an ordinary Cabal package, -`cabal-install` can be built the standard way; to facilitate this, the -process has been partially automated. It is described below. - -[.cabal]: cabal-install.cabal -[network]: http://hackage.haskell.org/package/network -[Haskell Platform]: http://www.haskell.org/platform/ - -Quick start on Unix-like systems --------------------------------- - -As a convenience for users on Unix-like systems, there is a -[bootstrap.sh] script that will download and install each of -`cabal-install`'s dependencies in turn. - - $ ./bootstrap.sh - -It will download and install the dependencies. The script will install the -library packages (vanilla, profiling and shared) into `$HOME/.cabal/` and the -`cabal` program into `$HOME/.cabal/bin/`. If you don't want to install profiling -and shared versions of the libraries, use - - $ EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh - -You then have the choice either to place `$HOME/.cabal/bin` on your -`$PATH` or move the `cabal` program to somewhere on your `$PATH`. Next, -you can get the latest list of packages by running: - - $ cabal update - -This will also create a default configuration file, if it does not -already exist, at `$HOME/.cabal/config`. - -By default, `cabal` will install programs to `$HOME/.cabal/bin`. If you -do not want to add this directory to your `$PATH`, you can change -the setting in the config file; for example, you could use the -following: - - symlink-bindir: $HOME/bin - - -Quick start on Windows systems ------------------------------- - -For Windows users, a precompiled program ([cabal.exe]) is provided. -Download and put it somewhere on your `%PATH%` (for example, -`C:\Program Files\Haskell\bin`.) - -Next, you can get the latest list of packages by running: - - $ cabal update - -This will also create a default configuration file (if it does not -already exist) at -`C:\Documents and Settings\%USERNAME%\Application Data\cabal\config`. - -[cabal.exe]: http://www.haskell.org/cabal/release/cabal-install-latest/ - -Using `cabal` -============= - -There are two sets of commands: commands for working with a local -project build tree and those for working with packages distributed -from [Hackage]. 
- -For the list of the full set of commands and flags for each command, -run: - - $ cabal help - - -Commands for developers for local build trees ---------------------------------------------- - -The commands for local project build trees are almost the same as the -`runghc Setup` command-line interface you may already be familiar with. -In particular, it has the following commands: - - * `cabal configure` - * `cabal build` - * `cabal haddock` - * `cabal clean` - * `cabal sdist` - -The `install` command is somewhat different; it is an all-in-one -operation. If you run `cabal install` in your build tree, it will -configure, build, and install. It takes all the flags that `configure` -takes such as `--global` and `--prefix`. - -In addition, `cabal` will download and install any dependencies that are -not already installed. It can also rebuild packages to ensure a -consistent set of dependencies. - - -Commands for released Hackage packages --------------------------------------- - - $ cabal update - -This command gets the latest list of packages from the [Hackage] server. -On occasion, this command must be run manually--for instance, if you -want to install a newly released package. - - $ cabal install xmonad - -This command installs one or more named packages, and all their -dependencies, from Hackage. By default, it installs the latest available -version; however, you may specify exact versions or version ranges. For -example, `cabal install alex-2.2` or `cabal install parsec < 3`. - - $ cabal list xml - -This does a search of the installed and available packages. It does a -case-insensitive substring match on the package name. - - -[Hackage]: http://hackage.haskell.org -[bootstrap.sh]: bootstrap.sh diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -import Distribution.PackageDescription ( PackageDescription ) -import Distribution.Simple ( defaultMainWithHooks - , simpleUserHooks - , postBuild - , postCopy - , postInst - ) -import Distribution.Simple.InstallDirs ( mandir - , CopyDest (NoCopyDest) - ) -import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
- , absoluteInstallDirs - ) -import Distribution.Simple.Utils ( copyFiles - , notice ) -import Distribution.Simple.Setup ( buildVerbosity - , copyDest - , copyVerbosity - , fromFlag - , installVerbosity - ) -import Distribution.Verbosity ( Verbosity ) - -import System.IO ( openFile - , IOMode (WriteMode) - ) -import System.Process ( runProcess ) -import System.FilePath ( (</>) ) - - -main :: IO () -main = defaultMainWithHooks $ simpleUserHooks - { postBuild = \ _ flags _ lbi -> - buildManpage lbi (fromFlag $ buildVerbosity flags) - , postCopy = \ _ flags pkg lbi -> - installManpage pkg lbi (fromFlag $ copyVerbosity flags) (fromFlag $ copyDest flags) - , postInst = \ _ flags pkg lbi -> - installManpage pkg lbi (fromFlag $ installVerbosity flags) NoCopyDest - } - -buildManpage :: LocalBuildInfo -> Verbosity -> IO () -buildManpage lbi verbosity = do - let cabal = buildDir lbi </> "cabal/cabal" - manpage = buildDir lbi </> "cabal/cabal.1" - manpageHandle <- openFile manpage WriteMode - notice verbosity ("Generating manual page " ++ manpage ++ " ...") - _ <- runProcess cabal ["manpage"] Nothing Nothing Nothing (Just manpageHandle) Nothing - return () - -installManpage :: PackageDescription -> LocalBuildInfo -> Verbosity -> CopyDest -> IO () -installManpage pkg lbi verbosity copy = do - let destDir = mandir (absoluteInstallDirs pkg lbi copy) </> "man1" - copyFiles verbosity destDir [(buildDir lbi </> "cabal", "cabal.1")] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/A.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/A.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/A.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/A.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module A where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/plain.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/plain.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/plain.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/plain.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: plain -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Custom -cabal-version: >=1.10 - -library - exposed-modules: A - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -import Distribution.Simple -import System.IO -main = hPutStrLn stderr "Custom" >> defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.err 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.err 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -Custom -Custom diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom/should_run/plain.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -. 
../common.sh -cd plain -cabal configure -cabal build diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: Cabal -version: 99998 -build-type: Simple -cabal-version: >= 1.2 - -library - build-depends: base - exposed-modules: CabalMessage diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -module CabalMessage where - -message = "This is Cabal-99998" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: Cabal -version: 99999 -build-type: Simple -cabal-version: >= 1.2 - -library - build-depends: base - exposed-modules: CabalMessage diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -module CabalMessage where - -message = "This is Cabal-99999" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: custom-setup -version: 1.0 -build-type: Custom -cabal-version: >= 99999 - -custom-setup - setup-depends: base, Cabal >= 99999 - -library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -import CabalMessage (message) -import System.Exit -import System.IO - -main = hPutStrLn stderr message >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: custom-setup-without-cabal -version: 1.0 -build-type: Custom -cabal-version: >= 99999 - -custom-setup - setup-depends: base - -library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -import System.Exit -import System.IO - -main = hPutStrLn stderr "My custom Setup" >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: custom-setup-without-cabal-defaultMain -version: 1.0 -build-type: Custom -cabal-version: >= 1.2 - -custom-setup - setup-depends: base - -library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -import Distribution.Simple - -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -. ../common.sh -cd custom-setup-without-cabal-defaultMain - -# This package has explicit setup dependencies that do not include Cabal. -# Compilation should fail because Setup.hs imports Distribution.Simple. -! 
cabal new-build custom-setup-without-cabal-defaultMain > output 2>&1 -cat output -grep -q "\(Could not find module\|Failed to load interface for\).*Distribution\\.Simple" output \ - || die "Should not have been able to import Cabal" - -grep -q "It is a member of the hidden package .*Cabal-" output \ - || die "Cabal should be available" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -. ../common.sh -cd custom-setup-without-cabal - -# This package has explicit setup dependencies that do not include Cabal. -# new-build should try to build it, even though the cabal-version cannot be -# satisfied by an installed version of Cabal (cabal-version: >= 99999). However, -# configure should fail because Setup.hs just prints an error message and exits. -! cabal new-build custom-setup-without-cabal > output 2>&1 -cat output -grep -q "My custom Setup" output \ - || die "Expected output from custom Setup" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -# Regression test for issue #3436 - -. ../common.sh -cabal sandbox init -cabal install ./Cabal-99998 -cabal sandbox add-source Cabal-99999 - -# Install custom-setup, which has a setup dependency on Cabal-99999. -# cabal should build the setup script with Cabal-99999, but then -# configure should fail because Setup just prints an error message -# imported from Cabal and exits. -! 
cabal install custom-setup/ > output 2>&1 - -cat output -grep -q "This is Cabal-99999" output || die "Expected output from Cabal-99999" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -RE:^cabal(\.exe)?: Please specify an executable to run$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. 
../common.sh - -cabal exec diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -This is my-executable diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -. ../common.sh - -cabal sandbox delete > /dev/null -cabal exec my-executable && die "Unexpectedly found executable" - -cabal sandbox init > /dev/null -cabal install > /dev/null - -# Execute indirectly via bash to ensure that we go through $PATH -cabal exec sh -- -c my-executable || die "Did not find executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -Config file path source is commandline option. -Config file config-file not found. -Writing default configuration to config-file -find_me_in_output diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -. 
../common.sh -cabal exec echo find_me_in_output diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -This is my-executable diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -. ../common.sh - -cabal sandbox delete > /dev/null -cabal exec my-executable && die "Unexpectedly found executable" - -cabal sandbox init > /dev/null -cabal install > /dev/null - -cabal exec my-executable || die "Did not find executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -. ../common.sh - -cabal sandbox delete > /dev/null -cabal exec my-executable && die "Unexpectedly found executable" - -cabal sandbox init > /dev/null -cabal install > /dev/null - -# The library should not be available outside the sandbox -"$GHC_PKG" list | grep -v "my-0.1" - -# When run inside 'cabal-exec' the 'sandbox hc-pkg list' sub-command -# should find the library. 
-cabal exec sh -- -c 'cd subdir && "$CABAL" sandbox hc-pkg list' | grep "my-0.1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -. ../common.sh - -cabal sandbox delete > /dev/null -cabal exec my-executable && die "Unexpectedly found executable" - -cabal sandbox init > /dev/null -cabal install > /dev/null - -# The library should not be available outside the sandbox -"$GHC_PKG" list | grep -v "my-0.1" - -# Execute ghc-pkg inside the sandbox; it should find my-0.1 -cabal exec ghc-pkg list | grep "my-0.1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/Foo.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/Foo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Foo where - -foo :: String -foo = "foo" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/my.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -name: my -version: 0.1 -license: BSD3 -cabal-version: >= 1.2 -build-type: Simple - -library - exposed-modules: Foo - build-depends: base - - -executable my-executable - main-is: My.hs - build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/My.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/My.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/My.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/My.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -module Main where - -main :: IO () -main = do - putStrLn "This is my-executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.out 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.out 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -this string diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/exec/should_run/runs_given_command.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal configure > /dev/null -cabal exec echo this string diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal freeze --disable-benchmarks -grep -v " criterion ==" cabal.config || die "should NOT have frozen criterion" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. 
../common.sh -cabal freeze --disable-tests -grep -v " test-framework ==" cabal.config || die "should NOT have frozen test-framework" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -. ../common.sh -# TODO: Test this against a package installed in the sandbox but not -# depended upon. -cabal freeze -grep -v "exceptions ==" cabal.config || die "should not have frozen exceptions" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal freeze -grep -v " my ==" cabal.config || die "should not have frozen self" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal freeze --dry-run -[ ! -e cabal.config ] || die "cabal.config file should not have been created" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -. 
../common.sh -# TODO: solver should find solution without extra flags too -cabal freeze --enable-benchmarks --reorder-goals --max-backjumps=-1 -grep " criterion ==" cabal.config || die "should have frozen criterion" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal freeze --enable-tests -grep " test-framework ==" cabal.config || die "should have frozen test-framework" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. ../common.sh -cabal freeze -grep " base ==" cabal.config || die "'base' should have been frozen" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -. 
../common.sh -cabal freeze -grep " ghc-prim ==" cabal.config || die "'ghc-prim' should have been frozen" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/my.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/my.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: my -version: 0.1 -license: BSD3 -cabal-version: >= 1.20.0 -build-type: Simple - -library - exposed-modules: Foo - build-depends: base - -test-suite test-Foo - type: exitcode-stdio-1.0 - hs-source-dirs: tests - main-is: test-Foo.hs - build-depends: base, my, test-framework - -benchmark bench-Foo - type: exitcode-stdio-1.0 - hs-source-dirs: benchmarks - main-is: benchmark-Foo.hs - build-depends: base, my, criterion diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/runs_without_error.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/runs_without_error.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/runs_without_error.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/freeze/should_run/runs_without_error.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -. ../common.sh -cabal freeze diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -. 
../common.sh - -OUTPUT=`cabal manpage` - -# contains visible command descriptions -echo $OUTPUT | grep -q '\.B cabal install' || die "visible command description line not found in:\n----$OUTPUT\n----" - -# does not contain hidden command descriptions -echo $OUTPUT | grep -q '\.B cabal manpage' && die "hidden command description line found in:\n----$OUTPUT\n----" - -exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -. ../common.sh - -# Create the sandbox -cabal sandbox init - -# Add the sources -cabal sandbox add-source p -cabal sandbox add-source q - -# Install the second package -cabal install q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/p.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: p -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/q.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: q -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: p -version: 1.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: P - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module P where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Main where -import P -main :: IO () -main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: q -version: 0.1.0.0 -license: BSD3 -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -executable q - main-is: Main.hs - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: q -version: 0.1.0.0 -license: BSD3 -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -executable q - main-is: Main.hs - build-depends: base, p - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/new-build/monitor_cabal_files.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -. ./common.sh -cd monitor_cabal_files -cp q/q-broken.cabal.in q/q.cabal -echo "Run 1" | awk '{print;print > "/dev/stderr"}' -! 
cabal new-build q -cp q/q-fixed.cabal.in q/q.cabal -echo "Run 2" | awk '{print;print > "/dev/stderr"}' -cabal new-build q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Main.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = putStrLn "Hello, Haskell!" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/test-3199.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/test-3199.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/test-3199.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199/test-3199.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -name: test-t3199 -version: 0.1.0.0 -license: BSD3 -author: Mikhail Glushenkov -maintainer: mikhail.glushenkov@gmail.com -category: Test -build-type: Custom -cabal-version: >=1.10 - -flag exe_2 - description: Build second exe - default: False - -executable test-3199-1 - main-is: Main.hs - build-depends: base - default-language: Haskell2010 - -executable test-3199-2 - main-is: Main.hs - build-depends: base, ansi-terminal - default-language: Haskell2010 - - if flag(exe_2) - buildable: True - else - buildable: False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199.sh 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/regression/t3199.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -. ./common.sh - -if [[ `ghc --numeric-version` =~ "7\\." ]]; then - cd t3199 - tmpfile=$(mktemp /tmp/cabal-t3199.XXXXXX) - cabal sandbox init - cabal sandbox add-source ../../../../../Cabal - cabal install --package-db=clear --package-db=global --only-dep --dry-run > $tmpfile - grep -q "the following would be installed" $tmpfile || die "Should've installed Cabal" - grep -q Cabal $tmpfile || die "Should've installed Cabal" - rm $tmpfile -fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -cabal() { - "$CABAL" $CABAL_ARGS "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -Warning: Sources not registered: "q" - -RE:^cabal(\.exe)?: The sources with the above errors were skipped\. \("q"\)$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -. 
../common.sh - -# Create the sandbox -cabal sandbox init > /dev/null - -# Add one source -cabal sandbox add-source p > /dev/null - -# Remove a source that exists on disk, but is not registered -cabal sandbox delete-source q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: p -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: q -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: p -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: q -version: 0.1.0.0 -license-file: LICENSE -author: Edward Z. 
Yang -maintainer: ezyang@cs.stanford.edu -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -. ../common.sh - -# Create the sandbox -cabal sandbox init - -# Add the sources -cabal sandbox add-source p -cabal sandbox add-source q - -# delete the directory on disk -rm -R p - -# Remove the registered source which is no longer on disk. cabal's handling of -# non-existent sources depends on the behavior of the directory package. -if OUTPUT=`cabal sandbox delete-source p 2>&1`; then - # 'canonicalizePath' should always succeed with directory >= 1.2.3.0 - echo $OUTPUT | grep 'Success deleting sources: "p"' \ - || die "Incorrect success message: $OUTPUT" -else - echo $OUTPUT | grep 'Warning: Source directory not found for paths: "p"' \ - || die "Incorrect failure message: $OUTPUT" -fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -Success deleting sources: "p" "q" - -Note: 'sandbox delete-source' only unregisters the source dependency, but does -not remove the package from the sandbox package DB. - -Use 'sandbox hc-pkg -- unregister' to do that. 
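The remove_nonexistent_source.sh test above leans on a detail worth making explicit: whether `cabal sandbox delete-source` reports success for a source directory that has been deleted from disk depends on how `canonicalizePath` from the directory package treats a missing path (it succeeds from directory 1.2.3.0 on, and may throw with older versions, as the script's own comments note). A minimal standalone sketch of that distinction, outside the test suite; the `probe` helper name is hypothetical:

    import Control.Exception (SomeException, try)
    import System.Directory (canonicalizePath)

    -- Probe how this system's directory package treats a path that no longer
    -- exists: newer versions return a canonical path, older ones may throw.
    probe :: FilePath -> IO ()
    probe p = do
      r <- try (canonicalizePath p) :: IO (Either SomeException FilePath)
      case r of
        Right p' -> putStrLn ("canonicalizePath succeeded: " ++ p')
        Left e   -> putStrLn ("canonicalizePath threw: " ++ show e)

    main :: IO ()
    main = probe "p"   -- e.g. a sandbox source directory removed with 'rm -R p'

The test script accepts either outcome, which is why it checks for one of two messages rather than a single golden output.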
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -. ../common.sh - -# Create the sandbox -cabal sandbox init > /dev/null - -# Add the sources -cabal sandbox add-source p > /dev/null -cabal sandbox add-source q > /dev/null - -# Remove one of the sources -cabal sandbox delete-source p q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/common.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/common.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -# Helper to run Cabal -cabal() { - "$CABAL" $CABAL_ARGS_NO_CONFIG_FILE "$@" -} - -die() { - echo "die: $@" - exit 1 -} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -RE:^cabal(\.exe)?: \./cabal-config already exists\.$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -. 
../common.sh - -rm -f ./cabal-config -cabal --config-file=./cabal-config user-config init > /dev/null -cabal --config-file=./cabal-config user-config init -rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -Writing default configuration to ./cabal-config -Writing default configuration to ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -. ../common.sh - -rm -f ./cabal-config -cabal --config-file=./cabal-config user-config init \ - || die "Couldn't create config file" -cabal --config-file=./cabal-config user-config -f init \ - || die "Couldn't create config file" -test -e ./cabal-config || die "Config file doesn't exist" -rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.out 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Writing default configuration to ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/runs_without_error.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -. 
../common.sh - -rm -f ./cabal-config -cabal --config-file=./cabal-config user-config init \ - || die "Couldn't create config file" -test -e ./cabal-config || die "Config file doesn't exist" -rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Writing default configuration to ./my-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -. ../common.sh - -export CABAL_CONFIG=./my-config -cabal user-config init || die "Couldn't create config file" -test -e ./my-config || die "Config file doesn't exist" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/IntegrationTests.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/IntegrationTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,310 +0,0 @@ -{-# LANGUAGE CPP #-} --- | Groups black-box tests of cabal-install and configures them to test --- the correct binary. --- --- This file should do nothing but import tests from other modules and run --- them with the path to the correct cabal-install binary. -module Main - where - --- Modules from Cabal. -import Distribution.Compat.CreatePipe (createPipe) -import Distribution.Compat.Environment (setEnv) -import Distribution.Compat.Internal.TempFile (createTempDirectory) -import Distribution.Simple.Configure (findDistPrefOrDefault) -import Distribution.Simple.Program.Builtin (ghcPkgProgram) -import Distribution.Simple.Program.Db - (defaultProgramDb, requireProgram, setProgramSearchPath) -import Distribution.Simple.Program.Find - (ProgramSearchPathEntry(ProgramSearchPathDir), defaultProgramSearchPath) -import Distribution.Simple.Program.Types - ( Program(..), simpleProgram, programPath) -import Distribution.Simple.Setup ( Flag(..) ) -import Distribution.Simple.Utils ( findProgramVersion, copyDirectoryRecursive ) -import Distribution.Verbosity (normal) - --- Third party modules. 
-import Control.Concurrent.Async (withAsync, wait) -import Control.Exception (bracket) -import Data.Maybe (fromMaybe) -import System.Directory - ( canonicalizePath - , findExecutable - , getDirectoryContents - , getTemporaryDirectory - , doesDirectoryExist - , removeDirectoryRecursive - , doesFileExist ) -import System.FilePath -import Test.Tasty (TestTree, defaultMain, testGroup) -import Test.Tasty.HUnit (testCase, Assertion, assertFailure) -import Control.Monad ( filterM, forM, unless, when ) -import Data.List (isPrefixOf, isSuffixOf, sort) -import Data.IORef (newIORef, writeIORef, readIORef) -import System.Exit (ExitCode(..)) -import System.IO (withBinaryFile, IOMode(ReadMode)) -import System.Process (runProcess, waitForProcess) -import Text.Regex.Posix ((=~)) -import qualified Data.ByteString as B -import qualified Data.ByteString.Char8 as C8 -import Data.ByteString (ByteString) - -#if MIN_VERSION_base(4,6,0) -import System.Environment ( getExecutablePath ) -#endif - --- | Test case. -data TestCase = TestCase - { tcName :: String -- ^ Name of the shell script - , tcBaseDirectory :: FilePath - , tcCategory :: String - , tcShouldX :: String - , tcStdOutPath :: Maybe FilePath -- ^ File path of "golden standard output" - , tcStdErrPath :: Maybe FilePath -- ^ File path of "golden standard error" - } - --- | Test result. -data TestResult = TestResult - { trExitCode :: ExitCode - , trStdOut :: ByteString - , trStdErr :: ByteString - , trWorkingDirectory :: FilePath - } - --- | Cabal executable -cabalProgram :: Program -cabalProgram = (simpleProgram "cabal") { - programFindVersion = findProgramVersion "--numeric-version" id - } - --- | Convert test result to string. -testResultToString :: TestResult -> String -testResultToString testResult = - exitStatus ++ "\n" ++ workingDirectory ++ "\n\n" ++ stdOut ++ "\n\n" ++ stdErr - where - exitStatus = "Exit status: " ++ show (trExitCode testResult) - workingDirectory = "Working directory: " ++ (trWorkingDirectory testResult) - stdOut = " was:\n" ++ C8.unpack (trStdOut testResult) - stdErr = " was:\n" ++ C8.unpack (trStdErr testResult) - --- | Returns the command that was issued, the return code, and the output text -run :: FilePath -> String -> [String] -> IO TestResult -run cwd path args = do - -- path is relative to the current directory; canonicalizePath makes it - -- absolute, so that runProcess will find it even when changing directory. - path' <- canonicalizePath path - - (pid, hReadStdOut, hReadStdErr) <- do - -- Create pipes for StdOut and StdErr - (hReadStdOut, hWriteStdOut) <- createPipe - (hReadStdErr, hWriteStdErr) <- createPipe - -- Run the process - pid <- runProcess path' args (Just cwd) Nothing Nothing (Just hWriteStdOut) (Just hWriteStdErr) - -- Return the pid and read ends of the pipes - return (pid, hReadStdOut, hReadStdErr) - -- Read subprocess output using asynchronous threads; we need to - -- do this aynchronously to avoid deadlocks due to buffers filling - -- up. - withAsync (B.hGetContents hReadStdOut) $ \stdOutAsync -> do - withAsync (B.hGetContents hReadStdErr) $ \stdErrAsync -> do - -- Wait for the subprocess to terminate - exitcode <- waitForProcess pid - -- We can now be sure that no further output is going to arrive, - -- so we wait for the results of the asynchronous reads. - stdOut <- wait stdOutAsync - stdErr <- wait stdErrAsync - -- Done - return $ TestResult exitcode stdOut stdErr cwd - --- | Get a list of all names in a directory, excluding all hidden or --- system files/directories such as '.', '..' 
or any files/directories --- starting with a '.'. -listDirectory :: FilePath -> IO [String] -listDirectory directory = do - fmap (filter notHidden) $ getDirectoryContents directory - where - notHidden = not . isHidden - isHidden name = "." `isPrefixOf` name - --- | List a directory as per 'listDirectory', but return an empty list --- in case the directory does not exist. -listDirectoryLax :: FilePath -> IO [String] -listDirectoryLax directory = do - d <- doesDirectoryExist directory - if d then - listDirectory directory - else - return [ ] - -pathIfExists :: FilePath -> IO (Maybe FilePath) -pathIfExists p = do - e <- doesFileExist p - if e then - return $ Just p - else - return Nothing - -fileMatchesString :: FilePath -> ByteString -> IO Bool -fileMatchesString p s = do - withBinaryFile p ReadMode $ \h -> do - expected <- (C8.lines . normalizeLinebreaks) `fmap` B.hGetContents h -- Strict - let actual = C8.lines $ normalizeLinebreaks s - return $ length expected == length actual && - and (zipWith matches expected actual) - where - matches :: ByteString -> ByteString -> Bool - matches pattern line - | C8.pack "RE:" `B.isPrefixOf` pattern = line =~ C8.drop 3 pattern - | otherwise = line == pattern - - -- This is a bit of a hack, but since we're comparing - -- *text* output, we should be OK. - normalizeLinebreaks = B.filter (not . ((==) 13)) - -mustMatch :: TestResult -> String -> ByteString -> Maybe FilePath -> Assertion -mustMatch _ _ _ Nothing = return () -mustMatch testResult handleName actual (Just expected) = do - m <- fileMatchesString expected actual - unless m $ assertFailure $ - "<" ++ handleName ++ "> did not match file '" - ++ expected ++ "'.\n" ++ testResultToString testResult - -discoverTestCategories :: FilePath -> IO [String] -discoverTestCategories directory = do - names <- listDirectory directory - fmap sort $ filterM (\name -> doesDirectoryExist $ directory name) names - -discoverTestCases :: FilePath -> String -> String -> IO [TestCase] -discoverTestCases baseDirectory category shouldX = do - -- Find the names of the shell scripts - names <- fmap (filter isTestCase) $ listDirectoryLax directory - -- Fill in TestCase for each script - forM (sort names) $ \name -> do - stdOutPath <- pathIfExists $ directory name `replaceExtension` ".out" - stdErrPath <- pathIfExists $ directory name `replaceExtension` ".err" - return $ TestCase { tcName = name - , tcBaseDirectory = baseDirectory - , tcCategory = category - , tcShouldX = shouldX - , tcStdOutPath = stdOutPath - , tcStdErrPath = stdErrPath - } - where - directory = baseDirectory category shouldX - isTestCase name = ".sh" `isSuffixOf` name - -createTestCases :: [TestCase] -> (TestCase -> Assertion) -> IO [TestTree] -createTestCases testCases mk = - return $ (flip map) testCases $ \tc -> testCase (tcName tc ++ suffix tc) $ mk tc - where - suffix tc = case (tcStdOutPath tc, tcStdErrPath tc) of - (Nothing, Nothing) -> " (ignoring stdout+stderr)" - (Just _ , Nothing) -> " (ignoring stderr)" - (Nothing, Just _ ) -> " (ignoring stdout)" - (Just _ , Just _ ) -> "" - -runTestCase :: (TestResult -> Assertion) -> TestCase -> IO () -runTestCase assertResult tc = do - doRemove <- newIORef False - bracket createWorkDirectory (removeWorkDirectory doRemove) $ \workDirectory -> do - -- Run - let scriptDirectory = workDirectory tcShouldX tc - sh <- fmap (fromMaybe $ error "Cannot find 'sh' executable") $ findExecutable "sh" - testResult <- run scriptDirectory sh [ "-e", tcName tc] - -- Assert that we got what we expected - assertResult testResult 
- mustMatch testResult "stdout" (trStdOut testResult) (tcStdOutPath tc) - mustMatch testResult "stderr" (trStdErr testResult) (tcStdErrPath tc) - -- Only remove working directory if test succeeded - writeIORef doRemove True - where - createWorkDirectory = do - -- Create the temporary directory - tempDirectory <- getTemporaryDirectory - workDirectory <- createTempDirectory tempDirectory "cabal-install-test" - -- Copy all the files from the category into the working directory. - copyDirectoryRecursive normal - (tcBaseDirectory tc tcCategory tc) - workDirectory - -- Done - return workDirectory - removeWorkDirectory doRemove workDirectory = do - remove <- readIORef doRemove - when remove $ removeDirectoryRecursive workDirectory - -makeShouldXTests :: FilePath -> String -> String -> (TestResult -> Assertion) -> IO [TestTree] -makeShouldXTests baseDirectory category shouldX assertResult = do - testCases <- discoverTestCases baseDirectory category shouldX - createTestCases testCases $ \tc -> - runTestCase assertResult tc - -makeShouldRunTests :: FilePath -> String -> IO [TestTree] -makeShouldRunTests baseDirectory category = do - makeShouldXTests baseDirectory category "should_run" $ \testResult -> do - case trExitCode testResult of - ExitSuccess -> - return () -- We're good - ExitFailure _ -> - assertFailure $ "Unexpected exit status.\n\n" ++ testResultToString testResult - -makeShouldFailTests :: FilePath -> String -> IO [TestTree] -makeShouldFailTests baseDirectory category = do - makeShouldXTests baseDirectory category "should_fail" $ \testResult -> do - case trExitCode testResult of - ExitSuccess -> - assertFailure $ "Unexpected exit status.\n\n" ++ testResultToString testResult - ExitFailure _ -> - return () -- We're good - -discoverCategoryTests :: FilePath -> String -> IO [TestTree] -discoverCategoryTests baseDirectory category = do - srTests <- makeShouldRunTests baseDirectory category - sfTests <- makeShouldFailTests baseDirectory category - return [ testGroup "should_run" srTests - , testGroup "should_fail" sfTests - ] - -main :: IO () -main = do - -- Find executables and build directories, etc. - distPref <- guessDistDir - buildDir <- canonicalizePath (distPref "build/cabal") - let programSearchPath = ProgramSearchPathDir buildDir : defaultProgramSearchPath - (cabal, _) <- requireProgram normal cabalProgram (setProgramSearchPath programSearchPath defaultProgramDb) - (ghcPkg, _) <- requireProgram normal ghcPkgProgram defaultProgramDb - baseDirectory <- canonicalizePath $ "tests" "IntegrationTests" - -- Set up environment variables for test scripts - setEnv "GHC_PKG" $ programPath ghcPkg - setEnv "CABAL" $ programPath cabal - -- Define default arguments - setEnv "CABAL_ARGS" $ "--config-file=config-file" - setEnv "CABAL_ARGS_NO_CONFIG_FILE" " " - -- Discover all the test caregories - categories <- discoverTestCategories baseDirectory - -- Discover tests in each category - tests <- forM categories $ \category -> do - categoryTests <- discoverCategoryTests baseDirectory category - return (category, categoryTests) - -- Map into a test tree - let testTree = map (\(category, categoryTests) -> testGroup category categoryTests) tests - -- Run the tests - defaultMain $ testGroup "Integration Tests" $ testTree - --- See this function in Cabal's PackageTests. If you update this, --- update its copy in cabal-install. (Why a copy here? 
I wanted --- to try moving this into the Cabal library, but to do this properly --- I'd have to BC'ify getExecutablePath, and then it got hairy, so --- I aborted and did something simple.) -guessDistDir :: IO FilePath -guessDistDir = do -#if MIN_VERSION_base(4,6,0) - exe_path <- canonicalizePath =<< getExecutablePath - let dist0 = dropFileName exe_path ".." ".." - b <- doesFileExist (dist0 "setup-config") -#else - let dist0 = error "no path" - b = False -#endif - -- Method (2) - if b then canonicalizePath dist0 - else findDistPrefOrDefault NoFlag >>= canonicalizePath diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/README.md 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Integration Tests -================= - -Each test is a shell script. Tests that share files (e.g., `.cabal` files) are -grouped under a common sub-directory of [IntegrationTests]. The framework -copies the whole group's directory before running each test, which allows tests -to reuse files, yet run independently. A group's tests are further divided into -`should_run` and `should_fail` directories, based on the expected exit status. -For example, the test -`IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh` has access -to all files under `exec` and is expected to fail. - -Tests can specify their expected output. For a test named `x.sh`, `x.out` -specifies `stdout` and `x.err` specifies `stderr`. Both files are optional. -The framework expects an exact match between lines in the file and output, -except for lines beginning with "RE:", which are interpreted as regular -expressions. - -[IntegrationTests.hs] defines several environment variables: - -* `CABAL` - The path to the executable being tested. -* `GHC_PKG` - The path to ghc-pkg. -* `CABAL_ARGS` - A common set of arguments for running cabal. -* `CABAL_ARGS_NO_CONFIG_FILE` - `CABAL_ARGS` without `--config-file`. 
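A condensed sketch of the golden-file matching rule described in the list above, assuming the regex-posix package that IntegrationTests.hs already imports; `lineMatches` and `matchesGolden` are illustrative names, and the real matcher additionally reads the files as ByteStrings and strips carriage returns:

    import Data.List (stripPrefix)
    import Text.Regex.Posix ((=~))

    -- A golden line matches an output line either verbatim or, when it starts
    -- with "RE:", as a POSIX extended regular expression applied to the line.
    lineMatches :: String -> String -> Bool
    lineMatches golden actual =
      case stripPrefix "RE:" golden of
        Just re -> actual =~ re
        Nothing -> actual == golden

    -- Whole-file comparison: same number of lines, and every pair matches.
    matchesGolden :: String -> String -> Bool
    matchesGolden golden output =
      let gs = lines golden
          os = lines output
      in length gs == length os && and (zipWith lineMatches gs os)

    main :: IO ()
    main = print (lineMatches "RE:^cabal(\\.exe)?: .*"
                              "cabal: ./cabal-config already exists.")

This is the convention the `.err` golden files in this diff rely on, for example the `RE:^cabal(\.exe)?: ...` line used by doesnt_overwrite_without_f.err.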
- -[IntegrationTests]: IntegrationTests -[IntegrationTests.hs]: IntegrationTests.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - -module UnitTests.Distribution.Client.ArbitraryInstances ( - adjustSize, - shortListOf, - shortListOf1, - arbitraryFlag, - ShortToken(..), - arbitraryShortToken, - NonMEmpty(..), - NoShrink(..), - ) where - -import Data.Char -import Data.List -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid -import Control.Applicative -#endif -import Control.Monad - -import Distribution.Version -import Distribution.Package -import Distribution.System -import Distribution.Verbosity - -import Distribution.Simple.Setup -import Distribution.Simple.InstallDirs - -import Distribution.Utils.NubList - -import Test.QuickCheck - - -adjustSize :: (Int -> Int) -> Gen a -> Gen a -adjustSize adjust gen = sized (\n -> resize (adjust n) gen) - -shortListOf :: Int -> Gen a -> Gen [a] -shortListOf bound gen = - sized $ \n -> do - k <- choose (0, (n `div` 2) `min` bound) - vectorOf k gen - -shortListOf1 :: Int -> Gen a -> Gen [a] -shortListOf1 bound gen = - sized $ \n -> do - k <- choose (1, 1 `max` ((n `div` 2) `min` bound)) - vectorOf k gen - -newtype ShortToken = ShortToken { getShortToken :: String } - deriving Show - -instance Arbitrary ShortToken where - arbitrary = - ShortToken <$> - (shortListOf1 5 (choose ('#', '~')) - `suchThat` (not . ("[]" `isPrefixOf`))) - --TODO: [code cleanup] need to replace parseHaskellString impl to stop - -- accepting Haskell list syntax [], ['a'] etc, just allow String syntax. - -- Workaround, don't generate [] as this does not round trip. 
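The bounded generators above (`shortListOf`, `shortListOf1`) exist to keep randomly generated test data small. A hedged illustration of their effect, assuming the test module is on the build path; it only uses QuickCheck functions that the module already depends on:

    import Test.QuickCheck (Gen, arbitrary, sample)
    import UnitTests.Distribution.Client.ArbitraryInstances (shortListOf)

    -- Print a handful of generated values at increasing sizes; the lists stay
    -- short because shortListOf caps the length at the given bound (3 here).
    main :: IO ()
    main = sample (shortListOf 3 (arbitrary :: Gen Int))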
- - - shrink (ShortToken cs) = - [ ShortToken cs' | cs' <- shrink cs, not (null cs') ] - -arbitraryShortToken :: Gen String -arbitraryShortToken = getShortToken <$> arbitrary - -#if !MIN_VERSION_QuickCheck(2,9,0) -instance Arbitrary Version where - arbitrary = do - branch <- shortListOf1 4 $ - frequency [(3, return 0) - ,(3, return 1) - ,(2, return 2) - ,(1, return 3)] - return (Version branch []) -- deliberate [] - where - - shrink (Version branch []) = - [ Version branch' [] | branch' <- shrink branch, not (null branch') ] - shrink (Version branch _tags) = - [ Version branch [] ] -#endif - -instance Arbitrary VersionRange where - arbitrary = canonicaliseVersionRange <$> sized verRangeExp - where - verRangeExp n = frequency $ - [ (2, return anyVersion) - , (1, liftM thisVersion arbitrary) - , (1, liftM laterVersion arbitrary) - , (1, liftM orLaterVersion arbitrary) - , (1, liftM orLaterVersion' arbitrary) - , (1, liftM earlierVersion arbitrary) - , (1, liftM orEarlierVersion arbitrary) - , (1, liftM orEarlierVersion' arbitrary) - , (1, liftM withinVersion arbitrary) - , (2, liftM VersionRangeParens arbitrary) - ] ++ if n == 0 then [] else - [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) - , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) - ] - where - verRangeExp2 = verRangeExp (n `div` 2) - - orLaterVersion' v = - unionVersionRanges (laterVersion v) (thisVersion v) - orEarlierVersion' v = - unionVersionRanges (earlierVersion v) (thisVersion v) - - canonicaliseVersionRange = fromVersionIntervals . toVersionIntervals - -instance Arbitrary PackageName where - arbitrary = PackageName . intercalate "-" <$> shortListOf1 2 nameComponent - where - nameComponent = shortListOf1 5 (elements packageChars) - `suchThat` (not . all isDigit) - packageChars = filter isAlphaNum ['\0'..'\127'] - -instance Arbitrary Dependency where - arbitrary = Dependency <$> arbitrary <*> arbitrary - -instance Arbitrary OS where - arbitrary = elements knownOSs - -instance Arbitrary Arch where - arbitrary = elements knownArches - -instance Arbitrary Platform where - arbitrary = Platform <$> arbitrary <*> arbitrary - -instance Arbitrary a => Arbitrary (Flag a) where - arbitrary = arbitraryFlag arbitrary - shrink NoFlag = [] - shrink (Flag x) = NoFlag : [ Flag x' | x' <- shrink x ] - -arbitraryFlag :: Gen a -> Gen (Flag a) -arbitraryFlag genA = - sized $ \sz -> - case sz of - 0 -> pure NoFlag - _ -> frequency [ (1, pure NoFlag) - , (3, Flag <$> genA) ] - - -instance (Arbitrary a, Ord a) => Arbitrary (NubList a) where - arbitrary = toNubList <$> arbitrary - shrink xs = [ toNubList [] | (not . 
null) (fromNubList xs) ] - -- try empty, otherwise don't shrink as it can loop - -instance Arbitrary Verbosity where - arbitrary = elements [minBound..maxBound] - -instance Arbitrary PathTemplate where - arbitrary = toPathTemplate <$> arbitraryShortToken - shrink t = [ toPathTemplate s | s <- shrink (show t), not (null s) ] - - -newtype NonMEmpty a = NonMEmpty { getNonMEmpty :: a } - deriving (Eq, Ord, Show) - -instance (Arbitrary a, Monoid a, Eq a) => Arbitrary (NonMEmpty a) where - arbitrary = NonMEmpty <$> (arbitrary `suchThat` (/= mempty)) - shrink (NonMEmpty x) = [ NonMEmpty x' | x' <- shrink x, x' /= mempty ] - -newtype NoShrink a = NoShrink { getNoShrink :: a } - deriving (Eq, Ord, Show) - -instance Arbitrary a => Arbitrary (NoShrink a) where - arbitrary = NoShrink <$> arbitrary - shrink _ = [] - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Compat/Time.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Compat/Time.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Compat/Time.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -module UnitTests.Distribution.Client.Compat.Time (tests) where - -import Control.Concurrent (threadDelay) -import System.FilePath - -import Distribution.Simple.Utils (withTempDirectory) -import Distribution.Verbosity - -import Distribution.Client.Compat.Time - -import Test.Tasty -import Test.Tasty.HUnit - -tests :: Int -> [TestTree] -tests mtimeChange = - [ testCase "getModTime has sub-second resolution" $ getModTimeTest mtimeChange - , testCase "getCurTime works as expected" $ getCurTimeTest mtimeChange - ] - -getModTimeTest :: Int -> Assertion -getModTimeTest mtimeChange = - withTempDirectory silent "." "getmodtime-" $ \dir -> do - let fileName = dir "foo" - writeFile fileName "bar" - t0 <- getModTime fileName - threadDelay mtimeChange - writeFile fileName "baz" - t1 <- getModTime fileName - assertBool "expected different file mtimes" (t1 > t0) - - -getCurTimeTest :: Int -> Assertion -getCurTimeTest mtimeChange = - withTempDirectory silent "." 
"getmodtime-" $ \dir -> do - let fileName = dir "foo" - writeFile fileName "bar" - t0 <- getModTime fileName - threadDelay mtimeChange - t1 <- getCurTime - assertBool("expected file mtime (" ++ show t0 - ++ ") to be earlier than current time (" ++ show t1 ++ ")") - (t0 < t1) - - threadDelay mtimeChange - writeFile fileName "baz" - t2 <- getModTime fileName - assertBool ("expected current time (" ++ show t1 - ++ ") to be earlier than file mtime (" ++ show t2 ++ ")") - (t1 < t2) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,418 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} --- | DSL for testing the modular solver -module UnitTests.Distribution.Client.Dependency.Modular.DSL ( - ExampleDependency(..) - , Dependencies(..) - , ExTest(..) - , ExPreference(..) - , ExampleDb - , ExampleVersionRange - , ExamplePkgVersion - , exAv - , exInst - , exFlag - , exResolve - , extractInstallPlan - , withSetupDeps - , withTest - , withTests - ) where - --- base -import Data.Either (partitionEithers) -import Data.Maybe (catMaybes) -import Data.List (nub) -import Data.Monoid -import Data.Version -import qualified Data.Map as Map - --- Cabal -import qualified Distribution.Compiler as C -import qualified Distribution.InstalledPackageInfo as C -import qualified Distribution.Package as C - hiding (HasUnitId(..)) -import qualified Distribution.PackageDescription as C -import qualified Distribution.Simple.PackageIndex as C.PackageIndex -import qualified Distribution.System as C -import qualified Distribution.Version as C -import Language.Haskell.Extension (Extension(..), Language) - --- cabal-install -import Distribution.Client.ComponentDeps (ComponentDeps) -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types -import Distribution.Client.Types -import qualified Distribution.Client.InstallPlan as CI.InstallPlan -import qualified Distribution.Client.PackageIndex as CI.PackageIndex -import qualified Distribution.Client.PkgConfigDb as PC -import qualified Distribution.Client.ComponentDeps as CD - -{------------------------------------------------------------------------------- - Example package database DSL - - In order to be able to set simple examples up quickly, we define a very - simple version of the package database here explicitly designed for use in - tests. - - The design of `ExampleDb` takes the perspective of the solver, not the - perspective of the package DB. This makes it easier to set up tests for - various parts of the solver, but makes the mapping somewhat awkward, because - it means we first map from "solver perspective" `ExampleDb` to the package - database format, and then the modular solver internally in `IndexConversion` - maps this back to the solver specific data structures. - - IMPLEMENTATION NOTES - -------------------- - - TODO: Perhaps these should be made comments of the corresponding data type - definitions. For now these are just my own conclusions and may be wrong. 
- - * The difference between `GenericPackageDescription` and `PackageDescription` - is that `PackageDescription` describes a particular _configuration_ of a - package (for instance, see documentation for `checkPackage`). A - `GenericPackageDescription` can be turned into a `PackageDescription` in - two ways: - - a. `finalizePackageDescription` does the proper translation, by taking - into account the platform, available dependencies, etc. and picks a - flag assignment (or gives an error if no flag assignment can be found) - b. `flattenPackageDescription` ignores flag assignment and just joins all - components together. - - The slightly odd thing is that a `GenericPackageDescription` contains a - `PackageDescription` as a field; both of the above functions do the same - thing: they take the embedded `PackageDescription` as a basis for the result - value, but override `library`, `executables`, `testSuites`, `benchmarks` - and `buildDepends`. - * The `condTreeComponents` fields of a `CondTree` is a list of triples - `(condition, then-branch, else-branch)`, where the `else-branch` is - optional. --------------------------------------------------------------------------------} - -type ExamplePkgName = String -type ExamplePkgVersion = Int -type ExamplePkgHash = String -- for example "installed" packages -type ExampleFlagName = String -type ExampleTestName = String -type ExampleVersionRange = C.VersionRange -data Dependencies = NotBuildable | Buildable [ExampleDependency] - -data ExampleDependency = - -- | Simple dependency on any version - ExAny ExamplePkgName - - -- | Simple dependency on a fixed version - | ExFix ExamplePkgName ExamplePkgVersion - - -- | Dependencies indexed by a flag - | ExFlag ExampleFlagName Dependencies Dependencies - - -- | Dependency on a language extension - | ExExt Extension - - -- | Dependency on a language version - | ExLang Language - - -- | Dependency on a pkg-config package - | ExPkg (ExamplePkgName, ExamplePkgVersion) - -data ExTest = ExTest ExampleTestName [ExampleDependency] - -exFlag :: ExampleFlagName -> [ExampleDependency] -> [ExampleDependency] - -> ExampleDependency -exFlag n t e = ExFlag n (Buildable t) (Buildable e) - -data ExPreference = ExPref String ExampleVersionRange - -data ExampleAvailable = ExAv { - exAvName :: ExamplePkgName - , exAvVersion :: ExamplePkgVersion - , exAvDeps :: ComponentDeps [ExampleDependency] - } - -exAv :: ExamplePkgName -> ExamplePkgVersion -> [ExampleDependency] - -> ExampleAvailable -exAv n v ds = ExAv { exAvName = n, exAvVersion = v - , exAvDeps = CD.fromLibraryDeps ds } - -withSetupDeps :: ExampleAvailable -> [ExampleDependency] -> ExampleAvailable -withSetupDeps ex setupDeps = ex { - exAvDeps = exAvDeps ex <> CD.fromSetupDeps setupDeps - } - -withTest :: ExampleAvailable -> ExTest -> ExampleAvailable -withTest ex test = withTests ex [test] - -withTests :: ExampleAvailable -> [ExTest] -> ExampleAvailable -withTests ex tests = - let testCDs = CD.fromList [(CD.ComponentTest name, deps) - | ExTest name deps <- tests] - in ex { exAvDeps = exAvDeps ex <> testCDs } - -data ExampleInstalled = ExInst { - exInstName :: ExamplePkgName - , exInstVersion :: ExamplePkgVersion - , exInstHash :: ExamplePkgHash - , exInstBuildAgainst :: [ExampleInstalled] - } - -exInst :: ExamplePkgName -> ExamplePkgVersion -> ExamplePkgHash - -> [ExampleInstalled] -> ExampleInstalled -exInst = ExInst - -type ExampleDb = [Either ExampleInstalled ExampleAvailable] - -type DependencyTree a = C.CondTree C.ConfVar [C.Dependency] a - -exDbPkgs :: ExampleDb -> 
[ExamplePkgName] -exDbPkgs = map (either exInstName exAvName) - -exAvSrcPkg :: ExampleAvailable -> SourcePackage -exAvSrcPkg ex = - let (libraryDeps, exts, mlang, pcpkgs) = splitTopLevel (CD.libraryDeps (exAvDeps ex)) - testSuites = [(name, deps) | (CD.ComponentTest name, deps) <- CD.toList (exAvDeps ex)] - in SourcePackage { - packageInfoId = exAvPkgId ex - , packageSource = LocalTarballPackage "<>" - , packageDescrOverride = Nothing - , packageDescription = C.GenericPackageDescription { - C.packageDescription = C.emptyPackageDescription { - C.package = exAvPkgId ex - , C.library = error "not yet configured: library" - , C.executables = error "not yet configured: executables" - , C.testSuites = error "not yet configured: testSuites" - , C.benchmarks = error "not yet configured: benchmarks" - , C.buildDepends = error "not yet configured: buildDepends" - , C.setupBuildInfo = Just C.SetupBuildInfo { - C.setupDepends = mkSetupDeps (CD.setupDeps (exAvDeps ex)), - C.defaultSetupDepends = False - } - } - , C.genPackageFlags = nub $ concatMap extractFlags $ - CD.libraryDeps (exAvDeps ex) ++ concatMap snd testSuites - , C.condLibrary = Just $ mkCondTree (extsLib exts <> langLib mlang <> pcpkgLib pcpkgs) - disableLib - (Buildable libraryDeps) - , C.condExecutables = [] - , C.condTestSuites = - let mkTree = mkCondTree mempty disableTest . Buildable - in map (\(t, deps) -> (t, mkTree deps)) testSuites - , C.condBenchmarks = [] - } - } - where - -- Split the set of dependencies into the set of dependencies of the library, - -- the dependencies of the test suites and extensions. - splitTopLevel :: [ExampleDependency] - -> ( [ExampleDependency] - , [Extension] - , Maybe Language - , [(ExamplePkgName, ExamplePkgVersion)] -- pkg-config - ) - splitTopLevel [] = - ([], [], Nothing, []) - splitTopLevel (ExExt ext:deps) = - let (other, exts, lang, pcpkgs) = splitTopLevel deps - in (other, ext:exts, lang, pcpkgs) - splitTopLevel (ExLang lang:deps) = - case splitTopLevel deps of - (other, exts, Nothing, pcpkgs) -> (other, exts, Just lang, pcpkgs) - _ -> error "Only 1 Language dependency is supported" - splitTopLevel (ExPkg pkg:deps) = - let (other, exts, lang, pcpkgs) = splitTopLevel deps - in (other, exts, lang, pkg:pcpkgs) - splitTopLevel (dep:deps) = - let (other, exts, lang, pcpkgs) = splitTopLevel deps - in (dep:other, exts, lang, pcpkgs) - - -- Extract the total set of flags used - extractFlags :: ExampleDependency -> [C.Flag] - extractFlags (ExAny _) = [] - extractFlags (ExFix _ _) = [] - extractFlags (ExFlag f a b) = C.MkFlag { - C.flagName = C.FlagName f - , C.flagDescription = "" - , C.flagDefault = True - , C.flagManual = False - } - : concatMap extractFlags (deps a ++ deps b) - where - deps :: Dependencies -> [ExampleDependency] - deps NotBuildable = [] - deps (Buildable ds) = ds - extractFlags (ExExt _) = [] - extractFlags (ExLang _) = [] - extractFlags (ExPkg _) = [] - - mkCondTree :: Monoid a => a -> (a -> a) -> Dependencies -> DependencyTree a - mkCondTree x dontBuild NotBuildable = - C.CondNode { - C.condTreeData = dontBuild x - , C.condTreeConstraints = [] - , C.condTreeComponents = [] - } - mkCondTree x dontBuild (Buildable deps) = - let (directDeps, flaggedDeps) = splitDeps deps - in C.CondNode { - C.condTreeData = x -- Necessary for language extensions - , C.condTreeConstraints = map mkDirect directDeps - , C.condTreeComponents = map (mkFlagged dontBuild) flaggedDeps - } - - mkDirect :: (ExamplePkgName, Maybe ExamplePkgVersion) -> C.Dependency - mkDirect (dep, Nothing) = C.Dependency 
(C.PackageName dep) C.anyVersion - mkDirect (dep, Just n) = C.Dependency (C.PackageName dep) (C.thisVersion v) - where - v = Version [n, 0, 0] [] - - mkFlagged :: Monoid a - => (a -> a) - -> (ExampleFlagName, Dependencies, Dependencies) - -> (C.Condition C.ConfVar - , DependencyTree a, Maybe (DependencyTree a)) - mkFlagged dontBuild (f, a, b) = ( C.Var (C.Flag (C.FlagName f)) - , mkCondTree mempty dontBuild a - , Just (mkCondTree mempty dontBuild b) - ) - - -- Split a set of dependencies into direct dependencies and flagged - -- dependencies. A direct dependency is a tuple of the name of package and - -- maybe its version (no version means any version) meant to be converted - -- to a 'C.Dependency' with 'mkDirect' for example. A flagged dependency is - -- the set of dependencies guarded by a flag. - -- - -- TODO: Take care of flagged language extensions and language flavours. - splitDeps :: [ExampleDependency] - -> ( [(ExamplePkgName, Maybe Int)] - , [(ExampleFlagName, Dependencies, Dependencies)] - ) - splitDeps [] = - ([], []) - splitDeps (ExAny p:deps) = - let (directDeps, flaggedDeps) = splitDeps deps - in ((p, Nothing):directDeps, flaggedDeps) - splitDeps (ExFix p v:deps) = - let (directDeps, flaggedDeps) = splitDeps deps - in ((p, Just v):directDeps, flaggedDeps) - splitDeps (ExFlag f a b:deps) = - let (directDeps, flaggedDeps) = splitDeps deps - in (directDeps, (f, a, b):flaggedDeps) - splitDeps (_:deps) = splitDeps deps - - -- Currently we only support simple setup dependencies - mkSetupDeps :: [ExampleDependency] -> [C.Dependency] - mkSetupDeps deps = - let (directDeps, []) = splitDeps deps in map mkDirect directDeps - - -- A 'C.Library' with just the given extensions in its 'BuildInfo' - extsLib :: [Extension] -> C.Library - extsLib es = mempty { C.libBuildInfo = mempty { C.otherExtensions = es } } - - -- A 'C.Library' with just the given extensions in its 'BuildInfo' - langLib :: Maybe Language -> C.Library - langLib (Just lang) = mempty { C.libBuildInfo = mempty { C.defaultLanguage = Just lang } } - langLib _ = mempty - - disableLib :: C.Library -> C.Library - disableLib lib = - lib { C.libBuildInfo = (C.libBuildInfo lib) { C.buildable = False }} - - disableTest :: C.TestSuite -> C.TestSuite - disableTest test = - test { C.testBuildInfo = (C.testBuildInfo test) { C.buildable = False }} - - -- A 'C.Library' with just the given pkgconfig-depends in its 'BuildInfo' - pcpkgLib :: [(ExamplePkgName, ExamplePkgVersion)] -> C.Library - pcpkgLib ds = mempty { C.libBuildInfo = mempty { C.pkgconfigDepends = [mkDirect (n, (Just v)) | (n,v) <- ds] } } - -exAvPkgId :: ExampleAvailable -> C.PackageIdentifier -exAvPkgId ex = C.PackageIdentifier { - pkgName = C.PackageName (exAvName ex) - , pkgVersion = Version [exAvVersion ex, 0, 0] [] - } - -exInstInfo :: ExampleInstalled -> C.InstalledPackageInfo -exInstInfo ex = C.emptyInstalledPackageInfo { - C.installedUnitId = C.mkUnitId (exInstHash ex) - , C.sourcePackageId = exInstPkgId ex - , C.depends = map (C.mkUnitId . exInstHash) - (exInstBuildAgainst ex) - } - -exInstPkgId :: ExampleInstalled -> C.PackageIdentifier -exInstPkgId ex = C.PackageIdentifier { - pkgName = C.PackageName (exInstName ex) - , pkgVersion = Version [exInstVersion ex, 0, 0] [] - } - -exAvIdx :: [ExampleAvailable] -> CI.PackageIndex.PackageIndex SourcePackage -exAvIdx = CI.PackageIndex.fromList . map exAvSrcPkg - -exInstIdx :: [ExampleInstalled] -> C.PackageIndex.InstalledPackageIndex -exInstIdx = C.PackageIndex.fromList . 
map exInstInfo - -exResolve :: ExampleDb - -- List of extensions supported by the compiler, or Nothing if unknown. - -> Maybe [Extension] - -- List of languages supported by the compiler, or Nothing if unknown. - -> Maybe [Language] - -> PC.PkgConfigDb - -> [ExamplePkgName] - -> Bool - -> [ExPreference] - -> ([String], Either String CI.InstallPlan.InstallPlan) -exResolve db exts langs pkgConfigDb targets indepGoals prefs = runProgress $ - resolveDependencies C.buildPlatform - compiler pkgConfigDb - Modular - params - where - defaultCompiler = C.unknownCompilerInfo C.buildCompilerId C.NoAbiTag - compiler = defaultCompiler { C.compilerInfoExtensions = exts - , C.compilerInfoLanguages = langs - } - (inst, avai) = partitionEithers db - instIdx = exInstIdx inst - avaiIdx = SourcePackageDb { - packageIndex = exAvIdx avai - , packagePreferences = Map.empty - } - enableTests = fmap (\p -> PackageConstraintStanzas - (C.PackageName p) [TestStanzas]) - (exDbPkgs db) - targets' = fmap (\p -> NamedPackage (C.PackageName p) []) targets - params = addPreferences (fmap toPref prefs) - $ addConstraints (fmap toLpc enableTests) - $ (standardInstallPolicy instIdx avaiIdx targets') { - depResolverIndependentGoals = indepGoals - } - toLpc pc = LabeledPackageConstraint pc ConstraintSourceUnknown - toPref (ExPref n v) = PackageVersionPreference (C.PackageName n) v - -extractInstallPlan :: CI.InstallPlan.InstallPlan - -> [(ExamplePkgName, ExamplePkgVersion)] -extractInstallPlan = catMaybes . map confPkg . CI.InstallPlan.toList - where - confPkg :: CI.InstallPlan.PlanPackage -> Maybe (String, Int) - confPkg (CI.InstallPlan.Configured pkg) = Just $ srcPkg pkg - confPkg _ = Nothing - - srcPkg :: ConfiguredPackage -> (String, Int) - srcPkg (ConfiguredPackage pkg _flags _stanzas _deps) = - let C.PackageIdentifier (C.PackageName p) (Version (n:_) _) = - packageInfoId pkg - in (p, n) - -{------------------------------------------------------------------------------- - Auxiliary --------------------------------------------------------------------------------} - --- | Run Progress computation --- --- Like `runLog`, but for the more general `Progress` type. 
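For readers skimming the removed DSL, the fold that follows is easiest to see on a self-contained stand-in. The sketch below assumes nothing from cabal-install: it defines a local Progress type with only the three constructors that runProgress pattern-matches on (Step, Fail, Done) and shows how the collected log steps pair up with the final outcome. Names here (collectProgress, the example steps) are invented for illustration and do not appear in the removed module.

data Progress step e a
  = Step step (Progress step e a)  -- emit one log/trace step, then continue
  | Fail e                         -- finish unsuccessfully
  | Done a                         -- finish successfully

-- Collect every step plus the final outcome, the same shape that runProgress
-- below produces for the solver's log.
collectProgress :: Progress step e a -> ([step], Either e a)
collectProgress (Step s p) = let (ss, r) = collectProgress p in (s : ss, r)
collectProgress (Fail e)   = ([], Left e)
collectProgress (Done a)   = ([], Right a)

-- collectProgress (Step "pick A-1" (Step "pick B-2" (Done ())))
--   == (["pick A-1", "pick B-2"], Right ())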
-runProgress :: Progress step e a -> ([step], Either e a) -runProgress = go - where - go (Step s p) = let (ss, result) = go p in (s:ss, result) - go (Fail e) = ([], Left e) - go (Done a) = ([], Right a) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -module UnitTests.Distribution.Client.Dependency.Modular.PSQ ( - tests - ) where - -import Distribution.Client.Dependency.Modular.PSQ - -import Test.Tasty -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = [ testProperty "splitsAltImplementation" splitsTest - ] - --- | Original splits implementation -splits' :: PSQ k a -> PSQ k (a, PSQ k a) -splits' xs = - casePSQ xs - (PSQ []) - (\ k v ys -> cons k (v, ys) (fmap (\ (w, zs) -> (w, cons k v zs)) (splits' ys))) - -splitsTest :: [(Int, Int)] -> Bool -splitsTest psq = splits' (PSQ psq) == splits (PSQ psq) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,805 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -module UnitTests.Distribution.Client.Dependency.Modular.Solver (tests) - where - --- base -import Control.Monad -import Data.List (isInfixOf) - -import qualified Data.Version as V -import qualified Distribution.Version as V - --- test-framework -import Test.Tasty as TF -import Test.Tasty.HUnit (testCase, assertEqual, assertBool) - --- Cabal -import Language.Haskell.Extension ( Extension(..) 
- , KnownExtension(..), Language(..)) - --- cabal-install -import Distribution.Client.PkgConfigDb (PkgConfigDb, pkgConfigDbFromList) -import UnitTests.Distribution.Client.Dependency.Modular.DSL -import UnitTests.Options - -tests :: [TF.TestTree] -tests = [ - testGroup "Simple dependencies" [ - runTest $ mkTest db1 "alreadyInstalled" ["A"] (SolverSuccess []) - , runTest $ mkTest db1 "installLatest" ["B"] (SolverSuccess [("B", 2)]) - , runTest $ mkTest db1 "simpleDep1" ["C"] (SolverSuccess [("B", 1), ("C", 1)]) - , runTest $ mkTest db1 "simpleDep2" ["D"] (SolverSuccess [("B", 2), ("D", 1)]) - , runTest $ mkTest db1 "failTwoVersions" ["C", "D"] anySolverFailure - , runTest $ indep $ mkTest db1 "indepTwoVersions" ["C", "D"] (SolverSuccess [("B", 1), ("B", 2), ("C", 1), ("D", 1)]) - , runTest $ indep $ mkTest db1 "aliasWhenPossible1" ["C", "E"] (SolverSuccess [("B", 1), ("C", 1), ("E", 1)]) - , runTest $ indep $ mkTest db1 "aliasWhenPossible2" ["D", "E"] (SolverSuccess [("B", 2), ("D", 1), ("E", 1)]) - , runTest $ indep $ mkTest db2 "aliasWhenPossible3" ["C", "D"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) - , runTest $ mkTest db1 "buildDepAgainstOld" ["F"] (SolverSuccess [("B", 1), ("E", 1), ("F", 1)]) - , runTest $ mkTest db1 "buildDepAgainstNew" ["G"] (SolverSuccess [("B", 2), ("E", 1), ("G", 1)]) - , runTest $ indep $ mkTest db1 "multipleInstances" ["F", "G"] anySolverFailure - , runTest $ mkTest db21 "unknownPackage1" ["A"] (SolverSuccess [("A", 1), ("B", 1)]) - , runTest $ mkTest db22 "unknownPackage2" ["A"] (SolverFailure (isInfixOf "unknown package: C")) - ] - , testGroup "Flagged dependencies" [ - runTest $ mkTest db3 "forceFlagOn" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) - , runTest $ mkTest db3 "forceFlagOff" ["D"] (SolverSuccess [("A", 2), ("B", 1), ("D", 1)]) - , runTest $ indep $ mkTest db3 "linkFlags1" ["C", "D"] anySolverFailure - , runTest $ indep $ mkTest db4 "linkFlags2" ["C", "D"] anySolverFailure - , runTest $ indep $ mkTest db18 "linkFlags3" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("F", 1)]) - ] - , testGroup "Stanzas" [ - runTest $ mkTest db5 "simpleTest1" ["C"] (SolverSuccess [("A", 2), ("C", 1)]) - , runTest $ mkTest db5 "simpleTest2" ["D"] anySolverFailure - , runTest $ mkTest db5 "simpleTest3" ["E"] (SolverSuccess [("A", 1), ("E", 1)]) - , runTest $ mkTest db5 "simpleTest4" ["F"] anySolverFailure -- TODO - , runTest $ mkTest db5 "simpleTest5" ["G"] (SolverSuccess [("A", 2), ("G", 1)]) - , runTest $ mkTest db5 "simpleTest6" ["E", "G"] anySolverFailure - , runTest $ indep $ mkTest db5 "simpleTest7" ["E", "G"] (SolverSuccess [("A", 1), ("A", 2), ("E", 1), ("G", 1)]) - , runTest $ mkTest db6 "depsWithTests1" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) - , runTest $ indep $ mkTest db6 "depsWithTests2" ["C", "D"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]) - ] - , testGroup "Setup dependencies" [ - runTest $ mkTest db7 "setupDeps1" ["B"] (SolverSuccess [("A", 2), ("B", 1)]) - , runTest $ mkTest db7 "setupDeps2" ["C"] (SolverSuccess [("A", 2), ("C", 1)]) - , runTest $ mkTest db7 "setupDeps3" ["D"] (SolverSuccess [("A", 1), ("D", 1)]) - , runTest $ mkTest db7 "setupDeps4" ["E"] (SolverSuccess [("A", 1), ("A", 2), ("E", 1)]) - , runTest $ mkTest db7 "setupDeps5" ["F"] (SolverSuccess [("A", 1), ("A", 2), ("F", 1)]) - , runTest $ mkTest db8 "setupDeps6" ["C", "D"] (SolverSuccess [("A", 1), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) - , runTest $ mkTest db9 "setupDeps7" ["F", 
"G"] (SolverSuccess [("A", 1), ("B", 1), ("B",2 ), ("C", 1), ("D", 1), ("E", 1), ("E", 2), ("F", 1), ("G", 1)]) - , runTest $ mkTest db10 "setupDeps8" ["C"] (SolverSuccess [("C", 1)]) - , runTest $ indep $ mkTest dbSetupDeps "setupDeps9" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2)]) - ] - , testGroup "Base shim" [ - runTest $ mkTest db11 "baseShim1" ["A"] (SolverSuccess [("A", 1)]) - , runTest $ mkTest db12 "baseShim2" ["A"] (SolverSuccess [("A", 1)]) - , runTest $ mkTest db12 "baseShim3" ["B"] (SolverSuccess [("B", 1)]) - , runTest $ mkTest db12 "baseShim4" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) - , runTest $ mkTest db12 "baseShim5" ["D"] anySolverFailure - , runTest $ mkTest db12 "baseShim6" ["E"] (SolverSuccess [("E", 1), ("syb", 2)]) - ] - , testGroup "Cycles" [ - runTest $ mkTest db14 "simpleCycle1" ["A"] anySolverFailure - , runTest $ mkTest db14 "simpleCycle2" ["A", "B"] anySolverFailure - , runTest $ mkTest db14 "cycleWithFlagChoice1" ["C"] (SolverSuccess [("C", 1), ("E", 1)]) - , runTest $ mkTest db15 "cycleThroughSetupDep1" ["A"] anySolverFailure - , runTest $ mkTest db15 "cycleThroughSetupDep2" ["B"] anySolverFailure - , runTest $ mkTest db15 "cycleThroughSetupDep3" ["C"] (SolverSuccess [("C", 2), ("D", 1)]) - , runTest $ mkTest db15 "cycleThroughSetupDep4" ["D"] (SolverSuccess [("D", 1)]) - , runTest $ mkTest db15 "cycleThroughSetupDep5" ["E"] (SolverSuccess [("C", 2), ("D", 1), ("E", 1)]) - ] - , testGroup "Extensions" [ - runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupported" ["A"] anySolverFailure - , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupportedIndirect" ["B"] anySolverFailure - , runTest $ mkTestExts [EnableExtension RankNTypes] dbExts1 "supported" ["A"] (SolverSuccess [("A",1)]) - , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedIndirect" ["C"] (SolverSuccess [("A",1),("B",1), ("C",1)]) - , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "disabledExtension" ["D"] anySolverFailure - , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "disabledExtension" ["D"] anySolverFailure - , runTest $ mkTestExts (UnknownExtension "custom" : map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedUnknown" ["E"] (SolverSuccess [("A",1),("B",1),("C",1),("E",1)]) - ] - , testGroup "Languages" [ - runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupported" ["A"] anySolverFailure - , runTest $ mkTestLangs [Haskell98,Haskell2010] dbLangs1 "supported" ["A"] (SolverSuccess [("A",1)]) - , runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupportedIndirect" ["B"] anySolverFailure - , runTest $ mkTestLangs [Haskell98, Haskell2010, UnknownLanguage "Haskell3000"] dbLangs1 "supportedUnknown" ["C"] (SolverSuccess [("A",1),("B",1),("C",1)]) - ] - - , testGroup "Soft Constraints" [ - runTest $ soft [ ExPref "A" $ mkvrThis 1] $ mkTest db13 "selectPreferredVersionSimple" ["A"] (SolverSuccess [("A", 1)]) - , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionSimple2" ["A"] (SolverSuccess [("A", 2)]) - , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 2 - , ExPref "A" $ mkvrOrEarlier 1] $ mkTest db13 "selectPreferredVersionMultiple" ["A"] (SolverSuccess [("A", 1)]) - , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 1 - , ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple2" ["A"] (SolverSuccess [("A", 1)]) - , runTest $ soft [ ExPref "A" $ mkvrThis 1 - , ExPref "A" $ mkvrThis 2] $ mkTest db13 "selectPreferredVersionMultiple3" ["A"] 
(SolverSuccess [("A", 2)]) - , runTest $ soft [ ExPref "A" $ mkvrThis 1 - , ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple4" ["A"] (SolverSuccess [("A", 1)]) - ] - , testGroup "Buildable Field" [ - testBuildable "avoid building component with unknown dependency" (ExAny "unknown") - , testBuildable "avoid building component with unknown extension" (ExExt (UnknownExtension "unknown")) - , testBuildable "avoid building component with unknown language" (ExLang (UnknownLanguage "unknown")) - , runTest $ mkTest dbBuildable1 "choose flags that set buildable to false" ["pkg"] (SolverSuccess [("flag1-false", 1), ("flag2-true", 1), ("pkg", 1)]) - , runTest $ mkTest dbBuildable2 "choose version that sets buildable to false" ["A"] (SolverSuccess [("A", 1), ("B", 2)]) - ] - , testGroup "Pkg-config dependencies" [ - runTest $ mkTestPCDepends [] dbPC1 "noPkgs" ["A"] anySolverFailure - , runTest $ mkTestPCDepends [("pkgA", "0")] dbPC1 "tooOld" ["A"] anySolverFailure - , runTest $ mkTestPCDepends [("pkgA", "1.0.0"), ("pkgB", "1.0.0")] dbPC1 "pruneNotFound" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) - , runTest $ mkTestPCDepends [("pkgA", "1.0.0"), ("pkgB", "2.0.0")] dbPC1 "chooseNewest" ["C"] (SolverSuccess [("A", 1), ("B", 2), ("C", 1)]) - ] - , testGroup "Independent goals" [ - runTest $ indep $ mkTest db16 "indepGoals1" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("E", 1)]) - , runTest $ indep $ mkTest db17 "indepGoals2" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]) - , runTest $ indep $ mkTest db19 "indepGoals3" ["D", "E", "F"] anySolverFailure -- The target order is important. - , runTest $ indep $ mkTest db20 "indepGoals4" ["C", "A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2)]) - , runTest $ indep $ mkTest db23 "indepGoals5" ["X", "Y"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("C", 1), ("C", 2), ("X", 1), ("Y", 1)]) - , runTest $ indep $ mkTest db24 "indepGoals6" ["X", "Y"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("X", 1), ("Y", 1)]) - ] - ] - where - indep test = test { testIndepGoals = True } - soft prefs test = test { testSoftConstraints = prefs } - mkvrThis = V.thisVersion . makeV - mkvrOrEarlier = V.orEarlierVersion . makeV - makeV v = V.Version [v,0,0] [] - -{------------------------------------------------------------------------------- - Solver tests --------------------------------------------------------------------------------} - -data SolverTest = SolverTest { - testLabel :: String - , testTargets :: [String] - , testResult :: SolverResult - , testIndepGoals :: Bool - , testSoftConstraints :: [ExPreference] - , testDb :: ExampleDb - , testSupportedExts :: Maybe [Extension] - , testSupportedLangs :: Maybe [Language] - , testPkgConfigDb :: PkgConfigDb - } - --- | Result of a solver test. -data SolverResult = - SolverSuccess [(String, Int)] -- ^ succeeds with given plan - | SolverFailure (String -> Bool) -- ^ fails, and the error message satisfies the predicate - --- | Can be used for test cases where we just want to verify that --- they fail, but do not care about the error message. 
-anySolverFailure :: SolverResult -anySolverFailure = SolverFailure (const True) - -mkTest :: ExampleDb - -> String - -> [String] - -> SolverResult - -> SolverTest -mkTest = mkTestExtLangPC Nothing Nothing [] - -mkTestExts :: [Extension] - -> ExampleDb - -> String - -> [String] - -> SolverResult - -> SolverTest -mkTestExts exts = mkTestExtLangPC (Just exts) Nothing [] - -mkTestLangs :: [Language] - -> ExampleDb - -> String - -> [String] - -> SolverResult - -> SolverTest -mkTestLangs langs = mkTestExtLangPC Nothing (Just langs) [] - -mkTestPCDepends :: [(String, String)] - -> ExampleDb - -> String - -> [String] - -> SolverResult - -> SolverTest -mkTestPCDepends pkgConfigDb = mkTestExtLangPC Nothing Nothing pkgConfigDb - -mkTestExtLangPC :: Maybe [Extension] - -> Maybe [Language] - -> [(String, String)] - -> ExampleDb - -> String - -> [String] - -> SolverResult - -> SolverTest -mkTestExtLangPC exts langs pkgConfigDb db label targets result = SolverTest { - testLabel = label - , testTargets = targets - , testResult = result - , testIndepGoals = False - , testSoftConstraints = [] - , testDb = db - , testSupportedExts = exts - , testSupportedLangs = langs - , testPkgConfigDb = pkgConfigDbFromList pkgConfigDb - } - -runTest :: SolverTest -> TF.TestTree -runTest SolverTest{..} = askOption $ \(OptionShowSolverLog showSolverLog) -> - testCase testLabel $ do - let (_msgs, result) = exResolve testDb testSupportedExts testSupportedLangs - testPkgConfigDb testTargets testIndepGoals testSoftConstraints - when showSolverLog $ mapM_ putStrLn _msgs - case result of - Left err -> assertBool ("Unexpected error:\n" ++ err) (check testResult err) - Right plan -> assertEqual "" (toMaybe testResult) (Just (extractInstallPlan plan)) - where - toMaybe :: SolverResult -> Maybe ([(String, Int)]) - toMaybe (SolverSuccess plan) = Just plan - toMaybe (SolverFailure _ ) = Nothing - - check :: SolverResult -> (String -> Bool) - check (SolverFailure f) = f - check _ = const False - -{------------------------------------------------------------------------------- - Specific example database for the tests --------------------------------------------------------------------------------} - -db1 :: ExampleDb -db1 = - let a = exInst "A" 1 "A-1" [] - in [ Left a - , Right $ exAv "B" 1 [ExAny "A"] - , Right $ exAv "B" 2 [ExAny "A"] - , Right $ exAv "C" 1 [ExFix "B" 1] - , Right $ exAv "D" 1 [ExFix "B" 2] - , Right $ exAv "E" 1 [ExAny "B"] - , Right $ exAv "F" 1 [ExFix "B" 1, ExAny "E"] - , Right $ exAv "G" 1 [ExFix "B" 2, ExAny "E"] - , Right $ exAv "Z" 1 [] - ] - --- In this example, we _can_ install C and D as independent goals, but we have --- to pick two diferent versions for B (arbitrarily) -db2 :: ExampleDb -db2 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "B" 1 [ExAny "A"] - , Right $ exAv "B" 2 [ExAny "A"] - , Right $ exAv "C" 1 [ExAny "B", ExFix "A" 1] - , Right $ exAv "D" 1 [ExAny "B", ExFix "A" 2] - ] - -db3 :: ExampleDb -db3 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "B" 1 [exFlag "flagB" [ExFix "A" 1] [ExFix "A" 2]] - , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"] - , Right $ exAv "D" 1 [ExFix "A" 2, ExAny "B"] - ] - --- | Like db3, but the flag picks a different package rather than a --- different package version --- --- In db3 we cannot install C and D as independent goals because: --- --- * The multiple instance restriction says C and D _must_ share B --- * Since C relies on A-1, C needs B to be compiled with flagB on --- * Since D relies on A-2, D needs B to be 
compiled with flagB off --- * Hence C and D have incompatible requirements on B's flags. --- --- However, _even_ if we don't check explicitly that we pick the same flag --- assignment for 0.B and 1.B, we will still detect the problem because --- 0.B depends on 0.A-1, 1.B depends on 1.A-2, hence we cannot link 0.A to --- 1.A and therefore we cannot link 0.B to 1.B. --- --- In db4 the situation however is trickier. We again cannot install --- packages C and D as independent goals because: --- --- * As above, the multiple instance restriction says that C and D _must_ share B --- * Since C relies on Ax-2, it requires B to be compiled with flagB off --- * Since D relies on Ay-2, it requires B to be compiled with flagB on --- * Hence C and D have incompatible requirements on B's flags. --- --- But now this requirement is more indirect. If we only check dependencies --- we don't see the problem: --- --- * We link 0.B to 1.B --- * 0.B relies on Ay-1 --- * 1.B relies on Ax-1 --- --- We will insist that 0.Ay will be linked to 1.Ay, and 0.Ax to 1.Ax, but since --- we only ever assign to one of these, these constraints are never broken. -db4 :: ExampleDb -db4 = [ - Right $ exAv "Ax" 1 [] - , Right $ exAv "Ax" 2 [] - , Right $ exAv "Ay" 1 [] - , Right $ exAv "Ay" 2 [] - , Right $ exAv "B" 1 [exFlag "flagB" [ExFix "Ax" 1] [ExFix "Ay" 1]] - , Right $ exAv "C" 1 [ExFix "Ax" 2, ExAny "B"] - , Right $ exAv "D" 1 [ExFix "Ay" 2, ExAny "B"] - ] - --- | Some tests involving testsuites --- --- Note that in this test framework test suites are always enabled; if you --- want to test without test suites just set up a test database without --- test suites. --- --- * C depends on A (through its test suite) --- * D depends on B-2 (through its test suite), but B-2 is unavailable --- * E depends on A-1 directly and on A through its test suite. We prefer --- to use A-1 for the test suite in this case. --- * F depends on A-1 directly and on A-2 through its test suite. In this --- case we currently fail to install F, although strictly speaking --- test suites should be considered independent goals. --- * G is like E, but for version A-2. This means that if we cannot install --- E and G together, unless we regard them as independent goals. -db5 :: ExampleDb -db5 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "B" 1 [] - , Right $ exAv "C" 1 [] `withTest` ExTest "testC" [ExAny "A"] - , Right $ exAv "D" 1 [] `withTest` ExTest "testD" [ExFix "B" 2] - , Right $ exAv "E" 1 [ExFix "A" 1] `withTest` ExTest "testE" [ExAny "A"] - , Right $ exAv "F" 1 [ExFix "A" 1] `withTest` ExTest "testF" [ExFix "A" 2] - , Right $ exAv "G" 1 [ExFix "A" 2] `withTest` ExTest "testG" [ExAny "A"] - ] - --- Now the _dependencies_ have test suites --- --- * Installing C is a simple example. C wants version 1 of A, but depends on --- B, and B's testsuite depends on an any version of A. In this case we prefer --- to link (if we don't regard test suites as independent goals then of course --- linking here doesn't even come into it). --- * Installing [C, D] means that we prefer to link B -- depending on how we --- set things up, this means that we should also link their test suites. -db6 :: ExampleDb -db6 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "B" 1 [] `withTest` ExTest "testA" [ExAny "A"] - , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"] - , Right $ exAv "D" 1 [ExAny "B"] - ] - --- Packages with setup dependencies --- --- Install.. 
--- * B: Simple example, just make sure setup deps are taken into account at all --- * C: Both the package and the setup script depend on any version of A. --- In this case we prefer to link --- * D: Variation on C.1 where the package requires a specific (not latest) --- version but the setup dependency is not fixed. Again, we prefer to --- link (picking the older version) --- * E: Variation on C.2 with the setup dependency the more inflexible. --- Currently, in this case we do not see the opportunity to link because --- we consider setup dependencies after normal dependencies; we will --- pick A.2 for E, then realize we cannot link E.setup.A to A.2, and pick --- A.1 instead. This isn't so easy to fix (if we want to fix it at all); --- in particular, considering setup dependencies _before_ other deps is --- not an improvement, because in general we would prefer to link setup --- setups to package deps, rather than the other way around. (For example, --- if we change this ordering then the test for D would start to install --- two versions of A). --- * F: The package and the setup script depend on different versions of A. --- This will only work if setup dependencies are considered independent. -db7 :: ExampleDb -db7 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "B" 1 [] `withSetupDeps` [ExAny "A"] - , Right $ exAv "C" 1 [ExAny "A" ] `withSetupDeps` [ExAny "A" ] - , Right $ exAv "D" 1 [ExFix "A" 1] `withSetupDeps` [ExAny "A" ] - , Right $ exAv "E" 1 [ExAny "A" ] `withSetupDeps` [ExFix "A" 1] - , Right $ exAv "F" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1] - ] - --- If we install C and D together (not as independent goals), we need to build --- both B.1 and B.2, both of which depend on A. -db8 :: ExampleDb -db8 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "B" 1 [ExAny "A"] - , Right $ exAv "B" 2 [ExAny "A"] - , Right $ exAv "C" 1 [] `withSetupDeps` [ExFix "B" 1] - , Right $ exAv "D" 1 [] `withSetupDeps` [ExFix "B" 2] - ] - --- Extended version of `db8` so that we have nested setup dependencies -db9 :: ExampleDb -db9 = db8 ++ [ - Right $ exAv "E" 1 [ExAny "C"] - , Right $ exAv "E" 2 [ExAny "D"] - , Right $ exAv "F" 1 [] `withSetupDeps` [ExFix "E" 1] - , Right $ exAv "G" 1 [] `withSetupDeps` [ExFix "E" 2] - ] - --- Multiple already-installed packages with inter-dependencies, and one package --- (C) that depends on package A-1 for its setup script and package A-2 as a --- library dependency. -db10 :: ExampleDb -db10 = - let rts = exInst "rts" 1 "rts-inst" [] - ghc_prim = exInst "ghc-prim" 1 "ghc-prim-inst" [rts] - base = exInst "base" 1 "base-inst" [rts, ghc_prim] - a1 = exInst "A" 1 "A1-inst" [base] - a2 = exInst "A" 2 "A2-inst" [base] - in [ - Left rts - , Left ghc_prim - , Left base - , Left a1 - , Left a2 - , Right $ exAv "C" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1] - ] - --- | This database tests that a package's setup dependencies are correctly --- linked when the package is linked. See pull request #3268. --- --- When A and B are installed as independent goals, their dependencies on C must --- be linked, due to the single instance restriction. Since C depends on D, 0.D --- and 1.D must be linked. C also has a setup dependency on D, so 0.C-setup.D --- and 1.C-setup.D must be linked. However, D's two link groups must remain --- independent. The solver should be able to choose D-1 for C's library and D-2 --- for C's setup script. 
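In the DSL used throughout these databases, a setup dependency is attached with withSetupDeps, so a package whose library and Setup.hs want different versions of the same dependency is written in a single entry. The entry below is hypothetical (no package "H" occurs in any database here) and only restates the pattern; exAv, withSetupDeps, ExAny, ExFix and ExampleAvailable are the combinators and type from the removed DSL module.

-- Hypothetical entry, mirroring the shape of package C in dbSetupDeps below:
-- H's library accepts any version of A, while H's Setup.hs insists on A-1.
exSetupExample :: ExampleAvailable
exSetupExample = exAv "H" 1 [ExAny "A"] `withSetupDeps` [ExFix "A" 1]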
-dbSetupDeps :: ExampleDb -dbSetupDeps = [ - Right $ exAv "A" 1 [ExAny "C"] - , Right $ exAv "B" 1 [ExAny "C"] - , Right $ exAv "C" 1 [ExFix "D" 1] `withSetupDeps` [ExFix "D" 2] - , Right $ exAv "D" 1 [] - , Right $ exAv "D" 2 [] - ] - --- | Tests for dealing with base shims -db11 :: ExampleDb -db11 = - let base3 = exInst "base" 3 "base-3-inst" [base4] - base4 = exInst "base" 4 "base-4-inst" [] - in [ - Left base3 - , Left base4 - , Right $ exAv "A" 1 [ExFix "base" 3] - ] - --- | Slightly more realistic version of db11 where base-3 depends on syb --- This means that if a package depends on base-3 and on syb, then they MUST --- share the version of syb --- --- * Package A relies on base-3 (which relies on base-4) --- * Package B relies on base-4 --- * Package C relies on both A and B --- * Package D relies on base-3 and on syb-2, which is not possible because --- base-3 has a dependency on syb-1 (non-inheritance of the Base qualifier) --- * Package E relies on base-4 and on syb-2, which is fine. -db12 :: ExampleDb -db12 = - let base3 = exInst "base" 3 "base-3-inst" [base4, syb1] - base4 = exInst "base" 4 "base-4-inst" [] - syb1 = exInst "syb" 1 "syb-1-inst" [base4] - in [ - Left base3 - , Left base4 - , Left syb1 - , Right $ exAv "syb" 2 [ExFix "base" 4] - , Right $ exAv "A" 1 [ExFix "base" 3, ExAny "syb"] - , Right $ exAv "B" 1 [ExFix "base" 4, ExAny "syb"] - , Right $ exAv "C" 1 [ExAny "A", ExAny "B"] - , Right $ exAv "D" 1 [ExFix "base" 3, ExFix "syb" 2] - , Right $ exAv "E" 1 [ExFix "base" 4, ExFix "syb" 2] - ] - -db13 :: ExampleDb -db13 = [ - Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [] - , Right $ exAv "A" 3 [] - ] - --- | Database with some cycles --- --- * Simplest non-trivial cycle: A -> B and B -> A --- * There is a cycle C -> D -> C, but it can be broken by picking the --- right flag assignment. -db14 :: ExampleDb -db14 = [ - Right $ exAv "A" 1 [ExAny "B"] - , Right $ exAv "B" 1 [ExAny "A"] - , Right $ exAv "C" 1 [exFlag "flagC" [ExAny "D"] [ExAny "E"]] - , Right $ exAv "D" 1 [ExAny "C"] - , Right $ exAv "E" 1 [] - ] - --- | Cycles through setup dependencies --- --- The first cycle is unsolvable: package A has a setup dependency on B, --- B has a regular dependency on A, and we only have a single version available --- for both. --- --- The second cycle can be broken by picking different versions: package C-2.0 --- has a setup dependency on D, and D has a regular dependency on C-*. However, --- version C-1.0 is already available (perhaps it didn't have this setup dep). --- Thus, we should be able to break this cycle even if we are installing package --- E, which explictly depends on C-2.0. -db15 :: ExampleDb -db15 = [ - -- First example (real cycle, no solution) - Right $ exAv "A" 1 [] `withSetupDeps` [ExAny "B"] - , Right $ exAv "B" 1 [ExAny "A"] - -- Second example (cycle can be broken by picking versions carefully) - , Left $ exInst "C" 1 "C-1-inst" [] - , Right $ exAv "C" 2 [] `withSetupDeps` [ExAny "D"] - , Right $ exAv "D" 1 [ExAny "C" ] - , Right $ exAv "E" 1 [ExFix "C" 2] - ] - --- | Check that the solver can backtrack after encountering the SIR (issue #2843) --- --- When A and B are installed as independent goals, the single instance --- restriction prevents B from depending on C. 
This database tests that the --- solver can backtrack after encountering the single instance restriction and --- choose the only valid flag assignment (-flagA +flagB): --- --- > flagA flagB B depends on --- > On _ C-* --- > Off On E-* <-- only valid flag assignment --- > Off Off D-2.0, C-* --- --- Since A depends on C-* and D-1.0, and C-1.0 depends on any version of D, --- we must build C-1.0 against D-1.0. Since B depends on D-2.0, we cannot have --- C in the transitive closure of B's dependencies, because that would mean we --- would need two instances of C: one built against D-1.0 and one built against --- D-2.0. -db16 :: ExampleDb -db16 = [ - Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1] - , Right $ exAv "B" 1 [ ExFix "D" 2 - , exFlag "flagA" - [ExAny "C"] - [exFlag "flagB" - [ExAny "E"] - [ExAny "C"]]] - , Right $ exAv "C" 1 [ExAny "D"] - , Right $ exAv "D" 1 [] - , Right $ exAv "D" 2 [] - , Right $ exAv "E" 1 [] - ] - --- | This database checks that when the solver discovers a constraint on a --- package's version after choosing to link that package, it can backtrack to --- try alternative versions for the linked-to package. See pull request #3327. --- --- When A and B are installed as independent goals, their dependencies on C --- must be linked. Since C depends on D, A and B's dependencies on D must also --- be linked. This test relies on the fact that the solver chooses D-2 for both --- 0.D and 1.D before it encounters the test suites' constraints. The solver --- must backtrack to try D-1 for both 0.D and 1.D. -db17 :: ExampleDb -db17 = [ - Right $ exAv "A" 1 [ExAny "C"] `withTest` ExTest "test" [ExFix "D" 1] - , Right $ exAv "B" 1 [ExAny "C"] `withTest` ExTest "test" [ExFix "D" 1] - , Right $ exAv "C" 1 [ExAny "D"] - , Right $ exAv "D" 1 [] - , Right $ exAv "D" 2 [] - ] - --- | Issue #2834 --- When both A and B are installed as independent goals, their dependencies on --- C must be linked. The only combination of C's flags that is consistent with --- A and B's dependencies on D is -flagA +flagB. This database tests that the --- solver can backtrack to find the right combination of flags (requiring F, but --- not E or G) and apply it to both 0.C and 1.C. --- --- > flagA flagB C depends on --- > On _ D-1, E-* --- > Off On F-* <-- Only valid choice --- > Off Off D-2, G-* --- --- The single instance restriction means we cannot have one instance of C --- built against D-1 and one instance built against D-2; since A depends on --- D-1, and B depends on C-2, it is therefore important that C cannot depend --- on any version of D. -db18 :: ExampleDb -db18 = [ - Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1] - , Right $ exAv "B" 1 [ExAny "C", ExFix "D" 2] - , Right $ exAv "C" 1 [exFlag "flagA" - [ExFix "D" 1, ExAny "E"] - [exFlag "flagB" - [ExAny "F"] - [ExFix "D" 2, ExAny "G"]]] - , Right $ exAv "D" 1 [] - , Right $ exAv "D" 2 [] - , Right $ exAv "E" 1 [] - , Right $ exAv "F" 1 [] - , Right $ exAv "G" 1 [] - ] - --- | Tricky test case with independent goals (issue #2842) --- --- Suppose we are installing D, E, and F as independent goals: --- --- * D depends on A-* and C-1, requiring A-1 to be built against C-1 --- * E depends on B-* and C-2, requiring B-1 to be built against C-2 --- * F depends on A-* and B-*; this means we need A-1 and B-1 both to be built --- against the same version of C, violating the single instance restriction. 
--- --- We can visualize this DB as: --- --- > C-1 C-2 --- > /|\ /|\ --- > / | \ / | \ --- > / | X | \ --- > | | / \ | | --- > | |/ \| | --- > | + + | --- > | | | | --- > | A B | --- > \ |\ /| / --- > \ | \ / | / --- > \| V |/ --- > D F E -db19 :: ExampleDb -db19 = [ - Right $ exAv "A" 1 [ExAny "C"] - , Right $ exAv "B" 1 [ExAny "C"] - , Right $ exAv "C" 1 [] - , Right $ exAv "C" 2 [] - , Right $ exAv "D" 1 [ExAny "A", ExFix "C" 1] - , Right $ exAv "E" 1 [ExAny "B", ExFix "C" 2] - , Right $ exAv "F" 1 [ExAny "A", ExAny "B"] - ] - --- | This database tests that the solver correctly backjumps when dependencies --- of linked packages are not linked. It is an example where the conflict set --- from enforcing the single instance restriction is not sufficient. See pull --- request #3327. --- --- When C, A, and B are installed as independent goals, the solver first --- chooses 0.C-1 and 0.D-2. When choosing dependencies for A and B, it links --- 1.D and 2.D to 0.D. Finally, the solver discovers the test's constraint on --- D. It must backjump to try 1.D-1 and then link 2.D to 1.D. -db20 :: ExampleDb -db20 = [ - Right $ exAv "A" 1 [ExAny "B"] - , Right $ exAv "B" 1 [ExAny "D"] `withTest` ExTest "test" [ExFix "D" 1] - , Right $ exAv "C" 1 [ExFix "D" 2] - , Right $ exAv "D" 1 [] - , Right $ exAv "D" 2 [] - ] - --- | Test the trace messages that we get when a package refers to an unknown pkg --- --- TODO: Currently we don't actually test the trace messages, and this particular --- test still suceeds. The trace can only be verified by hand. -db21 :: ExampleDb -db21 = [ - Right $ exAv "A" 1 [ExAny "B"] - , Right $ exAv "A" 2 [ExAny "C"] -- A-2.0 will be tried first, but C unknown - , Right $ exAv "B" 1 [] - ] - --- | A variant of 'db21', which actually fails. -db22 :: ExampleDb -db22 = [ - Right $ exAv "A" 1 [ExAny "B"] - , Right $ exAv "A" 2 [ExAny "C"] - ] - --- | Database for (unsuccessfully) trying to expose a bug in the handling --- of implied linking constraints. The question is whether an implied linking --- constraint should only have the introducing package in its conflict set, --- or also its link target. --- --- It turns out that as long as the Single Instance Restriction is in place, --- it does not matter, because there will aways be an option that is failing --- due to the SIR, which contains the link target in its conflict set. --- --- Even if the SIR is not in place, if there is a solution, one will always --- be found, because without the SIR, linking is always optional, but never --- necessary. --- -db23 :: ExampleDb -db23 = [ - Right $ exAv "X" 1 [ExFix "C" 2, ExAny "A"] - , Right $ exAv "Y" 1 [ExFix "C" 1, ExFix "A" 2] - , Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [ExAny "B"] - , Right $ exAv "B" 1 [ExAny "C"] - , Right $ exAv "C" 1 [] - , Right $ exAv "C" 2 [] - ] - --- | A simplified version of 'db23'. 
-db24 :: ExampleDb -db24 = [ - Right $ exAv "X" 1 [ExFix "B" 2, ExAny "A"] - , Right $ exAv "Y" 1 [ExFix "B" 1, ExFix "A" 2] - , Right $ exAv "A" 1 [] - , Right $ exAv "A" 2 [ExAny "B"] - , Right $ exAv "B" 1 [] - , Right $ exAv "B" 2 [] - ] - -dbExts1 :: ExampleDb -dbExts1 = [ - Right $ exAv "A" 1 [ExExt (EnableExtension RankNTypes)] - , Right $ exAv "B" 1 [ExExt (EnableExtension CPP), ExAny "A"] - , Right $ exAv "C" 1 [ExAny "B"] - , Right $ exAv "D" 1 [ExExt (DisableExtension CPP), ExAny "B"] - , Right $ exAv "E" 1 [ExExt (UnknownExtension "custom"), ExAny "C"] - ] - -dbLangs1 :: ExampleDb -dbLangs1 = [ - Right $ exAv "A" 1 [ExLang Haskell2010] - , Right $ exAv "B" 1 [ExLang Haskell98, ExAny "A"] - , Right $ exAv "C" 1 [ExLang (UnknownLanguage "Haskell3000"), ExAny "B"] - ] - --- | cabal must set enable-lib to false in order to avoid the unavailable --- dependency. Flags are true by default. The flag choice causes "pkg" to --- depend on "false-dep". -testBuildable :: String -> ExampleDependency -> TestTree -testBuildable testName unavailableDep = - runTest $ mkTestExtLangPC (Just []) (Just []) [] db testName ["pkg"] expected - where - expected = SolverSuccess [("false-dep", 1), ("pkg", 1)] - db = [ - Right $ exAv "pkg" 1 - [ unavailableDep - , ExFlag "enable-lib" (Buildable []) NotBuildable ] - `withTest` - ExTest "test" [exFlag "enable-lib" - [ExAny "true-dep"] - [ExAny "false-dep"]] - , Right $ exAv "true-dep" 1 [] - , Right $ exAv "false-dep" 1 [] - ] - --- | cabal must choose -flag1 +flag2 for "pkg", which requires packages --- "flag1-false" and "flag2-true". -dbBuildable1 :: ExampleDb -dbBuildable1 = [ - Right $ exAv "pkg" 1 - [ ExAny "unknown" - , ExFlag "flag1" (Buildable []) NotBuildable - , ExFlag "flag2" (Buildable []) NotBuildable] - `withTests` - [ ExTest "optional-test" - [ ExAny "unknown" - , ExFlag "flag1" - (Buildable []) - (Buildable [ExFlag "flag2" NotBuildable (Buildable [])])] - , ExTest "test" [ exFlag "flag1" [ExAny "flag1-true"] [ExAny "flag1-false"] - , exFlag "flag2" [ExAny "flag2-true"] [ExAny "flag2-false"]] - ] - , Right $ exAv "flag1-true" 1 [] - , Right $ exAv "flag1-false" 1 [] - , Right $ exAv "flag2-true" 1 [] - , Right $ exAv "flag2-false" 1 [] - ] - --- | Package databases for testing @pkg-config@ dependencies. -dbPC1 :: ExampleDb -dbPC1 = [ - Right $ exAv "A" 1 [ExPkg ("pkgA", 1)] - , Right $ exAv "B" 1 [ExPkg ("pkgB", 1), ExAny "A"] - , Right $ exAv "B" 2 [ExPkg ("pkgB", 2), ExAny "A"] - , Right $ exAv "C" 1 [ExAny "B"] - ] - --- | cabal must pick B-2 to avoid the unknown dependency. 
-dbBuildable2 :: ExampleDb -dbBuildable2 = [ - Right $ exAv "A" 1 [ExAny "B"] - , Right $ exAv "B" 1 [ExAny "unknown"] - , Right $ exAv "B" 2 - [ ExAny "unknown" - , ExFlag "disable-lib" NotBuildable (Buildable []) - ] - , Right $ exAv "B" 3 [ExAny "unknown"] - ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/FileMonitor.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/FileMonitor.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/FileMonitor.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,769 +0,0 @@ -module UnitTests.Distribution.Client.FileMonitor (tests) where - -import Control.Monad -import Control.Exception -import Control.Concurrent (threadDelay) -import qualified Data.Set as Set -import System.FilePath -import qualified System.Directory as IO -import Prelude hiding (writeFile) -import qualified Prelude as IO (writeFile) - -import Distribution.Text (simpleParse) -import Distribution.Compat.Binary -import Distribution.Simple.Utils (withTempDirectory) -import Distribution.Verbosity (silent) - -import Distribution.Client.FileMonitor -import Distribution.Client.Compat.Time - -import Test.Tasty -import Test.Tasty.HUnit - - -tests :: Int -> [TestTree] -tests mtimeChange = - [ testCase "sanity check mtimes" $ testFileMTimeSanity mtimeChange - , testCase "no monitor cache" testNoMonitorCache - , testCase "corrupt monitor cache" testCorruptMonitorCache - , testCase "empty monitor" testEmptyMonitor - , testCase "missing file" testMissingFile - , testCase "change file" $ testChangedFile mtimeChange - , testCase "file mtime vs content" $ testChangedFileMtimeVsContent mtimeChange - , testCase "update during action" $ testUpdateDuringAction mtimeChange - , testCase "remove file" testRemoveFile - , testCase "non-existent file" testNonExistentFile - , testCase "changed file type" $ testChangedFileType mtimeChange - - , testGroup "glob matches" - [ testCase "no change" testGlobNoChange - , testCase "add match" $ testGlobAddMatch mtimeChange - , testCase "remove match" $ testGlobRemoveMatch mtimeChange - , testCase "change match" $ testGlobChangeMatch mtimeChange - - , testCase "add match subdir" $ testGlobAddMatchSubdir mtimeChange - , testCase "remove match subdir" $ testGlobRemoveMatchSubdir mtimeChange - , testCase "change match subdir" $ testGlobChangeMatchSubdir mtimeChange - - , testCase "match toplevel dir" $ testGlobMatchTopDir mtimeChange - , testCase "add non-match" $ testGlobAddNonMatch mtimeChange - , testCase "remove non-match" $ testGlobRemoveNonMatch mtimeChange - - , testCase "add non-match" $ testGlobAddNonMatchSubdir mtimeChange - , testCase "remove non-match" $ testGlobRemoveNonMatchSubdir mtimeChange - - , testCase "invariant sorted 1" $ testInvariantMonitorStateGlobFiles - mtimeChange - , testCase "invariant sorted 2" $ testInvariantMonitorStateGlobDirs - mtimeChange - - , testCase "match dirs" $ testGlobMatchDir mtimeChange - , testCase "match dirs only" $ testGlobMatchDirOnly mtimeChange - , testCase "change file type" $ testGlobChangeFileType mtimeChange - , testCase "absolute paths" $ testGlobAbsolutePath mtimeChange - ] - - , testCase "value unchanged" testValueUnchanged - , testCase "value changed" 
testValueChanged - , testCase "value & file changed" $ testValueAndFileChanged mtimeChange - , testCase "value updated" testValueUpdated - ] - --- we rely on file mtimes having a reasonable resolution -testFileMTimeSanity :: Int -> Assertion -testFileMTimeSanity mtimeChange = - withTempDirectory silent "." "file-status-" $ \dir -> do - replicateM_ 10 $ do - IO.writeFile (dir "a") "content" - t1 <- getModTime (dir "a") - threadDelay mtimeChange - IO.writeFile (dir "a") "content" - t2 <- getModTime (dir "a") - assertBool "expected different file mtimes" (t2 > t1) - --- first run, where we don't even call updateMonitor -testNoMonitorCache :: Assertion -testNoMonitorCache = - withFileMonitor $ \root monitor -> do - reason <- expectMonitorChanged root (monitor :: FileMonitor () ()) () - reason @?= MonitorFirstRun - --- write garbage into the binary cache file -testCorruptMonitorCache :: Assertion -testCorruptMonitorCache = - withFileMonitor $ \root monitor -> do - IO.writeFile (fileMonitorCacheFile monitor) "broken" - reason <- expectMonitorChanged root monitor () - reason @?= MonitorCorruptCache - - updateMonitor root monitor [] () () - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [] - - IO.writeFile (fileMonitorCacheFile monitor) "broken" - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitorCorruptCache - --- no files to monitor -testEmptyMonitor :: Assertion -testEmptyMonitor = - withFileMonitor $ \root monitor -> do - touchFile root "a" - updateMonitor root monitor [] () () - touchFile root "b" - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [] - --- monitor a file that is expected to exist -testMissingFile :: Assertion -testMissingFile = do - test monitorFile touchFile "a" - test monitorFileHashed touchFile "a" - test monitorFile touchFile ("dir" "a") - test monitorFileHashed touchFile ("dir" "a") - test monitorDirectory touchDir "a" - test monitorDirectory touchDir ("dir" "a") - where - test :: (FilePath -> MonitorFilePath) - -> (RootPath -> FilePath -> IO ()) - -> FilePath - -> IO () - test monitorKind touch file = - withFileMonitor $ \root monitor -> do - -- a file that doesn't exist at snapshot time is considered to have - -- changed - updateMonitor root monitor [monitorKind file] () () - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged file - - -- a file doesn't exist at snapshot time, but gets added afterwards is - -- also considered to have changed - updateMonitor root monitor [monitorKind file] () () - touch root file - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged file - - -testChangedFile :: Int -> Assertion -testChangedFile mtimeChange = do - test monitorFile touchFile touchFile "a" - test monitorFileHashed touchFile touchFileContent "a" - test monitorFile touchFile touchFile ("dir" "a") - test monitorFileHashed touchFile touchFileContent ("dir" "a") - test monitorDirectory touchDir touchDir "a" - test monitorDirectory touchDir touchDir ("dir" "a") - where - test :: (FilePath -> MonitorFilePath) - -> (RootPath -> FilePath -> IO ()) - -> (RootPath -> FilePath -> IO ()) - -> FilePath - -> IO () - test monitorKind touch touch' file = - withFileMonitor $ \root monitor -> do - touch root file - updateMonitor root monitor [monitorKind file] () () - threadDelay mtimeChange - touch' root file - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged file - - -testChangedFileMtimeVsContent :: Int -> Assertion 
-testChangedFileMtimeVsContent mtimeChange = - withFileMonitor $ \root monitor -> do - -- if we don't touch the file, it's unchanged - touchFile root "a" - updateMonitor root monitor [monitorFile "a"] () () - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFile "a"] - - -- if we do touch the file, it's changed if we only consider mtime - updateMonitor root monitor [monitorFile "a"] () () - threadDelay mtimeChange - touchFile root "a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged "a" - - -- but if we touch the file, it's unchanged if we consider content hash - updateMonitor root monitor [monitorFileHashed "a"] () () - threadDelay mtimeChange - touchFile root "a" - (res2, files2) <- expectMonitorUnchanged root monitor () - res2 @?= () - files2 @?= [monitorFileHashed "a"] - - -- finally if we change the content it's changed - updateMonitor root monitor [monitorFileHashed "a"] () () - threadDelay mtimeChange - touchFileContent root "a" - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged "a" - - -testUpdateDuringAction :: Int -> Assertion -testUpdateDuringAction mtimeChange = do - test (monitorFile "a") touchFile "a" - test (monitorFileHashed "a") touchFile "a" - test (monitorDirectory "a") touchDir "a" - test (monitorFileGlobStr "*") touchFile "a" - test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } - touchDir "a" - where - test :: MonitorFilePath - -> (RootPath -> FilePath -> IO ()) - -> FilePath - -> IO () - test monitorSpec touch file = - withFileMonitor $ \root monitor -> do - touch root file - updateMonitor root monitor [monitorSpec] () () - - -- start doing an update action... - threadDelay mtimeChange -- some time passes - touch root file -- a file gets updates during the action - threadDelay mtimeChange -- some time passes then we finish - updateMonitor root monitor [monitorSpec] () () - -- we don't notice this change since we took the timestamp after the - -- action finished - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorSpec] - - -- Let's try again, this time taking the timestamp before the action - timestamp' <- beginUpdateFileMonitor - threadDelay mtimeChange -- some time passes - touch root file -- a file gets updates during the action - threadDelay mtimeChange -- some time passes then we finish - updateMonitorWithTimestamp root monitor timestamp' [monitorSpec] () () - -- now we do notice the change since we took the snapshot before the - -- action finished - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged file - - -testRemoveFile :: Assertion -testRemoveFile = do - test monitorFile touchFile removeFile "a" - test monitorFileHashed touchFile removeFile "a" - test monitorFile touchFile removeFile ("dir" "a") - test monitorFileHashed touchFile removeFile ("dir" "a") - test monitorDirectory touchDir removeDir "a" - test monitorDirectory touchDir removeDir ("dir" "a") - where - test :: (FilePath -> MonitorFilePath) - -> (RootPath -> FilePath -> IO ()) - -> (RootPath -> FilePath -> IO ()) - -> FilePath - -> IO () - test monitorKind touch remove file = - withFileMonitor $ \root monitor -> do - touch root file - updateMonitor root monitor [monitorKind file] () () - remove root file - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged file - - --- monitor a file that we expect not to exist -testNonExistentFile :: Assertion -testNonExistentFile = - withFileMonitor $ 
\root monitor -> do - -- a file that doesn't exist at snapshot time or check time is unchanged - updateMonitor root monitor [monitorNonExistentFile "a"] () () - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorNonExistentFile "a"] - - -- if the file then exists it has changed - touchFile root "a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged "a" - - -- if the file then exists at snapshot and check time it has changed - updateMonitor root monitor [monitorNonExistentFile "a"] () () - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged "a" - - -- but if the file existed at snapshot time and doesn't exist at check time - -- it is consider unchanged. This is unlike files we expect to exist, but - -- that's because files that exist can have different content and actions - -- can depend on that content, whereas if the action expected a file not to - -- exist and it now does not, it'll give the same result, irrespective of - -- the fact that the file might have existed in the meantime. - updateMonitor root monitor [monitorNonExistentFile "a"] () () - removeFile root "a" - (res2, files2) <- expectMonitorUnchanged root monitor () - res2 @?= () - files2 @?= [monitorNonExistentFile "a"] - - -testChangedFileType :: Int-> Assertion -testChangedFileType mtimeChange = do - test (monitorFile "a") touchFile removeFile createDir - test (monitorFileHashed "a") touchFile removeFile createDir - - test (monitorDirectory "a") createDir removeDir touchFile - test (monitorFileOrDirectory "a") createDir removeDir touchFile - - test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } - touchFile removeFile createDir - test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } - createDir removeDir touchFile - where - test :: MonitorFilePath - -> (RootPath -> String -> IO ()) - -> (RootPath -> String -> IO ()) - -> (RootPath -> String -> IO ()) - -> IO () - test monitorKind touch remove touch' = - withFileMonitor $ \root monitor -> do - touch root "a" - updateMonitor root monitor [monitorKind] () () - threadDelay mtimeChange - remove root "a" - touch' root "a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged "a" - - ------------------- --- globs --- - -testGlobNoChange :: Assertion -testGlobNoChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - touchFile root ("dir" "good-b") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/good-*"] - -testGlobAddMatch :: Int -> Assertion -testGlobAddMatch mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/good-*"] - threadDelay mtimeChange - touchFile root ("dir" "good-b") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "good-b") - -testGlobRemoveMatch :: Int -> Assertion -testGlobRemoveMatch mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - touchFile root ("dir" "good-b") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - threadDelay mtimeChange - removeFile root "dir/good-a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" 
"good-a") - -testGlobChangeMatch :: Int -> Assertion -testGlobChangeMatch mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - touchFile root ("dir" "good-b") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - threadDelay mtimeChange - touchFile root ("dir" "good-b") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/good-*"] - - touchFileContent root ("dir" "good-b") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "good-b") - -testGlobAddMatchSubdir :: Int -> Assertion -testGlobAddMatchSubdir mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "good-a") - updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () - threadDelay mtimeChange - touchFile root ("dir" "b" "good-b") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "b" "good-b") - -testGlobRemoveMatchSubdir :: Int -> Assertion -testGlobRemoveMatchSubdir mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "good-a") - touchFile root ("dir" "b" "good-b") - updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () - threadDelay mtimeChange - removeDir root ("dir" "a") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "a" "good-a") - -testGlobChangeMatchSubdir :: Int -> Assertion -testGlobChangeMatchSubdir mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "good-a") - touchFile root ("dir" "b" "good-b") - updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () - threadDelay mtimeChange - touchFile root ("dir" "b" "good-b") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*/good-*"] - - touchFileContent root "dir/b/good-b" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "b" "good-b") - --- check nothing goes squiffy with matching in the top dir -testGlobMatchTopDir :: Int -> Assertion -testGlobMatchTopDir mtimeChange = - withFileMonitor $ \root monitor -> do - updateMonitor root monitor [monitorFileGlobStr "*"] () () - threadDelay mtimeChange - touchFile root "a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged "a" - -testGlobAddNonMatch :: Int -> Assertion -testGlobAddNonMatch mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - threadDelay mtimeChange - touchFile root ("dir" "bad") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/good-*"] - -testGlobRemoveNonMatch :: Int -> Assertion -testGlobRemoveNonMatch mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "good-a") - touchFile root ("dir" "bad") - updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () - threadDelay mtimeChange - removeFile root "dir/bad" - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/good-*"] - -testGlobAddNonMatchSubdir :: Int -> Assertion -testGlobAddNonMatchSubdir mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "good-a") - updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () - threadDelay mtimeChange - touchFile root ("dir" "b" "bad") - (res, files) <- 
expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*/good-*"] - -testGlobRemoveNonMatchSubdir :: Int -> Assertion -testGlobRemoveNonMatchSubdir mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "good-a") - touchFile root ("dir" "b" "bad") - updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () - threadDelay mtimeChange - removeDir root ("dir" "b") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*/good-*"] - - --- try and tickle a bug that happens if we don't maintain the invariant that --- MonitorStateGlobFiles entries are sorted -testInvariantMonitorStateGlobFiles :: Int -> Assertion -testInvariantMonitorStateGlobFiles mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a") - touchFile root ("dir" "b") - touchFile root ("dir" "c") - touchFile root ("dir" "d") - updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () - threadDelay mtimeChange - -- so there should be no change (since we're doing content checks) - -- but if we can get the dir entries to appear in the wrong order - -- then if the sorted invariant is not maintained then we can fool - -- the 'probeGlobStatus' into thinking there's changes - removeFile root ("dir" "a") - removeFile root ("dir" "b") - removeFile root ("dir" "c") - removeFile root ("dir" "d") - touchFile root ("dir" "d") - touchFile root ("dir" "c") - touchFile root ("dir" "b") - touchFile root ("dir" "a") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*"] - --- same thing for the subdirs case -testInvariantMonitorStateGlobDirs :: Int -> Assertion -testInvariantMonitorStateGlobDirs mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root ("dir" "a" "file") - touchFile root ("dir" "b" "file") - touchFile root ("dir" "c" "file") - touchFile root ("dir" "d" "file") - updateMonitor root monitor [monitorFileGlobStr "dir/*/file"] () () - threadDelay mtimeChange - removeDir root ("dir" "a") - removeDir root ("dir" "b") - removeDir root ("dir" "c") - removeDir root ("dir" "d") - touchFile root ("dir" "d" "file") - touchFile root ("dir" "c" "file") - touchFile root ("dir" "b" "file") - touchFile root ("dir" "a" "file") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*/file"] - --- ensure that a glob can match a directory as well as a file -testGlobMatchDir :: Int -> Assertion -testGlobMatchDir mtimeChange = - withFileMonitor $ \root monitor -> do - createDir root ("dir" "a") - updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () - threadDelay mtimeChange - -- nothing changed yet - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*"] - -- expect dir/b to match and be detected as changed - createDir root ("dir" "b") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "b") - -- now remove dir/a and expect it to be detected as changed - updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () - threadDelay mtimeChange - removeDir root ("dir" "a") - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged ("dir" "a") - -testGlobMatchDirOnly :: Int -> Assertion -testGlobMatchDirOnly mtimeChange = - withFileMonitor $ \root monitor -> do - updateMonitor root monitor [monitorFileGlobStr "dir/*/"] () () - threadDelay mtimeChange - -- expect file 
dir/a to not match, so not detected as changed - touchFile root ("dir" "a") - (res, files) <- expectMonitorUnchanged root monitor () - res @?= () - files @?= [monitorFileGlobStr "dir/*/"] - -- note that checking the file monitor for changes can updates the - -- cached dir mtimes (when it has to record that there's new matches) - -- so we need an extra mtime delay - threadDelay mtimeChange - -- but expect dir/b to match - createDir root ("dir" "b") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "b") - -testGlobChangeFileType :: Int -> Assertion -testGlobChangeFileType mtimeChange = - withFileMonitor $ \root monitor -> do - -- change file to dir - touchFile root ("dir" "a") - updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () - threadDelay mtimeChange - removeFile root ("dir" "a") - createDir root ("dir" "a") - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged ("dir" "a") - -- change dir to file - updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () - threadDelay mtimeChange - removeDir root ("dir" "a") - touchFile root ("dir" "a") - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged ("dir" "a") - -testGlobAbsolutePath :: Int -> Assertion -testGlobAbsolutePath mtimeChange = - withFileMonitor $ \root monitor -> do - root' <- absoluteRoot root - -- absolute glob, removing a file - touchFile root ("dir/good-a") - touchFile root ("dir/good-b") - updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () - threadDelay mtimeChange - removeFile root "dir/good-a" - reason <- expectMonitorChanged root monitor () - reason @?= MonitoredFileChanged (root' "dir/good-a") - -- absolute glob, adding a file - updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () - threadDelay mtimeChange - touchFile root ("dir/good-a") - reason2 <- expectMonitorChanged root monitor () - reason2 @?= MonitoredFileChanged (root' "dir/good-a") - -- absolute glob, changing a file - updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () - threadDelay mtimeChange - touchFileContent root "dir/good-b" - reason3 <- expectMonitorChanged root monitor () - reason3 @?= MonitoredFileChanged (root' "dir/good-b") - - ------------------- --- value changes --- - -testValueUnchanged :: Assertion -testValueUnchanged = - withFileMonitor $ \root monitor -> do - touchFile root "a" - updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" - (res, files) <- expectMonitorUnchanged root monitor 42 - res @?= "ok" - files @?= [monitorFile "a"] - -testValueChanged :: Assertion -testValueChanged = - withFileMonitor $ \root monitor -> do - touchFile root "a" - updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" - reason <- expectMonitorChanged root monitor 43 - reason @?= MonitoredValueChanged 42 - -testValueAndFileChanged :: Int -> Assertion -testValueAndFileChanged mtimeChange = - withFileMonitor $ \root monitor -> do - touchFile root "a" - - -- we change the value and the file, and the value change is reported - updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" - threadDelay mtimeChange - touchFile root "a" - reason <- expectMonitorChanged root monitor 43 - reason @?= MonitoredValueChanged 42 - - -- if fileMonitorCheckIfOnlyValueChanged then if only the value changed - -- then it's reported as MonitoredValueChanged - let monitor' :: FileMonitor Int String - monitor' = monitor { fileMonitorCheckIfOnlyValueChanged = True } - updateMonitor root 
monitor' [monitorFile "a"] 42 "ok" - reason2 <- expectMonitorChanged root monitor' 43 - reason2 @?= MonitoredValueChanged 42 - - -- but if a file changed too then we don't report MonitoredValueChanged - updateMonitor root monitor' [monitorFile "a"] 42 "ok" - threadDelay mtimeChange - touchFile root "a" - reason3 <- expectMonitorChanged root monitor' 43 - reason3 @?= MonitoredFileChanged "a" - -testValueUpdated :: Assertion -testValueUpdated = - withFileMonitor $ \root monitor -> do - touchFile root "a" - - let monitor' :: FileMonitor (Set.Set Int) String - monitor' = (monitor :: FileMonitor (Set.Set Int) String) { - fileMonitorCheckIfOnlyValueChanged = True, - fileMonitorKeyValid = Set.isSubsetOf - } - - updateMonitor root monitor' [monitorFile "a"] (Set.fromList [42,43]) "ok" - (res,_files) <- expectMonitorUnchanged root monitor' (Set.fromList [42]) - res @?= "ok" - - reason <- expectMonitorChanged root monitor' (Set.fromList [42,44]) - reason @?= MonitoredValueChanged (Set.fromList [42,43]) - - -------------- --- Utils - -newtype RootPath = RootPath FilePath - -touchFile :: RootPath -> FilePath -> IO () -touchFile (RootPath root) fname = do - let path = root fname - IO.createDirectoryIfMissing True (takeDirectory path) - IO.writeFile path "touched" - -touchFileContent :: RootPath -> FilePath -> IO () -touchFileContent (RootPath root) fname = do - let path = root fname - IO.createDirectoryIfMissing True (takeDirectory path) - IO.writeFile path "different" - -removeFile :: RootPath -> FilePath -> IO () -removeFile (RootPath root) fname = IO.removeFile (root fname) - -touchDir :: RootPath -> FilePath -> IO () -touchDir root@(RootPath rootdir) dname = do - IO.createDirectoryIfMissing True (rootdir dname) - touchFile root (dname "touch") - removeFile root (dname "touch") - -createDir :: RootPath -> FilePath -> IO () -createDir (RootPath root) dname = do - let path = root dname - IO.createDirectoryIfMissing True (takeDirectory path) - IO.createDirectory path - -removeDir :: RootPath -> FilePath -> IO () -removeDir (RootPath root) dname = IO.removeDirectoryRecursive (root dname) - -absoluteRoot :: RootPath -> IO FilePath -absoluteRoot (RootPath root) = IO.canonicalizePath root - -monitorFileGlobStr :: String -> MonitorFilePath -monitorFileGlobStr globstr - | Just glob <- simpleParse globstr = monitorFileGlob glob - | otherwise = error $ "Failed to parse " ++ globstr - - -expectMonitorChanged :: (Binary a, Binary b) - => RootPath -> FileMonitor a b -> a - -> IO (MonitorChangedReason a) -expectMonitorChanged root monitor key = do - res <- checkChanged root monitor key - case res of - MonitorChanged reason -> return reason - MonitorUnchanged _ _ -> throwIO $ HUnitFailure "expected change" - -expectMonitorUnchanged :: (Binary a, Binary b) - => RootPath -> FileMonitor a b -> a - -> IO (b, [MonitorFilePath]) -expectMonitorUnchanged root monitor key = do - res <- checkChanged root monitor key - case res of - MonitorChanged _reason -> throwIO $ HUnitFailure "expected no change" - MonitorUnchanged b files -> return (b, files) - -checkChanged :: (Binary a, Binary b) - => RootPath -> FileMonitor a b - -> a -> IO (MonitorChanged a b) -checkChanged (RootPath root) monitor key = - checkFileMonitorChanged monitor root key - -updateMonitor :: (Binary a, Binary b) - => RootPath -> FileMonitor a b - -> [MonitorFilePath] -> a -> b -> IO () -updateMonitor (RootPath root) monitor files key result = - updateFileMonitor monitor root Nothing files key result - -updateMonitorWithTimestamp :: (Binary a, Binary b) - => 
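Outside the test suite, the two calls wrapped by the helpers just above (checkFileMonitorChanged and updateFileMonitor) form a change-detecting cache: probe the monitor, reuse the stored result if nothing relevant changed, otherwise redo the work and record the files it depended on. A rough, hypothetical sketch of that flow (the cache path, the "input.txt" dependency and the recompute step are placeholders, not taken from cabal-install itself):

    cachedResult :: FilePath -> IO String
    cachedResult cacheFile = do
        let monitor = newFileMonitor cacheFile :: FileMonitor () String
        status <- checkFileMonitorChanged monitor "." ()
        case status of
          MonitorUnchanged result _files ->
            return result                    -- inputs unchanged, reuse the cache
          MonitorChanged _reason -> do
            let result = "recomputed"        -- placeholder for the real work
            updateFileMonitor monitor "." Nothing [monitorFile "input.txt"] () result
            return result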
RootPath -> FileMonitor a b -> MonitorTimestamp - -> [MonitorFilePath] -> a -> b -> IO () -updateMonitorWithTimestamp (RootPath root) monitor timestamp files key result = - updateFileMonitor monitor root (Just timestamp) files key result - -withFileMonitor :: Eq a => (RootPath -> FileMonitor a b -> IO c) -> IO c -withFileMonitor action = do - withTempDirectory silent "." "file-status-" $ \root -> do - let file = root <.> "monitor" - monitor = newFileMonitor file - finally (action (RootPath root) monitor) $ do - exists <- IO.doesFileExist file - when exists $ IO.removeFile file diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Glob.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Glob.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Glob.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,203 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - -module UnitTests.Distribution.Client.Glob (tests) where - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative -#endif -import Data.Char -import Data.List -import Distribution.Text (display, parse, simpleParse) -import Distribution.Compat.ReadP - -import Distribution.Client.Glob -import UnitTests.Distribution.Client.ArbitraryInstances - -import Test.Tasty -import Test.Tasty.QuickCheck -import Test.Tasty.HUnit -import Control.Exception - - -tests :: [TestTree] -tests = - [ testProperty "print/parse roundtrip" prop_roundtrip_printparse - , testCase "parse examples" testParseCases - ] - ---TODO: [nice to have] tests for trivial globs, tests for matching, --- tests for windows style file paths - -prop_roundtrip_printparse :: FilePathGlob -> Bool -prop_roundtrip_printparse pathglob = - -- can't use simpleParse because it mis-handles trailing spaces - case [ x | (x, []) <- readP_to_S parse (display pathglob) ] of - xs@(_:_) -> last xs == pathglob - _ -> False - --- first run, where we don't even call updateMonitor -testParseCases :: Assertion -testParseCases = do - - FilePathGlob (FilePathRoot "/") GlobDirTrailing <- testparse "/" - FilePathGlob FilePathHomeDir GlobDirTrailing <- testparse "~/" - - FilePathGlob (FilePathRoot "A:\\") GlobDirTrailing <- testparse "A:/" - FilePathGlob (FilePathRoot "Z:\\") GlobDirTrailing <- testparse "z:/" - FilePathGlob (FilePathRoot "C:\\") GlobDirTrailing <- testparse "C:\\" - FilePathGlob FilePathRelative (GlobFile [Literal "_:"]) <- testparse "_:" - - FilePathGlob FilePathRelative - (GlobFile [Literal "."]) <- testparse "." 
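The parse cases above can be reproduced interactively: simpleParse and display come from Distribution.Text, and the glob constructors from Distribution.Client.Glob. A GHCi-style sketch for one more glob, consistent with the cases listed here (the printed output is what the structure above implies, not a transcript from a real session):

    > simpleParse "foo/*" :: Maybe FilePathGlob
    Just (FilePathGlob FilePathRelative (GlobDir [Literal "foo"] (GlobFile [WildCard])))
    > fmap display (simpleParse "foo/*" :: Maybe FilePathGlob)
    Just "foo/*"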
- - FilePathGlob FilePathRelative - (GlobFile [Literal "~"]) <- testparse "~" - - FilePathGlob FilePathRelative - (GlobDir [Literal "."] GlobDirTrailing) <- testparse "./" - - FilePathGlob FilePathRelative - (GlobFile [Literal "foo"]) <- testparse "foo" - - FilePathGlob FilePathRelative - (GlobDir [Literal "foo"] - (GlobFile [Literal "bar"])) <- testparse "foo/bar" - - FilePathGlob FilePathRelative - (GlobDir [Literal "foo"] - (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "foo/bar/" - - FilePathGlob (FilePathRoot "/") - (GlobDir [Literal "foo"] - (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "/foo/bar/" - - FilePathGlob FilePathRelative - (GlobFile [WildCard]) <- testparse "*" - - FilePathGlob FilePathRelative - (GlobFile [WildCard,WildCard]) <- testparse "**" -- not helpful but valid - - FilePathGlob FilePathRelative - (GlobFile [WildCard, Literal "foo", WildCard]) <- testparse "*foo*" - - FilePathGlob FilePathRelative - (GlobFile [Literal "foo", WildCard, Literal "bar"]) <- testparse "foo*bar" - - FilePathGlob FilePathRelative - (GlobFile [Union [[WildCard], [Literal "foo"]]]) <- testparse "{*,foo}" - - parseFail "{" - parseFail "}" - parseFail "," - parseFail "{" - parseFail "{{}" - parseFail "{}" - parseFail "{,}" - parseFail "{foo,}" - parseFail "{,foo}" - - return () - -testparse :: String -> IO FilePathGlob -testparse s = - case simpleParse s of - Just p -> return p - Nothing -> throwIO $ HUnitFailure ("expected parse of: " ++ s) - -parseFail :: String -> Assertion -parseFail s = - case simpleParse s :: Maybe FilePathGlob of - Just _ -> throwIO $ HUnitFailure ("expected no parse of: " ++ s) - Nothing -> return () - -instance Arbitrary FilePathGlob where - arbitrary = (FilePathGlob <$> arbitrary <*> arbitrary) - `suchThat` validFilePathGlob - - shrink (FilePathGlob root pathglob) = - [ FilePathGlob root' pathglob' - | (root', pathglob') <- shrink (root, pathglob) - , validFilePathGlob (FilePathGlob root' pathglob') ] - -validFilePathGlob :: FilePathGlob -> Bool -validFilePathGlob (FilePathGlob FilePathRelative pathglob) = - case pathglob of - GlobDirTrailing -> False - GlobDir [Literal "~"] _ -> False - GlobDir [Literal (d:":")] _ - | isLetter d -> False - _ -> True -validFilePathGlob _ = True - -instance Arbitrary FilePathRoot where - arbitrary = - frequency - [ (3, pure FilePathRelative) - , (1, pure (FilePathRoot unixroot)) - , (1, FilePathRoot <$> windrive) - , (1, pure FilePathHomeDir) - ] - where - unixroot = "/" - windrive = do d <- choose ('A', 'Z'); return (d : ":\\") - - shrink FilePathRelative = [] - shrink (FilePathRoot _) = [FilePathRelative] - shrink FilePathHomeDir = [FilePathRelative] - - -instance Arbitrary FilePathGlobRel where - arbitrary = sized $ \sz -> - oneof $ take (max 1 sz) - [ pure GlobDirTrailing - , GlobFile <$> (getGlobPieces <$> arbitrary) - , GlobDir <$> (getGlobPieces <$> arbitrary) - <*> resize (sz `div` 2) arbitrary - ] - - shrink GlobDirTrailing = [] - shrink (GlobFile glob) = - GlobDirTrailing - : [ GlobFile (getGlobPieces glob') | glob' <- shrink (GlobPieces glob) ] - shrink (GlobDir glob pathglob) = - pathglob - : GlobFile glob - : [ GlobDir (getGlobPieces glob') pathglob' - | (glob', pathglob') <- shrink (GlobPieces glob, pathglob) ] - -newtype GlobPieces = GlobPieces { getGlobPieces :: [GlobPiece] } - deriving Eq - -instance Arbitrary GlobPieces where - arbitrary = GlobPieces . 
mergeLiterals <$> shortListOf1 5 arbitrary - - shrink (GlobPieces glob) = - [ GlobPieces (mergeLiterals (getNonEmpty glob')) - | glob' <- shrink (NonEmpty glob) ] - -mergeLiterals :: [GlobPiece] -> [GlobPiece] -mergeLiterals (Literal a : Literal b : ps) = mergeLiterals (Literal (a++b) : ps) -mergeLiterals (Union as : ps) = Union (map mergeLiterals as) : mergeLiterals ps -mergeLiterals (p:ps) = p : mergeLiterals ps -mergeLiterals [] = [] - -instance Arbitrary GlobPiece where - arbitrary = sized $ \sz -> - frequency - [ (3, Literal <$> shortListOf1 10 (elements globLiteralChars)) - , (1, pure WildCard) - , (1, Union <$> resize (sz `div` 2) (shortListOf1 5 (shortListOf1 5 arbitrary))) - ] - - shrink (Literal str) = [ Literal str' - | str' <- shrink str - , not (null str') - , all (`elem` globLiteralChars) str' ] - shrink WildCard = [] - shrink (Union as) = [ Union (map getGlobPieces (getNonEmpty as')) - | as' <- shrink (NonEmpty (map GlobPieces as)) ] - -globLiteralChars :: [Char] -globLiteralChars = ['\0'..'\128'] \\ "*{},/\\" - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/GZipUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/GZipUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/GZipUtils.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ -module UnitTests.Distribution.Client.GZipUtils ( - tests - ) where - -import Codec.Compression.GZip as GZip -import Codec.Compression.Zlib as Zlib -import Control.Exception.Base (evaluate) -import Control.Exception (try, SomeException) -import Control.Monad (void) -import Data.ByteString as BS (null) -import Data.ByteString.Lazy as BSL (pack, toChunks) -import Data.ByteString.Lazy.Char8 as BSLL (pack, init, length) -import Data.Monoid ((<>)) -import Distribution.Client.GZipUtils (maybeDecompress) -import Data.Word (Word8) - -import Test.Tasty -import Test.Tasty.HUnit -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = [ testCase "maybeDecompress" maybeDecompressUnitTest - -- "decompress plain" property is non-trivial to state, - -- maybeDecompress returns input bytestring only if error occurs right at the beginning of the decompression process - -- generating such input would essentially duplicate maybeDecompress implementation - , testProperty "decompress zlib" prop_maybeDecompress_zlib - , testProperty "decompress gzip" prop_maybeDecompress_gzip - ] - -maybeDecompressUnitTest :: Assertion -maybeDecompressUnitTest = - assertBool "decompress plain" (maybeDecompress original == original) - >> assertBool "decompress zlib (with show)" (show (maybeDecompress compressedZlib) == show original) - >> assertBool "decompress gzip (with show)" (show (maybeDecompress compressedGZip) == show original) - >> assertBool "decompress zlib" (maybeDecompress compressedZlib == original) - >> assertBool "decompress gzip" (maybeDecompress compressedGZip == original) - >> assertBool "have no empty chunks" (Prelude.all (not . BS.null) . BSL.toChunks . maybeDecompress $ compressedZlib) - >> (runBrokenStream >>= assertBool "decompress broken stream" . 
isLeft) - where - original = BSLL.pack "original uncompressed input" - compressedZlib = Zlib.compress original - compressedGZip = GZip.compress original - - runBrokenStream :: IO (Either SomeException ()) - runBrokenStream = try . void . evaluate . BSLL.length $ maybeDecompress (BSLL.init compressedZlib <> BSLL.pack "*") - -prop_maybeDecompress_zlib :: [Word8] -> Property -prop_maybeDecompress_zlib ws = property $ maybeDecompress compressedZlib === original - where original = BSL.pack ws - compressedZlib = Zlib.compress original - -prop_maybeDecompress_gzip :: [Word8] -> Property -prop_maybeDecompress_gzip ws = property $ maybeDecompress compressedGZip === original - where original = BSL.pack ws - compressedGZip = GZip.compress original - --- (Only available from "Data.Either" since 7.8.) -isLeft :: Either a b -> Bool -isLeft (Right _) = False -isLeft (Left _) = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ProjectConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ProjectConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ProjectConfig.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,616 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - -module UnitTests.Distribution.Client.ProjectConfig (tests) where - -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid -import Control.Applicative -#endif -import Data.Map (Map) -import qualified Data.Map as Map -import Data.List - -import Distribution.Package -import Distribution.PackageDescription hiding (Flag) -import Distribution.Compiler -import Distribution.Version -import Distribution.ParseUtils -import Distribution.Simple.Compiler -import Distribution.Simple.Setup -import Distribution.Simple.InstallDirs -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Simple.Utils -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Db - -import Distribution.Client.Types -import Distribution.Client.Dependency.Types -import Distribution.Client.BuildReports.Types -import Distribution.Client.Targets -import Distribution.Utils.NubList -import Network.URI - -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectConfig.Legacy - -import UnitTests.Distribution.Client.ArbitraryInstances - -import Test.Tasty -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ testGroup "ProjectConfig <-> LegacyProjectConfig round trip" $ - [ testProperty "packages" prop_roundtrip_legacytypes_packages - , testProperty "buildonly" prop_roundtrip_legacytypes_buildonly - , testProperty "specific" prop_roundtrip_legacytypes_specific - ] ++ - -- a couple tests seem to trigger a RTS fault in ghc-7.6 and older - -- unclear why as of yet - concat - [ [ testProperty "shared" prop_roundtrip_legacytypes_shared - , testProperty "local" prop_roundtrip_legacytypes_local - , testProperty "all" prop_roundtrip_legacytypes_all - ] - | not usingGhc76orOlder - ] - - , testGroup "individual parser tests" - [ testProperty "package location" prop_parsePackageLocationTokenQ - ] - - , testGroup "ProjectConfig printing/parsing round trip" - [ testProperty "packages" prop_roundtrip_printparse_packages - , testProperty 
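maybeDecompress, exercised by the GZipUtils tests above, is expected to accept gzip data, zlib data, or already-plain data and hand back the uncompressed bytes in all three cases. A small standalone sketch of that contract, using the same libraries as the test module (the payload string is arbitrary):

    import qualified Codec.Compression.GZip as GZip
    import qualified Codec.Compression.Zlib as Zlib
    import qualified Data.ByteString.Lazy.Char8 as BSLL
    import Distribution.Client.GZipUtils (maybeDecompress)

    demoMaybeDecompress :: IO ()
    demoMaybeDecompress = do
      let original = BSLL.pack "some payload"
      print (maybeDecompress (GZip.compress original) == original)  -- True
      print (maybeDecompress (Zlib.compress original) == original)  -- True
      print (maybeDecompress original == original)                  -- True, plain input passes through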
"buildonly" prop_roundtrip_printparse_buildonly - , testProperty "shared" prop_roundtrip_printparse_shared - , testProperty "local" prop_roundtrip_printparse_local - , testProperty "specific" prop_roundtrip_printparse_specific - , testProperty "all" prop_roundtrip_printparse_all - ] - ] - where - usingGhc76orOlder = - case buildCompilerId of - CompilerId GHC v -> v < Version [7,7] [] - _ -> False - - ------------------------------------------------- --- Round trip: conversion to/from legacy types --- - -roundtrip :: Eq a => (a -> b) -> (b -> a) -> a -> Bool -roundtrip f f_inv x = - (f_inv . f) x == x - -roundtrip_legacytypes :: ProjectConfig -> Bool -roundtrip_legacytypes = - roundtrip convertToLegacyProjectConfig - convertLegacyProjectConfig - - -prop_roundtrip_legacytypes_all :: ProjectConfig -> Bool -prop_roundtrip_legacytypes_all = - roundtrip_legacytypes - -prop_roundtrip_legacytypes_packages :: ProjectConfig -> Bool -prop_roundtrip_legacytypes_packages config = - roundtrip_legacytypes - config { - projectConfigBuildOnly = mempty, - projectConfigShared = mempty, - projectConfigLocalPackages = mempty, - projectConfigSpecificPackage = mempty - } - -prop_roundtrip_legacytypes_buildonly :: ProjectConfigBuildOnly -> Bool -prop_roundtrip_legacytypes_buildonly config = - roundtrip_legacytypes - mempty { projectConfigBuildOnly = config } - -prop_roundtrip_legacytypes_shared :: ProjectConfigShared -> Bool -prop_roundtrip_legacytypes_shared config = - roundtrip_legacytypes - mempty { projectConfigShared = config } - -prop_roundtrip_legacytypes_local :: PackageConfig -> Bool -prop_roundtrip_legacytypes_local config = - roundtrip_legacytypes - mempty { projectConfigLocalPackages = config } - -prop_roundtrip_legacytypes_specific :: Map PackageName PackageConfig -> Bool -prop_roundtrip_legacytypes_specific config = - roundtrip_legacytypes - mempty { projectConfigSpecificPackage = MapMappend config } - - --------------------------------------------- --- Round trip: printing and parsing config --- - -roundtrip_printparse :: ProjectConfig -> Bool -roundtrip_printparse config = - case (fmap convertLegacyProjectConfig - . parseLegacyProjectConfig - . showLegacyProjectConfig - . 
convertToLegacyProjectConfig) - config of - ParseOk _ x -> x == config - _ -> False - - -prop_roundtrip_printparse_all :: ProjectConfig -> Bool -prop_roundtrip_printparse_all config = - roundtrip_printparse config { - projectConfigBuildOnly = - hackProjectConfigBuildOnly (projectConfigBuildOnly config), - - projectConfigShared = - hackProjectConfigShared (projectConfigShared config) - } - -prop_roundtrip_printparse_packages :: [PackageLocationString] - -> [PackageLocationString] - -> [SourceRepo] - -> [Dependency] - -> Bool -prop_roundtrip_printparse_packages pkglocstrs1 pkglocstrs2 repos named = - roundtrip_printparse - mempty { - projectPackages = map getPackageLocationString pkglocstrs1, - projectPackagesOptional = map getPackageLocationString pkglocstrs2, - projectPackagesRepo = repos, - projectPackagesNamed = named - } - -prop_roundtrip_printparse_buildonly :: ProjectConfigBuildOnly -> Bool -prop_roundtrip_printparse_buildonly config = - roundtrip_printparse - mempty { - projectConfigBuildOnly = hackProjectConfigBuildOnly config - } - -hackProjectConfigBuildOnly :: ProjectConfigBuildOnly -> ProjectConfigBuildOnly -hackProjectConfigBuildOnly config = - config { - -- These two fields are only command line transitory things, not - -- something to be recorded persistently in a config file - projectConfigOnlyDeps = mempty, - projectConfigDryRun = mempty - } - -prop_roundtrip_printparse_shared :: ProjectConfigShared -> Bool -prop_roundtrip_printparse_shared config = - roundtrip_printparse - mempty { - projectConfigShared = hackProjectConfigShared config - } - -hackProjectConfigShared :: ProjectConfigShared -> ProjectConfigShared -hackProjectConfigShared config = - config { - projectConfigConstraints = - --TODO: [required eventually] parse ambiguity in constraint - -- "pkgname -any" as either any version or disabled flag "any". - let ambiguous ((UserConstraintFlags _pkg flags), _) = - (not . null) [ () | (FlagName name, False) <- flags - , "any" `isPrefixOf` name ] - ambiguous _ = False - in filter (not . ambiguous) (projectConfigConstraints config) - } - - -prop_roundtrip_printparse_local :: PackageConfig -> Bool -prop_roundtrip_printparse_local config = - roundtrip_printparse - mempty { - projectConfigLocalPackages = config - } - -prop_roundtrip_printparse_specific :: Map PackageName (NonMEmpty PackageConfig) - -> Bool -prop_roundtrip_printparse_specific config = - roundtrip_printparse - mempty { - projectConfigSpecificPackage = MapMappend (fmap getNonMEmpty config) - } - - ----------------------------- --- Individual Parser tests --- - -prop_parsePackageLocationTokenQ :: PackageLocationString -> Bool -prop_parsePackageLocationTokenQ (PackageLocationString str) = - case [ x | (x,"") <- Parse.readP_to_S parsePackageLocationTokenQ - (renderPackageLocationToken str) ] of - [str'] -> str' == str - _ -> False - - ------------------------- --- Arbitrary instances --- - -instance Arbitrary ProjectConfig where - arbitrary = - ProjectConfig - <$> (map getPackageLocationString <$> arbitrary) - <*> (map getPackageLocationString <$> arbitrary) - <*> shortListOf 3 arbitrary - <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary - <*> (MapMappend . fmap getNonMEmpty . 
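roundtrip_printparse above composes conversion to the legacy types, printing, parsing, and conversion back, and only the parsing step can fail (it returns a ParseResult). The printing half is usable on its own to render a ProjectConfig in its legacy textual form; a one-line sketch under the same imports as this module:

    renderProjectConfig :: ProjectConfig -> String
    renderProjectConfig = showLegacyProjectConfig . convertToLegacyProjectConfig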
Map.fromList - <$> shortListOf 3 arbitrary) - -- package entries with no content are equivalent to - -- the entry not existing at all, so exclude empty - - shrink (ProjectConfig x0 x1 x2 x3 x4 x5 x6 x7) = - [ ProjectConfig x0' x1' x2' x3' - x4' x5' x6' (MapMappend (fmap getNonMEmpty x7')) - | ((x0', x1', x2', x3'), (x4', x5', x6', x7')) - <- shrink ((x0, x1, x2, x3), - (x4, x5, x6, fmap NonMEmpty (getMapMappend x7))) - ] - -newtype PackageLocationString - = PackageLocationString { getPackageLocationString :: String } - deriving Show - -instance Arbitrary PackageLocationString where - arbitrary = - PackageLocationString <$> - oneof - [ show . getNonEmpty <$> (arbitrary :: Gen (NonEmptyList String)) - , arbitraryGlobLikeStr - , show <$> (arbitrary :: Gen URI) - ] - -arbitraryGlobLikeStr :: Gen String -arbitraryGlobLikeStr = outerTerm - where - outerTerm = concat <$> shortListOf1 4 - (frequency [ (2, token) - , (1, braces <$> innerTerm) ]) - innerTerm = intercalate "," <$> shortListOf1 3 - (frequency [ (3, token) - , (1, braces <$> innerTerm) ]) - token = shortListOf1 4 (elements (['#'..'~'] \\ "{,}")) - braces s = "{" ++ s ++ "}" - - -instance Arbitrary ProjectConfigBuildOnly where - arbitrary = - ProjectConfigBuildOnly - <$> arbitrary - <*> arbitrary - <*> arbitrary - <*> (toNubList <$> shortListOf 2 arbitrary) -- 4 - <*> arbitrary - <*> arbitrary - <*> arbitrary - <*> (fmap getShortToken <$> arbitrary) -- 8 - <*> arbitrary - <*> arbitraryNumJobs - <*> arbitrary - <*> arbitrary -- 12 - <*> (fmap getShortToken <$> arbitrary) - <*> arbitrary - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary) -- 16 - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary) - where - arbitraryNumJobs = fmap (fmap getPositive) <$> arbitrary - - shrink (ProjectConfigBuildOnly - x00 x01 x02 x03 x04 x05 x06 x07 - x08 x09 x10 x11 x12 x13 x14 x15 - x16 x17) = - [ ProjectConfigBuildOnly - x00' x01' x02' x03' x04' - x05' x06' x07' x08' (postShrink_NumJobs x09') - x10' x11' x12 x13' x14 - x15 x16 x17 - | ((x00', x01', x02', x03', x04'), - (x05', x06', x07', x08', x09'), - (x10', x11', x13')) - <- shrink - ((x00, x01, x02, x03, x04), - (x05, x06, x07, x08, preShrink_NumJobs x09), - (x10, x11, x13)) - ] - where - preShrink_NumJobs = fmap (fmap Positive) - postShrink_NumJobs = fmap (fmap getPositive) - -instance Arbitrary ProjectConfigShared where - arbitrary = - ProjectConfigShared - <$> arbitrary -- 4 - <*> arbitraryFlag arbitraryShortToken - <*> arbitraryFlag arbitraryShortToken - <*> arbitrary - <*> arbitrary - <*> (toNubList <$> listOf arbitraryShortToken) - <*> arbitraryConstraints - <*> shortListOf 2 arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - where - arbitraryConstraints :: Gen [(UserConstraint, ConstraintSource)] - arbitraryConstraints = - map (\uc -> (uc, projectConfigConstraintSource)) <$> arbitrary - - shrink (ProjectConfigShared - x00 x01 x02 x03 x04 - x05 x06 x07 x08 x09 - x10 x11 x12 x13) = - [ ProjectConfigShared - x00' (fmap getNonEmpty x01') (fmap getNonEmpty x02') x03' x04' - x05' (postShrink_Constraints x06') x07' x08' x09' - x10' x11' x12' x13' - | ((x00', x01', x02', x03', x04'), - (x05', x06', x07', x08', x09'), - (x10', x11', x12', x13')) - <- shrink - ((x00, fmap NonEmpty x01, fmap NonEmpty x02, x03, x04), - (x05, preShrink_Constraints x06, x07, x08, x09), - (x10, x11, x12, x13)) - ] - where - preShrink_Constraints = map fst - postShrink_Constraints = map (\uc -> (uc, 
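The preShrink_*/postShrink_* pairs and the NonEmpty wrapping in the shrink definitions above are all the same trick: move a field into a newtype whose Arbitrary instance preserves an invariant (non-emptiness, positivity, a fixed constraint source), shrink in that form, then unwrap. Isolated for the list case, using QuickCheck's NonEmptyList modifier, the trick looks like this:

    import Test.QuickCheck (Arbitrary (..), NonEmptyList (..))

    -- shrink a list while guaranteeing the results are never empty
    shrinkNonEmpty :: Arbitrary a => [a] -> [[a]]
    shrinkNonEmpty xs = [ getNonEmpty xs' | xs' <- shrink (NonEmpty xs) ]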
projectConfigConstraintSource)) - -projectConfigConstraintSource :: ConstraintSource -projectConfigConstraintSource = - ConstraintSourceProjectConfig "TODO" - -instance Arbitrary PackageConfig where - arbitrary = - PackageConfig - <$> (MapLast . Map.fromList <$> shortListOf 10 - ((,) <$> arbitraryProgramName - <*> arbitraryShortToken)) - <*> (MapMappend . Map.fromList <$> shortListOf 10 - ((,) <$> arbitraryProgramName - <*> listOf arbitraryShortToken)) - <*> (toNubList <$> listOf arbitraryShortToken) - <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> shortListOf 5 arbitraryShortToken - <*> arbitrary - <*> arbitrary <*> arbitrary - <*> shortListOf 5 arbitraryShortToken - <*> shortListOf 5 arbitraryShortToken - <*> shortListOf 5 arbitraryShortToken - <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitraryFlag arbitraryShortToken - <*> arbitrary - <*> arbitrary <*> arbitrary - <*> arbitrary - <*> arbitraryFlag arbitraryShortToken - <*> arbitrary - <*> arbitraryFlag arbitraryShortToken - <*> arbitrary - where - arbitraryProgramName :: Gen String - arbitraryProgramName = - elements [ programName prog - | (prog, _) <- knownPrograms (defaultProgramDb) ] - - shrink (PackageConfig - x00 x01 x02 x03 x04 - x05 x06 x07 x08 x09 - x10 x11 x12 x13 x14 - x15 x16 x17 x18 x19 - x20 x21 x22 x23 x24 - x25 x26 x27 x28 x29 - x30 x31 x32 x33 x34 - x35 x36 x37 x38 x39 - x40) = - [ PackageConfig - (postShrink_Paths x00') - (postShrink_Args x01') x02' x03' x04' - x05' x06' x07' x08' x09' - x10' x11' (map getNonEmpty x12') x13' x14' - x15' (map getNonEmpty x16') - (map getNonEmpty x17') - (map getNonEmpty x18') - x19' - x20' x21' x22' x23' x24' - x25' x26' x27' x28' x29' - x30' x31' x32' x33' x34' - x35' x36' (fmap getNonEmpty x37') x38' - (fmap getNonEmpty x39') - x40' - | (((x00', x01', x02', x03', x04'), - (x05', x06', x07', x08', x09'), - (x10', x11', x12', x13', x14'), - (x15', x16', x17', x18', x19')), - ((x20', x21', x22', x23', x24'), - (x25', x26', x27', x28', x29'), - (x30', x31', x32', x33', x34'), - (x35', x36', x37', x38', x39'), - (x40'))) - <- shrink - (((preShrink_Paths x00, preShrink_Args x01, x02, x03, x04), - (x05, x06, x07, x08, x09), - (x10, x11, map NonEmpty x12, x13, x14), - (x15, map NonEmpty x16, - map NonEmpty x17, - map NonEmpty x18, - x19)), - ((x20, x21, x22, x23, x24), - (x25, x26, x27, x28, x29), - (x30, x31, x32, x33, x34), - (x35, x36, fmap NonEmpty x37, x38, fmap NonEmpty x39), - (x40))) - ] - where - preShrink_Paths = Map.map NonEmpty - . Map.mapKeys NoShrink - . getMapLast - postShrink_Paths = MapLast - . Map.map getNonEmpty - . Map.mapKeys getNoShrink - preShrink_Args = Map.map (NonEmpty . map NonEmpty) - . Map.mapKeys NoShrink - . getMapMappend - postShrink_Args = MapMappend - . Map.map (map getNonEmpty . getNonEmpty) - . 
Map.mapKeys getNoShrink - - -instance Arbitrary SourceRepo where - arbitrary = (SourceRepo RepoThis - <$> arbitrary - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary) - <*> (fmap getShortToken <$> arbitrary)) - `suchThat` (/= emptySourceRepo) - - shrink (SourceRepo _ x1 x2 x3 x4 x5 x6) = - [ repo - | ((x1', x2', x3'), (x4', x5', x6')) - <- shrink ((x1, - fmap ShortToken x2, - fmap ShortToken x3), - (fmap ShortToken x4, - fmap ShortToken x5, - fmap ShortToken x6)) - , let repo = SourceRepo RepoThis x1' - (fmap getShortToken x2') - (fmap getShortToken x3') - (fmap getShortToken x4') - (fmap getShortToken x5') - (fmap getShortToken x6') - , repo /= emptySourceRepo - ] - -emptySourceRepo :: SourceRepo -emptySourceRepo = SourceRepo RepoThis Nothing Nothing Nothing - Nothing Nothing Nothing - - -instance Arbitrary RepoType where - arbitrary = elements knownRepoTypes - -instance Arbitrary ReportLevel where - arbitrary = elements [NoReports .. DetailedReports] - -instance Arbitrary CompilerFlavor where - arbitrary = elements knownCompilerFlavors - where - --TODO: [code cleanup] export knownCompilerFlavors from D.Compiler - -- it's already defined there, just need it exported. - knownCompilerFlavors = - [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC] - -instance Arbitrary a => Arbitrary (InstallDirs a) where - arbitrary = - InstallDirs - <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 4 - <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 8 - <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 12 - <*> arbitrary <*> arbitrary -- 14 - -instance Arbitrary PackageDB where - arbitrary = oneof [ pure GlobalPackageDB - , pure UserPackageDB - , SpecificPackageDB . getShortToken <$> arbitrary - ] - -instance Arbitrary RemoteRepo where - arbitrary = - RemoteRepo - <$> arbitraryShortToken `suchThat` (not . (":" `isPrefixOf`)) - <*> arbitrary -- URI - <*> arbitrary - <*> listOf arbitraryRootKey - <*> (fmap getNonNegative arbitrary) - <*> pure False - where - arbitraryRootKey = - shortListOf1 5 (oneof [ choose ('0', '9') - , choose ('a', 'f') ]) - -instance Arbitrary UserConstraint where - arbitrary = - oneof - [ UserConstraintVersion <$> arbitrary <*> arbitrary - , UserConstraintInstalled <$> arbitrary - , UserConstraintSource <$> arbitrary - , UserConstraintFlags <$> arbitrary <*> shortListOf1 3 arbitrary - , UserConstraintStanzas <$> arbitrary <*> ((\x->[x]) <$> arbitrary) - ] - -instance Arbitrary OptionalStanza where - arbitrary = elements [minBound..maxBound] - -instance Arbitrary FlagName where - arbitrary = FlagName <$> flagident - where - flagident = lowercase <$> shortListOf1 5 (elements flagChars) - `suchThat` (("-" /=) . 
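Several of the generators above use suchThat to rule out values that would not survive a print/parse round trip; for SourceRepo the excluded value is emptySourceRepo, presumably because a stanza with no fields prints as nothing at all. Stated as its own property (a sketch, not part of the original test list), the invariant the generator maintains is simply:

    prop_generatedSourceRepoNonEmpty :: SourceRepo -> Bool
    prop_generatedSourceRepoNonEmpty repo = repo /= emptySourceRepo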
take 1) - flagChars = "-_" ++ ['a'..'z'] - -instance Arbitrary PreSolver where - arbitrary = elements [minBound..maxBound] - -instance Arbitrary AllowNewer where - arbitrary = oneof [ pure AllowNewerNone - , AllowNewerSome <$> shortListOf1 3 arbitrary - , pure AllowNewerAll - ] - -instance Arbitrary AllowNewerDep where - arbitrary = oneof [ AllowNewerDep <$> arbitrary - , AllowNewerDepScoped <$> arbitrary <*> arbitrary - ] - -instance Arbitrary ProfDetailLevel where - arbitrary = elements [ d | (_,_,d) <- knownProfDetailLevels ] - -instance Arbitrary OptimisationLevel where - arbitrary = elements [minBound..maxBound] - -instance Arbitrary DebugInfoLevel where - arbitrary = elements [minBound..maxBound] - -instance Arbitrary URI where - arbitrary = - URI <$> elements ["file:", "http:", "https:"] - <*> (Just <$> arbitrary) - <*> (('/':) <$> arbitraryURIToken) - <*> (('?':) <$> arbitraryURIToken) - <*> pure "" - -instance Arbitrary URIAuth where - arbitrary = - URIAuth <$> pure "" -- no password as this does not roundtrip - <*> arbitraryURIToken - <*> arbitraryURIPort - -arbitraryURIToken :: Gen String -arbitraryURIToken = - shortListOf1 6 (elements (filter isUnreserved ['\0'..'\255'])) - -arbitraryURIPort :: Gen String -arbitraryURIPort = - oneof [ pure "", (':':) <$> shortListOf1 4 (choose ('0','9')) ] - diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -module UnitTests.Distribution.Client.Sandbox.Timestamp (tests) where - -import System.FilePath - -import Distribution.Simple.Utils (withTempDirectory) -import Distribution.Verbosity - -import Distribution.Client.Compat.Time -import Distribution.Client.Sandbox.Timestamp - -import Test.Tasty -import Test.Tasty.HUnit - -tests :: [TestTree] -tests = - [ testCase "timestamp record version 1 can be read" timestampReadTest_v1 - , testCase "timestamp record version 2 can be read" timestampReadTest_v2 - , testCase "written timestamp record can be read" timestampReadWriteTest ] - -timestampRecord_v1 :: String -timestampRecord_v1 = - "[(\"i386-linux-ghc-8.0.0.20160204\",[(\"/foo/bar/Baz\",1455350946)])" ++ - ",(\"i386-linux-ghc-7.10.3\",[(\"/foo/bar/Baz\",1455484719)])]\n" - -timestampRecord_v2 :: String -timestampRecord_v2 = - "2\n" ++ - "[(\"i386-linux-ghc-8.0.0.20160204\",[(\"/foo/bar/Baz\",1455350946)])" ++ - ",(\"i386-linux-ghc-7.10.3\",[(\"/foo/bar/Baz\",1455484719)])]" - -timestampReadTest_v1 :: Assertion -timestampReadTest_v1 = - timestampReadTest timestampRecord_v1 $ - map (\(i, ts) -> - (i, map (\(p, ModTime t) -> - (p, posixSecondsToModTime . fromIntegral $ t)) ts)) - timestampRecord - -timestampReadTest_v2 :: Assertion -timestampReadTest_v2 = timestampReadTest timestampRecord_v2 timestampRecord - -timestampReadTest :: FilePath -> [TimestampFileRecord] -> Assertion -timestampReadTest fileContent expected = - withTempDirectory silent "." 
"cabal-timestamp-" $ \dir -> do - let fileName = dir "timestamp-record" - writeFile fileName fileContent - tRec <- readTimestampFile fileName - assertEqual "expected timestamp records to be equal" - expected tRec - -timestampRecord :: [TimestampFileRecord] -timestampRecord = - [("i386-linux-ghc-8.0.0.20160204",[("/foo/bar/Baz",ModTime 1455350946)]) - ,("i386-linux-ghc-7.10.3",[("/foo/bar/Baz",ModTime 1455484719)])] - -timestampReadWriteTest :: Assertion -timestampReadWriteTest = - withTempDirectory silent "." "cabal-timestamp-" $ \dir -> do - let fileName = dir "timestamp-record" - writeTimestampFile fileName timestampRecord - tRec <- readTimestampFile fileName - assertEqual "expected timestamp records to be equal" - timestampRecord tRec diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,28 +0,0 @@ -module UnitTests.Distribution.Client.Sandbox ( - tests - ) where - -import Distribution.Client.Sandbox (withSandboxBinDirOnSearchPath) - -import Test.Tasty -import Test.Tasty.HUnit - -import System.FilePath (getSearchPath, ()) - -tests :: [TestTree] -tests = [ testCase "sandboxBinDirOnSearchPath" sandboxBinDirOnSearchPathTest - , testCase "oldSearchPathRestored" oldSearchPathRestoreTest - ] - -sandboxBinDirOnSearchPathTest :: Assertion -sandboxBinDirOnSearchPathTest = - withSandboxBinDirOnSearchPath "foo" $ do - r <- getSearchPath - assertBool "'foo/bin' not on search path" $ ("foo" "bin") `elem` r - -oldSearchPathRestoreTest :: Assertion -oldSearchPathRestoreTest = do - r <- getSearchPath - withSandboxBinDirOnSearchPath "foo" $ return () - r' <- getSearchPath - assertEqual "Old search path wasn't restored" r r' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Targets.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Targets.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Targets.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -module UnitTests.Distribution.Client.Targets ( - tests - ) where - -import Distribution.Client.Targets (UserConstraint (..), readUserConstraint) -import Distribution.Compat.ReadP (ReadP, readP_to_S) -import Distribution.Package (PackageName (..)) -import Distribution.ParseUtils (parseCommaList) -import Distribution.Text (parse) - -import Test.Tasty -import Test.Tasty.HUnit - -import Data.Char (isSpace) - -tests :: [TestTree] -tests = [ testCase "readUserConstraint" readUserConstraintTest - , testCase "parseUserConstraint" parseUserConstraintTest - , testCase "readUserConstraints" readUserConstraintsTest - ] - -readUserConstraintTest :: Assertion -readUserConstraintTest = - assertEqual ("Couldn't read constraint: '" ++ constr ++ "'") expected actual - where - 
pkgName = "template-haskell" - constr = pkgName ++ " installed" - - expected = UserConstraintInstalled (PackageName pkgName) - actual = let (Right r) = readUserConstraint constr in r - -parseUserConstraintTest :: Assertion -parseUserConstraintTest = - assertEqual ("Couldn't parse constraint: '" ++ constr ++ "'") expected actual - where - pkgName = "template-haskell" - constr = pkgName ++ " installed" - - expected = [UserConstraintInstalled (PackageName pkgName)] - actual = [ x | (x, ys) <- readP_to_S parseUserConstraint constr - , all isSpace ys] - - parseUserConstraint :: ReadP r UserConstraint - parseUserConstraint = parse - -readUserConstraintsTest :: Assertion -readUserConstraintsTest = - assertEqual ("Couldn't read constraints: '" ++ constr ++ "'") expected actual - where - pkgName = "template-haskell" - constr = pkgName ++ " installed" - - expected = [[UserConstraintInstalled (PackageName pkgName)]] - actual = [ x | (x, ys) <- readP_to_S parseUserConstraints constr - , all isSpace ys] - - parseUserConstraints :: ReadP r [UserConstraint] - parseUserConstraints = parseCommaList parse diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Tar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Tar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Tar.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,75 +0,0 @@ -module UnitTests.Distribution.Client.Tar ( - tests - ) where - -import Distribution.Client.Tar ( filterEntries - , filterEntriesM - ) -import Codec.Archive.Tar ( Entries(..) - , foldEntries - ) -import Codec.Archive.Tar.Entry ( EntryContent(..) - , simpleEntry - , Entry(..) - , toTarPath - ) - -import Test.Tasty -import Test.Tasty.HUnit - -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import Control.Monad.Writer.Lazy (runWriterT, tell) - -tests :: [TestTree] -tests = [ testCase "filterEntries" filterTest - , testCase "filterEntriesM" filterMTest - ] - -filterTest :: Assertion -filterTest = do - let e1 = getFileEntry "file1" "x" - e2 = getFileEntry "file2" "y" - p = (\e -> let (NormalFile dta _) = entryContent e - str = BS.Char8.unpack dta - in not . (=="y") $ str) - assertEqual "Unexpected result for filter" "xz" $ - entriesToString $ filterEntries p $ Next e1 $ Next e2 Done - assertEqual "Unexpected result for filter" "z" $ - entriesToString $ filterEntries p $ Done - assertEqual "Unexpected result for filter" "xf" $ - entriesToString $ filterEntries p $ Next e1 $ Next e2 $ Fail "f" - -filterMTest :: Assertion -filterMTest = do - let e1 = getFileEntry "file1" "x" - e2 = getFileEntry "file2" "y" - p = (\e -> let (NormalFile dta _) = entryContent e - str = BS.Char8.unpack dta - in tell "t" >> return (not . 
(=="y") $ str)) - - (r, w) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 Done - assertEqual "Unexpected result for filterM" "xz" $ entriesToString r - assertEqual "Unexpected result for filterM w" "tt" w - - (r1, w1) <- runWriterT $ filterEntriesM p $ Done - assertEqual "Unexpected result for filterM" "z" $ entriesToString r1 - assertEqual "Unexpected result for filterM w" "" w1 - - (r2, w2) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 $ Fail "f" - assertEqual "Unexpected result for filterM" "xf" $ entriesToString r2 - assertEqual "Unexpected result for filterM w" "tt" w2 - -getFileEntry :: FilePath -> [Char] -> Entry -getFileEntry pth dta = - simpleEntry tp $ NormalFile dta' $ BS.length dta' - where tp = case toTarPath False pth of - Right tp' -> tp' - Left e -> error e - dta' = BS.Char8.pack dta - -entriesToString :: Entries String -> String -entriesToString = - foldEntries (\e acc -> let (NormalFile dta _) = entryContent e - str = BS.Char8.unpack dta - in str ++ acc) "z" id diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/UserConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/UserConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/UserConfig.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Distribution/Client/UserConfig.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,112 +0,0 @@ -{-# LANGUAGE CPP #-} -module UnitTests.Distribution.Client.UserConfig - ( tests - ) where - -import Control.Exception (bracket) -import Control.Monad (replicateM_) -import Data.List (sort, nub) -#if !MIN_VERSION_base(4,8,0) -import Data.Monoid -#endif -import System.Directory (doesFileExist, - getCurrentDirectory, getTemporaryDirectory) -import System.FilePath (()) - -import Test.Tasty -import Test.Tasty.HUnit - -import Distribution.Client.Config -import Distribution.Utils.NubList (fromNubList) -import Distribution.Client.Setup (GlobalFlags (..), InstallFlags (..)) -import Distribution.Client.Utils (removeExistingFile) -import Distribution.Simple.Setup (Flag (..), ConfigFlags (..), fromFlag) -import Distribution.Simple.Utils (withTempDirectory) -import Distribution.Verbosity (silent) - -tests :: [TestTree] -tests = [ testCase "nullDiffOnCreate" nullDiffOnCreateTest - , testCase "canDetectDifference" canDetectDifference - , testCase "canUpdateConfig" canUpdateConfig - , testCase "doubleUpdateConfig" doubleUpdateConfig - , testCase "newDefaultConfig" newDefaultConfig - ] - -nullDiffOnCreateTest :: Assertion -nullDiffOnCreateTest = bracketTest $ \configFile -> do - -- Create a new default config file in our test directory. - _ <- loadConfig silent (Flag configFile) - -- Now we read it in and compare it against the default. - diff <- userConfigDiff $ globalFlags configFile - assertBool (unlines $ "Following diff should be empty:" : diff) $ null diff - - -canDetectDifference :: Assertion -canDetectDifference = bracketTest $ \configFile -> do - -- Create a new default config file in our test directory. 
- _ <- loadConfig silent (Flag configFile) - appendFile configFile "verbose: 0\n" - diff <- userConfigDiff $ globalFlags configFile - assertBool (unlines $ "Should detect a difference:" : diff) $ - diff == [ "+ verbose: 0" ] - - -canUpdateConfig :: Assertion -canUpdateConfig = bracketTest $ \configFile -> do - -- Write a trivial cabal file. - writeFile configFile "tests: True\n" - -- Update the config file. - userConfigUpdate silent $ globalFlags configFile - -- Load it again. - updated <- loadConfig silent (Flag configFile) - assertBool ("Field 'tests' should be True") $ - fromFlag (configTests $ savedConfigureFlags updated) - - -doubleUpdateConfig :: Assertion -doubleUpdateConfig = bracketTest $ \configFile -> do - -- Create a new default config file in our test directory. - _ <- loadConfig silent (Flag configFile) - -- Update it twice. - replicateM_ 2 . userConfigUpdate silent $ globalFlags configFile - -- Load it again. - updated <- loadConfig silent (Flag configFile) - - assertBool ("Field 'remote-repo' doesn't contain duplicates") $ - listUnique (map show . fromNubList . globalRemoteRepos $ savedGlobalFlags updated) - assertBool ("Field 'extra-prog-path' doesn't contain duplicates") $ - listUnique (map show . fromNubList . configProgramPathExtra $ savedConfigureFlags updated) - assertBool ("Field 'build-summary' doesn't contain duplicates") $ - listUnique (map show . fromNubList . installSummaryFile $ savedInstallFlags updated) - - -newDefaultConfig :: Assertion -newDefaultConfig = do - sysTmpDir <- getTemporaryDirectory - withTempDirectory silent sysTmpDir "cabal-test" $ \tmpDir -> do - let configFile = tmpDir "tmp.config" - _ <- createDefaultConfigFile silent configFile - exists <- doesFileExist configFile - assertBool ("Config file should be written to " ++ configFile) exists - - -globalFlags :: FilePath -> GlobalFlags -globalFlags configFile = mempty { globalConfigFile = Flag configFile } - - -listUnique :: Ord a => [a] -> Bool -listUnique xs = - let sorted = sort xs - in nub sorted == xs - - -bracketTest :: (FilePath -> IO ()) -> Assertion -bracketTest = - bracket testSetup testTearDown - where - testSetup :: IO FilePath - testSetup = fmap ( "test-user-config") getCurrentDirectory - - testTearDown :: FilePath -> IO () - testTearDown configFile = - mapM_ removeExistingFile [configFile, configFile ++ ".backup"] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Options.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Options.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests/Options.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests/Options.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} - -module UnitTests.Options ( OptionShowSolverLog(..) - , OptionMtimeChangeDelay(..) 
- , extraOptions ) - where - -import Data.Proxy -import Data.Typeable - -import Test.Tasty.Options - -{------------------------------------------------------------------------------- - Test options --------------------------------------------------------------------------------} - -extraOptions :: [OptionDescription] -extraOptions = - [ Option (Proxy :: Proxy OptionShowSolverLog) - , Option (Proxy :: Proxy OptionMtimeChangeDelay) - ] - -newtype OptionShowSolverLog = OptionShowSolverLog Bool - deriving Typeable - -instance IsOption OptionShowSolverLog where - defaultValue = OptionShowSolverLog False - parseValue = fmap OptionShowSolverLog . safeRead - optionName = return "show-solver-log" - optionHelp = return "Show full log from the solver" - optionCLParser = flagCLParser Nothing (OptionShowSolverLog True) - -newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int - deriving Typeable - -instance IsOption OptionMtimeChangeDelay where - defaultValue = OptionMtimeChangeDelay 0 - parseValue = fmap OptionMtimeChangeDelay . safeRead - optionName = return "mtime-change-delay" - optionHelp = return $ "How long to wait before attempting to detect" - ++ "file modification, in microseconds" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.1/tests/UnitTests.hs 2016-11-07 10:02:42.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.1/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} - -module Main - where - -import Test.Tasty - -import Control.Monad -import Data.Time.Clock -import System.FilePath - -import Distribution.Simple.Utils -import Distribution.Verbosity - -import Distribution.Client.Compat.Time - -import qualified UnitTests.Distribution.Client.Compat.Time -import qualified UnitTests.Distribution.Client.Dependency.Modular.PSQ -import qualified UnitTests.Distribution.Client.Dependency.Modular.Solver -import qualified UnitTests.Distribution.Client.FileMonitor -import qualified UnitTests.Distribution.Client.Glob -import qualified UnitTests.Distribution.Client.GZipUtils -import qualified UnitTests.Distribution.Client.Sandbox -import qualified UnitTests.Distribution.Client.Sandbox.Timestamp -import qualified UnitTests.Distribution.Client.Tar -import qualified UnitTests.Distribution.Client.Targets -import qualified UnitTests.Distribution.Client.UserConfig -import qualified UnitTests.Distribution.Client.ProjectConfig - -import UnitTests.Options - - -tests :: Int -> TestTree -tests mtimeChangeCalibrated = - askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) -> - let mtimeChange = if mtimeChangeProvided /= 0 - then mtimeChangeProvided - else mtimeChangeCalibrated - in - testGroup "Unit Tests" - [ testGroup "UnitTests.Distribution.Client.Compat.Time" $ - UnitTests.Distribution.Client.Compat.Time.tests mtimeChange - , testGroup "UnitTests.Distribution.Client.Dependency.Modular.PSQ" - UnitTests.Distribution.Client.Dependency.Modular.PSQ.tests - , testGroup "UnitTests.Distribution.Client.Dependency.Modular.Solver" - UnitTests.Distribution.Client.Dependency.Modular.Solver.tests - , testGroup "UnitTests.Distribution.Client.FileMonitor" $ - UnitTests.Distribution.Client.FileMonitor.tests mtimeChange - , testGroup "UnitTests.Distribution.Client.Glob" - 
UnitTests.Distribution.Client.Glob.tests - , testGroup "Distribution.Client.GZipUtils" - UnitTests.Distribution.Client.GZipUtils.tests - , testGroup "Distribution.Client.Sandbox" - UnitTests.Distribution.Client.Sandbox.tests - , testGroup "Distribution.Client.Sandbox.Timestamp" - UnitTests.Distribution.Client.Sandbox.Timestamp.tests - , testGroup "Distribution.Client.Tar" - UnitTests.Distribution.Client.Tar.tests - , testGroup "Distribution.Client.Targets" - UnitTests.Distribution.Client.Targets.tests - , testGroup "UnitTests.Distribution.Client.UserConfig" - UnitTests.Distribution.Client.UserConfig.tests - , testGroup "UnitTests.Distribution.Client.ProjectConfig" - UnitTests.Distribution.Client.ProjectConfig.tests - ] - -main :: IO () -main = do - mtimeChangeDelay <- calibrateMtimeChangeDelay - defaultMainWithIngredients - (includingOptions extraOptions : defaultIngredients) - (tests mtimeChangeDelay) - --- Based on code written by Neill Mitchell for Shake. See --- 'sleepFileTimeCalibrate' in 'Test.Type'. The returned delay is never smaller --- than 10 ms, but never larger than 1 second. -calibrateMtimeChangeDelay :: IO Int -calibrateMtimeChangeDelay = do - withTempDirectory silent "." "calibration-" $ \dir -> do - let fileName = dir "probe" - mtimes <- forM [1..25] $ \(i::Int) -> time $ do - writeFile fileName $ show i - t0 <- getModTime fileName - let spin j = do - writeFile fileName $ show (i,j) - t1 <- getModTime fileName - unless (t0 < t1) (spin $ j + 1) - spin (0::Int) - let mtimeChange = maximum mtimes - mtimeChange' = min 1000000 $ (max 10000 mtimeChange) * 2 - notice normal $ "File modification time resolution calibration completed, " - ++ "maximum delay observed: " - ++ (show . toMillis $ mtimeChange ) ++ " ms. " - ++ "Will be using delay of " ++ (show . toMillis $ mtimeChange') - ++ " for test runs." - return mtimeChange' - where - toMillis :: Int -> Double - toMillis x = fromIntegral x / 1000.0 - - time :: IO () -> IO Int - time act = do - t0 <- getCurrentTime - act - t1 <- getCurrentTime - return . ceiling $! (t1 `diffUTCTime` t0) * 1e6 -- microseconds diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/bash-completion/cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/bash-completion/cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/bash-completion/cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/bash-completion/cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,93 @@ +# cabal command line completion +# Copyright 2007-2008 "Lennart Kolmodin" +# "Duncan Coutts" +# + +# List cabal targets by type, pass: +# - test-suite for test suites +# - benchmark for benchmarks +# - executable for executables +# - executable|test-suite|benchmark for the three +_cabal_list() +{ + cat *.cabal | + grep -Ei "^[[:space:]]*($1)[[:space:]]" | + sed -e "s/.* \([^ ]*\).*/\1/" +} + +# List possible targets depending on the command supplied as parameter. The +# ideal option would be to implement this via --list-options on cabal directly. +# This is a temporary workaround. +_cabal_targets() +{ + # If command ($*) contains build, repl, test or bench completes with + # targets of according type. 
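_cabal_list above simply greps the package's .cabal file for stanza headers of the requested kinds and prints the stanza names, so target completion for 'cabal test', 'cabal run' and friends is driven by those names (alongside cabal's own --list-options output). For a hypothetical .cabal file containing the stanzas below (the names are made up for illustration), 'cabal test <TAB>' would offer 'unit-tests' and 'cabal bench <TAB>' would offer 'perf':

    test-suite unit-tests
      type: exitcode-stdio-1.0
    benchmark perf
      type: exitcode-stdio-1.0

    $ _cabal_list "test-suite|benchmark"
    unit-tests
    perf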
+ [ -f *.cabal ] || return 0 + local comp + for comp in $*; do + [ $comp == build ] && _cabal_list "executable|test-suite|benchmark" && break + [ $comp == repl ] && _cabal_list "executable|test-suite|benchmark" && break + [ $comp == run ] && _cabal_list "executable" && break + [ $comp == test ] && _cabal_list "test-suite" && break + [ $comp == bench ] && _cabal_list "benchmark" && break + done +} + +# List possible subcommands of a cabal subcommand. +# +# In example "sandbox" is a cabal subcommand that itself has subcommands. Since +# "cabal --list-options" doesn't work in such cases we have to get the list +# using other means. +_cabal_subcommands() +{ + local word + for word in "$@"; do + case "$word" in + sandbox) + # Get list of "cabal sandbox" subcommands from its help message. + "$1" help sandbox | + sed -n '1,/^Subcommands:$/d;/^Flags for sandbox:$/,$d;/^ /d;s/^\(.*\):/\1/p' + break # Terminate for loop. + ;; + esac + done +} + +__cabal_has_doubledash () +{ + local c=1 + # Ignore the last word, because it is replaced anyways. + # This allows expansion for flags on "cabal foo --", + # but does not try to complete after "cabal foo -- ". + local n=$((${#COMP_WORDS[@]} - 1)) + while [ $c -lt $n ]; do + if [ "--" = "${COMP_WORDS[c]}" ]; then + return 0 + fi + ((c++)) + done + return 1 +} + +_cabal() +{ + # no completion past cabal arguments. + __cabal_has_doubledash && return + + # get the word currently being completed + local cur + cur=${COMP_WORDS[$COMP_CWORD]} + + # create a command line to run + local cmd + # copy all words the user has entered + cmd=( ${COMP_WORDS[@]} ) + + # replace the current word with --list-options + cmd[${COMP_CWORD}]="--list-options" + + # the resulting completions should be put into this array + COMPREPLY=( $( compgen -W "$( ${cmd[@]} ) $( _cabal_targets ${cmd[@]} ) $( _cabal_subcommands ${COMP_WORDS[@]} )" -- $cur ) ) +} + +complete -F _cabal -o default cabal diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/bootstrap.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/bootstrap.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/bootstrap.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/bootstrap.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,564 @@ +#!/bin/sh + +# A script to bootstrap cabal-install. + +# It works by downloading and installing the Cabal, zlib and +# HTTP packages. It then installs cabal-install itself. +# It expects to be run inside the cabal-install directory. + +# Install settings, you can override these by setting environment vars. E.g. 
if +# you don't want profiling and dynamic versions of libraries to be installed in +# addition to vanilla, run 'EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh' + +#VERBOSE +DEFAULT_CONFIGURE_OPTS="--enable-library-profiling --enable-shared" +EXTRA_CONFIGURE_OPTS=${EXTRA_CONFIGURE_OPTS-$DEFAULT_CONFIGURE_OPTS} +#EXTRA_BUILD_OPTS +#EXTRA_INSTALL_OPTS + +die () { printf "\nError during cabal-install bootstrap:\n$1\n" >&2 && exit 2 ;} + +# programs, you can override these by setting environment vars +GHC="${GHC:-ghc}" +GHC_PKG="${GHC_PKG:-ghc-pkg}" +GHC_VER="$(${GHC} --numeric-version)" +HADDOCK=${HADDOCK:-haddock} +WGET="${WGET:-wget}" +CURL="${CURL:-curl}" +FETCH="${FETCH:-fetch}" +TAR="${TAR:-tar}" +GZIP_PROGRAM="${GZIP_PROGRAM:-gzip}" + +# The variable SCOPE_OF_INSTALLATION can be set on the command line to +# use/install the libaries needed to build cabal-install to a custom package +# database instead of the user or global package database. +# +# Example: +# +# $ ghc-pkg init /my/package/database +# $ SCOPE_OF_INSTALLATION='--package-db=/my/package/database' ./bootstrap.sh +# +# You can also combine SCOPE_OF_INSTALLATION with PREFIX: +# +# $ ghc-pkg init /my/prefix/packages.conf.d +# $ SCOPE_OF_INSTALLATION='--package-db=/my/prefix/packages.conf.d' \ +# PREFIX=/my/prefix ./bootstrap.sh +# +# If you use the --global,--user or --sandbox arguments, this will +# override the SCOPE_OF_INSTALLATION setting and not use the package +# database you pass in the SCOPE_OF_INSTALLATION variable. + +SCOPE_OF_INSTALLATION="${SCOPE_OF_INSTALLATION:---user}" +DEFAULT_PREFIX="${HOME}/.cabal" + +# Try to respect $TMPDIR. +[ -"$TMPDIR"- = -""- ] && + export TMPDIR=/tmp/cabal-$(echo $(od -XN4 -An /dev/random)) && mkdir $TMPDIR + +# Check for a C compiler, using user-set $CC, if any, first. +for c in $CC gcc clang cc icc; do + $c --version 2>&1 >/dev/null && CC=$c && + echo "Using $c for C compiler. If this is not what you want, set CC." >&2 && + break +done + +# None found. +[ -"$CC"- = -""- ] && die 'C compiler not found (or could not be run). + If a C compiler is installed make sure it is on your PATH, or set $CC.' + +# Find the correct linker/linker-wrapper. +LINK="$(for link in collect2 ld; do + [ $($CC -print-prog-name=$link) = $link ] && continue || + $CC -print-prog-name=$link + done)" + +# Fall back to "ld"... might work. +[ -$LINK- = -""- ] && LINK=ld + +# And finally, see if we can compile and link something. + echo 'int main(){}' | $CC -xc - -o /dev/null || + die "C compiler and linker could not compile a simple test program. + Please check your toolchain." + +# Warn that were's overriding $LD if set (if you want). +[ -"$LD"- != -""- ] && [ -"$LD"- != -"$LINK"- ] && + echo "Warning: value set in $LD is not the same as C compiler's $LINK." >&2 + echo "Using $LINK instead." >&2 + +# Set LD, overriding environment if necessary. +export LD=$LINK + +# Check we're in the right directory, etc. +grep "cabal-install" ./cabal-install.cabal > /dev/null 2>&1 || + die "The bootstrap.sh script must be run in the cabal-install directory" + +${GHC} --numeric-version > /dev/null 2>&1 || + die "${GHC} not found (or could not be run). + If ghc is installed, make sure it is on your PATH, + or set the GHC and GHC_PKG vars." + +${GHC_PKG} --version > /dev/null 2>&1 || die "${GHC_PKG} not found." + +GHC_PKG_VER="$(${GHC_PKG} --version | cut -d' ' -f 5)" + +[ ${GHC_VER} = ${GHC_PKG_VER} ] || + die "Version mismatch between ${GHC} and ${GHC_PKG}. + If you set the GHC variable then set GHC_PKG too." 
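# [Editor's note, not part of the upstream script: a hedged usage sketch.
# The check above only verifies that $GHC and $GHC_PKG report the same
# version, so when overriding the compiler both variables should be set
# together, e.g. (assuming versioned binaries such as ghc-8.0.1 and
# ghc-pkg-8.0.1 exist on PATH):
#
#     GHC=ghc-8.0.1 GHC_PKG=ghc-pkg-8.0.1 ./bootstrap.sh --user
#
# --user is the default installation scope; --global, --sandbox, --no-doc
# and -j/--jobs are handled by the option loop that follows.]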
+ +JOBS="-j1" +while [ "$#" -gt 0 ]; do + case "${1}" in + "--user") + SCOPE_OF_INSTALLATION="${1}" + shift;; + "--global") + SCOPE_OF_INSTALLATION="${1}" + DEFAULT_PREFIX="/usr/local" + shift;; + "--sandbox") + shift + # check if there is another argument which doesn't start with -- + if [ "$#" -le 0 ] || [ ! -z $(echo "${1}" | grep "^--") ] + then + SANDBOX=".cabal-sandbox" + else + SANDBOX="${1}" + shift + fi;; + "--no-doc") + NO_DOCUMENTATION=1 + shift;; + "-j"|"--jobs") + shift + # check if there is another argument which doesn't start with - or -- + if [ "$#" -le 0 ] \ + || [ ! -z $(echo "${1}" | grep "^-") ] \ + || [ ! -z $(echo "${1}" | grep "^--") ] + then + JOBS="-j" + else + JOBS="-j${1}" + shift + fi;; + *) + echo "Unknown argument or option, quitting: ${1}" + echo "usage: bootstrap.sh [OPTION]" + echo + echo "options:" + echo " -j/--jobs Number of concurrent workers to use (Default: 1)" + echo " -j without an argument will use all available cores" + echo " --user Install for the local user (default)" + echo " --global Install systemwide (must be run as root)" + echo " --no-doc Do not generate documentation for installed"\ + "packages" + echo " --sandbox Install to a sandbox in the default location"\ + "(.cabal-sandbox)" + echo " --sandbox path Install to a sandbox located at path" + exit;; + esac +done + +# Do not try to use -j with GHC older than 7.8 +case $GHC_VER in + 7.4*|7.6*) + JOBS="" + ;; + *) + ;; +esac + +abspath () { case "$1" in /*)printf "%s\n" "$1";; *)printf "%s\n" "$PWD/$1";; + esac; } + +if [ ! -z "$SANDBOX" ] +then # set up variables for sandbox bootstrap + # Make the sandbox path absolute since it will be used from + # different working directories when the dependency packages are + # installed. + SANDBOX=$(abspath "$SANDBOX") + # Get the name of the package database which cabal sandbox would use. + GHC_ARCH=$(ghc --info | + sed -n 's/.*"Target platform".*"\([^-]\+\)-[^-]\+-\([^"]\+\)".*/\1-\2/p') + PACKAGEDB="$SANDBOX/${GHC_ARCH}-ghc-${GHC_VER}-packages.conf.d" + # Assume that if the directory is already there, it is already a + # package database. We will get an error immediately below if it + # isn't. Uses -r to try to be compatible with Solaris, and allow + # symlinks as well as a normal dir/file. + [ ! -r "$PACKAGEDB" ] && ghc-pkg init "$PACKAGEDB" + PREFIX="$SANDBOX" + SCOPE_OF_INSTALLATION="--package-db=$PACKAGEDB" + echo Bootstrapping in sandbox at \'$SANDBOX\'. +fi + +# Check for haddock unless no documentation should be generated. +if [ ! ${NO_DOCUMENTATION} ] +then + ${HADDOCK} --version > /dev/null 2>&1 || die "${HADDOCK} not found." +fi + +PREFIX=${PREFIX:-${DEFAULT_PREFIX}} + +# Versions of the packages to install. +# The version regex says what existing installed versions are ok. +PARSEC_VER="3.1.9"; PARSEC_VER_REGEXP="[3]\.[01]\." + # >= 3.0 && < 3.2 +DEEPSEQ_VER="1.4.2.0"; DEEPSEQ_VER_REGEXP="1\.[1-9]\." + # >= 1.1 && < 2 + +case "$GHC_VER" in + 7.4*|7.6*) + # GHC 7.4 or 7.6 + BINARY_VER="0.8.2.1" + BINARY_VER_REGEXP="[0]\.[78]\.[0-2]\." # >= 0.7 && < 0.8.3 + ;; + *) + # GHC >= 7.8 + BINARY_VER="0.8.3.0" + BINARY_VER_REGEXP="[0]\.[78]\." # >= 0.7 && < 0.9 + ;; +esac + + +TEXT_VER="1.2.2.1"; TEXT_VER_REGEXP="((1\.[012]\.)|(0\.([2-9]|(1[0-1]))\.))" + # >= 0.2 && < 1.3 +NETWORK_VER="2.6.3.1"; NETWORK_VER_REGEXP="2\.[0-6]\." + # >= 2.0 && < 2.7 +NETWORK_URI_VER="2.6.1.0"; NETWORK_URI_VER_REGEXP="2\.6\." 
+ # >= 2.6 && < 2.7 +CABAL_VER="1.24.2.0"; CABAL_VER_REGEXP="1\.24\.[2-9]" + # >= 1.24.2 && < 1.25 +TRANS_VER="0.5.2.0"; TRANS_VER_REGEXP="0\.[45]\." + # >= 0.2.* && < 0.6 +MTL_VER="2.2.1"; MTL_VER_REGEXP="[2]\." + # >= 2.0 && < 3 +HTTP_VER="4000.3.3"; HTTP_VER_REGEXP="4000\.(2\.([5-9]|1[0-9]|2[0-9])|3\.?)" + # >= 4000.2.5 < 4000.4 +ZLIB_VER="0.6.1.2"; ZLIB_VER_REGEXP="(0\.5\.([3-9]|1[0-9])|0\.6)" + # >= 0.5.3 && <= 0.7 +TIME_VER="1.7" TIME_VER_REGEXP="1\.[1-7]\.?" + # >= 1.1 && < 1.8 +RANDOM_VER="1.1" RANDOM_VER_REGEXP="1\.[01]\.?" + # >= 1 && < 1.2 +STM_VER="2.4.4.1"; STM_VER_REGEXP="2\." + # == 2.* +ASYNC_VER="2.1.0"; ASYNC_VER_REGEXP="2\." + # 2.* +OLD_TIME_VER="1.1.0.3"; OLD_TIME_VER_REGEXP="1\.[01]\.?" + # >=1.0.0.0 && <1.2 +OLD_LOCALE_VER="1.0.0.7"; OLD_LOCALE_VER_REGEXP="1\.0\.?" + # >=1.0.0.0 && <1.1 +BASE16_BYTESTRING_VER="0.1.1.6"; BASE16_BYTESTRING_VER_REGEXP="0\.1" + # 0.1.* +BASE64_BYTESTRING_VER="1.0.0.1"; BASE64_BYTESTRING_REGEXP="1\." + # >=1.0 +CRYPTOHASH_SHA256_VER="0.11.100.1"; CRYPTOHASH_SHA256_VER_REGEXP="0\.11\.?" + # 0.11.* +ED25519_VER="0.0.5.0"; ED25519_VER_REGEXP="0\.0\.?" + # 0.0.* +HACKAGE_SECURITY_VER="0.5.2.2"; HACKAGE_SECURITY_VER_REGEXP="0\.5\.(2\.[2-9]|[3-9])" + # >= 0.5.2 && < 0.6 +BYTESTRING_BUILDER_VER="0.10.8.1.0"; BYTESTRING_BUILDER_VER_REGEXP="0\.10\.?" +TAR_VER="0.5.0.3"; TAR_VER_REGEXP="0\.5\.([1-9]|1[0-9]|0\.[3-9]|0\.1[0-9])\.?" + # >= 0.5.0.3 && < 0.6 +HASHABLE_VER="1.2.4.0"; HASHABLE_VER_REGEXP="1\." + # 1.* + +HACKAGE_URL="https://hackage.haskell.org/package" + +# Haddock fails for network-2.5.0.0, and for hackage-security for +# GHC <8, c.f. https://github.com/well-typed/hackage-security/issues/149 +NO_DOCS_PACKAGES_VER_REGEXP="network-uri-2\.5\.[0-9]+\.[0-9]+|hackage-security-0\.5\.[0-9]+\.[0-9]+" + +# Cache the list of packages: +echo "Checking installed packages for ghc-${GHC_VER}..." +${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg.list || + die "running '${GHC_PKG} list' failed" + +# Will we need to install this package, or is a suitable version installed? +need_pkg () { + PKG=$1 + VER_MATCH=$2 + if egrep " ${PKG}-${VER_MATCH}" ghc-pkg.list > /dev/null 2>&1 + then + return 1; + else + return 0; + fi + #Note: we cannot use "! grep" here as Solaris 9 /bin/sh doesn't like it. +} + +info_pkg () { + PKG=$1 + VER=$2 + VER_MATCH=$3 + + if need_pkg ${PKG} ${VER_MATCH} + then + if [ -r "${PKG}-${VER}.tar.gz" ] + then + echo "${PKG}-${VER} will be installed from local tarball." + else + echo "${PKG}-${VER} will be downloaded and installed." + fi + else + echo "${PKG} is already installed and the version is ok." + fi +} + +fetch_pkg () { + PKG=$1 + VER=$2 + + URL_PKG=${HACKAGE_URL}/${PKG}-${VER}/${PKG}-${VER}.tar.gz + URL_PKGDESC=${HACKAGE_URL}/${PKG}-${VER}/${PKG}.cabal + if which ${CURL} > /dev/null + then + # TODO: switch back to resuming curl command once + # https://github.com/haskell/hackage-server/issues/111 is resolved + #${CURL} -L --fail -C - -O ${URL_PKG} || die "Failed to download ${PKG}." + ${CURL} -L --fail -O ${URL_PKG} || die "Failed to download ${PKG}." + ${CURL} -L --fail -O ${URL_PKGDESC} \ + || die "Failed to download '${PKG}.cabal'." + elif which ${WGET} > /dev/null + then + ${WGET} -c ${URL_PKG} || die "Failed to download ${PKG}." + ${WGET} -c ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." + elif which ${FETCH} > /dev/null + then + ${FETCH} ${URL_PKG} || die "Failed to download ${PKG}." + ${FETCH} ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." + else + die "Failed to find a downloader. 
'curl', 'wget' or 'fetch' is required." + fi + [ -f "${PKG}-${VER}.tar.gz" ] || + die "Downloading ${URL_PKG} did not create ${PKG}-${VER}.tar.gz" + [ -f "${PKG}.cabal" ] || + die "Downloading ${URL_PKGDESC} did not create ${PKG}.cabal" + mv "${PKG}.cabal" "${PKG}.cabal.hackage" +} + +unpack_pkg () { + PKG=$1 + VER=$2 + + rm -rf "${PKG}-${VER}.tar" "${PKG}-${VER}" + ${GZIP_PROGRAM} -d < "${PKG}-${VER}.tar.gz" | ${TAR} -xf - + [ -d "${PKG}-${VER}" ] || die "Failed to unpack ${PKG}-${VER}.tar.gz" + cp "${PKG}.cabal.hackage" "${PKG}-${VER}/${PKG}.cabal" +} + +install_pkg () { + PKG=$1 + VER=$2 + + [ -x Setup ] && ./Setup clean + [ -f Setup ] && rm Setup + + ${GHC} --make ${JOBS} Setup -o Setup || + die "Compiling the Setup script failed." + + [ -x Setup ] || die "The Setup script does not exist or cannot be run" + + args="${SCOPE_OF_INSTALLATION} --prefix=${PREFIX} --with-compiler=${GHC}" + args="$args --with-hc-pkg=${GHC_PKG} --with-gcc=${CC} --with-ld=${LD}" + args="$args ${EXTRA_CONFIGURE_OPTS} ${VERBOSE}" + + ./Setup configure $args || die "Configuring the ${PKG} package failed." + + ./Setup build ${JOBS} ${EXTRA_BUILD_OPTS} ${VERBOSE} || + die "Building the ${PKG} package failed." + + if [ ! ${NO_DOCUMENTATION} ] + then + if echo "${PKG}-${VER}" | egrep ${NO_DOCS_PACKAGES_VER_REGEXP} \ + > /dev/null 2>&1 + then + echo "Skipping documentation for the ${PKG} package." + else + ./Setup haddock --with-ghc=${GHC} --with-haddock=${HADDOCK} ${VERBOSE} || + die "Documenting the ${PKG} package failed." + fi + fi + + ./Setup install ${EXTRA_INSTALL_OPTS} ${VERBOSE} || + die "Installing the ${PKG} package failed." +} + +do_pkg () { + PKG=$1 + VER=$2 + VER_MATCH=$3 + + if need_pkg ${PKG} ${VER_MATCH} + then + echo + if [ -r "${PKG}-${VER}.tar.gz" ] + then + echo "Using local tarball for ${PKG}-${VER}." + else + echo "Downloading ${PKG}-${VER}..." + fetch_pkg ${PKG} ${VER} + fi + unpack_pkg ${PKG} ${VER} + cd "${PKG}-${VER}" + install_pkg ${PKG} ${VER} + cd .. + fi +} + +# If we're bootstrapping from a Git clone, install the local version of Cabal +# instead of downloading one from Hackage. +do_Cabal_pkg () { + if [ -d "../.git" ] + then + if need_pkg "Cabal" ${CABAL_VER_REGEXP} + then + echo "Cabal-${CABAL_VER} will be installed from the local Git clone." + cd ../Cabal + install_pkg ${CABAL_VER} ${CABAL_VER_REGEXP} + cd ../cabal-install + else + echo "Cabal is already installed and the version is ok." + fi + else + info_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} + do_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} + fi +} + +# Replicate the flag selection logic for network-uri in the .cabal file. +do_network_uri_pkg () { + # Refresh installed package list. + ${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg-stage2.list \ + || die "running '${GHC_PKG} list' failed" + + NETWORK_URI_DUMMY_VER="2.5.0.0"; NETWORK_URI_DUMMY_VER_REGEXP="2\.5\." # < 2.6 + if egrep " network-2\.[6-9]\." ghc-pkg-stage2.list > /dev/null 2>&1 + then + # Use network >= 2.6 && network-uri >= 2.6 + info_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} + do_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} + else + # Use network < 2.6 && network-uri < 2.6 + info_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} \ + ${NETWORK_URI_DUMMY_VER_REGEXP} + do_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} \ + ${NETWORK_URI_DUMMY_VER_REGEXP} + fi +} + +# Conditionally install bytestring-builder if bytestring is < 0.10.2. +do_bytestring_builder_pkg () { + if egrep "bytestring-0\.(9|10\.[0,1])\.?" 
ghc-pkg-stage2.list > /dev/null 2>&1 + then + info_pkg "bytestring-builder" ${BYTESTRING_BUILDER_VER} \ + ${BYTESTRING_BUILDER_VER_REGEXP} + do_pkg "bytestring-builder" ${BYTESTRING_BUILDER_VER} \ + ${BYTESTRING_BUILDER_VER_REGEXP} + fi +} + +# Actually do something! + +info_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} +info_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} +info_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} +info_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} +info_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} +info_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} +info_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} +info_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} +info_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} +info_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} +info_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} +info_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} +info_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} +info_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} +info_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} +info_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ + ${BASE16_BYTESTRING_VER_REGEXP} +info_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ + ${BASE64_BYTESTRING_VER_REGEXP} +info_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ + ${CRYPTOHASH_SHA256_VER_REGEXP} +info_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} +info_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} +info_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} +info_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ + ${HACKAGE_SECURITY_VER_REGEXP} + +do_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} +do_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} +do_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} + +# Install the Cabal library from the local Git clone if possible. +do_Cabal_pkg + +do_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} +do_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} +do_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} +do_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} +do_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} + +# We conditionally install network-uri, depending on the network version. +do_network_uri_pkg + +do_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} +do_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} +do_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} +do_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} +do_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} +do_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} +do_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} +do_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ + ${BASE16_BYTESTRING_VER_REGEXP} +do_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ + ${BASE64_BYTESTRING_VER_REGEXP} +do_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ + ${CRYPTOHASH_SHA256_VER_REGEXP} +do_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} + +# We conditionally install bytestring-builder, depending on the bytestring +# version. +do_bytestring_builder_pkg + +do_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} +do_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} +do_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ + ${HACKAGE_SECURITY_VER_REGEXP} + + +install_pkg "cabal-install" + +# Use the newly built cabal to turn the prefix/package database into a +# legit cabal sandbox. This works because 'cabal sandbox init' will +# reuse the already existing package database and other files if they +# are in the expected locations. +[ ! 
-z "$SANDBOX" ] && $SANDBOX/bin/cabal sandbox init --sandbox $SANDBOX + +echo +echo "===========================================" +CABAL_BIN="$PREFIX/bin" +if [ -x "$CABAL_BIN/cabal" ] +then + echo "The 'cabal' program has been installed in $CABAL_BIN/" + echo "You should either add $CABAL_BIN to your PATH" + echo "or copy the cabal program to a directory that is on your PATH." + echo + echo "The first thing to do is to get the latest list of packages with:" + echo " cabal update" + echo "This will also create a default config file (if it does not already" + echo "exist) at $HOME/.cabal/config" + echo + echo "By default cabal will install programs to $HOME/.cabal/bin" + echo "If you do not want to add this directory to your PATH then you can" + echo "change the setting in the config file, for example you could use:" + echo "symlink-bindir: $HOME/bin" +else + echo "Sorry, something went wrong." + echo "The 'cabal' executable was not successfully installed into" + echo "$CABAL_BIN/" +fi +echo + +rm ghc-pkg.list diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/cabal-install.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/cabal-install.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/cabal-install.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/cabal-install.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,418 @@ +Name: cabal-install +Version: 1.24.0.2 +Synopsis: The command-line interface for Cabal and Hackage. +Description: + The \'cabal\' command-line program simplifies the process of managing + Haskell software by automating the fetching, configuration, compilation + and installation of Haskell libraries and programs. +homepage: http://www.haskell.org/cabal/ +bug-reports: https://github.com/haskell/cabal/issues +License: BSD3 +License-File: LICENSE +Author: Lemmih + Paolo Martini + Bjorn Bringert + Isaac Potoczny-Jones + Duncan Coutts +Maintainer: cabal-devel@haskell.org +Copyright: 2005 Lemmih + 2006 Paolo Martini + 2007 Bjorn Bringert + 2007 Isaac Potoczny-Jones + 2007-2012 Duncan Coutts +Category: Distribution +Build-type: Custom +Cabal-Version: >= 1.10 +Extra-Source-Files: + README.md bash-completion/cabal bootstrap.sh changelog + tests/README.md + + -- Generated with '../Cabal/misc/gen-extra-source-files.sh' + -- Do NOT edit this section manually; instead, run the script. 
+ -- BEGIN gen-extra-source-files + tests/IntegrationTests/custom-setup/common.sh + tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal + tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs + tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal + tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs + tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs + tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal + tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs + tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal + tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs + tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal + tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh + tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh + tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh + tests/IntegrationTests/custom/common.sh + tests/IntegrationTests/custom/should_run/plain.err + tests/IntegrationTests/custom/should_run/plain.sh + tests/IntegrationTests/custom/should_run/plain/A.hs + tests/IntegrationTests/custom/should_run/plain/Setup.hs + tests/IntegrationTests/custom/should_run/plain/plain.cabal + tests/IntegrationTests/exec/common.sh + tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err + tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh + tests/IntegrationTests/exec/should_run/Foo.hs + tests/IntegrationTests/exec/should_run/My.hs + tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out + tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh + tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out + tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh + tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out + tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh + tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh + tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh + tests/IntegrationTests/exec/should_run/my.cabal + tests/IntegrationTests/exec/should_run/runs_given_command.out + tests/IntegrationTests/exec/should_run/runs_given_command.sh + tests/IntegrationTests/freeze/common.sh + tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh + tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh + tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh + tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh + tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh + tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh + tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh + tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh + tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh + tests/IntegrationTests/freeze/should_run/my.cabal + tests/IntegrationTests/freeze/should_run/runs_without_error.sh + tests/IntegrationTests/manpage/common.sh + 
tests/IntegrationTests/manpage/should_run/outputs_manpage.sh + tests/IntegrationTests/multiple-source/common.sh + tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh + tests/IntegrationTests/multiple-source/should_run/p/Setup.hs + tests/IntegrationTests/multiple-source/should_run/p/p.cabal + tests/IntegrationTests/multiple-source/should_run/q/Setup.hs + tests/IntegrationTests/multiple-source/should_run/q/q.cabal + tests/IntegrationTests/new-build/monitor_cabal_files.sh + tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs + tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs + tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal + tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs + tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs + tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in + tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in + tests/IntegrationTests/regression/common.sh + tests/IntegrationTests/regression/t3199.sh + tests/IntegrationTests/regression/t3199/Main.hs + tests/IntegrationTests/regression/t3199/Setup.hs + tests/IntegrationTests/regression/t3199/test-3199.cabal + tests/IntegrationTests/sandbox-sources/common.sh + tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err + tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh + tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs + tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal + tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs + tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal + tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs + tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal + tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs + tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal + tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh + tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out + tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh + tests/IntegrationTests/user-config/common.sh + tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err + tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh + tests/IntegrationTests/user-config/should_run/overwrites_with_f.out + tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh + tests/IntegrationTests/user-config/should_run/runs_without_error.out + tests/IntegrationTests/user-config/should_run/runs_without_error.sh + tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out + tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh + -- END gen-extra-source-files + +source-repository head + type: git + location: https://github.com/haskell/cabal/ + subdir: cabal-install + +Flag old-bytestring + description: Use bytestring < 0.10.2 and bytestring-builder + default: False + +Flag old-directory + description: Use directory < 1.2 and old-time + default: False + +Flag network-uri + description: Get Network.URI from the network-uri package + default: True + +executable cabal + main-is: Main.hs + ghc-options: -Wall -fwarn-tabs + if impl(ghc >= 8.0) + ghc-options: -Wcompat + -Wnoncanonical-monad-instances + -Wnoncanonical-monadfail-instances + + other-modules: + Distribution.Client.BuildTarget + 
Distribution.Client.BuildReports.Anonymous + Distribution.Client.BuildReports.Storage + Distribution.Client.BuildReports.Types + Distribution.Client.BuildReports.Upload + Distribution.Client.Check + Distribution.Client.CmdBuild + Distribution.Client.CmdConfigure + Distribution.Client.CmdRepl + Distribution.Client.ComponentDeps + Distribution.Client.Config + Distribution.Client.Configure + Distribution.Client.Dependency + Distribution.Client.Dependency.TopDown + Distribution.Client.Dependency.TopDown.Constraints + Distribution.Client.Dependency.TopDown.Types + Distribution.Client.Dependency.Types + Distribution.Client.Dependency.Modular + Distribution.Client.Dependency.Modular.Assignment + Distribution.Client.Dependency.Modular.Builder + Distribution.Client.Dependency.Modular.Configured + Distribution.Client.Dependency.Modular.ConfiguredConversion + Distribution.Client.Dependency.Modular.ConflictSet + Distribution.Client.Dependency.Modular.Cycles + Distribution.Client.Dependency.Modular.Dependency + Distribution.Client.Dependency.Modular.Explore + Distribution.Client.Dependency.Modular.Flag + Distribution.Client.Dependency.Modular.Index + Distribution.Client.Dependency.Modular.IndexConversion + Distribution.Client.Dependency.Modular.Linking + Distribution.Client.Dependency.Modular.Log + Distribution.Client.Dependency.Modular.Message + Distribution.Client.Dependency.Modular.Package + Distribution.Client.Dependency.Modular.Preference + Distribution.Client.Dependency.Modular.PSQ + Distribution.Client.Dependency.Modular.Solver + Distribution.Client.Dependency.Modular.Tree + Distribution.Client.Dependency.Modular.Validate + Distribution.Client.Dependency.Modular.Var + Distribution.Client.Dependency.Modular.Version + Distribution.Client.DistDirLayout + Distribution.Client.Exec + Distribution.Client.Fetch + Distribution.Client.FetchUtils + Distribution.Client.FileMonitor + Distribution.Client.Freeze + Distribution.Client.GenBounds + Distribution.Client.Get + Distribution.Client.Glob + Distribution.Client.GlobalFlags + Distribution.Client.GZipUtils + Distribution.Client.Haddock + Distribution.Client.HttpUtils + Distribution.Client.IndexUtils + Distribution.Client.Init + Distribution.Client.Init.Heuristics + Distribution.Client.Init.Licenses + Distribution.Client.Init.Types + Distribution.Client.Install + Distribution.Client.InstallPlan + Distribution.Client.InstallSymlink + Distribution.Client.JobControl + Distribution.Client.List + Distribution.Client.Manpage + Distribution.Client.PackageHash + Distribution.Client.PackageIndex + Distribution.Client.PackageUtils + Distribution.Client.ParseUtils + Distribution.Client.PkgConfigDb + Distribution.Client.PlanIndex + Distribution.Client.ProjectBuilding + Distribution.Client.ProjectConfig + Distribution.Client.ProjectConfig.Types + Distribution.Client.ProjectConfig.Legacy + Distribution.Client.ProjectOrchestration + Distribution.Client.ProjectPlanning + Distribution.Client.ProjectPlanning.Types + Distribution.Client.ProjectPlanOutput + Distribution.Client.Run + Distribution.Client.RebuildMonad + Distribution.Client.Sandbox + Distribution.Client.Sandbox.Index + Distribution.Client.Sandbox.PackageEnvironment + Distribution.Client.Sandbox.Timestamp + Distribution.Client.Sandbox.Types + Distribution.Client.Security.HTTP + Distribution.Client.Setup + Distribution.Client.SetupWrapper + Distribution.Client.SrcDist + Distribution.Client.Tar + Distribution.Client.Targets + Distribution.Client.Types + Distribution.Client.Update + Distribution.Client.Upload + 
Distribution.Client.Utils + Distribution.Client.Utils.LabeledGraph + Distribution.Client.Utils.Json + Distribution.Client.World + Distribution.Client.Win32SelfUpgrade + Distribution.Client.Compat.ExecutablePath + Distribution.Client.Compat.FilePerms + Distribution.Client.Compat.Process + Distribution.Client.Compat.Semaphore + Distribution.Client.Compat.Time + Paths_cabal_install + + -- NOTE: when updating build-depends, don't forget to update version regexps + -- in bootstrap.sh. + build-depends: + async >= 2.0 && < 3, + array >= 0.4 && < 0.6, + base >= 4.5 && < 5, + base16-bytestring >= 0.1.1 && < 0.2, + binary >= 0.5 && < 0.9, + bytestring >= 0.9 && < 1, + Cabal >= 1.24.2 && < 1.25, + containers >= 0.4 && < 0.6, + cryptohash-sha256 >= 0.11 && < 0.12, + filepath >= 1.3 && < 1.5, + hashable >= 1.0 && < 2, + HTTP >= 4000.1.5 && < 4000.4, + mtl >= 2.0 && < 3, + pretty >= 1.1 && < 1.2, + random >= 1 && < 1.2, + stm >= 2.0 && < 3, + tar >= 0.5.0.3 && < 0.6, + time >= 1.4 && < 1.8, + zlib >= 0.5.3 && < 0.7, + hackage-security >= 0.5.2.2 && < 0.6 + + if flag(old-bytestring) + build-depends: bytestring < 0.10.2, bytestring-builder >= 0.10 && < 1 + else + build-depends: bytestring >= 0.10.2 + + if flag(old-directory) + build-depends: directory >= 1.1 && < 1.2, old-time >= 1 && < 1.2, + process >= 1.0.1.1 && < 1.1.0.2 + else + build-depends: directory >= 1.2 && < 1.4, + process >= 1.1.0.2 && < 1.5 + + -- NOTE: you MUST include the network dependency even when network-uri + -- is pulled in, otherwise the constraint solver doesn't have enough + -- information + if flag(network-uri) + build-depends: network-uri >= 2.6 && < 2.7, network >= 2.6 && < 2.7 + else + build-depends: network >= 2.4 && < 2.6 + + -- Needed for GHC.Generics before GHC 7.6 + if impl(ghc < 7.6) + build-depends: ghc-prim >= 0.2 && < 0.3 + + if os(windows) + build-depends: Win32 >= 2 && < 3 + else + build-depends: unix >= 2.5 && < 2.8 + + if arch(arm) && impl(ghc < 7.6) + -- older ghc on arm does not support -threaded + cc-options: -DCABAL_NO_THREADED + else + ghc-options: -threaded + + c-sources: cbits/getnumcores.c + default-language: Haskell2010 + +-- Small, fast running tests. +Test-Suite unit-tests + type: exitcode-stdio-1.0 + main-is: UnitTests.hs + hs-source-dirs: tests, . 
+ ghc-options: -Wall -fwarn-tabs + other-modules: + UnitTests.Distribution.Client.ArbitraryInstances + UnitTests.Distribution.Client.Targets + UnitTests.Distribution.Client.Compat.Time + UnitTests.Distribution.Client.Dependency.Modular.PSQ + UnitTests.Distribution.Client.Dependency.Modular.Solver + UnitTests.Distribution.Client.Dependency.Modular.DSL + UnitTests.Distribution.Client.FileMonitor + UnitTests.Distribution.Client.Glob + UnitTests.Distribution.Client.GZipUtils + UnitTests.Distribution.Client.Sandbox + UnitTests.Distribution.Client.Sandbox.Timestamp + UnitTests.Distribution.Client.Tar + UnitTests.Distribution.Client.UserConfig + UnitTests.Distribution.Client.ProjectConfig + UnitTests.Options + build-depends: + base, + array, + bytestring, + Cabal, + containers, + mtl, + pretty, + process, + directory, + filepath, + hashable, + stm, + tar, + time, + HTTP, + zlib, + binary, + random, + hackage-security, + tasty, + tasty-hunit, + tasty-quickcheck, + tagged, + QuickCheck >= 2.8.2 + + if flag(old-directory) + build-depends: old-time + + if flag(network-uri) + build-depends: network-uri >= 2.6, network >= 2.6 + else + build-depends: network-uri < 2.6, network < 2.6 + + if impl(ghc < 7.6) + build-depends: ghc-prim >= 0.2 && < 0.3 + + if os(windows) + build-depends: Win32 + else + build-depends: unix + + if arch(arm) + cc-options: -DCABAL_NO_THREADED + else + ghc-options: -threaded + default-language: Haskell2010 + +test-suite integration-tests + type: exitcode-stdio-1.0 + hs-source-dirs: tests + main-is: IntegrationTests.hs + build-depends: + Cabal, + async, + base, + bytestring, + directory, + filepath, + process, + regex-posix, + tasty, + tasty-hunit + + if os(windows) + build-depends: Win32 >= 2 && < 3 + else + build-depends: unix >= 2.5 && < 2.8 + + if arch(arm) + cc-options: -DCABAL_NO_THREADED + else + ghc-options: -threaded + + ghc-options: -Wall -fwarn-tabs -fno-ignore-asserts + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/cbits/getnumcores.c cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/cbits/getnumcores.c --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/cbits/getnumcores.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/cbits/getnumcores.c 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,46 @@ +#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 612) && !defined(CABAL_NO_THREADED) +/* Since version 6.12, GHC's threaded RTS includes a getNumberOfProcessors + function, so we try to use that if available. cabal-install is always built + with -threaded nowadays. 
*/ +#define HAS_GET_NUMBER_OF_PROCESSORS +#endif + + +#ifndef HAS_GET_NUMBER_OF_PROCESSORS + +#if defined(_WIN32) && !defined(__CYGWIN__) +#include +#elif MACOS +#include +#include +#elif __linux__ +#include +#endif + +int getNumberOfProcessors() { +#if defined(_WIN32) && !defined(__CYGWIN__) + SYSTEM_INFO sysinfo; + GetSystemInfo(&sysinfo); + return sysinfo.dwNumberOfProcessors; +#elif MACOS + int nm[2]; + size_t len = 4; + uint32_t count; + + nm[0] = CTL_HW; nm[1] = HW_AVAILCPU; + sysctl(nm, 2, &count, &len, NULL, 0); + + if(count < 1) { + nm[1] = HW_NCPU; + sysctl(nm, 2, &count, &len, NULL, 0); + if(count < 1) { count = 1; } + } + return count; +#elif __linux__ + return sysconf(_SC_NPROCESSORS_ONLN); +#else + return 1; +#endif +} + +#endif /* HAS_GET_NUMBER_OF_PROCESSORS */ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/changelog cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/changelog --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/changelog 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/changelog 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,259 @@ +-*-change-log-*- +1.24.0.2 Mikhail Glushenkov December 2016 + * Adapted to the revert of a PVP-noncompliant API change in + Cabal 1.24.2.0 (#4123). + * Bumped the directory upper bound to < 1.4 (#4158). + +1.24.0.1 Ryan Thomas October 2016 + * Fixed issue with passing '--enable-profiling' when invoking + Setup scripts built with older versions of Cabal (#3873). + * Fixed various behaviour differences between network transports + (#3429). + * Updated to depend on the latest hackage-security that fixes + various issues on Windows. + * Fixed 'new-build' to exit with a non-zero exit code on failure + (#3506). + * Store secure repo index data as 01-index.* (#3862). + * Added new hackage-security root keys for distribution with + cabal-install. + * Fix an issue where 'cabal install' sometimes had to be run twice + for packages with build-type: Custom and a custom-setup stanza + (#3723). + * 'cabal sdist' no longer ignores '--builddir' when the package's + build-type is Custom (#3794). + +1.24.0.0 Ryan Thomas May 2016 + * If there are multiple remote repos, 'cabal update' now updates + them in parallel (#2503). + * New 'cabal upload' option '-P'/'--password-command' for reading + Hackage password from arbitrary program output (#2506). + * Better warning for 'cabal run' (#2510). + * 'cabal init' now warns if the chosen package name is already + registered in the source package index (#2436). + * New 'cabal install' option: '--offline' (#2578). + * Accept 'builddir' field in cabal.config (#2484) + * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable. + * Remote repos may now be configured to use https URLs. This uses + either curl or wget or, on Windows, PowerShell, under the hood (#2687). + * Install target URLs can now use https e.g. 'cabal install + https://example.com/foo-1.0.tar.gz'. + * Automatically use https for cabal upload for the main + hackage.haskell.org (other repos will use whatever they are + configured to use). + * Support for dependencies of custom Setup.hs scripts + (see http://www.well-typed.com/blog/2015/07/cabal-setup-deps/). + * 'cabal' program itself now can be used as an external setup + method. This fixes an issue when Cabal version mismatch caused + unnecessary reconfigures (#2633). 
+ * Improved error message for unsatisfiable package constraints + (#2727). + * Fixed a space leak in 'cabal update' (#2826). + * 'cabal exec' and 'sandbox hc-pkg' now use the configured + compiler (#2859). + * New 'cabal haddock' option: '--for-hackage' (#2852). + * Added a warning when the solver cannot find a dependency (#2853). + * New 'cabal upload' option: '--doc': upload documentation to + hackage (#2890). + * Improved error handling for 'sandbox delete-source' (#2943). + * Solver support for extension and language flavours (#2873). + * Support for secure repos using hackage-security (#2983). + * Added a log file message similar to one printed by 'make' when + building in another directory (#2642). + * Added new subcommand 'init' to 'cabal user-config'. This + subcommand creates a cabal configuration file in either the + default location or as specified by --config-file (#2553). + * The man page for 'cabal-install' is now automatically generated + (#2877). + * The '--allow-newer' option now works as expected when specified + multiple times (#2588). + * New config file field: 'extra-framework-dirs' (extra locations + to find OS X frameworks in). Can be also specified as an argument + for 'install' and 'configure' commands (#3158). + * It's now possible to limit the scope of '--allow-newer' to + single packages in the install plan (#2756). + * Full '--allow-newer' syntax is now supported in the config file + (that is, 'allow-newer: base, ghc-prim, some-package:vector') + (#3171). + * Improved performance of '--reorder-goals' (#3208). + * Fixed space leaks in modular solver (#2916, #2914). + * Made the solver aware of pkg-config constraints (#3023). + * Added a new command: 'gen-bounds' (#3223). See + http://softwaresimply.blogspot.se/2015/08/cabal-gen-bounds-easy-generation-of.html. + * Tech preview of new nix-style isolated project-based builds. + Currently provides the commands (new-)build/repl/configure. + +1.22.0.0 Johan Tibell January 2015 + * New command: user-config (#2159). + * Implement 'cabal repl --only' (#2016). + * Fix an issue when 'cabal repl' was doing unnecessary compilation + (#1715). + * Prompt the user to specify source directory in 'cabal init' + (#1989). + * Remove the self-upgrade check (#2090). + * Don't redownload already downloaded packages when bootstrapping + (#2133). + * Support sandboxes in 'bootstrap.sh' (#2137). + * Install profiling and shared libs by default in 'bootstrap.sh' + (#2009). + +1.20.0.3 Johan Tibell June 2014 + * Don't attempt to rename dist if it is already named correctly + * Treat all flags of a package as interdependent. + * Allow template-haskell to be upgradable again + +1.20.0.2 Johan Tibell May 2014 + * Increase max-backjumps to 2000. + * Fix solver bug which led to missed install plans. + * Fix streaming test output. + * Tweak solver heuristics to avoid reinstalls. 
+ +1.20.0.1 Johan Tibell May 2014 + * Fix cabal repl search path bug on Windows + * Include OS and arch in cabal-install user agent + * Revert --constraint flag behavior in configure to 1.18 behavior + +1.20.0.0 Johan Tibell April 2014 + * Build only selected executables + * Add -j flag to build/test/bench/run + * Improve install log file + * Don't symlink executables when in a sandbox + * Add --package-db flag to 'list' and 'info' + * Make upload more efficient + * Add --require-sandbox option + * Add experimental Cabal file format command + * Add haddock section to config file + * Add --main-is flag to init + +0.14.0 Andres Loeh April 2012 + * Works with ghc-7.4 + * Completely new modular dependency solver (default in most cases) + * Some tweaks to old topdown dependency solver + * Install plans are now checked for reinstalls that break packages + * Flags --constraint and --preference work for nonexisting packages + * New constraint forms for source and installed packages + * New constraint form for package-specific use flags + * New constraint form for package-specific stanza flags + * Test suite dependencies are pulled in on demand + * No longer install packages on --enable-tests when tests fail + * New "cabal bench" command + * Various "cabal init" tweaks + +0.10.0 Duncan Coutts February 2011 + * New package targets: local dirs, local and remote tarballs + * Initial support for a "world" package target + * Partial fix for situation where user packages mask global ones + * Removed cabal upgrade, new --upgrade-dependencies flag + * New cabal install --only-dependencies flag + * New cabal fetch --no-dependencies and --dry-run flags + * Improved output for cabal info + * Simpler and faster bash command line completion + * Fix for broken proxies that decompress wrongly + * Fix for cabal unpack to preserve executable permissions + * Adjusted the output for the -v verbosity level in a few places + +0.8.2 Duncan Coutts March 2010 + * Fix for cabal update on Windows + * On windows switch to per-user installs (rather than global) + * Handle intra-package dependencies in dependency planning + * Minor tweaks to cabal init feature + * Fix various -Wall warnings + * Fix for cabal sdist --snapshot + +0.8.0 Duncan Coutts Dec 2009 + * Works with ghc-6.12 + * New "cabal init" command for making initial project .cabal file + * New feature to maintain an index of haddock documentation + +0.6.4 Duncan Coutts Nov 2009 + * Improve the algorithm for selecting the base package version + * Hackage errors now reported by "cabal upload [--check]" + * Improved format of messages from "cabal check" + * Config file can now be selected by an env var + * Updated tar reading/writing code + * Improve instructions in the README and bootstrap output + * Fix bootstrap.sh on Solaris 9 + * Fix bootstrap for systems where network uses parsec 3 + * Fix building with ghc-6.6 + +0.6.2 Duncan Coutts Feb 2009 + * The upgrade command has been disabled in this release + * The configure and install commands now have consistent behaviour + * Reduce the tendancy to re-install already existing packages + * The --constraint= flag now works for the install command + * New --preference= flag for soft constraints / version preferences + * Improved bootstrap.sh script, smarter and better error checking + * New cabal info command to display detailed info on packages + * New cabal unpack command to download and untar a package + * HTTP-4000 package required, should fix bugs with http proxies + * Now works with authenticated proxies. 
+ * On Windows can now override the proxy setting using an env var + * Fix compatability with config files generated by older versions + * Warn if the hackage package list is very old + * More helpful --help output, mention config file and examples + * Better documentation in ~/.cabal/config file + * Improved command line interface for logging and build reporting + * Minor improvements to some messages + +0.6.0 Duncan Coutts Oct 2008 + * Constraint solver can now cope with base 3 and base 4 + * Allow use of package version preferences from hackage index + * More detailed output from cabal install --dry-run -v + * Improved bootstrap.sh + +0.5.2 Duncan Coutts Aug 2008 + * Suport building haddock documentaion + * Self-reinstall now works on Windows + * Allow adding symlinks to excutables into a separate bindir + * New self-documenting config file + * New install --reinstall flag + * More helpful status messages in a couple places + * Upload failures now report full text error message from the server + * Support for local package repositories + * New build logging and reporting + * New command to upload build reports to (a compatible) server + * Allow tilde in hackage server URIs + * Internal code improvements + * Many other minor improvements and bug fixes + +0.5.1 Duncan Coutts June 2008 + * Restore minimal hugs support in dependency resolver + * Fix for disabled http proxies on Windows + * Revert to global installs on Windows by default + +0.5.0 Duncan Coutts June 2008 + * New package dependency resolver, solving diamond dep problem + * Integrate cabal-setup functionality + * Integrate cabal-upload functionality + * New cabal update and check commands + * Improved behavior for install and upgrade commands + * Full Windows support + * New command line handling + * Bash command line completion + * Allow case insensitive package names on command line + * New --dry-run flag for install, upgrade and fetch commands + * New --root-cmd flag to allow installing as root + * New --cabal-lib-version flag to select different Cabal lib versions + * Support for HTTP proxies + * Improved cabal list output + * Build other non-dependent packages even when some fail + * Report a summary of all build failures at the end + * Partial support for hugs + * Partial implementation of build reporting and logging + * More consistent logging and verbosity + * Significant internal code restructuring + +0.4 Duncan Coutts Oct 2007 + * Renamed executable from 'cabal-install' to 'cabal' + * Partial Windows compatability + * Do per-user installs by default + * cabal install now installs the package in the current directory + * Allow multiple remote servers + * Use zlib lib and internal tar code and rather than external tar + * Reorganised configuration files + * Significant code restructuring + * Cope with packages with conditional dependencies + +0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 + * Switch from smart-server, dumb-client model to the reverse + * New .tar.gz based index format + * New remote and local package archive format diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Anonymous.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Anonymous.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Anonymous.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Anonymous.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,315 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Reporting +-- Copyright : (c) David Waern 2008 +-- License : BSD-like +-- +-- Maintainer : david.waern@gmail.com +-- Stability : experimental +-- Portability : portable +-- +-- Anonymous build report data structure, printing and parsing +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Anonymous ( + BuildReport(..), + InstallOutcome(..), + Outcome(..), + + -- * Constructing and writing reports + new, + + -- * parsing and pretty printing + parse, + parseList, + show, +-- showList, + ) where + +import qualified Distribution.Client.Types as BR + ( BuildResult, BuildFailure(..), BuildSuccess(..) + , DocsResult(..), TestsResult(..) ) +import Distribution.Client.Utils + ( mergeBy, MergeResult(..) ) +import qualified Paths_cabal_install (version) + +import Distribution.Package + ( PackageIdentifier(..), PackageName(..) ) +import Distribution.PackageDescription + ( FlagName(..), FlagAssignment ) +--import Distribution.Version +-- ( Version ) +import Distribution.System + ( OS, Arch ) +import Distribution.Compiler + ( CompilerId(..) ) +import qualified Distribution.Text as Text + ( Text(disp, parse) ) +import Distribution.ParseUtils + ( FieldDescr(..), ParseResult(..), Field(..) + , simpleField, listField, ppFields, readFields + , syntaxError, locatedErrorMsg ) +import Distribution.Simple.Utils + ( comparing ) + +import qualified Distribution.Compat.ReadP as Parse + ( ReadP, pfail, munch1, skipSpaces ) +import qualified Text.PrettyPrint as Disp + ( Doc, render, char, text ) +import Text.PrettyPrint + ( (<+>), (<>) ) + +import Data.List + ( unfoldr, sortBy ) +import Data.Char as Char + ( isAlpha, isAlphaNum ) + +import Prelude hiding (show) + +data BuildReport + = BuildReport { + -- | The package this build report is about + package :: PackageIdentifier, + + -- | The OS and Arch the package was built on + os :: OS, + arch :: Arch, + + -- | The Haskell compiler (and hopefully version) used + compiler :: CompilerId, + + -- | The uploading client, ie cabal-install-x.y.z + client :: PackageIdentifier, + + -- | Which configurations flags we used + flagAssignment :: FlagAssignment, + + -- | Which dependent packages we were using exactly + dependencies :: [PackageIdentifier], + + -- | Did installing work ok? + installOutcome :: InstallOutcome, + + -- Which version of the Cabal library was used to compile the Setup.hs +-- cabalVersion :: Version, + + -- Which build tools we were using (with versions) +-- tools :: [PackageIdentifier], + + -- | Configure outcome, did configure work ok? + docsOutcome :: Outcome, + + -- | Configure outcome, did configure work ok? 
+ testsOutcome :: Outcome + } + +data InstallOutcome + = PlanningFailed + | DependencyFailed PackageIdentifier + | DownloadFailed + | UnpackFailed + | SetupFailed + | ConfigureFailed + | BuildFailed + | TestsFailed + | InstallFailed + | InstallOk + deriving Eq + +data Outcome = NotTried | Failed | Ok + deriving Eq + +new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment + -> [PackageIdentifier] -> BR.BuildResult -> BuildReport +new os' arch' comp pkgid flags deps result = + BuildReport { + package = pkgid, + os = os', + arch = arch', + compiler = comp, + client = cabalInstallID, + flagAssignment = flags, + dependencies = deps, + installOutcome = convertInstallOutcome, +-- cabalVersion = undefined + docsOutcome = convertDocsOutcome, + testsOutcome = convertTestsOutcome + } + where + convertInstallOutcome = case result of + Left BR.PlanningFailed -> PlanningFailed + Left (BR.DependentFailed p) -> DependencyFailed p + Left (BR.DownloadFailed _) -> DownloadFailed + Left (BR.UnpackFailed _) -> UnpackFailed + Left (BR.ConfigureFailed _) -> ConfigureFailed + Left (BR.BuildFailed _) -> BuildFailed + Left (BR.TestsFailed _) -> TestsFailed + Left (BR.InstallFailed _) -> InstallFailed + Right (BR.BuildOk _ _ _) -> InstallOk + convertDocsOutcome = case result of + Left _ -> NotTried + Right (BR.BuildOk BR.DocsNotTried _ _) -> NotTried + Right (BR.BuildOk BR.DocsFailed _ _) -> Failed + Right (BR.BuildOk BR.DocsOk _ _) -> Ok + convertTestsOutcome = case result of + Left (BR.TestsFailed _) -> Failed + Left _ -> NotTried + Right (BR.BuildOk _ BR.TestsNotTried _) -> NotTried + Right (BR.BuildOk _ BR.TestsOk _) -> Ok + +cabalInstallID :: PackageIdentifier +cabalInstallID = + PackageIdentifier (PackageName "cabal-install") Paths_cabal_install.version + +-- ------------------------------------------------------------ +-- * External format +-- ------------------------------------------------------------ + +initialBuildReport :: BuildReport +initialBuildReport = BuildReport { + package = requiredField "package", + os = requiredField "os", + arch = requiredField "arch", + compiler = requiredField "compiler", + client = requiredField "client", + flagAssignment = [], + dependencies = [], + installOutcome = requiredField "install-outcome", +-- cabalVersion = Nothing, +-- tools = [], + docsOutcome = NotTried, + testsOutcome = NotTried + } + where + requiredField fname = error ("required field: " ++ fname) + +-- ----------------------------------------------------------------------------- +-- Parsing + +parse :: String -> Either String BuildReport +parse s = case parseFields s of + ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror + ParseOk _ report -> Right report + +parseFields :: String -> ParseResult BuildReport +parseFields input = do + fields <- mapM extractField =<< readFields input + let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name) + sortedFieldDescrs + (sortBy (comparing (\(_,name,_) -> name)) fields) + checkMerged initialBuildReport merged + + where + extractField :: Field -> ParseResult (Int, String, String) + extractField (F line name value) = return (line, name, value) + extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza" + extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza" + + checkMerged report [] = return report + checkMerged report (merged:remaining) = case merged of + InBoth fieldDescr (line, _name, value) -> do + report' <- fieldSet fieldDescr line value report + checkMerged report' 
remaining + OnlyInRight (line, name, _) -> + syntaxError line ("Unrecognized field " ++ name) + OnlyInLeft fieldDescr -> + fail ("Missing field " ++ fieldName fieldDescr) + +parseList :: String -> [BuildReport] +parseList str = + [ report | Right report <- map parse (split str) ] + + where + split :: String -> [String] + split = filter (not . null) . unfoldr chunk . lines + chunk [] = Nothing + chunk ls = case break null ls of + (r, rs) -> Just (unlines r, dropWhile null rs) + +-- ----------------------------------------------------------------------------- +-- Pretty-printing + +show :: BuildReport -> String +show = Disp.render . ppFields fieldDescrs + +-- ----------------------------------------------------------------------------- +-- Description of the fields, for parsing/printing + +fieldDescrs :: [FieldDescr BuildReport] +fieldDescrs = + [ simpleField "package" Text.disp Text.parse + package (\v r -> r { package = v }) + , simpleField "os" Text.disp Text.parse + os (\v r -> r { os = v }) + , simpleField "arch" Text.disp Text.parse + arch (\v r -> r { arch = v }) + , simpleField "compiler" Text.disp Text.parse + compiler (\v r -> r { compiler = v }) + , simpleField "client" Text.disp Text.parse + client (\v r -> r { client = v }) + , listField "flags" dispFlag parseFlag + flagAssignment (\v r -> r { flagAssignment = v }) + , listField "dependencies" Text.disp Text.parse + dependencies (\v r -> r { dependencies = v }) + , simpleField "install-outcome" Text.disp Text.parse + installOutcome (\v r -> r { installOutcome = v }) + , simpleField "docs-outcome" Text.disp Text.parse + docsOutcome (\v r -> r { docsOutcome = v }) + , simpleField "tests-outcome" Text.disp Text.parse + testsOutcome (\v r -> r { testsOutcome = v }) + ] + +sortedFieldDescrs :: [FieldDescr BuildReport] +sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs + +dispFlag :: (FlagName, Bool) -> Disp.Doc +dispFlag (FlagName name, True) = Disp.text name +dispFlag (FlagName name, False) = Disp.char '-' <> Disp.text name + +parseFlag :: Parse.ReadP r (FlagName, Bool) +parseFlag = do + name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') + case name of + ('-':flag) -> return (FlagName flag, False) + flag -> return (FlagName flag, True) + +instance Text.Text InstallOutcome where + disp PlanningFailed = Disp.text "PlanningFailed" + disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid + disp DownloadFailed = Disp.text "DownloadFailed" + disp UnpackFailed = Disp.text "UnpackFailed" + disp SetupFailed = Disp.text "SetupFailed" + disp ConfigureFailed = Disp.text "ConfigureFailed" + disp BuildFailed = Disp.text "BuildFailed" + disp TestsFailed = Disp.text "TestsFailed" + disp InstallFailed = Disp.text "InstallFailed" + disp InstallOk = Disp.text "InstallOk" + + parse = do + name <- Parse.munch1 Char.isAlphaNum + case name of + "PlanningFailed" -> return PlanningFailed + "DependencyFailed" -> do Parse.skipSpaces + pkgid <- Text.parse + return (DependencyFailed pkgid) + "DownloadFailed" -> return DownloadFailed + "UnpackFailed" -> return UnpackFailed + "SetupFailed" -> return SetupFailed + "ConfigureFailed" -> return ConfigureFailed + "BuildFailed" -> return BuildFailed + "TestsFailed" -> return TestsFailed + "InstallFailed" -> return InstallFailed + "InstallOk" -> return InstallOk + _ -> Parse.pfail + +instance Text.Text Outcome where + disp NotTried = Disp.text "NotTried" + disp Failed = Disp.text "Failed" + disp Ok = Disp.text "Ok" + parse = do + name <- Parse.munch1 Char.isAlpha 
+ case name of + "NotTried" -> return NotTried + "Failed" -> return Failed + "Ok" -> return Ok + _ -> Parse.pfail diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Storage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Storage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Storage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Storage.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,161 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Reporting +-- Copyright : (c) David Waern 2008 +-- License : BSD-like +-- +-- Maintainer : david.waern@gmail.com +-- Stability : experimental +-- Portability : portable +-- +-- Anonymous build report data structure, printing and parsing +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Storage ( + + -- * Storing and retrieving build reports + storeAnonymous, + storeLocal, +-- retrieve, + + -- * 'InstallPlan' support + fromInstallPlan, + fromPlanningFailure, + ) where + +import qualified Distribution.Client.BuildReports.Anonymous as BuildReport +import Distribution.Client.BuildReports.Anonymous (BuildReport) + +import Distribution.Client.Types +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Client.InstallPlan + ( InstallPlan ) + +import Distribution.Package + ( PackageId, packageId ) +import Distribution.PackageDescription + ( FlagAssignment ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate + , initialPathTemplateEnv, substPathTemplate ) +import Distribution.System + ( Platform(Platform) ) +import Distribution.Compiler + ( CompilerId(..), CompilerInfo(..) ) +import Distribution.Simple.Utils + ( comparing, equating ) + +import Data.List + ( groupBy, sortBy ) +import Data.Maybe + ( catMaybes ) +import System.FilePath + ( (), takeDirectory ) +import System.Directory + ( createDirectoryIfMissing ) + +storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () +storeAnonymous reports = sequence_ + [ appendFile file (concatMap format reports') + | (repo, reports') <- separate reports + , let file = repoLocalDir repo "build-reports.log" ] + --TODO: make this concurrency safe, either lock the report file or make sure + -- the writes for each report are atomic (under 4k and flush at boundaries) + + where + format r = '\n' : BuildReport.show r ++ "\n" + separate :: [(BuildReport, Maybe Repo)] + -> [(Repo, [BuildReport])] + separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) + . map concat + . groupBy (equating (repoName . head)) + . sortBy (comparing (repoName . head)) + . groupBy (equating repoName) + . 
onlyRemote + repoName (_,_,rrepo) = remoteRepoName rrepo + + onlyRemote :: [(BuildReport, Maybe Repo)] + -> [(BuildReport, Repo, RemoteRepo)] + onlyRemote rs = + [ (report, repo, remoteRepo) + | (report, Just repo) <- rs + , Just remoteRepo <- [maybeRepoRemote repo] + ] + +storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] + -> Platform -> IO () +storeLocal cinfo templates reports platform = sequence_ + [ do createDirectoryIfMissing True (takeDirectory file) + appendFile file output + --TODO: make this concurrency safe, either lock the report file or make + -- sure the writes for each report are atomic + | (file, reports') <- groupByFileName + [ (reportFileName template report, report) + | template <- templates + , (report, _repo) <- reports ] + , let output = concatMap format reports' + ] + where + format r = '\n' : BuildReport.show r ++ "\n" + + reportFileName template report = + fromPathTemplate (substPathTemplate env template) + where env = initialPathTemplateEnv + (BuildReport.package report) + -- ToDo: In principle, we can support $pkgkey, but only + -- if the configure step succeeds. So add a Maybe field + -- to the build report, and either use that or make up + -- a fake identifier if it's not available. + (error "storeLocal: package key not available") + cinfo + platform + + groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) + . groupBy (equating fst) + . sortBy (comparing fst) + +-- ------------------------------------------------------------ +-- * InstallPlan support +-- ------------------------------------------------------------ + +fromInstallPlan :: Platform -> CompilerId + -> InstallPlan + -> [(BuildReport, Maybe Repo)] +fromInstallPlan platform comp plan = + catMaybes + . map (fromPlanPackage platform comp) + . 
InstallPlan.toList + $ plan + +fromPlanPackage :: Platform -> CompilerId + -> InstallPlan.PlanPackage + -> Maybe (BuildReport, Maybe Repo) +fromPlanPackage (Platform arch os) comp planPackage = case planPackage of + InstallPlan.Installed (ReadyPackage (ConfiguredPackage srcPkg flags _ _) deps) + _ result + -> Just $ ( BuildReport.new os arch comp + (packageId srcPkg) flags + (map packageId (CD.nonSetupDeps deps)) + (Right result) + , extractRepo srcPkg) + + InstallPlan.Failed (ConfiguredPackage srcPkg flags _ deps) result + -> Just $ ( BuildReport.new os arch comp + (packageId srcPkg) flags + (map confSrcId (CD.nonSetupDeps deps)) + (Left result) + , extractRepo srcPkg ) + + _ -> Nothing + + where + extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) + = Just repo + extractRepo _ = Nothing + +fromPlanningFailure :: Platform -> CompilerId + -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] +fromPlanningFailure (Platform arch os) comp pkgids flags = + [ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing) + | pkgid <- pkgids ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Types.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,50 @@ +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.BuildReports.Types +-- Copyright : (c) Duncan Coutts 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Types related to build reporting +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Types ( + ReportLevel(..), + ) where + +import qualified Distribution.Text as Text + ( Text(..) 
) + +import qualified Distribution.Compat.ReadP as Parse + ( pfail, munch1 ) +import qualified Text.PrettyPrint as Disp + ( text ) + +import Data.Char as Char + ( isAlpha, toLower ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary) + + +data ReportLevel = NoReports | AnonymousReports | DetailedReports + deriving (Eq, Ord, Enum, Show, Generic) + +instance Binary ReportLevel + +instance Text.Text ReportLevel where + disp NoReports = Disp.text "none" + disp AnonymousReports = Disp.text "anonymous" + disp DetailedReports = Disp.text "detailed" + parse = do + name <- Parse.munch1 Char.isAlpha + case lowercase name of + "none" -> return NoReports + "anonymous" -> return AnonymousReports + "detailed" -> return DetailedReports + _ -> Parse.pfail + +lowercase :: String -> String +lowercase = map Char.toLower diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Upload.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Upload.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Upload.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildReports/Upload.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,92 @@ +{-# LANGUAGE CPP, PatternGuards #-} +-- This is a quick hack for uploading build reports to Hackage. + +module Distribution.Client.BuildReports.Upload + ( BuildLog + , BuildReportId + , uploadReports + ) where + +{- +import Network.Browser + ( BrowserAction, request, setAllowRedirects ) +import Network.HTTP + ( Header(..), HeaderName(..) + , Request(..), RequestMethod(..), Response(..) ) +import Network.TCP (HandleStream) +-} +import Network.URI (URI, uriPath) --parseRelativeReference, relativeTo) + +import Control.Monad + ( forM_ ) +import System.FilePath.Posix + ( () ) +import qualified Distribution.Client.BuildReports.Anonymous as BuildReport +import Distribution.Client.BuildReports.Anonymous (BuildReport) +import Distribution.Text (display) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils (die) +import Distribution.Client.HttpUtils +import Distribution.Client.Setup + ( RepoContext(..) 
) + +type BuildReportId = URI +type BuildLog = String + +uploadReports :: Verbosity -> RepoContext -> (String, String) -> URI -> [(BuildReport, Maybe BuildLog)] -> IO () +uploadReports verbosity repoCtxt auth uri reports = do + forM_ reports $ \(report, mbBuildLog) -> do + buildId <- postBuildReport verbosity repoCtxt auth uri report + case mbBuildLog of + Just buildLog -> putBuildLog verbosity repoCtxt auth buildId buildLog + Nothing -> return () + +postBuildReport :: Verbosity -> RepoContext -> (String, String) -> URI -> BuildReport -> IO BuildReportId +postBuildReport verbosity repoCtxt auth uri buildReport = do + let fullURI = uri { uriPath = "/package" </> display (BuildReport.package buildReport) </> "reports" } + transport <- repoContextGetTransport repoCtxt + res <- postHttp transport verbosity fullURI (BuildReport.show buildReport) (Just auth) + case res of + (303, redir) -> return $ undefined redir --TODO parse redir + _ -> die "unrecognized response" -- give response + +{- + setAllowRedirects False + (_, response) <- request Request { + rqURI = uri { uriPath = "/package" </> display (BuildReport.package buildReport) </> "reports" }, + rqMethod = POST, + rqHeaders = [Header HdrContentType ("text/plain"), + Header HdrContentLength (show (length body)), + Header HdrAccept ("text/plain")], + rqBody = body + } + case rspCode response of + (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location +#if defined(VERSION_network_uri) + return $ relativeTo rel uri +#elif defined(VERSION_network) +#if MIN_VERSION_network(2,4,0) + return $ relativeTo rel uri +#else + relativeTo rel uri +#endif +#endif + | Header HdrLocation location <- rspHeaders response ] + -> return $ buildId + _ -> error "Unrecognised response from server." + where body = BuildReport.show buildReport +-} + + +-- TODO force this to be a PUT?
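The upload flow above POSTs each anonymous report to the per-package reports collection on the server and, when the server answers with a 303 redirect naming the new report, sends the accompanying build log to that report's "log" resource (putBuildLog below). A minimal, self-contained sketch of how those endpoint URIs are composed, assuming only the network-uri and filepath packages; the server address, the package id "foo-1.0" and the helpers reportsURI/logURI are illustrative only and not part of this patch:

import Network.URI (URI, parseURI, uriPath)
import System.FilePath.Posix ((</>))

-- Collection URL a report is POSTed to, e.g.
-- http://hackage.haskell.org/package/foo-1.0/reports
-- (illustrative helper, not part of the patch)
reportsURI :: URI -> String -> URI
reportsURI server pkgid =
  server { uriPath = "/package" </> pkgid </> "reports" }

-- Given the URI of an individual report (as returned via the 303 redirect),
-- the build log goes to the "log" resource underneath it.
-- (illustrative helper, not part of the patch)
logURI :: URI -> URI
logURI reportId = reportId { uriPath = uriPath reportId </> "log" }

main :: IO ()
main =
  case parseURI "http://hackage.haskell.org" of
    Nothing -> putStrLn "could not parse the server URI"
    Just server -> do
      let reports = reportsURI server "foo-1.0"
          -- pretend the server redirected us to report number 1
          reportId = reports { uriPath = uriPath reports </> "1" }
      print reports            -- .../package/foo-1.0/reports
      print (logURI reportId)  -- .../package/foo-1.0/reports/1/log

Joining the path segments with </> from System.FilePath.Posix, as the module's own import suggests, keeps the separators as forward slashes on every platform, which is what a URI path requires.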
+ +putBuildLog :: Verbosity -> RepoContext -> (String, String) + -> BuildReportId -> BuildLog + -> IO () +putBuildLog verbosity repoCtxt auth reportId buildLog = do + let fullURI = reportId {uriPath = uriPath reportId "log"} + transport <- repoContextGetTransport repoCtxt + res <- postHttp transport verbosity fullURI buildLog (Just auth) + case res of + (200, _) -> return () + _ -> die "unrecognized response" -- give response diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildTarget.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildTarget.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/BuildTarget.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,1623 @@ +{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.BuildTargets +-- Copyright : (c) Duncan Coutts 2012, 2015 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified build targets +----------------------------------------------------------------------------- +module Distribution.Client.BuildTarget ( + + -- * Build targets + BuildTarget(..), + --showBuildTarget, + QualLevel(..), + buildTargetPackage, + buildTargetComponentName, + + -- * Top level convenience + readUserBuildTargets, + resolveUserBuildTargets, + + -- * Parsing user build targets + UserBuildTarget, + parseUserBuildTargets, + showUserBuildTarget, + UserBuildTargetProblem(..), + reportUserBuildTargetProblems, + + -- * Resolving build targets + resolveBuildTargets, + BuildTargetProblem(..), + reportBuildTargetProblems, + ) where + +import Distribution.Package + ( Package(..), PackageId, PackageName, packageName ) +import Distribution.Client.Types + ( PackageLocation(..) ) + +import Distribution.PackageDescription + ( PackageDescription + , Executable(..) + , TestSuite(..), TestSuiteInterface(..), testModules + , Benchmark(..), BenchmarkInterface(..), benchmarkModules + , BuildInfo(..), libModules, exeModules ) +import Distribution.ModuleName + ( ModuleName, toFilePath ) +import Distribution.Simple.LocalBuildInfo + ( Component(..), ComponentName(..) 
+ , pkgComponents, componentName, componentBuildInfo ) + +import Distribution.Text + ( display, simpleParse ) +import Distribution.Simple.Utils + ( die, lowercase ) +import Distribution.Client.Utils + ( makeRelativeToCwd ) + +import Data.List + ( nub, nubBy, stripPrefix, partition, intercalate, sortBy, groupBy ) +import Data.Maybe + ( listToMaybe, maybeToList ) +import Data.Either + ( partitionEithers ) +import Data.Function + ( on ) +import GHC.Generics (Generic) +#if MIN_VERSION_containers(0,5,0) +import qualified Data.Map.Lazy as Map.Lazy +import qualified Data.Map.Strict as Map +import Data.Map.Strict (Map) +#else +import qualified Data.Map as Map.Lazy +import qualified Data.Map as Map +import Data.Map (Map) +#endif +import Control.Monad +#if __GLASGOW_HASKELL__ < 710 +import Control.Applicative (Applicative(..), (<$>)) +#endif +import Control.Applicative (Alternative(..)) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( (+++), (<++) ) +import Data.Char + ( isSpace, isAlphaNum ) +import System.FilePath as FilePath + ( takeExtension, dropExtension, addTrailingPathSeparator + , splitDirectories, joinPath, splitPath ) +import System.Directory + ( doesFileExist, doesDirectoryExist, canonicalizePath + , getCurrentDirectory ) +import System.FilePath + ( (), (<.>), normalise ) + + +-- ------------------------------------------------------------ +-- * User build targets +-- ------------------------------------------------------------ + +-- | Various ways that a user may specify a build target. +-- +-- The main general form has lots of optional parts: +-- +-- > [ package name | package dir | package .cabal file ] +-- > [ [lib:|exe:] component name ] +-- > [ module name | source file ] +-- +-- There's also a special case of a package tarball. It doesn't take part in +-- the main general form since we always build a tarball package as a whole. +-- +-- > [package tar.gz file] +-- +data UserBuildTarget = + + -- | A simple target specified by a single part. This is any of the + -- general forms that can be expressed using one part, which are: + -- + -- > cabal build foo -- package name + -- > cabal build ../bar ../bar/bar.cabal -- package dir or package file + -- > cabal build foo -- component name + -- > cabal build Data.Foo -- module name + -- > cabal build Data/Foo.hs bar/Main.hsc -- file name + -- + -- It can also be a package tarball. + -- + -- > cabal build bar.tar.gz + -- + UserBuildTarget1 String + + -- | A qualified target with two parts. This is any of the general + -- forms that can be expressed using two parts, which are: + -- + -- > cabal build foo:foo -- package : component + -- > cabal build foo:Data.Foo -- package : module + -- > cabal build foo:Data/Foo.hs -- package : filename + -- + -- > cabal build ./foo:foo -- package dir : component + -- > cabal build ./foo:Data.Foo -- package dir : module + -- + -- > cabal build ./foo.cabal:foo -- package file : component + -- > cabal build ./foo.cabal:Data.Foo -- package file : module + -- > cabal build ./foo.cabal:Main.hs -- package file : filename + -- + -- > cabal build lib:foo exe:foo -- namespace : component + -- > cabal build foo:Data.Foo -- component : module + -- > cabal build foo:Data/Foo.hs -- component : filename + -- + | UserBuildTarget2 String String + + -- A (very) qualified target with three parts. 
This is any of the general + -- forms that can be expressed using three parts, which are: + -- + -- > cabal build foo:lib:foo -- package : namespace : component + -- > cabal build foo:foo:Data.Foo -- package : component : module + -- > cabal build foo:foo:Data/Foo.hs -- package : component : filename + -- + -- > cabal build foo/:lib:foo -- pkg dir : namespace : component + -- > cabal build foo/:foo:Data.Foo -- pkg dir : component : module + -- > cabal build foo/:foo:Data/Foo.hs -- pkg dir : component : filename + -- + -- > cabal build foo.cabal:lib:foo -- pkg file : namespace : component + -- > cabal build foo.cabal:foo:Data.Foo -- pkg file : component : module + -- > cabal build foo.cabal:foo:Data/Foo.hs -- pkg file : component : filename + -- + -- > cabal build lib:foo:Data.Foo -- namespace : component : module + -- > cabal build lib:foo:Data/Foo.hs -- namespace : component : filename + -- + | UserBuildTarget3 String String String + + -- A (rediculously) qualified target with four parts. This is any of the + -- general forms that can be expressed using all four parts, which are: + -- + -- > cabal build foo:lib:foo:Data.Foo -- package : namespace : component : module + -- > cabal build foo:lib:foo:Data/Foo.hs -- package : namespace : component : filename + -- + -- > cabal build foo/:lib:foo:Data.Foo -- pkg dir : namespace : component : module + -- > cabal build foo/:lib:foo:Data/Foo.hs -- pkg dir : namespace : component : filename + -- + -- > cabal build foo.cabal:lib:foo:Data.Foo -- pkg file : namespace : component : module + -- > cabal build foo.cabal:lib:foo:Data/Foo.hs -- pkg file : namespace : component : filename + -- + | UserBuildTarget4 String String String String + deriving (Show, Eq, Ord) + + +-- ------------------------------------------------------------ +-- * Resolved build targets +-- ------------------------------------------------------------ + +-- | A fully resolved build target. +-- +data BuildTarget pkg = + + -- | A package as a whole + -- + BuildTargetPackage pkg + + -- | A specific component + -- + | BuildTargetComponent pkg ComponentName + + -- | A specific module within a specific component. + -- + | BuildTargetModule pkg ComponentName ModuleName + + -- | A specific file within a specific component. + -- + | BuildTargetFile pkg ComponentName FilePath + deriving (Eq, Ord, Functor, Show, Generic) + + +-- | Get the package that the 'BuildTarget' is referring to. +-- +buildTargetPackage :: BuildTarget pkg -> pkg +buildTargetPackage (BuildTargetPackage p) = p +buildTargetPackage (BuildTargetComponent p _cn) = p +buildTargetPackage (BuildTargetModule p _cn _mn) = p +buildTargetPackage (BuildTargetFile p _cn _fn) = p + + +-- | Get the 'ComponentName' that the 'BuildTarget' is referring to, if any. +-- The 'BuildTargetPackage' target kind doesn't refer to any individual +-- component, while the component, module and file kinds do. +-- +buildTargetComponentName :: BuildTarget pkg -> Maybe ComponentName +buildTargetComponentName (BuildTargetPackage _p) = Nothing +buildTargetComponentName (BuildTargetComponent _p cn) = Just cn +buildTargetComponentName (BuildTargetModule _p cn _mn) = Just cn +buildTargetComponentName (BuildTargetFile _p cn _fn) = Just cn + + +-- ------------------------------------------------------------ +-- * Top level, do everything +-- ------------------------------------------------------------ + + +-- | Parse a bunch of command line args as user build targets, failing with an +-- error if any targets are unrecognised. 
+-- +readUserBuildTargets :: [String] -> IO [UserBuildTarget] +readUserBuildTargets targetStrs = do + let (uproblems, utargets) = parseUserBuildTargets targetStrs + reportUserBuildTargetProblems uproblems + return utargets + + +-- | A 'UserBuildTarget's is just a semi-structured string. We sill have quite +-- a bit of work to do to figure out which targets they refer to (ie packages, +-- components, file locations etc). +-- +-- The possible targets are based on the available packages (and their +-- locations). It fails with an error if any user string cannot be matched to +-- a valid target. +-- +resolveUserBuildTargets :: [(PackageDescription, PackageLocation a)] + -> [UserBuildTarget] -> IO [BuildTarget PackageName] +resolveUserBuildTargets pkgs utargets = do + utargets' <- mapM getUserTargetFileStatus utargets + pkgs' <- mapM (uncurry selectPackageInfo) pkgs + pwd <- getCurrentDirectory + let (primaryPkg, otherPkgs) = selectPrimaryLocalPackage pwd pkgs' + (bproblems, btargets) = resolveBuildTargets + primaryPkg otherPkgs utargets'' + -- default local dir target if there's no given target + utargets'' + | not (null primaryPkg) + , null utargets = [UserBuildTargetFileStatus1 "./" + (FileStatusExistsDir pwd)] + | otherwise = utargets' + + reportBuildTargetProblems bproblems + return (map (fmap packageName) btargets) + where + selectPrimaryLocalPackage :: FilePath + -> [PackageInfo] + -> ([PackageInfo], [PackageInfo]) + selectPrimaryLocalPackage pwd pkgs' = + let (primary, others) = partition isPrimary pkgs' + in (primary, others) + where + isPrimary PackageInfo { pinfoDirectory = Just (dir,_) } + | dir == pwd = True + isPrimary _ = False + + +-- ------------------------------------------------------------ +-- * Checking if targets exist as files +-- ------------------------------------------------------------ + +data UserBuildTargetFileStatus = + UserBuildTargetFileStatus1 String FileStatus + | UserBuildTargetFileStatus2 String FileStatus String + | UserBuildTargetFileStatus3 String FileStatus String String + | UserBuildTargetFileStatus4 String FileStatus String String String + deriving (Eq, Ord, Show) + +data FileStatus = FileStatusExistsFile FilePath -- the canonicalised filepath + | FileStatusExistsDir FilePath -- the canonicalised filepath + | FileStatusNotExists Bool -- does the parent dir exist even? 
+ deriving (Eq, Ord, Show) + +getUserTargetFileStatus :: UserBuildTarget -> IO UserBuildTargetFileStatus +getUserTargetFileStatus t = + case t of + UserBuildTarget1 s1 -> + (\f1 -> UserBuildTargetFileStatus1 s1 f1) <$> fileStatus s1 + UserBuildTarget2 s1 s2 -> + (\f1 -> UserBuildTargetFileStatus2 s1 f1 s2) <$> fileStatus s1 + UserBuildTarget3 s1 s2 s3 -> + (\f1 -> UserBuildTargetFileStatus3 s1 f1 s2 s3) <$> fileStatus s1 + UserBuildTarget4 s1 s2 s3 s4 -> + (\f1 -> UserBuildTargetFileStatus4 s1 f1 s2 s3 s4) <$> fileStatus s1 + where + fileStatus f = do + fexists <- doesFileExist f + dexists <- doesDirectoryExist f + case splitPath f of + _ | fexists -> FileStatusExistsFile <$> canonicalizePath f + | dexists -> FileStatusExistsDir <$> canonicalizePath f + (d:_) -> FileStatusNotExists <$> doesDirectoryExist d + _ -> error "getUserTargetFileStatus: empty path" + +forgetFileStatus :: UserBuildTargetFileStatus -> UserBuildTarget +forgetFileStatus t = case t of + UserBuildTargetFileStatus1 s1 _ -> UserBuildTarget1 s1 + UserBuildTargetFileStatus2 s1 _ s2 -> UserBuildTarget2 s1 s2 + UserBuildTargetFileStatus3 s1 _ s2 s3 -> UserBuildTarget3 s1 s2 s3 + UserBuildTargetFileStatus4 s1 _ s2 s3 s4 -> UserBuildTarget4 s1 s2 s3 s4 + + +-- ------------------------------------------------------------ +-- * Parsing user targets +-- ------------------------------------------------------------ + + +-- | Parse a bunch of 'UserBuildTarget's (purely without throwing exceptions). +-- +parseUserBuildTargets :: [String] -> ([UserBuildTargetProblem] + ,[UserBuildTarget]) +parseUserBuildTargets = partitionEithers . map parseUserBuildTarget + +parseUserBuildTarget :: String -> Either UserBuildTargetProblem + UserBuildTarget +parseUserBuildTarget targetstr = + case readPToMaybe parseTargetApprox targetstr of + Nothing -> Left (UserBuildTargetUnrecognised targetstr) + Just tgt -> Right tgt + + where + parseTargetApprox :: Parse.ReadP r UserBuildTarget + parseTargetApprox = + (do a <- tokenQ + return (UserBuildTarget1 a)) + +++ (do a <- tokenQ + _ <- Parse.char ':' + b <- tokenQ + return (UserBuildTarget2 a b)) + +++ (do a <- tokenQ + _ <- Parse.char ':' + b <- tokenQ + _ <- Parse.char ':' + c <- tokenQ + return (UserBuildTarget3 a b c)) + +++ (do a <- tokenQ + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + _ <- Parse.char ':' + d <- tokenQ + return (UserBuildTarget4 a b c d)) + + token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') + tokenQ = parseHaskellString <++ token + parseHaskellString :: Parse.ReadP r String + parseHaskellString = Parse.readS_to_P reads + + readPToMaybe :: Parse.ReadP a a -> String -> Maybe a + readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str + , all isSpace s ] + +-- | Syntax error when trying to parse a 'UserBuildTarget'. +data UserBuildTargetProblem + = UserBuildTargetUnrecognised String + deriving Show + +-- | Throw an exception with a formatted message if there are any problems. +-- +reportUserBuildTargetProblems :: [UserBuildTargetProblem] -> IO () +reportUserBuildTargetProblems problems = do + case [ target | UserBuildTargetUnrecognised target <- problems ] of + [] -> return () + target -> + die $ unlines + [ "Unrecognised build target syntax for '" ++ name ++ "'." 
+ | name <- target ] + ++ "Syntax:\n" + ++ " - build [package]\n" + ++ " - build [package:]component\n" + ++ " - build [package:][component:]module\n" + ++ " - build [package:][component:]file\n" + ++ " where\n" + ++ " package is a package name, package dir or .cabal file\n\n" + ++ "Examples:\n" + ++ " - build foo -- package name\n" + ++ " - build tests -- component name\n" + ++ " (name of library, executable, test-suite or benchmark)\n" + ++ " - build Data.Foo -- module name\n" + ++ " - build Data/Foo.hsc -- file name\n\n" + ++ "An ambigious target can be qualified by package, component\n" + ++ "and/or component kind (lib|exe|test|bench)\n" + ++ " - build foo:tests -- component qualified by package\n" + ++ " - build tests:Data.Foo -- module qualified by component\n" + ++ " - build lib:foo -- component qualified by kind" + + +-- | Render a 'UserBuildTarget' back as the external syntax. This is mainly for +-- error messages. +-- +showUserBuildTarget :: UserBuildTarget -> String +showUserBuildTarget = intercalate ":" . components + where + components (UserBuildTarget1 s1) = [s1] + components (UserBuildTarget2 s1 s2) = [s1,s2] + components (UserBuildTarget3 s1 s2 s3) = [s1,s2,s3] + components (UserBuildTarget4 s1 s2 s3 s4) = [s1,s2,s3,s4] + +showBuildTarget :: QualLevel -> BuildTarget PackageInfo -> String +showBuildTarget ql = showUserBuildTarget . forgetFileStatus + . head . renderBuildTarget ql + + +-- ------------------------------------------------------------ +-- * Resolving user targets to build targets +-- ------------------------------------------------------------ + + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. +-- +resolveBuildTargets :: [PackageInfo] -- any primary pkg, e.g. cur dir + -> [PackageInfo] -- all the other local packages + -> [UserBuildTargetFileStatus] + -> ([BuildTargetProblem], [BuildTarget PackageInfo]) +resolveBuildTargets ppinfo opinfo = + partitionEithers + . map (resolveBuildTarget ppinfo opinfo) + +resolveBuildTarget :: [PackageInfo] -> [PackageInfo] + -> UserBuildTargetFileStatus + -> Either BuildTargetProblem (BuildTarget PackageInfo) +resolveBuildTarget ppinfo opinfo userTarget = + case findMatch (matcher userTarget) of + Unambiguous target -> Right target + None errs -> Left (classifyMatchErrors errs) + Ambiguous exactMatch targets -> + case disambiguateBuildTargets + matcher userTarget exactMatch + targets of + Right targets' -> Left (BuildTargetAmbiguous userTarget' targets') + Left ((m, ms):_) -> Left (MatchingInternalError userTarget' m ms) + Left [] -> internalError "resolveBuildTarget" + where + matcher = matchBuildTarget ppinfo opinfo + + userTarget' = forgetFileStatus userTarget + + classifyMatchErrors errs + | not (null expected) + = let (things, got:_) = unzip expected in + BuildTargetExpected userTarget' things got + + | not (null nosuch) + = BuildTargetNoSuch userTarget' nosuch + + | otherwise + = internalError $ "classifyMatchErrors: " ++ show errs + where + expected = [ (thing, got) + | (_, MatchErrorExpected thing got) + <- map (innerErr Nothing) errs ] + nosuch = [ (inside, thing, got, alts) + | (inside, MatchErrorNoSuch thing got alts) + <- map (innerErr Nothing) errs ] + + innerErr _ (MatchErrorIn kind thing m) + = innerErr (Just (kind,thing)) m + innerErr c m = (c,m) + + +-- | The various ways that trying to resolve a 'UserBuildTarget' to a +-- 'BuildTarget' can fail. 
+-- +data BuildTargetProblem + = BuildTargetExpected UserBuildTarget [String] String + -- ^ [expected thing] (actually got) + | BuildTargetNoSuch UserBuildTarget + [(Maybe (String, String), String, String, [String])] + -- ^ [([in thing], no such thing, actually got, alternatives)] + | BuildTargetAmbiguous UserBuildTarget + [(UserBuildTarget, BuildTarget PackageInfo)] + + | MatchingInternalError UserBuildTarget (BuildTarget PackageInfo) + [(UserBuildTarget, [BuildTarget PackageInfo])] + + +disambiguateBuildTargets + :: (UserBuildTargetFileStatus -> Match (BuildTarget PackageInfo)) + -> UserBuildTargetFileStatus -> Bool + -> [BuildTarget PackageInfo] + -> Either [(BuildTarget PackageInfo, + [(UserBuildTarget, [BuildTarget PackageInfo])])] + [(UserBuildTarget, BuildTarget PackageInfo)] +disambiguateBuildTargets matcher matchInput exactMatch matchResults = + case partitionEithers results of + (errs@(_:_), _) -> Left errs + ([], ok) -> Right ok + where + -- So, here's the strategy. We take the original match results, and make a + -- table of all their renderings at all qualification levels. + -- Note there can be multiple renderings at each qualification level. + matchResultsRenderings :: [(BuildTarget PackageInfo, [UserBuildTargetFileStatus])] + matchResultsRenderings = + [ (matchResult, matchRenderings) + | matchResult <- matchResults + , let matchRenderings = + [ rendering + | ql <- [QL1 .. QL4] + , rendering <- renderBuildTarget ql matchResult ] + ] + + -- Of course the point is that we're looking for renderings that are + -- unambiguous matches. So we build another memo table of all the matches + -- for all of those renderings. So by looking up in this table we can see + -- if we've got an unambiguous match. + + memoisedMatches :: Map UserBuildTargetFileStatus + (Match (BuildTarget PackageInfo)) + memoisedMatches = + -- avoid recomputing the main one if it was an exact match + (if exactMatch then Map.insert matchInput (ExactMatch 0 matchResults) + else id) + $ Map.Lazy.fromList + [ (rendering, matcher rendering) + | rendering <- concatMap snd matchResultsRenderings ] + + -- Finally, for each of the match results, we go through all their + -- possible renderings (in order of qualification level, though remember + -- there can be multiple renderings per level), and find the first one + -- that has an unambiguous match. + results :: [Either (BuildTarget PackageInfo, + [(UserBuildTarget, [BuildTarget PackageInfo])]) + (UserBuildTarget, BuildTarget PackageInfo)] + results = + [ case findUnambiguous originalMatch matchRenderings of + Just unambiguousRendering -> + Right ( forgetFileStatus unambiguousRendering + , originalMatch) + + -- This case is an internal error, but we bubble it up and report it + Nothing -> + Left ( originalMatch + , [ (forgetFileStatus rendering, matches) + | rendering <- matchRenderings + , let (ExactMatch _ matches) = + memoisedMatches Map.! rendering + ] ) + + | (originalMatch, matchRenderings) <- matchResultsRenderings ] + + findUnambiguous :: BuildTarget PackageInfo -> [UserBuildTargetFileStatus] + -> Maybe UserBuildTargetFileStatus + findUnambiguous _ [] = Nothing + findUnambiguous t (r:rs) = + case memoisedMatches Map.! 
r of + ExactMatch _ [t'] | fmap packageName t == fmap packageName t' + -> Just r + ExactMatch _ _ -> findUnambiguous t rs + InexactMatch _ _ -> internalError "InexactMatch" + NoMatch _ _ -> internalError "NoMatch" + +internalError :: String -> a +internalError msg = + error $ "BuildTargets: internal error: " ++ msg + + +data QualLevel = QL1 | QL2 | QL3 | QL4 + deriving (Enum, Show) + +renderBuildTarget :: QualLevel -> BuildTarget PackageInfo + -> [UserBuildTargetFileStatus] +renderBuildTarget ql t = + case t of + BuildTargetPackage p -> + case ql of + QL1 -> [t1 (dispP p)] + QL2 -> [t1' pf fs | (pf, fs) <- dispPF p] + QL3 -> [] + QL4 -> [] + + BuildTargetComponent p c -> + case ql of + QL1 -> [t1 (dispC p c)] + QL2 -> [t2 (dispP p) (dispC p c), + t2 (dispK c) (dispC p c)] + QL3 -> [t3 (dispP p) (dispK c) (dispC p c)] + QL4 -> [] + + BuildTargetModule p c m -> + case ql of + QL1 -> [t1 (dispM m)] + QL2 -> [t2 (dispP p) (dispM m), + t2 (dispC p c) (dispM m)] + QL3 -> [t3 (dispP p) (dispC p c) (dispM m), + t3 (dispK c) (dispC p c) (dispM m)] + QL4 -> [t4 (dispP p) (dispK c) (dispC p c) (dispM m)] + + BuildTargetFile p c f -> + case ql of + QL1 -> [t1 f] + QL2 -> [t2 (dispP p) f, + t2 (dispC p c) f] + QL3 -> [t3 (dispP p) (dispC p c) f, + t3 (dispK c) (dispC p c) f] + QL4 -> [t4 (dispP p) (dispK c) (dispC p c) f] + where + t1 s1 = UserBuildTargetFileStatus1 s1 none + t1' s1 = UserBuildTargetFileStatus1 s1 + t2 s1 = UserBuildTargetFileStatus2 s1 none + t3 s1 = UserBuildTargetFileStatus3 s1 none + t4 s1 = UserBuildTargetFileStatus4 s1 none + none = FileStatusNotExists False + + dispP = display . packageName + dispC = componentStringName . packageName + dispK = showComponentKindShort . componentKind + dispM = display + + dispPF p = [ (addTrailingPathSeparator drel, FileStatusExistsDir dabs) + | PackageInfo { pinfoDirectory = Just (dabs,drel) } <- [p] ] + ++ [ (frel, FileStatusExistsFile fabs) + | PackageInfo { pinfoPackageFile = Just (fabs,frel) } <- [p] ] + + +-- | Throw an exception with a formatted message if there are any problems. +-- +reportBuildTargetProblems :: [BuildTargetProblem] -> IO () +reportBuildTargetProblems problems = do + + case [ (t, m, ms) | MatchingInternalError t m ms <- problems ] of + [] -> return () + ((target, originalMatch, renderingsAndMatches):_) -> + die $ "Internal error in build target matching. It should always be " + ++ "possible to find a syntax that's sufficiently qualified to " + ++ "give an unambigious match. However when matching '" + ++ showUserBuildTarget target ++ "' we found " + ++ showBuildTarget QL1 originalMatch + ++ " (" ++ showBuildTargetKind originalMatch ++ ") which does not " + ++ "have an unambigious syntax. The possible syntax and the " + ++ "targets they match are as follows:\n" + ++ unlines + [ "'" ++ showUserBuildTarget rendering ++ "' which matches " + ++ intercalate ", " + [ showBuildTarget QL1 match ++ + " (" ++ showBuildTargetKind match ++ ")" + | match <- matches ] + | (rendering, matches) <- renderingsAndMatches ] + + case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Unrecognised build target '" ++ showUserBuildTarget target + ++ "'.\n" + ++ "Expected a " ++ intercalate " or " expected + ++ ", rather than '" ++ got ++ "'." 
+ | (target, expected, got) <- targets ] + + case [ (t, e) | BuildTargetNoSuch t e <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Unknown build target '" ++ showUserBuildTarget target ++ + "'.\n" ++ unlines + [ (case inside of + Just (kind, thing) + -> "The " ++ kind ++ " " ++ thing ++ " has no " + Nothing -> "There is no ") + ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" + | (thing, got, _alts) <- nosuch' ] ++ "." + ++ if null alternatives then "" else + "\nPerhaps you meant " ++ intercalate ";\nor " + [ "the " ++ thing ++ " " ++ intercalate " or " alts + | (thing, alts) <- alternatives ] + | (inside, nosuch') <- groupByContainer nosuch + , let alternatives = + [ (thing, take 10 alts) --TODO: select best ones + | (thing,_got,alts@(_:_)) <- nosuch' ] + ] + | (target, nosuch) <- targets + , let groupByContainer = + map (\g@((inside,_,_,_):_) -> + (inside, [ (thing,got,alts) + | (_,thing,got,alts) <- g ])) + . groupBy ((==) `on` (\(x,_,_,_) -> x)) + . sortBy (compare `on` (\(x,_,_,_) -> x)) + ] + where + mungeThing "file" = "file target" + mungeThing thing = thing + + case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of + [] -> return () + targets -> + die $ unlines + [ "Ambiguous build target '" ++ showUserBuildTarget target + ++ "'. It could be:\n " + ++ unlines [ " "++ showUserBuildTarget ut ++ + " (" ++ showBuildTargetKind bt ++ ")" + | (ut, bt) <- amb ] + | (target, amb) <- targets ] + + where + showBuildTargetKind (BuildTargetPackage _ ) = "package" + showBuildTargetKind (BuildTargetComponent _ _ ) = "component" + showBuildTargetKind (BuildTargetModule _ _ _) = "module" + showBuildTargetKind (BuildTargetFile _ _ _) = "file" + + +---------------------------------- +-- Top level BuildTarget matcher +-- + +matchBuildTarget :: [PackageInfo] -> [PackageInfo] + -> UserBuildTargetFileStatus + -> Match (BuildTarget PackageInfo) +matchBuildTarget ppinfo opinfo = \utarget -> + nubMatchesBy ((==) `on` (fmap packageName)) $ + case utarget of + UserBuildTargetFileStatus1 str1 fstatus1 -> + matchBuildTarget1 ppinfo opinfo str1 fstatus1 + + UserBuildTargetFileStatus2 str1 fstatus1 str2 -> + matchBuildTarget2 pinfo str1 fstatus1 str2 + + UserBuildTargetFileStatus3 str1 fstatus1 str2 str3 -> + matchBuildTarget3 pinfo str1 fstatus1 str2 str3 + + UserBuildTargetFileStatus4 str1 fstatus1 str2 str3 str4 -> + matchBuildTarget4 pinfo str1 fstatus1 str2 str3 str4 + where + pinfo = ppinfo ++ opinfo + --TODO: sort this out + + +matchBuildTarget1 :: [PackageInfo] -> [PackageInfo] + -> String -> FileStatus -> Match (BuildTarget PackageInfo) +matchBuildTarget1 ppinfo opinfo = \str1 fstatus1 -> + match1Cmp pcinfo str1 + match1Pkg pinfo str1 fstatus1 + match1Cmp ocinfo str1 + match1Mod cinfo str1 + match1Fil pinfo str1 fstatus1 + where + pinfo = ppinfo ++ opinfo + cinfo = concatMap pinfoComponents pinfo + pcinfo = concatMap pinfoComponents ppinfo + ocinfo = concatMap pinfoComponents opinfo + + +matchBuildTarget2 :: [PackageInfo] -> String -> FileStatus -> String + -> Match (BuildTarget PackageInfo) +matchBuildTarget2 pinfo str1 fstatus1 str2 = + match2PkgCmp pinfo str1 fstatus1 str2 + <|> match2KndCmp cinfo str1 str2 + match2PkgMod pinfo str1 fstatus1 str2 + match2CmpMod cinfo str1 str2 + match2PkgFil pinfo str1 fstatus1 str2 + match2CmpFil cinfo str1 str2 + where + cinfo = concatMap pinfoComponents pinfo + --TODO: perhaps we actually do want to prioritise local/primary components + + +matchBuildTarget3 :: [PackageInfo] -> String -> FileStatus -> String -> String + -> 
Match (BuildTarget PackageInfo) +matchBuildTarget3 pinfo str1 fstatus1 str2 str3 = + match3PkgKndCmp pinfo str1 fstatus1 str2 str3 + match3PkgCmpMod pinfo str1 fstatus1 str2 str3 + match3PkgCmpFil pinfo str1 fstatus1 str2 str3 + match3KndCmpMod cinfo str1 str2 str3 + match3KndCmpFil cinfo str1 str2 str3 + where + cinfo = concatMap pinfoComponents pinfo + + +matchBuildTarget4 :: [PackageInfo] + -> String -> FileStatus -> String -> String -> String + -> Match (BuildTarget PackageInfo) +matchBuildTarget4 pinfo str1 fstatus1 str2 str3 str4 = + match4PkgKndCmpMod pinfo str1 fstatus1 str2 str3 str4 + match4PkgKndCmpFil pinfo str1 fstatus1 str2 str3 str4 + + +------------------------------------ +-- Individual BuildTarget matchers +-- + +match1Pkg :: [PackageInfo] -> String -> FileStatus + -> Match (BuildTarget PackageInfo) +match1Pkg pinfo = \str1 fstatus1 -> do + guardPackage str1 fstatus1 + p <- matchPackage pinfo str1 fstatus1 + return (BuildTargetPackage p) + +match1Cmp :: [ComponentInfo] -> String -> Match (BuildTarget PackageInfo) +match1Cmp cs = \str1 -> do + guardComponentName str1 + c <- matchComponentName cs str1 + return (BuildTargetComponent (cinfoPackage c) (cinfoName c)) + +match1Mod :: [ComponentInfo] -> String -> Match (BuildTarget PackageInfo) +match1Mod cs = \str1 -> do + guardModuleName str1 + let ms = [ (m,c) | c <- cs, m <- cinfoModules c ] + (m,c) <- matchModuleNameAnd ms str1 + return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) + +match1Fil :: [PackageInfo] -> String -> FileStatus + -> Match (BuildTarget PackageInfo) +match1Fil ps str1 fstatus1 = + expecting "file" str1 $ do + (pkgfile, p) <- matchPackageDirectoryPrefix ps fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + (filepath, c) <- matchComponentFile (pinfoComponents p) pkgfile + return (BuildTargetFile p (cinfoName c) filepath) + +--- + +match2PkgCmp :: [PackageInfo] + -> String -> FileStatus -> String + -> Match (BuildTarget PackageInfo) +match2PkgCmp ps = \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentName (pinfoComponents p) str2 + return (BuildTargetComponent p (cinfoName c)) + --TODO: the error here ought to say there's no component by that name in + -- this package, and name the package + +match2KndCmp :: [ComponentInfo] -> String -> String + -> Match (BuildTarget PackageInfo) +match2KndCmp cs = \str1 str2 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + return (BuildTargetComponent (cinfoPackage c) (cinfoName c)) + +match2PkgMod :: [PackageInfo] -> String -> FileStatus -> String + -> Match (BuildTarget PackageInfo) +match2PkgMod ps = \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + guardModuleName str2 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + let ms = [ (m,c) | c <- pinfoComponents p, m <- cinfoModules c ] + (m,c) <- matchModuleNameAnd ms str2 + return (BuildTargetModule p (cinfoName c) m) + +match2CmpMod :: [ComponentInfo] -> String -> String + -> Match (BuildTarget PackageInfo) +match2CmpMod cs = \str1 str2 -> do + guardComponentName str1 + guardModuleName str2 + c <- matchComponentName cs str1 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str2 + return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) + +match2PkgFil :: [PackageInfo] -> String -> 
FileStatus -> String + -> Match (BuildTarget PackageInfo) +match2PkgFil ps str1 fstatus1 str2 = do + guardPackage str1 fstatus1 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + (filepath, c) <- matchComponentFile (pinfoComponents p) str2 + return (BuildTargetFile p (cinfoName c) filepath) + +match2CmpFil :: [ComponentInfo] -> String -> String + -> Match (BuildTarget PackageInfo) +match2CmpFil cs str1 str2 = do + guardComponentName str1 + c <- matchComponentName cs str1 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- matchComponentFile [c] str2 + return (BuildTargetFile (cinfoPackage c) (cinfoName c) filepath) + +--- + +match3PkgKndCmp :: [PackageInfo] + -> String -> FileStatus -> String -> String + -> Match (BuildTarget PackageInfo) +match3PkgKndCmp ps = \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + ckind <- matchComponentKind str2 + guardComponentName str3 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentKindAndName (pinfoComponents p) ckind str3 + return (BuildTargetComponent p (cinfoName c)) + +match3PkgCmpMod :: [PackageInfo] + -> String -> FileStatus -> String -> String + -> Match (BuildTarget PackageInfo) +match3PkgCmpMod ps = \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + guardModuleName str3 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentName (pinfoComponents p) str2 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (BuildTargetModule p (cinfoName c) m) + +match3KndCmpMod :: [ComponentInfo] + -> String -> String -> String + -> Match (BuildTarget PackageInfo) +match3KndCmpMod cs = \str1 str2 str3 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + guardModuleName str3 + c <- matchComponentKindAndName cs ckind str2 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (BuildTargetModule (cinfoPackage c) (cinfoName c) m) + +match3PkgCmpFil :: [PackageInfo] + -> String -> FileStatus -> String -> String + -> Match (BuildTarget PackageInfo) +match3PkgCmpFil ps = \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentName (pinfoComponents p) str2 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- matchComponentFile [c] str3 + return (BuildTargetFile p (cinfoName c) filepath) + +match3KndCmpFil :: [ComponentInfo] -> String -> String -> String + -> Match (BuildTarget PackageInfo) +match3KndCmpFil cs = \str1 str2 str3 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- matchComponentFile [c] str3 + return (BuildTargetFile (cinfoPackage c) (cinfoName c) filepath) + +-- + +match4PkgKndCmpMod :: [PackageInfo] + -> String-> FileStatus -> String -> String -> String + -> Match (BuildTarget PackageInfo) +match4PkgKndCmpMod ps = \str1 fstatus1 str2 str3 str4 -> do + guardPackage str1 fstatus1 + ckind <- matchComponentKind str2 + guardComponentName str3 + guardModuleName str4 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentKindAndName (pinfoComponents p) ckind str3 + 
orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str4 + return (BuildTargetModule p (cinfoName c) m) + +match4PkgKndCmpFil :: [PackageInfo] + -> String -> FileStatus -> String -> String -> String + -> Match (BuildTarget PackageInfo) +match4PkgKndCmpFil ps = \str1 fstatus1 str2 str3 str4 -> do + guardPackage str1 fstatus1 + ckind <- matchComponentKind str2 + guardComponentName str3 + p <- matchPackage ps str1 fstatus1 + orNoThingIn "package" (display (packageName p)) $ do + c <- matchComponentKindAndName (pinfoComponents p) ckind str3 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath,_) <- matchComponentFile [c] str4 + return (BuildTargetFile p (cinfoName c) filepath) + + +------------------------------- +-- Package and component info +-- + +data PackageInfo = PackageInfo { + pinfoId :: PackageId, + pinfoLocation :: PackageLocation (), + pinfoDirectory :: Maybe (FilePath, FilePath), + pinfoPackageFile :: Maybe (FilePath, FilePath), + pinfoComponents :: [ComponentInfo] + } + +data ComponentInfo = ComponentInfo { + cinfoName :: ComponentName, + cinfoStrName :: ComponentStringName, + cinfoPackage :: PackageInfo, + cinfoSrcDirs :: [FilePath], + cinfoModules :: [ModuleName], + cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) + cinfoCFiles :: [FilePath], + cinfoJsFiles :: [FilePath] + } + +type ComponentStringName = String + +instance Package PackageInfo where + packageId = pinfoId + +--TODO: [required eventually] need the original GenericPackageDescription or +-- the flattening thereof because we need to be able to target modules etc +-- that are not enabled in the current configuration. +selectPackageInfo :: PackageDescription -> PackageLocation a -> IO PackageInfo +selectPackageInfo pkg loc = do + (pkgdir, pkgfile) <- + case loc of + --TODO: local tarballs, remote tarballs etc + LocalUnpackedPackage dir -> do + dirabs <- canonicalizePath dir + dirrel <- makeRelativeToCwd dirabs + --TODO: ought to get this earlier in project reading + let fileabs = dirabs display (packageName pkg) <.> "cabal" + filerel = dirrel display (packageName pkg) <.> "cabal" + exists <- doesFileExist fileabs + return ( Just (dirabs, dirrel) + , if exists then Just (fileabs, filerel) else Nothing + ) + _ -> return (Nothing, Nothing) + let pinfo = + PackageInfo { + pinfoId = packageId pkg, + pinfoLocation = fmap (const ()) loc, + pinfoDirectory = pkgdir, + pinfoPackageFile = pkgfile, + pinfoComponents = selectComponentInfo pinfo pkg + } + return pinfo + + +selectComponentInfo :: PackageInfo -> PackageDescription -> [ComponentInfo] +selectComponentInfo pinfo pkg = + [ ComponentInfo { + cinfoName = componentName c, + cinfoStrName = componentStringName (packageName pkg) (componentName c), + cinfoPackage = pinfo, + cinfoSrcDirs = hsSourceDirs bi, +-- [ pkgroot srcdir +-- | (pkgroot,_) <- maybeToList (pinfoDirectory pinfo) +-- , srcdir <- hsSourceDirs bi ], + cinfoModules = componentModules c, + cinfoHsFiles = componentHsFiles c, + cinfoCFiles = cSources bi, + cinfoJsFiles = jsSources bi + } + | c <- pkgComponents pkg + , let bi = componentBuildInfo c ] + + +componentStringName :: PackageName -> ComponentName -> ComponentStringName +componentStringName pkgname CLibName = display pkgname +componentStringName _ (CExeName name) = name +componentStringName _ (CTestName name) = name +componentStringName _ (CBenchName name) = name + +componentModules :: Component -> [ModuleName] +componentModules (CLib lib) = libModules lib +componentModules (CExe exe) = 
exeModules exe +componentModules (CTest test) = testModules test +componentModules (CBench bench) = benchmarkModules bench + +componentHsFiles :: Component -> [FilePath] +componentHsFiles (CExe exe) = [modulePath exe] +componentHsFiles (CTest TestSuite { + testInterface = TestSuiteExeV10 _ mainfile + }) = [mainfile] +componentHsFiles (CBench Benchmark { + benchmarkInterface = BenchmarkExeV10 _ mainfile + }) = [mainfile] +componentHsFiles _ = [] + + +------------------------------ +-- Matching component kinds +-- + +data ComponentKind = LibKind | ExeKind | TestKind | BenchKind + deriving (Eq, Ord, Show) + +componentKind :: ComponentName -> ComponentKind +componentKind CLibName = LibKind +componentKind (CExeName _) = ExeKind +componentKind (CTestName _) = TestKind +componentKind (CBenchName _) = BenchKind + +cinfoKind :: ComponentInfo -> ComponentKind +cinfoKind = componentKind . cinfoName + +matchComponentKind :: String -> Match ComponentKind +matchComponentKind s + | s `elem` ["lib", "library"] = increaseConfidence >> return LibKind + | s `elem` ["exe", "executable"] = increaseConfidence >> return ExeKind + | s `elem` ["tst", "test", "test-suite"] = increaseConfidence + >> return TestKind + | s `elem` ["bench", "benchmark"] = increaseConfidence + >> return BenchKind + | otherwise = matchErrorExpected + "component kind" s + +showComponentKind :: ComponentKind -> String +showComponentKind LibKind = "library" +showComponentKind ExeKind = "executable" +showComponentKind TestKind = "test-suite" +showComponentKind BenchKind = "benchmark" + +showComponentKindShort :: ComponentKind -> String +showComponentKindShort LibKind = "lib" +showComponentKindShort ExeKind = "exe" +showComponentKindShort TestKind = "test" +showComponentKindShort BenchKind = "bench" + +------------------------------ +-- Matching package targets +-- + +guardPackage :: String -> FileStatus -> Match () +guardPackage str fstatus = + guardPackageName str + <|> guardPackageDir str fstatus + <|> guardPackageFile str fstatus + + +guardPackageName :: String -> Match () +guardPackageName s + | validPackgageName s = increaseConfidence + | otherwise = matchErrorExpected "package name" s + where + +validPackgageName :: String -> Bool +validPackgageName s = + all validPackgageNameChar s + && not (null s) + where + validPackgageNameChar c = isAlphaNum c || c == '-' + + +guardPackageDir :: String -> FileStatus -> Match () +guardPackageDir _ (FileStatusExistsDir _) = increaseConfidence +guardPackageDir str _ = matchErrorExpected "package directory" str + + +guardPackageFile :: String -> FileStatus -> Match () +guardPackageFile _ (FileStatusExistsFile file) + | takeExtension file == ".cabal" + = increaseConfidence +guardPackageFile str _ = matchErrorExpected "package .cabal file" str + + +matchPackage :: [PackageInfo] -> String -> FileStatus -> Match PackageInfo +matchPackage pinfo = \str fstatus -> + orNoThingIn "project" "" $ + matchPackageName pinfo str + (matchPackageDir pinfo str fstatus + <|> matchPackageFile pinfo str fstatus) + + +matchPackageName :: [PackageInfo] -> String -> Match PackageInfo +matchPackageName ps = \str -> do + guard (validPackgageName str) + orNoSuchThing "package" str + (map (display . packageName) ps) $ + increaseConfidenceFor $ + matchInexactly caseFold (display . packageName) ps str + + +matchPackageDir :: [PackageInfo] + -> String -> FileStatus -> Match PackageInfo +matchPackageDir ps = \str fstatus -> + case fstatus of + FileStatusExistsDir canondir -> + orNoSuchThing "package directory" str (map (snd . 
fst) dirs) $ + increaseConfidenceFor $ + fmap snd $ matchExactly (fst . fst) dirs canondir + _ -> mzero + where + dirs = [ ((dabs,drel),p) + | p@PackageInfo{ pinfoDirectory = Just (dabs,drel) } <- ps ] + + +matchPackageFile :: [PackageInfo] -> String -> FileStatus -> Match PackageInfo +matchPackageFile ps = \str fstatus -> do + case fstatus of + FileStatusExistsFile canonfile -> + orNoSuchThing "package .cabal file" str (map (snd . fst) files) $ + increaseConfidenceFor $ + fmap snd $ matchExactly (fst . fst) files canonfile + _ -> mzero + where + files = [ ((fabs,frel),p) + | p@PackageInfo{ pinfoPackageFile = Just (fabs,frel) } <- ps ] + +--TODO: test outcome when dir exists but doesn't match any known one + +--TODO: perhaps need another distinction, vs no such thing, point is the +-- thing is not known, within the project, but could be outside project + + +------------------------------ +-- Matching component targets +-- + + +guardComponentName :: String -> Match () +guardComponentName s + | all validComponentChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "component name" s + where + validComponentChar c = isAlphaNum c || c == '.' + || c == '_' || c == '-' || c == '\'' + + +matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo +matchComponentName cs str = + orNoSuchThing "component" str (map cinfoStrName cs) + $ increaseConfidenceFor + $ matchInexactly caseFold cinfoStrName cs str + + +matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String + -> Match ComponentInfo +matchComponentKindAndName cs ckind str = + orNoSuchThing (showComponentKind ckind ++ " component") str + (map render cs) + $ increaseConfidenceFor + $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) + (\c -> (cinfoKind c, cinfoStrName c)) + cs + (ckind, str) + where + render c = showComponentKindShort (cinfoKind c) ++ ":" ++ cinfoStrName c + + +------------------------------ +-- Matching module targets +-- + +guardModuleName :: String -> Match () +guardModuleName s = + case simpleParse s :: Maybe ModuleName of + Just _ -> increaseConfidence + _ | all validModuleChar s + && not (null s) -> return () + | otherwise -> matchErrorExpected "module name" s + where + validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\'' + + +matchModuleName :: [ModuleName] -> String -> Match ModuleName +matchModuleName ms str = + orNoSuchThing "module" str (map display ms) + $ increaseConfidenceFor + $ matchInexactly caseFold display ms str + + +matchModuleNameAnd :: [(ModuleName, a)] -> String -> Match (ModuleName, a) +matchModuleNameAnd ms str = + orNoSuchThing "module" str (map (display . fst) ms) + $ increaseConfidenceFor + $ matchInexactly caseFold (display . 
fst) ms str + + +------------------------------ +-- Matching file targets +-- + +matchPackageDirectoryPrefix :: [PackageInfo] -> FileStatus + -> Match (FilePath, PackageInfo) +matchPackageDirectoryPrefix ps (FileStatusExistsFile filepath) = + increaseConfidenceFor $ + matchDirectoryPrefix pkgdirs filepath + where + pkgdirs = [ (dir, p) + | p@PackageInfo { pinfoDirectory = Just (dir,_) } <- ps ] +matchPackageDirectoryPrefix _ _ = mzero + + +matchComponentFile :: [ComponentInfo] -> String + -> Match (FilePath, ComponentInfo) +matchComponentFile cs str = + orNoSuchThing "file" str [] $ + matchComponentModuleFile cs str + <|> matchComponentOtherFile cs str + + +matchComponentOtherFile :: [ComponentInfo] -> String + -> Match (FilePath, ComponentInfo) +matchComponentOtherFile cs = + matchFile + [ (file, c) + | c <- cs + , file <- cinfoHsFiles c + ++ cinfoCFiles c + ++ cinfoJsFiles c + ] + + +matchComponentModuleFile :: [ComponentInfo] -> String + -> Match (FilePath, ComponentInfo) +matchComponentModuleFile cs str = do + matchFile + [ (normalise (d toFilePath m), c) + | c <- cs + , d <- cinfoSrcDirs c + , m <- cinfoModules c + ] + (dropExtension (normalise str)) + +-- utils + +matchFile :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) +matchFile fs = + increaseConfidenceFor + . matchInexactly caseFold fst fs + +matchDirectoryPrefix :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) +matchDirectoryPrefix dirs filepath = + tryEach $ + [ (file, x) + | (dir,x) <- dirs + , file <- maybeToList (stripDirectory dir) ] + where + stripDirectory :: FilePath -> Maybe FilePath + stripDirectory dir = + joinPath `fmap` stripPrefix (splitDirectories dir) filepathsplit + + filepathsplit = splitDirectories filepath + + +------------------------------ +-- Matching monad +-- + +-- | A matcher embodies a way to match some input as being some recognised +-- value. In particular it deals with multiple and ambiguous matches. +-- +-- There are various matcher primitives ('matchExactly', 'matchInexactly'), +-- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally we +-- can run a matcher against an input using 'findMatch'. +-- +data Match a = NoMatch Confidence [MatchError] + | ExactMatch Confidence [a] + | InexactMatch Confidence [a] + deriving Show + +type Confidence = Int + +data MatchError = MatchErrorExpected String String -- thing got + | MatchErrorNoSuch String String [String] -- thing got alts + | MatchErrorIn String String MatchError -- kind thing + deriving (Show, Eq) + + +instance Functor Match where + fmap _ (NoMatch d ms) = NoMatch d ms + fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs) + fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs) + +instance Applicative Match where + pure a = ExactMatch 0 [a] + (<*>) = ap + +instance Alternative Match where + empty = NoMatch 0 [] + (<|>) = matchPlus + +instance Monad Match where + return = pure + NoMatch d ms >>= _ = NoMatch d ms + ExactMatch d xs >>= f = addDepth d + $ msum (map f xs) + InexactMatch d xs >>= f = addDepth d . 
forceInexact + $ msum (map f xs) + +instance MonadPlus Match where + mzero = empty + mplus = matchPlus + +() :: Match a -> Match a -> Match a +() = matchPlusShadowing + +infixl 3 + +addDepth :: Confidence -> Match a -> Match a +addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs +addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs +addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs + +forceInexact :: Match a -> Match a +forceInexact (ExactMatch d ys) = InexactMatch d ys +forceInexact m = m + +-- | Combine two matchers. Exact matches are used over inexact matches +-- but if we have multiple exact, or inexact then the we collect all the +-- ambiguous matches. +-- +matchPlus :: Match a -> Match a -> Match a +matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') = + ExactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a +matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') = + InexactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b +matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms') + | d1 > d2 = a + | d1 < d2 = b + | otherwise = NoMatch d1 (ms ++ ms') + +-- | Combine two matchers. This is similar to 'matchPlus' with the +-- difference that an exact match from the left matcher shadows any exact +-- match on the right. Inexact matches are still collected however. +-- +matchPlusShadowing :: Match a -> Match a -> Match a +matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a +matchPlusShadowing a b = matchPlus a b + + +------------------------------ +-- Various match primitives +-- + +matchErrorExpected :: String -> String -> Match a +matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] + +matchErrorNoSuch :: String -> String -> [String] -> Match a +matchErrorNoSuch thing got alts = NoMatch 0 [MatchErrorNoSuch thing got alts] + +expecting :: String -> String -> Match a -> Match a +expecting thing got (NoMatch 0 _) = matchErrorExpected thing got +expecting _ _ m = m + +orNoSuchThing :: String -> String -> [String] -> Match a -> Match a +orNoSuchThing thing got alts (NoMatch 0 _) = matchErrorNoSuch thing got alts +orNoSuchThing _ _ _ m = m + +orNoThingIn :: String -> String -> Match a -> Match a +orNoThingIn kind name (NoMatch n ms) = + NoMatch n [ MatchErrorIn kind name m | m <- ms ] +orNoThingIn _ _ m = m + +increaseConfidence :: Match () +increaseConfidence = ExactMatch 1 [()] + +increaseConfidenceFor :: Match a -> Match a +increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r + +nubMatchesBy :: (a -> a -> Bool) -> Match a -> Match a +nubMatchesBy _ (NoMatch d msgs) = NoMatch d msgs +nubMatchesBy eq (ExactMatch d xs) = ExactMatch d (nubBy eq xs) +nubMatchesBy eq (InexactMatch d xs) = InexactMatch d (nubBy eq xs) + +nubMatchErrors :: Match a -> Match a +nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs) +nubMatchErrors (ExactMatch d xs) = ExactMatch d xs +nubMatchErrors (InexactMatch d xs) = InexactMatch d xs + +-- | Lift a list of matches to an exact match. 
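+-- (Illustrative note, not part of the upstream patch: lifting the empty list
+-- gives 'mzero', so e.g. @findMatch (exactMatches [])@ is @None []@, while
+-- lifting several values keeps them all as equally good candidates, so
+-- @findMatch (exactMatches "ab")@ is @Ambiguous True "ab"@.)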
+-- +exactMatches, inexactMatches :: [a] -> Match a + +exactMatches [] = mzero +exactMatches xs = ExactMatch 0 xs + +inexactMatches [] = mzero +inexactMatches xs = InexactMatch 0 xs + +tryEach :: [a] -> Match a +tryEach = exactMatches + + +------------------------------ +-- Top level match runner +-- + +-- | Given a matcher and a key to look up, use the matcher to find all the +-- possible matches. There may be 'None', a single 'Unambiguous' match or +-- you may have an 'Ambiguous' match with several possibilities. +-- +findMatch :: Match a -> MaybeAmbiguous a +findMatch match = + case nubMatchErrors match of + NoMatch _ msgs -> None msgs + ExactMatch _ [x] -> Unambiguous x + InexactMatch _ [x] -> Unambiguous x + ExactMatch _ [] -> error "findMatch: impossible: ExactMatch []" + InexactMatch _ [] -> error "findMatch: impossible: InexactMatch []" + ExactMatch _ xs -> Ambiguous True xs + InexactMatch _ xs -> Ambiguous False xs + +data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous Bool [a] + deriving Show + + +------------------------------ +-- Basic matchers +-- + +-- | A primitive matcher that looks up a value in a finite 'Map'. The +-- value must match exactly. +-- +matchExactly :: Ord k => (a -> k) -> [a] -> (k -> Match a) +matchExactly key xs = + \k -> case Map.lookup k m of + Nothing -> mzero + Just ys -> exactMatches ys + where + m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] + +-- | A primitive matcher that looks up a value in a finite 'Map'. It checks +-- for an exact or inexact match. We get an inexact match if the match +-- is not exact, but the canonical forms match. It takes a canonicalisation +-- function for this purpose. +-- +-- So for example if we used string case fold as the canonicalisation +-- function, then we would get case insensitive matching (but it will still +-- report an exact match when the case matches too). +-- +matchInexactly :: (Ord k, Ord k') => (k -> k') -> (a -> k) + -> [a] -> (k -> Match a) +matchInexactly cannonicalise key xs = + \k -> case Map.lookup k m of + Just ys -> exactMatches ys + Nothing -> case Map.lookup (cannonicalise k) m' of + Just ys -> inexactMatches ys + Nothing -> mzero + where + m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] + + -- the map of canonicalised keys to groups of inexact matches + m' = Map.mapKeysWith (++) cannonicalise m + + +------------------------------ +-- Utils +-- + +caseFold :: String -> String +caseFold = lowercase + + +------------------------------ +-- Example inputs +-- + +{- +ex1pinfo :: [PackageInfo] +ex1pinfo = + [ PackageInfo { + pinfoName = PackageName "foo", + pinfoDirectory = Just "/the/foo", + pinfoPackageFile = Just "/the/foo/foo.cabal", + pinfoComponents = [] + } + , PackageInfo { + pinfoName = PackageName "bar", + pinfoDirectory = Just "/the/bar", + pinfoPackageFile = Just "/the/bar/bar.cabal", + pinfoComponents = [] + } + ] +-} +{- +stargets = + [ BuildTargetComponent (CExeName "foo") + , BuildTargetModule (CExeName "foo") (mkMn "Foo") + , BuildTargetModule (CExeName "tst") (mkMn "Foo") + ] + where + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + +ex_pkgid :: PackageIdentifier +Just ex_pkgid = simpleParse "thelib" +-} + +{- +ex_cs :: [ComponentInfo] +ex_cs = + [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) + , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) + ] + where + mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) + mkMn :: String -> ModuleName + mkMn = fromJust . 
simpleParse + pkgid :: PackageIdentifier + Just pkgid = simpleParse "thelib" +-} + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Check.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Check.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Check.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,89 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Check +-- Copyright : (c) Lennart Kolmodin 2008 +-- License : BSD-like +-- +-- Maintainer : kolmodin@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Check a package for common mistakes +-- +----------------------------------------------------------------------------- +module Distribution.Client.Check ( + check + ) where + +import Control.Monad ( when, unless ) + +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.PackageDescription.Check +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Simple.Utils + ( defaultPackageDesc, toUTF8, wrapText ) + +check :: Verbosity -> IO Bool +check verbosity = do + pdfile <- defaultPackageDesc verbosity + ppd <- readPackageDescription verbosity pdfile + -- flatten the generic package description into a regular package + -- description + -- TODO: this may give more warnings than it should give; + -- consider two branches of a condition, one saying + -- ghc-options: -Wall + -- and the other + -- ghc-options: -Werror + -- joined into + -- ghc-options: -Wall -Werror + -- checkPackages will yield a warning on the last line, but it + -- would not on each individual branch. + -- Hovever, this is the same way hackage does it, so we will yield + -- the exact same errors as it will. + let pkg_desc = flattenPackageDescription ppd + ioChecks <- checkPackageFiles pkg_desc "." 
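+ -- (Illustrative sketch of the flattening caveat above, not part of the
+ -- upstream patch: a description containing
+ --     if flag(strict)
+ --       ghc-options: -Werror
+ --     else
+ --       ghc-options: -Wall
+ -- is flattened to the single field @ghc-options: -Wall -Werror@, so the
+ -- -Werror check fires here even though no individual branch enables both
+ -- options.)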
+ let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) + buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] + buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] + distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] + ++ [ x | x@PackageDistSuspiciousWarn {} <- packageChecks ] + distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] + + unless (null buildImpossible) $ do + putStrLn "The package will not build sanely due to these errors:" + printCheckMessages buildImpossible + + unless (null buildWarning) $ do + putStrLn "The following warnings are likely to affect your build negatively:" + printCheckMessages buildWarning + + unless (null distSuspicious) $ do + putStrLn "These warnings may cause trouble when distributing the package:" + printCheckMessages distSuspicious + + unless (null distInexusable) $ do + putStrLn "The following errors will cause portability problems on other environments:" + printCheckMessages distInexusable + + let isDistError (PackageDistSuspicious {}) = False + isDistError (PackageDistSuspiciousWarn {}) = False + isDistError _ = True + isCheckError (PackageDistSuspiciousWarn {}) = False + isCheckError _ = True + errors = filter isDistError packageChecks + + unless (null errors) $ + putStrLn "Hackage would reject this package." + + when (null packageChecks) $ + putStrLn "No errors or warnings could be found in the package." + + return (null . filter isCheckError $ packageChecks) + + where + printCheckMessages = mapM_ (putStrLn . format . explanation) + format = toUTF8 . wrapText . ("* "++) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdBuild.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdBuild.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdBuild.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdBuild.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,70 @@ +{-# LANGUAGE NamedFieldPuns #-} + +-- | cabal-install CLI command: build +-- +module Distribution.Client.CmdBuild ( + buildAction, + ) where + +import Distribution.Client.ProjectOrchestration + ( PreBuildHooks(..), runProjectPreBuildPhase, selectTargets + , ProjectBuildContext(..), runProjectBuildPhase + , printPlan, reportBuildFailures ) +import Distribution.Client.ProjectConfig + ( BuildTimeSettings(..) ) +import Distribution.Client.ProjectPlanning + ( PackageTarget(..) ) +import Distribution.Client.BuildTarget + ( readUserBuildTargets ) + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Verbosity + ( normal ) + +import Control.Monad (unless) + + +-- | The @build@ command does a lot. It brings the install plan up to date, +-- selects that part of the plan needed by the given or implicit targets and +-- then executes the plan. 
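+--
+-- (Illustrative usage, not part of the upstream patch: invoked with no
+-- target arguments it builds the default components of the local packages
+-- in the project, while a target string such as @exe:foo@ (a hypothetical
+-- executable name) narrows the plan to that component and its
+-- dependencies.)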
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +buildAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +buildAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + + userTargets <- readUserBuildTargets targetStrings + + buildCtx@ProjectBuildContext{buildSettings} <- + runProjectPreBuildPhase + verbosity + ( globalFlags, configFlags, configExFlags + , installFlags, haddockFlags ) + PreBuildHooks { + hookPrePlanning = \_ _ _ -> return (), + hookSelectPlanSubset = selectBuildTargets userTargets + } + + printPlan verbosity buildCtx + + unless (buildSettingDryRun buildSettings) $ do + plan <- runProjectBuildPhase + verbosity + buildCtx + reportBuildFailures plan + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + + -- When we interpret the targets on the command line, interpret them as + -- build targets (as opposed to say repl or haddock targets). + selectBuildTargets = + selectTargets + BuildDefaultComponents + BuildSpecificComponent + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdConfigure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdConfigure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdConfigure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdConfigure.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,60 @@ +-- | cabal-install CLI command: configure +-- +module Distribution.Client.CmdConfigure ( + configureAction, + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.ProjectConfig + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Verbosity + ( normal ) + + +-- | To a first approximation, the @configure@ command just runs the first phase of +-- the @build@ command where we bring the install plan up to date (thus +-- checking that it's possible). +-- +-- The only difference is that @configure@ also allows the user to specify +-- some extra config flags which we save in the file @cabal.project.local@. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +configureAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +configureAction (configFlags, configExFlags, installFlags, haddockFlags) + _extraArgs globalFlags = do + --TODO: deal with _extraArgs, since flags with wrong syntax end up there + + buildCtx <- + runProjectPreBuildPhase + verbosity + ( globalFlags, configFlags, configExFlags + , installFlags, haddockFlags ) + PreBuildHooks { + hookPrePlanning = \projectRootDir _ cliConfig -> + -- Write out the @cabal.project.local@ so it gets picked up by the + -- planning phase. + writeProjectLocalExtraConfig projectRootDir cliConfig, + + hookSelectPlanSubset = return + } + + --TODO: Hmm, but we don't have any targets. Currently this prints what we + -- would build if we were to build everything. Could pick implicit target like "." + --TODO: should we say what's in the project (+deps) as a whole?
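+ -- (Illustrative note, not part of the upstream patch: the practical effect
+ -- is that configure-time flags such as @--enable-tests@ are written to
+ -- @cabal.project.local@ by the pre-planning hook above, the dry-run plan
+ -- printed below already reflects them, and subsequent build runs pick the
+ -- same file up automatically.)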
+ printPlan + verbosity + buildCtx { + buildSettings = (buildSettings buildCtx) { + buildSettingDryRun = True + } + } + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdRepl.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdRepl.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/CmdRepl.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/CmdRepl.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,74 @@ +{-# LANGUAGE NamedFieldPuns #-} + +-- | cabal-install CLI command: repl +-- +module Distribution.Client.CmdRepl ( + replAction, + ) where + +import Distribution.Client.ProjectOrchestration + ( PreBuildHooks(..), runProjectPreBuildPhase, selectTargets + , ProjectBuildContext(..), runProjectBuildPhase + , printPlan, reportBuildFailures ) +import Distribution.Client.ProjectConfig + ( BuildTimeSettings(..) ) +import Distribution.Client.ProjectPlanning + ( PackageTarget(..) ) +import Distribution.Client.BuildTarget + ( readUserBuildTargets ) + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Verbosity + ( normal ) + +import Control.Monad (unless) + + +-- | The @repl@ command is very much like @build@. It brings the install plan +-- up to date, selects that part of the plan needed by the given or implicit +-- repl target and then executes the plan. +-- +-- Compared to @build@ the difference is that only one target is allowed +-- (given or implicit) and the target type is repl rather than build. The +-- general plan execution infrastructure handles both build and repl targets. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +replAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +replAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + + userTargets <- readUserBuildTargets targetStrings + + buildCtx@ProjectBuildContext{buildSettings} <- + runProjectPreBuildPhase + verbosity + ( globalFlags, configFlags, configExFlags + , installFlags, haddockFlags ) + PreBuildHooks { + hookPrePlanning = \_ _ _ -> return (), + hookSelectPlanSubset = selectReplTargets userTargets + } + + printPlan verbosity buildCtx + + unless (buildSettingDryRun buildSettings) $ do + plan <- runProjectBuildPhase + verbosity + buildCtx + reportBuildFailures plan + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + + -- When we interpret the targets on the command line, interpret them as + -- repl targets (as opposed to say build or haddock targets). 
+ selectReplTargets = + selectTargets + ReplDefaultComponent + ReplSpecificComponent + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/ExecutablePath.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/ExecutablePath.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/ExecutablePath.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/ExecutablePath.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,183 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +{-# LANGUAGE CPP #-} + +-- Copied verbatim from base-4.6.0.0. We can't simply import +-- System.Environment.getExecutablePath because we need compatibility with older +-- GHCs. + +module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where + +-- The imports are purposely kept completely disjoint to prevent edits +-- to one OS implementation from breaking another. + +#if defined(darwin_HOST_OS) +import Data.Word +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Ptr +import Foreign.Storable +import System.Posix.Internals +#elif defined(linux_HOST_OS) +import Foreign.C +import Foreign.Marshal.Array +import System.Posix.Internals +#elif defined(mingw32_HOST_OS) +import Data.Word +import Foreign.C +import Foreign.Marshal.Array +import Foreign.Ptr +import System.Posix.Internals +#else +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Ptr +import Foreign.Storable +import System.Posix.Internals +#endif + +-- GHC 7.0.* compatibility. 'System.Posix.Internals' in base-4.3.* doesn't +-- provide 'peekFilePath' and 'peekFilePathLen'. +#if !MIN_VERSION_base(4,4,0) +#ifdef mingw32_HOST_OS + +peekFilePath :: CWString -> IO FilePath +peekFilePath = peekCWString + +#else + +peekFilePath :: CString -> IO FilePath +peekFilePath = peekCString + +peekFilePathLen :: CStringLen -> IO FilePath +peekFilePathLen = peekCStringLen + +#endif +#endif + +-- The exported function is defined outside any if-guard to make sure +-- every OS implements it with the same type. + +-- | Returns the absolute pathname of the current executable. +-- +-- Note that for scripts and interactive sessions, this is the path to +-- the interpreter (e.g. ghci.) +-- +-- /Since: 4.6.0.0/ +getExecutablePath :: IO FilePath + +-------------------------------------------------------------------------------- +-- Mac OS X + +#if defined(darwin_HOST_OS) + +type UInt32 = Word32 + +foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" + c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt + +-- | Returns the path of the main executable. The path may be a +-- symbolic link and not the real file. 
+-- +-- See dyld(3) +_NSGetExecutablePath :: IO FilePath +_NSGetExecutablePath = + allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X + alloca $ \ bufsize -> do + poke bufsize 1024 + status <- c__NSGetExecutablePath buf bufsize + if status == 0 + then peekFilePath buf + else do reqBufsize <- fromIntegral `fmap` peek bufsize + allocaBytes reqBufsize $ \ newBuf -> do + status2 <- c__NSGetExecutablePath newBuf bufsize + if status2 == 0 + then peekFilePath newBuf + else error "_NSGetExecutablePath: buffer too small" + +foreign import ccall unsafe "stdlib.h realpath" + c_realpath :: CString -> CString -> IO CString + +-- | Resolves all symbolic links, extra \/ characters, and references +-- to \/.\/ and \/..\/. Returns an absolute pathname. +-- +-- See realpath(3) +realpath :: FilePath -> IO FilePath +realpath path = + withFilePath path $ \ fileName -> + allocaBytes 1024 $ \ resolvedName -> do + _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName + peekFilePath resolvedName + +getExecutablePath = _NSGetExecutablePath >>= realpath + +-------------------------------------------------------------------------------- +-- Linux + +#elif defined(linux_HOST_OS) + +foreign import ccall unsafe "readlink" + c_readlink :: CString -> CString -> CSize -> IO CInt + +-- | Reads the @FilePath@ pointed to by the symbolic link and returns +-- it. +-- +-- See readlink(2) +readSymbolicLink :: FilePath -> IO FilePath +readSymbolicLink file = + allocaArray0 4096 $ \buf -> do + withFilePath file $ \s -> do + len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ + c_readlink s buf 4096 + peekFilePathLen (buf,fromIntegral len) + +getExecutablePath = readSymbolicLink $ "/proc/self/exe" + +-------------------------------------------------------------------------------- +-- Windows + +#elif defined(mingw32_HOST_OS) + +# if defined(i386_HOST_ARCH) +# define WINDOWS_CCONV stdcall +# elif defined(x86_64_HOST_ARCH) +# define WINDOWS_CCONV ccall +# else +# error Unknown mingw32 arch +# endif + +foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" + c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 + +getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 + where + go size = allocaArray (fromIntegral size) $ \ buf -> do + ret <- c_GetModuleFileName nullPtr buf size + case ret of + 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" + _ | ret < size -> peekFilePath buf + | otherwise -> go (size * 2) + +-------------------------------------------------------------------------------- +-- Fallback to argv[0] + +#else + +foreign import ccall unsafe "getFullProgArgv" + c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () + +getExecutablePath = + alloca $ \ p_argc -> + alloca $ \ p_argv -> do + c_getFullProgArgv p_argc p_argv + argc <- peek p_argc + if argc > 0 + -- If argc > 0 then argv[0] is guaranteed by the standard + -- to be a pointer to a null-terminated string. 
+ then peek p_argv >>= peek >>= peekFilePath + else error $ "getExecutablePath: " ++ msg + where msg = "no OS specific implementation and program name couldn't be " ++ + "found in argv" + +-------------------------------------------------------------------------------- + +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/FilePerms.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/FilePerms.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/FilePerms.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/FilePerms.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,36 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Client.Compat.FilePerms ( + setFileOrdinary, + setFileExecutable, + setFileHidden, + ) where + +#ifndef mingw32_HOST_OS +import System.Posix.Types + ( FileMode ) +import System.Posix.Internals + ( c_chmod ) +import Foreign.C + ( withCString ) +import Foreign.C + ( throwErrnoPathIfMinus1_ ) +#else +import System.Win32.File (setFileAttributes, fILE_ATTRIBUTE_HIDDEN) +#endif /* mingw32_HOST_OS */ + +setFileHidden, setFileOrdinary, setFileExecutable :: FilePath -> IO () +#ifndef mingw32_HOST_OS +setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- +setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x +setFileHidden _ = return () + +setFileMode :: FilePath -> FileMode -> IO () +setFileMode name m = + withCString name $ \s -> + throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) +#else +setFileOrdinary _ = return () +setFileExecutable _ = return () +setFileHidden path = setFileAttributes path fILE_ATTRIBUTE_HIDDEN +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Process.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Process.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Process.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Process.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,48 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Compat.Process +-- Copyright : (c) 2013 Liu Hao, Brent Yorgey +-- License : BSD-style (see the file LICENSE) +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Cross-platform utilities for invoking processes. +-- +----------------------------------------------------------------------------- + +module Distribution.Client.Compat.Process ( + readProcessWithExitCode +) where + +#if !MIN_VERSION_base(4,6,0) +import Prelude hiding (catch) +#endif + +import Control.Exception (catch, throw) +import System.Exit (ExitCode (ExitFailure)) +import System.IO.Error (isDoesNotExistError) +import qualified System.Process as P + +-- | @readProcessWithExitCode@ creates an external process, reads its +-- standard output and standard error strictly, waits until the +-- process terminates, and then returns the @ExitCode@ of the +-- process, the standard output, and the standard error. 
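+--
+-- (Illustrative example, not part of the upstream patch: a call such as
+--
+-- > readProcessWithExitCode "no-such-program-xyz" [] ""
+--
+-- never propagates a \"does not exist\" 'IOError' to the caller; where the
+-- underlying @System.Process@ version would throw, this wrapper returns
+-- @(ExitFailure 127, "", "")@ instead.)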
+-- +-- See the documentation of the version from @System.Process@ for +-- more information. +-- +-- The version from @System.Process@ behaves inconsistently across +-- platforms when an executable with the given name is not found: in +-- some cases it returns an @ExitFailure@, in others it throws an +-- exception. This variant catches \"does not exist\" exceptions and +-- turns them into @ExitFailure@s. +readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) +readProcessWithExitCode cmd args input = + P.readProcessWithExitCode cmd args input + `catch` \e -> if isDoesNotExistError e + then return (ExitFailure 127, "", "") + else throw e diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Semaphore.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Semaphore.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Semaphore.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Semaphore.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,104 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# OPTIONS_GHC -funbox-strict-fields #-} +module Distribution.Client.Compat.Semaphore + ( QSem + , newQSem + , waitQSem + , signalQSem + ) where + +import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, + writeTVar) +import Control.Exception (mask_, onException) +import Control.Monad (join, when) +import Data.Typeable (Typeable) + +-- | 'QSem' is a quantity semaphore in which the resource is aqcuired +-- and released in units of one. It provides guaranteed FIFO ordering +-- for satisfying blocked `waitQSem` calls. +-- +data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) + deriving (Eq, Typeable) + +newQSem :: Int -> IO QSem +newQSem i = atomically $ do + q <- newTVar i + b1 <- newTVar [] + b2 <- newTVar [] + return (QSem q b1 b2) + +waitQSem :: QSem -> IO () +waitQSem s@(QSem q _b1 b2) = + mask_ $ join $ atomically $ do + -- join, because if we need to block, we have to add a TVar to + -- the block queue. + -- mask_, because we need a chance to set up an exception handler + -- after the join returns. + v <- readTVar q + if v == 0 + then do b <- newTVar False + ys <- readTVar b2 + writeTVar b2 (b:ys) + return (wait b) + else do writeTVar q $! v - 1 + return (return ()) + where + -- + -- very careful here: if we receive an exception, then we need to + -- (a) write True into the TVar, so that another signalQSem doesn't + -- try to wake up this thread, and + -- (b) if the TVar is *already* True, then we need to do another + -- signalQSem to avoid losing a unit of the resource. + -- + -- The 'wake' function does both (a) and (b), so we can just call + -- it here. + -- + wait t = + flip onException (wake s t) $ + atomically $ do + b <- readTVar t + when (not b) retry + + +wake :: QSem -> TVar Bool -> IO () +wake s x = join $ atomically $ do + b <- readTVar x + if b then return (signalQSem s) + else do writeTVar x True + return (return ()) + +{- + property we want: + + bracket waitQSem (\_ -> signalQSem) (\_ -> ...) + + never loses a unit of the resource. 
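+
+ A well-typed rendition of that pattern (illustrative only, not part of the
+ upstream patch), using 'bracket_' from "Control.Exception", with
+ 'criticalSection' standing in for any IO action:
+
+ do sem <- newQSem 2
+ bracket_ (waitQSem sem) (signalQSem sem) criticalSection
+
+ The unit is handed back even if 'criticalSection' throws, which is exactly
+ the property stated above.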
+-} + +signalQSem :: QSem -> IO () +signalQSem s@(QSem q b1 b2) = + mask_ $ join $ atomically $ do + -- join, so we don't force the reverse inside the txn + -- mask_ is needed so we don't lose a wakeup + v <- readTVar q + if v /= 0 + then do writeTVar q $! v + 1 + return (return ()) + else do xs <- readTVar b1 + checkwake1 xs + where + checkwake1 [] = do + ys <- readTVar b2 + checkwake2 ys + checkwake1 (x:xs) = do + writeTVar b1 xs + return (wake s x) + + checkwake2 [] = do + writeTVar q 1 + return (return ()) + checkwake2 ys = do + let (z:zs) = reverse ys + writeTVar b1 zs + writeTVar b2 [] + return (wake s z) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Time.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Time.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Compat/Time.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,167 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface, GeneralizedNewtypeDeriving #-} +module Distribution.Client.Compat.Time + ( ModTime(..) -- Needed for testing + , getModTime, getFileAge, getCurTime + , posixSecondsToModTime ) + where + +import Control.Arrow ( first ) +import Data.Int ( Int64 ) +import Data.Word ( Word64 ) +import System.Directory ( getModificationTime ) + +import Distribution.Compat.Binary ( Binary ) + +import Data.Time.Clock.POSIX ( POSIXTime, getPOSIXTime ) +#if MIN_VERSION_directory(1,2,0) +import Data.Time.Clock.POSIX ( posixDayLength ) +import Data.Time ( diffUTCTime, getCurrentTime ) +#else +import System.Time ( getClockTime, diffClockTimes + , normalizeTimeDiff, tdDay, tdHour ) +#endif + +#if defined mingw32_HOST_OS + +import Data.Bits ((.|.), unsafeShiftL) +#if MIN_VERSION_base(4,7,0) +import Data.Bits (finiteBitSize) +#else +import Data.Bits (bitSize) +#endif + +import Data.Int ( Int32 ) +import Foreign ( allocaBytes, peekByteOff ) +import System.IO.Error ( mkIOError, doesNotExistErrorType ) +import System.Win32.Types ( BOOL, DWORD, LPCTSTR, LPVOID, withTString ) + +#else + +import System.Posix.Files ( FileStatus, getFileStatus ) + +#if MIN_VERSION_unix(2,6,0) +import System.Posix.Files ( modificationTimeHiRes ) +#else +import System.Posix.Files ( modificationTime ) +#endif + +#endif + +-- | An opaque type representing a file's modification time, represented +-- internally as a 64-bit unsigned integer in the Windows UTC format. +newtype ModTime = ModTime Word64 + deriving (Binary, Bounded, Eq, Ord) + +instance Show ModTime where + show (ModTime x) = show x + +instance Read ModTime where + readsPrec p str = map (first ModTime) (readsPrec p str) + +-- | Return modification time of the given file. Works around the low clock +-- resolution problem that 'getModificationTime' has on GHC < 7.8. +-- +-- This is a modified version of the code originally written for Shake by Neil +-- Mitchell. See module Development.Shake.FileInfo. +getModTime :: FilePath -> IO ModTime + +#if defined mingw32_HOST_OS + +-- Directly against the Win32 API. 
+getModTime path = allocaBytes size_WIN32_FILE_ATTRIBUTE_DATA $ \info -> do + res <- getFileAttributesEx path info + if not res + then do + let err = mkIOError doesNotExistErrorType + "Distribution.Client.Compat.Time.getModTime" + Nothing (Just path) + ioError err + else do + dwLow <- peekByteOff info + index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime + dwHigh <- peekByteOff info + index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime +#if MIN_VERSION_base(4,7,0) + let qwTime = + (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` finiteBitSize dwHigh) + .|. (fromIntegral (dwLow :: DWORD)) +#else + let qwTime = + (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` bitSize dwHigh) + .|. (fromIntegral (dwLow :: DWORD)) +#endif + return $! ModTime (qwTime :: Word64) + +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV "windows.h GetFileAttributesExW" + c_getFileAttributesEx :: LPCTSTR -> Int32 -> LPVOID -> IO BOOL + +getFileAttributesEx :: String -> LPVOID -> IO BOOL +getFileAttributesEx path lpFileInformation = + withTString path $ \c_path -> + c_getFileAttributesEx c_path getFileExInfoStandard lpFileInformation + +getFileExInfoStandard :: Int32 +getFileExInfoStandard = 0 + +size_WIN32_FILE_ATTRIBUTE_DATA :: Int +size_WIN32_FILE_ATTRIBUTE_DATA = 36 + +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime :: Int +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime = 20 + +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime :: Int +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime = 24 + +#else + +-- Directly against the unix library. +getModTime path = do + st <- getFileStatus path + return $! (extractFileTime st) + +extractFileTime :: FileStatus -> ModTime +#if MIN_VERSION_unix(2,6,0) +extractFileTime x = posixTimeToModTime (modificationTimeHiRes x) +#else +extractFileTime x = posixSecondsToModTime $ fromIntegral $ fromEnum $ + modificationTime x +#endif + +#endif + +windowsTick, secToUnixEpoch :: Word64 +windowsTick = 10000000 +secToUnixEpoch = 11644473600 + +-- | Convert POSIX seconds to ModTime. +posixSecondsToModTime :: Int64 -> ModTime +posixSecondsToModTime s = + ModTime $ ((fromIntegral s :: Word64) + secToUnixEpoch) * windowsTick + +-- | Convert 'POSIXTime' to 'ModTime'. +posixTimeToModTime :: POSIXTime -> ModTime +posixTimeToModTime p = ModTime $ (ceiling $ p * 1e7) -- 100 ns precision + + (secToUnixEpoch * windowsTick) + +-- | Return age of given file in days. +getFileAge :: FilePath -> IO Double +getFileAge file = do + t0 <- getModificationTime file +#if MIN_VERSION_directory(1,2,0) + t1 <- getCurrentTime + return $ realToFrac (t1 `diffUTCTime` t0) / realToFrac posixDayLength +#else + t1 <- getClockTime + let dt = normalizeTimeDiff (t1 `diffClockTimes` t0) + return $ fromIntegral ((24 * tdDay dt) + tdHour dt) / 24.0 +#endif + +-- | Return the current time as 'ModTime'. +getCurTime :: IO ModTime +getCurTime = posixTimeToModTime `fmap` getPOSIXTime -- Uses 'gettimeofday'. 
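The ModTime value above is stored as Windows FILETIME ticks, i.e. 100-nanosecond units counted from 1601-01-01, which is why the conversion first adds the 1601-to-1970 offset and then scales. A minimal standalone sketch of that arithmetic (illustrative only, not part of the patch; posixSecondsToTicks is a hypothetical name mirroring posixSecondsToModTime):

import Data.Int (Int64)
import Data.Word (Word64)

-- Shift from the Unix epoch (1970) to the Windows epoch (1601), then
-- scale whole seconds to 100-nanosecond ticks, as in posixSecondsToModTime.
posixSecondsToTicks :: Int64 -> Word64
posixSecondsToTicks s = (fromIntegral s + 11644473600) * 10000000

main :: IO ()
main = print (posixSecondsToTicks 0)  -- 116444736000000000, the Unix epoch

The same two constants appear above as secToUnixEpoch and windowsTick.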
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ComponentDeps.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ComponentDeps.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ComponentDeps.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ComponentDeps.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,161 @@ +-- | Fine-grained package dependencies +-- +-- Like many others, this module is meant to be "double-imported": +-- +-- > import Distribution.Client.ComponentDeps ( +-- > Component +-- > , ComponentDep +-- > , ComponentDeps +-- > ) +-- > import qualified Distribution.Client.ComponentDeps as CD +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Client.ComponentDeps ( + -- * Fine-grained package dependencies + Component(..) + , ComponentDep + , ComponentDeps -- opaque + -- ** Constructing ComponentDeps + , empty + , fromList + , singleton + , insert + , filterDeps + , fromLibraryDeps + , fromSetupDeps + , fromInstalled + -- ** Deconstructing ComponentDeps + , toList + , flatDeps + , nonSetupDeps + , libraryDeps + , setupDeps + , select + ) where + +import Data.Map (Map) +import qualified Data.Map as Map +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup (Semigroup((<>))) +import GHC.Generics +import Data.Foldable (fold) + +#if !MIN_VERSION_base(4,8,0) +import Data.Foldable (Foldable(foldMap)) +import Data.Monoid (Monoid(..)) +import Data.Traversable (Traversable(traverse)) +#endif + +{------------------------------------------------------------------------------- + Types +-------------------------------------------------------------------------------} + +-- | Component of a package. +data Component = + ComponentLib + | ComponentExe String + | ComponentTest String + | ComponentBench String + | ComponentSetup + deriving (Show, Eq, Ord, Generic) + +instance Binary Component + +-- | Dependency for a single component. +type ComponentDep a = (Component, a) + +-- | Fine-grained dependencies for a package. +-- +-- Typically used as @ComponentDeps [Dependency]@, to represent the list of +-- dependencies for each named component within a package. +-- +newtype ComponentDeps a = ComponentDeps { unComponentDeps :: Map Component a } + deriving (Show, Functor, Eq, Ord, Generic) + +instance Semigroup a => Monoid (ComponentDeps a) where + mempty = ComponentDeps Map.empty + mappend = (<>) + +instance Semigroup a => Semigroup (ComponentDeps a) where + ComponentDeps d <> ComponentDeps d' = + ComponentDeps (Map.unionWith (<>) d d') + +instance Foldable ComponentDeps where + foldMap f = foldMap f . unComponentDeps + +instance Traversable ComponentDeps where + traverse f = fmap ComponentDeps . traverse f . unComponentDeps + +instance Binary a => Binary (ComponentDeps a) + +{------------------------------------------------------------------------------- + Construction +-------------------------------------------------------------------------------} + +empty :: ComponentDeps a +empty = ComponentDeps $ Map.empty + +fromList :: Monoid a => [ComponentDep a] -> ComponentDeps a +fromList = ComponentDeps . Map.fromListWith mappend + +singleton :: Component -> a -> ComponentDeps a +singleton comp = ComponentDeps . 
Map.singleton comp + +insert :: Monoid a => Component -> a -> ComponentDeps a -> ComponentDeps a +insert comp a = ComponentDeps . Map.alter aux comp . unComponentDeps + where + aux Nothing = Just a + aux (Just a') = Just $ a `mappend` a' + +-- | Keep only selected components (and their associated deps info). +filterDeps :: (Component -> a -> Bool) -> ComponentDeps a -> ComponentDeps a +filterDeps p = ComponentDeps . Map.filterWithKey p . unComponentDeps + +-- | ComponentDeps containing library dependencies only +fromLibraryDeps :: a -> ComponentDeps a +fromLibraryDeps = singleton ComponentLib + +-- | ComponentDeps containing setup dependencies only. +fromSetupDeps :: a -> ComponentDeps a +fromSetupDeps = singleton ComponentSetup + +-- | ComponentDeps for installed packages. +-- +-- We assume that installed packages only record their library dependencies. +fromInstalled :: a -> ComponentDeps a +fromInstalled = fromLibraryDeps + +{------------------------------------------------------------------------------- + Deconstruction +-------------------------------------------------------------------------------} + +toList :: ComponentDeps a -> [ComponentDep a] +toList = Map.toList . unComponentDeps + +-- | All dependencies of a package. +-- +-- This is just a synonym for 'fold', but perhaps a use of 'flatDeps' is more +-- obvious than a use of 'fold', and moreover this avoids introducing lots of +-- @#ifdef@s for 7.10 just for the use of 'fold'. +flatDeps :: Monoid a => ComponentDeps a -> a +flatDeps = fold + +-- | All dependencies except the setup dependencies. +-- +-- Prior to the introduction of setup dependencies in version 1.24 this +-- would have been _all_ dependencies. +nonSetupDeps :: Monoid a => ComponentDeps a -> a +nonSetupDeps = select (/= ComponentSetup) + +-- | Library dependencies proper only. +libraryDeps :: Monoid a => ComponentDeps a -> a +libraryDeps = select (== ComponentLib) + +-- | Setup dependencies. +setupDeps :: Monoid a => ComponentDeps a -> a +setupDeps = select (== ComponentSetup) + +-- | Select dependencies satisfying a given predicate. +select :: Monoid a => (Component -> Bool) -> ComponentDeps a -> a +select p = foldMap snd . filter (p . fst) . toList diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Config.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Config.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Config.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Config.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,1163 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Config +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Utilities for handling saved state such as known packages, known servers and +-- downloaded packages. 
+----------------------------------------------------------------------------- +module Distribution.Client.Config ( + SavedConfig(..), + loadConfig, + getConfigFilePath, + + showConfig, + showConfigWithComments, + parseConfig, + + defaultCabalDir, + defaultConfigFile, + defaultCacheDir, + defaultCompiler, + defaultLogsDir, + defaultUserInstall, + + baseSavedConfig, + commentSavedConfig, + initialSavedConfig, + configFieldDescriptions, + haddockFlagsFields, + installDirsFields, + withProgramsFields, + withProgramOptionsFields, + userConfigDiff, + userConfigUpdate, + createDefaultConfigFile, + + remoteRepoFields + ) where + +import Distribution.Client.Types + ( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) +import Distribution.Client.Dependency.Types + ( ConstraintSource(..) ) +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand, defaultGlobalFlags + , ConfigExFlags(..), configureExOptions, defaultConfigExFlags + , InstallFlags(..), installOptions, defaultInstallFlags + , UploadFlags(..), uploadCommand + , ReportFlags(..), reportCommand + , showRepo, parseRepo, readRepo ) +import Distribution.Utils.NubList + ( NubList, fromNubList, toNubList, overNubList ) + +import Distribution.Simple.Compiler + ( DebugInfoLevel(..), OptimisationLevel(..) ) +import Distribution.Simple.Setup + ( ConfigFlags(..), configureOptions, defaultConfigFlags + , AllowNewer(..) + , HaddockFlags(..), haddockOptions, defaultHaddockFlags + , installDirsOptions, optionDistPref + , programConfigurationPaths', programConfigurationOptions + , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) +import Distribution.Simple.InstallDirs + ( InstallDirs(..), defaultInstallDirs + , PathTemplate, toPathTemplate ) +import Distribution.ParseUtils + ( FieldDescr(..), liftField + , ParseResult(..), PError(..), PWarning(..) + , locatedErrorMsg, showPWarning + , readFields, warning, lineNo + , simpleField, listField, spaceListField + , parseFilePathQ, parseOptCommaList, parseTokenQ ) +import Distribution.Client.ParseUtils + ( parseFields, ppFields, ppSection ) +import Distribution.Client.HttpUtils + ( isOldHackageURI ) +import qualified Distribution.ParseUtils as ParseUtils + ( Field(..) ) +import qualified Distribution.Text as Text + ( Text(..) ) +import Distribution.Simple.Command + ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) 
+ , viewAsFieldDescr ) +import Distribution.Simple.Program + ( defaultProgramConfiguration ) +import Distribution.Simple.Utils + ( die, notice, warn, lowercase, cabalVersion ) +import Distribution.Compiler + ( CompilerFlavor(..), defaultCompilerFlavor ) +import Distribution.Verbosity + ( Verbosity, normal ) + +import Data.List + ( partition, find, foldl' ) +import Data.Maybe + ( fromMaybe ) +import Control.Monad + ( when, unless, foldM, liftM ) +import qualified Distribution.Compat.ReadP as Parse + ( (<++), option ) +import Distribution.Compat.Semigroup +import qualified Text.PrettyPrint as Disp + ( render, text, empty ) +import Text.PrettyPrint + ( ($+$) ) +import Text.PrettyPrint.HughesPJ + ( text, Doc ) +import System.Directory + ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) +import Network.URI + ( URI(..), URIAuth(..), parseURI ) +import System.FilePath + ( (<.>), (), takeDirectory ) +import System.IO.Error + ( isDoesNotExistError ) +import Distribution.Compat.Environment + ( getEnvironment ) +import Distribution.Compat.Exception + ( catchIO ) +import qualified Paths_cabal_install + ( version ) +import Data.Version + ( showVersion ) +import Data.Char + ( isSpace ) +import qualified Data.Map as M +import Data.Function + ( on ) +import Data.List + ( nubBy ) +import GHC.Generics ( Generic ) + +-- +-- * Configuration saved in the config file +-- + +data SavedConfig = SavedConfig { + savedGlobalFlags :: GlobalFlags, + savedInstallFlags :: InstallFlags, + savedConfigureFlags :: ConfigFlags, + savedConfigureExFlags :: ConfigExFlags, + savedUserInstallDirs :: InstallDirs (Flag PathTemplate), + savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), + savedUploadFlags :: UploadFlags, + savedReportFlags :: ReportFlags, + savedHaddockFlags :: HaddockFlags + } deriving Generic + +instance Monoid SavedConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup SavedConfig where + a <> b = SavedConfig { + savedGlobalFlags = combinedSavedGlobalFlags, + savedInstallFlags = combinedSavedInstallFlags, + savedConfigureFlags = combinedSavedConfigureFlags, + savedConfigureExFlags = combinedSavedConfigureExFlags, + savedUserInstallDirs = combinedSavedUserInstallDirs, + savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, + savedUploadFlags = combinedSavedUploadFlags, + savedReportFlags = combinedSavedReportFlags, + savedHaddockFlags = combinedSavedHaddockFlags + } + where + -- This is ugly, but necessary. If we're mappending two config files, we + -- want the values of the *non-empty* list fields from the second one to + -- *override* the corresponding values from the first one. Default + -- behaviour (concatenation) is confusing and makes some use cases (see + -- #1884) impossible. + -- + -- However, we also want to allow specifying multiple values for a list + -- field in a *single* config file. For example, we want the following to + -- continue to work: + -- + -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ + -- remote-repo: private-collection:http://hackage.local/ + -- + -- So we can't just wrap the list fields inside Flags; we have to do some + -- special-casing just for SavedConfig. + + -- NB: the signature prevents us from using 'combine' on lists. + combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a + combine' field subfield = + (subfield . field $ a) `mappend` (subfield . field $ b) + + combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon) + -> mon + combineMonoid field subfield = + (subfield . 
field $ a) `mappend` (subfield . field $ b) + + lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] + lastNonEmpty' field subfield = + let a' = subfield . field $ a + b' = subfield . field $ b + in case b' of [] -> a' + _ -> b' + + lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) + -> NubList a + lastNonEmptyNL' field subfield = + let a' = subfield . field $ a + b' = subfield . field $ b + in case fromNubList b' of [] -> a' + _ -> b' + + combinedSavedGlobalFlags = GlobalFlags { + globalVersion = combine globalVersion, + globalNumericVersion = combine globalNumericVersion, + globalConfigFile = combine globalConfigFile, + globalSandboxConfigFile = combine globalSandboxConfigFile, + globalConstraintsFile = combine globalConstraintsFile, + globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, + globalCacheDir = combine globalCacheDir, + globalLocalRepos = lastNonEmptyNL globalLocalRepos, + globalLogsDir = combine globalLogsDir, + globalWorldFile = combine globalWorldFile, + globalRequireSandbox = combine globalRequireSandbox, + globalIgnoreSandbox = combine globalIgnoreSandbox, + globalIgnoreExpiry = combine globalIgnoreExpiry, + globalHttpTransport = combine globalHttpTransport + } + where + combine = combine' savedGlobalFlags + lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags + + combinedSavedInstallFlags = InstallFlags { + installDocumentation = combine installDocumentation, + installHaddockIndex = combine installHaddockIndex, + installDryRun = combine installDryRun, + installMaxBackjumps = combine installMaxBackjumps, + installReorderGoals = combine installReorderGoals, + installIndependentGoals = combine installIndependentGoals, + installShadowPkgs = combine installShadowPkgs, + installStrongFlags = combine installStrongFlags, + installReinstall = combine installReinstall, + installAvoidReinstalls = combine installAvoidReinstalls, + installOverrideReinstall = combine installOverrideReinstall, + installUpgradeDeps = combine installUpgradeDeps, + installOnly = combine installOnly, + installOnlyDeps = combine installOnlyDeps, + installRootCmd = combine installRootCmd, + installSummaryFile = lastNonEmptyNL installSummaryFile, + installLogFile = combine installLogFile, + installBuildReports = combine installBuildReports, + installReportPlanningFailure = combine installReportPlanningFailure, + installSymlinkBinDir = combine installSymlinkBinDir, + installOneShot = combine installOneShot, + installNumJobs = combine installNumJobs, + installRunTests = combine installRunTests, + installOfflineMode = combine installOfflineMode + } + where + combine = combine' savedInstallFlags + lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags + + combinedSavedConfigureFlags = ConfigFlags { + configPrograms_ = configPrograms_ . 
savedConfigureFlags $ b, + -- TODO: NubListify + configProgramPaths = lastNonEmpty configProgramPaths, + -- TODO: NubListify + configProgramArgs = lastNonEmpty configProgramArgs, + configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, + configHcFlavor = combine configHcFlavor, + configHcPath = combine configHcPath, + configHcPkg = combine configHcPkg, + configVanillaLib = combine configVanillaLib, + configProfLib = combine configProfLib, + configProf = combine configProf, + configSharedLib = combine configSharedLib, + configDynExe = combine configDynExe, + configProfExe = combine configProfExe, + configProfDetail = combine configProfDetail, + configProfLibDetail = combine configProfLibDetail, + -- TODO: NubListify + configConfigureArgs = lastNonEmpty configConfigureArgs, + configOptimization = combine configOptimization, + configDebugInfo = combine configDebugInfo, + configProgPrefix = combine configProgPrefix, + configProgSuffix = combine configProgSuffix, + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. + configInstallDirs = + (configInstallDirs . savedConfigureFlags $ a) + `mappend` (configInstallDirs . savedConfigureFlags $ b), + configScratchDir = combine configScratchDir, + -- TODO: NubListify + configExtraLibDirs = lastNonEmpty configExtraLibDirs, + -- TODO: NubListify + configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs, + -- TODO: NubListify + configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, + configIPID = combine configIPID, + configDistPref = combine configDistPref, + configVerbosity = combine configVerbosity, + configUserInstall = combine configUserInstall, + -- TODO: NubListify + configPackageDBs = lastNonEmpty configPackageDBs, + configGHCiLib = combine configGHCiLib, + configSplitObjs = combine configSplitObjs, + configStripExes = combine configStripExes, + configStripLibs = combine configStripLibs, + -- TODO: NubListify + configConstraints = lastNonEmpty configConstraints, + -- TODO: NubListify + configDependencies = lastNonEmpty configDependencies, + -- TODO: NubListify + configConfigurationsFlags = lastNonEmpty configConfigurationsFlags, + configTests = combine configTests, + configBenchmarks = combine configBenchmarks, + configCoverage = combine configCoverage, + configLibCoverage = combine configLibCoverage, + configExactConfiguration = combine configExactConfiguration, + configFlagError = combine configFlagError, + configRelocatable = combine configRelocatable, + configAllowNewer = combineMonoid savedConfigureFlags + configAllowNewer + } + where + combine = combine' savedConfigureFlags + lastNonEmpty = lastNonEmpty' savedConfigureFlags + lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags + + combinedSavedConfigureExFlags = ConfigExFlags { + configCabalVersion = combine configCabalVersion, + -- TODO: NubListify + configExConstraints = lastNonEmpty configExConstraints, + -- TODO: NubListify + configPreferences = lastNonEmpty configPreferences, + configSolver = combine configSolver + } + where + combine = combine' savedConfigureExFlags + lastNonEmpty = lastNonEmpty' savedConfigureExFlags + + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. + combinedSavedUserInstallDirs = savedUserInstallDirs a + `mappend` savedUserInstallDirs b + + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. 
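-- [Editorial aside -- illustrative only, not part of the packaged diff.]
-- The field combinators above lean on the two rules described in this
-- instance's comments: 'Flag'-valued fields are merged with 'mappend', where
-- the right-hand value wins whenever it is set, and list-valued fields use
-- the "last non-empty list wins" rule.  A hypothetical GHCi-style sketch of
-- both, assuming the usual 'Flag' constructors from Distribution.Simple.Setup:
--
-- > toFlag "/cache/a" `mappend` toFlag "/cache/b"  ==  toFlag "/cache/b"
-- > toFlag "/cache/a" `mappend` NoFlag             ==  toFlag "/cache/a"
-- >
-- > -- lastNonEmpty' keeps the second list only when it is non-empty, so a
-- > -- later config file replaces (rather than appends to) the earlier
-- > -- one's list fields, as motivated by #1884 in the comment above.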
+ combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a + `mappend` savedGlobalInstallDirs b + + combinedSavedUploadFlags = UploadFlags { + uploadCheck = combine uploadCheck, + uploadDoc = combine uploadDoc, + uploadUsername = combine uploadUsername, + uploadPassword = combine uploadPassword, + uploadPasswordCmd = combine uploadPasswordCmd, + uploadVerbosity = combine uploadVerbosity + } + where + combine = combine' savedUploadFlags + + combinedSavedReportFlags = ReportFlags { + reportUsername = combine reportUsername, + reportPassword = combine reportPassword, + reportVerbosity = combine reportVerbosity + } + where + combine = combine' savedReportFlags + + combinedSavedHaddockFlags = HaddockFlags { + -- TODO: NubListify + haddockProgramPaths = lastNonEmpty haddockProgramPaths, + -- TODO: NubListify + haddockProgramArgs = lastNonEmpty haddockProgramArgs, + haddockHoogle = combine haddockHoogle, + haddockHtml = combine haddockHtml, + haddockHtmlLocation = combine haddockHtmlLocation, + haddockForHackage = combine haddockForHackage, + haddockExecutables = combine haddockExecutables, + haddockTestSuites = combine haddockTestSuites, + haddockBenchmarks = combine haddockBenchmarks, + haddockInternal = combine haddockInternal, + haddockCss = combine haddockCss, + haddockHscolour = combine haddockHscolour, + haddockHscolourCss = combine haddockHscolourCss, + haddockContents = combine haddockContents, + haddockDistPref = combine haddockDistPref, + haddockKeepTempFiles = combine haddockKeepTempFiles, + haddockVerbosity = combine haddockVerbosity + } + where + combine = combine' savedHaddockFlags + lastNonEmpty = lastNonEmpty' savedHaddockFlags + + +-- +-- * Default config +-- + +-- | These are the absolute basic defaults. The fields that must be +-- initialised. When we load the config from the file we layer the loaded +-- values over these ones, so any missing fields in the file take their values +-- from here. +-- +baseSavedConfig :: IO SavedConfig +baseSavedConfig = do + userPrefix <- defaultCabalDir + logsDir <- defaultLogsDir + worldFile <- defaultWorldFile + return mempty { + savedConfigureFlags = mempty { + configHcFlavor = toFlag defaultCompiler, + configUserInstall = toFlag defaultUserInstall, + configVerbosity = toFlag normal + }, + savedUserInstallDirs = mempty { + prefix = toFlag (toPathTemplate userPrefix) + }, + savedGlobalFlags = mempty { + globalLogsDir = toFlag logsDir, + globalWorldFile = toFlag worldFile + } + } + +-- | This is the initial configuration that we write out to to the config file +-- if the file does not exist (or the config we use if the file cannot be read +-- for some other reason). When the config gets loaded it gets layered on top +-- of 'baseSavedConfig' so we do not need to include it into the initial +-- values we save into the config file. 
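-- [Editorial aside -- illustrative only, not part of the packaged diff.]
-- The layering mentioned above is plain 'mappend' on 'SavedConfig', so any
-- field the config file leaves unset ('NoFlag' / empty) falls back to the
-- value from 'baseSavedConfig'.  Hypothetical sketch, assuming bindings
-- 'base' and 'loaded' and that 'loaded' left the verbosity field as NoFlag:
--
-- > effective = base `mappend` loaded
-- > configVerbosity (savedConfigureFlags effective) == toFlag normal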
+-- +initialSavedConfig :: IO SavedConfig +initialSavedConfig = do + cacheDir <- defaultCacheDir + logsDir <- defaultLogsDir + worldFile <- defaultWorldFile + extraPath <- defaultExtraPath + return mempty { + savedGlobalFlags = mempty { + globalCacheDir = toFlag cacheDir, + globalRemoteRepos = toNubList [defaultRemoteRepo], + globalWorldFile = toFlag worldFile + }, + savedConfigureFlags = mempty { + configProgramPathExtra = toNubList extraPath + }, + savedInstallFlags = mempty { + installSummaryFile = toNubList [toPathTemplate (logsDir "build.log")], + installBuildReports= toFlag AnonymousReports, + installNumJobs = toFlag Nothing + } + } + +--TODO: misleading, there's no way to override this default +-- either make it possible or rename to simply getCabalDir. +defaultCabalDir :: IO FilePath +defaultCabalDir = getAppUserDataDirectory "cabal" + +defaultConfigFile :: IO FilePath +defaultConfigFile = do + dir <- defaultCabalDir + return $ dir "config" + +defaultCacheDir :: IO FilePath +defaultCacheDir = do + dir <- defaultCabalDir + return $ dir "packages" + +defaultLogsDir :: IO FilePath +defaultLogsDir = do + dir <- defaultCabalDir + return $ dir "logs" + +-- | Default position of the world file +defaultWorldFile :: IO FilePath +defaultWorldFile = do + dir <- defaultCabalDir + return $ dir "world" + +defaultExtraPath :: IO [FilePath] +defaultExtraPath = do + dir <- defaultCabalDir + return [dir "bin"] + +defaultCompiler :: CompilerFlavor +defaultCompiler = fromMaybe GHC defaultCompilerFlavor + +defaultUserInstall :: Bool +defaultUserInstall = True +-- We do per-user installs by default on all platforms. We used to default to +-- global installs on Windows but that no longer works on Windows Vista or 7. + +defaultRemoteRepo :: RemoteRepo +defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False + where + name = "hackage.haskell.org" + uri = URI "http:" (Just (URIAuth "" name "")) "/" "" "" + -- Note that lots of old ~/.cabal/config files will have the old url + -- http://hackage.haskell.org/packages/archive + -- but new config files can use the new url (without the /packages/archive) + -- and avoid having to do a http redirect + +-- For the default repo we know extra information, fill this in. +-- +-- We need this because the 'defaultRemoteRepo' above is only used for the +-- first time when a config file is made. So for users with older config files +-- we might have only have older info. This lets us fill that in even for old +-- config files. +-- +addInfoForKnownRepos :: RemoteRepo -> RemoteRepo +addInfoForKnownRepos repo + | remoteRepoName repo == remoteRepoName defaultRemoteRepo + = useSecure . tryHttps . fixOldURI $ repo + where + fixOldURI r + | isOldHackageURI (remoteRepoURI r) + = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo } + | otherwise = r + + tryHttps r = r { remoteRepoShouldTryHttps = True } + + useSecure r@RemoteRepo{ + remoteRepoSecure = secure, + remoteRepoRootKeys = [], + remoteRepoKeyThreshold = 0 + } | secure /= Just False + = r { + --TODO: When we want to switch us from using opt-in to opt-out + -- security for the central hackage server, uncomment the + -- following line. That will cause the default (of unspecified) + -- to get interpreted as if it were "secure: True". For the + -- moment it means the keys get added but you have to manually + -- set "secure: True" to opt-in. 
+ --remoteRepoSecure = Just True, + remoteRepoRootKeys = defaultHackageRemoteRepoKeys, + remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold + } + useSecure r = r +addInfoForKnownRepos other = other + +-- | The current hackage.haskell.org repo root keys that we ship with cabal. +--- +-- This lets us bootstrap trust in this repo without user intervention. +-- These keys need to be periodically updated when new root keys are added. +-- See the root key procedures for details. +-- +defaultHackageRemoteRepoKeys :: [String] +defaultHackageRemoteRepoKeys = + [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0", + "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42", + "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3", + "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d", + "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" + ] + +-- | The required threshold of root key signatures for hackage.haskell.org +-- +defaultHackageRemoteRepoKeyThreshold :: Int +defaultHackageRemoteRepoKeyThreshold = 3 + +-- +-- * Config file reading +-- + +-- | Loads the main configuration, and applies additional defaults to give the +-- effective configuration. To loads just what is actually in the config file, +-- use 'loadRawConfig'. +-- +loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig +loadConfig verbosity configFileFlag = do + config <- loadRawConfig verbosity configFileFlag + extendToEffectiveConfig config + +extendToEffectiveConfig :: SavedConfig -> IO SavedConfig +extendToEffectiveConfig config = do + base <- baseSavedConfig + let effective0 = base `mappend` config + globalFlags0 = savedGlobalFlags effective0 + effective = effective0 { + savedGlobalFlags = globalFlags0 { + globalRemoteRepos = + overNubList (map addInfoForKnownRepos) + (globalRemoteRepos globalFlags0) + } + } + return effective + +-- | Like 'loadConfig' but does not apply any additional defaults, it just +-- loads what is actually in the config file. This is thus suitable for +-- comparing or editing a config file, but not suitable for using as the +-- effective configuration. +-- +loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig +loadRawConfig verbosity configFileFlag = do + (source, configFile) <- getConfigFilePathAndSource configFileFlag + minp <- readConfigFile mempty configFile + case minp of + Nothing -> do + notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "." + notice verbosity $ "Config file " ++ configFile ++ " not found." + createDefaultConfigFile verbosity configFile + Just (ParseOk ws conf) -> do + unless (null ws) $ warn verbosity $ + unlines (map (showPWarning configFile) ws) + return conf + Just (ParseFailed err) -> do + let (line, msg) = locatedErrorMsg err + die $ + "Error parsing config file " ++ configFile + ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg + + where + sourceMsg CommandlineOption = "commandline option" + sourceMsg EnvironmentVariable = "env var CABAL_CONFIG" + sourceMsg Default = "default config file" + +data ConfigFileSource = CommandlineOption + | EnvironmentVariable + | Default + +-- | Returns the config file path, without checking that the file exists. +-- The order of precedence is: input flag, CABAL_CONFIG, default location. +getConfigFilePath :: Flag FilePath -> IO FilePath +getConfigFilePath = fmap snd . 
getConfigFilePathAndSource + +getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath) +getConfigFilePathAndSource configFileFlag = + getSource sources + where + sources = + [ (CommandlineOption, return . flagToMaybe $ configFileFlag) + , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment) + , (Default, Just `liftM` defaultConfigFile) ] + + getSource [] = error "no config file path candidate found." + getSource ((source,action): xs) = + action >>= maybe (getSource xs) (return . (,) source) + +readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) +readConfigFile initial file = handleNotExists $ + fmap (Just . parseConfig (ConstraintSourceMainConfig file) initial) + (readFile file) + + where + handleNotExists action = catchIO action $ \ioe -> + if isDoesNotExistError ioe + then return Nothing + else ioError ioe + +createDefaultConfigFile :: Verbosity -> FilePath -> IO SavedConfig +createDefaultConfigFile verbosity filePath = do + commentConf <- commentSavedConfig + initialConf <- initialSavedConfig + notice verbosity $ "Writing default configuration to " ++ filePath + writeConfigFile filePath commentConf initialConf + return initialConf + +writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () +writeConfigFile file comments vals = do + let tmpFile = file <.> "tmp" + createDirectoryIfMissing True (takeDirectory file) + writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" + renameFile tmpFile file + where + explanation = unlines + ["-- This is the configuration file for the 'cabal' command line tool." + ,"" + ,"-- The available configuration options are listed below." + ,"-- Some of them have default values listed." + ,"" + ,"-- Lines (like this one) beginning with '--' are comments." + ,"-- Be careful with spaces and indentation because they are" + ,"-- used to indicate layout for nested sections." + ,"" + ,"-- Cabal library version: " ++ showVersion cabalVersion + ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version + ,"","" + ] + +-- | These are the default values that get used in Cabal if a no value is +-- given. We use these here to include in comments when we write out the +-- initial config file so that the user can see what default value they are +-- overriding. +-- +commentSavedConfig :: IO SavedConfig +commentSavedConfig = do + userInstallDirs <- defaultInstallDirs defaultCompiler True True + globalInstallDirs <- defaultInstallDirs defaultCompiler False True + return SavedConfig { + savedGlobalFlags = defaultGlobalFlags, + savedInstallFlags = defaultInstallFlags, + savedConfigureExFlags = defaultConfigExFlags, + savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) { + configUserInstall = toFlag defaultUserInstall, + configAllowNewer = Just AllowNewerNone + }, + savedUserInstallDirs = fmap toFlag userInstallDirs, + savedGlobalInstallDirs = fmap toFlag globalInstallDirs, + savedUploadFlags = commandDefaultFlags uploadCommand, + savedReportFlags = commandDefaultFlags reportCommand, + savedHaddockFlags = defaultHaddockFlags + } + +-- | All config file fields. 
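-- [Editorial aside -- illustrative only, not part of the packaged diff.]
-- 'writeConfigFile' above writes to a sibling ".tmp" file and then renames
-- it over the target, so an interrupted write does not leave a half-written
-- ~/.cabal/config behind.  A standalone sketch of that shape (the name
-- 'atomicWriteFile' is hypothetical; the imports match this module's):
--
-- > import System.Directory (createDirectoryIfMissing, renameFile)
-- > import System.FilePath  ((<.>), takeDirectory)
-- >
-- > atomicWriteFile :: FilePath -> String -> IO ()
-- > atomicWriteFile file contents = do
-- >   createDirectoryIfMissing True (takeDirectory file)
-- >   let tmp = file <.> "tmp"
-- >   writeFile tmp contents
-- >   renameFile tmp file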
+-- +configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig] +configFieldDescriptions src = + + toSavedConfig liftGlobalFlag + (commandOptions (globalCommand []) ParseArgs) + ["version", "numeric-version", "config-file", "sandbox-config-file"] [] + + ++ toSavedConfig liftConfigFlag + (configureOptions ParseArgs) + (["builddir", "constraint", "dependency", "ipid"] + ++ map fieldName installDirsFields) + + -- This is only here because viewAsFieldDescr gives us a parser + -- that only recognises 'ghc' etc, the case-sensitive flag names, not + -- what the normal case-insensitive parser gives us. + [simpleField "compiler" + (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + ,let showAllowNewer Nothing = mempty + showAllowNewer (Just AllowNewerNone) = Disp.text "False" + showAllowNewer (Just _) = Disp.text "True" + + toAllowNewer True = Just AllowNewerAll + toAllowNewer False = Just AllowNewerNone + + pkgs = (Just . AllowNewerSome) `fmap` parseOptCommaList Text.parse + parseAllowNewer = (toAllowNewer `fmap` Text.parse) Parse.<++ pkgs in + simpleField "allow-newer" + showAllowNewer parseAllowNewer + configAllowNewer (\v flags -> flags { configAllowNewer = v }) + -- TODO: The following is a temporary fix. The "optimization" + -- and "debug-info" fields are OptArg, and viewAsFieldDescr + -- fails on that. Instead of a hand-written hackaged parser + -- and printer, we should handle this case properly in the + -- library. + ,liftField configOptimization (\v flags -> + flags { configOptimization = v }) $ + let name = "optimization" in + FieldDescr name + (\f -> case f of + Flag NoOptimisation -> Disp.text "False" + Flag NormalOptimisation -> Disp.text "True" + Flag MaximumOptimisation -> Disp.text "2" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoOptimisation) + | str == "True" -> ParseOk [] (Flag NormalOptimisation) + | str == "0" -> ParseOk [] (Flag NoOptimisation) + | str == "1" -> ParseOk [] (Flag NormalOptimisation) + | str == "2" -> ParseOk [] (Flag MaximumOptimisation) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name + ++ "' field is case sensitive, use 'True' or 'False'.") + ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ + let name = "debug-info" in + FieldDescr name + (\f -> case f of + Flag NoDebugInfo -> Disp.text "False" + Flag MinimalDebugInfo -> Disp.text "1" + Flag NormalDebugInfo -> Disp.text "True" + Flag MaximalDebugInfo -> Disp.text "3" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) + | str == "True" -> ParseOk [] (Flag NormalDebugInfo) + | str == "0" -> ParseOk [] (Flag NoDebugInfo) + | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) + | str == "2" -> ParseOk [] (Flag NormalDebugInfo) + | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name + ++ "' field is case sensitive, use 'True' or 'False'.") + ] + + ++ toSavedConfig liftConfigExFlag + (configureExOptions ParseArgs src) + [] [] + + ++ toSavedConfig 
liftInstallFlag + (installOptions ParseArgs) + ["dry-run", "only", "only-dependencies", "dependencies-only"] [] + + ++ toSavedConfig liftUploadFlag + (commandOptions uploadCommand ParseArgs) + ["verbose", "check", "documentation"] [] + + ++ toSavedConfig liftReportFlag + (commandOptions reportCommand ParseArgs) + ["verbose", "username", "password"] [] + --FIXME: this is a hack, hiding the user name and password. + -- But otherwise it masks the upload ones. Either need to + -- share the options or make then distinct. In any case + -- they should probably be per-server. + + ++ [ viewAsFieldDescr + $ optionDistPref + (configDistPref . savedConfigureFlags) + (\distPref config -> + config + { savedConfigureFlags = (savedConfigureFlags config) { + configDistPref = distPref } + , savedHaddockFlags = (savedHaddockFlags config) { + haddockDistPref = distPref } + } + ) + ParseArgs + ] + + where + toSavedConfig lift options exclusions replacements = + [ lift (fromMaybe field replacement) + | opt <- options + , let field = viewAsFieldDescr opt + name = fieldName field + replacement = find ((== name) . fieldName) replacements + , name `notElem` exclusions ] + optional = Parse.option mempty . fmap toFlag + +-- TODO: next step, make the deprecated fields elicit a warning. +-- +deprecatedFieldDescriptions :: [FieldDescr SavedConfig] +deprecatedFieldDescriptions = + [ liftGlobalFlag $ + listField "repos" + (Disp.text . showRepo) parseRepo + (fromNubList . globalRemoteRepos) + (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) + , liftGlobalFlag $ + simpleField "cachedir" + (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) + globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) + , liftUploadFlag $ + simpleField "hackage-username" + (Disp.text . fromFlagOrDefault "" . fmap unUsername) + (optional (fmap Username parseTokenQ)) + uploadUsername (\d cfg -> cfg { uploadUsername = d }) + , liftUploadFlag $ + simpleField "hackage-password" + (Disp.text . fromFlagOrDefault "" . fmap unPassword) + (optional (fmap Password parseTokenQ)) + uploadPassword (\d cfg -> cfg { uploadPassword = d }) + , liftUploadFlag $ + spaceListField "hackage-password-command" + Disp.text parseTokenQ + (fromFlagOrDefault [] . uploadPasswordCmd) + (\d cfg -> cfg { uploadPasswordCmd = Flag d }) + ] + ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields + ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields + where + optional = Parse.option mempty . 
fmap toFlag + modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a + modifyFieldName f d = d { fieldName = f (fieldName d) } + +liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) + -> FieldDescr SavedConfig +liftUserInstallDirs = liftField + savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) + +liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) + -> FieldDescr SavedConfig +liftGlobalInstallDirs = liftField + savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) + +liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig +liftGlobalFlag = liftField + savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) + +liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig +liftConfigFlag = liftField + savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) + +liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig +liftConfigExFlag = liftField + savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) + +liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig +liftInstallFlag = liftField + savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) + +liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig +liftUploadFlag = liftField + savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) + +liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig +liftReportFlag = liftField + savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) + +parseConfig :: ConstraintSource + -> SavedConfig + -> String + -> ParseResult SavedConfig +parseConfig src initial = \str -> do + fields <- readFields str + let (knownSections, others) = partition isKnownSection fields + config <- parse others + let user0 = savedUserInstallDirs config + global0 = savedGlobalInstallDirs config + (remoteRepoSections0, haddockFlags, user, global, paths, args) <- + foldM parseSections + ([], savedHaddockFlags config, user0, global0, [], []) + knownSections + + let remoteRepoSections = + reverse + . 
nubBy ((==) `on` remoteRepoName) + $ remoteRepoSections0 + + return config { + savedGlobalFlags = (savedGlobalFlags config) { + globalRemoteRepos = toNubList remoteRepoSections + }, + savedConfigureFlags = (savedConfigureFlags config) { + configProgramPaths = paths, + configProgramArgs = args + }, + savedHaddockFlags = haddockFlags, + savedUserInstallDirs = user, + savedGlobalInstallDirs = global + } + + where + isKnownSection (ParseUtils.Section _ "repository" _ _) = True + isKnownSection (ParseUtils.F _ "remote-repo" _) = True + isKnownSection (ParseUtils.Section _ "haddock" _ _) = True + isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True + isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True + isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True + isKnownSection _ = False + + parse = parseFields (configFieldDescriptions src + ++ deprecatedFieldDescriptions) initial + + parseSections (rs, h, u, g, p, a) + (ParseUtils.Section _ "repository" name fs) = do + r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs + when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $ + warning $ "'key-threshold' for repository " ++ show (remoteRepoName r') + ++ " higher than number of keys" + when (not (null (remoteRepoRootKeys r')) + && remoteRepoSecure r' /= Just True) $ + warning $ "'root-keys' for repository " ++ show (remoteRepoName r') + ++ " non-empty, but 'secure' not set to True." + return (r':rs, h, u, g, p, a) + + parseSections (rs, h, u, g, p, a) + (ParseUtils.F lno "remote-repo" raw) = do + let mr' = readRepo raw + r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr' + return (r':rs, h, u, g, p, a) + + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "haddock" name fs) + | name == "" = do h' <- parseFields haddockFlagsFields h fs + return (rs, h', u, g, p, a) + | otherwise = do + warning "The 'haddock' section should be unnamed" + return accum + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "install-dirs" name fs) + | name' == "user" = do u' <- parseFields installDirsFields u fs + return (rs, h, u', g, p, a) + | name' == "global" = do g' <- parseFields installDirsFields g fs + return (rs, h, u, g', p, a) + | otherwise = do + warning "The 'install-paths' section should be for 'user' or 'global'" + return accum + where name' = lowercase name + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "program-locations" name fs) + | name == "" = do p' <- parseFields withProgramsFields p fs + return (rs, h, u, g, p', a) + | otherwise = do + warning "The 'program-locations' section should be unnamed" + return accum + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "program-default-options" name fs) + | name == "" = do a' <- parseFields withProgramOptionsFields a fs + return (rs, h, u, g, p, a') + | otherwise = do + warning "The 'program-default-options' section should be unnamed" + return accum + parseSections accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +showConfig :: SavedConfig -> String +showConfig = showConfigWithComments mempty + +showConfigWithComments :: SavedConfig -> SavedConfig -> String +showConfigWithComments comment vals = Disp.render $ + case fmap ppRemoteRepoSection . fromNubList . globalRemoteRepos + . 
savedGlobalFlags $ vals of + [] -> Disp.text "" + (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs + $+$ Disp.text "" + $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown)) + mcomment vals + $+$ Disp.text "" + $+$ ppSection "haddock" "" haddockFlagsFields + (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) + $+$ Disp.text "" + $+$ installDirsSection "user" savedUserInstallDirs + $+$ Disp.text "" + $+$ installDirsSection "global" savedGlobalInstallDirs + $+$ Disp.text "" + $+$ configFlagsSection "program-locations" withProgramsFields + configProgramPaths + $+$ Disp.text "" + $+$ configFlagsSection "program-default-options" withProgramOptionsFields + configProgramArgs + where + mcomment = Just comment + installDirsSection name field = + ppSection "install-dirs" name installDirsFields + (fmap field mcomment) (field vals) + configFlagsSection name fields field = + ppSection name "" fields + (fmap (field . savedConfigureFlags) mcomment) + ((field . savedConfigureFlags) vals) + + -- skip fields based on field name. currently only skips "remote-repo", + -- because that is rendered as a section. (see 'ppRemoteRepoSection'.) + skipSomeFields = filter ((/= "remote-repo") . fieldName) + +-- | Fields for the 'install-dirs' sections. +installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] +installDirsFields = map viewAsFieldDescr installDirsOptions + +ppRemoteRepoSection :: RemoteRepo -> Doc +ppRemoteRepoSection vals = ppSection "repository" (remoteRepoName vals) + remoteRepoFields def vals + where + def = Just (emptyRemoteRepo "ignored") { remoteRepoSecure = Just False } + +remoteRepoFields :: [FieldDescr RemoteRepo] +remoteRepoFields = + [ simpleField "url" + (text . show) (parseTokenQ >>= parseURI') + remoteRepoURI (\x repo -> repo { remoteRepoURI = x }) + , simpleField "secure" + showSecure (Just `fmap` Text.parse) + remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x }) + , listField "root-keys" + text parseTokenQ + remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x }) + , simpleField "key-threshold" + showThreshold Text.parse + remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x }) + ] + where + parseURI' uriString = + case parseURI uriString of + Nothing -> fail $ "remote-repo: no parse on " ++ show uriString + Just uri -> return uri + + showSecure Nothing = mempty -- default 'secure' setting + showSecure (Just True) = text "True" -- user explicitly enabled it + showSecure (Just False) = text "False" -- user explicitly disabled it + + -- If the key-threshold is set to 0, we omit it as this is the default + -- and it looks odd to have a value for key-threshold but not for 'secure' + -- (note that an empty list of keys is already omitted by default, since + -- that is what we do for all list fields) + showThreshold 0 = mempty + showThreshold t = text (show t) + +-- | Fields for the 'haddock' section. +haddockFlagsFields :: [FieldDescr HaddockFlags] +haddockFlagsFields = [ field + | opt <- haddockOptions ParseArgs + , let field = viewAsFieldDescr opt + name = fieldName field + , name `notElem` exclusions ] + where + exclusions = ["verbose", "builddir", "for-hackage"] + +-- | Fields for the 'program-locations' section. +withProgramsFields :: [FieldDescr [(String, FilePath)]] +withProgramsFields = + map viewAsFieldDescr $ + programConfigurationPaths' (++ "-location") defaultProgramConfiguration + ParseArgs id (++) + +-- | Fields for the 'program-default-options' section. 
+withProgramOptionsFields :: [FieldDescr [(String, [String])]] +withProgramOptionsFields = + map viewAsFieldDescr $ + programConfigurationOptions defaultProgramConfiguration ParseArgs id (++) + +-- | Get the differences (as a pseudo code diff) between the user's +-- '~/.cabal/config' and the one that cabal would generate if it didn't exist. +userConfigDiff :: GlobalFlags -> IO [String] +userConfigDiff globalFlags = do + userConfig <- loadRawConfig normal (globalConfigFile globalFlags) + testConfig <- initialSavedConfig + return $ reverse . foldl' createDiff [] . M.toList + $ M.unionWith combine + (M.fromList . map justFst $ filterShow testConfig) + (M.fromList . map justSnd $ filterShow userConfig) + where + justFst (a, b) = (a, (Just b, Nothing)) + justSnd (a, b) = (a, (Nothing, Just b)) + + combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) + combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) + combine x y = error $ "Can't happen : userConfigDiff " + ++ show x ++ " " ++ show y + + createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] + createDiff acc (key, (Just a, Just b)) + | a == b = acc + | otherwise = ("+ " ++ key ++ ": " ++ b) + : ("- " ++ key ++ ": " ++ a) : acc + createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc + createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc + createDiff acc (_, (Nothing, Nothing)) = acc + + filterShow :: SavedConfig -> [(String, String)] + filterShow cfg = map keyValueSplit + . filter (\s -> not (null s) && any (== ':') s) + . map nonComment + . lines + $ showConfig cfg + + nonComment [] = [] + nonComment ('-':'-':_) = [] + nonComment (x:xs) = x : nonComment xs + + topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace + + keyValueSplit s = + let (left, right) = break (== ':') s + in (topAndTail left, topAndTail (drop 1 right)) + + +-- | Update the user's ~/.cabal/config' keeping the user's customizations. +userConfigUpdate :: Verbosity -> GlobalFlags -> IO () +userConfigUpdate verbosity globalFlags = do + userConfig <- loadRawConfig normal (globalConfigFile globalFlags) + newConfig <- initialSavedConfig + commentConf <- commentSavedConfig + cabalFile <- getConfigFilePath $ globalConfigFile globalFlags + let backup = cabalFile ++ ".backup" + notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." + renameFile cabalFile backup + notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." + writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Configure.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Configure.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Configure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Configure.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,392 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Configure +-- Copyright : (c) David Himmelstrup 2005, +-- Duncan Coutts 2005 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- High level interface to configuring a package. 
+----------------------------------------------------------------------------- +module Distribution.Client.Configure ( + configure, + configureSetupScript, + chooseCabalVersion, + checkConfigExFlags + ) where + +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types + ( ConstraintSource(..) + , LabeledPackageConstraint(..), showConstraintSource ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan (InstallPlan) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.PackageIndex ( PackageIndex, elemByPackageName ) +import Distribution.Client.PkgConfigDb (PkgConfigDb, readPkgConfigDb) +import Distribution.Client.Setup + ( ConfigExFlags(..), configureCommand, filterConfigureFlags + , RepoContext(..) ) +import Distribution.Client.Types as Source +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Targets + ( userToPackageConstraint, userConstraintPackageName ) +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Package (PackageId) +import Distribution.Client.JobControl (Lock) + +import Distribution.Simple.Compiler + ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) +import Distribution.Simple.Program (ProgramConfiguration ) +import Distribution.Simple.Setup + ( ConfigFlags(..), AllowNewer(..) + , fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, lookupPackageName ) +import Distribution.Simple.Utils + ( defaultPackageDesc ) +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Package + ( Package(..), UnitId, packageName + , Dependency(..), thisPackageVersion + ) +import qualified Distribution.PackageDescription as PkgDesc +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription ) +import Distribution.Version + ( anyVersion, thisVersion ) +import Distribution.Simple.Utils as Utils + ( warn, notice, debug, die ) +import Distribution.Simple.Setup + ( isAllowNewer ) +import Distribution.System + ( Platform ) +import Distribution.Text ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity ) +import Distribution.Version + ( Version(..), VersionRange, orLaterVersion ) + +import Control.Monad (unless) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid (Monoid(..)) +#endif +import Data.Maybe (isJust, fromMaybe) + +-- | Choose the Cabal version such that the setup scripts compiled against this +-- version will support the given command-line flags. +chooseCabalVersion :: ConfigFlags -> Maybe Version -> VersionRange +chooseCabalVersion configFlags maybeVersion = + maybe defaultVersionRange thisVersion maybeVersion + where + -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed + -- for '--allow-newer' to work. 
+ allowNewer = isAllowNewer + (fromMaybe AllowNewerNone $ configAllowNewer configFlags) + + defaultVersionRange = if allowNewer + then orLaterVersion (Version [1,19,2] []) + else anyVersion + +-- | Configure the package found in the local directory +configure :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> ConfigFlags + -> ConfigExFlags + -> [String] + -> IO () +configure verbosity packageDBs repoCtxt comp platform conf + configFlags configExFlags extraArgs = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity conf + + checkConfigExFlags verbosity installedPkgIndex + (packageIndex sourcePkgDb) configExFlags + + progress <- planLocalPackage verbosity comp platform configFlags configExFlags + installedPkgIndex sourcePkgDb pkgConfigDb + + notice verbosity "Resolving dependencies..." + maybePlan <- foldProgress logMsg (return . Left) (return . Right) + progress + case maybePlan of + Left message -> do + warn verbosity $ + "solver failed to find a solution:\n" + ++ message + ++ "Trying configure anyway." + setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) + Nothing configureCommand (const configFlags) extraArgs + + Right installPlan -> case InstallPlan.ready installPlan of + [pkg@(ReadyPackage + (ConfiguredPackage (SourcePackage _ _ (LocalUnpackedPackage _) _) + _ _ _) + _)] -> do + configurePackage verbosity + platform (compilerInfo comp) + (setupScriptOptions installedPkgIndex (Just pkg)) + configFlags pkg extraArgs + + _ -> die $ "internal error: configure install plan should have exactly " + ++ "one local ready package." + + where + setupScriptOptions :: InstalledPackageIndex + -> Maybe ReadyPackage + -> SetupScriptOptions + setupScriptOptions = + configureSetupScript + packageDBs + comp + platform + conf + (fromFlagOrDefault + (useDistPref defaultSetupScriptOptions) + (configDistPref configFlags)) + (chooseCabalVersion + configFlags + (flagToMaybe (configCabalVersion configExFlags))) + Nothing + False + + logMsg message rest = debug verbosity message >> rest + +configureSetupScript :: PackageDBStack + -> Compiler + -> Platform + -> ProgramConfiguration + -> FilePath + -> VersionRange + -> Maybe Lock + -> Bool + -> InstalledPackageIndex + -> Maybe ReadyPackage + -> SetupScriptOptions +configureSetupScript packageDBs + comp + platform + conf + distPref + cabalVersion + lock + forceExternal + index + mpkg + = SetupScriptOptions { + useCabalVersion = cabalVersion + , useCabalSpecVersion = Nothing + , useCompiler = Just comp + , usePlatform = Just platform + , usePackageDB = packageDBs' + , usePackageIndex = index' + , useProgramConfig = conf + , useDistPref = distPref + , useLoggingHandle = Nothing + , useWorkingDir = Nothing + , setupCacheLock = lock + , useWin32CleanHack = False + , forceExternalSetupMethod = forceExternal + -- If we have explicit setup dependencies, list them; otherwise, we give + -- the empty list of dependencies; ideally, we would fix the version of + -- Cabal here, so that we no longer need the special case for that in + -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet + -- know the version of Cabal at this point, but only find this there. + -- Therefore, for now, we just leave this blank. 
+ , useDependencies = fromMaybe [] explicitSetupDeps + , useDependenciesExclusive = not defaultSetupDeps && isJust explicitSetupDeps + , useVersionMacros = not defaultSetupDeps && isJust explicitSetupDeps + } + where + -- When we are compiling a legacy setup script without an explicit + -- setup stanza, we typically want to allow the UserPackageDB for + -- finding the Cabal lib when compiling any Setup.hs even if we're doing + -- a global install. However we also allow looking in a specific package + -- db. + packageDBs' :: PackageDBStack + index' :: Maybe InstalledPackageIndex + (packageDBs', index') = + case packageDBs of + (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs + , Nothing <- explicitSetupDeps + -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) + -- but if the user is using an odd db stack, don't touch it + _otherwise -> (packageDBs, Just index) + + maybeSetupBuildInfo :: Maybe PkgDesc.SetupBuildInfo + maybeSetupBuildInfo = do + ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _) _ _ _) _ + <- mpkg + PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg) + + -- Was a default 'custom-setup' stanza added by 'cabal-install' itself? If + -- so, 'setup-depends' must not be exclusive. See #3199. + defaultSetupDeps :: Bool + defaultSetupDeps = maybe False PkgDesc.defaultSetupDepends + maybeSetupBuildInfo + + explicitSetupDeps :: Maybe [(UnitId, PackageId)] + explicitSetupDeps = do + -- Check if there is an explicit setup stanza. + _buildInfo <- maybeSetupBuildInfo + -- Return the setup dependencies computed by the solver + ReadyPackage _ deps <- mpkg + return [ ( Installed.installedUnitId deppkg + , Installed.sourcePackageId deppkg + ) + | deppkg <- CD.setupDeps deps + ] + +-- | Warn if any constraints or preferences name packages that are not in the +-- source package index or installed package index. +checkConfigExFlags :: Package pkg + => Verbosity + -> InstalledPackageIndex + -> PackageIndex pkg + -> ConfigExFlags + -> IO () +checkConfigExFlags verbosity installedPkgIndex sourcePkgIndex flags = do + unless (null unknownConstraints) $ warn verbosity $ + "Constraint refers to an unknown package: " + ++ showConstraint (head unknownConstraints) + unless (null unknownPreferences) $ warn verbosity $ + "Preference refers to an unknown package: " + ++ display (head unknownPreferences) + where + unknownConstraints = filter (unknown . userConstraintPackageName . fst) $ + configExConstraints flags + unknownPreferences = filter (unknown . \(Dependency name _) -> name) $ + configPreferences flags + unknown pkg = null (lookupPackageName installedPkgIndex pkg) + && not (elemByPackageName sourcePkgIndex pkg) + showConstraint (uc, src) = + display uc ++ " (" ++ showConstraintSource src ++ ")" + +-- | Make an 'InstallPlan' for the unpacked package in the current directory, +-- and all its dependencies. 
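-- [Editorial aside -- illustrative only, not part of the packaged diff.]
-- The package-db adjustment in 'configureSetupScript' above can be read in
-- isolation: when the stack starts at the global db, contains no user db,
-- and there are no explicit setup dependencies, the user db is inserted so
-- the Cabal library can be found when compiling Setup.hs.  A sketch under
-- those assumptions (the name 'relaxForSetup' is hypothetical):
--
-- > relaxForSetup :: PackageDBStack -> PackageDBStack
-- > relaxForSetup (GlobalPackageDB : dbs)
-- >   | UserPackageDB `notElem` dbs = GlobalPackageDB : UserPackageDB : dbs
-- > relaxForSetup dbs               = dbs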
+-- +planLocalPackage :: Verbosity -> Compiler + -> Platform + -> ConfigFlags -> ConfigExFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> IO (Progress String String InstallPlan) +planLocalPackage verbosity comp platform configFlags configExFlags + installedPkgIndex (SourcePackageDb _ packagePrefs) pkgConfigDb = do + pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity + solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) + (compilerInfo comp) + + let -- We create a local package and ask to resolve a dependency on it + localPkg = SourcePackage { + packageInfoId = packageId pkg, + Source.packageDescription = pkg, + packageSource = LocalUnpackedPackage ".", + packageDescrOverride = Nothing + } + + testsEnabled = fromFlagOrDefault False $ configTests configFlags + benchmarksEnabled = + fromFlagOrDefault False $ configBenchmarks configFlags + + resolverParams = + removeUpperBounds + (fromMaybe AllowNewerNone $ configAllowNewer configFlags) + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- configPreferences configExFlags ] + + . addConstraints + -- version constraints from the config file or command line + -- TODO: should warn or error on constraints that are not on direct + -- deps or flag constraints not on the package in question. + [ LabeledPackageConstraint (userToPackageConstraint uc) src + | (uc, src) <- configExConstraints configExFlags ] + + . addConstraints + -- package flags from the config file or command line + [ let pc = PackageConstraintFlags (packageName pkg) + (configConfigurationsFlags configFlags) + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + ] + + . addConstraints + -- '--enable-tests' and '--enable-benchmarks' constraints from + -- the config file or command line + [ let pc = PackageConstraintStanzas (packageName pkg) $ + [ TestStanzas | testsEnabled ] ++ + [ BenchStanzas | benchmarksEnabled ] + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + ] + + $ standardInstallPolicy + installedPkgIndex + (SourcePackageDb mempty packagePrefs) + [SpecificSourcePackage localPkg] + + return (resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams) + + +-- | Call an installer for an 'SourcePackage' but override the configure +-- flags with the ones given by the 'ReadyPackage'. In particular the +-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly +-- versioned package dependencies. So we ignore any previous partial flag +-- assignment or dependency constraints and use the new ones. +-- +-- NB: when updating this function, don't forget to also update +-- 'installReadyPackage' in D.C.Install. +configurePackage :: Verbosity + -> Platform -> CompilerInfo + -> SetupScriptOptions + -> ConfigFlags + -> ReadyPackage + -> [String] + -> IO () +configurePackage verbosity platform comp scriptOptions configFlags + (ReadyPackage (ConfiguredPackage (SourcePackage _ gpkg _ _) + flags stanzas _) + deps) + extraArgs = + + setupWrapper verbosity + scriptOptions (Just pkg) configureCommand configureFlags extraArgs + + where + configureFlags = filterConfigureFlags configFlags { + configConfigurationsFlags = flags, + -- We generate the legacy constraints as well as the new style precise + -- deps. In the end only one set gets passed to Setup.hs configure, + -- depending on the Cabal version we are talking to. 
+ configConstraints = [ thisPackageVersion (packageId deppkg) + | deppkg <- CD.nonSetupDeps deps ], + configDependencies = [ (packageName (Installed.sourcePackageId deppkg), + Installed.installedUnitId deppkg) + | deppkg <- CD.nonSetupDeps deps ], + -- Use '--exact-configuration' if supported. + configExactConfiguration = toFlag True, + configVerbosity = toFlag verbosity, + configBenchmarks = toFlag (BenchStanzas `elem` stanzas), + configTests = toFlag (TestStanzas `elem` stanzas) + } + + pkg = case finalizePackageDescription flags + (const True) + platform comp [] (enableStanzas stanzas gpkg) of + Left _ -> error "finalizePackageDescription ReadyPackage failed" + Right (desc, _) -> desc diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Assignment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Assignment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Assignment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Assignment.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,150 @@ +module Distribution.Client.Dependency.Modular.Assignment + ( Assignment(..) + , FAssignment + , SAssignment + , PreAssignment(..) + , extend + , toCPs + ) where + +import Control.Applicative +import Control.Monad +import Data.Array as A +import Data.List as L +import Data.Map as M +import Data.Maybe +import Prelude hiding (pi) + +import Language.Haskell.Extension (Extension, Language) + +import Distribution.PackageDescription (FlagAssignment) -- from Cabal +import Distribution.Client.Types (OptionalStanza) +import Distribution.Client.Utils.LabeledGraph +import Distribution.Client.ComponentDeps (ComponentDeps, Component) +import qualified Distribution.Client.ComponentDeps as CD + +import Distribution.Client.Dependency.Modular.Configured +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Version + +-- | A (partial) package assignment. Qualified package names +-- are associated with instances. +type PAssignment = Map QPN I + +-- | A (partial) package preassignment. Qualified package names +-- are associated with constrained instances. Constrained instances +-- record constraints about the instances that can still be chosen, +-- and in the extreme case fix a concrete instance. +type PPreAssignment = Map QPN (CI QPN) +type FAssignment = Map QFN Bool +type SAssignment = Map QSN Bool + +-- | A (partial) assignment of variables. +data Assignment = A PAssignment FAssignment SAssignment + deriving (Show, Eq) + +-- | A preassignment comprises knowledge about variables, but not +-- necessarily fixed values. +data PreAssignment = PA PPreAssignment FAssignment SAssignment + +-- | Extend a package preassignment. +-- +-- Takes the variable that causes the new constraints, a current preassignment +-- and a set of new dependency constraints. +-- +-- We're trying to extend the preassignment with each dependency one by one. +-- Each dependency is for a particular variable. We check if we already have +-- constraints for that variable in the current preassignment. If so, we're +-- trying to merge the constraints. 
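-- [Editorial aside -- illustrative only, not part of the packaged diff.]
-- The "one dependency at a time, stop at the first conflict" shape used by
-- 'extend' below is an ordinary 'foldM' in the 'Either' monad.  A generic
-- sketch with hypothetical names:
--
-- > import Control.Monad (foldM)
-- >
-- > extendAll :: (s -> c -> Either e s) -> s -> [c] -> Either e s
-- > extendAll step s0 = foldM step s0
-- >
-- > -- the first constraint for which 'step' returns Left aborts the fold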
+-- +-- Either returns a witness of the conflict that would arise during the merge, +-- or the successfully extended assignment. +extend :: (Extension -> Bool) -- ^ is a given extension supported + -> (Language -> Bool) -- ^ is a given language supported + -> (PN -> VR -> Bool) -- ^ is a given pkg-config requirement satisfiable + -> Var QPN + -> PPreAssignment -> [Dep QPN] -> Either (ConflictSet QPN, [Dep QPN]) PPreAssignment +extend extSupported langSupported pkgPresent var = foldM extendSingle + where + + extendSingle :: PPreAssignment -> Dep QPN + -> Either (ConflictSet QPN, [Dep QPN]) PPreAssignment + extendSingle a (Ext ext ) = + if extSupported ext then Right a + else Left (varToConflictSet var, [Ext ext]) + extendSingle a (Lang lang) = + if langSupported lang then Right a + else Left (varToConflictSet var, [Lang lang]) + extendSingle a (Pkg pn vr) = + if pkgPresent pn vr then Right a + else Left (varToConflictSet var, [Pkg pn vr]) + extendSingle a (Dep qpn ci) = + let ci' = M.findWithDefault (Constrained []) qpn a + in case (\ x -> M.insert qpn x a) <$> merge ci' ci of + Left (c, (d, d')) -> Left (c, L.map (Dep qpn) (simplify (P qpn) d d')) + Right x -> Right x + + -- We're trying to remove trivial elements of the conflict. If we're just + -- making a choice pkg == instance, and pkg => pkg == instance is a part + -- of the conflict, then this info is clear from the context and does not + -- have to be repeated. + simplify v (Fixed _ var') c | v == var && var' == var = [c] + simplify v c (Fixed _ var') | v == var && var' == var = [c] + simplify _ c d = [c, d] + +-- | Delivers an ordered list of fully configured packages. +-- +-- TODO: This function is (sort of) ok. However, there's an open bug +-- w.r.t. unqualification. There might be several different instances +-- of one package version chosen by the solver, which will lead to +-- clashes. +toCPs :: Assignment -> RevDepMap -> [CP QPN] +toCPs (A pa fa sa) rdm = + let + -- get hold of the graph + g :: Graph Component + vm :: Vertex -> ((), QPN, [(Component, QPN)]) + cvm :: QPN -> Maybe Vertex + -- Note that the RevDepMap contains duplicate dependencies. Therefore the nub. + (g, vm, cvm) = graphFromEdges (L.map (\ (x, xs) -> ((), x, nub xs)) + (M.toList rdm)) + tg :: Graph Component + tg = transposeG g + -- Topsort the dependency graph, yielding a list of pkgs in the right order. + -- The graph will still contain all the installed packages, and it might + -- contain duplicates, because several variables might actually resolve to + -- the same package in the presence of qualified package names. + ps :: [PI QPN] + ps = L.map ((\ (_, x, _) -> PI x (pa M.! x)) . vm) $ + topSort g + -- Determine the flags per package, by walking over and regrouping the + -- complete flag assignment by package. + fapp :: Map QPN FlagAssignment + fapp = M.fromListWith (++) $ + L.map (\ ((FN (PI qpn _) fn), b) -> (qpn, [(fn, b)])) $ + M.toList $ + fa + -- Stanzas per package. + sapp :: Map QPN [OptionalStanza] + sapp = M.fromListWith (++) $ + L.map (\ ((SN (PI qpn _) sn), b) -> (qpn, if b then [sn] else [])) $ + M.toList $ + sa + -- Dependencies per package. + depp :: QPN -> [(Component, PI QPN)] + depp qpn = let v :: Vertex + v = fromJust (cvm qpn) + dvs :: [(Component, Vertex)] + dvs = tg A.! v + in L.map (\ (comp, dv) -> case vm dv of (_, x, _) -> (comp, PI x (pa M.! x))) dvs + -- Translated to PackageDeps + depp' :: QPN -> ComponentDeps [PI QPN] + depp' = CD.fromList . L.map (\(comp, d) -> (comp, [d])) . 
depp + in + L.map (\ pi@(PI qpn _) -> CP pi + (M.findWithDefault [] qpn fapp) + (M.findWithDefault [] qpn sapp) + (depp' qpn)) + ps diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Builder.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Builder.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Builder.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Builder.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,188 @@ +{-# LANGUAGE CPP #-} +module Distribution.Client.Dependency.Modular.Builder (buildTree) where + +-- Building the search tree. +-- +-- In this phase, we build a search tree that is too large, i.e, it contains +-- invalid solutions. We keep track of the open goals at each point. We +-- nondeterministically pick an open goal (via a goal choice node), create +-- subtrees according to the index and the available solutions, and extend the +-- set of open goals by superficially looking at the dependencies recorded in +-- the index. +-- +-- For each goal, we keep track of all the *reasons* why it is being +-- introduced. These are for debugging and error messages, mainly. A little bit +-- of care has to be taken due to the way we treat flags. If a package has +-- flag-guarded dependencies, we cannot introduce them immediately. Instead, we +-- store the entire dependency. + +import Data.List as L +import Data.Map as M +import Prelude hiding (sequence, mapM) + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Index +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.PSQ (PSQ) +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import Distribution.Client.Dependency.Modular.Tree + +import Distribution.Client.ComponentDeps (Component) + +-- | The state needed during the build phase of the search tree. +data BuildState = BS { + index :: Index, -- ^ information about packages and their dependencies + rdeps :: RevDepMap, -- ^ set of all package goals, completed and open, with reverse dependencies + open :: PSQ (OpenGoal ()) (), -- ^ set of still open goals (flag and package goals) + next :: BuildType, -- ^ kind of node to generate next + qualifyOptions :: QualifyOptions -- ^ qualification options +} + +-- | Extend the set of open goals with the new goals listed. +-- +-- We also adjust the map of overall goals, and keep track of the +-- reverse dependencies of each of the goals. +extendOpen :: QPN -> [OpenGoal Component] -> BuildState -> BuildState +extendOpen qpn' gs s@(BS { rdeps = gs', open = o' }) = go gs' o' gs + where + go :: RevDepMap -> PSQ (OpenGoal ()) () -> [OpenGoal Component] -> BuildState + go g o [] = s { rdeps = g, open = o } + go g o (ng@(OpenGoal (Flagged _ _ _ _) _gr) : ngs) = go g (cons' ng () o) ngs + -- Note: for 'Flagged' goals, we always insert, so later additions win. + -- This is important, because in general, if a goal is inserted twice, + -- the later addition will have better dependency information. 
+ go g o (ng@(OpenGoal (Stanza _ _ ) _gr) : ngs) = go g (cons' ng () o) ngs + go g o (ng@(OpenGoal (Simple (Dep qpn _) c) _gr) : ngs) + | qpn == qpn' = go g o ngs + -- we ignore self-dependencies at this point; TODO: more care may be needed + | qpn `M.member` g = go (M.adjust ((c, qpn'):) qpn g) o ngs + | otherwise = go (M.insert qpn [(c, qpn')] g) (cons' ng () o) ngs + -- code above is correct; insert/adjust have different arg order + go g o ( (OpenGoal (Simple (Ext _ext ) _) _gr) : ngs) = go g o ngs + go g o ( (OpenGoal (Simple (Lang _lang)_) _gr) : ngs) = go g o ngs + go g o ( (OpenGoal (Simple (Pkg _pn _vr)_) _gr) : ngs)= go g o ngs + + cons' = P.cons . forgetCompOpenGoal + +-- | Given the current scope, qualify all the package names in the given set of +-- dependencies and then extend the set of open goals accordingly. +scopedExtendOpen :: QPN -> I -> QGoalReason -> FlaggedDeps Component PN -> FlagInfo -> + BuildState -> BuildState +scopedExtendOpen qpn i gr fdeps fdefs s = extendOpen qpn gs s + where + -- Qualify all package names + qfdeps = qualifyDeps (qualifyOptions s) qpn fdeps + -- Introduce all package flags + qfdefs = L.map (\ (fn, b) -> Flagged (FN (PI qpn i) fn) b [] []) $ M.toList fdefs + -- Combine new package and flag goals + gs = L.map (flip OpenGoal gr) (qfdefs ++ qfdeps) + -- NOTE: + -- + -- In the expression @qfdefs ++ qfdeps@ above, flags occur potentially + -- multiple times, both via the flag declaration and via dependencies. + -- The order is potentially important, because the occurrences via + -- dependencies may record flag-dependency information. After a number + -- of bugs involving computing this information incorrectly, however, + -- we're currently not using carefully computed inter-flag dependencies + -- anymore, but instead use 'simplifyVar' when computing conflict sets + -- to map all flags of one package to a single flag for conflict set + -- purposes, thereby treating them all as interdependent. + -- + -- If we ever move to a more clever algorithm again, then the line above + -- needs to be looked at very carefully, and probably be replaced by + -- more systematically computed flag dependency information. + +-- | Datatype that encodes what to build next +data BuildType = + Goals -- ^ build a goal choice node + | OneGoal (OpenGoal ()) -- ^ build a node for this goal + | Instance QPN I PInfo QGoalReason -- ^ build a tree for a concrete instance + deriving Show + +build :: BuildState -> Tree QGoalReason +build = ana go + where + go :: BuildState -> TreeF QGoalReason BuildState + + -- If we have a choice between many goals, we just record the choice in + -- the tree. We select each open goal in turn, and before we descend, remove + -- it from the queue of open goals. + go bs@(BS { rdeps = rds, open = gs, next = Goals }) + | P.null gs = DoneF rds + | otherwise = GoalChoiceF (P.mapWithKey (\ g (_sc, gs') -> bs { next = OneGoal g, open = gs' }) + (P.splits gs)) + + -- If we have already picked a goal, then the choice depends on the kind + -- of goal. + -- + -- For a package, we look up the instances available in the global info, + -- and then handle each instance in turn. 
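+
+-- Aside (editorial sketch, not part of this module): 'build' above unfolds
+-- the search tree with an anamorphism ('ana'), i.e. seed-driven corecursion.
+-- The same idea on a toy binary-tree functor, using made-up names and
+-- nothing beyond the Prelude:
+data ToyTreeF r = ToyLeafF Int | ToyNodeF r r       -- one layer of the tree
+data ToyTree    = ToyLeaf  Int | ToyNode ToyTree ToyTree deriving Show
+
+-- Unfold a tree from a seed: the coalgebra decides, per seed, whether to stop
+-- (leaf) or to emit two new seeds (node), producing one layer at a time.
+toyAna :: (s -> ToyTreeF s) -> s -> ToyTree
+toyAna coalg s = case coalg s of
+  ToyLeafF n   -> ToyLeaf n
+  ToyNodeF l r -> ToyNode (toyAna coalg l) (toyAna coalg r)
+
+-- A perfect tree of the given depth, labelled breadth-first.
+toyPerfect :: Int -> ToyTree
+toyPerfect depth = toyAna step (depth, 0)
+  where
+    step (0, x) = ToyLeafF x
+    step (d, x) = ToyNodeF (d - 1, 2 * x + 1) (d - 1, 2 * x + 2)
+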
+ go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Ext _ ) _) _ ) }) = + error "Distribution.Client.Dependency.Modular.Builder: build.go called with Ext goal" + go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Lang _ ) _) _ ) }) = + error "Distribution.Client.Dependency.Modular.Builder: build.go called with Lang goal" + go (BS { index = _ , next = OneGoal (OpenGoal (Simple (Pkg _ _ ) _) _ ) }) = + error "Distribution.Client.Dependency.Modular.Builder: build.go called with Pkg goal" + go bs@(BS { index = idx, next = OneGoal (OpenGoal (Simple (Dep qpn@(Q _ pn) _) _) gr) }) = + -- If the package does not exist in the index, we construct an emty PChoiceF node for it + -- After all, we have no choices here. Alternatively, we could immediately construct + -- a Fail node here, but that would complicate the construction of conflict sets. + -- We will probably want to give this case special treatment when generating error + -- messages though. + case M.lookup pn idx of + Nothing -> PChoiceF qpn gr (P.fromList []) + Just pis -> PChoiceF qpn gr (P.fromList (L.map (\ (i, info) -> + (POption i Nothing, bs { next = Instance qpn i info gr })) + (M.toList pis))) + -- TODO: data structure conversion is rather ugly here + + -- For a flag, we create only two subtrees, and we create them in the order + -- that is indicated by the flag default. + -- + -- TODO: Should we include the flag default in the tree? + go bs@(BS { next = OneGoal (OpenGoal (Flagged qfn@(FN (PI qpn _) _) (FInfo b m w) t f) gr) }) = + FChoiceF qfn gr (w || trivial) m (P.fromList (reorder b + [(True, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn True )) t) bs) { next = Goals }), + (False, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn False)) f) bs) { next = Goals })])) + where + reorder True = id + reorder False = reverse + trivial = L.null t && L.null f + + -- For a stanza, we also create only two subtrees. The order is initially + -- False, True. This can be changed later by constraints (force enabling + -- the stanza by replacing the False branch with failure) or preferences + -- (try enabling the stanza if possible by moving the True branch first). + + go bs@(BS { next = OneGoal (OpenGoal (Stanza qsn@(SN (PI qpn _) _) t) gr) }) = + SChoiceF qsn gr trivial (P.fromList + [(False, bs { next = Goals }), + (True, (extendOpen qpn (L.map (flip OpenGoal (SDependency qsn)) t) bs) { next = Goals })]) + where + trivial = L.null t + + -- For a particular instance, we change the state: we update the scope, + -- and furthermore we update the set of goals. + -- + -- TODO: We could inline this above. + go bs@(BS { next = Instance qpn i (PInfo fdeps fdefs _) _gr }) = + go ((scopedExtendOpen qpn i (PDependency (PI qpn i)) fdeps fdefs bs) + { next = Goals }) + +-- | Interface to the tree builder. Just takes an index and a list of package names, +-- and computes the initial state and then the tree from there. 
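+
+-- Aside (editorial sketch): the branch ordering used for flag choices above,
+-- in isolation. This is a toy list version; the real code reorders the PSQ
+-- of choice nodes, not a plain list.
+orderByDefault :: Bool -> [(Bool, a)] -> [(Bool, a)]
+orderByDefault True  = id        -- default True: try the +flag branch first
+orderByDefault False = reverse   -- default False: try the -flag branch first
+
+-- ghci> map fst (orderByDefault False [(True, "with dep"), (False, "without dep")])
+-- [False,True]
+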
+buildTree :: Index -> Bool -> [PN] -> Tree QGoalReason +buildTree idx ind igs = + build BS { + index = idx + , rdeps = M.fromList (L.map (\ qpn -> (qpn, [])) qpns) + , open = P.fromList (L.map (\ qpn -> (topLevelGoal qpn, ())) qpns) + , next = Goals + , qualifyOptions = defaultQualifyOptions idx + } + where + topLevelGoal qpn = OpenGoal (Simple (Dep qpn (Constrained [])) ()) UserGoal + + qpns | ind = makeIndependent igs + | otherwise = L.map (Q (PP DefaultNamespace Unqualified)) igs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,54 @@ +module Distribution.Client.Dependency.Modular.ConfiguredConversion + ( convCP + ) where + +import Data.Maybe +import Prelude hiding (pi) + +import Distribution.Package (UnitId) + +import Distribution.Client.Types +import Distribution.Client.Dependency.Types (ResolverPackage(..)) +import qualified Distribution.Client.PackageIndex as CI +import qualified Distribution.Simple.PackageIndex as SI + +import Distribution.Client.Dependency.Modular.Configured +import Distribution.Client.Dependency.Modular.Package + +import Distribution.Client.ComponentDeps (ComponentDeps) + +-- | Converts from the solver specific result @CP QPN@ into +-- a 'ResolverPackage', which can then be converted into +-- the install plan. +convCP :: SI.InstalledPackageIndex -> + CI.PackageIndex SourcePackage -> + CP QPN -> ResolverPackage +convCP iidx sidx (CP qpi fa es ds) = + case convPI qpi of + Left pi -> PreExisting + (fromJust $ SI.lookupUnitId iidx pi) + Right pi -> Configured $ ConfiguredPackage + srcpkg + fa + es + ds' + where + Just srcpkg = CI.lookupPackageId sidx pi + where + ds' :: ComponentDeps [ConfiguredId] + ds' = fmap (map convConfId) ds + +convPI :: PI QPN -> Either UnitId PackageId +convPI (PI _ (I _ (Inst pi))) = Left pi +convPI qpi = Right $ confSrcId $ convConfId qpi + +convConfId :: PI QPN -> ConfiguredId +convConfId (PI (Q _ pn) (I v loc)) = ConfiguredId { + confSrcId = sourceId + , confInstId = installedId + } + where + sourceId = PackageIdentifier pn v + installedId = case loc of + Inst pi -> pi + _otherwise -> fakeUnitId sourceId diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Configured.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Configured.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Configured.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Configured.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,13 @@ +module Distribution.Client.Dependency.Modular.Configured + ( CP(..) 
+ ) where + +import Distribution.PackageDescription (FlagAssignment) -- from Cabal +import Distribution.Client.Types (OptionalStanza) +import Distribution.Client.ComponentDeps (ComponentDeps) + +import Distribution.Client.Dependency.Modular.Package + +-- | A configured package is a package instance together with +-- a flag assignment and complete dependencies. +data CP qpn = CP (PI qpn) FlagAssignment [OptionalStanza] (ComponentDeps [PI qpn]) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConflictSet.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConflictSet.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConflictSet.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/ConflictSet.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,74 @@ +{-# LANGUAGE CPP #-} +-- | Conflict sets +-- +-- Intended for double import +-- +-- > import Distribution.Client.Dependency.Modular.ConflictSet (ConflictSet) +-- > import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS +module Distribution.Client.Dependency.Modular.ConflictSet ( + ConflictSet -- opaque + , showCS + -- Set-like operations + , toList + , union + , unions + , insert + , empty + , singleton + , member + , filter + , fromList + ) where + +import Prelude hiding (filter) +import Data.List (intercalate) +import Data.Set (Set) +import qualified Data.Set as S + +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Var + +-- | The set of variables involved in a solver conflict +-- +-- Since these variables should be preprocessed in some way, this type is +-- kept abstract. +newtype ConflictSet qpn = CS { fromConflictSet :: Set (Var qpn) } + deriving (Eq, Ord, Show) + +showCS :: ConflictSet QPN -> String +showCS = intercalate ", " . map showVar . toList + +{------------------------------------------------------------------------------- + Set-like operations +-------------------------------------------------------------------------------} + +toList :: ConflictSet qpn -> [Var qpn] +toList = S.toList . fromConflictSet + +union :: Ord qpn => ConflictSet qpn -> ConflictSet qpn -> ConflictSet qpn +union (CS a) (CS b) = CS (a `S.union` b) + +unions :: Ord qpn => [ConflictSet qpn] -> ConflictSet qpn +unions = CS . S.unions . map fromConflictSet + +insert :: Ord qpn => Var qpn -> ConflictSet qpn -> ConflictSet qpn +insert var (CS set) = CS (S.insert (simplifyVar var) set) + +empty :: ConflictSet qpn +empty = CS S.empty + +singleton :: Var qpn -> ConflictSet qpn +singleton = CS . S.singleton . simplifyVar + +member :: Ord qpn => Var qpn -> ConflictSet qpn -> Bool +member var (CS set) = S.member (simplifyVar var) set + +#if MIN_VERSION_containers(0,5,0) +filter :: (Var qpn -> Bool) -> ConflictSet qpn -> ConflictSet qpn +#else +filter :: Ord qpn => (Var qpn -> Bool) -> ConflictSet qpn -> ConflictSet qpn +#endif +filter p (CS set) = CS $ S.filter p set + +fromList :: Ord qpn => [Var qpn] -> ConflictSet qpn +fromList = CS . S.fromList . 
map simplifyVar diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Cycles.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Cycles.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Cycles.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Cycles.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,52 @@ +{-# LANGUAGE CPP #-} +module Distribution.Client.Dependency.Modular.Cycles ( + detectCyclesPhase + ) where + +import Prelude hiding (cycle) +import Data.Graph (SCC) +import qualified Data.Graph as Gr +import qualified Data.Map as Map + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS + +-- | Find and reject any solutions that are cyclic +detectCyclesPhase :: Tree QGoalReason -> Tree QGoalReason +detectCyclesPhase = cata go + where + -- The only node of interest is DoneF + go :: TreeF QGoalReason (Tree QGoalReason) -> Tree QGoalReason + go (PChoiceF qpn gr cs) = PChoice qpn gr cs + go (FChoiceF qfn gr w m cs) = FChoice qfn gr w m cs + go (SChoiceF qsn gr w cs) = SChoice qsn gr w cs + go (GoalChoiceF cs) = GoalChoice cs + go (FailF cs reason) = Fail cs reason + + -- We check for cycles only if we have actually found a solution + -- This minimizes the number of cycle checks we do as cycles are rare + go (DoneF revDeps) = do + case findCycles revDeps of + Nothing -> Done revDeps + Just relSet -> Fail relSet CyclicDependencies + +-- | Given the reverse dependency map from a 'Done' node in the tree, as well +-- as the full conflict set containing all decisions that led to that 'Done' +-- node, check if the solution is cyclic. If it is, return the conflict set +-- containing all decisions that could potentially break the cycle. +findCycles :: RevDepMap -> Maybe (ConflictSet QPN) +findCycles revDeps = + case cycles of + [] -> Nothing + c:_ -> Just $ CS.unions $ map (varToConflictSet . P) c + where + cycles :: [[QPN]] + cycles = [vs | Gr.CyclicSCC vs <- scc] + + scc :: [SCC QPN] + scc = Gr.stronglyConnComp . map aux . Map.toList $ revDeps + + aux :: (QPN, [(comp, QPN)]) -> (QPN, QPN, [QPN]) + aux (fr, to) = (fr, fr, map snd to) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Dependency.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Dependency.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Dependency.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,400 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.Dependency.Modular.Dependency ( + -- * Variables + Var(..) + , simplifyVar + , varPI + -- * Conflict sets + , ConflictSet + , CS.showCS + -- * Constrained instances + , CI(..) + , merge + -- * Flagged dependencies + , FlaggedDeps + , FlaggedDep(..) + , Dep(..) 
+ , showDep + , flattenFlaggedDeps + , QualifyOptions(..) + , qualifyDeps + , unqualifyDeps + -- ** Setting/forgetting components + , forgetCompOpenGoal + , setCompFlaggedDeps + -- * Reverse dependency map + , RevDepMap + -- * Goals + , Goal(..) + , GoalReason(..) + , QGoalReason + , ResetVar(..) + , goalVarToConflictSet + , varToConflictSet + , goalReasonToVars + -- * Open goals + , OpenGoal(..) + , close + ) where + +import Prelude hiding (pi) + +import Data.Map (Map) +import qualified Data.List as L + +import Language.Haskell.Extension (Extension(..), Language(..)) + +import Distribution.Text + +import Distribution.Client.Dependency.Modular.ConflictSet (ConflictSet) +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Var +import Distribution.Client.Dependency.Modular.Version +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS + +import Distribution.Client.ComponentDeps (Component(..)) + +{------------------------------------------------------------------------------- + Constrained instances +-------------------------------------------------------------------------------} + +-- | Constrained instance. If the choice has already been made, this is +-- a fixed instance, and we record the package name for which the choice +-- is for convenience. Otherwise, it is a list of version ranges paired with +-- the goals / variables that introduced them. +data CI qpn = Fixed I (Var qpn) | Constrained [VROrigin qpn] + deriving (Eq, Show, Functor) + +showCI :: CI QPN -> String +showCI (Fixed i _) = "==" ++ showI i +showCI (Constrained vr) = showVR (collapse vr) + +-- | Merge constrained instances. We currently adopt a lazy strategy for +-- merging, i.e., we only perform actual checking if one of the two choices +-- is fixed. If the merge fails, we return a conflict set indicating the +-- variables responsible for the failure, as well as the two conflicting +-- fragments. +-- +-- Note that while there may be more than one conflicting pair of version +-- ranges, we only return the first we find. +-- +-- TODO: Different pairs might have different conflict sets. We're +-- obviously interested to return a conflict that has a "better" conflict +-- set in the sense the it contains variables that allow us to backjump +-- further. We might apply some heuristics here, such as to change the +-- order in which we check the constraints. +merge :: Ord qpn => CI qpn -> CI qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn) +merge c@(Fixed i g1) d@(Fixed j g2) + | i == j = Right c + | otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, d)) +merge c@(Fixed (I v _) g1) (Constrained rs) = go rs -- I tried "reverse rs" here, but it seems to slow things down ... + where + go [] = Right c + go (d@(vr, g2) : vrs) + | checkVR vr v = go vrs + | otherwise = Left (CS.union (varToConflictSet g1) (varToConflictSet g2), (c, Constrained [d])) +merge c@(Constrained _) d@(Fixed _ _) = merge d c +merge (Constrained rs) (Constrained ss) = Right (Constrained (rs ++ ss)) + +{------------------------------------------------------------------------------- + Flagged dependencies +-------------------------------------------------------------------------------} + +-- | Flagged dependencies +-- +-- 'FlaggedDeps' is the modular solver's view of a packages dependencies: +-- rather than having the dependencies indexed by component, each dependency +-- defines what component it is in. 
+-- +-- However, top-level goals are also modelled as dependencies, but of course +-- these don't actually belong in any component of any package. Therefore, we +-- parameterize 'FlaggedDeps' and derived datatypes with a type argument that +-- specifies whether or not we have a component: we only ever instantiate this +-- type argument with @()@ for top-level goals, or 'Component' for everything +-- else (we could express this as a kind at the type-level, but that would +-- require a very recent GHC). +-- +-- Note however, crucially, that independent of the type parameters, the list +-- of dependencies underneath a flag choice or stanza choices _always_ uses +-- Component as the type argument. This is important: when we pick a value for +-- a flag, we _must_ know what component the new dependencies belong to, or +-- else we don't be able to construct fine-grained reverse dependencies. +type FlaggedDeps comp qpn = [FlaggedDep comp qpn] + +-- | Flagged dependencies can either be plain dependency constraints, +-- or flag-dependent dependency trees. +data FlaggedDep comp qpn = + Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn) + | Stanza (SN qpn) (TrueFlaggedDeps qpn) + | Simple (Dep qpn) comp + deriving (Eq, Show) + +-- | Conversatively flatten out flagged dependencies +-- +-- NOTE: We do not filter out duplicates. +flattenFlaggedDeps :: FlaggedDeps Component qpn -> [(Dep qpn, Component)] +flattenFlaggedDeps = concatMap aux + where + aux :: FlaggedDep Component qpn -> [(Dep qpn, Component)] + aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f + aux (Stanza _ t) = flattenFlaggedDeps t + aux (Simple d c) = [(d, c)] + +type TrueFlaggedDeps qpn = FlaggedDeps Component qpn +type FalseFlaggedDeps qpn = FlaggedDeps Component qpn + +-- | A dependency (constraint) associates a package name with a +-- constrained instance. +-- +-- 'Dep' intentionally has no 'Functor' instance because the type variable +-- is used both to record the dependencies as well as who's doing the +-- depending; having a 'Functor' instance makes bugs where we don't distinguish +-- these two far too likely. (By rights 'Dep' ought to have two type variables.) +data Dep qpn = Dep qpn (CI qpn) -- dependency on a package + | Ext Extension -- dependency on a language extension + | Lang Language -- dependency on a language version + | Pkg PN VR -- dependency on a pkg-config package + deriving (Eq, Show) + +showDep :: Dep QPN -> String +showDep (Dep qpn (Fixed i v) ) = + (if P qpn /= v then showVar v ++ " => " else "") ++ + showQPN qpn ++ "==" ++ showI i +showDep (Dep qpn (Constrained [(vr, v)])) = + showVar v ++ " => " ++ showQPN qpn ++ showVR vr +showDep (Dep qpn ci ) = + showQPN qpn ++ showCI ci +showDep (Ext ext) = "requires " ++ display ext +showDep (Lang lang) = "requires " ++ display lang +showDep (Pkg pn vr) = "requires pkg-config package " + ++ display pn ++ display vr + ++ ", not found in the pkg-config database" + +-- | Options for goal qualification (used in 'qualifyDeps') +-- +-- See also 'defaultQualifyOptions' +data QualifyOptions = QO { + -- | Do we have a version of base relying on another version of base? + qoBaseShim :: Bool + + -- Should dependencies of the setup script be treated as independent? + , qoSetupIndependent :: Bool + } + deriving Show + +-- | Apply built-in rules for package qualifiers +-- +-- Although the behaviour of 'qualifyDeps' depends on the 'QualifyOptions', +-- it is important that these 'QualifyOptions' are _static_. 
Qualification +-- does NOT depend on flag assignment; in other words, it behaves the same no +-- matter which choices the solver makes (modulo the global 'QualifyOptions'); +-- we rely on this in 'linkDeps' (see comment there). +-- +-- NOTE: It's the _dependencies_ of a package that may or may not be independent +-- from the package itself. Package flag choices must of course be consistent. +qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps Component PN -> FlaggedDeps Component QPN +qualifyDeps QO{..} (Q pp@(PP ns q) pn) = go + where + go :: FlaggedDeps Component PN -> FlaggedDeps Component QPN + go = map go1 + + go1 :: FlaggedDep Component PN -> FlaggedDep Component QPN + go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f) + go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t) + go1 (Simple dep comp) = Simple (goD dep comp) comp + + -- Suppose package B has a setup dependency on package A. + -- This will be recorded as something like + -- + -- > Dep "A" (Constrained [(AnyVersion, Goal (P "B") reason]) + -- + -- Observe that when we qualify this dependency, we need to turn that + -- @"A"@ into @"B-setup.A"@, but we should not apply that same qualifier + -- to the goal or the goal reason chain. + goD :: Dep PN -> Component -> Dep QPN + goD (Ext ext) _ = Ext ext + goD (Lang lang) _ = Lang lang + goD (Pkg pkn vr) _ = Pkg pkn vr + goD (Dep dep ci) comp + | qBase dep = Dep (Q (PP ns (Base pn)) dep) (fmap (Q pp) ci) + | qSetup comp = Dep (Q (PP ns (Setup pn)) dep) (fmap (Q pp) ci) + | otherwise = Dep (Q (PP ns inheritedQ) dep) (fmap (Q pp) ci) + + -- If P has a setup dependency on Q, and Q has a regular dependency on R, then + -- we say that the 'Setup' qualifier is inherited: P has an (indirect) setup + -- dependency on R. We do not do this for the base qualifier however. + -- + -- The inherited qualifier is only used for regular dependencies; for setup + -- and base deppendencies we override the existing qualifier. See #3160 for + -- a detailed discussion. + inheritedQ :: Qualifier + inheritedQ = case q of + Setup _ -> q + Unqualified -> q + Base _ -> Unqualified + + -- Should we qualify this goal with the 'Base' package path? + qBase :: PN -> Bool + qBase dep = qoBaseShim && unPackageName dep == "base" + + -- Should we qualify this goal with the 'Setup' packaeg path? + qSetup :: Component -> Bool + qSetup comp = qoSetupIndependent && comp == ComponentSetup + +-- | Remove qualifiers from set of dependencies +-- +-- This is used during link validation: when we link package @Q.A@ to @Q'.A@, +-- then all dependencies @Q.B@ need to be linked to @Q'.B@. In order to compute +-- what to link these dependencies to, we need to requalify @Q.B@ to become +-- @Q'.B@; we do this by first removing all qualifiers and then calling +-- 'qualifyDeps' again. 
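+
+-- Aside (editorial sketch, simplified stand-ins rather than the real
+-- 'Qualifier' machinery): the decision made by 'goD' and 'inheritedQ' above,
+-- collapsed into one made-up function. Base qualification takes precedence
+-- over setup qualification; otherwise a Setup qualifier is inherited from
+-- the depending package, while a Base qualifier is not.
+data ToyQualifier = ToyPlain | ToyBase String | ToySetup String
+  deriving (Eq, Show)
+
+chooseToyQualifier :: Bool          -- ^ qualify as a base shim?
+                   -> Bool          -- ^ dependency of the setup script?
+                   -> ToyQualifier  -- ^ qualifier of the depending package
+                   -> String        -- ^ name of the depending package
+                   -> ToyQualifier
+chooseToyQualifier isBase isSetup parentQ parent
+  | isBase    = ToyBase parent
+  | isSetup   = ToySetup parent
+  | otherwise = case parentQ of
+      ToySetup _ -> parentQ        -- setup qualifier is inherited
+      ToyPlain   -> parentQ
+      ToyBase _  -> ToyPlain       -- base qualifier is not inherited
+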
+unqualifyDeps :: FlaggedDeps comp QPN -> FlaggedDeps comp PN +unqualifyDeps = go + where + go :: FlaggedDeps comp QPN -> FlaggedDeps comp PN + go = map go1 + + go1 :: FlaggedDep comp QPN -> FlaggedDep comp PN + go1 (Flagged fn nfo t f) = Flagged (fmap unq fn) nfo (go t) (go f) + go1 (Stanza sn t) = Stanza (fmap unq sn) (go t) + go1 (Simple dep comp) = Simple (goD dep) comp + + goD :: Dep QPN -> Dep PN + goD (Dep qpn ci) = Dep (unq qpn) (fmap unq ci) + goD (Ext ext) = Ext ext + goD (Lang lang) = Lang lang + goD (Pkg pn vr) = Pkg pn vr + + unq :: QPN -> PN + unq (Q _ pn) = pn + +{------------------------------------------------------------------------------- + Setting/forgetting the Component +-------------------------------------------------------------------------------} + +forgetCompOpenGoal :: OpenGoal Component -> OpenGoal () +forgetCompOpenGoal = mapCompOpenGoal $ const () + +setCompFlaggedDeps :: Component -> FlaggedDeps () qpn -> FlaggedDeps Component qpn +setCompFlaggedDeps = mapCompFlaggedDeps . const + +{------------------------------------------------------------------------------- + Auxiliary: Mapping over the Component goal + + We don't export these, because the only type instantiations for 'a' and 'b' + here should be () or Component. (We could express this at the type level + if we relied on newer versions of GHC.) +-------------------------------------------------------------------------------} + +mapCompOpenGoal :: (a -> b) -> OpenGoal a -> OpenGoal b +mapCompOpenGoal g (OpenGoal d gr) = OpenGoal (mapCompFlaggedDep g d) gr + +mapCompFlaggedDeps :: (a -> b) -> FlaggedDeps a qpn -> FlaggedDeps b qpn +mapCompFlaggedDeps = L.map . mapCompFlaggedDep + +mapCompFlaggedDep :: (a -> b) -> FlaggedDep a qpn -> FlaggedDep b qpn +mapCompFlaggedDep _ (Flagged fn nfo t f) = Flagged fn nfo t f +mapCompFlaggedDep _ (Stanza sn t ) = Stanza sn t +mapCompFlaggedDep g (Simple pn a ) = Simple pn (g a) + +{------------------------------------------------------------------------------- + Reverse dependency map +-------------------------------------------------------------------------------} + +-- | A map containing reverse dependencies between qualified +-- package names. +type RevDepMap = Map QPN [(Component, QPN)] + +{------------------------------------------------------------------------------- + Goals +-------------------------------------------------------------------------------} + +-- | A goal is just a solver variable paired with a reason. +-- The reason is only used for tracing. +data Goal qpn = Goal (Var qpn) (GoalReason qpn) + deriving (Eq, Show, Functor) + +-- | Reason why a goal is being added to a goal set. +data GoalReason qpn = + UserGoal + | PDependency (PI qpn) + | FDependency (FN qpn) Bool + | SDependency (SN qpn) + deriving (Eq, Show, Functor) + +type QGoalReason = GoalReason QPN + +class ResetVar f where + resetVar :: Var qpn -> f qpn -> f qpn + +instance ResetVar CI where + resetVar v (Fixed i _) = Fixed i v + resetVar v (Constrained vrs) = Constrained (L.map (\ (x, y) -> (x, resetVar v y)) vrs) + +instance ResetVar Dep where + resetVar v (Dep qpn ci) = Dep qpn (resetVar v ci) + resetVar _ (Ext ext) = Ext ext + resetVar _ (Lang lang) = Lang lang + resetVar _ (Pkg pn vr) = Pkg pn vr + +instance ResetVar Var where + resetVar = const + +-- | Compute a singleton conflict set from a goal, containing just +-- the goal variable. +-- +-- NOTE: This is just a call to 'varToConflictSet' under the hood; +-- the 'GoalReason' is ignored. 
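+
+-- Aside (editorial sketch, toy types only): 'ResetVar' above re-points the
+-- variable that a constraint records as its origin, leaving the constraint
+-- itself untouched. A string-based miniature of the 'CI' instance:
+data ToyCI = ToyFixedAt Int String        -- fixed instance plus blamed variable
+           | ToyRanges [(Int, String)]    -- (range id, blamed variable) pairs
+  deriving Show
+
+resetToyOrigin :: String -> ToyCI -> ToyCI
+resetToyOrigin v (ToyFixedAt i _) = ToyFixedAt i v
+resetToyOrigin v (ToyRanges rs)   = ToyRanges [ (r, v) | (r, _) <- rs ]
+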
+goalVarToConflictSet :: Goal qpn -> ConflictSet qpn +goalVarToConflictSet (Goal g _gr) = varToConflictSet g + +-- | Compute a singleton conflict set from a 'Var' +varToConflictSet :: Var qpn -> ConflictSet qpn +varToConflictSet = CS.singleton + +-- | A goal reason is mostly just a variable paired with the +-- decision we made for that variable (except for user goals, +-- where we cannot really point to a solver variable). This +-- function drops the decision and recovers the list of +-- variables (which will be empty or contain one element). +-- +goalReasonToVars :: GoalReason qpn -> [Var qpn] +goalReasonToVars UserGoal = [] +goalReasonToVars (PDependency (PI qpn _)) = [P qpn] +goalReasonToVars (FDependency qfn _) = [F qfn] +goalReasonToVars (SDependency qsn) = [S qsn] + +{------------------------------------------------------------------------------- + Open goals +-------------------------------------------------------------------------------} + +-- | For open goals as they occur during the build phase, we need to store +-- additional information about flags. +data OpenGoal comp = OpenGoal (FlaggedDep comp QPN) QGoalReason + deriving (Eq, Show) + +-- | Closes a goal, i.e., removes all the extraneous information that we +-- need only during the build phase. +close :: OpenGoal comp -> Goal QPN +close (OpenGoal (Simple (Dep qpn _) _) gr) = Goal (P qpn) gr +close (OpenGoal (Simple (Ext _) _) _ ) = + error "Distribution.Client.Dependency.Modular.Dependency.close: called on Ext goal" +close (OpenGoal (Simple (Lang _) _) _ ) = + error "Distribution.Client.Dependency.Modular.Dependency.close: called on Lang goal" +close (OpenGoal (Simple (Pkg _ _) _) _ ) = + error "Distribution.Client.Dependency.Modular.Dependency.close: called on Pkg goal" +close (OpenGoal (Flagged qfn _ _ _ ) gr) = Goal (F qfn) gr +close (OpenGoal (Stanza qsn _) gr) = Goal (S qsn) gr + +{------------------------------------------------------------------------------- + Version ranges paired with origins +-------------------------------------------------------------------------------} + +type VROrigin qpn = (VR, Var qpn) + +-- | Helper function to collapse a list of version ranges with origins into +-- a single, simplified, version range. +collapse :: [VROrigin qpn] -> VR +collapse = simplifyVR . L.foldr ((.&&.) . 
fst) anyVR diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Explore.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Explore.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Explore.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Explore.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,123 @@ +module Distribution.Client.Dependency.Modular.Explore + ( backjump + , backjumpAndExplore + ) where + +import Data.Foldable as F +import Data.Map as M + +import Distribution.Client.Dependency.Modular.Assignment +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Log +import Distribution.Client.Dependency.Modular.Message +import Distribution.Client.Dependency.Modular.Package +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS +import Distribution.Client.Dependency.Modular.Tree +import qualified Distribution.Client.Dependency.Types as T + +-- | This function takes the variable we're currently considering, an +-- initial conflict set and a +-- list of children's logs. Each log yields either a solution or a +-- conflict set. The result is a combined log for the parent node that +-- has explored a prefix of the children. +-- +-- We can stop traversing the children's logs if we find an individual +-- conflict set that does not contain the current variable. In this +-- case, we can just lift the conflict set to the current level, +-- because the current level cannot possibly have contributed to this +-- conflict, so no other choice at the current level would avoid the +-- conflict. +-- +-- If any of the children might contain a successful solution, we can +-- return it immediately. If all children contain conflict sets, we can +-- take the union as the combined conflict set. +-- +-- The initial conflict set corresponds to the justification that we +-- have to choose this goal at all. There is a reason why we have +-- introduced the goal in the first place, and this reason is in conflict +-- with the (virtual) option not to choose anything for the current +-- variable. See also the comments for 'avoidSet'. +-- +backjump :: F.Foldable t => Var QPN -> ConflictSet QPN -> t (ConflictSetLog a) -> ConflictSetLog a +backjump var initial xs = F.foldr combine logBackjump xs initial + where + combine :: ConflictSetLog a + -> (ConflictSet QPN -> ConflictSetLog a) + -> ConflictSet QPN -> ConflictSetLog a + combine (T.Done x) _ _ = T.Done x + combine (T.Fail cs) f csAcc + | not (var `CS.member` cs) = logBackjump cs + | otherwise = f (csAcc `CS.union` cs) + combine (T.Step m ms) f cs = T.Step m (combine ms f cs) + + logBackjump :: ConflictSet QPN -> ConflictSetLog a + logBackjump cs = failWith (Failure cs Backjump) cs + +type ConflictSetLog = T.Progress Message (ConflictSet QPN) + +-- | A tree traversal that simultaneously propagates conflict sets up +-- the tree from the leaves and creates a log. 
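+
+-- Aside (editorial sketch; plain lists instead of 'Progress' logs, and a
+-- list of variables instead of a 'ConflictSet'): the core of 'backjump'
+-- above. Scanning the children stops early as soon as one child's conflict
+-- does not mention the variable chosen at this level, because no sibling
+-- choice here could have avoided that conflict.
+backjumpToy :: Eq v
+            => v                   -- ^ variable decided at this node
+            -> [v]                 -- ^ initial conflict (cf. 'avoidSet')
+            -> [Either [v] a]      -- ^ children: conflict vars or a solution
+            -> Either [v] a
+backjumpToy var initial = go initial
+  where
+    go acc []            = Left acc           -- every child failed: combined blame
+    go _   (Right x : _) = Right x            -- a child succeeded: done
+    go acc (Left cs : rest)
+      | var `notElem` cs = Left cs            -- backjump past this level
+      | otherwise        = go (acc `cunion` cs) rest
+    cunion xs ys = xs ++ [ y | y <- ys, y `notElem` xs ]   -- small list union
+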
+exploreLog :: Tree QGoalReason -> (Assignment -> ConflictSetLog (Assignment, RevDepMap)) +exploreLog = cata go + where + go :: TreeF QGoalReason (Assignment -> ConflictSetLog (Assignment, RevDepMap)) + -> (Assignment -> ConflictSetLog (Assignment, RevDepMap)) + go (FailF c fr) _ = failWith (Failure c fr) c + go (DoneF rdm) a = succeedWith Success (a, rdm) + go (PChoiceF qpn gr ts) (A pa fa sa) = + backjump (P qpn) (avoidSet (P qpn) gr) $ -- try children in order, + P.mapWithKey -- when descending ... + (\ i@(POption k _) r -> tryWith (TryP qpn i) $ -- log and ... + r (A (M.insert qpn k pa) fa sa)) -- record the pkg choice + ts + go (FChoiceF qfn gr _ _ ts) (A pa fa sa) = + backjump (F qfn) (avoidSet (F qfn) gr) $ -- try children in order, + P.mapWithKey -- when descending ... + (\ k r -> tryWith (TryF qfn k) $ -- log and ... + r (A pa (M.insert qfn k fa) sa)) -- record the pkg choice + ts + go (SChoiceF qsn gr _ ts) (A pa fa sa) = + backjump (S qsn) (avoidSet (S qsn) gr) $ -- try children in order, + P.mapWithKey -- when descending ... + (\ k r -> tryWith (TryS qsn k) $ -- log and ... + r (A pa fa (M.insert qsn k sa))) -- record the pkg choice + ts + go (GoalChoiceF ts) a = + P.casePSQ ts + (failWith (Failure CS.empty EmptyGoalChoice) CS.empty) -- empty goal choice is an internal error + (\ k v _xs -> continueWith (Next (close k)) (v a)) -- commit to the first goal choice + +-- | Build a conflict set corresponding to the (virtual) option not to +-- choose a solution for a goal at all. +-- +-- In the solver, the set of goals is not statically determined, but depends +-- on the choices we make. Therefore, when dealing with conflict sets, we +-- always have to consider that we could perhaps make choices that would +-- avoid the existence of the goal completely. +-- +-- Whenever we actual introduce a choice in the tree, we have already established +-- that the goal cannot be avoided. This is tracked in the "goal reason". +-- The choice to avoid the goal therefore is a conflict between the goal itself +-- and its goal reason. We build this set here, and pass it to the 'backjump' +-- function as the initial conflict set. +-- +-- This has two effects: +-- +-- - In a situation where there are no choices available at all (this happens +-- if an unknown package is requested), the initial conflict set becomes the +-- actual conflict set. +-- +-- - In a situation where we backjump past the current node, the goal reason +-- of the current node will be added to the conflict set. +-- +avoidSet :: Var QPN -> QGoalReason -> ConflictSet QPN +avoidSet var gr = + CS.fromList (var : goalReasonToVars gr) + +-- | Interface. 
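+
+-- Aside (editorial sketch): 'exploreLog' above folds the tree into a
+-- function that still expects the assignment accumulated on the way down
+-- from the root. The same shape on a toy tree, with the path taken so far
+-- playing the role of the partial 'Assignment':
+data PathTree = PTip Int | PBranch PathTree PathTree
+
+labelledTips :: PathTree -> [Bool] -> [([Bool], Int)]
+labelledTips (PTip n)      path = [(reverse path, n)]
+labelledTips (PBranch l r) path =
+  labelledTips l (False : path) ++ labelledTips r (True : path)
+
+-- ghci> labelledTips (PBranch (PTip 1) (PTip 2)) []
+-- [([False],1),([True],2)]
+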
+backjumpAndExplore :: Tree QGoalReason -> Log Message (Assignment, RevDepMap) +backjumpAndExplore t = toLog $ exploreLog t (A M.empty M.empty M.empty) + where + toLog :: T.Progress step fail done -> Log step done + toLog = T.foldProgress T.Step (const (T.Fail ())) T.Done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Flag.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Flag.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Flag.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Flag.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,80 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Client.Dependency.Modular.Flag + ( FInfo(..) + , Flag + , FlagInfo + , FN(..) + , QFN + , QSN + , SN(..) + , mkFlag + , showFBool + , showQFN + , showQFNBool + , showQSN + , showQSNBool + ) where + +import Data.Map as M +import Prelude hiding (pi) + +import Distribution.PackageDescription hiding (Flag) -- from Cabal + +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Types (OptionalStanza(..)) + +-- | Flag name. Consists of a package instance and the flag identifier itself. +data FN qpn = FN (PI qpn) Flag + deriving (Eq, Ord, Show, Functor) + +-- | Flag identifier. Just a string. +type Flag = FlagName + +unFlag :: Flag -> String +unFlag (FlagName fn) = fn + +mkFlag :: String -> Flag +mkFlag fn = FlagName fn + +-- | Flag info. Default value, whether the flag is manual, and +-- whether the flag is weak. Manual flags can only be set explicitly. +-- Weak flags are typically deferred by the solver. +data FInfo = FInfo { fdefault :: Bool, fmanual :: Bool, fweak :: Bool } + deriving (Eq, Ord, Show) + +-- | Flag defaults. +type FlagInfo = Map Flag FInfo + +-- | Qualified flag name. +type QFN = FN QPN + +-- | Stanza name. Paired with a package name, much like a flag. +data SN qpn = SN (PI qpn) OptionalStanza + deriving (Eq, Ord, Show, Functor) + +-- | Qualified stanza name. +type QSN = SN QPN + +unStanza :: OptionalStanza -> String +unStanza TestStanzas = "test" +unStanza BenchStanzas = "bench" + +showQFNBool :: QFN -> Bool -> String +showQFNBool qfn@(FN pi _f) b = showPI pi ++ ":" ++ showFBool qfn b + +showQSNBool :: QSN -> Bool -> String +showQSNBool qsn@(SN pi _f) b = showPI pi ++ ":" ++ showSBool qsn b + +showFBool :: FN qpn -> Bool -> String +showFBool (FN _ f) True = "+" ++ unFlag f +showFBool (FN _ f) False = "-" ++ unFlag f + +showSBool :: SN qpn -> Bool -> String +showSBool (SN _ s) True = "*" ++ unStanza s +showSBool (SN _ s) False = "!" 
++ unStanza s + +showQFN :: QFN -> String +showQFN (FN pi f) = showPI pi ++ ":" ++ unFlag f + +showQSN :: QSN -> String +showQSN (SN pi f) = showPI pi ++ ":" ++ unStanza f diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/IndexConversion.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/IndexConversion.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/IndexConversion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/IndexConversion.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,219 @@ +module Distribution.Client.Dependency.Modular.IndexConversion + ( convPIs + ) where + +import Data.List as L +import Data.Map as M +import Data.Maybe +import Data.Monoid as Mon +import Prelude hiding (pi) + +import qualified Distribution.Client.PackageIndex as CI +import Distribution.Client.Types +import Distribution.Client.ComponentDeps (Component(..)) +import Distribution.Compiler +import Distribution.InstalledPackageInfo as IPI +import Distribution.Package -- from Cabal +import Distribution.PackageDescription as PD -- from Cabal +import Distribution.PackageDescription.Configuration as PDC +import qualified Distribution.Simple.PackageIndex as SI +import Distribution.System + +import Distribution.Client.Dependency.Modular.Dependency as D +import Distribution.Client.Dependency.Modular.Flag as F +import Distribution.Client.Dependency.Modular.Index +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree +import Distribution.Client.Dependency.Modular.Version + +-- | Convert both the installed package index and the source package +-- index into one uniform solver index. +-- +-- We use 'allPackagesBySourcePackageId' for the installed package index +-- because that returns us several instances of the same package and version +-- in order of preference. This allows us in principle to \"shadow\" +-- packages if there are several installed packages of the same version. +-- There are currently some shortcomings in both GHC and Cabal in +-- resolving these situations. However, the right thing to do is to +-- fix the problem there, so for now, shadowing is only activated if +-- explicitly requested. +convPIs :: OS -> Arch -> CompilerInfo -> Bool -> Bool -> + SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> Index +convPIs os arch comp sip strfl iidx sidx = + mkIndex (convIPI' sip iidx ++ convSPI' os arch comp strfl sidx) + +-- | Convert a Cabal installed package index to the simpler, +-- more uniform index format of the solver. +convIPI' :: Bool -> SI.InstalledPackageIndex -> [(PN, I, PInfo)] +convIPI' sip idx = + -- apply shadowing whenever there are multiple installed packages with + -- the same version + [ maybeShadow (convIP idx pkg) + | (_pkgid, pkgs) <- SI.allPackagesBySourcePackageId idx + , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ] + where + + -- shadowing is recorded in the package info + shadow (pn, i, PInfo fdeps fds _) | sip = (pn, i, PInfo fdeps fds (Just Shadowed)) + shadow x = x + +-- | Convert a single installed package into the solver-specific format. 
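+
+-- Aside (editorial sketch; 'markDup' is a made-up name): the shadowing in
+-- convIPI' above relies on zipping a list against @id : repeat f@, so the
+-- first package of each version is left alone and every later duplicate is
+-- marked. The same pattern on plain values:
+markDup :: (a -> a) -> [a] -> [a]
+markDup f xs = [ g x | (g, x) <- zip (id : repeat f) xs ]
+
+-- ghci> markDup (++ " (shadowed)") ["foo-1.0 (a)", "foo-1.0 (b)", "foo-1.0 (c)"]
+-- ["foo-1.0 (a)","foo-1.0 (b) (shadowed)","foo-1.0 (c) (shadowed)"]
+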
+convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo) +convIP idx ipi = + let ipid = IPI.installedUnitId ipi + i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) + pn = pkgName (sourcePackageId ipi) + in case mapM (convIPId pn idx) (IPI.depends ipi) of + Nothing -> (pn, i, PInfo [] M.empty (Just Broken)) + Just fds -> (pn, i, PInfo (setComp fds) M.empty Nothing) + where + -- We assume that all dependencies of installed packages are _library_ deps + setComp = setCompFlaggedDeps ComponentLib +-- TODO: Installed packages should also store their encapsulations! + +-- | Convert dependencies specified by an installed package id into +-- flagged dependencies of the solver. +-- +-- May return Nothing if the package can't be found in the index. That +-- indicates that the original package having this dependency is broken +-- and should be ignored. +convIPId :: PN -> SI.InstalledPackageIndex -> UnitId -> Maybe (FlaggedDep () PN) +convIPId pn' idx ipid = + case SI.lookupUnitId idx ipid of + Nothing -> Nothing + Just ipi -> let i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) + pn = pkgName (sourcePackageId ipi) + in Just (D.Simple (Dep pn (Fixed i (P pn'))) ()) + +-- | Convert a cabal-install source package index to the simpler, +-- more uniform index format of the solver. +convSPI' :: OS -> Arch -> CompilerInfo -> Bool -> + CI.PackageIndex SourcePackage -> [(PN, I, PInfo)] +convSPI' os arch cinfo strfl = L.map (convSP os arch cinfo strfl) . CI.allPackages + +-- | Convert a single source package into the solver-specific format. +convSP :: OS -> Arch -> CompilerInfo -> Bool -> SourcePackage -> (PN, I, PInfo) +convSP os arch cinfo strfl (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) = + let i = I pv InRepo + in (pn, i, convGPD os arch cinfo strfl (PI pn i) gpd) + +-- We do not use 'flattenPackageDescription' or 'finalizePackageDescription' +-- from 'Distribution.PackageDescription.Configuration' here, because we +-- want to keep the condition tree, but simplify much of the test. + +-- | Convert a generic package description to a solver-specific 'PInfo'. +convGPD :: OS -> Arch -> CompilerInfo -> Bool -> + PI PN -> GenericPackageDescription -> PInfo +convGPD os arch cinfo strfl pi + (GenericPackageDescription pkg flags libs exes tests benchs) = + let + fds = flagInfo strfl flags + + conv :: Mon.Monoid a => Component -> (a -> BuildInfo) -> + CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN + conv comp getInfo = convCondTree os arch cinfo pi fds comp getInfo . + PDC.addBuildableCondition getInfo + in + PInfo + (maybe [] (conv ComponentLib libBuildInfo ) libs ++ + maybe [] (convSetupBuildInfo pi) (setupBuildInfo pkg) ++ + concatMap (\(nm, ds) -> conv (ComponentExe nm) buildInfo ds) exes ++ + prefix (Stanza (SN pi TestStanzas)) + (L.map (\(nm, ds) -> conv (ComponentTest nm) testBuildInfo ds) tests) ++ + prefix (Stanza (SN pi BenchStanzas)) + (L.map (\(nm, ds) -> conv (ComponentBench nm) benchmarkBuildInfo ds) benchs)) + fds + Nothing + +prefix :: (FlaggedDeps comp qpn -> FlaggedDep comp' qpn) -> [FlaggedDeps comp qpn] -> FlaggedDeps comp' qpn +prefix _ [] = [] +prefix f fds = [f (concat fds)] + +-- | Convert flag information. Automatic flags are now considered weak +-- unless strong flags have been selected explicitly. +flagInfo :: Bool -> [PD.Flag] -> FlagInfo +flagInfo strfl = M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b m (not (strfl || m)))) + +-- | Convert condition trees to flagged dependencies. 
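+
+-- Aside (editorial sketch; a toy record, not the real 'FInfo'): the weak-flag
+-- rule encoded by 'flagInfo' above. An automatic flag is weak, i.e. its
+-- choice is deferred by the solver, unless strong flags were requested;
+-- a manual flag is never weak.
+data ToyFlagInfo = ToyFlagInfo
+  { tfDefault :: Bool    -- ^ default value from the .cabal file
+  , tfManual  :: Bool    -- ^ declared manual?
+  , tfWeak    :: Bool    -- ^ defer choosing it?
+  } deriving Show
+
+mkToyFlagInfo :: Bool   -- ^ strong flags requested?
+              -> Bool   -- ^ default
+              -> Bool   -- ^ manual
+              -> ToyFlagInfo
+mkToyFlagInfo strongFlags def manual =
+  ToyFlagInfo def manual (not (strongFlags || manual))
+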
+convCondTree :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo -> + Component -> + (a -> BuildInfo) -> + CondTree ConfVar [Dependency] a -> FlaggedDeps Component PN +convCondTree os arch cinfo pi@(PI pn _) fds comp getInfo (CondNode info ds branches) = + L.map (\d -> D.Simple (convDep pn d) comp) ds -- unconditional package dependencies + ++ L.map (\e -> D.Simple (Ext e) comp) (PD.allExtensions bi) -- unconditional extension dependencies + ++ L.map (\l -> D.Simple (Lang l) comp) (PD.allLanguages bi) -- unconditional language dependencies + ++ L.map (\(Dependency pkn vr) -> D.Simple (Pkg pkn vr) comp) (PD.pkgconfigDepends bi) -- unconditional pkg-config dependencies + ++ concatMap (convBranch os arch cinfo pi fds comp getInfo) branches + where + bi = getInfo info + +-- | Branch interpreter. +-- +-- Here, we try to simplify one of Cabal's condition tree branches into the +-- solver's flagged dependency format, which is weaker. Condition trees can +-- contain complex logical expression composed from flag choices and special +-- flags (such as architecture, or compiler flavour). We try to evaluate the +-- special flags and subsequently simplify to a tree that only depends on +-- simple flag choices. +convBranch :: OS -> Arch -> CompilerInfo -> + PI PN -> FlagInfo -> + Component -> + (a -> BuildInfo) -> + (Condition ConfVar, + CondTree ConfVar [Dependency] a, + Maybe (CondTree ConfVar [Dependency] a)) -> FlaggedDeps Component PN +convBranch os arch cinfo pi@(PI pn _) fds comp getInfo (c', t', mf') = + go c' ( convCondTree os arch cinfo pi fds comp getInfo t') + (maybe [] (convCondTree os arch cinfo pi fds comp getInfo) mf') + where + go :: Condition ConfVar -> + FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN + go (Lit True) t _ = t + go (Lit False) _ f = f + go (CNot c) t f = go c f t + go (CAnd c d) t f = go c (go d t f) f + go (COr c d) t f = go c t (go d t f) + go (Var (Flag fn)) t f = extractCommon t f ++ [Flagged (FN pi fn) (fds ! fn) t f] + go (Var (OS os')) t f + | os == os' = t + | otherwise = f + go (Var (Arch arch')) t f + | arch == arch' = t + | otherwise = f + go (Var (Impl cf cvr)) t f + | matchImpl (compilerInfoId cinfo) || + -- fixme: Nothing should be treated as unknown, rather than empty + -- list. This code should eventually be changed to either + -- support partial resolution of compiler flags or to + -- complain about incompletely configured compilers. + any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t + | otherwise = f + where + matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv + + -- If both branches contain the same package as a simple dep, we lift it to + -- the next higher-level, but without constraints. This heuristic together + -- with deferring flag choices will then usually first resolve this package, + -- and try an already installed version before imposing a default flag choice + -- that might not be what we want. + -- + -- Note that we make assumptions here on the form of the dependencies that + -- can occur at this point. In particular, no occurrences of Fixed, and no + -- occurrences of multiple version ranges, as all dependencies below this + -- point have been generated using 'convDep'. + extractCommon :: FlaggedDeps Component PN -> FlaggedDeps Component PN -> FlaggedDeps Component PN + extractCommon ps ps' = [ D.Simple (Dep pn1 (Constrained [(vr1 .||. 
vr2, P pn)])) comp + | D.Simple (Dep pn1 (Constrained [(vr1, _)])) _ <- ps + , D.Simple (Dep pn2 (Constrained [(vr2, _)])) _ <- ps' + , pn1 == pn2 + ] + +-- | Convert a Cabal dependency to a solver-specific dependency. +convDep :: PN -> Dependency -> Dep PN +convDep pn' (Dependency pn vr) = Dep pn (Constrained [(vr, P pn')]) + +-- | Convert setup dependencies +convSetupBuildInfo :: PI PN -> SetupBuildInfo -> FlaggedDeps Component PN +convSetupBuildInfo (PI pn _i) nfo = + L.map (\d -> D.Simple (convDep pn d) ComponentSetup) (PD.setupDepends nfo) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Index.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Index.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Index.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Index.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,52 @@ +module Distribution.Client.Dependency.Modular.Index + ( Index + , PInfo(..) + , defaultQualifyOptions + , mkIndex + ) where + +import Data.List as L +import Data.Map as M +import Prelude hiding (pi) + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree + +import Distribution.Client.ComponentDeps (Component) + +-- | An index contains information about package instances. This is a nested +-- dictionary. Package names are mapped to instances, which in turn is mapped +-- to info. +type Index = Map PN (Map I PInfo) + +-- | Info associated with a package instance. +-- Currently, dependencies, flags and failure reasons. +-- Packages that have a failure reason recorded for them are disabled +-- globally, for reasons external to the solver. We currently use this +-- for shadowing which essentially is a GHC limitation, and for +-- installed packages that are broken. +data PInfo = PInfo (FlaggedDeps Component PN) FlagInfo (Maybe FailReason) + deriving (Show) + +mkIndex :: [(PN, I, PInfo)] -> Index +mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs)) + +groupMap :: Ord a => [(a, b)] -> Map a [b] +groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs) + +defaultQualifyOptions :: Index -> QualifyOptions +defaultQualifyOptions idx = QO { + qoBaseShim = or [ dep == base + | -- Find all versions of base .. + Just is <- [M.lookup base idx] + -- .. which are installed .. + , (I _ver (Inst _), PInfo deps _flagNfo _fr) <- M.toList is + -- .. and flatten all their dependencies .. 
+ , (Dep dep _ci, _comp) <- flattenFlaggedDeps deps + ] + , qoSetupIndependent = True + } + where + base = PackageName "base" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Linking.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Linking.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Linking.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Linking.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,574 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +module Distribution.Client.Dependency.Modular.Linking ( + addLinking + , validateLinking + ) where + +import Prelude hiding (pi) +import Control.Exception (assert) +import Control.Monad.Reader +import Control.Monad.State +import Data.Maybe (catMaybes) +import Data.Map (Map, (!)) +import Data.List (intercalate) +import Data.Set (Set) +import qualified Data.Map as M +import qualified Data.Set as S +import qualified Data.Traversable as T + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif + +import Distribution.Client.Dependency.Modular.Assignment +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Index +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS + +import Distribution.Client.Types (OptionalStanza(..)) +import Distribution.Client.ComponentDeps (Component) + +{------------------------------------------------------------------------------- + Add linking +-------------------------------------------------------------------------------} + +type RelatedGoals = Map (PN, I) [PP] +type Linker = Reader RelatedGoals + +-- | Introduce link nodes into tree tree +-- +-- Linking is a traversal of the solver tree that adapts package choice nodes +-- and adds the option to link wherever appropriate: Package goals are called +-- "related" if they are for the same version of the same package (but have +-- different prefixes). A link option is available in a package choice node +-- whenever we can choose an instance that has already been chosen for a related +-- goal at a higher position in the tree. +-- +-- The code here proceeds by maintaining a finite map recording choices that +-- have been made at higher positions in the tree. For each pair of package name +-- and instance, it stores the prefixes at which we have made a choice for this +-- package instance. Whenever we make a choice, we extend the map. Whenever we +-- find a choice, we look into the map in order to find out what link options we +-- have to add. +addLinking :: Tree QGoalReason -> Tree QGoalReason +addLinking = (`runReader` M.empty) . 
cata go + where + go :: TreeF QGoalReason (Linker (Tree QGoalReason)) -> Linker (Tree QGoalReason) + + -- The only nodes of interest are package nodes + go (PChoiceF qpn gr cs) = do + env <- ask + cs' <- T.sequence $ P.mapWithKey (goP qpn) cs + let newCs = concatMap (linkChoices env qpn) (P.toList cs') + return $ PChoice qpn gr (cs' `P.union` P.fromList newCs) + go _otherwise = + innM _otherwise + + -- Recurse underneath package choices. Here we just need to make sure + -- that we record the package choice so that it is available below + goP :: QPN -> POption -> Linker (Tree QGoalReason) -> Linker (Tree QGoalReason) + goP (Q pp pn) (POption i Nothing) = local (M.insertWith (++) (pn, i) [pp]) + goP _ _ = alreadyLinked + +linkChoices :: RelatedGoals -> QPN -> (POption, Tree QGoalReason) -> [(POption, Tree QGoalReason)] +linkChoices related (Q _pp pn) (POption i Nothing, subtree) = + map aux (M.findWithDefault [] (pn, i) related) + where + aux :: PP -> (POption, Tree QGoalReason) + aux pp = (POption i (Just pp), subtree) +linkChoices _ _ (POption _ (Just _), _) = + alreadyLinked + +alreadyLinked :: a +alreadyLinked = error "addLinking called on tree that already contains linked nodes" + +{------------------------------------------------------------------------------- + Validation + + Validation of links is a separate pass that's performed after normal + validation. Validation of links checks that if the tree indicates that a + package is linked, then everything underneath that choice really matches the + package we have linked to. + + This is interesting because it isn't unidirectional. Consider that we've + chosen a.foo to be version 1 and later decide that b.foo should link to a.foo. + Now foo depends on bar. Because a.foo and b.foo are linked, it's required that + a.bar and b.bar are also linked. However, it's not required that we actually + choose a.bar before b.bar. Goal choice order is relatively free. It's possible + that we choose a.bar first, but also possible that we choose b.bar first. In + both cases, we have to recognize that we have freedom of choice for the first + of the two, but no freedom of choice for the second. + + This is what LinkGroups are all about. Using LinkGroup, we can record (in the + situation above) that a.bar and b.bar need to be linked even if we haven't + chosen either of them yet. +-------------------------------------------------------------------------------} + +data ValidateState = VS { + vsIndex :: Index + , vsLinks :: Map QPN LinkGroup + , vsFlags :: FAssignment + , vsStanzas :: SAssignment + , vsQualifyOptions :: QualifyOptions + } + deriving Show + +type Validate = Reader ValidateState + +-- | Validate linked packages +-- +-- Verify that linked packages have +-- +-- * Linked dependencies, +-- * Equal flag assignments +-- * Equal stanza assignments +validateLinking :: Index -> Tree QGoalReason -> Tree QGoalReason +validateLinking index = (`runReader` initVS) . 
cata go + where + go :: TreeF QGoalReason (Validate (Tree QGoalReason)) -> Validate (Tree QGoalReason) + + go (PChoiceF qpn gr cs) = + PChoice qpn gr <$> T.sequence (P.mapWithKey (goP qpn) cs) + go (FChoiceF qfn gr t m cs) = + FChoice qfn gr t m <$> T.sequence (P.mapWithKey (goF qfn) cs) + go (SChoiceF qsn gr t cs) = + SChoice qsn gr t <$> T.sequence (P.mapWithKey (goS qsn) cs) + + -- For the other nodes we just recurse + go (GoalChoiceF cs) = GoalChoice <$> T.sequence cs + go (DoneF revDepMap) = return $ Done revDepMap + go (FailF conflictSet failReason) = return $ Fail conflictSet failReason + + -- Package choices + goP :: QPN -> POption -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goP qpn@(Q _pp pn) opt@(POption i _) r = do + vs <- ask + let PInfo deps _ _ = vsIndex vs ! pn ! i + qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps + case execUpdateState (pickPOption qpn opt qdeps) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs') r + + -- Flag choices + goF :: QFN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goF qfn b r = do + vs <- ask + case execUpdateState (pickFlag qfn b) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs') r + + -- Stanza choices (much the same as flag choices) + goS :: QSN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goS qsn b r = do + vs <- ask + case execUpdateState (pickStanza qsn b) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs') r + + initVS :: ValidateState + initVS = VS { + vsIndex = index + , vsLinks = M.empty + , vsFlags = M.empty + , vsStanzas = M.empty + , vsQualifyOptions = defaultQualifyOptions index + } + +{------------------------------------------------------------------------------- + Updating the validation state +-------------------------------------------------------------------------------} + +type Conflict = (ConflictSet QPN, String) + +newtype UpdateState a = UpdateState { + unUpdateState :: StateT ValidateState (Either Conflict) a + } + deriving (Functor, Applicative, Monad) + +instance MonadState ValidateState UpdateState where + get = UpdateState $ get + put st = UpdateState $ do + assert (lgInvariant $ vsLinks st) $ return () + put st + +lift' :: Either Conflict a -> UpdateState a +lift' = UpdateState . lift + +conflict :: Conflict -> UpdateState a +conflict = lift' . Left + +execUpdateState :: UpdateState () -> ValidateState -> Either Conflict ValidateState +execUpdateState = execStateT . unUpdateState + +pickPOption :: QPN -> POption -> FlaggedDeps Component QPN -> UpdateState () +pickPOption qpn (POption i Nothing) _deps = pickConcrete qpn i +pickPOption qpn (POption i (Just pp')) deps = pickLink qpn i pp' deps + +pickConcrete :: QPN -> I -> UpdateState () +pickConcrete qpn@(Q pp _) i = do + vs <- get + case M.lookup qpn (vsLinks vs) of + -- Package is not yet in a LinkGroup. Create a new singleton link group. + Nothing -> do + let lg = lgSingleton qpn (Just $ PI pp i) + updateLinkGroup lg + + -- Package is already in a link group. Since we are picking a concrete + -- instance here, it must by definition be the canonical package. 
+ Just lg ->
+ makeCanonical lg qpn i
+
+pickLink :: QPN -> I -> PP -> FlaggedDeps Component QPN -> UpdateState ()
+pickLink qpn@(Q _pp pn) i pp' deps = do
+ vs <- get
+
+ -- The package might already be in a link group
+ -- (because one of its reverse dependencies is)
+ let lgSource = case M.lookup qpn (vsLinks vs) of
+ Nothing -> lgSingleton qpn Nothing
+ Just lg -> lg
+
+ -- Find the link group for the package we are linking to
+ --
+ -- Since the builder never links to a package without having first picked a
+ -- concrete instance for that package, and since we create singleton link
+ -- groups for concrete instances, this link group must exist (and must
+ -- in fact already have a canonical member).
+ let target = Q pp' pn
+ lgTarget = vsLinks vs ! target
+
+ -- Verify here that the member we add is in fact for the same package and
+ -- matches the version of the canonical instance. However, violations of
+ -- these checks would indicate a bug in the linker, not a true conflict.
+ let sanityCheck :: Maybe (PI PP) -> Bool
+ sanityCheck Nothing = False
+ sanityCheck (Just (PI _ canonI)) = pn == lgPackage lgTarget && i == canonI
+ assert (sanityCheck (lgCanon lgTarget)) $ return ()
+
+ -- Merge the two link groups (updateLinkGroup will propagate the change)
+ lgTarget' <- lift' $ lgMerge [] lgSource lgTarget
+ updateLinkGroup lgTarget'
+
+ -- Make sure all dependencies are linked as well
+ linkDeps target [P qpn] deps
+
+makeCanonical :: LinkGroup -> QPN -> I -> UpdateState ()
+makeCanonical lg qpn@(Q pp _) i =
+ case lgCanon lg of
+ -- There is already a canonical member. Fail.
+ Just _ ->
+ conflict ( CS.insert (P qpn) (lgConflictSet lg)
+ , "cannot make " ++ showQPN qpn
+ ++ " canonical member of " ++ showLinkGroup lg
+ )
+ Nothing -> do
+ let lg' = lg { lgCanon = Just (PI pp i) }
+ updateLinkGroup lg'
+
+-- | Link the dependencies of linked parents.
+--
+-- When we decide to link one package against another we walk through the
+-- package's direct dependencies and make sure that they're all linked to each
+-- other by merging their link groups (or creating new singleton link groups if
+-- they don't have link groups yet). We do not need to do this recursively,
+-- because having the direct dependencies in a link group means that we must
+-- have already made or will make sooner or later a link choice for one of these
+-- as well, and cover their dependencies at that point.
+linkDeps :: QPN -> [Var QPN] -> FlaggedDeps Component QPN -> UpdateState ()
+linkDeps target = \blame deps -> do
+ -- linkDeps is called in two places: when we first link one package to
+ -- another, and when we discover more dependencies of an already linked
+ -- package after doing some flag assignment. It is therefore important that
+ -- flag assignments cannot influence _how_ dependencies are qualified;
+ -- fortunately this is a documented property of 'qualifyDeps'.
+ rdeps <- requalify deps
+ go blame deps rdeps
+ where
+ go :: [Var QPN] -> FlaggedDeps Component QPN -> FlaggedDeps Component QPN -> UpdateState ()
+ go = zipWithM_ . go1
+
+ go1 :: [Var QPN] -> FlaggedDep Component QPN -> FlaggedDep Component QPN -> UpdateState ()
+ go1 blame dep rdep = case (dep, rdep) of
+ (Simple (Dep qpn _) _, ~(Simple (Dep qpn' _) _)) -> do
+ vs <- get
+ let lg = M.findWithDefault (lgSingleton qpn Nothing) qpn $ vsLinks vs
+ lg' = M.findWithDefault (lgSingleton qpn' Nothing) qpn' $ vsLinks vs
+ lg'' <- lift' $ lgMerge blame lg lg'
+ updateLinkGroup lg''
+ (Flagged fn _ t f, ~(Flagged _ _ t' f')) -> do
+ vs <- get
+ case M.lookup fn (vsFlags vs) of
+ Nothing -> return () -- flag assignment not yet known
+ Just True -> go (F fn:blame) t t'
+ Just False -> go (F fn:blame) f f'
+ (Stanza sn t, ~(Stanza _ t')) -> do
+ vs <- get
+ case M.lookup sn (vsStanzas vs) of
+ Nothing -> return () -- stanza assignment not yet known
+ Just True -> go (S sn:blame) t t'
+ Just False -> return () -- stanza not enabled; no new deps
+ -- For extensions and language dependencies, there is nothing to do.
+ -- No choice is involved, just checking, so there is nothing to link.
+ -- The same goes for pkg-config constraints.
+ (Simple (Ext _) _, _) -> return ()
+ (Simple (Lang _) _, _) -> return ()
+ (Simple (Pkg _ _) _, _) -> return ()
+
+ requalify :: FlaggedDeps Component QPN -> UpdateState (FlaggedDeps Component QPN)
+ requalify deps = do
+ vs <- get
+ return $ qualifyDeps (vsQualifyOptions vs) target (unqualifyDeps deps)
+
+pickFlag :: QFN -> Bool -> UpdateState ()
+pickFlag qfn b = do
+ modify $ \vs -> vs { vsFlags = M.insert qfn b (vsFlags vs) }
+ verifyFlag qfn
+ linkNewDeps (F qfn) b
+
+pickStanza :: QSN -> Bool -> UpdateState ()
+pickStanza qsn b = do
+ modify $ \vs -> vs { vsStanzas = M.insert qsn b (vsStanzas vs) }
+ verifyStanza qsn
+ linkNewDeps (S qsn) b
+
+-- | Link dependencies that we discover after making a flag choice.
+--
+-- When we make a flag choice for a package, then new dependencies for that
+-- package might become available. If the package under consideration is in a
+-- non-trivial link group, then these new dependencies have to be linked as
+-- well. In linkNewDeps, we compute such new dependencies and make sure they are
+-- linked.
+linkNewDeps :: Var QPN -> Bool -> UpdateState ()
+linkNewDeps var b = do
+ vs <- get
+ let (qpn@(Q pp pn), Just i) = varPI var
+ PInfo deps _ _ = vsIndex vs ! pn ! i
+ qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps
+ lg = vsLinks vs ! 
qpn + (parents, newDeps) = findNewDeps vs qdeps + linkedTo = S.delete pp (lgMembers lg) + forM_ (S.toList linkedTo) $ \pp' -> linkDeps (Q pp' pn) (P qpn : parents) newDeps + where + findNewDeps :: ValidateState -> FlaggedDeps comp QPN -> ([Var QPN], FlaggedDeps Component QPN) + findNewDeps vs = concatMapUnzip (findNewDeps' vs) + + findNewDeps' :: ValidateState -> FlaggedDep comp QPN -> ([Var QPN], FlaggedDeps Component QPN) + findNewDeps' _ (Simple _ _) = ([], []) + findNewDeps' vs (Flagged qfn _ t f) = + case (F qfn == var, M.lookup qfn (vsFlags vs)) of + (True, _) -> ([F qfn], if b then t else f) + (_, Nothing) -> ([], []) -- not yet known + (_, Just b') -> let (parents, deps) = findNewDeps vs (if b' then t else f) + in (F qfn:parents, deps) + findNewDeps' vs (Stanza qsn t) = + case (S qsn == var, M.lookup qsn (vsStanzas vs)) of + (True, _) -> ([S qsn], if b then t else []) + (_, Nothing) -> ([], []) -- not yet known + (_, Just b') -> let (parents, deps) = findNewDeps vs (if b' then t else []) + in (S qsn:parents, deps) + +updateLinkGroup :: LinkGroup -> UpdateState () +updateLinkGroup lg = do + verifyLinkGroup lg + modify $ \vs -> vs { + vsLinks = M.fromList (map aux (S.toList (lgMembers lg))) + `M.union` vsLinks vs + } + where + aux pp = (Q pp (lgPackage lg), lg) + +{------------------------------------------------------------------------------- + Verification +-------------------------------------------------------------------------------} + +verifyLinkGroup :: LinkGroup -> UpdateState () +verifyLinkGroup lg = + case lgInstance lg of + -- No instance picked yet. Nothing to verify + Nothing -> + return () + + -- We picked an instance. Verify flags and stanzas + -- TODO: The enumeration of OptionalStanza names is very brittle; + -- if a constructor is added to the datatype we won't notice it here + Just i -> do + vs <- get + let PInfo _deps finfo _ = vsIndex vs ! lgPackage lg ! i + flags = M.keys finfo + stanzas = [TestStanzas, BenchStanzas] + forM_ flags $ \fn -> do + let flag = FN (PI (lgPackage lg) i) fn + verifyFlag' flag lg + forM_ stanzas $ \sn -> do + let stanza = SN (PI (lgPackage lg) i) sn + verifyStanza' stanza lg + +verifyFlag :: QFN -> UpdateState () +verifyFlag (FN (PI qpn@(Q _pp pn) i) fn) = do + vs <- get + -- We can only pick a flag after picking an instance; link group must exist + verifyFlag' (FN (PI pn i) fn) (vsLinks vs ! qpn) + +verifyStanza :: QSN -> UpdateState () +verifyStanza (SN (PI qpn@(Q _pp pn) i) sn) = do + vs <- get + -- We can only pick a stanza after picking an instance; link group must exist + verifyStanza' (SN (PI pn i) sn) (vsLinks vs ! qpn) + +-- | Verify that all packages in the link group agree on flag assignments +-- +-- For the given flag and the link group, obtain all assignments for the flag +-- that have already been made for link group members, and check that they are +-- equal. 
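As a concrete illustration of the agreement rule described above, members whose flag is still unassigned are simply ignored and only the assignments that do exist are compared. A self-contained sketch of that rule (illustrative only, not part of the upstream diff; the helper name is made up):

import Data.Maybe (catMaybes)

-- Agreement restated on plain lists: unassigned members (Nothing) are ignored;
-- only conflicting Just values count as a disagreement.
agreeIgnoringUnassigned :: Eq a => [Maybe a] -> Bool
agreeIgnoringUnassigned = allAgree . catMaybes
  where
    allAgree []       = True
    allAgree (x : xs) = all (== x) xs

-- agreeIgnoringUnassigned [Just True, Nothing, Just True]  == True
-- agreeIgnoringUnassigned [Just True, Just False, Nothing] == False

This is the same 'allEqual . catMaybes' shape used by 'verifyFlag'' and 'verifyStanza'' below.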
+verifyFlag' :: FN PN -> LinkGroup -> UpdateState () +verifyFlag' (FN (PI pn i) fn) lg = do + vs <- get + let flags = map (\pp' -> FN (PI (Q pp' pn) i) fn) (S.toList (lgMembers lg)) + vals = map (`M.lookup` vsFlags vs) flags + if allEqual (catMaybes vals) -- We ignore not-yet assigned flags + then return () + else conflict ( CS.fromList (map F flags) `CS.union` lgConflictSet lg + , "flag " ++ show fn ++ " incompatible" + ) + +-- | Verify that all packages in the link group agree on stanza assignments +-- +-- For the given stanza and the link group, obtain all assignments for the +-- stanza that have already been made for link group members, and check that +-- they are equal. +-- +-- This function closely mirrors 'verifyFlag''. +verifyStanza' :: SN PN -> LinkGroup -> UpdateState () +verifyStanza' (SN (PI pn i) sn) lg = do + vs <- get + let stanzas = map (\pp' -> SN (PI (Q pp' pn) i) sn) (S.toList (lgMembers lg)) + vals = map (`M.lookup` vsStanzas vs) stanzas + if allEqual (catMaybes vals) -- We ignore not-yet assigned stanzas + then return () + else conflict ( CS.fromList (map S stanzas) `CS.union` lgConflictSet lg + , "stanza " ++ show sn ++ " incompatible" + ) + +{------------------------------------------------------------------------------- + Link groups +-------------------------------------------------------------------------------} + +-- | Set of packages that must be linked together +-- +-- A LinkGroup is between several qualified package names. In the validation +-- state, we maintain a map vsLinks from qualified package names to link groups. +-- There is an invariant that for all members of a link group, vsLinks must map +-- to the same link group. The function updateLinkGroup can be used to +-- re-establish this invariant after creating or expanding a LinkGroup. +data LinkGroup = LinkGroup { + -- | The name of the package of this link group + lgPackage :: PN + + -- | The canonical member of this link group (the one where we picked + -- a concrete instance). Once we have picked a canonical member, all + -- other packages must link to this one. + -- + -- We may not know this yet (if we are constructing link groups + -- for dependencies) + , lgCanon :: Maybe (PI PP) + + -- | The members of the link group + , lgMembers :: Set PP + + -- | The set of variables that should be added to the conflict set if + -- something goes wrong with this link set (in addition to the members + -- of the link group itself) + , lgBlame :: ConflictSet QPN + } + deriving (Show, Eq) + +-- | Invariant for the set of link groups: every element in the link group +-- must be pointing to the /same/ link group +lgInvariant :: Map QPN LinkGroup -> Bool +lgInvariant links = all invGroup (M.elems links) + where + invGroup :: LinkGroup -> Bool + invGroup lg = allEqual $ map (`M.lookup` links) members + where + members :: [QPN] + members = map (`Q` lgPackage lg) $ S.toList (lgMembers lg) + +-- | Package version of this group +-- +-- This is only known once we have picked a canonical element. +lgInstance :: LinkGroup -> Maybe I +lgInstance = fmap (\(PI _ i) -> i) . 
lgCanon + +showLinkGroup :: LinkGroup -> String +showLinkGroup lg = + "{" ++ intercalate "," (map showMember (S.toList (lgMembers lg))) ++ "}" + where + showMember :: PP -> String + showMember pp = case lgCanon lg of + Just (PI pp' _i) | pp == pp' -> "*" + _otherwise -> "" + ++ case lgInstance lg of + Nothing -> showQPN (qpn pp) + Just i -> showPI (PI (qpn pp) i) + + qpn :: PP -> QPN + qpn pp = Q pp (lgPackage lg) + +-- | Creates a link group that contains a single member. +lgSingleton :: QPN -> Maybe (PI PP) -> LinkGroup +lgSingleton (Q pp pn) canon = LinkGroup { + lgPackage = pn + , lgCanon = canon + , lgMembers = S.singleton pp + , lgBlame = CS.empty + } + +lgMerge :: [Var QPN] -> LinkGroup -> LinkGroup -> Either Conflict LinkGroup +lgMerge blame lg lg' = do + canon <- pick (lgCanon lg) (lgCanon lg') + return LinkGroup { + lgPackage = lgPackage lg + , lgCanon = canon + , lgMembers = lgMembers lg `S.union` lgMembers lg' + , lgBlame = CS.unions [CS.fromList blame, lgBlame lg, lgBlame lg'] + } + where + pick :: Eq a => Maybe a -> Maybe a -> Either Conflict (Maybe a) + pick Nothing Nothing = Right Nothing + pick (Just x) Nothing = Right $ Just x + pick Nothing (Just y) = Right $ Just y + pick (Just x) (Just y) = + if x == y then Right $ Just x + else Left ( CS.unions [ + CS.fromList blame + , lgConflictSet lg + , lgConflictSet lg' + ] + , "cannot merge " ++ showLinkGroup lg + ++ " and " ++ showLinkGroup lg' + ) + +lgConflictSet :: LinkGroup -> ConflictSet QPN +lgConflictSet lg = + CS.fromList (map aux (S.toList (lgMembers lg))) + `CS.union` lgBlame lg + where + aux pp = P (Q pp (lgPackage lg)) + +{------------------------------------------------------------------------------- + Auxiliary +-------------------------------------------------------------------------------} + +allEqual :: Eq a => [a] -> Bool +allEqual [] = True +allEqual [_] = True +allEqual (x:y:ys) = x == y && allEqual (y:ys) + +concatMapUnzip :: (a -> ([b], [c])) -> [a] -> ([b], [c]) +concatMapUnzip f = (\(xs, ys) -> (concat xs, concat ys)) . unzip . map f diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Log.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Log.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Log.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Log.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,106 @@ +module Distribution.Client.Dependency.Modular.Log + ( Log + , continueWith + , failWith + , logToProgress + , succeedWith + , tryWith + ) where + +import Control.Applicative +import Data.List as L +import Data.Maybe (isNothing) + +import Distribution.Client.Dependency.Types -- from Cabal + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Message +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree (FailReason(..)) +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS + +-- | The 'Log' datatype. +-- +-- Represents the progress of a computation lazily. +-- +-- Parameterized over the type of actual messages and the final result. 
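To make the shape of such a log concrete, here is a small self-contained stand-in (a sketch only, not part of the upstream diff) mirroring how 'Progress' is used here: a lazy stream of step messages that ends in either a failure or a final result.

-- A lazy stream of steps terminated by a failure or a final result.
data Trace step fail done
  = TStep step (Trace step fail done)
  | TFail fail
  | TDone done

-- Collect only the step messages, discarding the terminal value.
traceSteps :: Trace step fail done -> [step]
traceSteps (TStep s rest) = s : traceSteps rest
traceSteps (TFail _)      = []
traceSteps (TDone _)      = []

-- traceSteps (TStep "trying: foo-1.0" (TStep "done" (TDone 42))) == ["trying: foo-1.0", "done"]

The local 'messages' helper below extracts the steps of the real 'Progress' type in the same way, via 'foldProgress'.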
+type Log m a = Progress m () a + +messages :: Progress step fail done -> [step] +messages = foldProgress (:) (const []) (const []) + +-- | Postprocesses a log file. Takes as an argument a limit on allowed backjumps. +-- If the limit is 'Nothing', then infinitely many backjumps are allowed. If the +-- limit is 'Just 0', backtracking is completely disabled. +logToProgress :: Maybe Int -> Log Message a -> Progress String String a +logToProgress mbj l = let + es = proc (Just 0) l -- catch first error (always) + ms = useFirstError (proc mbj l) + in go es es -- trace for first error + (showMessages (const True) True ms) -- run with backjump limit applied + where + -- Proc takes the allowed number of backjumps and a 'Progress' and explores the + -- messages until the maximum number of backjumps has been reached. It filters out + -- and ignores repeated backjumps. If proc reaches the backjump limit, it truncates + -- the 'Progress' and ends it with the last conflict set. Otherwise, it leaves the + -- original success result or replaces the original failure with 'Nothing'. + proc :: Maybe Int -> Progress Message a b -> Progress Message (Maybe (ConflictSet QPN)) b + proc _ (Done x) = Done x + proc _ (Fail _) = Fail Nothing + proc mbj' (Step (Failure cs Backjump) xs@(Step Leave (Step (Failure cs' Backjump) _))) + | cs == cs' = proc mbj' xs -- repeated backjumps count as one + proc (Just 0) (Step (Failure cs Backjump) _) = Fail (Just cs) + proc (Just n) (Step x@(Failure _ Backjump) xs) = Step x (proc (Just (n - 1)) xs) + proc mbj' (Step x xs) = Step x (proc mbj' xs) + + -- Sets the conflict set from the first backjump as the final error, and records + -- whether the search was exhaustive. + useFirstError :: Progress Message (Maybe (ConflictSet QPN)) b + -> Progress Message (Bool, Maybe (ConflictSet QPN)) b + useFirstError = replace Nothing + where + replace _ (Done x) = Done x + replace cs' (Fail cs) = -- 'Nothing' means backjump limit not reached. + -- Prefer first error over later error. + Fail (isNothing cs, cs' <|> cs) + replace Nothing (Step x@(Failure cs Backjump) xs) = Step x $ replace (Just cs) xs + replace cs' (Step x xs) = Step x $ replace cs' xs + + -- The first two arguments are both supposed to be the log up to the first error. + -- That's the error that will always be printed in case we do not find a solution. + -- We pass this log twice, because we evaluate it in parallel with the full log, + -- but we also want to retain the reference to its beginning for when we print it. + -- This trick prevents a space leak! + -- + -- The third argument is the full log, ending with either the solution or the + -- exhaustiveness and first conflict set. 
+ go :: Progress Message a b + -> Progress Message a b + -> Progress String (Bool, Maybe (ConflictSet QPN)) b + -> Progress String String b + go ms (Step _ ns) (Step x xs) = Step x (go ms ns xs) + go ms r (Step x xs) = Step x (go ms r xs) + go ms _ (Fail (exh, Just cs)) = Fail $ + "Could not resolve dependencies:\n" ++ + unlines (messages $ showMessages (L.foldr (\ v _ -> v `CS.member` cs) True) False ms) ++ + (if exh then "Dependency tree exhaustively searched.\n" + else "Backjump limit reached (" ++ currlimit mbj ++ + "change with --max-backjumps or try to run with --reorder-goals).\n") + where currlimit (Just n) = "currently " ++ show n ++ ", " + currlimit Nothing = "" + go _ _ (Done s) = Done s + go _ _ (Fail (_, Nothing)) = Fail ("Could not resolve dependencies; something strange happened.") -- should not happen + +failWith :: step -> fail -> Progress step fail done +failWith s f = Step s (Fail f) + +succeedWith :: step -> done -> Progress step fail done +succeedWith s d = Step s (Done d) + +continueWith :: step -> Progress step fail done -> Progress step fail done +continueWith = Step + +tryWith :: Message + -> Progress Message fail done + -> Progress Message fail done +tryWith m = Step m . Step Enter . foldProgress Step (failWith Leave) Done diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Message.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Message.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Message.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Message.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,154 @@ +{-# LANGUAGE BangPatterns #-} + +module Distribution.Client.Dependency.Modular.Message ( + Message(..), + showMessages + ) where + +import qualified Data.List as L +import Prelude hiding (pi) + +import Distribution.Text -- from Cabal + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.Tree + ( FailReason(..), POption(..) ) +import Distribution.Client.Dependency.Types + ( ConstraintSource(..), showConstraintSource, Progress(..) ) + +data Message = + Enter -- ^ increase indentation level + | Leave -- ^ decrease indentation level + | TryP QPN POption + | TryF QFN Bool + | TryS QSN Bool + | Next (Goal QPN) + | Success + | Failure (ConflictSet QPN) FailReason + +-- | Transforms the structured message type to actual messages (strings). +-- +-- Takes an additional relevance predicate. The predicate gets a stack of goal +-- variables and can decide whether messages regarding these goals are relevant. +-- You can plug in 'const True' if you're interested in a full trace. If you +-- want a slice of the trace concerning a particular conflict set, then plug in +-- a predicate returning 'True' on the empty stack and if the head is in the +-- conflict set. +-- +-- The second argument indicates if the level numbers should be shown. This is +-- recommended for any trace that involves backtracking, because only the level +-- numbers will allow to keep track of backjumps. 
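For concreteness, these are the two typical relevance predicates a caller might plug in, following the description above (a sketch only, not part of the upstream diff; it assumes the conflict-set module is imported qualified as CS, as Log.hs does):

-- Keep every message: a full trace.
fullTrace :: [Var QPN] -> Bool
fullTrace = const True

-- Keep messages whose goal stack is empty or whose head lies in the given
-- conflict set; this is the slice logToProgress uses when printing the trace
-- for the first error.
conflictSlice :: ConflictSet QPN -> [Var QPN] -> Bool
conflictSlice _  []      = True
conflictSlice cs (v : _) = v `CS.member` cs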
+showMessages :: ([Var QPN] -> Bool) -> Bool -> Progress Message a b -> Progress String a b +showMessages p sl = go [] 0 + where + -- The stack 'v' represents variables that are currently assigned by the + -- solver. 'go' pushes a variable for a recursive call when it encounters + -- 'TryP', 'TryF', or 'TryS' and pops a variable when it encounters 'Leave'. + -- When 'go' processes a package goal, or a package goal followed by a + -- 'Failure', it calls 'atLevel' with the goal variable at the head of the + -- stack so that the predicate can also select messages relating to package + -- goal choices. + go :: [Var QPN] -> Int -> Progress Message a b -> Progress String a b + go !_ !_ (Done x) = Done x + go !_ !_ (Fail x) = Fail x + -- complex patterns + go !v !l (Step (TryP qpn i) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + goPReject v l qpn [i] c fr ms + go !v !l (Step (TryF qfn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + (atLevel (add (F qfn) v) l $ "rejecting: " ++ showQFNBool qfn b ++ showFR c fr) (go v l ms) + go !v !l (Step (TryS qsn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + (atLevel (add (S qsn) v) l $ "rejecting: " ++ showQSNBool qsn b ++ showFR c fr) (go v l ms) + go !v !l (Step (Next (Goal (P qpn) gr)) (Step (TryP qpn' i) ms@(Step Enter (Step (Next _) _)))) = + (atLevel (add (P qpn) v) l $ "trying: " ++ showQPNPOpt qpn' i ++ showGR gr) (go (add (P qpn) v) l ms) + go !v !l (Step (Next (Goal (P qpn) gr)) ms@(Fail _)) = + (atLevel (add (P qpn) v) l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go v l ms + -- the previous case potentially arises in the error output, because we remove the backjump itself + -- if we cut the log after the first error + go !v !l (Step (Next (Goal (P qpn) gr)) ms@(Step (Failure _c Backjump) _)) = + (atLevel (add (P qpn) v) l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go v l ms + go !v !l (Step (Next (Goal (P qpn) gr)) (Step (Failure c fr) ms)) = + let v' = add (P qpn) v + in (atLevel v' l $ showPackageGoal qpn gr) $ (atLevel v' l $ showFailure c fr) (go v l ms) + go !v !l (Step (Failure c Backjump) ms@(Step Leave (Step (Failure c' Backjump) _))) + | c == c' = go v l ms + -- standard display + go !v !l (Step Enter ms) = go v (l+1) ms + go !v !l (Step Leave ms) = go (drop 1 v) (l-1) ms + go !v !l (Step (TryP qpn i) ms) = (atLevel (add (P qpn) v) l $ "trying: " ++ showQPNPOpt qpn i) (go (add (P qpn) v) l ms) + go !v !l (Step (TryF qfn b) ms) = (atLevel (add (F qfn) v) l $ "trying: " ++ showQFNBool qfn b) (go (add (F qfn) v) l ms) + go !v !l (Step (TryS qsn b) ms) = (atLevel (add (S qsn) v) l $ "trying: " ++ showQSNBool qsn b) (go (add (S qsn) v) l ms) + go !v !l (Step (Next (Goal (P qpn) gr)) ms) = (atLevel (add (P qpn) v) l $ showPackageGoal qpn gr) (go v l ms) + go !v !l (Step (Next _) ms) = go v l ms -- ignore flag goals in the log + go !v !l (Step (Success) ms) = (atLevel v l $ "done") (go v l ms) + go !v !l (Step (Failure c fr) ms) = (atLevel v l $ showFailure c fr) (go v l ms) + + showPackageGoal :: QPN -> QGoalReason -> String + showPackageGoal qpn gr = "next goal: " ++ showQPN qpn ++ showGR gr + + showFailure :: ConflictSet QPN -> FailReason -> String + showFailure c fr = "fail" ++ showFR c fr + + add :: Var QPN -> [Var QPN] -> [Var QPN] + add v vs = simplifyVar v : vs + + -- special handler for many subsequent package rejections + goPReject :: [Var QPN] + -> Int + -> QPN + -> [POption] + -> ConflictSet QPN + -> FailReason + -> Progress Message a b + -> Progress String a b + goPReject v l qpn 
is c fr (Step (TryP qpn' i) (Step Enter (Step (Failure _ fr') (Step Leave ms)))) + | qpn == qpn' && fr == fr' = goPReject v l qpn (i : is) c fr ms + goPReject v l qpn is c fr ms = + (atLevel (P qpn : v) l $ "rejecting: " ++ L.intercalate ", " (map (showQPNPOpt qpn) (reverse is)) ++ showFR c fr) (go v l ms) + + -- write a message, but only if it's relevant; we can also enable or disable the display of the current level + atLevel :: [Var QPN] -> Int -> String -> Progress String a b -> Progress String a b + atLevel v l x xs + | sl && p v = let s = show l + in Step ("[" ++ replicate (3 - length s) '_' ++ s ++ "] " ++ x) xs + | p v = Step x xs + | otherwise = xs + +showQPNPOpt :: QPN -> POption -> String +showQPNPOpt qpn@(Q _pp pn) (POption i linkedTo) = + case linkedTo of + Nothing -> showPI (PI qpn i) -- Consistent with prior to POption + Just pp' -> showQPN qpn ++ "~>" ++ showPI (PI (Q pp' pn) i) + +showGR :: QGoalReason -> String +showGR UserGoal = " (user goal)" +showGR (PDependency pi) = " (dependency of " ++ showPI pi ++ ")" +showGR (FDependency qfn b) = " (dependency of " ++ showQFNBool qfn b ++ ")" +showGR (SDependency qsn) = " (dependency of " ++ showQSNBool qsn True ++ ")" + +showFR :: ConflictSet QPN -> FailReason -> String +showFR _ InconsistentInitialConstraints = " (inconsistent initial constraints)" +showFR _ (Conflicting ds) = " (conflict: " ++ L.intercalate ", " (map showDep ds) ++ ")" +showFR _ CannotInstall = " (only already installed instances can be used)" +showFR _ CannotReinstall = " (avoiding to reinstall a package with same version but new dependencies)" +showFR _ Shadowed = " (shadowed by another installed package with same version)" +showFR _ Broken = " (package is broken)" +showFR _ (GlobalConstraintVersion vr src) = " (" ++ constraintSource src ++ " requires " ++ display vr ++ ")" +showFR _ (GlobalConstraintInstalled src) = " (" ++ constraintSource src ++ " requires installed instance)" +showFR _ (GlobalConstraintSource src) = " (" ++ constraintSource src ++ " requires source instance)" +showFR _ (GlobalConstraintFlag src) = " (" ++ constraintSource src ++ " requires opposite flag selection)" +showFR _ ManualFlag = " (manual flag can only be changed explicitly)" +showFR c Backjump = " (backjumping, conflict set: " ++ showCS c ++ ")" +showFR _ MultipleInstances = " (multiple instances)" +showFR c (DependenciesNotLinked msg) = " (dependencies not linked: " ++ msg ++ "; conflict set: " ++ showCS c ++ ")" +showFR c CyclicDependencies = " (cyclic dependencies; conflict set: " ++ showCS c ++ ")" +-- The following are internal failures. They should not occur. In the +-- interest of not crashing unnecessarily, we still just print an error +-- message though. 
+showFR _ (MalformedFlagChoice qfn) = " (INTERNAL ERROR: MALFORMED FLAG CHOICE: " ++ showQFN qfn ++ ")" +showFR _ (MalformedStanzaChoice qsn) = " (INTERNAL ERROR: MALFORMED STANZA CHOICE: " ++ showQSN qsn ++ ")" +showFR _ EmptyGoalChoice = " (INTERNAL ERROR: EMPTY GOAL CHOICE)" + +constraintSource :: ConstraintSource -> String +constraintSource src = "constraint from " ++ showConstraintSource src diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Package.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Package.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Package.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Package.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,175 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Client.Dependency.Modular.Package + ( I(..) + , Loc(..) + , PackageId + , PackageIdentifier(..) + , PackageName(..) + , PI(..) + , PN + , PP(..) + , Namespace(..) + , Qualifier(..) + , QPN + , QPV + , Q(..) + , instI + , makeIndependent + , primaryPP + , showI + , showPI + , showQPN + , unPN + ) where + +import Data.List as L + +import Distribution.Package -- from Cabal +import Distribution.Text -- from Cabal + +import Distribution.Client.Dependency.Modular.Version + +-- | A package name. +type PN = PackageName + +-- | Unpacking a package name. +unPN :: PN -> String +unPN (PackageName pn) = pn + +-- | Package version. A package name plus a version number. +type PV = PackageId + +-- | Qualified package version. +type QPV = Q PV + +-- | Package id. Currently just a black-box string. +type PId = UnitId + +-- | Location. Info about whether a package is installed or not, and where +-- exactly it is located. For installed packages, uniquely identifies the +-- package instance via its 'PId'. +-- +-- TODO: More information is needed about the repo. +data Loc = Inst PId | InRepo + deriving (Eq, Ord, Show) + +-- | Instance. A version number and a location. +data I = I Ver Loc + deriving (Eq, Ord, Show) + +-- | String representation of an instance. +showI :: I -> String +showI (I v InRepo) = showVer v +showI (I v (Inst uid)) = showVer v ++ "/installed" ++ shortId uid + where + -- A hack to extract the beginning of the package ABI hash + shortId (SimpleUnitId (ComponentId i)) + = snip (splitAt 4) (++ "...") + . snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':) + $ i + snip p f xs = case p xs of + (ys, zs) -> (if L.null zs then id else f) ys + +-- | Package instance. A package name and an instance. +data PI qpn = PI qpn I + deriving (Eq, Ord, Show, Functor) + +-- | String representation of a package instance. +showPI :: PI QPN -> String +showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i + +instI :: I -> Bool +instI (I _ (Inst _)) = True +instI _ = False + +-- | A package path consists of a namespace and a package path inside that +-- namespace. +data PP = PP Namespace Qualifier + deriving (Eq, Ord, Show) + +-- | Top-level namespace +-- +-- Package choices in different namespaces are considered completely independent +-- by the solver. +data Namespace = + -- | The default namespace + DefaultNamespace + + -- | Independent namespace + -- + -- For now we just number these (rather than giving them more structure). 
+ | Independent Int + deriving (Eq, Ord, Show) + +-- | Qualifier of a package within a namespace (see 'PP') +data Qualifier = + -- | Top-level dependency in this namespace + Unqualified + + -- | Any dependency on base is considered independent + -- + -- This makes it possible to have base shims. + | Base PN + + -- | Setup dependency + -- + -- By rights setup dependencies ought to be nestable; after all, the setup + -- dependencies of a package might themselves have setup dependencies, which + -- are independent from everything else. However, this very quickly leads to + -- infinite search trees in the solver. Therefore we limit ourselves to + -- a single qualifier (within a given namespace). + | Setup PN + deriving (Eq, Ord, Show) + +-- | Is the package in the primary group of packages. In particular this +-- does not include packages pulled in as setup deps. +-- +primaryPP :: PP -> Bool +primaryPP (PP _ns q) = go q + where + go Unqualified = True + go (Base _) = True + go (Setup _) = False + +-- | String representation of a package path. +-- +-- NOTE: The result of 'showPP' is either empty or results in a period, so that +-- it can be prepended to a package name. +showPP :: PP -> String +showPP (PP ns q) = + case ns of + DefaultNamespace -> go q + Independent i -> show i ++ "." ++ go q + where + -- Print the qualifier + -- + -- NOTE: the base qualifier is for a dependency _on_ base; the qualifier is + -- there to make sure different dependencies on base are all independent. + -- So we want to print something like @"A.base"@, where the @"A."@ part + -- is the qualifier and @"base"@ is the actual dependency (which, for the + -- 'Base' qualifier, will always be @base@). + go Unqualified = "" + go (Setup pn) = display pn ++ "-setup." + go (Base pn) = display pn ++ "." + +-- | A qualified entity. Pairs a package path with the entity. +data Q a = Q PP a + deriving (Eq, Ord, Show) + +-- | Standard string representation of a qualified entity. +showQ :: (a -> String) -> (Q a -> String) +showQ showa (Q pp x) = showPP pp ++ showa x + +-- | Qualified package name. +type QPN = Q PN + +-- | String representation of a qualified package path. +showQPN :: QPN -> String +showQPN = showQ display + +-- | Create artificial parents for each of the package names, making +-- them all independent. +makeIndependent :: [PN] -> [QPN] +makeIndependent ps = [ Q pp pn | (pn, i) <- zip ps [0::Int ..] 
+ , let pp = PP (Independent i) Unqualified + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Preference.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Preference.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Preference.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Preference.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,397 @@ +{-# LANGUAGE CPP #-} +module Distribution.Client.Dependency.Modular.Preference + ( avoidReinstalls + , deferSetupChoices + , deferWeakFlagChoices + , enforceManualFlags + , enforcePackageConstraints + , enforceSingleInstanceRestriction + , firstGoal + , preferBaseGoalChoice + , preferEasyGoalChoices + , preferLinked + , preferPackagePreferences + , preferReallyEasyGoalChoices + , requireInstalled + ) where + +-- Reordering or pruning the tree in order to prefer or make certain choices. + +import qualified Data.List as L +import qualified Data.Map as M +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid +import Control.Applicative +#endif +import Prelude hiding (sequence) +import Control.Monad.Reader hiding (sequence) +import Data.Map (Map) +import Data.Traversable (sequence) + +import Distribution.Client.Dependency.Types + ( PackageConstraint(..), LabeledPackageConstraint(..), ConstraintSource(..) + , PackagePreferences(..), InstalledPreference(..) ) +import Distribution.Client.Types + ( OptionalStanza(..) ) + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import Distribution.Client.Dependency.Modular.Tree +import Distribution.Client.Dependency.Modular.Version +import qualified Distribution.Client.Dependency.Modular.ConflictSet as CS + +-- | Generic abstraction for strategies that just rearrange the package order. +-- Only packages that match the given predicate are reordered. +packageOrderFor :: (PN -> Bool) -> (PN -> I -> I -> Ordering) -> Tree a -> Tree a +packageOrderFor p cmp' = trav go + where + go (PChoiceF v@(Q _ pn) r cs) + | p pn = PChoiceF v r (P.sortByKeys (flip (cmp pn)) cs) + | otherwise = PChoiceF v r cs + go x = x + + cmp :: PN -> POption -> POption -> Ordering + cmp pn (POption i _) (POption i' _) = cmp' pn i i' + +-- | Prefer to link packages whenever possible +preferLinked :: Tree a -> Tree a +preferLinked = trav go + where + go (PChoiceF qn a cs) = PChoiceF qn a (P.sortByKeys cmp cs) + go x = x + + cmp (POption _ linkedTo) (POption _ linkedTo') = cmpL linkedTo linkedTo' + + cmpL Nothing Nothing = EQ + cmpL Nothing (Just _) = GT + cmpL (Just _) Nothing = LT + cmpL (Just _) (Just _) = EQ + +-- | Ordering that treats versions satisfying more preferred ranges as greater +-- than versions satisfying less preferred ranges. +preferredVersionsOrdering :: [VR] -> Ver -> Ver -> Ordering +preferredVersionsOrdering vrs v1 v2 = compare (check v1) (check v2) + where + check v = Prelude.length . Prelude.filter (==True) . + Prelude.map (flip checkVR v) $ vrs + +-- | Traversal that tries to establish package preferences (not constraints). +-- Works by reordering choice nodes. Also applies stanza preferences. 
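The ranking used by 'preferredVersionsOrdering' above simply counts how many of the preferred ranges a version satisfies and compares the counts. A self-contained sketch of the same idea on plain integers (illustrative only, not part of the upstream diff):

-- Rank candidates by how many preferred ranges they satisfy; a higher count
-- compares as greater, matching preferredVersionsOrdering.
rankByRanges :: [Int -> Bool] -> Int -> Int -> Ordering
rankByRanges ranges v1 v2 = compare (score v1) (score v2)
  where
    score v = length (filter ($ v) ranges)

-- rankByRanges [(>= 2), (< 5)] 3 7 == GT   -- 3 satisfies both ranges, 7 only one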
+preferPackagePreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a +preferPackagePreferences pcs = preferPackageStanzaPreferences pcs + . packageOrderFor (const True) preference + where + preference pn i1@(I v1 _) i2@(I v2 _) = + let PackagePreferences vrs ipref _ = pcs pn + in preferredVersionsOrdering vrs v1 v2 `mappend` -- combines lexically + locationsOrdering ipref i1 i2 + + -- Note that we always rank installed before uninstalled, and later + -- versions before earlier, but we can change the priority of the + -- two orderings. + locationsOrdering PreferInstalled v1 v2 = + preferInstalledOrdering v1 v2 `mappend` preferLatestOrdering v1 v2 + locationsOrdering PreferLatest v1 v2 = + preferLatestOrdering v1 v2 `mappend` preferInstalledOrdering v1 v2 + +-- | Ordering that treats installed instances as greater than uninstalled ones. +preferInstalledOrdering :: I -> I -> Ordering +preferInstalledOrdering (I _ (Inst _)) (I _ (Inst _)) = EQ +preferInstalledOrdering (I _ (Inst _)) _ = GT +preferInstalledOrdering _ (I _ (Inst _)) = LT +preferInstalledOrdering _ _ = EQ + +-- | Compare instances by their version numbers. +preferLatestOrdering :: I -> I -> Ordering +preferLatestOrdering (I v1 _) (I v2 _) = compare v1 v2 + +-- | Traversal that tries to establish package stanza enable\/disable +-- preferences. Works by reordering the branches of stanza choices. +preferPackageStanzaPreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a +preferPackageStanzaPreferences pcs = trav go + where + go (SChoiceF qsn@(SN (PI (Q pp pn) _) s) gr _tr ts) | primaryPP pp = + let PackagePreferences _ _ spref = pcs pn + enableStanzaPref = s `elem` spref + -- move True case first to try enabling the stanza + ts' | enableStanzaPref = P.sortByKeys (flip compare) ts + | otherwise = ts + in SChoiceF qsn gr True ts' -- True: now weak choice + go x = x + +-- | Helper function that tries to enforce a single package constraint on a +-- given instance for a P-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. +processPackageConstraintP :: PP + -> ConflictSet QPN + -> I + -> LabeledPackageConstraint + -> Tree a + -> Tree a +processPackageConstraintP pp _ _ (LabeledPackageConstraint _ src) r + | src == ConstraintSourceUserTarget && not (primaryPP pp) = r + -- the constraints arising from targets, like "foo-1.0" only apply to + -- the main packages in the solution, they don't constrain setup deps + +processPackageConstraintP _ c i (LabeledPackageConstraint pc src) r = go i pc + where + go (I v _) (PackageConstraintVersion _ vr) + | checkVR vr v = r + | otherwise = Fail c (GlobalConstraintVersion vr src) + go _ (PackageConstraintInstalled _) + | instI i = r + | otherwise = Fail c (GlobalConstraintInstalled src) + go _ (PackageConstraintSource _) + | not (instI i) = r + | otherwise = Fail c (GlobalConstraintSource src) + go _ _ = r + +-- | Helper function that tries to enforce a single package constraint on a +-- given flag setting for an F-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. 
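Each of these helpers yields a transformer that either passes the subtree through unchanged or replaces it with a failure node; 'enforcePackageConstraints' below then composes one such transformer per relevant constraint with a left fold. A minimal sketch of that composition pattern (not part of the upstream diff; the check names are hypothetical):

-- Combine a list of per-constraint transformers into one; applying the result
-- runs every check, mirroring the foldl over processPackageConstraint* below.
composeChecks :: [tree -> tree] -> tree -> tree
composeChecks = foldl (\acc check -> acc . check) id

-- composeChecks [checkVersion, checkInstalled] subtree
--   == checkVersion (checkInstalled subtree)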
+processPackageConstraintF :: Flag + -> ConflictSet QPN + -> Bool + -> LabeledPackageConstraint + -> Tree a + -> Tree a +processPackageConstraintF f c b' (LabeledPackageConstraint pc src) r = go pc + where + go (PackageConstraintFlags _ fa) = + case L.lookup f fa of + Nothing -> r + Just b | b == b' -> r + | otherwise -> Fail c (GlobalConstraintFlag src) + go _ = r + +-- | Helper function that tries to enforce a single package constraint on a +-- given flag setting for an F-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. +processPackageConstraintS :: OptionalStanza + -> ConflictSet QPN + -> Bool + -> LabeledPackageConstraint + -> Tree a + -> Tree a +processPackageConstraintS s c b' (LabeledPackageConstraint pc src) r = go pc + where + go (PackageConstraintStanzas _ ss) = + if not b' && s `elem` ss then Fail c (GlobalConstraintFlag src) + else r + go _ = r + +-- | Traversal that tries to establish various kinds of user constraints. Works +-- by selectively disabling choices that have been ruled out by global user +-- constraints. +enforcePackageConstraints :: M.Map PN [LabeledPackageConstraint] + -> Tree QGoalReason + -> Tree QGoalReason +enforcePackageConstraints pcs = trav go + where + go (PChoiceF qpn@(Q pp pn) gr ts) = + let c = varToConflictSet (P qpn) + -- compose the transformation functions for each of the relevant constraint + g = \ (POption i _) -> foldl (\ h pc -> h . processPackageConstraintP pp c i pc) id + (M.findWithDefault [] pn pcs) + in PChoiceF qpn gr (P.mapWithKey g ts) + go (FChoiceF qfn@(FN (PI (Q _ pn) _) f) gr tr m ts) = + let c = varToConflictSet (F qfn) + -- compose the transformation functions for each of the relevant constraint + g = \ b -> foldl (\ h pc -> h . processPackageConstraintF f c b pc) id + (M.findWithDefault [] pn pcs) + in FChoiceF qfn gr tr m (P.mapWithKey g ts) + go (SChoiceF qsn@(SN (PI (Q _ pn) _) f) gr tr ts) = + let c = varToConflictSet (S qsn) + -- compose the transformation functions for each of the relevant constraint + g = \ b -> foldl (\ h pc -> h . processPackageConstraintS f c b pc) id + (M.findWithDefault [] pn pcs) + in SChoiceF qsn gr tr (P.mapWithKey g ts) + go x = x + +-- | Transformation that tries to enforce manual flags. Manual flags +-- can only be re-set explicitly by the user. This transformation should +-- be run after user preferences have been enforced. For manual flags, +-- it checks if a user choice has been made. If not, it disables all but +-- the first choice. +enforceManualFlags :: Tree QGoalReason -> Tree QGoalReason +enforceManualFlags = trav go + where + go (FChoiceF qfn gr tr True ts) = FChoiceF qfn gr tr True $ + let c = varToConflictSet (F qfn) + in case span isDisabled (P.toList ts) of + ([], y : ys) -> P.fromList (y : L.map (\ (b, _) -> (b, Fail c ManualFlag)) ys) + _ -> ts -- something has been manually selected, leave things alone + where + isDisabled (_, Fail _ (GlobalConstraintFlag _)) = True + isDisabled _ = False + go x = x + +-- | Require installed packages. +requireInstalled :: (PN -> Bool) -> Tree QGoalReason -> Tree QGoalReason +requireInstalled p = trav go + where + go (PChoiceF v@(Q _ pn) gr cs) + | p pn = PChoiceF v gr (P.mapWithKey installed cs) + | otherwise = PChoiceF v gr cs + where + installed (POption (I _ (Inst _)) _) x = x + installed _ _ = Fail (varToConflictSet (P v)) CannotInstall + go x = x + +-- | Avoid reinstalls. +-- +-- This is a tricky strategy. 
If a package version is installed already and the
+-- same version is available from a repo, the repo version will never be chosen.
+-- This would result in a reinstall (either destructively, or potentially,
+-- shadowing). The old instance won't be visible or even present anymore, but
+-- other packages might have depended on it.
+--
+-- TODO: It would be better to actually check the reverse dependencies of installed
+-- packages. If they're not depended on, then reinstalling should be fine. Even if
+-- they are, perhaps this should just result in trying to reinstall those other
+-- packages as well. However, doing this all neatly in one pass would require
+-- changing the builder, or at least changing the goal set after building.
+avoidReinstalls :: (PN -> Bool) -> Tree QGoalReason -> Tree QGoalReason
+avoidReinstalls p = trav go
+ where
+ go (PChoiceF qpn@(Q _ pn) gr cs)
+ | p pn = PChoiceF qpn gr disableReinstalls
+ | otherwise = PChoiceF qpn gr cs
+ where
+ disableReinstalls =
+ let installed = [ v | (POption (I v (Inst _)) _, _) <- P.toList cs ]
+ in P.mapWithKey (notReinstall installed) cs
+
+ notReinstall vs (POption (I v InRepo) _) _ | v `elem` vs =
+ Fail (varToConflictSet (P qpn)) CannotReinstall
+ notReinstall _ _ x =
+ x
+ go x = x
+
+-- | Always choose the first goal in the list next, abandoning all
+-- other choices.
+--
+-- This is unnecessary for the default search strategy, because
+-- it descends only into the first goal choice anyway,
+-- but may still make sense to just reduce the tree size a bit.
+firstGoal :: Tree a -> Tree a
+firstGoal = trav go
+ where
+ go (GoalChoiceF xs) = GoalChoiceF (P.firstOnly xs)
+ go x = x
+ -- Note that we keep empty choice nodes, because they mean success.
+
+-- | Transformation that tries to make a decision on base as early as
+-- possible. In nearly all cases, there's a single choice for the base
+-- package. Also, fixing base early should lead to better error messages.
+preferBaseGoalChoice :: Tree a -> Tree a
+preferBaseGoalChoice = trav go
+ where
+ go (GoalChoiceF xs) = GoalChoiceF (P.preferByKeys isBase xs)
+ go x = x
+
+ isBase :: OpenGoal comp -> Bool
+ isBase (OpenGoal (Simple (Dep (Q _pp pn) _) _) _) | unPN pn == "base" = True
+ isBase _ = False
+
+-- | Deal with setup dependencies after regular dependencies, so that we can
+-- link setup dependencies against package dependencies when possible
+deferSetupChoices :: Tree a -> Tree a
+deferSetupChoices = trav go
+ where
+ go (GoalChoiceF xs) = GoalChoiceF (P.preferByKeys noSetup xs)
+ go x = x
+
+ noSetup :: OpenGoal comp -> Bool
+ noSetup (OpenGoal (Simple (Dep (Q (PP _ns (Setup _)) _) _) _) _) = False
+ noSetup _ = True
+
+-- | Transformation that tries to avoid making weak flag choices early.
+-- Weak flags are trivial flags (not influencing dependencies) or such
+-- flags that are explicitly declared to be weak in the index.
+deferWeakFlagChoices :: Tree a -> Tree a
+deferWeakFlagChoices = trav go
+ where
+ go (GoalChoiceF xs) = GoalChoiceF (P.prefer noWeakStanza (P.prefer noWeakFlag xs))
+ go x = x
+
+ noWeakStanza :: Tree a -> Bool
+ noWeakStanza (SChoice _ _ True _) = False
+ noWeakStanza _ = True
+
+ noWeakFlag :: Tree a -> Bool
+ noWeakFlag (FChoice _ _ True _ _) = False
+ noWeakFlag _ = True
+
+-- | Transformation that sorts choice nodes so that
+-- child nodes with a small branching degree are preferred.
+--
+-- Only approximates the number of choices in the branches. 
+-- In particular, we try to take any goal immediately if it has +-- a branching degree of 0 (guaranteed failure) or 1 (no other +-- choice possible). +-- +-- Returns at most one choice. +-- +preferEasyGoalChoices :: Tree a -> Tree a +preferEasyGoalChoices = trav go + where + go (GoalChoiceF xs) = GoalChoiceF (P.dminimumBy dchoices xs) + -- (a different implementation that seems slower): + -- GoalChoiceF (P.firstOnly (P.preferOrElse zeroOrOneChoices (P.minimumBy choices) xs)) + go x = x + +-- | A variant of 'preferEasyGoalChoices' that just keeps the +-- ones with a branching degree of 0 or 1. Note that unlike +-- 'preferEasyGoalChoices', this may return more than one +-- choice. +-- +preferReallyEasyGoalChoices :: Tree a -> Tree a +preferReallyEasyGoalChoices = trav go + where + go (GoalChoiceF xs) = GoalChoiceF (P.prefer zeroOrOneChoices xs) + go x = x + +-- | Monad used internally in enforceSingleInstanceRestriction +-- +-- For each package instance we record the goal for which we picked a concrete +-- instance. The SIR means that for any package instance there can only be one. +type EnforceSIR = Reader (Map (PI PN) QPN) + +-- | Enforce ghc's single instance restriction +-- +-- From the solver's perspective, this means that for any package instance +-- (that is, package name + package version) there can be at most one qualified +-- goal resolving to that instance (there may be other goals _linking_ to that +-- instance however). +enforceSingleInstanceRestriction :: Tree QGoalReason -> Tree QGoalReason +enforceSingleInstanceRestriction = (`runReader` M.empty) . cata go + where + go :: TreeF QGoalReason (EnforceSIR (Tree QGoalReason)) -> EnforceSIR (Tree QGoalReason) + + -- We just verify package choices. + go (PChoiceF qpn gr cs) = + PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn) cs) + go _otherwise = + innM _otherwise + + -- The check proper + goP :: QPN -> POption -> EnforceSIR (Tree QGoalReason) -> EnforceSIR (Tree QGoalReason) + goP qpn@(Q _ pn) (POption i linkedTo) r = do + let inst = PI pn i + env <- ask + case (linkedTo, M.lookup inst env) of + (Just _, _) -> + -- For linked nodes we don't check anything + r + (Nothing, Nothing) -> + -- Not linked, not already used + local (M.insert inst qpn) r + (Nothing, Just qpn') -> do + -- Not linked, already used. This is an error + return $ Fail (CS.union (varToConflictSet (P qpn)) (varToConflictSet (P qpn'))) MultipleInstances diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/PSQ.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/PSQ.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/PSQ.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,213 @@ +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +module Distribution.Client.Dependency.Modular.PSQ + ( PSQ(..) -- Unit test needs constructor access + , Degree(..) 
+ , casePSQ + , cons + , degree + , delete + , dminimumBy + , length + , lookup + , filter + , filterKeys + , firstOnly + , fromList + , isZeroOrOne + , keys + , map + , mapKeys + , mapWithKey + , mapWithKeyState + , minimumBy + , null + , prefer + , preferByKeys + , preferOrElse + , snoc + , sortBy + , sortByKeys + , splits + , toList + , union + ) where + +-- Priority search queues. +-- +-- I am not yet sure what exactly is needed. But we need a data structure with +-- key-based lookup that can be sorted. We're using a sequence right now with +-- (inefficiently implemented) lookup, because I think that queue-based +-- operations and sorting turn out to be more efficiency-critical in practice. + +import Control.Arrow (first, second) + +import qualified Data.Foldable as F +import Data.Function +import qualified Data.List as S +import Data.Ord (comparing) +import Data.Traversable +import Prelude hiding (foldr, length, lookup, filter, null, map) + +newtype PSQ k v = PSQ [(k, v)] + deriving (Eq, Show, Functor, F.Foldable, Traversable) -- Qualified Foldable to avoid issues with FTP + +keys :: PSQ k v -> [k] +keys (PSQ xs) = fmap fst xs + +lookup :: Eq k => k -> PSQ k v -> Maybe v +lookup k (PSQ xs) = S.lookup k xs + +map :: (v1 -> v2) -> PSQ k v1 -> PSQ k v2 +map f (PSQ xs) = PSQ (fmap (second f) xs) + +mapKeys :: (k1 -> k2) -> PSQ k1 v -> PSQ k2 v +mapKeys f (PSQ xs) = PSQ (fmap (first f) xs) + +mapWithKey :: (k -> a -> b) -> PSQ k a -> PSQ k b +mapWithKey f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f k v)) xs) + +mapWithKeyState :: (s -> k -> a -> (b, s)) -> PSQ k a -> s -> PSQ k b +mapWithKeyState p (PSQ xs) s0 = + PSQ (F.foldr (\ (k, v) r s -> case p s k v of + (w, n) -> (k, w) : (r n)) + (const []) xs s0) + +delete :: Eq k => k -> PSQ k a -> PSQ k a +delete k (PSQ xs) = PSQ (snd (S.partition ((== k) . fst) xs)) + +fromList :: [(k, a)] -> PSQ k a +fromList = PSQ + +cons :: k -> a -> PSQ k a -> PSQ k a +cons k x (PSQ xs) = PSQ ((k, x) : xs) + +snoc :: PSQ k a -> k -> a -> PSQ k a +snoc (PSQ xs) k x = PSQ (xs ++ [(k, x)]) + +casePSQ :: PSQ k a -> r -> (k -> a -> PSQ k a -> r) -> r +casePSQ (PSQ xs) n c = + case xs of + [] -> n + (k, v) : ys -> c k v (PSQ ys) + +splits :: PSQ k a -> PSQ k (a, PSQ k a) +splits = go id + where + go f xs = casePSQ xs + (PSQ []) + (\ k v ys -> cons k (v, f ys) (go (f . cons k v) ys)) + +sortBy :: (a -> a -> Ordering) -> PSQ k a -> PSQ k a +sortBy cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` snd) xs) + +sortByKeys :: (k -> k -> Ordering) -> PSQ k a -> PSQ k a +sortByKeys cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` fst) xs) + +-- | Given a measure in form of a pseudo-peano-natural number, +-- determine the approximate minimum. This is designed to stop +-- even traversing the list as soon as we find any element with +-- measure 'ZeroOrOne'. +-- +-- Always returns a one-element queue (except if the queue is +-- empty, then we return an empty queue again). +-- +dminimumBy :: (a -> Degree) -> PSQ k a -> PSQ k a +dminimumBy _ (PSQ []) = PSQ [] +dminimumBy sel (PSQ (x : xs)) = go (sel (snd x)) x xs + where + go ZeroOrOne v _ = PSQ [v] + go _ v [] = PSQ [v] + go c v (y : ys) = case compare c d of + LT -> go c v ys + EQ -> go c v ys + GT -> go d y ys + where + d = sel (snd y) + +minimumBy :: (a -> Int) -> PSQ k a -> PSQ k a +minimumBy sel (PSQ xs) = + PSQ [snd (S.minimumBy (comparing fst) (S.map (\ x -> (sel (snd x), x)) xs))] + +-- | Will partition the list according to the predicate. 
If +-- there is any element that satisfies the precidate, then only +-- the elements satisfying the predicate are returned. +-- Otherwise, the rest is returned. +-- +prefer :: (a -> Bool) -> PSQ k a -> PSQ k a +prefer p (PSQ xs) = + let + (pro, con) = S.partition (p . snd) xs + in + if S.null pro then PSQ con else PSQ pro + +-- | Variant of 'prefer' that takes a continuation for the case +-- that there are none of the desired elements. +preferOrElse :: (a -> Bool) -> (PSQ k a -> PSQ k a) -> PSQ k a -> PSQ k a +preferOrElse p k (PSQ xs) = + let + (pro, con) = S.partition (p . snd) xs + in + if S.null pro then k (PSQ con) else PSQ pro + +-- | Variant of 'prefer' that takes a predicate on the keys +-- rather than on the values. +-- +preferByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a +preferByKeys p (PSQ xs) = + let + (pro, con) = S.partition (p . fst) xs + in + if S.null pro then PSQ con else PSQ pro + +filterKeys :: (k -> Bool) -> PSQ k a -> PSQ k a +filterKeys p (PSQ xs) = PSQ (S.filter (p . fst) xs) + +filter :: (a -> Bool) -> PSQ k a -> PSQ k a +filter p (PSQ xs) = PSQ (S.filter (p . snd) xs) + +length :: PSQ k a -> Int +length (PSQ xs) = S.length xs + +-- | Approximation of the branching degree. +-- +-- This is designed for computing the branching degree of a goal choice +-- node. If the degree is 0 or 1, it is always good to take that goal, +-- because we can either abort immediately, or have no other choice anyway. +-- +-- So we do not actually want to compute the full degree (which is +-- somewhat costly) in cases where we have such an easy choice. +-- +data Degree = ZeroOrOne | Two | Other + deriving (Show, Eq) + +instance Ord Degree where + compare ZeroOrOne _ = LT -- lazy approximation + compare _ ZeroOrOne = GT -- approximation + compare Two Two = EQ + compare Two Other = LT + compare Other Two = GT + compare Other Other = EQ + +degree :: PSQ k a -> Degree +degree (PSQ []) = ZeroOrOne +degree (PSQ [_]) = ZeroOrOne +degree (PSQ [_, _]) = Two +degree (PSQ _) = Other + +null :: PSQ k a -> Bool +null (PSQ xs) = S.null xs + +isZeroOrOne :: PSQ k a -> Bool +isZeroOrOne (PSQ []) = True +isZeroOrOne (PSQ [_]) = True +isZeroOrOne _ = False + +firstOnly :: PSQ k a -> PSQ k a +firstOnly (PSQ []) = PSQ [] +firstOnly (PSQ (x : _)) = PSQ [x] + +toList :: PSQ k a -> [(k, a)] +toList (PSQ xs) = xs + +union :: PSQ k a -> PSQ k a -> PSQ k a +union (PSQ xs) (PSQ ys) = PSQ (xs ++ ys) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Solver.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Solver.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Solver.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,100 @@ +module Distribution.Client.Dependency.Modular.Solver + ( SolverConfig(..) 
+ , solve + ) where + +import Data.Map as M + +import Distribution.Compiler (CompilerInfo) + +import Distribution.Client.PkgConfigDb (PkgConfigDb) + +import Distribution.Client.Dependency.Types + +import Distribution.Client.Dependency.Modular.Assignment +import Distribution.Client.Dependency.Modular.Builder +import Distribution.Client.Dependency.Modular.Cycles +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Explore +import Distribution.Client.Dependency.Modular.Index +import Distribution.Client.Dependency.Modular.Log +import Distribution.Client.Dependency.Modular.Message +import Distribution.Client.Dependency.Modular.Package +import qualified Distribution.Client.Dependency.Modular.Preference as P +import Distribution.Client.Dependency.Modular.Validate +import Distribution.Client.Dependency.Modular.Linking + +-- | Various options for the modular solver. +data SolverConfig = SolverConfig { + preferEasyGoalChoices :: Bool, + independentGoals :: Bool, + avoidReinstalls :: Bool, + shadowPkgs :: Bool, + strongFlags :: Bool, + maxBackjumps :: Maybe Int +} + +-- | Run all solver phases. +-- +-- In principle, we have a valid tree after 'validationPhase', which +-- means that every 'Done' node should correspond to valid solution. +-- +-- There is one exception, though, and that is cycle detection, which +-- has been added relatively recently. Cycles are only removed directly +-- before exploration. +-- +-- Semantically, there is no difference. Cycle detection, as implemented +-- now, only occurs for 'Done' nodes we encounter during exploration, +-- and cycle detection itself does not change the shape of the tree, +-- it only marks some 'Done' nodes as 'Fail', if they contain cyclic +-- solutions. +-- +-- There is a tiny performance impact, however, in doing cycle detection +-- directly after validation. Probably because cycle detection maintains +-- some information, and the various reorderings implemented by +-- 'preferencesPhase' and 'heuristicsPhase' are ever so slightly more +-- costly if that information is already around during the reorderings. +-- +-- With the current positioning directly before the 'explorePhase', there +-- seems to be no statistically significant performance impact of cycle +-- detection in the common case where there are no cycles. +-- +solve :: SolverConfig -> -- ^ solver parameters + CompilerInfo -> + Index -> -- ^ all available packages as an index + PkgConfigDb -> -- ^ available pkg-config pkgs + (PN -> PackagePreferences) -> -- ^ preferences + Map PN [LabeledPackageConstraint] -> -- ^ global constraints + [PN] -> -- ^ global goals + Log Message (Assignment, RevDepMap) +solve sc cinfo idx pkgConfigDB userPrefs userConstraints userGoals = + explorePhase $ + detectCyclesPhase$ + heuristicsPhase $ + preferencesPhase $ + validationPhase $ + prunePhase $ + buildPhase + where + explorePhase = backjumpAndExplore + heuristicsPhase = (if preferEasyGoalChoices sc + then P.preferEasyGoalChoices -- also leaves just one choice + else P.firstGoal) . -- after doing goal-choice heuristics, commit to the first choice (saves space) + P.deferWeakFlagChoices . + P.deferSetupChoices . + P.preferBaseGoalChoice . + P.preferLinked + preferencesPhase = P.preferPackagePreferences userPrefs + validationPhase = P.enforceManualFlags . -- can only be done after user constraints + P.enforcePackageConstraints userConstraints . + P.enforceSingleInstanceRestriction . + validateLinking idx . 
+ validateTree cinfo idx pkgConfigDB + prunePhase = (if avoidReinstalls sc then P.avoidReinstalls (const True) else id) . + -- packages that can never be "upgraded": + P.requireInstalled (`elem` [ PackageName "base" + , PackageName "ghc-prim" + , PackageName "integer-gmp" + , PackageName "integer-simple" + ]) + buildPhase = addLinking $ buildTree idx (independentGoals sc) userGoals diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Tree.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Tree.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Tree.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Tree.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,168 @@ +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +module Distribution.Client.Dependency.Modular.Tree + ( FailReason(..) + , POption(..) + , Tree(..) + , TreeF(..) + , ana + , cata + , choices + , dchoices + , inn + , innM + , para + , trav + , zeroOrOneChoices + ) where + +import Control.Monad hiding (mapM, sequence) +import Data.Foldable +import Data.Traversable +import Prelude hiding (foldr, mapM, sequence) + +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package +import Distribution.Client.Dependency.Modular.PSQ (PSQ) +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import Distribution.Client.Dependency.Modular.Version +import Distribution.Client.Dependency.Types ( ConstraintSource(..) ) + +-- | Type of the search tree. Inlining the choice nodes for now. +data Tree a = + PChoice QPN a (PSQ POption (Tree a)) + | FChoice QFN a Bool Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's weak/trivial, second Bool whether it's manual + | SChoice QSN a Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's trivial + | GoalChoice (PSQ (OpenGoal ()) (Tree a)) -- PSQ should never be empty + | Done RevDepMap + | Fail (ConflictSet QPN) FailReason + deriving (Eq, Show, Functor) + -- Above, a choice is called trivial if it clearly does not matter. The + -- special case of triviality we actually consider is if there are no new + -- dependencies introduced by this node. + -- + -- A (flag) choice is called weak if we do want to defer it. This is the + -- case for flags that should be implied by what's currently installed on + -- the system, as opposed to flags that are used to explicitly enable or + -- disable some functionality. + +-- | A package option is a package instance with an optional linking annotation +-- +-- The modular solver has a number of package goals to solve for, and can only +-- pick a single package version for a single goal. In order to allow to +-- install multiple versions of the same package as part of a single solution +-- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both +-- be qualified goals for @P@, allowing to pick a difference version of package +-- @P@ for @0.P@ and @1.P@. +-- +-- Linking is an essential part of this story. In addition to picking a specific +-- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or +-- vice versa). 
Teans that @1.P@ and @0.P@ really must be the very same package +-- (and hence must have the same build time configuration, and their +-- dependencies must also be the exact same). +-- +-- See for details. +data POption = POption I (Maybe PP) + deriving (Eq, Show) + +data FailReason = InconsistentInitialConstraints + | Conflicting [Dep QPN] + | CannotInstall + | CannotReinstall + | Shadowed + | Broken + | GlobalConstraintVersion VR ConstraintSource + | GlobalConstraintInstalled ConstraintSource + | GlobalConstraintSource ConstraintSource + | GlobalConstraintFlag ConstraintSource + | ManualFlag + | MalformedFlagChoice QFN + | MalformedStanzaChoice QSN + | EmptyGoalChoice + | Backjump + | MultipleInstances + | DependenciesNotLinked String + | CyclicDependencies + deriving (Eq, Show) + +-- | Functor for the tree type. +data TreeF a b = + PChoiceF QPN a (PSQ POption b) + | FChoiceF QFN a Bool Bool (PSQ Bool b) + | SChoiceF QSN a Bool (PSQ Bool b) + | GoalChoiceF (PSQ (OpenGoal ()) b) + | DoneF RevDepMap + | FailF (ConflictSet QPN) FailReason + deriving (Functor, Foldable, Traversable) + +out :: Tree a -> TreeF a (Tree a) +out (PChoice p i ts) = PChoiceF p i ts +out (FChoice p i b m ts) = FChoiceF p i b m ts +out (SChoice p i b ts) = SChoiceF p i b ts +out (GoalChoice ts) = GoalChoiceF ts +out (Done x ) = DoneF x +out (Fail c x ) = FailF c x + +inn :: TreeF a (Tree a) -> Tree a +inn (PChoiceF p i ts) = PChoice p i ts +inn (FChoiceF p i b m ts) = FChoice p i b m ts +inn (SChoiceF p i b ts) = SChoice p i b ts +inn (GoalChoiceF ts) = GoalChoice ts +inn (DoneF x ) = Done x +inn (FailF c x ) = Fail c x + +innM :: Monad m => TreeF a (m (Tree a)) -> m (Tree a) +innM (PChoiceF p i ts) = liftM (PChoice p i ) (sequence ts) +innM (FChoiceF p i b m ts) = liftM (FChoice p i b m) (sequence ts) +innM (SChoiceF p i b ts) = liftM (SChoice p i b ) (sequence ts) +innM (GoalChoiceF ts) = liftM (GoalChoice ) (sequence ts) +innM (DoneF x ) = return $ Done x +innM (FailF c x ) = return $ Fail c x + +-- | Determines whether a tree is active, i.e., isn't a failure node. +active :: Tree a -> Bool +active (Fail _ _) = False +active _ = True + +-- | Determines how many active choices are available in a node. Note that we +-- count goal choices as having one choice, always. +choices :: Tree a -> Int +choices (PChoice _ _ ts) = P.length (P.filter active ts) +choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts) +choices (SChoice _ _ _ ts) = P.length (P.filter active ts) +choices (GoalChoice _ ) = 1 +choices (Done _ ) = 1 +choices (Fail _ _ ) = 0 + +-- | Variant of 'choices' that only approximates the number of choices. +dchoices :: Tree a -> P.Degree +dchoices (PChoice _ _ ts) = P.degree (P.filter active ts) +dchoices (FChoice _ _ _ _ ts) = P.degree (P.filter active ts) +dchoices (SChoice _ _ _ ts) = P.degree (P.filter active ts) +dchoices (GoalChoice _ ) = P.ZeroOrOne +dchoices (Done _ ) = P.ZeroOrOne +dchoices (Fail _ _ ) = P.ZeroOrOne + +-- | Variant of 'choices' that only approximates the number of choices. +zeroOrOneChoices :: Tree a -> Bool +zeroOrOneChoices (PChoice _ _ ts) = P.isZeroOrOne (P.filter active ts) +zeroOrOneChoices (FChoice _ _ _ _ ts) = P.isZeroOrOne (P.filter active ts) +zeroOrOneChoices (SChoice _ _ _ ts) = P.isZeroOrOne (P.filter active ts) +zeroOrOneChoices (GoalChoice _ ) = True +zeroOrOneChoices (Done _ ) = True +zeroOrOneChoices (Fail _ _ ) = True + +-- | Catamorphism on trees. +cata :: (TreeF a b -> b) -> Tree a -> b +cata phi x = (phi . fmap (cata phi) . 
out) x + +trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b +trav psi x = cata (inn . psi) x + +-- | Paramorphism on trees. +para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b +para phi = phi . fmap (\ x -> (para phi x, x)) . out + +-- | Anamorphism on trees. +ana :: (b -> TreeF a b) -> b -> Tree a +ana psi = inn . fmap (ana psi) . psi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Validate.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Validate.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Validate.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Validate.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,269 @@ +module Distribution.Client.Dependency.Modular.Validate (validateTree) where + +-- Validation of the tree. +-- +-- The task here is to make sure all constraints hold. After validation, any +-- assignment returned by exploration of the tree should be a complete valid +-- assignment, i.e., actually constitute a solution. + +import Control.Applicative +import Control.Monad.Reader hiding (sequence) +import Data.List as L +import Data.Map as M +import Data.Set as S +import Data.Traversable +import Prelude hiding (sequence) + +import Language.Haskell.Extension (Extension, Language) + +import Distribution.Compiler (CompilerInfo(..)) + +import Distribution.Client.Dependency.Modular.Assignment +import Distribution.Client.Dependency.Modular.Dependency +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Index +import Distribution.Client.Dependency.Modular.Package +import qualified Distribution.Client.Dependency.Modular.PSQ as P +import Distribution.Client.Dependency.Modular.Tree +import Distribution.Client.Dependency.Modular.Version (VR) + +import Distribution.Client.ComponentDeps (Component) +import Distribution.Client.PkgConfigDb (PkgConfigDb, pkgConfigPkgIsPresent) + +-- In practice, most constraints are implication constraints (IF we have made +-- a number of choices, THEN we also have to ensure that). We call constraints +-- that for which the preconditions are fulfilled ACTIVE. We maintain a set +-- of currently active constraints that we pass down the node. +-- +-- We aim at detecting inconsistent states as early as possible. +-- +-- Whenever we make a choice, there are two things that need to happen: +-- +-- (1) We must check that the choice is consistent with the currently +-- active constraints. +-- +-- (2) The choice increases the set of active constraints. For the new +-- active constraints, we must check that they are consistent with +-- the current state. +-- +-- We can actually merge (1) and (2) by saying the the current choice is +-- a new active constraint, fixing the choice. +-- +-- If a test fails, we have detected an inconsistent state. We can +-- disable the current subtree and do not have to traverse it any further. +-- +-- We need a good way to represent the current state, i.e., the current +-- set of active constraints. Since the main situation where we have to +-- search in it is (1), it seems best to store the state by package: for +-- every package, we store which versions are still allowed. If for any +-- package, we have inconsistent active constraints, we can also stop. 
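The bookkeeping sketched in the comment above (remember, per package, which versions are still allowed, and fail as soon as some package's set becomes empty) can be pictured in isolation. The following is only a toy model, not the module's actual PreAssignment/extend machinery; the String package names, Int versions and the fixed candidate pool are invented for the example.

import qualified Data.Map as Map
import Data.Map (Map)

type PkgName' = String
type Version' = Int

-- For every package we remember which versions are still allowed.
type Allowed = Map PkgName' [Version']

-- Adding an active constraint restricts the allowed versions of one package.
-- If no version survives, we have detected an inconsistent state and report
-- the offending package, mirroring the early failure described above.
addConstraint :: PkgName' -> (Version' -> Bool) -> Allowed -> Either PkgName' Allowed
addConstraint pn ok allowed =
  case filter ok (Map.findWithDefault candidatePool pn allowed) of
    [] -> Left pn                           -- no choice left: stop here
    vs -> Right (Map.insert pn vs allowed)  -- still satisfiable
  where
    candidatePool = [1 .. 5]                -- stand-in for "all known versions"

-- Constraining "foo" to >= 3 and then to <= 1 is caught at the second step,
-- without looking at anything else: example == Left "foo".
example :: Either PkgName' Allowed
example = addConstraint "foo" (>= 3) Map.empty >>= addConstraint "foo" (<= 1)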
+-- This is a particular way to read task (2): +-- +-- (2, weak) We only check if the new constraints are consistent with +-- the choices we've already made, and add them to the active set. +-- +-- (2, strong) We check if the new constraints are consistent with the +-- choices we've already made, and the constraints we already have. +-- +-- It currently seems as if we're implementing the weak variant. However, +-- when used together with 'preferEasyGoalChoices', we will find an +-- inconsistent state in the very next step. +-- +-- What do we do about flags? +-- +-- Like for packages, we store the flag choices we have already made. +-- Now, regarding (1), we only have to test whether we've decided the +-- current flag before. Regarding (2), the interesting bit is in discovering +-- the new active constraints. To this end, we look up the constraints for +-- the package the flag belongs to, and traverse its flagged dependencies. +-- Wherever we find the flag in question, we start recording dependencies +-- underneath as new active dependencies. If we encounter other flags, we +-- check if we've chosen them already and either proceed or stop. + +-- | The state needed during validation. +data ValidateState = VS { + supportedExt :: Extension -> Bool, + supportedLang :: Language -> Bool, + presentPkgs :: PN -> VR -> Bool, + index :: Index, + saved :: Map QPN (FlaggedDeps Component QPN), -- saved, scoped, dependencies + pa :: PreAssignment, + qualifyOptions :: QualifyOptions +} + +type Validate = Reader ValidateState + +validate :: Tree QGoalReason -> Validate (Tree QGoalReason) +validate = cata go + where + go :: TreeF QGoalReason (Validate (Tree QGoalReason)) -> Validate (Tree QGoalReason) + + go (PChoiceF qpn gr ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn) ts) + go (FChoiceF qfn gr b m ts) = + do + -- Flag choices may occur repeatedly (because they can introduce new constraints + -- in various places). However, subsequent choices must be consistent. We thereby + -- collapse repeated flag choice nodes. + PA _ pfa _ <- asks pa -- obtain current flag-preassignment + case M.lookup qfn pfa of + Just rb -> -- flag has already been assigned; collapse choice to the correct branch + case P.lookup rb ts of + Just t -> goF qfn rb t + Nothing -> return $ Fail (varToConflictSet (F qfn)) (MalformedFlagChoice qfn) + Nothing -> -- flag choice is new, follow both branches + FChoice qfn gr b m <$> sequence (P.mapWithKey (goF qfn) ts) + go (SChoiceF qsn gr b ts) = + do + -- Optional stanza choices are very similar to flag choices. + PA _ _ psa <- asks pa -- obtain current stanza-preassignment + case M.lookup qsn psa of + Just rb -> -- stanza choice has already been made; collapse choice to the correct branch + case P.lookup rb ts of + Just t -> goS qsn rb t + Nothing -> return $ Fail (varToConflictSet (S qsn)) (MalformedStanzaChoice qsn) + Nothing -> -- stanza choice is new, follow both branches + SChoice qsn gr b <$> sequence (P.mapWithKey (goS qsn) ts) + + -- We don't need to do anything for goal choices or failure nodes. + go (GoalChoiceF ts) = GoalChoice <$> sequence ts + go (DoneF rdm ) = pure (Done rdm) + go (FailF c fr ) = pure (Fail c fr) + + -- What to do for package nodes ... 
+ goP :: QPN -> POption -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goP qpn@(Q _pp pn) (POption i _) r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + idx <- asks index -- obtain the index + svd <- asks saved -- obtain saved dependencies + qo <- asks qualifyOptions + -- obtain dependencies and index-dictated exclusions introduced by the choice + let (PInfo deps _ mfr) = idx ! pn ! i + -- qualify the deps in the current scope + let qdeps = qualifyDeps qo qpn deps + -- the new active constraints are given by the instance we have chosen, + -- plus the dependency information we have for that instance + let newactives = Dep qpn (Fixed i (P qpn)) : L.map (resetVar (P qpn)) (extractDeps pfa psa qdeps) + -- We now try to extend the partial assignment with the new active constraints. + let mnppa = extend extSupported langSupported pkgPresent (P qpn) ppa newactives + -- In case we continue, we save the scoped dependencies + let nsvd = M.insert qpn qdeps svd + case mfr of + Just fr -> -- The index marks this as an invalid choice. We can stop. + return (Fail (varToConflictSet (P qpn)) fr) + _ -> case mnppa of + Left (c, d) -> -- We have an inconsistency. We can stop. + return (Fail c (Conflicting d)) + Right nppa -> -- We have an updated partial assignment for the recursive validation. + local (\ s -> s { pa = PA nppa pfa psa, saved = nsvd }) r + + -- What to do for flag nodes ... + goF :: QFN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goF qfn@(FN (PI qpn _i) _f) b r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + svd <- asks saved -- obtain saved dependencies + -- Note that there should be saved dependencies for the package in question, + -- because while building, we do not choose flags before we see the packages + -- that define them. + let qdeps = svd ! qpn + -- We take the *saved* dependencies, because these have been qualified in the + -- correct scope. + -- + -- Extend the flag assignment + let npfa = M.insert qfn b pfa + -- We now try to get the new active dependencies we might learn about because + -- we have chosen a new flag. + let newactives = extractNewDeps (F qfn) b npfa psa qdeps + -- As in the package case, we try to extend the partial assignment. + case extend extSupported langSupported pkgPresent (F qfn) ppa newactives of + Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found + Right nppa -> local (\ s -> s { pa = PA nppa npfa psa }) r + + -- What to do for stanza nodes (similar to flag nodes) ... 
+ goS :: QSN -> Bool -> Validate (Tree QGoalReason) -> Validate (Tree QGoalReason) + goS qsn@(SN (PI qpn _i) _f) b r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + svd <- asks saved -- obtain saved dependencies + -- Note that there should be saved dependencies for the package in question, + -- because while building, we do not choose flags before we see the packages + -- that define them. + let qdeps = svd ! qpn + -- We take the *saved* dependencies, because these have been qualified in the + -- correct scope. + -- + -- Extend the flag assignment + let npsa = M.insert qsn b psa + -- We now try to get the new active dependencies we might learn about because + -- we have chosen a new flag. + let newactives = extractNewDeps (S qsn) b pfa npsa qdeps + -- As in the package case, we try to extend the partial assignment. + case extend extSupported langSupported pkgPresent (S qsn) ppa newactives of + Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found + Right nppa -> local (\ s -> s { pa = PA nppa pfa npsa }) r + +-- | We try to extract as many concrete dependencies from the given flagged +-- dependencies as possible. We make use of all the flag knowledge we have +-- already acquired. +extractDeps :: FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] +extractDeps fa sa deps = do + d <- deps + case d of + Simple sd _ -> return sd + Flagged qfn _ td fd -> case M.lookup qfn fa of + Nothing -> mzero + Just True -> extractDeps fa sa td + Just False -> extractDeps fa sa fd + Stanza qsn td -> case M.lookup qsn sa of + Nothing -> mzero + Just True -> extractDeps fa sa td + Just False -> [] + +-- | We try to find new dependencies that become available due to the given +-- flag or stanza choice. We therefore look for the choice in question, and then call +-- 'extractDeps' for everything underneath. +extractNewDeps :: Var QPN -> Bool -> FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] +extractNewDeps v b fa sa = go + where + go :: FlaggedDeps comp QPN -> [Dep QPN] -- Type annotation necessary (polymorphic recursion) + go deps = do + d <- deps + case d of + Simple _ _ -> mzero + Flagged qfn' _ td fd + | v == F qfn' -> L.map (resetVar v) $ + if b then extractDeps fa sa td else extractDeps fa sa fd + | otherwise -> case M.lookup qfn' fa of + Nothing -> mzero + Just True -> go td + Just False -> go fd + Stanza qsn' td + | v == S qsn' -> L.map (resetVar v) $ + if b then extractDeps fa sa td else [] + | otherwise -> case M.lookup qsn' sa of + Nothing -> mzero + Just True -> go td + Just False -> [] + +-- | Interface. 
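The extractDeps/extractNewDeps traversal above can be pictured with a much smaller standalone model: walk a tree of conditional dependencies using whatever flag knowledge is already available, keep what is definitely active, and skip branches whose flag is still undecided. The CondDep type and the Bool-per-flag map below are simplified stand-ins invented for this sketch, not the real FlaggedDeps structure.

import qualified Data.Map as Map
import Data.Map (Map)

data CondDep flag dep
  = Plain dep                                      -- unconditional dependency
  | If flag [CondDep flag dep] [CondDep flag dep]  -- then / else branches

activeDeps :: Ord flag => Map flag Bool -> [CondDep flag dep] -> [dep]
activeDeps fa = concatMap go
  where
    go (Plain d)    = [d]
    go (If f ts es) = case Map.lookup f fa of
      Nothing    -> []                 -- flag undecided: nothing is definite yet
      Just True  -> activeDeps fa ts
      Just False -> activeDeps fa es

extractNewDeps then differs only in that it searches for one particular flag or stanza and starts collecting underneath it.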
+validateTree :: CompilerInfo -> Index -> PkgConfigDb -> Tree QGoalReason -> Tree QGoalReason +validateTree cinfo idx pkgConfigDb t = runReader (validate t) VS { + supportedExt = maybe (const True) -- if compiler has no list of extensions, we assume everything is supported + (\ es -> let s = S.fromList es in \ x -> S.member x s) + (compilerInfoExtensions cinfo) + , supportedLang = maybe (const True) + (flip L.elem) -- use list lookup because language list is small and no Ord instance + (compilerInfoLanguages cinfo) + , presentPkgs = pkgConfigPkgIsPresent pkgConfigDb + , index = idx + , saved = M.empty + , pa = PA M.empty M.empty M.empty + , qualifyOptions = defaultQualifyOptions idx + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Var.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Var.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Var.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Var.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,45 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Client.Dependency.Modular.Var ( + Var(..) + , simplifyVar + , showVar + , varPI + ) where + +import Prelude hiding (pi) + +import Distribution.Client.Dependency.Modular.Flag +import Distribution.Client.Dependency.Modular.Package + +{------------------------------------------------------------------------------- + Variables +-------------------------------------------------------------------------------} + +-- | The type of variables that play a role in the solver. +-- Note that the tree currently does not use this type directly, +-- and rather has separate tree nodes for the different types of +-- variables. This fits better with the fact that in most cases, +-- these have to be treated differently. +data Var qpn = P qpn | F (FN qpn) | S (SN qpn) + deriving (Eq, Ord, Show, Functor) + +-- | For computing conflict sets, we map flag choice vars to a +-- single flag choice. This means that all flag choices are treated +-- as interdependent. So if one flag of a package ends up in a +-- conflict set, then all flags are being treated as being part of +-- the conflict set. 
+simplifyVar :: Var qpn -> Var qpn +simplifyVar (P qpn) = P qpn +simplifyVar (F (FN pi _)) = F (FN pi (mkFlag "flag")) +simplifyVar (S qsn) = S qsn + +showVar :: Var QPN -> String +showVar (P qpn) = showQPN qpn +showVar (F qfn) = showQFN qfn +showVar (S qsn) = showQSN qsn + +-- | Extract the package instance from a Var +varPI :: Var QPN -> (QPN, Maybe I) +varPI (P qpn) = (qpn, Nothing) +varPI (F (FN (PI qpn i) _)) = (qpn, Just i) +varPI (S (SN (PI qpn i) _)) = (qpn, Just i) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Version.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Version.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular/Version.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,53 @@ +module Distribution.Client.Dependency.Modular.Version + ( Ver + , VR + , anyVR + , checkVR + , eqVR + , showVer + , showVR + , simplifyVR + , (.&&.) + , (.||.) + ) where + +import qualified Distribution.Version as CV -- from Cabal +import Distribution.Text -- from Cabal + +-- | Preliminary type for versions. +type Ver = CV.Version + +-- | String representation of a version. +showVer :: Ver -> String +showVer = display + +-- | Version range. Consists of a lower and upper bound. +type VR = CV.VersionRange + +-- | String representation of a version range. +showVR :: VR -> String +showVR = display + +-- | Unconstrained version range. +anyVR :: VR +anyVR = CV.anyVersion + +-- | Version range fixing a single version. +eqVR :: Ver -> VR +eqVR = CV.thisVersion + +-- | Intersect two version ranges. +(.&&.) :: VR -> VR -> VR +(.&&.) = CV.intersectVersionRanges + +-- | Union of two version ranges. +(.||.) :: VR -> VR -> VR +(.||.) = CV.unionVersionRanges + +-- | Simplify a version range. +simplifyVR :: VR -> VR +simplifyVR = CV.simplifyVersionRange + +-- | Checking a version against a version range. +checkVR :: VR -> Ver -> Bool +checkVR = flip CV.withinRange diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Modular.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,59 @@ +module Distribution.Client.Dependency.Modular + ( modularResolver, SolverConfig(..)) where + +-- Here, we try to map between the external cabal-install solver +-- interface and the internal interface that the solver actually +-- expects. There are a number of type conversions to perform: we +-- have to convert the package indices to the uniform index used +-- by the solver; we also have to convert the initial constraints; +-- and finally, we have to convert back the resulting install +-- plan. 
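One of these conversions, turning the flat list of labeled constraints into a finite map keyed by package name, is just a grouping step. The small example below shows the fromListWith (++) idiom used for it further down, with plain Strings standing in for the real constraint types; names and values here are purely illustrative.

import qualified Data.Map as Map
import Data.Map (Map)

groupByPackage :: [(String, String)] -> Map String [String]
groupByPackage = Map.fromListWith (++) . map (\(pn, c) -> (pn, [c]))

-- groupByPackage [("foo", ">= 1"), ("bar", "installed"), ("foo", "< 2")]
--   == Map.fromList [("bar", ["installed"]), ("foo", ["< 2", ">= 1"])]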
+ +import Data.Map as M + ( fromListWith ) +import Distribution.Client.Dependency.Modular.Assignment + ( Assignment, toCPs ) +import Distribution.Client.Dependency.Modular.Dependency + ( RevDepMap ) +import Distribution.Client.Dependency.Modular.ConfiguredConversion + ( convCP ) +import Distribution.Client.Dependency.Modular.IndexConversion + ( convPIs ) +import Distribution.Client.Dependency.Modular.Log + ( logToProgress ) +import Distribution.Client.Dependency.Modular.Package + ( PN ) +import Distribution.Client.Dependency.Modular.Solver + ( SolverConfig(..), solve ) +import Distribution.Client.Dependency.Types + ( DependencyResolver, ResolverPackage + , PackageConstraint(..), unlabelPackageConstraint ) +import Distribution.System + ( Platform(..) ) + +-- | Ties the two worlds together: classic cabal-install vs. the modular +-- solver. Performs the necessary translations before and after. +modularResolver :: SolverConfig -> DependencyResolver +modularResolver sc (Platform arch os) cinfo iidx sidx pkgConfigDB pprefs pcs pns = + fmap (uncurry postprocess) $ -- convert install plan + logToProgress (maxBackjumps sc) $ -- convert log format into progress format + solve sc cinfo idx pkgConfigDB pprefs gcs pns + where + -- Indices have to be converted into solver-specific uniform index. + idx = convPIs os arch cinfo (shadowPkgs sc) (strongFlags sc) iidx sidx + -- Constraints have to be converted into a finite map indexed by PN. + gcs = M.fromListWith (++) (map pair pcs) + where + pair lpc = (pcName $ unlabelPackageConstraint lpc, [lpc]) + + -- Results have to be converted into an install plan. + postprocess :: Assignment -> RevDepMap -> [ResolverPackage] + postprocess a rdm = map (convCP iidx sidx) (toCPs a rdm) + + -- Helper function to extract the PN from a constraint. + pcName :: PackageConstraint -> PN + pcName (PackageConstraintVersion pn _) = pn + pcName (PackageConstraintInstalled pn ) = pn + pcName (PackageConstraintSource pn ) = pn + pcName (PackageConstraintFlags pn _) = pn + pcName (PackageConstraintStanzas pn _) = pn diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Constraints.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Constraints.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Constraints.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Constraints.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,599 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency.TopDown.Constraints +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- A set of satisfiable constraints on a set of packages. 
+----------------------------------------------------------------------------- +module Distribution.Client.Dependency.TopDown.Constraints ( + Constraints, + empty, + packages, + choices, + isPaired, + + addTarget, + constrain, + Satisfiable(..), + conflicting, + ) where + +import Distribution.Client.Dependency.TopDown.Types +import qualified Distribution.Client.PackageIndex as PackageIndex +import Distribution.Client.PackageIndex + ( PackageIndex ) +import Distribution.Package + ( PackageName, PackageId, PackageIdentifier(..) + , Package(packageId), packageName, packageVersion + , Dependency ) +import Distribution.Version + ( Version ) +import Distribution.Client.Utils + ( mergeBy, MergeResult(..) ) + +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( Monoid(mempty) ) +#endif +import Data.Either + ( partitionEithers ) +import qualified Data.Map as Map +import Data.Map (Map) +import qualified Data.Set as Set +import Data.Set (Set) +import Control.Exception + ( assert ) + + +-- | A set of satisfiable constraints on a set of packages. +-- +-- The 'Constraints' type keeps track of a set of targets (identified by +-- package name) that we know that we need. It also keeps track of a set of +-- constraints over all packages in the environment. +-- +-- It maintains the guarantee that, for the target set, the constraints are +-- satisfiable, meaning that there is at least one instance available for each +-- package name that satisfies the constraints on that package name. +-- +-- Note that it is possible to over-constrain a package in the environment that +-- is not in the target set -- the satisfiability guarantee is only maintained +-- for the target set. This is useful because it allows us to exclude packages +-- without needing to know if it would ever be needed or not (e.g. allows +-- excluding broken installed packages). +-- +-- Adding a constraint for a target package can fail if it would mean that +-- there are no remaining choices. +-- +-- Adding a constraint for package that is not a target never fails. +-- +-- Adding a new target package can fail if that package already has conflicting +-- constraints. +-- +data Constraints installed source reason + = Constraints + + -- | Targets that we know we need. This is the set for which we + -- guarantee the constraints are satisfiable. + !(Set PackageName) + + -- | The available/remaining set. These are packages that have available + -- choices remaining. This is guaranteed to cover the target packages, + -- but can also cover other packages in the environment. New targets can + -- only be added if there are available choices remaining for them. + !(PackageIndex (InstalledOrSource installed source)) + + -- | The excluded set. Choices that we have excluded by applying + -- constraints. Excluded choices are tagged with the reason. + !(PackageIndex (ExcludedPkg (InstalledOrSource installed source) reason)) + + -- | Paired choices, this is an ugly hack. + !(Map PackageName (Version, Version)) + + -- | Purely for the invariant, we keep a copy of the original index + !(PackageIndex (InstalledOrSource installed source)) + + +-- | Reasons for excluding all, or some choices for a package version. +-- +-- Each package version can have a source instance, an installed instance or +-- both. We distinguish reasons for constraints that excluded both instances, +-- from reasons for constraints that excluded just one instance. 
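The available/excluded split described above can be modelled in miniature: applying a constraint never drops candidates, it only moves them from the available pile to the excluded pile together with the reason, which is exactly the conservation property the invariant below spells out. The Piles type and its field names are invented for this sketch.

import Data.List (partition)

data Piles pkg reason = Piles
  { availablePile :: [pkg]
  , excludedPile  :: [(pkg, reason)]
  } deriving Show

-- Exclude every candidate that fails the predicate, remembering why.
applyConstraint :: (pkg -> Bool) -> reason -> Piles pkg reason -> Piles pkg reason
applyConstraint keep why (Piles avail excl) =
  let (stay, gone) = partition keep avail
  in  Piles stay (excl ++ [ (p, why) | p <- gone ])

-- No candidate is ever gained or lost, only moved between the piles.
conserved :: Piles pkg reason -> Piles pkg reason -> Bool
conserved before after = count before == count after
  where
    count p = length (availablePile p) + length (excludedPile p)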
+-- +data ExcludedPkg pkg reason + = ExcludedPkg pkg + [reason] -- ^ reasons for excluding both source and installed instances + [reason] -- ^ reasons for excluding the installed instance + [reason] -- ^ reasons for excluding the source instance + +instance Package pkg => Package (ExcludedPkg pkg reason) where + packageId (ExcludedPkg p _ _ _) = packageId p + + +-- | There is a conservation of packages property. Packages are never gained or +-- lost, they just transfer from the remaining set to the excluded set. +-- +invariant :: (Package installed, Package source) + => Constraints installed source a -> Bool +invariant (Constraints targets available excluded _ original) = + + -- Relationship between available, excluded and original + all check merged + + -- targets is a subset of available + && all (PackageIndex.elemByPackageName available) (Set.elems targets) + + where + merged = mergeBy (\a b -> packageId a `compare` mergedPackageId b) + (PackageIndex.allPackages original) + (mergeBy (\a b -> packageId a `compare` packageId b) + (PackageIndex.allPackages available) + (PackageIndex.allPackages excluded)) + where + mergedPackageId (OnlyInLeft p ) = packageId p + mergedPackageId (OnlyInRight p) = packageId p + mergedPackageId (InBoth p _) = packageId p + + -- If the package was originally installed only, then + check (InBoth (InstalledOnly _) cur) = case cur of + -- now it's either still remaining as installed only + OnlyInLeft (InstalledOnly _) -> True + -- or it has been excluded + OnlyInRight (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True + _ -> False + + -- If the package was originally available only, then + check (InBoth (SourceOnly _) cur) = case cur of + -- now it's either still remaining as source only + OnlyInLeft (SourceOnly _) -> True + -- or it has been excluded + OnlyInRight (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True + _ -> False + + -- If the package was originally installed and source, then + check (InBoth (InstalledAndSource _ _) cur) = case cur of + -- We can have both remaining: + OnlyInLeft (InstalledAndSource _ _) -> True + + -- both excluded, in particular it can have had the just source or + -- installed excluded and later had both excluded so we do not mind if + -- the source or installed excluded is empty or non-empty. + OnlyInRight (ExcludedPkg (InstalledAndSource _ _) _ _ _) -> True + + -- the installed remaining and the source excluded: + InBoth (InstalledOnly _) + (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True + + -- the source remaining and the installed excluded: + InBoth (SourceOnly _) + (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True + _ -> False + + check _ = False + + +-- | An update to the constraints can move packages between the two piles +-- but not gain or loose packages. 
+transitionsTo :: (Package installed, Package source) + => Constraints installed source a + -> Constraints installed source a -> Bool +transitionsTo constraints @(Constraints _ available excluded _ _) + constraints'@(Constraints _ available' excluded' _ _) = + + invariant constraints && invariant constraints' + && null availableGained && null excludedLost + && map (mapInstalledOrSource packageId packageId) availableLost + == map (mapInstalledOrSource packageId packageId) excludedGained + + where + (availableLost, availableGained) + = partitionEithers (foldr lostAndGained [] availableChange) + + (excludedLost, excludedGained) + = partitionEithers (foldr lostAndGained [] excludedChange) + + availableChange = + mergeBy (\a b -> packageId a `compare` packageId b) + (PackageIndex.allPackages available) + (PackageIndex.allPackages available') + + excludedChange = + mergeBy (\a b -> packageId a `compare` packageId b) + [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded ] + [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded' ] + + lostAndGained mr rest = case mr of + OnlyInLeft pkg -> Left pkg : rest + InBoth (InstalledAndSource pkg _) + (SourceOnly _) -> Left (InstalledOnly pkg) : rest + InBoth (InstalledAndSource _ pkg) + (InstalledOnly _) -> Left (SourceOnly pkg) : rest + InBoth (SourceOnly _) + (InstalledAndSource pkg _) -> Right (InstalledOnly pkg) : rest + InBoth (InstalledOnly _) + (InstalledAndSource _ pkg) -> Right (SourceOnly pkg) : rest + OnlyInRight pkg -> Right pkg : rest + _ -> rest + + mapInstalledOrSource f g pkg = case pkg of + InstalledOnly a -> InstalledOnly (f a) + SourceOnly b -> SourceOnly (g b) + InstalledAndSource a b -> InstalledAndSource (f a) (g b) + +-- | We construct 'Constraints' with an initial 'PackageIndex' of all the +-- packages available. +-- +empty :: PackageIndex InstalledPackageEx + -> PackageIndex UnconfiguredPackage + -> Constraints InstalledPackageEx UnconfiguredPackage reason +empty installed source = + Constraints targets pkgs excluded pairs pkgs + where + targets = mempty + excluded = mempty + pkgs = PackageIndex.fromList + . map toInstalledOrSource + $ mergeBy (\a b -> packageId a `compare` packageId b) + (PackageIndex.allPackages installed) + (PackageIndex.allPackages source) + toInstalledOrSource (OnlyInLeft i ) = InstalledOnly i + toInstalledOrSource (OnlyInRight a) = SourceOnly a + toInstalledOrSource (InBoth i a) = InstalledAndSource i a + + -- pick up cases like base-3 and 4 where one version depends on the other: + pairs = Map.fromList + [ (name, (packageVersion pkgid1, packageVersion pkgid2)) + | [pkg1, pkg2] <- PackageIndex.allPackagesByName installed + , let name = packageName pkg1 + pkgid1 = packageId pkg1 + pkgid2 = packageId pkg2 + , any ((pkgid1==) . packageId) (sourceDeps pkg2) + || any ((pkgid2==) . packageId) (sourceDeps pkg1) ] + + +-- | The package targets. +-- +packages :: Constraints installed source reason + -> Set PackageName +packages (Constraints ts _ _ _ _) = ts + + +-- | The package choices that are still available. 
+-- +choices :: Constraints installed source reason + -> PackageIndex (InstalledOrSource installed source) +choices (Constraints _ available _ _ _) = available + +isPaired :: Constraints installed source reason + -> PackageId -> Maybe PackageId +isPaired (Constraints _ _ _ pairs _) (PackageIdentifier name version) = + case Map.lookup name pairs of + Just (v1, v2) + | version == v1 -> Just (PackageIdentifier name v2) + | version == v2 -> Just (PackageIdentifier name v1) + _ -> Nothing + + +data Satisfiable constraints discarded reason + = Satisfiable constraints discarded + | Unsatisfiable + | ConflictsWith [(PackageId, [reason])] + + +addTarget :: (Package installed, Package source) + => PackageName + -> Constraints installed source reason + -> Satisfiable (Constraints installed source reason) + () reason +addTarget pkgname + constraints@(Constraints targets available excluded paired original) + + -- If it's already a target then there's no change + | pkgname `Set.member` targets + = Satisfiable constraints () + + -- If there is some possible choice available for this target then we're ok + | PackageIndex.elemByPackageName available pkgname + = let targets' = Set.insert pkgname targets + constraints' = Constraints targets' available excluded paired original + in assert (constraints `transitionsTo` constraints') $ + Satisfiable constraints' () + + -- If it's not available and it is excluded then we return the conflicts + | PackageIndex.elemByPackageName excluded pkgname + = ConflictsWith conflicts + + -- Otherwise, it's not available and it has not been excluded so the + -- package is simply completely unknown. + | otherwise + = Unsatisfiable + + where + conflicts = + [ (packageId pkg, reasons) + | let excludedChoices = PackageIndex.lookupPackageName excluded pkgname + , ExcludedPkg pkg isReasons iReasons sReasons <- excludedChoices + , let reasons = isReasons ++ iReasons ++ sReasons ] + + +constrain :: (Package installed, Package source) + => PackageName -- ^ which package to constrain + -> (Version -> Bool -> Bool) -- ^ the constraint test + -> reason -- ^ the reason for the constraint + -> Constraints installed source reason + -> Satisfiable (Constraints installed source reason) + [PackageId] reason +constrain pkgname constraint reason + constraints@(Constraints targets available excluded paired original) + + | pkgname `Set.member` targets && not anyRemaining + = if null conflicts then Unsatisfiable + else ConflictsWith conflicts + + | otherwise + = let constraints' = Constraints targets available' excluded' paired original + in assert (constraints `transitionsTo` constraints') $ + Satisfiable constraints' (map packageId newExcluded) + + where + -- This tells us if any packages would remain at all for this package name if + -- we applied this constraint. This amounts to checking if any package + -- satisfies the given constraint, including version range and installation + -- status. 
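As a brief aside, the constraint test threaded through constrain has the shape Version -> Bool -> Bool, where the Bool (as used by testConstraint just below) distinguishes the installed instance (True) from the source instance (False). Two plausible tests, written with Int standing in for Version, might look as follows; these are illustrative examples only, not tests taken from cabal-install.

type V = Int   -- stand-in for Distribution.Version.Version

-- Allow only versions within an inclusive range, for either instance kind.
inRange :: V -> V -> (V -> Bool -> Bool)
inRange lo hi v _isInstalled = lo <= v && v <= hi

-- Accept any installed instance, but require source instances to be in range.
installedOrInRange :: V -> V -> (V -> Bool -> Bool)
installedOrInRange lo hi v isInstalled = isInstalled || inRange lo hi v isInstalled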
+ -- + (available', excluded', newExcluded, anyRemaining, conflicts) = + updatePkgsStatus + available excluded + [] False [] + (mergeBy (\pkg pkg' -> packageVersion pkg `compare` packageVersion pkg') + (PackageIndex.lookupPackageName available pkgname) + (PackageIndex.lookupPackageName excluded pkgname)) + + testConstraint pkg = + let ver = packageVersion pkg in + case Map.lookup (packageName pkg) paired of + + Just (v1, v2) + | ver == v1 || ver == v2 + -> case pkg of + InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) + SourceOnly spkg -> SourceOnly (spkg, sOk) + InstalledAndSource ipkg spkg -> + InstalledAndSource (ipkg, iOk) (spkg, sOk) + where + iOk = constraint v1 True || constraint v2 True + sOk = constraint v1 False || constraint v2 False + + _ -> case pkg of + InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) + SourceOnly spkg -> SourceOnly (spkg, sOk) + InstalledAndSource ipkg spkg -> + InstalledAndSource (ipkg, iOk) (spkg, sOk) + where + iOk = constraint ver True + sOk = constraint ver False + + -- For the info about available and excluded versions of the package in + -- question, update the info given the current constraint + -- + -- We update the available package map and the excluded package map + -- we also collect: + -- * the change in available packages (for logging) + -- * whether there are any remaining choices + -- * any constraints that conflict with the current constraint + + updatePkgsStatus _ _ nePkgs ok cs _ + | seq nePkgs $ seq ok $ seq cs False = undefined + + updatePkgsStatus aPkgs ePkgs nePkgs ok cs [] + = (aPkgs, ePkgs, reverse nePkgs, ok, reverse cs) + + updatePkgsStatus aPkgs ePkgs nePkgs ok cs (pkg:pkgs) = + let (aPkgs', ePkgs', mnePkg, ok', mc) = updatePkgStatus aPkgs ePkgs pkg + nePkgs' = maybeCons mnePkg nePkgs + cs' = maybeCons mc cs + in updatePkgsStatus aPkgs' ePkgs' nePkgs' (ok' || ok) cs' pkgs + + maybeCons Nothing xs = xs + maybeCons (Just x) xs = x:xs + + + -- For the info about an available or excluded version of the package in + -- question, update the info given the current constraint. 
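testConstraint's special case for paired versions (the base-3/base-4 situation set up in 'empty') boils down to: if the version in question is one half of a pair, the constraint counts as satisfied when either half satisfies it. A stripped-down version of that check, with String names, Int versions and without the installed/source distinction, is sketched below; all names are invented for the illustration.

import qualified Data.Map as Map
import Data.Map (Map)

satisfiesWithPairing
  :: Map String (Int, Int)   -- paired versions per package name
  -> (Int -> Bool)           -- the (simplified) constraint test
  -> String -> Int -> Bool
satisfiesWithPairing pairs ok name ver =
  case Map.lookup name pairs of
    Just (v1, v2) | ver == v1 || ver == v2 -> ok v1 || ok v2
    _                                      -> ok ver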
+ -- + updatePkgStatus aPkgs ePkgs pkg = + case viewPackageStatus pkg of + AllAvailable (InstalledOnly (aiPkg, False)) -> + removeAvailable False + (InstalledOnly aiPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) + Nothing + + AllAvailable (SourceOnly (asPkg, False)) -> + removeAvailable False + (SourceOnly asPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) + Nothing + + AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, False)) -> + removeAvailable False + (InstalledAndSource aiPkg asPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledAndSource aiPkg asPkg) [reason] [] []) + Nothing + + AllAvailable (InstalledAndSource (aiPkg, True) (asPkg, False)) -> + removeAvailable True + (SourceOnly asPkg) + (PackageIndex.insert (InstalledOnly aiPkg)) + (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) + Nothing + + AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, True)) -> + removeAvailable True + (InstalledOnly aiPkg) + (PackageIndex.insert (SourceOnly asPkg)) + (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) + Nothing + + AllAvailable _ -> noChange True Nothing + + AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, False) _ _ srs) -> + removeAvailable False + (InstalledOnly aiPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledAndSource aiPkg esPkg) [reason] [] srs) + Nothing + + AvailableExcluded (_aiPkg, True) (ExcludedPkg (esPkg, False) _ _ srs) -> + addExtraExclusion True + (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) + Nothing + + AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, True) _ _ srs) -> + removeAvailable True + (InstalledOnly aiPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledAndSource aiPkg esPkg) [] [reason] srs) + (Just (pkgid, srs)) + + AvailableExcluded (_aiPkg, True) (ExcludedPkg (_esPkg, True) _ _ srs) -> + noChange True + (Just (pkgid, srs)) + + ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (asPkg, False) -> + removeAvailable False + (SourceOnly asPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledAndSource eiPkg asPkg) [reason] irs []) + Nothing + + ExcludedAvailable (ExcludedPkg (eiPkg, True) _ irs _) (asPkg, False) -> + removeAvailable False + (SourceOnly asPkg) + (PackageIndex.deletePackageId pkgid) + (ExcludedPkg (InstalledAndSource eiPkg asPkg) [] irs [reason]) + (Just (pkgid, irs)) + + ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (_asPkg, True) -> + addExtraExclusion True + (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) + Nothing + + ExcludedAvailable (ExcludedPkg (_eiPkg, True) _ irs _) (_asPkg, True) -> + noChange True + (Just (pkgid, irs)) + + AllExcluded (ExcludedPkg (InstalledOnly (eiPkg, False)) _ irs _) -> + addExtraExclusion False + (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) + Nothing + + AllExcluded (ExcludedPkg (InstalledOnly (_eiPkg, True)) _ irs _) -> + noChange False + (Just (pkgid, irs)) + + AllExcluded (ExcludedPkg (SourceOnly (esPkg, False)) _ _ srs) -> + addExtraExclusion False + (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) + Nothing + + AllExcluded (ExcludedPkg (SourceOnly (_esPkg, True)) _ _ srs) -> + noChange False + (Just (pkgid, srs)) + + AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, False)) isrs irs srs) -> + addExtraExclusion False + (ExcludedPkg (InstalledAndSource eiPkg esPkg) (reason:isrs) irs srs) + Nothing + + AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, True) (esPkg, 
False)) isrs irs srs) -> + addExtraExclusion False + (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs irs (reason:srs)) + (Just (pkgid, irs)) + + AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, True)) isrs irs srs) -> + addExtraExclusion False + (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs (reason:irs) srs) + (Just (pkgid, srs)) + + AllExcluded (ExcludedPkg (InstalledAndSource (_eiPkg, True) (_esPkg, True)) isrs irs srs) -> + noChange False + (Just (pkgid, isrs ++ irs ++ srs)) + + where + removeAvailable ok nePkg adjustAvailable ePkg c = + let aPkgs' = adjustAvailable aPkgs + ePkgs' = PackageIndex.insert ePkg ePkgs + in aPkgs' `seq` ePkgs' `seq` + (aPkgs', ePkgs', Just nePkg, ok, c) + + addExtraExclusion ok ePkg c = + let ePkgs' = PackageIndex.insert ePkg ePkgs + in ePkgs' `seq` + (aPkgs, ePkgs', Nothing, ok, c) + + noChange ok c = + (aPkgs, ePkgs, Nothing, ok, c) + + pkgid = case pkg of OnlyInLeft p -> packageId p + OnlyInRight p -> packageId p + InBoth p _ -> packageId p + + + viewPackageStatus + :: (Package installed, Package source) + => MergeResult (InstalledOrSource installed source) + (ExcludedPkg (InstalledOrSource installed source) reason) + -> PackageStatus (installed, Bool) (source, Bool) reason + viewPackageStatus merged = + case merged of + OnlyInLeft aPkg -> + AllAvailable (testConstraint aPkg) + + OnlyInRight (ExcludedPkg ePkg isrs irs srs) -> + AllExcluded (ExcludedPkg (testConstraint ePkg) isrs irs srs) + + InBoth (InstalledOnly aiPkg) + (ExcludedPkg (SourceOnly esPkg) [] [] srs) -> + case testConstraint (InstalledAndSource aiPkg esPkg) of + InstalledAndSource (aiPkg', iOk) (esPkg', sOk) -> + AvailableExcluded (aiPkg', iOk) (ExcludedPkg (esPkg', sOk) [] [] srs) + _ -> impossible + + InBoth (SourceOnly asPkg) + (ExcludedPkg (InstalledOnly eiPkg) [] irs []) -> + case testConstraint (InstalledAndSource eiPkg asPkg) of + InstalledAndSource (eiPkg', iOk) (asPkg', sOk) -> + ExcludedAvailable (ExcludedPkg (eiPkg', iOk) [] irs []) (asPkg', sOk) + _ -> impossible + _ -> impossible + where + impossible = error "impossible: viewPackageStatus invariant violation" + +-- A intermediate structure that enumerates all the possible cases given the +-- invariant. 
This helps us to get simpler and complete pattern matching in +-- updatePkg above +-- +data PackageStatus installed source reason + = AllAvailable (InstalledOrSource installed source) + | AllExcluded (ExcludedPkg (InstalledOrSource installed source) reason) + | AvailableExcluded installed (ExcludedPkg source reason) + | ExcludedAvailable (ExcludedPkg installed reason) source + + +conflicting :: (Package installed, Package source) + => Constraints installed source reason + -> Dependency + -> [(PackageId, [reason])] +conflicting (Constraints _ _ excluded _ _) dep = + [ (packageId pkg, reasonsAll ++ reasonsAvail ++ reasonsInstalled) --TODO + | ExcludedPkg pkg reasonsAll reasonsAvail reasonsInstalled <- + PackageIndex.lookupDependency excluded dep ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown/Types.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,143 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency.TopDown.Types +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Types for the top-down dependency resolver. +----------------------------------------------------------------------------- +{-# LANGUAGE CPP #-} +module Distribution.Client.Dependency.TopDown.Types where + +import Distribution.Client.Types + ( SourcePackage(..), ConfiguredPackage(..) + , OptionalStanza, ConfiguredId(..) ) +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo ) +import qualified Distribution.Client.ComponentDeps as CD + +import Distribution.Package + ( PackageId, PackageIdentifier, Dependency + , Package(packageId) ) +import Distribution.PackageDescription + ( FlagAssignment ) + +-- ------------------------------------------------------------ +-- * The various kinds of packages +-- ------------------------------------------------------------ + +type SelectablePackage + = InstalledOrSource InstalledPackageEx UnconfiguredPackage + +type SelectedPackage + = InstalledOrSource InstalledPackageEx SemiConfiguredPackage + +data InstalledOrSource installed source + = InstalledOnly installed + | SourceOnly source + | InstalledAndSource installed source + deriving Eq + +data FinalSelectedPackage + = SelectedInstalled InstalledPackage + | SelectedSource ConfiguredPackage + +type TopologicalSortNumber = Int + +-- | InstalledPackage caches its dependencies as source package IDs. 
+data InstalledPackage
+   = InstalledPackage
+       InstalledPackageInfo
+       [PackageId]
+
+data InstalledPackageEx
+   = InstalledPackageEx
+       InstalledPackage
+       !TopologicalSortNumber
+       [PackageIdentifier]    -- transitive closure of installed deps
+
+data UnconfiguredPackage
+   = UnconfiguredPackage
+       SourcePackage
+       !TopologicalSortNumber
+       FlagAssignment
+       [OptionalStanza]
+
+data SemiConfiguredPackage
+   = SemiConfiguredPackage
+       SourcePackage     -- package info
+       FlagAssignment    -- total flag assignment for the package
+       [OptionalStanza]  -- enabled optional stanzas
+       [Dependency]      -- dependencies we end up with when we apply
+                         -- the flag assignment
+
+instance Package InstalledPackage where
+  packageId (InstalledPackage pkg _) = packageId pkg
+
+instance Package InstalledPackageEx where
+  packageId (InstalledPackageEx p _ _) = packageId p
+
+instance Package UnconfiguredPackage where
+  packageId (UnconfiguredPackage p _ _ _) = packageId p
+
+instance Package SemiConfiguredPackage where
+  packageId (SemiConfiguredPackage p _ _ _) = packageId p
+
+instance (Package installed, Package source)
+      => Package (InstalledOrSource installed source) where
+  packageId (InstalledOnly p ) = packageId p
+  packageId (SourceOnly p ) = packageId p
+  packageId (InstalledAndSource p _) = packageId p
+
+instance Package FinalSelectedPackage where
+  packageId (SelectedInstalled pkg) = packageId pkg
+  packageId (SelectedSource pkg) = packageId pkg
+
+
+-- | We can have constraints on selecting just installed or just source
+-- packages.
+--
+-- In particular, installed packages can only depend on other installed
+-- packages while packages that are not yet installed but which we plan to
+-- install can depend on installed or other not-yet-installed packages.
+--
+data InstalledConstraint = InstalledConstraint
+                         | SourceConstraint
+  deriving (Eq, Show)
+
+-- | Package dependencies
+--
+-- The top-down solver uses its own type class for package dependencies,
+-- because it wants to know these dependencies as PackageIds, rather than as
+-- ComponentIds (so it cannot use PackageFixedDeps).
+--
+-- Ideally we would switch the top-down solver over to use ComponentIds
+-- throughout; that means getting rid of this type class, and changing over the
+-- package index type to use Cabal's rather than cabal-install's. That will
+-- avoid the need for the local definitions of dependencyGraph and
+-- reverseTopologicalOrder in the top-down solver.
+--
+-- Note that the top-down solver does not (and probably will never) make a
+-- distinction between the various kinds of dependencies, so we return a flat
+-- list here. If we get rid of this type class then any use of `sourceDeps`
+-- should be replaced by @fold . depends@.
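+--
+-- As a small illustration (the names @ipkg@, @n@ and @closure@ are just
+-- placeholders): the instance for 'InstalledPackageEx' below simply returns
+-- the cached transitive closure of its installed dependencies,
+--
+-- > sourceDeps (InstalledPackageEx ipkg n closure) == closure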
+class Package a => PackageSourceDeps a where + sourceDeps :: a -> [PackageIdentifier] + +instance PackageSourceDeps InstalledPackageEx where + sourceDeps (InstalledPackageEx _ _ deps) = deps + +instance PackageSourceDeps ConfiguredPackage where + sourceDeps (ConfiguredPackage _ _ _ deps) = map confSrcId $ CD.nonSetupDeps deps + +instance PackageSourceDeps InstalledPackage where + sourceDeps (InstalledPackage _ deps) = deps + +instance PackageSourceDeps FinalSelectedPackage where + sourceDeps (SelectedInstalled pkg) = sourceDeps pkg + sourceDeps (SelectedSource pkg) = sourceDeps pkg + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/TopDown.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,1079 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency.Types +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Common types for dependency resolution. +----------------------------------------------------------------------------- +module Distribution.Client.Dependency.TopDown ( + topDownResolver + ) where + +import Distribution.Client.Dependency.TopDown.Types +import qualified Distribution.Client.Dependency.TopDown.Constraints as Constraints +import Distribution.Client.Dependency.TopDown.Constraints + ( Satisfiable(..) ) +import Distribution.Client.Types + ( SourcePackage(..), ConfiguredPackage(..) + , enableStanzas, ConfiguredId(..), fakeUnitId ) +import Distribution.Client.Dependency.Types + ( DependencyResolver, ResolverPackage(..) + , PackageConstraint(..), unlabelPackageConstraint + , PackagePreferences(..), InstalledPreference(..) + , Progress(..), foldProgress ) + +import qualified Distribution.Client.PackageIndex as PackageIndex +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.Client.ComponentDeps + ( ComponentDeps ) +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Client.PackageIndex + ( PackageIndex ) +import Distribution.Package + ( PackageName(..), PackageId, PackageIdentifier(..) + , UnitId(..), ComponentId(..) 
+ , Package(..), packageVersion, packageName + , Dependency(Dependency), thisPackageVersion, simplifyDependency ) +import Distribution.PackageDescription + ( PackageDescription(buildDepends) ) +import Distribution.Client.PackageUtils + ( externalBuildDepends ) +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription, flattenPackageDescription ) +import Distribution.Version + ( Version(..), VersionRange, withinRange, simplifyVersionRange + , UpperBound(..), asVersionIntervals ) +import Distribution.Compiler + ( CompilerInfo ) +import Distribution.System + ( Platform ) +import Distribution.Simple.Utils + ( equating, comparing ) +import Distribution.Text + ( display ) + +import Data.List + ( foldl', maximumBy, minimumBy, nub, sort, sortBy, groupBy ) +import Data.Maybe + ( fromJust, fromMaybe, catMaybes ) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( Monoid(mempty) ) +#endif +import Control.Monad + ( guard ) +import qualified Data.Set as Set +import Data.Set (Set) +import qualified Data.Map as Map +import qualified Data.Graph as Graph +import qualified Data.Array as Array +import Control.Exception + ( assert ) + +-- ------------------------------------------------------------ +-- * Search state types +-- ------------------------------------------------------------ + +type Constraints = Constraints.Constraints + InstalledPackageEx UnconfiguredPackage ExclusionReason +type SelectedPackages = PackageIndex SelectedPackage + +-- ------------------------------------------------------------ +-- * The search tree type +-- ------------------------------------------------------------ + +data SearchSpace inherited pkg + = ChoiceNode inherited [[(pkg, SearchSpace inherited pkg)]] + | Failure Failure + +-- ------------------------------------------------------------ +-- * Traverse a search tree +-- ------------------------------------------------------------ + +explore :: (PackageName -> PackagePreferences) + -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) + SelectablePackage + -> Progress Log Failure (SelectedPackages, Constraints) + +explore _ (Failure failure) = Fail failure +explore _ (ChoiceNode (s,c,_) []) = Done (s,c) +explore pref (ChoiceNode _ choices) = + case [ choice | [choice] <- choices ] of + ((_, node'):_) -> Step (logInfo node') (explore pref node') + [] -> Step (logInfo node') (explore pref node') + where + choice = minimumBy (comparing topSortNumber) choices + pkgname = packageName . fst . head $ choice + (_, node') = maximumBy (bestByPref pkgname) choice + where + topSortNumber choice = case fst (head choice) of + InstalledOnly (InstalledPackageEx _ i _) -> i + SourceOnly (UnconfiguredPackage _ i _ _) -> i + InstalledAndSource _ (UnconfiguredPackage _ i _ _) -> i + + bestByPref pkgname = case packageInstalledPreference of + PreferLatest -> + comparing (\(p,_) -> ( isPreferred p, packageId p)) + PreferInstalled -> + comparing (\(p,_) -> (isInstalled p, isPreferred p, packageId p)) + where + isInstalled (SourceOnly _) = False + isInstalled _ = True + isPreferred p = length . 
filter (packageVersion p `withinRange`) $ + preferredVersions + + (PackagePreferences preferredVersions packageInstalledPreference _) + = pref pkgname + + logInfo node = Select selected discarded + where (selected, discarded) = case node of + Failure _ -> ([], []) + ChoiceNode (_,_,changes) _ -> changes + +-- ------------------------------------------------------------ +-- * Generate a search tree +-- ------------------------------------------------------------ + +type ConfigurePackage = PackageIndex SelectablePackage + -> SelectablePackage + -> Either [Dependency] SelectedPackage + +-- | (packages selected, packages discarded) +type SelectionChanges = ([SelectedPackage], [PackageId]) + +searchSpace :: ConfigurePackage + -> Constraints + -> SelectedPackages + -> SelectionChanges + -> Set PackageName + -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) + SelectablePackage +searchSpace configure constraints selected changes next = + assert (Set.null (selectedSet `Set.intersection` next)) $ + assert (selectedSet `Set.isSubsetOf` Constraints.packages constraints) $ + assert (next `Set.isSubsetOf` Constraints.packages constraints) $ + + ChoiceNode (selected, constraints, changes) + [ [ (pkg, select name pkg) + | pkg <- PackageIndex.lookupPackageName available name ] + | name <- Set.elems next ] + where + available = Constraints.choices constraints + + selectedSet = Set.fromList (map packageName (PackageIndex.allPackages selected)) + + select name pkg = case configure available pkg of + Left missing -> Failure $ ConfigureFailed pkg + [ (dep, Constraints.conflicting constraints dep) + | dep <- missing ] + Right pkg' -> + case constrainDeps pkg' newDeps (addDeps constraints newPkgs) [] of + Left failure -> Failure failure + Right (constraints', newDiscarded) -> + searchSpace configure + constraints' selected' (newSelected, newDiscarded) next' + where + selected' = foldl' (flip PackageIndex.insert) selected newSelected + newSelected = + case Constraints.isPaired constraints (packageId pkg) of + Nothing -> [pkg'] + Just pkgid' -> [pkg', pkg''] + where + Just pkg'' = fmap (\(InstalledOnly p) -> InstalledOnly p) + (PackageIndex.lookupPackageId available pkgid') + + newPkgs = [ name' + | (Dependency name' _, _) <- newDeps + , null (PackageIndex.lookupPackageName selected' name') ] + newDeps = concatMap packageConstraints newSelected + next' = Set.delete name + $ foldl' (flip Set.insert) next newPkgs + +packageConstraints :: SelectedPackage -> [(Dependency, Bool)] +packageConstraints = either installedConstraints availableConstraints + . 
preferSource + where + preferSource (InstalledOnly pkg) = Left pkg + preferSource (SourceOnly pkg) = Right pkg + preferSource (InstalledAndSource _ pkg) = Right pkg + installedConstraints (InstalledPackageEx _ _ deps) = + [ (thisPackageVersion dep, True) + | dep <- deps ] + availableConstraints (SemiConfiguredPackage _ _ _ deps) = + [ (dep, False) | dep <- deps ] + +addDeps :: Constraints -> [PackageName] -> Constraints +addDeps = + foldr $ \pkgname cs -> + case Constraints.addTarget pkgname cs of + Satisfiable cs' () -> cs' + _ -> impossible "addDeps unsatisfiable" + +constrainDeps :: SelectedPackage -> [(Dependency, Bool)] -> Constraints + -> [PackageId] + -> Either Failure (Constraints, [PackageId]) +constrainDeps pkg [] cs discard = + case addPackageSelectConstraint (packageId pkg) cs of + Satisfiable cs' discard' -> Right (cs', discard' ++ discard) + _ -> impossible "constrainDeps unsatisfiable(1)" +constrainDeps pkg ((dep, installedConstraint):deps) cs discard = + case addPackageDependencyConstraint (packageId pkg) dep installedConstraint cs of + Satisfiable cs' discard' -> constrainDeps pkg deps cs' (discard' ++ discard) + Unsatisfiable -> impossible "constrainDeps unsatisfiable(2)" + ConflictsWith conflicts -> + Left (DependencyConflict pkg dep installedConstraint conflicts) + +-- ------------------------------------------------------------ +-- * The main algorithm +-- ------------------------------------------------------------ + +search :: ConfigurePackage + -> (PackageName -> PackagePreferences) + -> Constraints + -> Set PackageName + -> Progress Log Failure (SelectedPackages, Constraints) +search configure pref constraints = + explore pref . searchSpace configure constraints mempty ([], []) + +-- ------------------------------------------------------------ +-- * The top level resolver +-- ------------------------------------------------------------ + +-- | The main exported resolver, with string logging and failure types to fit +-- the standard 'DependencyResolver' interface. +-- +topDownResolver :: DependencyResolver +topDownResolver platform cinfo installedPkgIndex sourcePkgIndex _pkgConfigDB + preferences constraints targets = + mapMessages $ topDownResolver' + platform cinfo + (convertInstalledPackageIndex installedPkgIndex) + sourcePkgIndex + preferences + (map unlabelPackageConstraint constraints) + targets + where + mapMessages :: Progress Log Failure a -> Progress String String a + mapMessages = foldProgress (Step . showLog) (Fail . showFailure) Done + +-- | The native resolver with detailed structured logging and failure types. +-- +topDownResolver' :: Platform -> CompilerInfo + -> PackageIndex InstalledPackage + -> PackageIndex SourcePackage + -> (PackageName -> PackagePreferences) + -> [PackageConstraint] + -> [PackageName] + -> Progress Log Failure [ResolverPackage] +topDownResolver' platform cinfo installedPkgIndex sourcePkgIndex + preferences constraints targets = + fmap (uncurry finalise) + . 
(\cs -> search configure preferences cs initialPkgNames) + =<< pruneBottomUp platform cinfo + =<< addTopLevelConstraints constraints + =<< addTopLevelTargets targets emptyConstraintSet + + where + configure = configurePackage platform cinfo + emptyConstraintSet :: Constraints + emptyConstraintSet = Constraints.empty + (annotateInstalledPackages topSortNumber installedPkgIndex') + (annotateSourcePackages constraints topSortNumber sourcePkgIndex') + (installedPkgIndex', sourcePkgIndex') = + selectNeededSubset installedPkgIndex sourcePkgIndex initialPkgNames + topSortNumber = topologicalSortNumbering installedPkgIndex' sourcePkgIndex' + + initialPkgNames = Set.fromList targets + + finalise selected' constraints' = + map toResolverPackage + . PackageIndex.allPackages + . fst . improvePlan installedPkgIndex' constraints' + . PackageIndex.fromList + $ finaliseSelectedPackages preferences selected' constraints' + + toResolverPackage :: FinalSelectedPackage -> ResolverPackage + toResolverPackage (SelectedInstalled (InstalledPackage pkg _)) + = PreExisting pkg + toResolverPackage (SelectedSource pkg) = Configured pkg + +addTopLevelTargets :: [PackageName] + -> Constraints + -> Progress a Failure Constraints +addTopLevelTargets [] cs = Done cs +addTopLevelTargets (pkg:pkgs) cs = + case Constraints.addTarget pkg cs of + Satisfiable cs' () -> addTopLevelTargets pkgs cs' + Unsatisfiable -> Fail (NoSuchPackage pkg) + ConflictsWith _conflicts -> impossible "addTopLevelTargets conflicts" + + +addTopLevelConstraints :: [PackageConstraint] -> Constraints + -> Progress Log Failure Constraints +addTopLevelConstraints [] cs = Done cs +addTopLevelConstraints (PackageConstraintFlags _ _ :deps) cs = + addTopLevelConstraints deps cs + +addTopLevelConstraints (PackageConstraintVersion pkg ver:deps) cs = + case addTopLevelVersionConstraint pkg ver cs of + Satisfiable cs' pkgids -> + Step (AppliedVersionConstraint pkg ver pkgids) + (addTopLevelConstraints deps cs') + + Unsatisfiable -> + Fail (TopLevelVersionConstraintUnsatisfiable pkg ver) + + ConflictsWith conflicts -> + Fail (TopLevelVersionConstraintConflict pkg ver conflicts) + +addTopLevelConstraints (PackageConstraintInstalled pkg:deps) cs = + case addTopLevelInstalledConstraint pkg cs of + Satisfiable cs' pkgids -> + Step (AppliedInstalledConstraint pkg InstalledConstraint pkgids) + (addTopLevelConstraints deps cs') + + Unsatisfiable -> + Fail (TopLevelInstallConstraintUnsatisfiable pkg InstalledConstraint) + + ConflictsWith conflicts -> + Fail (TopLevelInstallConstraintConflict pkg InstalledConstraint conflicts) + +addTopLevelConstraints (PackageConstraintSource pkg:deps) cs = + case addTopLevelSourceConstraint pkg cs of + Satisfiable cs' pkgids -> + Step (AppliedInstalledConstraint pkg SourceConstraint pkgids) + (addTopLevelConstraints deps cs') + + Unsatisfiable -> + Fail (TopLevelInstallConstraintUnsatisfiable pkg SourceConstraint) + + ConflictsWith conflicts -> + Fail (TopLevelInstallConstraintConflict pkg SourceConstraint conflicts) + +addTopLevelConstraints (PackageConstraintStanzas _ _ : deps) cs = + addTopLevelConstraints deps cs + +-- | Add exclusion on available packages that cannot be configured. 
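+--
+-- A rough illustration (hypothetical package names): if every available
+-- version of @foo@ fails 'finalizePackageDescription' because its
+-- build-depends mention a package that is absent from the current set of
+-- choices, those versions are excluded up front with an
+-- 'ExcludedByConfigureFail' reason instead of being rediscovered as dead
+-- ends during the search.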
+-- +pruneBottomUp :: Platform -> CompilerInfo + -> Constraints -> Progress Log Failure Constraints +pruneBottomUp platform comp constraints = + foldr prune Done (initialPackages constraints) constraints + + where + prune pkgs rest cs = foldr addExcludeConstraint rest unconfigurable cs + where + unconfigurable = + [ (pkg, missing) -- if necessary we could look up missing reasons + | (Just pkg', pkg) <- zip (map getSourcePkg pkgs) pkgs + , Left missing <- [configure cs pkg'] ] + + addExcludeConstraint (pkg, missing) rest cs = + let reason = ExcludedByConfigureFail missing in + case addPackageExcludeConstraint (packageId pkg) reason cs of + Satisfiable cs' [pkgid]| packageId pkg == pkgid + -> Step (ExcludeUnconfigurable pkgid) (rest cs') + Satisfiable _ _ -> impossible "pruneBottomUp satisfiable" + _ -> Fail $ ConfigureFailed pkg + [ (dep, Constraints.conflicting cs dep) + | dep <- missing ] + + configure cs (UnconfiguredPackage (SourcePackage _ pkg _ _) _ flags stanzas) = + finalizePackageDescription flags (dependencySatisfiable cs) + platform comp [] (enableStanzas stanzas pkg) + dependencySatisfiable cs = + not . null . PackageIndex.lookupDependency (Constraints.choices cs) + + -- collect each group of packages (by name) in reverse topsort order + initialPackages = + reverse + . sortBy (comparing (topSortNumber . head)) + . PackageIndex.allPackagesByName + . Constraints.choices + + topSortNumber (InstalledOnly (InstalledPackageEx _ i _)) = i + topSortNumber (SourceOnly (UnconfiguredPackage _ i _ _)) = i + topSortNumber (InstalledAndSource _ (UnconfiguredPackage _ i _ _)) = i + + getSourcePkg (InstalledOnly _ ) = Nothing + getSourcePkg (SourceOnly spkg) = Just spkg + getSourcePkg (InstalledAndSource _ spkg) = Just spkg + + +configurePackage :: Platform -> CompilerInfo -> ConfigurePackage +configurePackage platform cinfo available spkg = case spkg of + InstalledOnly ipkg -> Right (InstalledOnly ipkg) + SourceOnly apkg -> fmap SourceOnly (configure apkg) + InstalledAndSource ipkg apkg -> fmap (InstalledAndSource ipkg) + (configure apkg) + where + configure (UnconfiguredPackage apkg@(SourcePackage _ p _ _) _ flags stanzas) = + case finalizePackageDescription flags dependencySatisfiable + platform cinfo [] + (enableStanzas stanzas p) of + Left missing -> Left missing + Right (pkg, flags') -> Right $ + SemiConfiguredPackage apkg flags' stanzas (externalBuildDepends pkg) + + dependencySatisfiable = not . null . PackageIndex.lookupDependency available + +-- | Annotate each installed packages with its set of transitive dependencies +-- and its topological sort number. +-- +annotateInstalledPackages :: (PackageName -> TopologicalSortNumber) + -> PackageIndex InstalledPackage + -> PackageIndex InstalledPackageEx +annotateInstalledPackages dfsNumber installed = PackageIndex.fromList + [ InstalledPackageEx pkg (dfsNumber (packageName pkg)) (transitiveDepends pkg) + | pkg <- PackageIndex.allPackages installed ] + where + transitiveDepends :: InstalledPackage -> [PackageId] + transitiveDepends = map (packageId . toPkg) . tail . Graph.reachable graph + . fromJust . toVertex . packageId + (graph, toPkg, toVertex) = dependencyGraph installed + + +-- | Annotate each available packages with its topological sort number and any +-- user-supplied partial flag assignment. 
+--
+annotateSourcePackages :: [PackageConstraint]
+                       -> (PackageName -> TopologicalSortNumber)
+                       -> PackageIndex SourcePackage
+                       -> PackageIndex UnconfiguredPackage
+annotateSourcePackages constraints dfsNumber sourcePkgIndex =
+    PackageIndex.fromList
+      [ UnconfiguredPackage pkg (dfsNumber name) (flagsFor name) (stanzasFor name)
+      | pkg <- PackageIndex.allPackages sourcePkgIndex
+      , let name = packageName pkg ]
+  where
+    flagsFor = fromMaybe [] . flip Map.lookup flagsMap
+    flagsMap = Map.fromList
+                 [ (name, flags)
+                 | PackageConstraintFlags name flags <- constraints ]
+    stanzasFor = fromMaybe [] . flip Map.lookup stanzasMap
+    stanzasMap = Map.fromListWith (++)
+                 [ (name, stanzas)
+                 | PackageConstraintStanzas name stanzas <- constraints ]
+
+-- | One of the heuristics we use when guessing which path to take in the
+-- search space is an ordering on the choices we make. It's generally better
+-- to make decisions about packages higher in the dep graph first since they
+-- place constraints on packages lower in the dep graph.
+--
+-- To pick them in that order we annotate each package with its topological
+-- sort number. So if package A depends on package B then package A will have
+-- a lower topological sort number than B and we'll make a choice about which
+-- version of A to pick before we make a choice about B (unless there is only
+-- one possible choice for B, in which case we pick that immediately).
+--
+-- To construct these topological sort numbers we combine and flatten the
+-- installed and source package sets. We consider only dependencies between
+-- named packages, not including versions, and for not-yet-configured packages
+-- we look at all the possible dependencies, not just those under any single
+-- flag assignment. This means we can actually get impossible combinations of
+-- edges and even cycles, but that doesn't really matter here, it's only a
+-- heuristic.
+--
+topologicalSortNumbering :: PackageIndex InstalledPackage
+                         -> PackageIndex SourcePackage
+                         -> (PackageName -> TopologicalSortNumber)
+topologicalSortNumbering installedPkgIndex sourcePkgIndex =
+    \pkgname -> let Just vertex = toVertex pkgname
+                 in topologicalSortNumbers Array.! vertex
+  where
+    topologicalSortNumbers = Array.array (Array.bounds graph)
+                                         (zip (Graph.topSort graph) [0..])
+    (graph, _, toVertex) = Graph.graphFromEdges $
+         [ ((), packageName pkg, nub deps)
+         | pkgs@(pkg:_) <- PackageIndex.allPackagesByName installedPkgIndex
+         , let deps = [ packageName dep
+                      | pkg' <- pkgs
+                      , dep <- sourceDeps pkg' ] ]
+      ++ [ ((), packageName pkg, nub deps)
+         | pkgs@(pkg:_) <- PackageIndex.allPackagesByName sourcePkgIndex
+         , let deps = [ depName
+                      | SourcePackage _ pkg' _ _ <- pkgs
+                      , Dependency depName _ <-
+                          buildDepends (flattenPackageDescription pkg') ] ]
+
+-- | We don't need the entire index (which is rather large and costly if we
+-- force it by examining the whole thing). So trace out the maximal subset of
+-- each index that we could possibly ever need. Do this by flattening packages
+-- and looking at the names of all possible dependencies.
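+--
+-- As a rough illustration (the package names are hypothetical): if the only
+-- target is @foo@, and the flattened descriptions of the available @foo@
+-- versions mention only @base@ and @bytestring@, then we keep just those
+-- names plus whatever their descriptions mention in turn, rather than
+-- forcing the whole installed and source indexes.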
+--
+selectNeededSubset :: PackageIndex InstalledPackage
+                   -> PackageIndex SourcePackage
+                   -> Set PackageName
+                   -> (PackageIndex InstalledPackage
+                      ,PackageIndex SourcePackage)
+selectNeededSubset installedPkgIndex sourcePkgIndex = select mempty mempty
+  where
+    select :: PackageIndex InstalledPackage
+           -> PackageIndex SourcePackage
+           -> Set PackageName
+           -> (PackageIndex InstalledPackage
+              ,PackageIndex SourcePackage)
+    select installedPkgIndex' sourcePkgIndex' remaining
+      | Set.null remaining = (installedPkgIndex', sourcePkgIndex')
+      | otherwise = select installedPkgIndex'' sourcePkgIndex'' remaining''
+      where
+        (next, remaining') = Set.deleteFindMin remaining
+        moreInstalled = PackageIndex.lookupPackageName installedPkgIndex next
+        moreSource    = PackageIndex.lookupPackageName sourcePkgIndex next
+        moreRemaining = -- we filter out packages already included in the indexes
+                        -- this avoids an infinite loop if a package depends on itself
+                        -- like base-3.0.3.0 with base-4.0.0.0
+                        filter notAlreadyIncluded
+                        $ [ packageName dep
+                          | pkg <- moreInstalled
+                          , dep <- sourceDeps pkg ]
+                       ++ [ name
+                          | SourcePackage _ pkg _ _ <- moreSource
+                          , Dependency name _ <-
+                              buildDepends (flattenPackageDescription pkg) ]
+        installedPkgIndex'' = foldl' (flip PackageIndex.insert)
+                                     installedPkgIndex' moreInstalled
+        sourcePkgIndex''    = foldl' (flip PackageIndex.insert)
+                                     sourcePkgIndex' moreSource
+        remaining''         = foldl' (flip Set.insert)
+                                     remaining' moreRemaining
+        notAlreadyIncluded name =
+            null (PackageIndex.lookupPackageName installedPkgIndex' name)
+         && null (PackageIndex.lookupPackageName sourcePkgIndex' name)
+
+
+-- | The old top down solver assumes that installed packages are indexed by
+-- their source package id. But these days they're actually indexed by an
+-- installed package id and there can be many installed packages with the same
+-- source package id. This function tries to do a conversion, but it can only
+-- be partial.
+--
+convertInstalledPackageIndex :: InstalledPackageIndex
+                             -> PackageIndex InstalledPackage
+convertInstalledPackageIndex index' = PackageIndex.fromList
+    -- There can be multiple installed instances of each package version,
+    -- like when the same package is installed in the global & user DBs.
+    -- InstalledPackageIndex.allPackagesBySourcePackageId gives us the
+    -- installed packages with the most preferred instances first, so by
+    -- picking the first we should get the user one. This is almost but not
+    -- quite the same as what ghc does.
+    [ InstalledPackage ipkg (sourceDepsOf index' ipkg)
+    | (_,ipkg:_) <- InstalledPackageIndex.allPackagesBySourcePackageId index' ]
+  where
+    -- The InstalledPackageInfo only lists dependencies by the
+    -- UnitId, which means we do not directly know the corresponding
+    -- source dependency. The only way to find out is to lookup the
+    -- UnitId to get the InstalledPackageInfo and look at its
+    -- source PackageId. But if the package is broken because it depends on
+    -- other packages that do not exist then we have a problem: we cannot find
+    -- the original source package id. Instead we make up a bogus package id.
+    -- This should have the same effect since it should be a dependency on a
+    -- nonexistent package.
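+    -- (Illustrative example with a made-up hash: a dangling dependency whose
+    -- ComponentId renders as @foo-1.0-abc123@ becomes a bogus source id with
+    -- package name @foo-1.0-abc123-broken@ and an empty version, which no
+    -- real package can ever satisfy.)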
+    sourceDepsOf index ipkg =
+      [ maybe (brokenPackageId depid) packageId mdep
+      | let depids = InstalledPackageInfo.depends ipkg
+            getpkg = InstalledPackageIndex.lookupUnitId index
+      , (depid, mdep) <- zip depids (map getpkg depids) ]
+
+    brokenPackageId (SimpleUnitId (ComponentId str)) =
+      PackageIdentifier (PackageName (str ++ "-broken")) (Version [] [])
+
+-- ------------------------------------------------------------
+-- * Post processing the solution
+-- ------------------------------------------------------------
+
+finaliseSelectedPackages :: (PackageName -> PackagePreferences)
+                         -> SelectedPackages
+                         -> Constraints
+                         -> [FinalSelectedPackage]
+finaliseSelectedPackages pref selected constraints =
+  map finaliseSelected (PackageIndex.allPackages selected)
+  where
+    remainingChoices = Constraints.choices constraints
+    finaliseSelected (InstalledOnly      ipkg     ) = finaliseInstalled ipkg
+    finaliseSelected (SourceOnly              apkg) = finaliseSource Nothing apkg
+    finaliseSelected (InstalledAndSource ipkg apkg) =
+      case PackageIndex.lookupPackageId remainingChoices (packageId ipkg) of
+        --picked package not in constraints
+        Nothing                       -> impossible "finaliseSelected no pkg"
+        -- to constrain to avail only:
+        Just (SourceOnly _)           -> impossible "finaliseSelected src only"
+        Just (InstalledOnly _)        -> finaliseInstalled ipkg
+        Just (InstalledAndSource _ _) -> finaliseSource (Just ipkg) apkg
+
+    finaliseInstalled (InstalledPackageEx pkg _ _) = SelectedInstalled pkg
+    finaliseSource mipkg (SemiConfiguredPackage pkg flags stanzas deps) =
+      SelectedSource (ConfiguredPackage pkg flags stanzas deps')
+      where
+        -- We cheat in the cabal solver, and classify all dependencies as
+        -- library dependencies.
+        deps' :: ComponentDeps [ConfiguredId]
+        deps' = CD.fromLibraryDeps $ map (confId . pickRemaining mipkg) deps
+
+        -- InstalledOrSource indicates that we either have a source package
+        -- available, or an installed one, or both. In the case that we have both
+        -- available, we don't yet know if we can pick the installed one (the
+        -- dependencies may not match up, for instance); this is verified in
+        -- `improvePlan`.
+        --
+        -- This means that at this point we cannot construct a valid installed
+        -- package ID yet for the dependencies. We therefore have two options:
+        --
+        -- * We could leave the installed package ID undefined here, and have a
+        --   separate pass over the output of the top-down solver, fixing all
+        --   dependencies so that if we depend on an already installed package we
+        --   use the proper installed package ID.
+        --
+        -- * We can _always_ use fake installed IDs, irrespective of whether the
+        --   dependency is on an already installed package or not. This is okay
+        --   because (i) the top-down solver does not (and never will) support
+        --   multiple package instances, and (ii) we initialize the FakeMap with
+        --   fake IDs for already installed packages.
+        --
+        -- For now we use the second option; if however we change the
+        -- implementation of these fake IDs so that we do away with the FakeMap
+        -- and update a package's reverse dependencies as we execute the install
+        -- plan and discover real package IDs, then this is no longer possible
+        -- and we have to implement the first option (see also Note [FakeMap]
+        -- in Cabal).
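+        --
+        -- Concretely (illustrative, see 'confId' just below): for every
+        -- dependency @dep@ we end up with
+        --
+        -- > confInstId (confId dep) == fakeUnitId (packageId dep)
+        --
+        -- whether or not an installed instance of that dependency exists.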
+ confId :: InstalledOrSource InstalledPackageEx UnconfiguredPackage -> ConfiguredId + confId pkg = ConfiguredId { + confSrcId = packageId pkg + , confInstId = fakeUnitId (packageId pkg) + } + + pickRemaining mipkg dep@(Dependency _name versionRange) = + case PackageIndex.lookupDependency remainingChoices dep of + [] -> impossible "pickRemaining no pkg" + [pkg'] -> pkg' + remaining -> assert (checkIsPaired remaining) + $ maximumBy bestByPref remaining + where + -- We order candidate packages to pick for a dependency by these + -- three factors. The last factor is just highest version wins. + bestByPref = + comparing (\p -> (isCurrent p, isPreferred p, packageVersion p)) + -- Is the package already used by the installed version of this + -- package? If so we should pick that first. This stops us from doing + -- silly things like deciding to rebuild haskell98 against base 3. + isCurrent = case mipkg :: Maybe InstalledPackageEx of + Nothing -> \_ -> False + Just ipkg -> \p -> packageId p `elem` sourceDeps ipkg + -- If there is no upper bound on the version range then we apply a + -- preferred version according to the hackage or user's suggested + -- version constraints. TODO: distinguish hacks from prefs + bounded = boundedAbove versionRange + isPreferred p + | bounded = boundedRank -- this is a dummy constant + | otherwise = length . filter (packageVersion p `withinRange`) $ + preferredVersions + where (PackagePreferences preferredVersions _ _) = pref (packageName p) + boundedRank = 0 -- any value will do + + boundedAbove :: VersionRange -> Bool + boundedAbove vr = case asVersionIntervals vr of + [] -> True -- this is the inconsistent version range. + intervals -> case last intervals of + (_, UpperBound _ _) -> True + (_, NoUpperBound ) -> False + + -- We really only expect to find more than one choice remaining when + -- we're finalising a dependency on a paired package. + checkIsPaired [p1, p2] = + case Constraints.isPaired constraints (packageId p1) of + Just p2' -> packageId p2' == packageId p2 + Nothing -> False + checkIsPaired _ = False + +-- | Improve an existing installation plan by, where possible, swapping +-- packages we plan to install with ones that are already installed. +-- This may add additional constraints due to the dependencies of installed +-- packages on other installed packages. +-- +improvePlan :: PackageIndex InstalledPackage + -> Constraints + -> PackageIndex FinalSelectedPackage + -> (PackageIndex FinalSelectedPackage, Constraints) +improvePlan installed constraints0 selected0 = + foldl' improve (selected0, constraints0) (reverseTopologicalOrder selected0) + where + improve (selected, constraints) = fromMaybe (selected, constraints) + . improvePkg selected constraints + + -- The idea is to improve the plan by swapping a configured package for + -- an equivalent installed one. 
For a particular package the condition is
+    -- that the package be in a configured state, that the same version be
+    -- already installed with the exact same dependencies, and that all the
+    -- packages in the plan that it depends on are in the installed state.
+    improvePkg selected constraints pkgid = do
+      SelectedSource pkg <- PackageIndex.lookupPackageId selected pkgid
+      ipkg <- PackageIndex.lookupPackageId installed pkgid
+      guard $ all (isInstalled selected) (sourceDeps pkg)
+      tryInstalled selected constraints [ipkg]
+
+    isInstalled selected pkgid =
+      case PackageIndex.lookupPackageId selected pkgid of
+        Just (SelectedInstalled _) -> True
+        _                          -> False
+
+    tryInstalled :: PackageIndex FinalSelectedPackage -> Constraints
+                 -> [InstalledPackage]
+                 -> Maybe (PackageIndex FinalSelectedPackage, Constraints)
+    tryInstalled selected constraints [] = Just (selected, constraints)
+    tryInstalled selected constraints (pkg:pkgs) =
+      case constraintsOk (packageId pkg) (sourceDeps pkg) constraints of
+        Nothing           -> Nothing
+        Just constraints' -> tryInstalled selected' constraints' pkgs'
+          where
+            selected' = PackageIndex.insert (SelectedInstalled pkg) selected
+            pkgs'     = catMaybes (map notSelected (sourceDeps pkg)) ++ pkgs
+            notSelected pkgid =
+              case (PackageIndex.lookupPackageId installed pkgid
+                   ,PackageIndex.lookupPackageId selected  pkgid) of
+                (Just pkg', Nothing) -> Just pkg'
+                _                    -> Nothing
+
+    constraintsOk _     []              constraints = Just constraints
+    constraintsOk pkgid (pkgid':pkgids) constraints =
+      case addPackageDependencyConstraint pkgid dep True constraints of
+        Satisfiable constraints' _ -> constraintsOk pkgid pkgids constraints'
+        _                          -> Nothing
+      where
+        dep = thisPackageVersion pkgid'
+
+    reverseTopologicalOrder :: PackageIndex FinalSelectedPackage -> [PackageId]
+    reverseTopologicalOrder index = map (packageId . toPkg)
+                                  . Graph.topSort
+                                  .
Graph.transposeG + $ graph + where (graph, toPkg, _) = dependencyGraph index + +-- ------------------------------------------------------------ +-- * Adding and recording constraints +-- ------------------------------------------------------------ + +addPackageSelectConstraint :: PackageId -> Constraints + -> Satisfiable Constraints + [PackageId] ExclusionReason +addPackageSelectConstraint pkgid = + Constraints.constrain pkgname constraint reason + where + pkgname = packageName pkgid + constraint ver _ = ver == packageVersion pkgid + reason = SelectedOther pkgid + +addPackageExcludeConstraint :: PackageId -> ExclusionReason + -> Constraints + -> Satisfiable Constraints + [PackageId] ExclusionReason +addPackageExcludeConstraint pkgid reason = + Constraints.constrain pkgname constraint reason + where + pkgname = packageName pkgid + constraint ver installed + | ver == packageVersion pkgid = installed + | otherwise = True + +addPackageDependencyConstraint :: PackageId -> Dependency -> Bool + -> Constraints + -> Satisfiable Constraints + [PackageId] ExclusionReason +addPackageDependencyConstraint pkgid dep@(Dependency pkgname verrange) + installedConstraint = + Constraints.constrain pkgname constraint reason + where + constraint ver installed = ver `withinRange` verrange + && if installedConstraint then installed else True + reason = ExcludedByPackageDependency pkgid dep installedConstraint + +addTopLevelVersionConstraint :: PackageName -> VersionRange + -> Constraints + -> Satisfiable Constraints + [PackageId] ExclusionReason +addTopLevelVersionConstraint pkgname verrange = + Constraints.constrain pkgname constraint reason + where + constraint ver _installed = ver `withinRange` verrange + reason = ExcludedByTopLevelConstraintVersion pkgname verrange + +addTopLevelInstalledConstraint, + addTopLevelSourceConstraint :: PackageName + -> Constraints + -> Satisfiable Constraints + [PackageId] ExclusionReason +addTopLevelInstalledConstraint pkgname = + Constraints.constrain pkgname constraint reason + where + constraint _ver installed = installed + reason = ExcludedByTopLevelConstraintInstalled pkgname + +addTopLevelSourceConstraint pkgname = + Constraints.constrain pkgname constraint reason + where + constraint _ver installed = not installed + reason = ExcludedByTopLevelConstraintSource pkgname + + +-- ------------------------------------------------------------ +-- * Reasons for constraints +-- ------------------------------------------------------------ + +-- | For every constraint we record we also record the reason that constraint +-- is needed. So if we end up failing due to conflicting constraints then we +-- can give an explnanation as to what was conflicting and why. +-- +data ExclusionReason = + + -- | We selected this other version of the package. That means we exclude + -- all the other versions. + SelectedOther PackageId + + -- | We excluded this version of the package because it failed to + -- configure probably because of unsatisfiable deps. + | ExcludedByConfigureFail [Dependency] + + -- | We excluded this version of the package because another package that + -- we selected imposed a dependency which this package did not satisfy. + | ExcludedByPackageDependency PackageId Dependency Bool + + -- | We excluded this version of the package because it did not satisfy + -- a dependency given as an original top level input. 
+ -- + | ExcludedByTopLevelConstraintVersion PackageName VersionRange + | ExcludedByTopLevelConstraintInstalled PackageName + | ExcludedByTopLevelConstraintSource PackageName + + deriving Eq + +-- | Given an excluded package and the reason it was excluded, produce a human +-- readable explanation. +-- +showExclusionReason :: PackageId -> ExclusionReason -> String +showExclusionReason pkgid (SelectedOther pkgid') = + display pkgid ++ " was excluded because " ++ + display pkgid' ++ " was selected instead" +showExclusionReason pkgid (ExcludedByConfigureFail missingDeps) = + display pkgid ++ " was excluded because it could not be configured. " + ++ "It requires " ++ listOf displayDep missingDeps +showExclusionReason pkgid (ExcludedByPackageDependency pkgid' dep installedConstraint) + = display pkgid ++ " was excluded because " ++ display pkgid' ++ " requires " + ++ (if installedConstraint then "an installed instance of " else "") + ++ displayDep dep +showExclusionReason pkgid (ExcludedByTopLevelConstraintVersion pkgname verRange) = + display pkgid ++ " was excluded because of the top level constraint " ++ + displayDep (Dependency pkgname verRange) +showExclusionReason pkgid (ExcludedByTopLevelConstraintInstalled pkgname) + = display pkgid ++ " was excluded because of the top level constraint '" + ++ display pkgname ++ " installed' which means that only installed instances " + ++ "of the package may be selected." +showExclusionReason pkgid (ExcludedByTopLevelConstraintSource pkgname) + = display pkgid ++ " was excluded because of the top level constraint '" + ++ display pkgname ++ " source' which means that only source versions " + ++ "of the package may be selected." + + +-- ------------------------------------------------------------ +-- * Logging progress and failures +-- ------------------------------------------------------------ + +data Log = Select [SelectedPackage] [PackageId] + | AppliedVersionConstraint PackageName VersionRange [PackageId] + | AppliedInstalledConstraint PackageName InstalledConstraint [PackageId] + | ExcludeUnconfigurable PackageId + +data Failure + = NoSuchPackage + PackageName + | ConfigureFailed + SelectablePackage + [(Dependency, [(PackageId, [ExclusionReason])])] + | DependencyConflict + SelectedPackage Dependency Bool + [(PackageId, [ExclusionReason])] + | TopLevelVersionConstraintConflict + PackageName VersionRange + [(PackageId, [ExclusionReason])] + | TopLevelVersionConstraintUnsatisfiable + PackageName VersionRange + | TopLevelInstallConstraintConflict + PackageName InstalledConstraint + [(PackageId, [ExclusionReason])] + | TopLevelInstallConstraintUnsatisfiable + PackageName InstalledConstraint + +showLog :: Log -> String +showLog (Select selected discarded) = case (selectedMsg, discardedMsg) of + ("", y) -> y + (x, "") -> x + (x, y) -> x ++ " and " ++ y + + where + selectedMsg = "selecting " ++ case selected of + [] -> "" + [s] -> display (packageId s) ++ " " ++ kind s + (s:ss) -> listOf id + $ (display (packageId s) ++ " " ++ kind s) + : [ display (packageVersion s') ++ " " ++ kind s' + | s' <- ss ] + + kind (InstalledOnly _) = "(installed)" + kind (SourceOnly _) = "(source)" + kind (InstalledAndSource _ _) = "(installed or source)" + + discardedMsg = case discarded of + [] -> "" + _ -> "discarding " ++ listOf id + [ element + | (pkgid:pkgids) <- groupBy (equating packageName) (sort discarded) + , element <- display pkgid : map (display . 
packageVersion) pkgids ] +showLog (AppliedVersionConstraint pkgname ver pkgids) = + "applying constraint " ++ display (Dependency pkgname ver) + ++ if null pkgids + then "" + else " which excludes " ++ listOf display pkgids +showLog (AppliedInstalledConstraint pkgname inst pkgids) = + "applying constraint " ++ display pkgname ++ " '" + ++ (case inst of InstalledConstraint -> "installed"; _ -> "source") ++ "' " + ++ if null pkgids + then "" + else "which excludes " ++ listOf display pkgids +showLog (ExcludeUnconfigurable pkgid) = + "excluding " ++ display pkgid ++ " (it cannot be configured)" + +showFailure :: Failure -> String +showFailure (NoSuchPackage pkgname) = + "The package " ++ display pkgname ++ " is unknown." +showFailure (ConfigureFailed pkg missingDeps) = + "cannot configure " ++ displayPkg pkg ++ ". It requires " + ++ listOf (displayDep . fst) missingDeps + ++ '\n' : unlines (map (uncurry whyNot) missingDeps) + + where + whyNot (Dependency name ver) [] = + "There is no available version of " ++ display name + ++ " that satisfies " ++ displayVer ver + + whyNot dep conflicts = + "For the dependency on " ++ displayDep dep + ++ " there are these packages: " ++ listOf display pkgs + ++ ". However none of them are available.\n" + ++ unlines [ showExclusionReason (packageId pkg') reason + | (pkg', reasons) <- conflicts, reason <- reasons ] + + where pkgs = map fst conflicts + +showFailure (DependencyConflict pkg dep installedConstraint conflicts) = + "dependencies conflict: " + ++ displayPkg pkg ++ " requires " + ++ (if installedConstraint then "an installed instance of " else "") + ++ displayDep dep ++ " however:\n" + ++ unlines [ showExclusionReason (packageId pkg') reason + | (pkg', reasons) <- conflicts, reason <- reasons ] + +showFailure (TopLevelVersionConstraintConflict name ver conflicts) = + "constraints conflict: we have the top level constraint " + ++ displayDep (Dependency name ver) ++ ", but\n" + ++ unlines [ showExclusionReason (packageId pkg') reason + | (pkg', reasons) <- conflicts, reason <- reasons ] + +showFailure (TopLevelVersionConstraintUnsatisfiable name ver) = + "There is no available version of " ++ display name + ++ " that satisfies " ++ displayVer ver + +showFailure (TopLevelInstallConstraintConflict name InstalledConstraint conflicts) = + "constraints conflict: " + ++ "top level constraint '" ++ display name ++ " installed' however\n" + ++ unlines [ showExclusionReason (packageId pkg') reason + | (pkg', reasons) <- conflicts, reason <- reasons ] + +showFailure (TopLevelInstallConstraintUnsatisfiable name InstalledConstraint) = + "There is no installed version of " ++ display name + +showFailure (TopLevelInstallConstraintConflict name SourceConstraint conflicts) = + "constraints conflict: " + ++ "top level constraint '" ++ display name ++ " source' however\n" + ++ unlines [ showExclusionReason (packageId pkg') reason + | (pkg', reasons) <- conflicts, reason <- reasons ] + +showFailure (TopLevelInstallConstraintUnsatisfiable name SourceConstraint) = + "There is no available source version of " ++ display name + +displayVer :: VersionRange -> String +displayVer = display . simplifyVersionRange + +displayDep :: Dependency -> String +displayDep = display . 
simplifyDependency + + +-- ------------------------------------------------------------ +-- * Utils +-- ------------------------------------------------------------ + +impossible :: String -> a +impossible msg = internalError $ "assertion failure: " ++ msg + +internalError :: String -> a +internalError msg = error $ "internal error: " ++ msg + +displayPkg :: Package pkg => pkg -> String +displayPkg = display . packageId + +listOf :: (a -> String) -> [a] -> String +listOf _ [] = [] +listOf disp [x0] = disp x0 +listOf disp (x0:x1:xs) = disp x0 ++ go x1 xs + where go x [] = " and " ++ disp x + go x (x':xs') = ", " ++ disp x ++ go x' xs' + +-- ------------------------------------------------------------ +-- * Construct a dependency graph +-- ------------------------------------------------------------ + +-- | Builds a graph of the package dependencies. +-- +-- Dependencies on other packages that are not in the index are discarded. +-- You can check if there are any such dependencies with 'brokenPackages'. +-- +-- The top-down solver gets its own implementation, because both +-- `dependencyGraph` in `Distribution.Client.PlanIndex` (in cabal-install) and +-- `dependencyGraph` in `Distribution.Simple.PackageIndex` (in Cabal) both work +-- with `PackageIndex` from `Cabal` (that is, a package index indexed by +-- installed package IDs rather than package names). +-- +-- Ideally we would switch the top-down solver over to use that too, so that +-- this duplication could be avoided, but that's a bit of work and the top-down +-- solver is legacy code anyway. +-- +-- (NOTE: This is called at two types: InstalledPackage and FinalSelectedPackage.) +dependencyGraph :: PackageSourceDeps pkg + => PackageIndex pkg + -> (Graph.Graph, + Graph.Vertex -> pkg, + PackageId -> Maybe Graph.Vertex) +dependencyGraph index = (graph, vertexToPkg, pkgIdToVertex) + where + graph = Array.listArray bounds $ + map (catMaybes . map pkgIdToVertex . sourceDeps) pkgs + vertexToPkg vertex = pkgTable Array.! vertex + pkgIdToVertex = binarySearch 0 topBound + + pkgTable = Array.listArray bounds pkgs + pkgIdTable = Array.listArray bounds (map packageId pkgs) + pkgs = sortBy (comparing packageId) (PackageIndex.allPackages index) + topBound = length pkgs - 1 + bounds = (0, topBound) + + binarySearch a b key + | a > b = Nothing + | otherwise = case compare key (pkgIdTable Array.! mid) of + LT -> binarySearch a (mid-1) key + EQ -> Just mid + GT -> binarySearch (mid+1) b key + where mid = (a + b) `div` 2 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency/Types.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,318 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency.Types +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Common types for dependency resolution. 
+----------------------------------------------------------------------------- +module Distribution.Client.Dependency.Types ( + PreSolver(..), + Solver(..), + DependencyResolver, + ResolverPackage(..), + + PackageConstraint(..), + showPackageConstraint, + PackagePreferences(..), + InstalledPreference(..), + PackagesPreferenceDefault(..), + + Progress(..), + foldProgress, + + LabeledPackageConstraint(..), + ConstraintSource(..), + unlabelPackageConstraint, + showConstraintSource + + ) where + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative + ( Applicative(..) ) +#endif +import Control.Applicative + ( Alternative(..) ) + +import Data.Char + ( isAlpha, toLower ) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( Monoid(..) ) +#endif + +import Distribution.Client.PkgConfigDb + ( PkgConfigDb ) +import Distribution.Client.Types + ( OptionalStanza(..), SourcePackage(..), ConfiguredPackage ) + +import qualified Distribution.Compat.ReadP as Parse + ( pfail, munch1 ) +import Distribution.PackageDescription + ( FlagAssignment, FlagName(..) ) +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo ) +import qualified Distribution.Client.PackageIndex as PackageIndex + ( PackageIndex ) +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import Distribution.Package + ( PackageName ) +import Distribution.Version + ( VersionRange, simplifyVersionRange ) +import Distribution.Compiler + ( CompilerInfo ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( Text(..), display ) + +import Text.PrettyPrint + ( text ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary(..)) + +import Prelude hiding (fail) + + +-- | All the solvers that can be selected. +data PreSolver = AlwaysTopDown | AlwaysModular | Choose + deriving (Eq, Ord, Show, Bounded, Enum, Generic) + +-- | All the solvers that can be used. +data Solver = TopDown | Modular + deriving (Eq, Ord, Show, Bounded, Enum, Generic) + +instance Binary PreSolver +instance Binary Solver + +instance Text PreSolver where + disp AlwaysTopDown = text "topdown" + disp AlwaysModular = text "modular" + disp Choose = text "choose" + parse = do + name <- Parse.munch1 isAlpha + case map toLower name of + "topdown" -> return AlwaysTopDown + "modular" -> return AlwaysModular + "choose" -> return Choose + _ -> Parse.pfail + +-- | A dependency resolver is a function that works out an installation plan +-- given the set of installed and available packages and a set of deps to +-- solve for. +-- +-- The reason for this interface is because there are dozens of approaches to +-- solving the package dependency problem and we want to make it easy to swap +-- in alternatives. +-- +type DependencyResolver = Platform + -> CompilerInfo + -> InstalledPackageIndex + -> PackageIndex.PackageIndex SourcePackage + -> PkgConfigDb + -> (PackageName -> PackagePreferences) + -> [LabeledPackageConstraint] + -> [PackageName] + -> Progress String String [ResolverPackage] + +-- | The dependency resolver picks either pre-existing installed packages +-- or it picks source packages along with package configuration. +-- +-- This is like the 'InstallPlan.PlanPackage' but with fewer cases. +-- +data ResolverPackage = PreExisting InstalledPackageInfo + | Configured ConfiguredPackage + +-- | Per-package constraints. Package constraints must be respected by the +-- solver. 
Multiple constraints for each package can be given, though obviously +-- it is possible to construct conflicting constraints (eg impossible version +-- range or inconsistent flag assignment). +-- +data PackageConstraint + = PackageConstraintVersion PackageName VersionRange + | PackageConstraintInstalled PackageName + | PackageConstraintSource PackageName + | PackageConstraintFlags PackageName FlagAssignment + | PackageConstraintStanzas PackageName [OptionalStanza] + deriving (Eq,Show,Generic) + +instance Binary PackageConstraint + +-- | Provide a textual representation of a package constraint +-- for debugging purposes. +-- +showPackageConstraint :: PackageConstraint -> String +showPackageConstraint (PackageConstraintVersion pn vr) = + display pn ++ " " ++ display (simplifyVersionRange vr) +showPackageConstraint (PackageConstraintInstalled pn) = + display pn ++ " installed" +showPackageConstraint (PackageConstraintSource pn) = + display pn ++ " source" +showPackageConstraint (PackageConstraintFlags pn fs) = + "flags " ++ display pn ++ " " ++ unwords (map (uncurry showFlag) fs) + where + showFlag (FlagName f) True = "+" ++ f + showFlag (FlagName f) False = "-" ++ f +showPackageConstraint (PackageConstraintStanzas pn ss) = + "stanzas " ++ display pn ++ " " ++ unwords (map showStanza ss) + where + showStanza TestStanzas = "test" + showStanza BenchStanzas = "bench" + +-- | Per-package preferences on the version. It is a soft constraint that the +-- 'DependencyResolver' should try to respect where possible. It consists of +-- an 'InstalledPreference' which says if we prefer versions of packages +-- that are already installed. It also has (possibly multiple) +-- 'PackageVersionPreference's which are suggested constraints on the version +-- number. The resolver should try to use package versions that satisfy +-- the maximum number of the suggested version constraints. +-- +-- It is not specified if preferences on some packages are more important than +-- others. +-- +data PackagePreferences = PackagePreferences [VersionRange] + InstalledPreference + [OptionalStanza] + +-- | Whether we prefer an installed version of a package or simply the latest +-- version. +-- +data InstalledPreference = PreferInstalled | PreferLatest + deriving Show + +-- | Global policy for all packages to say if we prefer package versions that +-- are already installed locally or if we just prefer the latest available. +-- +data PackagesPreferenceDefault = + + -- | Always prefer the latest version irrespective of any existing + -- installed version. + -- + -- * This is the standard policy for upgrade. + -- + PreferAllLatest + + -- | Always prefer the installed versions over ones that would need to be + -- installed. Secondarily, prefer latest versions (eg the latest installed + -- version or if there are none then the latest source version). + | PreferAllInstalled + + -- | Prefer the latest version for packages that are explicitly requested + -- but prefers the installed version for any other packages. + -- + -- * This is the standard policy for install. + -- + | PreferLatestForSelected + deriving Show + +-- | A type to represent the unfolding of an expensive long running +-- calculation that may fail. We may get intermediate steps before the final +-- result which may be used to indicate progress and\/or logging messages. +-- +data Progress step fail done = Step step (Progress step fail done) + | Fail fail + | Done done + deriving (Functor) + +-- | Consume a 'Progress' calculation. 
Much like 'foldr' for lists but with two +-- base cases, one for a final result and one for failure. +-- +-- Eg to convert into a simple 'Either' result use: +-- +-- > foldProgress (flip const) Left Right +-- +foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) + -> Progress step fail done -> a +foldProgress step fail done = fold + where fold (Step s p) = step s (fold p) + fold (Fail f) = fail f + fold (Done r) = done r + +instance Monad (Progress step fail) where + return = pure + p >>= f = foldProgress Step Fail f p + +instance Applicative (Progress step fail) where + pure a = Done a + p <*> x = foldProgress Step Fail (flip fmap x) p + +instance Monoid fail => Alternative (Progress step fail) where + empty = Fail mempty + p <|> q = foldProgress Step (const q) Done p + +-- | 'PackageConstraint' labeled with its source. +data LabeledPackageConstraint + = LabeledPackageConstraint PackageConstraint ConstraintSource + +unlabelPackageConstraint :: LabeledPackageConstraint -> PackageConstraint +unlabelPackageConstraint (LabeledPackageConstraint pc _) = pc + +-- | Source of a 'PackageConstraint'. +data ConstraintSource = + + -- | Main config file, which is ~/.cabal/config by default. + ConstraintSourceMainConfig FilePath + + -- | Local cabal.project file + | ConstraintSourceProjectConfig FilePath + + -- | Sandbox config file, which is ./cabal.sandbox.config by default. + | ConstraintSourceSandboxConfig FilePath + + -- | User config file, which is ./cabal.config by default. + | ConstraintSourceUserConfig FilePath + + -- | Flag specified on the command line. + | ConstraintSourceCommandlineFlag + + -- | Target specified by the user, e.g., @cabal install package-0.1.0.0@ + -- implies @package==0.1.0.0@. + | ConstraintSourceUserTarget + + -- | Internal requirement to use installed versions of packages like ghc-prim. + | ConstraintSourceNonUpgradeablePackage + + -- | Internal requirement to use the add-source version of a package when that + -- version is installed and the source is modified. + | ConstraintSourceModifiedAddSourceDep + + -- | Internal constraint used by @cabal freeze@. + | ConstraintSourceFreeze + + -- | Constraint specified by a config file, a command line flag, or a user + -- target, when a more specific source is not known. + | ConstraintSourceConfigFlagOrTarget + + -- | The source of the constraint is not specified. + | ConstraintSourceUnknown + deriving (Eq, Show, Generic) + +instance Binary ConstraintSource + +-- | Description of a 'ConstraintSource'. 
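+--
+-- For example (using the default path mentioned for the user config file):
+--
+-- > showConstraintSource (ConstraintSourceUserConfig "./cabal.config")
+-- >   == "user config ./cabal.config"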
+showConstraintSource :: ConstraintSource -> String +showConstraintSource (ConstraintSourceMainConfig path) = + "main config " ++ path +showConstraintSource (ConstraintSourceProjectConfig path) = + "project config " ++ path +showConstraintSource (ConstraintSourceSandboxConfig path) = + "sandbox config " ++ path +showConstraintSource (ConstraintSourceUserConfig path)= "user config " ++ path +showConstraintSource ConstraintSourceCommandlineFlag = "command line flag" +showConstraintSource ConstraintSourceUserTarget = "user target" +showConstraintSource ConstraintSourceNonUpgradeablePackage = + "non-upgradeable package" +showConstraintSource ConstraintSourceModifiedAddSourceDep = + "modified add-source dependency" +showConstraintSource ConstraintSourceFreeze = "cabal freeze" +showConstraintSource ConstraintSourceConfigFlagOrTarget = + "config file, command line flag, or user target" +showConstraintSource ConstraintSourceUnknown = "unknown source" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Dependency.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,890 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007 +-- Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Top level interface to dependency resolution. +----------------------------------------------------------------------------- +module Distribution.Client.Dependency ( + -- * The main package dependency resolver + chooseSolver, + resolveDependencies, + Progress(..), + foldProgress, + + -- * Alternate, simple resolver that does not do dependencies recursively + resolveWithoutDependencies, + + -- * Constructing resolver policies + DepResolverParams(..), + PackageConstraint(..), + PackagesPreferenceDefault(..), + PackagePreference(..), + InstalledPreference(..), + + -- ** Standard policy + standardInstallPolicy, + PackageSpecifier(..), + + -- ** Sandbox policy + applySandboxInstallPolicy, + + -- ** Extra policy options + dontUpgradeNonUpgradeablePackages, + hideBrokenInstalledPackages, + upgradeDependencies, + reinstallTargets, + + -- ** Policy utils + addConstraints, + addPreferences, + setPreferenceDefault, + setReorderGoals, + setIndependentGoals, + setAvoidReinstalls, + setShadowPkgs, + setStrongFlags, + setMaxBackjumps, + addSourcePackages, + hideInstalledPackagesSpecificByUnitId, + hideInstalledPackagesSpecificBySourcePackageId, + hideInstalledPackagesAllVersions, + removeUpperBounds, + addDefaultSetupDependencies, + ) where + +import Distribution.Client.Dependency.TopDown + ( topDownResolver ) +import Distribution.Client.Dependency.Modular + ( modularResolver, SolverConfig(..) 
) +import qualified Distribution.Client.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan (InstallPlan) +import Distribution.Client.PkgConfigDb (PkgConfigDb) +import Distribution.Client.Types + ( SourcePackageDb(SourcePackageDb), SourcePackage(..) + , ConfiguredPackage(..), ConfiguredId(..) + , OptionalStanza(..), enableStanzas ) +import Distribution.Client.Dependency.Types + ( PreSolver(..), Solver(..), DependencyResolver, ResolverPackage(..) + , PackageConstraint(..), showPackageConstraint + , LabeledPackageConstraint(..), unlabelPackageConstraint + , ConstraintSource(..), showConstraintSource + , PackagePreferences(..), InstalledPreference(..) + , PackagesPreferenceDefault(..) + , Progress(..), foldProgress ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) ) +import Distribution.Client.Targets +import Distribution.Client.ComponentDeps (ComponentDeps) +import qualified Distribution.Client.ComponentDeps as CD +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Package + ( PackageName(..), PackageIdentifier(PackageIdentifier), PackageId + , Package(..), packageName, packageVersion + , UnitId, Dependency(Dependency)) +import qualified Distribution.PackageDescription as PD +import qualified Distribution.PackageDescription.Configuration as PD +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription ) +import Distribution.Client.PackageUtils + ( externalBuildDepends ) +import Distribution.Version + ( VersionRange, Version(..), anyVersion, orLaterVersion, thisVersion + , withinRange, simplifyVersionRange ) +import Distribution.Compiler + ( CompilerInfo(..) ) +import Distribution.System + ( Platform ) +import Distribution.Client.Utils + ( duplicates, duplicatesBy, mergeBy, MergeResult(..) ) +import Distribution.Simple.Utils + ( comparing, warn, info ) +import Distribution.Simple.Configure + ( relaxPackageDeps ) +import Distribution.Simple.Setup + ( AllowNewer(..) ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) + +import Data.List + ( foldl', sort, sortBy, nubBy, maximumBy, intercalate, nub ) +import Data.Function (on) +import Data.Maybe (fromMaybe) +import qualified Data.Map as Map +import qualified Data.Set as Set +import Data.Set (Set) +import Control.Exception + ( assert ) + + +-- ------------------------------------------------------------ +-- * High level planner policy +-- ------------------------------------------------------------ + +-- | The set of parameters to the dependency resolver. These parameters are +-- relatively low level but many kinds of high level policies can be +-- implemented in terms of adjustments to the parameters. 
+-- +data DepResolverParams = DepResolverParams { + depResolverTargets :: [PackageName], + depResolverConstraints :: [LabeledPackageConstraint], + depResolverPreferences :: [PackagePreference], + depResolverPreferenceDefault :: PackagesPreferenceDefault, + depResolverInstalledPkgIndex :: InstalledPackageIndex, + depResolverSourcePkgIndex :: PackageIndex.PackageIndex SourcePackage, + depResolverReorderGoals :: Bool, + depResolverIndependentGoals :: Bool, + depResolverAvoidReinstalls :: Bool, + depResolverShadowPkgs :: Bool, + depResolverStrongFlags :: Bool, + depResolverMaxBackjumps :: Maybe Int + } + +showDepResolverParams :: DepResolverParams -> String +showDepResolverParams p = + "targets: " ++ intercalate ", " (map display (depResolverTargets p)) + ++ "\nconstraints: " + ++ concatMap (("\n " ++) . showLabeledConstraint) + (depResolverConstraints p) + ++ "\npreferences: " + ++ concatMap (("\n " ++) . showPackagePreference) + (depResolverPreferences p) + ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) + ++ "\nreorder goals: " ++ show (depResolverReorderGoals p) + ++ "\nindependent goals: " ++ show (depResolverIndependentGoals p) + ++ "\navoid reinstalls: " ++ show (depResolverAvoidReinstalls p) + ++ "\nshadow packages: " ++ show (depResolverShadowPkgs p) + ++ "\nstrong flags: " ++ show (depResolverStrongFlags p) + ++ "\nmax backjumps: " ++ maybe "infinite" show + (depResolverMaxBackjumps p) + where + showLabeledConstraint :: LabeledPackageConstraint -> String + showLabeledConstraint (LabeledPackageConstraint pc src) = + showPackageConstraint pc ++ " (" ++ showConstraintSource src ++ ")" + +-- | A package selection preference for a particular package. +-- +-- Preferences are soft constraints that the dependency resolver should try to +-- respect where possible. It is not specified if preferences on some packages +-- are more important than others. +-- +data PackagePreference = + + -- | A suggested constraint on the version number. + PackageVersionPreference PackageName VersionRange + + -- | If we prefer versions of packages that are already installed. + | PackageInstalledPreference PackageName InstalledPreference + + -- | If we would prefer to enable these optional stanzas + -- (i.e. test suites and/or benchmarks) + | PackageStanzasPreference PackageName [OptionalStanza] + + +-- | Provide a textual representation of a package preference +-- for debugging purposes. 
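-- An illustrative sketch (hypothetical package name and version range;
-- assumes the 'PackagePreference' constructors above are in scope): soft
-- preferences for a single package, i.e. prefer versions from 1.2 onwards,
-- prefer an already installed copy, and enable its test suite when possible.
examplePreferences :: [PackagePreference]
examplePreferences =
    [ PackageVersionPreference   (PackageName "foo")
                                 (orLaterVersion (Version [1,2] []))
    , PackageInstalledPreference (PackageName "foo") PreferInstalled
    , PackageStanzasPreference   (PackageName "foo") [TestStanzas]
    ]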
+-- +showPackagePreference :: PackagePreference -> String +showPackagePreference (PackageVersionPreference pn vr) = + display pn ++ " " ++ display (simplifyVersionRange vr) +showPackagePreference (PackageInstalledPreference pn ip) = + display pn ++ " " ++ show ip +showPackagePreference (PackageStanzasPreference pn st) = + display pn ++ " " ++ show st + +basicDepResolverParams :: InstalledPackageIndex + -> PackageIndex.PackageIndex SourcePackage + -> DepResolverParams +basicDepResolverParams installedPkgIndex sourcePkgIndex = + DepResolverParams { + depResolverTargets = [], + depResolverConstraints = [], + depResolverPreferences = [], + depResolverPreferenceDefault = PreferLatestForSelected, + depResolverInstalledPkgIndex = installedPkgIndex, + depResolverSourcePkgIndex = sourcePkgIndex, + depResolverReorderGoals = False, + depResolverIndependentGoals = False, + depResolverAvoidReinstalls = False, + depResolverShadowPkgs = False, + depResolverStrongFlags = False, + depResolverMaxBackjumps = Nothing + } + +addTargets :: [PackageName] + -> DepResolverParams -> DepResolverParams +addTargets extraTargets params = + params { + depResolverTargets = extraTargets ++ depResolverTargets params + } + +addConstraints :: [LabeledPackageConstraint] + -> DepResolverParams -> DepResolverParams +addConstraints extraConstraints params = + params { + depResolverConstraints = extraConstraints + ++ depResolverConstraints params + } + +addPreferences :: [PackagePreference] + -> DepResolverParams -> DepResolverParams +addPreferences extraPreferences params = + params { + depResolverPreferences = extraPreferences + ++ depResolverPreferences params + } + +setPreferenceDefault :: PackagesPreferenceDefault + -> DepResolverParams -> DepResolverParams +setPreferenceDefault preferenceDefault params = + params { + depResolverPreferenceDefault = preferenceDefault + } + +setReorderGoals :: Bool -> DepResolverParams -> DepResolverParams +setReorderGoals b params = + params { + depResolverReorderGoals = b + } + +setIndependentGoals :: Bool -> DepResolverParams -> DepResolverParams +setIndependentGoals b params = + params { + depResolverIndependentGoals = b + } + +setAvoidReinstalls :: Bool -> DepResolverParams -> DepResolverParams +setAvoidReinstalls b params = + params { + depResolverAvoidReinstalls = b + } + +setShadowPkgs :: Bool -> DepResolverParams -> DepResolverParams +setShadowPkgs b params = + params { + depResolverShadowPkgs = b + } + +setStrongFlags :: Bool -> DepResolverParams -> DepResolverParams +setStrongFlags b params = + params { + depResolverStrongFlags = b + } + +setMaxBackjumps :: Maybe Int -> DepResolverParams -> DepResolverParams +setMaxBackjumps n params = + params { + depResolverMaxBackjumps = n + } + +-- | Some packages are specific to a given compiler version and should never be +-- upgraded. +dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams +dontUpgradeNonUpgradeablePackages params = + addConstraints extraConstraints params + where + extraConstraints = + [ LabeledPackageConstraint + (PackageConstraintInstalled pkgname) + ConstraintSourceNonUpgradeablePackage + | notElem (PackageName "base") (depResolverTargets params) + , pkgname <- map PackageName [ "base", "ghc-prim", "integer-gmp" + , "integer-simple" ] + , isInstalled pkgname ] + -- TODO: the top down resolver chokes on the base constraints + -- below when there are no targets and thus no dep on base. + -- Need to refactor constraints separate from needing packages. + isInstalled = not . null + . 
InstalledPackageIndex.lookupPackageName + (depResolverInstalledPkgIndex params) + +addSourcePackages :: [SourcePackage] + -> DepResolverParams -> DepResolverParams +addSourcePackages pkgs params = + params { + depResolverSourcePkgIndex = + foldl (flip PackageIndex.insert) + (depResolverSourcePkgIndex params) pkgs + } + +hideInstalledPackagesSpecificByUnitId :: [UnitId] + -> DepResolverParams + -> DepResolverParams +hideInstalledPackagesSpecificByUnitId pkgids params = + --TODO: this should work using exclude constraints instead + params { + depResolverInstalledPkgIndex = + foldl' (flip InstalledPackageIndex.deleteUnitId) + (depResolverInstalledPkgIndex params) pkgids + } + +hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] + -> DepResolverParams + -> DepResolverParams +hideInstalledPackagesSpecificBySourcePackageId pkgids params = + --TODO: this should work using exclude constraints instead + params { + depResolverInstalledPkgIndex = + foldl' (flip InstalledPackageIndex.deleteSourcePackageId) + (depResolverInstalledPkgIndex params) pkgids + } + +hideInstalledPackagesAllVersions :: [PackageName] + -> DepResolverParams -> DepResolverParams +hideInstalledPackagesAllVersions pkgnames params = + --TODO: this should work using exclude constraints instead + params { + depResolverInstalledPkgIndex = + foldl' (flip InstalledPackageIndex.deletePackageName) + (depResolverInstalledPkgIndex params) pkgnames + } + + +hideBrokenInstalledPackages :: DepResolverParams -> DepResolverParams +hideBrokenInstalledPackages params = + hideInstalledPackagesSpecificByUnitId pkgids params + where + pkgids = map Installed.installedUnitId + . InstalledPackageIndex.reverseDependencyClosure + (depResolverInstalledPkgIndex params) + . map (Installed.installedUnitId . fst) + . InstalledPackageIndex.brokenPackages + $ depResolverInstalledPkgIndex params + +-- | Remove upper bounds in dependencies using the policy specified by the +-- 'AllowNewer' argument (all/some/none). +-- +-- Note: It's important to apply 'removeUpperBounds' after +-- 'addSourcePackages'. Otherwise, the packages inserted by +-- 'addSourcePackages' won't have upper bounds in dependencies relaxed. +-- +removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams +removeUpperBounds AllowNewerNone params = params +removeUpperBounds allowNewer params = + params { + depResolverSourcePkgIndex = sourcePkgIndex' + } + where + sourcePkgIndex' = fmap relaxDeps $ depResolverSourcePkgIndex params + + relaxDeps :: SourcePackage -> SourcePackage + relaxDeps srcPkg = srcPkg { + packageDescription = relaxPackageDeps allowNewer + (packageDescription srcPkg) + } + +-- | Supply defaults for packages without explicit Setup dependencies +-- +-- Note: It's important to apply 'addDefaultSetupDepends' after +-- 'addSourcePackages'. Otherwise, the packages inserted by +-- 'addSourcePackages' won't have upper bounds in dependencies relaxed. 
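-- An illustrative sketch (hypothetical targets and constraint; assumes the
-- adjustment functions above are in scope): high-level policies are built by
-- composing small tweaks to 'DepResolverParams' on top of
-- 'basicDepResolverParams', much as 'standardInstallPolicy' does below.
exampleParams :: InstalledPackageIndex
              -> PackageIndex.PackageIndex SourcePackage
              -> DepResolverParams
exampleParams installedPkgIndex sourcePkgIndex =
      setMaxBackjumps (Just 2000)            -- bound the solver's search
    . setReorderGoals True                   -- allow goal reordering
    . addConstraints
        [ LabeledPackageConstraint
            (PackageConstraintVersion (PackageName "foo")
                                      (orLaterVersion (Version [1,2] [])))
            ConstraintSourceCommandlineFlag ]
    . addTargets [PackageName "foo"]         -- hypothetical target package
    $ basicDepResolverParams installedPkgIndex sourcePkgIndex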
+-- +addDefaultSetupDependencies :: (SourcePackage -> Maybe [Dependency]) + -> DepResolverParams -> DepResolverParams +addDefaultSetupDependencies defaultSetupDeps params = + params { + depResolverSourcePkgIndex = + fmap applyDefaultSetupDeps (depResolverSourcePkgIndex params) + } + where + applyDefaultSetupDeps :: SourcePackage -> SourcePackage + applyDefaultSetupDeps srcpkg = + srcpkg { + packageDescription = gpkgdesc { + PD.packageDescription = pkgdesc { + PD.setupBuildInfo = + case PD.setupBuildInfo pkgdesc of + Just sbi -> Just sbi + Nothing -> case defaultSetupDeps srcpkg of + Nothing -> Nothing + Just deps -> Just PD.SetupBuildInfo { + PD.defaultSetupDepends = True, + PD.setupDepends = deps + } + } + } + } + where + gpkgdesc = packageDescription srcpkg + pkgdesc = PD.packageDescription gpkgdesc + + +upgradeDependencies :: DepResolverParams -> DepResolverParams +upgradeDependencies = setPreferenceDefault PreferAllLatest + + +reinstallTargets :: DepResolverParams -> DepResolverParams +reinstallTargets params = + hideInstalledPackagesAllVersions (depResolverTargets params) params + + +standardInstallPolicy :: InstalledPackageIndex + -> SourcePackageDb + -> [PackageSpecifier SourcePackage] + -> DepResolverParams +standardInstallPolicy + installedPkgIndex (SourcePackageDb sourcePkgIndex sourcePkgPrefs) + pkgSpecifiers + + = addPreferences + [ PackageVersionPreference name ver + | (name, ver) <- Map.toList sourcePkgPrefs ] + + . addConstraints + (concatMap pkgSpecifierConstraints pkgSpecifiers) + + . addTargets + (map pkgSpecifierTarget pkgSpecifiers) + + . hideInstalledPackagesSpecificBySourcePackageId + [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] + + . addDefaultSetupDependencies mkDefaultSetupDeps + + . addSourcePackages + [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] + + $ basicDepResolverParams + installedPkgIndex sourcePkgIndex + + where + -- Force Cabal >= 1.24 dep when the package is affected by #3199. + mkDefaultSetupDeps :: SourcePackage -> Maybe [Dependency] + mkDefaultSetupDeps srcpkg | affected = + Just [Dependency (PackageName "Cabal") + (orLaterVersion $ Version [1,24] [])] + | otherwise = Nothing + where + gpkgdesc = packageDescription srcpkg + pkgdesc = PD.packageDescription gpkgdesc + bt = fromMaybe PD.Custom (PD.buildType pkgdesc) + affected = bt == PD.Custom && hasBuildableFalse gpkgdesc + + -- Does this package contain any components with non-empty 'build-depends' + -- and a 'buildable' field that could potentially be set to 'False'? False + -- positives are possible. + hasBuildableFalse :: PD.GenericPackageDescription -> Bool + hasBuildableFalse gpkg = + not (all alwaysTrue (zipWith PD.cOr buildableConditions noDepConditions)) + where + buildableConditions = PD.extractConditions PD.buildable gpkg + noDepConditions = PD.extractConditions + (null . PD.targetBuildDepends) gpkg + alwaysTrue (PD.Lit True) = True + alwaysTrue _ = False + + +applySandboxInstallPolicy :: SandboxPackageInfo + -> DepResolverParams + -> DepResolverParams +applySandboxInstallPolicy + (SandboxPackageInfo modifiedDeps otherDeps allSandboxPkgs _allDeps) + params + + = addPreferences [ PackageInstalledPreference n PreferInstalled + | n <- installedNotModified ] + + . addTargets installedNotModified + + . addPreferences + [ PackageVersionPreference (packageName pkg) + (thisVersion (packageVersion pkg)) | pkg <- otherDeps ] + + . 
addConstraints + [ let pc = PackageConstraintVersion (packageName pkg) + (thisVersion (packageVersion pkg)) + in LabeledPackageConstraint pc ConstraintSourceModifiedAddSourceDep + | pkg <- modifiedDeps ] + + . addTargets [ packageName pkg | pkg <- modifiedDeps ] + + . hideInstalledPackagesSpecificBySourcePackageId + [ packageId pkg | pkg <- modifiedDeps ] + + -- We don't need to add source packages for add-source deps to the + -- 'installedPkgIndex' since 'getSourcePackages' did that for us. + + $ params + + where + installedPkgIds = + map fst . InstalledPackageIndex.allPackagesBySourcePackageId + $ allSandboxPkgs + modifiedPkgIds = map packageId modifiedDeps + installedNotModified = [ packageName pkg | pkg <- installedPkgIds, + pkg `notElem` modifiedPkgIds ] + +-- ------------------------------------------------------------ +-- * Interface to the standard resolver +-- ------------------------------------------------------------ + +chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver +chooseSolver verbosity preSolver _cinfo = + case preSolver of + AlwaysTopDown -> do + warn verbosity "Topdown solver is deprecated" + return TopDown + AlwaysModular -> do + return Modular + Choose -> do + info verbosity "Choosing modular solver." + return Modular + +runSolver :: Solver -> SolverConfig -> DependencyResolver +runSolver TopDown = const topDownResolver -- TODO: warn about unsupported options +runSolver Modular = modularResolver + +-- | Run the dependency solver. +-- +-- Since this is potentially an expensive operation, the result is wrapped in a +-- a 'Progress' structure that can be unfolded to provide progress information, +-- logging messages and the final result or an error. +-- +resolveDependencies :: Platform + -> CompilerInfo + -> PkgConfigDb + -> Solver + -> DepResolverParams + -> Progress String String InstallPlan + + --TODO: is this needed here? see dontUpgradeNonUpgradeablePackages +resolveDependencies platform comp _pkgConfigDB _solver params + | null (depResolverTargets params) + = return (validateSolverResult platform comp indGoals []) + where + indGoals = depResolverIndependentGoals params + +resolveDependencies platform comp pkgConfigDB solver params = + + Step (showDepResolverParams finalparams) + $ fmap (validateSolverResult platform comp indGoals) + $ runSolver solver (SolverConfig reorderGoals indGoals noReinstalls + shadowing strFlags maxBkjumps) + platform comp installedPkgIndex sourcePkgIndex + pkgConfigDB preferences constraints targets + where + + finalparams @ (DepResolverParams + targets constraints + prefs defpref + installedPkgIndex + sourcePkgIndex + reorderGoals + indGoals + noReinstalls + shadowing + strFlags + maxBkjumps) = dontUpgradeNonUpgradeablePackages + -- TODO: + -- The modular solver can properly deal with broken + -- packages and won't select them. So the + -- 'hideBrokenInstalledPackages' function should be moved + -- into a module that is specific to the top-down solver. + . (if solver /= Modular then hideBrokenInstalledPackages + else id) + $ params + + preferences = interpretPackagesPreference + (Set.fromList targets) defpref prefs + + +-- | Give an interpretation to the global 'PackagesPreference' as +-- specific per-package 'PackageVersionPreference'. 
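-- An illustrative sketch (hypothetical wrapper; assumes the definitions
-- above, plus 'die' from Distribution.Simple.Utils): the typical calling
-- sequence is to pick a solver with 'chooseSolver', run
-- 'resolveDependencies', and fold the resulting 'Progress' into log messages
-- and a final 'InstallPlan'.
solveParams :: Verbosity -> Platform -> CompilerInfo -> PkgConfigDb
            -> DepResolverParams -> IO InstallPlan
solveParams verbosity platform cinfo pkgConfigDb params = do
    solver <- chooseSolver verbosity Choose cinfo   -- normally the modular solver
    foldProgress (\msg rest -> info verbosity msg >> rest)  -- log each step
                 die                                        -- abort on solver failure
                 return                                     -- the validated plan
                 (resolveDependencies platform cinfo pkgConfigDb solver params)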
+-- +interpretPackagesPreference :: Set PackageName + -> PackagesPreferenceDefault + -> [PackagePreference] + -> (PackageName -> PackagePreferences) +interpretPackagesPreference selected defaultPref prefs = + \pkgname -> PackagePreferences (versionPref pkgname) + (installPref pkgname) + (stanzasPref pkgname) + where + versionPref pkgname = + fromMaybe [anyVersion] (Map.lookup pkgname versionPrefs) + versionPrefs = Map.fromListWith (++) + [(pkgname, [pref]) + | PackageVersionPreference pkgname pref <- prefs] + + installPref pkgname = + fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs) + installPrefs = Map.fromList + [ (pkgname, pref) + | PackageInstalledPreference pkgname pref <- prefs ] + installPrefDefault = case defaultPref of + PreferAllLatest -> const PreferLatest + PreferAllInstalled -> const PreferInstalled + PreferLatestForSelected -> \pkgname -> + -- When you say cabal install foo, what you really mean is, prefer the + -- latest version of foo, but the installed version of everything else + if pkgname `Set.member` selected then PreferLatest + else PreferInstalled + + stanzasPref pkgname = + fromMaybe [] (Map.lookup pkgname stanzasPrefs) + stanzasPrefs = Map.fromListWith (\a b -> nub (a ++ b)) + [ (pkgname, pref) + | PackageStanzasPreference pkgname pref <- prefs ] + + +-- ------------------------------------------------------------ +-- * Checking the result of the solver +-- ------------------------------------------------------------ + +-- | Make an install plan from the output of the dep resolver. +-- It checks that the plan is valid, or it's an error in the dep resolver. +-- +validateSolverResult :: Platform + -> CompilerInfo + -> Bool + -> [ResolverPackage] + -> InstallPlan +validateSolverResult platform comp indepGoals pkgs = + case planPackagesProblems platform comp pkgs of + [] -> case InstallPlan.new indepGoals index of + Right plan -> plan + Left problems -> error (formatPlanProblems problems) + problems -> error (formatPkgProblems problems) + + where + index = InstalledPackageIndex.fromList (map toPlanPackage pkgs) + + toPlanPackage (PreExisting pkg) = InstallPlan.PreExisting pkg + toPlanPackage (Configured pkg) = InstallPlan.Configured pkg + + formatPkgProblems = formatProblemMessage . map showPlanPackageProblem + formatPlanProblems = formatProblemMessage . map InstallPlan.showPlanProblem + + formatProblemMessage problems = + unlines $ + "internal error: could not construct a valid install plan." 
+ : "The proposed (invalid) plan contained the following problems:" + : problems + ++ "Proposed plan:" + : [InstallPlan.showPlanIndex index] + + +data PlanPackageProblem = + InvalidConfiguredPackage ConfiguredPackage [PackageProblem] + +showPlanPackageProblem :: PlanPackageProblem -> String +showPlanPackageProblem (InvalidConfiguredPackage pkg packageProblems) = + "Package " ++ display (packageId pkg) + ++ " has an invalid configuration, in particular:\n" + ++ unlines [ " " ++ showPackageProblem problem + | problem <- packageProblems ] + +planPackagesProblems :: Platform -> CompilerInfo + -> [ResolverPackage] + -> [PlanPackageProblem] +planPackagesProblems platform cinfo pkgs = + [ InvalidConfiguredPackage pkg packageProblems + | Configured pkg <- pkgs + , let packageProblems = configuredPackageProblems platform cinfo pkg + , not (null packageProblems) ] + +data PackageProblem = DuplicateFlag PD.FlagName + | MissingFlag PD.FlagName + | ExtraFlag PD.FlagName + | DuplicateDeps [PackageId] + | MissingDep Dependency + | ExtraDep PackageId + | InvalidDep Dependency PackageId + +showPackageProblem :: PackageProblem -> String +showPackageProblem (DuplicateFlag (PD.FlagName flag)) = + "duplicate flag in the flag assignment: " ++ flag + +showPackageProblem (MissingFlag (PD.FlagName flag)) = + "missing an assignment for the flag: " ++ flag + +showPackageProblem (ExtraFlag (PD.FlagName flag)) = + "extra flag given that is not used by the package: " ++ flag + +showPackageProblem (DuplicateDeps pkgids) = + "duplicate packages specified as selected dependencies: " + ++ intercalate ", " (map display pkgids) + +showPackageProblem (MissingDep dep) = + "the package has a dependency " ++ display dep + ++ " but no package has been selected to satisfy it." + +showPackageProblem (ExtraDep pkgid) = + "the package configuration specifies " ++ display pkgid + ++ " but (with the given flag assignment) the package does not actually" + ++ " depend on any version of that package." + +showPackageProblem (InvalidDep dep pkgid) = + "the package depends on " ++ display dep + ++ " but the configuration specifies " ++ display pkgid + ++ " which does not satisfy the dependency." + +-- | A 'ConfiguredPackage' is valid if the flag assignment is total and if +-- in the configuration given by the flag assignment, all the package +-- dependencies are satisfied by the specified packages. 
+-- +configuredPackageProblems :: Platform -> CompilerInfo + -> ConfiguredPackage -> [PackageProblem] +configuredPackageProblems platform cinfo + (ConfiguredPackage pkg specifiedFlags stanzas specifiedDeps') = + [ DuplicateFlag flag | ((flag,_):_) <- duplicates specifiedFlags ] + ++ [ MissingFlag flag | OnlyInLeft flag <- mergedFlags ] + ++ [ ExtraFlag flag | OnlyInRight flag <- mergedFlags ] + ++ [ DuplicateDeps pkgs + | pkgs <- CD.nonSetupDeps (fmap (duplicatesBy (comparing packageName)) + specifiedDeps) ] + ++ [ MissingDep dep | OnlyInLeft dep <- mergedDeps ] + ++ [ ExtraDep pkgid | OnlyInRight pkgid <- mergedDeps ] + ++ [ InvalidDep dep pkgid | InBoth dep pkgid <- mergedDeps + , not (packageSatisfiesDependency pkgid dep) ] + where + specifiedDeps :: ComponentDeps [PackageId] + specifiedDeps = fmap (map confSrcId) specifiedDeps' + + mergedFlags = mergeBy compare + (sort $ map PD.flagName (PD.genPackageFlags (packageDescription pkg))) + (sort $ map fst specifiedFlags) + + packageSatisfiesDependency + (PackageIdentifier name version) + (Dependency name' versionRange) = assert (name == name') $ + version `withinRange` versionRange + + dependencyName (Dependency name _) = name + + mergedDeps :: [MergeResult Dependency PackageId] + mergedDeps = mergeDeps requiredDeps (CD.flatDeps specifiedDeps) + + mergeDeps :: [Dependency] -> [PackageId] + -> [MergeResult Dependency PackageId] + mergeDeps required specified = + let sortNubOn f = nubBy ((==) `on` f) . sortBy (compare `on` f) in + mergeBy + (\dep pkgid -> dependencyName dep `compare` packageName pkgid) + (sortNubOn dependencyName required) + (sortNubOn packageName specified) + + -- TODO: It would be nicer to use ComponentDeps here so we can be more + -- precise in our checks. That's a bit tricky though, as this currently + -- relies on the 'buildDepends' field of 'PackageDescription'. (OTOH, that + -- field is deprecated and should be removed anyway.) As long as we _do_ + -- use a flat list here, we have to allow for duplicates when we fold + -- specifiedDeps; once we have proper ComponentDeps here we should get rid + -- of the `nubOn` in `mergeDeps`. + requiredDeps :: [Dependency] + requiredDeps = + --TODO: use something lower level than finalizePackageDescription + case finalizePackageDescription specifiedFlags + (const True) + platform cinfo + [] + (enableStanzas stanzas $ packageDescription pkg) of + Right (resolvedPkg, _) -> + externalBuildDepends resolvedPkg + ++ maybe [] PD.setupDepends (PD.setupBuildInfo resolvedPkg) + Left _ -> + error "configuredPackageInvalidDeps internal error" + + +-- ------------------------------------------------------------ +-- * Simple resolver that ignores dependencies +-- ------------------------------------------------------------ + +-- | A simplistic method of resolving a list of target package names to +-- available packages. +-- +-- Specifically, it does not consider package dependencies at all. Unlike +-- 'resolveDependencies', no attempt is made to ensure that the selected +-- packages have dependencies that are satisfiable or consistent with +-- each other. +-- +-- It is suitable for tasks such as selecting packages to download for user +-- inspection. It is not suitable for selecting packages to install. +-- +-- Note: if no installed package index is available, it is OK to pass 'mempty'. +-- It simply means preferences for installed packages will be ignored. 
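-- An illustrative sketch (hypothetical helper; assumes the definitions above
-- and the function described just below): resolve a few target names to
-- source packages without any dependency planning. As the note above says,
-- 'mempty' is acceptable for the installed package index; it only disables
-- the preference for installed versions.
selectForDownload :: SourcePackageDb -> [PackageName]
                  -> Either [ResolveNoDepsError] [SourcePackage]
selectForDownload (SourcePackageDb sourcePkgIndex _prefs) names =
    resolveWithoutDependencies
      . addTargets names
      $ basicDepResolverParams mempty sourcePkgIndex   -- no installed index needed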
+-- +resolveWithoutDependencies :: DepResolverParams + -> Either [ResolveNoDepsError] [SourcePackage] +resolveWithoutDependencies (DepResolverParams targets constraints + prefs defpref installedPkgIndex sourcePkgIndex + _reorderGoals _indGoals _avoidReinstalls + _shadowing _strFlags _maxBjumps) = + collectEithers (map selectPackage targets) + where + selectPackage :: PackageName -> Either ResolveNoDepsError SourcePackage + selectPackage pkgname + | null choices = Left $! ResolveUnsatisfiable pkgname requiredVersions + | otherwise = Right $! maximumBy bestByPrefs choices + + where + -- Constraints + requiredVersions = packageConstraints pkgname + pkgDependency = Dependency pkgname requiredVersions + choices = PackageIndex.lookupDependency sourcePkgIndex + pkgDependency + + -- Preferences + PackagePreferences preferredVersions preferInstalled _ + = packagePreferences pkgname + + bestByPrefs = comparing $ \pkg -> + (installPref pkg, versionPref pkg, packageVersion pkg) + installPref = case preferInstalled of + PreferLatest -> const False + PreferInstalled -> not . null + . InstalledPackageIndex.lookupSourcePackageId + installedPkgIndex + . packageId + versionPref pkg = length . filter (packageVersion pkg `withinRange`) $ + preferredVersions + + packageConstraints :: PackageName -> VersionRange + packageConstraints pkgname = + Map.findWithDefault anyVersion pkgname packageVersionConstraintMap + packageVersionConstraintMap = + let pcs = map unlabelPackageConstraint constraints + in Map.fromList [ (name, range) + | PackageConstraintVersion name range <- pcs ] + + packagePreferences :: PackageName -> PackagePreferences + packagePreferences = interpretPackagesPreference + (Set.fromList targets) defpref prefs + + +collectEithers :: [Either a b] -> Either [a] [b] +collectEithers = collect . partitionEithers + where + collect ([], xs) = Right xs + collect (errs,_) = Left errs + partitionEithers :: [Either a b] -> ([a],[b]) + partitionEithers = foldr (either left right) ([],[]) + where + left a (l, r) = (a:l, r) + right a (l, r) = (l, a:r) + +-- | Errors for 'resolveWithoutDependencies'. +-- +data ResolveNoDepsError = + + -- | A package name which cannot be resolved to a specific package. + -- Also gives the constraint on the version and whether there was + -- a constraint on the package being installed. + ResolveUnsatisfiable PackageName VersionRange + +instance Show ResolveNoDepsError where + show (ResolveUnsatisfiable name ver) = + "There is no available version of " ++ display name + ++ " that satisfies " ++ display (simplifyVersionRange ver) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/DistDirLayout.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/DistDirLayout.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/DistDirLayout.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/DistDirLayout.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,134 @@ +{-# LANGUAGE RecordWildCards #-} + +-- | +-- +-- The layout of the .\/dist\/ directory where cabal keeps all of it's state +-- and build artifacts. 
+-- +module Distribution.Client.DistDirLayout where + +import System.FilePath +import Distribution.Package + ( PackageId ) +import Distribution.Compiler +import Distribution.Simple.Compiler (PackageDB(..)) +import Distribution.Text +import Distribution.Client.Types + ( InstalledPackageId ) + + + +-- | The layout of the project state directory. Traditionally this has been +-- called the @dist@ directory. +-- +data DistDirLayout = DistDirLayout { + + -- | The dist directory, which is the root of where cabal keeps all its + -- state including the build artifacts from each package we build. + -- + distDirectory :: FilePath, + + -- | The directory under dist where we keep the build artifacts for a + -- package we're building from a local directory. + -- + -- This uses a 'PackageId' not just a 'PackageName' because technically + -- we can have multiple instances of the same package in a solution + -- (e.g. setup deps). + -- + distBuildDirectory :: PackageId -> FilePath, + distBuildRootDirectory :: FilePath, + + -- | The directory under dist where we put the unpacked sources of + -- packages, in those cases where it makes sense to keep the build + -- artifacts to reduce rebuild times. These can be tarballs or could be + -- scm repos. + -- + distUnpackedSrcDirectory :: PackageId -> FilePath, + distUnpackedSrcRootDirectory :: FilePath, + + -- | The location for project-wide cache files (e.g. state used in + -- incremental rebuilds). + -- + distProjectCacheFile :: String -> FilePath, + distProjectCacheDirectory :: FilePath, + + -- | The location for package-specific cache files (e.g. state used in + -- incremental rebuilds). + -- + distPackageCacheFile :: PackageId -> String -> FilePath, + distPackageCacheDirectory :: PackageId -> FilePath, + + distTempDirectory :: FilePath, + distBinDirectory :: FilePath, + + distPackageDB :: CompilerId -> PackageDB + } + + + +--TODO: move to another module, e.g. CabalDirLayout? +data CabalDirLayout = CabalDirLayout { + cabalStoreDirectory :: CompilerId -> FilePath, + cabalStorePackageDirectory :: CompilerId -> InstalledPackageId + -> FilePath, + cabalStorePackageDBPath :: CompilerId -> FilePath, + cabalStorePackageDB :: CompilerId -> PackageDB, + + cabalPackageCacheDirectory :: FilePath, + cabalLogsDirectory :: FilePath, + cabalWorldFile :: FilePath + } + + +defaultDistDirLayout :: FilePath -> DistDirLayout +defaultDistDirLayout projectRootDirectory = + DistDirLayout {..} + where + distDirectory = projectRootDirectory "dist-newstyle" + --TODO: switch to just dist at some point, or some other new name + + distBuildRootDirectory = distDirectory "build" + distBuildDirectory pkgid = distBuildRootDirectory display pkgid + + distUnpackedSrcRootDirectory = distDirectory "src" + distUnpackedSrcDirectory pkgid = distUnpackedSrcRootDirectory + display pkgid + + distProjectCacheDirectory = distDirectory "cache" + distProjectCacheFile name = distProjectCacheDirectory name + + distPackageCacheDirectory pkgid = distBuildDirectory pkgid "cache" + distPackageCacheFile pkgid name = distPackageCacheDirectory pkgid name + + distTempDirectory = distDirectory "tmp" + + distBinDirectory = distDirectory "bin" + + distPackageDBPath compid = distDirectory "packagedb" display compid + distPackageDB = SpecificPackageDB . 
distPackageDBPath + + + +defaultCabalDirLayout :: FilePath -> CabalDirLayout +defaultCabalDirLayout cabalDir = + CabalDirLayout {..} + where + + cabalStoreDirectory compid = + cabalDir "store" display compid + + cabalStorePackageDirectory compid ipkgid = + cabalStoreDirectory compid display ipkgid + + cabalStorePackageDBPath compid = + cabalStoreDirectory compid "package.db" + + cabalStorePackageDB = + SpecificPackageDB . cabalStorePackageDBPath + + cabalPackageCacheDirectory = cabalDir "packages" + + cabalLogsDirectory = cabalDir "logs" + + cabalWorldFile = cabalDir "world" + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Exec.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Exec.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Exec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Exec.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,128 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Exec +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of the 'exec' command. Runs an arbitrary executable in an +-- environment suitable for making use of the sandbox. +----------------------------------------------------------------------------- + +module Distribution.Client.Exec ( exec + ) where + +import Control.Monad (unless) + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS + +import Distribution.Client.Sandbox (getSandboxConfigFilePath) +import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath) +import Distribution.Client.Sandbox.Types (UseSandbox (..)) + +import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor) +import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram) +import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath) +import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..)) +import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation) +import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) ) +import Distribution.Simple.Utils (die, warn) + +import Distribution.System (Platform) +import Distribution.Verbosity (Verbosity) + +import System.Directory ( doesDirectoryExist ) +import System.FilePath (searchPathSeparator, ()) +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative ((<$>)) +import Data.Monoid (mempty) +#endif + + +-- | Execute the given command in the package's environment. +-- +-- The given command is executed with GHC configured to use the correct +-- package database and with the sandbox bin directory added to the PATH. 
+exec :: Verbosity + -> UseSandbox + -> Compiler + -> Platform + -> ProgramDb + -> [String] + -> IO () +exec verbosity useSandbox comp platform programDb extraArgs = + case extraArgs of + (exe:args) -> do + program <- requireProgram' verbosity useSandbox programDb exe + env <- ((++) (programOverrideEnv program)) <$> environmentOverrides + let invocation = programInvocation + program { programOverrideEnv = env } + args + runProgramInvocation verbosity invocation + + [] -> die "Please specify an executable to run" + where + environmentOverrides = + case useSandbox of + NoSandbox -> return [] + (UseSandbox sandboxDir) -> + sandboxEnvironment verbosity sandboxDir comp platform programDb + + +-- | Return the package's sandbox environment. +-- +-- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox. +sandboxEnvironment :: Verbosity + -> FilePath + -> Compiler + -> Platform + -> ProgramDb + -> IO [(String, Maybe String)] +sandboxEnvironment verbosity sandboxDir comp platform programDb = + case compilerFlavor comp of + GHC -> env GHC.getGlobalPackageDB ghcProgram "GHC_PACKAGE_PATH" + GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH" + _ -> die "exec only works with GHC and GHCJS" + where + env getGlobalPackageDB hcProgram packagePathEnvVar = do + let Just program = lookupProgram hcProgram programDb + gDb <- getGlobalPackageDB verbosity program + sandboxConfigFilePath <- getSandboxConfigFilePath mempty + let sandboxPackagePath = sandboxPackageDBPath sandboxDir comp platform + compilerPackagePaths = prependToSearchPath gDb sandboxPackagePath + -- Packages database must exist, otherwise things will start + -- failing in mysterious ways. + exists <- doesDirectoryExist sandboxPackagePath + unless exists $ warn verbosity $ "Package database is not a directory: " + ++ sandboxPackagePath + -- Build the environment + return [ (packagePathEnvVar, Just compilerPackagePaths) + , ("CABAL_SANDBOX_PACKAGE_PATH", Just compilerPackagePaths) + , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath) + ] + + prependToSearchPath path newValue = + newValue ++ [searchPathSeparator] ++ path + + +-- | Check that a program is configured and available to be run. If +-- a sandbox is available check in the sandbox's directory. 
+requireProgram' :: Verbosity + -> UseSandbox + -> ProgramDb + -> String + -> IO ConfiguredProgram +requireProgram' verbosity useSandbox programDb exe = do + (program, _) <- requireProgram + verbosity + (simpleProgram exe) + updateSearchPath + return program + where + updateSearchPath = + flip modifyProgramSearchPath programDb $ \searchPath -> + case useSandbox of + NoSandbox -> searchPath + UseSandbox sandboxDir -> + ProgramSearchPathDir (sandboxDir "bin") : searchPath diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Fetch.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Fetch.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Fetch.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Fetch.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,199 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Fetch +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- The cabal fetch command +----------------------------------------------------------------------------- +module Distribution.Client.Fetch ( + fetch, + ) where + +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.FetchUtils hiding (fetchPackage) +import Distribution.Client.Dependency +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.PkgConfigDb + ( PkgConfigDb, readPkgConfigDb ) +import Distribution.Client.Setup + ( GlobalFlags(..), FetchFlags(..), RepoContext(..) ) + +import Distribution.Package + ( packageId ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, PackageDBStack ) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Program + ( ProgramConfiguration ) +import Distribution.Simple.Setup + ( fromFlag ) +import Distribution.Simple.Utils + ( die, notice, debug ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) + +import Control.Monad + ( filterM ) + +-- ------------------------------------------------------------ +-- * The fetch command +-- ------------------------------------------------------------ + +--TODO: +-- * add fetch -o support +-- * support tarball URLs via ad-hoc download cache (or in -o mode?) +-- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied +-- * Port various flags from install: +-- * --updage-dependencies +-- * --constraint and --preference +-- * --only-dependencies, but note it conflicts with --no-deps + + +-- | Fetch a list of packages and their dependencies. +-- +fetch :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> GlobalFlags + -> FetchFlags + -> [UserTarget] + -> IO () +fetch verbosity _ _ _ _ _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." 
+ +fetch verbosity packageDBs repoCtxt comp platform conf + globalFlags fetchFlags userTargets = do + + mapM_ checkTarget userTargets + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity conf + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + + pkgs <- planPackages + verbosity comp platform fetchFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + + pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs + if null pkgs' + --TODO: when we add support for remote tarballs then this message + -- will need to be changed because for remote tarballs we fetch them + -- at the earlier phase. + then notice verbosity $ "No packages need to be fetched. " + ++ "All the requested packages are already local " + ++ "or cached locally." + else if dryRun + then notice verbosity $ unlines $ + "The following packages would be fetched:" + : map (display . packageId) pkgs' + + else mapM_ (fetchPackage verbosity repoCtxt . packageSource) pkgs' + + where + dryRun = fromFlag (fetchDryRun fetchFlags) + +planPackages :: Verbosity + -> Compiler + -> Platform + -> FetchFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier SourcePackage] + -> IO [SourcePackage] +planPackages verbosity comp platform fetchFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + + | includeDependencies = do + solver <- chooseSolver verbosity + (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) + notice verbosity "Resolving dependencies..." + installPlan <- foldProgress logMsg die return $ + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + -- The packages we want to fetch are those packages the 'InstallPlan' + -- that are in the 'InstallPlan.Configured' state. + return + [ pkg + | (InstallPlan.Configured (ConfiguredPackage pkg _ _ _)) + <- InstallPlan.toList installPlan ] + + | otherwise = + either (die . unlines . map show) return $ + resolveWithoutDependencies resolverParams + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + -- Reinstall the targets given on the command line so that the dep + -- resolver will decide that they need fetching, even if they're + -- already installed. Since we want to get the source packages of + -- things we might have installed (but not have the sources for). + . reinstallTargets + + $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers + + includeDependencies = fromFlag (fetchDeps fetchFlags) + logMsg message rest = debug verbosity message >> rest + + reorderGoals = fromFlag (fetchReorderGoals fetchFlags) + independentGoals = fromFlag (fetchIndependentGoals fetchFlags) + shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) + strongFlags = fromFlag (fetchStrongFlags fetchFlags) + maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) + + +checkTarget :: UserTarget -> IO () +checkTarget target = case target of + UserTargetRemoteTarball _uri + -> die $ "The 'fetch' command does not yet support remote tarballs. " + ++ "In the meantime you can use the 'unpack' commands." 
+ _ -> return () + +fetchPackage :: Verbosity -> RepoContext -> PackageLocation a -> IO () +fetchPackage verbosity repoCtxt pkgsrc = case pkgsrc of + LocalUnpackedPackage _dir -> return () + LocalTarballPackage _file -> return () + + RemoteTarballPackage _uri _ -> + die $ "The 'fetch' command does not yet support remote tarballs. " + ++ "In the meantime you can use the 'unpack' commands." + + RepoTarballPackage repo pkgid _ -> do + _ <- fetchRepoTarball verbosity repoCtxt repo pkgid + return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/FetchUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/FetchUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/FetchUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/FetchUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,226 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.FetchUtils +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Functions for fetching packages +----------------------------------------------------------------------------- +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.FetchUtils ( + + -- * fetching packages + fetchPackage, + isFetched, + checkFetched, + + -- ** specifically for repo packages + checkRepoTarballFetched, + fetchRepoTarball, + + -- * fetching other things + downloadIndex, + ) where + +import Distribution.Client.Types +import Distribution.Client.HttpUtils + ( downloadURI, isOldHackageURI, DownloadResult(..) + , HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps ) + +import Distribution.Package + ( PackageId, packageName, packageVersion ) +import Distribution.Simple.Utils + ( notice, info, setupMessage ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Client.GlobalFlags + ( RepoContext(..) ) + +import Data.Maybe +import System.Directory + ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) +import System.IO + ( openTempFile, hClose ) +import System.FilePath + ( (), (<.>) ) +import qualified System.FilePath.Posix as FilePath.Posix + ( combine, joinPath ) +import Network.URI + ( URI(uriPath) ) + +import qualified Hackage.Security.Client as Sec + +-- ------------------------------------------------------------ +-- * Actually fetch things +-- ------------------------------------------------------------ + +-- | Returns @True@ if the package has already been fetched +-- or does not need fetching. +-- +isFetched :: PackageLocation (Maybe FilePath) -> IO Bool +isFetched loc = case loc of + LocalUnpackedPackage _dir -> return True + LocalTarballPackage _file -> return True + RemoteTarballPackage _uri local -> return (isJust local) + RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) + + +-- | Checks if the package has already been fetched (or does not need +-- fetching) and if so returns evidence in the form of a 'PackageLocation' +-- with a resolved local file location. 
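-- An illustrative sketch (hypothetical helper; assumes 'checkFetched' above
-- and 'fetchPackage' defined just below in this module): consult
-- 'checkFetched' first and only download when no local copy of the tarball
-- exists yet.
ensureFetched :: Verbosity -> RepoContext
              -> PackageLocation (Maybe FilePath)
              -> IO (PackageLocation FilePath)
ensureFetched verbosity repoCtxt loc = do
    mloc <- checkFetched loc
    case mloc of
      Just resolved -> return resolved               -- already fetched locally
      Nothing       -> fetchPackage verbosity repoCtxt loc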
+-- +checkFetched :: PackageLocation (Maybe FilePath) + -> IO (Maybe (PackageLocation FilePath)) +checkFetched loc = case loc of + LocalUnpackedPackage dir -> + return (Just $ LocalUnpackedPackage dir) + LocalTarballPackage file -> + return (Just $ LocalTarballPackage file) + RemoteTarballPackage uri (Just file) -> + return (Just $ RemoteTarballPackage uri file) + RepoTarballPackage repo pkgid (Just file) -> + return (Just $ RepoTarballPackage repo pkgid file) + + RemoteTarballPackage _uri Nothing -> return Nothing + RepoTarballPackage repo pkgid Nothing -> + fmap (fmap (RepoTarballPackage repo pkgid)) + (checkRepoTarballFetched repo pkgid) + + +-- | Like 'checkFetched' but for the specific case of a 'RepoTarballPackage'. +-- +checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath) +checkRepoTarballFetched repo pkgid = do + let file = packageFile repo pkgid + exists <- doesFileExist file + if exists + then return (Just file) + else return Nothing + + +-- | Fetch a package if we don't have it already. +-- +fetchPackage :: Verbosity + -> RepoContext + -> PackageLocation (Maybe FilePath) + -> IO (PackageLocation FilePath) +fetchPackage verbosity repoCtxt loc = case loc of + LocalUnpackedPackage dir -> + return (LocalUnpackedPackage dir) + LocalTarballPackage file -> + return (LocalTarballPackage file) + RemoteTarballPackage uri (Just file) -> + return (RemoteTarballPackage uri file) + RepoTarballPackage repo pkgid (Just file) -> + return (RepoTarballPackage repo pkgid file) + + RemoteTarballPackage uri Nothing -> do + path <- downloadTarballPackage uri + return (RemoteTarballPackage uri path) + RepoTarballPackage repo pkgid Nothing -> do + local <- fetchRepoTarball verbosity repoCtxt repo pkgid + return (RepoTarballPackage repo pkgid local) + where + downloadTarballPackage uri = do + transport <- repoContextGetTransport repoCtxt + transportCheckHttps transport uri + notice verbosity ("Downloading " ++ show uri) + tmpdir <- getTemporaryDirectory + (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" + hClose hnd + _ <- downloadURI transport verbosity uri path + return path + + +-- | Fetch a repo package if we don't have it already. +-- +fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath +fetchRepoTarball verbosity repoCtxt repo pkgid = do + fetched <- doesFileExist (packageFile repo pkgid) + if fetched + then do info verbosity $ display pkgid ++ " has already been downloaded." + return (packageFile repo pkgid) + else do setupMessage verbosity "Downloading" pkgid + downloadRepoPackage + where + downloadRepoPackage = case repo of + RepoLocal{..} -> return (packageFile repo pkgid) + + RepoRemote{..} -> do + transport <- repoContextGetTransport repoCtxt + remoteRepoCheckHttps transport repoRemote + let uri = packageURI repoRemote pkgid + dir = packageDir repo pkgid + path = packageFile repo pkgid + createDirectoryIfMissing True dir + _ <- downloadURI transport verbosity uri path + return path + + RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do + let dir = packageDir repo pkgid + path = packageFile repo pkgid + createDirectoryIfMissing True dir + Sec.uncheckClientErrors $ do + info verbosity ("writing " ++ path) + Sec.downloadPackage' rep pkgid path + return path + +-- | Downloads an index file to [config-dir/packages/serv-id]. 
+-- +downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult +downloadIndex transport verbosity remoteRepo cacheDir = do + remoteRepoCheckHttps transport remoteRepo + let uri = (remoteRepoURI remoteRepo) { + uriPath = uriPath (remoteRepoURI remoteRepo) + `FilePath.Posix.combine` "00-index.tar.gz" + } + path = cacheDir "00-index" <.> "tar.gz" + createDirectoryIfMissing True cacheDir + downloadURI transport verbosity uri path + + +-- ------------------------------------------------------------ +-- * Path utilities +-- ------------------------------------------------------------ + +-- | Generate the full path to the locally cached copy of +-- the tarball for a given @PackageIdentifer@. +-- +packageFile :: Repo -> PackageId -> FilePath +packageFile repo pkgid = packageDir repo pkgid + display pkgid + <.> "tar.gz" + +-- | Generate the full path to the directory where the local cached copy of +-- the tarball for a given @PackageIdentifer@ is stored. +-- +packageDir :: Repo -> PackageId -> FilePath +packageDir repo pkgid = repoLocalDir repo + display (packageName pkgid) + display (packageVersion pkgid) + +-- | Generate the URI of the tarball for a given package. +-- +packageURI :: RemoteRepo -> PackageId -> URI +packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = + (remoteRepoURI repo) { + uriPath = FilePath.Posix.joinPath + [uriPath (remoteRepoURI repo) + ,display (packageName pkgid) + ,display (packageVersion pkgid) + ,display pkgid <.> "tar.gz"] + } +packageURI repo pkgid = + (remoteRepoURI repo) { + uriPath = FilePath.Posix.joinPath + [uriPath (remoteRepoURI repo) + ,"package" + ,display pkgid <.> "tar.gz"] + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/FileMonitor.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/FileMonitor.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/FileMonitor.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,1119 @@ +{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor, GeneralizedNewtypeDeriving, + NamedFieldPuns, BangPatterns #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +-- | An abstraction to help with re-running actions when files or other +-- input values they depend on have changed. 
+-- +module Distribution.Client.FileMonitor ( + + -- * Declaring files to monitor + MonitorFilePath(..), + MonitorKindFile(..), + MonitorKindDir(..), + FilePathGlob(..), + monitorFile, + monitorFileHashed, + monitorNonExistentFile, + monitorDirectory, + monitorNonExistentDirectory, + monitorDirectoryExistence, + monitorFileOrDirectory, + monitorFileGlob, + monitorFileGlobExistence, + monitorFileSearchPath, + monitorFileHashedSearchPath, + + -- * Creating and checking sets of monitored files + FileMonitor(..), + newFileMonitor, + MonitorChanged(..), + MonitorChangedReason(..), + checkFileMonitorChanged, + updateFileMonitor, + MonitorTimestamp, + beginUpdateFileMonitor, + ) where + + +#if MIN_VERSION_containers(0,5,0) +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +#else +import Data.Map (Map) +import qualified Data.Map as Map +#endif +import qualified Data.ByteString.Lazy as BS +import Distribution.Compat.Binary +import qualified Distribution.Compat.Binary as Binary +import qualified Data.Hashable as Hashable +import Data.List (sort) + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import Control.Monad.Trans (MonadIO, liftIO) +import Control.Monad.State (StateT, mapStateT) +import qualified Control.Monad.State as State +import Control.Monad.Except (ExceptT, runExceptT, withExceptT, + throwError) +import Control.Exception + +import Distribution.Client.Compat.Time +import Distribution.Client.Glob +import Distribution.Simple.Utils (handleDoesNotExist, writeFileAtomic) +import Distribution.Client.Utils (mergeBy, MergeResult(..)) + +import System.FilePath +import System.Directory +import System.IO +import GHC.Generics (Generic) + + +------------------------------------------------------------------------------ +-- Types for specifying files to monitor +-- + + +-- | A description of a file (or set of files) to monitor for changes. +-- +-- Where file paths are relative they are relative to a common directory +-- (e.g. project root), not necessarily the process current directory. +-- +data MonitorFilePath = + MonitorFile { + monitorKindFile :: !MonitorKindFile, + monitorKindDir :: !MonitorKindDir, + monitorPath :: !FilePath + } + | MonitorFileGlob { + monitorKindFile :: !MonitorKindFile, + monitorKindDir :: !MonitorKindDir, + monitorPathGlob :: !FilePathGlob + } + deriving (Eq, Show, Generic) + +data MonitorKindFile = FileExists + | FileModTime + | FileHashed + | FileNotExists + deriving (Eq, Show, Generic) + +data MonitorKindDir = DirExists + | DirModTime + | DirNotExists + deriving (Eq, Show, Generic) + +instance Binary MonitorFilePath +instance Binary MonitorKindFile +instance Binary MonitorKindDir + +-- | Monitor a single file for changes, based on its modification time. +-- The monitored file is considered to have changed if it no longer +-- exists or if its modification time has changed. +-- +monitorFile :: FilePath -> MonitorFilePath +monitorFile = MonitorFile FileModTime DirNotExists + +-- | Monitor a single file for changes, based on its modification time +-- and content hash. The monitored file is considered to have changed if +-- it no longer exists or if its modification time and content hash have +-- changed. +-- +monitorFileHashed :: FilePath -> MonitorFilePath +monitorFileHashed = MonitorFile FileHashed DirNotExists + +-- | Monitor a single non-existent file for changes. The monitored file +-- is considered to have changed if it exists. 
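+--
+-- For example (the path below is illustrative), to consider the monitored
+-- set changed as soon as a local override file appears:
+--
+-- > monitorNonExistentFile "overrides.config"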
+-- +monitorNonExistentFile :: FilePath -> MonitorFilePath +monitorNonExistentFile = MonitorFile FileNotExists DirNotExists + +-- | Monitor a single directory for changes, based on its modification +-- time. The monitored directory is considered to have changed if it no +-- longer exists or if its modification time has changed. +-- +monitorDirectory :: FilePath -> MonitorFilePath +monitorDirectory = MonitorFile FileNotExists DirModTime + +-- | Monitor a single non-existent directory for changes. The monitored +-- directory is considered to have changed if it exists. +-- +monitorNonExistentDirectory :: FilePath -> MonitorFilePath +-- Just an alias for monitorNonExistentFile, since you can't +-- tell the difference between a non-existent directory and +-- a non-existent file :) +monitorNonExistentDirectory = monitorNonExistentFile + +-- | Monitor a single directory for existence. The monitored directory is +-- considered to have changed only if it no longer exists. +-- +monitorDirectoryExistence :: FilePath -> MonitorFilePath +monitorDirectoryExistence = MonitorFile FileNotExists DirExists + +-- | Monitor a single file or directory for changes, based on its modification +-- time. The monitored file is considered to have changed if it no longer +-- exists or if its modification time has changed. +-- +monitorFileOrDirectory :: FilePath -> MonitorFilePath +monitorFileOrDirectory = MonitorFile FileModTime DirModTime + +-- | Monitor a set of files (or directories) identified by a file glob. +-- The monitored glob is considered to have changed if the set of files +-- matching the glob changes (i.e. creations or deletions), or for files if the +-- modification time and content hash of any matching file has changed. +-- +monitorFileGlob :: FilePathGlob -> MonitorFilePath +monitorFileGlob = MonitorFileGlob FileHashed DirExists + +-- | Monitor a set of files (or directories) identified by a file glob for +-- existence only. The monitored glob is considered to have changed if the set +-- of files matching the glob changes (i.e. creations or deletions). +-- +monitorFileGlobExistence :: FilePathGlob -> MonitorFilePath +monitorFileGlobExistence = MonitorFileGlob FileExists DirExists + +-- | Creates a list of files to monitor when you search for a file which +-- unsuccessfully looked in @notFoundAtPaths@ before finding it at +-- @foundAtPath@. +monitorFileSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] +monitorFileSearchPath notFoundAtPaths foundAtPath = + monitorFile foundAtPath + : map monitorNonExistentFile notFoundAtPaths + +-- | Similar to 'monitorFileSearchPath', but also instructs us to +-- monitor the hash of the found file. +monitorFileHashedSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] +monitorFileHashedSearchPath notFoundAtPaths foundAtPath = + monitorFileHashed foundAtPath + : map monitorNonExistentFile notFoundAtPaths + + +------------------------------------------------------------------------------ +-- Implementation types, files status +-- + +-- | The state necessary to determine whether a set of monitored +-- files has changed. It consists of two parts: a set of specific +-- files to be monitored (index by their path), and a list of +-- globs, which monitor may files at once. +data MonitorStateFileSet + = MonitorStateFileSet !(Map FilePath MonitorStateFile) + ![MonitorStateGlob] + deriving Show + +type Hash = Int + +-- | The state necessary to determine whether a monitored file has changed. 
+-- +-- This covers all the cases of 'MonitorFilePath' except for globs which is +-- covered separately by 'MonitorStateGlob'. +-- +-- The @Maybe ModTime@ is to cover the case where we already consider the +-- file to have changed, either because it had already changed by the time we +-- did the snapshot (i.e. too new, changed since start of update process) or it +-- no longer exists at all. +-- +data MonitorStateFile = MonitorStateFile !MonitorKindFile !MonitorKindDir + !MonitorStateFileStatus + deriving (Show, Generic) + +data MonitorStateFileStatus + = MonitorStateFileExists + | MonitorStateFileModTime !ModTime -- ^ cached file mtime + | MonitorStateFileHashed !ModTime !Hash -- ^ cached mtime and content hash + | MonitorStateDirExists + | MonitorStateDirModTime !ModTime -- ^ cached dir mtime + | MonitorStateNonExistent + | MonitorStateAlreadyChanged + deriving (Show, Generic) + +instance Binary MonitorStateFile +instance Binary MonitorStateFileStatus + +-- | The state necessary to determine whether the files matched by a globbing +-- match have changed. +-- +data MonitorStateGlob = MonitorStateGlob !MonitorKindFile !MonitorKindDir + !FilePathRoot !MonitorStateGlobRel + deriving (Show, Generic) + +data MonitorStateGlobRel + = MonitorStateGlobDirs + !Glob !FilePathGlobRel + !ModTime + ![(FilePath, MonitorStateGlobRel)] -- invariant: sorted + + | MonitorStateGlobFiles + !Glob + !ModTime + ![(FilePath, MonitorStateFileStatus)] -- invariant: sorted + + | MonitorStateGlobDirTrailing + deriving (Show, Generic) + +instance Binary MonitorStateGlob +instance Binary MonitorStateGlobRel + +-- | We can build a 'MonitorStateFileSet' from a set of 'MonitorFilePath' by +-- inspecting the state of the file system, and we can go in the reverse +-- direction by just forgetting the extra info. +-- +reconstructMonitorFilePaths :: MonitorStateFileSet -> [MonitorFilePath] +reconstructMonitorFilePaths (MonitorStateFileSet singlePaths globPaths) = + Map.foldrWithKey (\k x r -> getSinglePath k x : r) + (map getGlobPath globPaths) + singlePaths + where + getSinglePath filepath (MonitorStateFile kindfile kinddir _) = + MonitorFile kindfile kinddir filepath + + getGlobPath (MonitorStateGlob kindfile kinddir root gstate) = + MonitorFileGlob kindfile kinddir $ FilePathGlob root $ + case gstate of + MonitorStateGlobDirs glob globs _ _ -> GlobDir glob globs + MonitorStateGlobFiles glob _ _ -> GlobFile glob + MonitorStateGlobDirTrailing -> GlobDirTrailing + +------------------------------------------------------------------------------ +-- Checking the status of monitored files +-- + +-- | A monitor for detecting changes to a set of files. It can be used to +-- efficiently test if any of a set of files (specified individually or by +-- glob patterns) has changed since some snapshot. In addition, it also checks +-- for changes in a value (of type @a@), and when there are no changes in +-- either it returns a saved value (of type @b@). +-- +-- The main use case looks like this: suppose we have some expensive action +-- that depends on certain pure inputs and reads some set of files, and +-- produces some pure result. We want to avoid re-running this action when it +-- would produce the same result. So we need to monitor the files the action +-- looked at, the other pure input values, and we need to cache the result. +-- Then at some later point, if the input value didn't change, and none of the +-- files changed, then we can re-use the cached result rather than re-running +-- the action. 
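+--
+-- A minimal sketch of that pattern, using the API defined below (the monitor
+-- path, the @Flags@ and @ConfigResult@ types and @runExpensiveAction@ are all
+-- illustrative, not part of this module):
+--
+-- > let monitor = newFileMonitor "dist/cache/config.monitor"
+-- >                 :: FileMonitor Flags ConfigResult
+-- > mchange <- checkFileMonitorChanged monitor projectRoot flags
+-- > case mchange of
+-- >   MonitorUnchanged result _files -> return result
+-- >   MonitorChanged   _reason       -> do
+-- >     start           <- beginUpdateFileMonitor
+-- >     (result, files) <- runExpensiveAction flags
+-- >     updateFileMonitor monitor projectRoot (Just start) files flags result
+-- >     return result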
+-- +-- This can be achieved using a 'FileMonitor'. Each 'FileMonitor' instance +-- saves state in a disk file, so the file for that has to be specified, +-- making sure it is unique. The pattern is to use 'checkFileMonitorChanged' +-- to see if there's been any change. If there is, re-run the action, keeping +-- track of the files, then use 'updateFileMonitor' to record the current +-- set of files to monitor, the current input value for the action, and the +-- result of the action. +-- +-- The typical occurrence of this pattern is captured by 'rerunIfChanged' +-- and the 'Rebuild' monad. More complicated cases may need to use +-- 'checkFileMonitorChanged' and 'updateFileMonitor' directly. +-- +data FileMonitor a b + = FileMonitor { + + -- | The file where this 'FileMonitor' should store its state. + -- + fileMonitorCacheFile :: FilePath, + + -- | Compares a new cache key with old one to determine if a + -- corresponding cached value is still valid. + -- + -- Typically this is just an equality test, but in some + -- circumstances it can make sense to do things like subset + -- comparisons. + -- + -- The first arg is the new value, the second is the old cached value. + -- + fileMonitorKeyValid :: a -> a -> Bool, + + -- | When this mode is enabled, if 'checkFileMonitorChanged' returns + -- 'MonitoredValueChanged' then we have the guarantee that no files + -- changed, that the value change was the only change. In the default + -- mode no such guarantee is provided which is slightly faster. + -- + fileMonitorCheckIfOnlyValueChanged :: Bool + } + +-- | Define a new file monitor. +-- +-- It's best practice to define file monitor values once, and then use the +-- same value for 'checkFileMonitorChanged' and 'updateFileMonitor' as this +-- ensures you get the same types @a@ and @b@ for reading and writing. +-- +-- The path of the file monitor itself must be unique because it keeps state +-- on disk and these would clash. +-- +newFileMonitor :: Eq a => FilePath -- ^ The file to cache the state of the + -- file monitor. Must be unique. + -> FileMonitor a b +newFileMonitor path = FileMonitor path (==) False + +-- | The result of 'checkFileMonitorChanged': either the monitored files or +-- value changed (and it tells us which it was) or nothing changed and we get +-- the cached result. +-- +data MonitorChanged a b = + -- | The monitored files and value did not change. The cached result is + -- @b@. + -- + -- The set of monitored files is also returned. This is useful + -- for composing or nesting 'FileMonitor's. + MonitorUnchanged b [MonitorFilePath] + + -- | The monitor found that something changed. The reason is given. + -- + | MonitorChanged (MonitorChangedReason a) + deriving Show + +-- | What kind of change 'checkFileMonitorChanged' detected. +-- +data MonitorChangedReason a = + + -- | One of the files changed (existence, file type, mtime or file + -- content, depending on the 'MonitorFilePath' in question) + MonitoredFileChanged FilePath + + -- | The pure input value changed. + -- + -- The previous cached key value is also returned. This is sometimes + -- useful when using a 'fileMonitorKeyValid' function that is not simply + -- '(==)', when invalidation can be partial. In such cases it can make + -- sense to 'updateFileMonitor' with a key value that's a combination of + -- the new and old (e.g. set union). + | MonitoredValueChanged a + + -- | There was no saved monitor state, cached value etc. Ie the file + -- for the 'FileMonitor' does not exist. 
+ | MonitorFirstRun + + -- | There was existing state, but we could not read it. This typically + -- happens when the code has changed compared to an existing 'FileMonitor' + -- cache file and type of the input value or cached value has changed such + -- that we cannot decode the values. This is completely benign as we can + -- treat is just as if there were no cache file and re-run. + | MonitorCorruptCache + deriving (Eq, Show, Functor) + +-- | Test if the input value or files monitored by the 'FileMonitor' have +-- changed. If not, return the cached value. +-- +-- See 'FileMonitor' for a full explanation. +-- +checkFileMonitorChanged + :: (Binary a, Binary b) + => FileMonitor a b -- ^ cache file path + -> FilePath -- ^ root directory + -> a -- ^ guard or key value + -> IO (MonitorChanged a b) -- ^ did the key or any paths change? +checkFileMonitorChanged + monitor@FileMonitor { fileMonitorKeyValid, + fileMonitorCheckIfOnlyValueChanged } + root currentKey = + + -- Consider it a change if the cache file does not exist, + -- or we cannot decode it. Sadly ErrorCall can still happen, despite + -- using decodeFileOrFail, e.g. Data.Char.chr errors + + handleDoesNotExist (MonitorChanged MonitorFirstRun) $ + handleErrorCall (MonitorChanged MonitorCorruptCache) $ + readCacheFile monitor + >>= either (\_ -> return (MonitorChanged MonitorCorruptCache)) + checkStatusCache + + where + checkStatusCache (cachedFileStatus, cachedKey, cachedResult) = do + change <- checkForChanges + case change of + Just reason -> return (MonitorChanged reason) + Nothing -> return (MonitorUnchanged cachedResult monitorFiles) + where monitorFiles = reconstructMonitorFilePaths cachedFileStatus + where + -- In fileMonitorCheckIfOnlyValueChanged mode we want to guarantee that + -- if we return MonitoredValueChanged that only the value changed. + -- We do that by checkin for file changes first. Otherwise it makes + -- more sense to do the cheaper test first. + checkForChanges + | fileMonitorCheckIfOnlyValueChanged + = checkFileChange cachedFileStatus cachedKey cachedResult + `mplusMaybeT` + checkValueChange cachedKey + + | otherwise + = checkValueChange cachedKey + `mplusMaybeT` + checkFileChange cachedFileStatus cachedKey cachedResult + + mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) + mplusMaybeT ma mb = do + mx <- ma + case mx of + Nothing -> mb + Just x -> return (Just x) + + -- Check if the guard value has changed + checkValueChange cachedKey + | not (fileMonitorKeyValid currentKey cachedKey) + = return (Just (MonitoredValueChanged cachedKey)) + | otherwise + = return Nothing + + -- Check if any file has changed + checkFileChange cachedFileStatus cachedKey cachedResult = do + res <- probeFileSystem root cachedFileStatus + case res of + -- Some monitored file has changed + Left changedPath -> + return (Just (MonitoredFileChanged (normalise changedPath))) + + -- No monitored file has changed + Right (cachedFileStatus', cacheStatus) -> do + + -- But we might still want to update the cache + whenCacheChanged cacheStatus $ + rewriteCacheFile monitor cachedFileStatus' cachedKey cachedResult + + return Nothing + +-- | Helper for reading the cache file. +-- +-- This determines the type and format of the binary cache file. 
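+--
+-- Decode failures are returned as @Left@; 'checkFileMonitorChanged' above
+-- treats them as a corrupt cache, along these lines:
+--
+-- > res <- readCacheFile monitor
+-- > case res of
+-- >   Left  _                      -> return (MonitorChanged MonitorCorruptCache)
+-- >   Right (fileset, key, result) -> checkStatusCache (fileset, key, result)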
+-- +readCacheFile :: (Binary a, Binary b) + => FileMonitor a b + -> IO (Either String (MonitorStateFileSet, a, b)) +readCacheFile FileMonitor {fileMonitorCacheFile} = + withBinaryFile fileMonitorCacheFile ReadMode $ \hnd -> + Binary.decodeOrFailIO =<< BS.hGetContents hnd + +-- | Helper for writing the cache file. +-- +-- This determines the type and format of the binary cache file. +-- +rewriteCacheFile :: (Binary a, Binary b) + => FileMonitor a b + -> MonitorStateFileSet -> a -> b -> IO () +rewriteCacheFile FileMonitor {fileMonitorCacheFile} fileset key result = + writeFileAtomic fileMonitorCacheFile $ + Binary.encode (fileset, key, result) + +-- | Probe the file system to see if any of the monitored files have changed. +-- +-- It returns Nothing if any file changed, or returns a possibly updated +-- file 'MonitorStateFileSet' plus an indicator of whether it actually changed. +-- +-- We may need to update the cache since there may be changes in the filesystem +-- state which don't change any of our affected files. +-- +-- Consider the glob @{proj1,proj2}\/\*.cabal@. Say we first run and find a +-- @proj1@ directory containing @proj1.cabal@ yet no @proj2@. If we later run +-- and find @proj2@ was created, yet contains no files matching @*.cabal@ then +-- we want to update the cache despite no changes in our relevant file set. +-- Specifically, we should add an mtime for this directory so we can avoid +-- re-traversing the directory in future runs. +-- +probeFileSystem :: FilePath -> MonitorStateFileSet + -> IO (Either FilePath (MonitorStateFileSet, CacheChanged)) +probeFileSystem root (MonitorStateFileSet singlePaths globPaths) = + runChangedM $ do + sequence_ + [ probeMonitorStateFileStatus root file status + | (file, MonitorStateFile _ _ status) <- Map.toList singlePaths ] + -- The glob monitors can require state changes + globPaths' <- + sequence + [ probeMonitorStateGlob root globPath + | globPath <- globPaths ] + return (MonitorStateFileSet singlePaths globPaths') + + +----------------------------------------------- +-- Monad for checking for file system changes +-- +-- We need to be able to bail out if we detect a change (using ExceptT), +-- but if there's no change we need to be able to rebuild the monitor +-- state. And we want to optimise that rebuilding by keeping track if +-- anything actually changed (using StateT), so that in the typical case +-- we can avoid rewriting the state file. + +newtype ChangedM a = ChangedM (StateT CacheChanged (ExceptT FilePath IO) a) + deriving (Functor, Applicative, Monad, MonadIO) + +runChangedM :: ChangedM a -> IO (Either FilePath (a, CacheChanged)) +runChangedM (ChangedM action) = + runExceptT $ State.runStateT action CacheUnchanged + +somethingChanged :: FilePath -> ChangedM a +somethingChanged path = ChangedM $ throwError path + +cacheChanged :: ChangedM () +cacheChanged = ChangedM $ State.put CacheChanged + +mapChangedFile :: (FilePath -> FilePath) -> ChangedM a -> ChangedM a +mapChangedFile adjust (ChangedM a) = + ChangedM (mapStateT (withExceptT adjust) a) + +data CacheChanged = CacheChanged | CacheUnchanged + +whenCacheChanged :: Monad m => CacheChanged -> m () -> m () +whenCacheChanged CacheChanged action = action +whenCacheChanged CacheUnchanged _ = return () + +---------------------- + +-- | Probe the file system to see if a single monitored file has changed. 
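+--
+-- For instance (an illustrative path and cached entry), a plain mtime entry
+-- is re-checked with:
+--
+-- > probeMonitorStateFileStatus root "src/Main.hs" (MonitorStateFileModTime mtime)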
+--
+probeMonitorStateFileStatus :: FilePath -> FilePath
+                            -> MonitorStateFileStatus
+                            -> ChangedM ()
+probeMonitorStateFileStatus root file status =
+    case status of
+      MonitorStateFileExists ->
+        probeFileExistence root file
+
+      MonitorStateFileModTime mtime ->
+        probeFileModificationTime root file mtime
+
+      MonitorStateFileHashed mtime hash ->
+        probeFileModificationTimeAndHash root file mtime hash
+
+      MonitorStateDirExists ->
+        probeDirExistence root file
+
+      MonitorStateDirModTime mtime ->
+        probeFileModificationTime root file mtime
+
+      MonitorStateNonExistent ->
+        probeFileNonExistence root file
+
+      MonitorStateAlreadyChanged ->
+        somethingChanged file
+
+
+-- | Probe the file system to see if a monitored file glob has changed.
+--
+probeMonitorStateGlob :: FilePath      -- ^ root path
+                      -> MonitorStateGlob
+                      -> ChangedM MonitorStateGlob
+probeMonitorStateGlob relroot
+                      (MonitorStateGlob kindfile kinddir globroot glob) = do
+    root <- liftIO $ getFilePathRootDirectory globroot relroot
+    case globroot of
+      FilePathRelative ->
+        MonitorStateGlob kindfile kinddir globroot <$>
+        probeMonitorStateGlobRel kindfile kinddir root "." glob
+
+      -- for absolute cases, make the changed file we report absolute too
+      _ ->
+        mapChangedFile (root </>) $
+        MonitorStateGlob kindfile kinddir globroot <$>
+        probeMonitorStateGlobRel kindfile kinddir root "" glob
+
+probeMonitorStateGlobRel :: MonitorKindFile -> MonitorKindDir
+                         -> FilePath      -- ^ root path
+                         -> FilePath      -- ^ path of the directory we are
+                                          --   looking in relative to @root@
+                         -> MonitorStateGlobRel
+                         -> ChangedM MonitorStateGlobRel
+probeMonitorStateGlobRel kindfile kinddir root dirName
+                        (MonitorStateGlobDirs glob globPath mtime children) = do
+    change <- liftIO $ checkDirectoryModificationTime (root </> dirName) mtime
+    case change of
+      Nothing -> do
+        children' <- sequence
+          [ do fstate' <- probeMonitorStateGlobRel
+                            kindfile kinddir root
+                            (dirName </> fname) fstate
+               return (fname, fstate')
+          | (fname, fstate) <- children ]
+        return $! MonitorStateGlobDirs glob globPath mtime children'
+
+      Just mtime' -> do
+        -- directory modification time changed:
+        -- a matching subdir may have been added or deleted
+        matches <- filterM (\entry -> let subdir = root </> dirName </> entry
+                                       in liftIO $ doesDirectoryExist subdir)
+                 . filter (matchGlob glob)
+               =<< liftIO (getDirectoryContents (root </> dirName))
+
+        children' <- mapM probeMergeResult $
+                          mergeBy (\(path1,_) path2 -> compare path1 path2)
+                                  children
+                                  (sort matches)
+        return $! MonitorStateGlobDirs glob globPath mtime' children'
+        -- Note that just because the directory has changed, we don't force
+        -- a cache rewrite with 'cacheChanged' since that has some cost, and
+        -- all we're saving is scanning the directory. But we do rebuild the
+        -- cache with the new mtime', so that if the cache is rewritten for
+        -- some other reason, we'll take advantage of that.
+
+  where
+    probeMergeResult :: MergeResult (FilePath, MonitorStateGlobRel) FilePath
+                     -> ChangedM (FilePath, MonitorStateGlobRel)
+
+    -- Only in cached (directory deleted)
+    probeMergeResult (OnlyInLeft (path, fstate)) = do
+      case allMatchingFiles (dirName </> path) fstate of
+        [] -> return (path, fstate)
+        -- Strictly speaking we should be returning 'CacheChanged' above
+        -- as we should prune the now-missing 'MonitorStateGlobRel'. However
+        -- we currently just leave these now-redundant entries in the
+        -- cache as they cost no IO and keeping them allows us to avoid
+        -- rewriting the cache.
+        (file:_) -> somethingChanged file
+
+    -- Only in current filesystem state (directory added)
+    probeMergeResult (OnlyInRight path) = do
+      fstate <- liftIO $ buildMonitorStateGlobRel Nothing Map.empty
+                           kindfile kinddir root (dirName </> path) globPath
+      case allMatchingFiles (dirName </> path) fstate of
+        (file:_) -> somethingChanged file
+        -- This is the only case where we use 'cacheChanged' because we can
+        -- have a whole new dir subtree (of unbounded size and cost), so we
+        -- need to save the state of that new subtree in the cache.
+        [] -> cacheChanged >> return (path, fstate)
+
+    -- Found in path
+    probeMergeResult (InBoth (path, fstate) _) = do
+      fstate' <- probeMonitorStateGlobRel kindfile kinddir
+                                          root (dirName </> path) fstate
+      return (path, fstate')
+
+    -- | Does a 'MonitorStateGlob' have any relevant files within it?
+    allMatchingFiles :: FilePath -> MonitorStateGlobRel -> [FilePath]
+    allMatchingFiles dir (MonitorStateGlobFiles _ _ entries) =
+      [ dir </> fname | (fname, _) <- entries ]
+    allMatchingFiles dir (MonitorStateGlobDirs _ _ _ entries) =
+      [ res
+      | (subdir, fstate) <- entries
+      , res <- allMatchingFiles (dir </> subdir) fstate ]
+    allMatchingFiles dir MonitorStateGlobDirTrailing =
+      [dir]
+
+probeMonitorStateGlobRel _ _ root dirName
+                         (MonitorStateGlobFiles glob mtime children) = do
+    change <- liftIO $ checkDirectoryModificationTime (root </> dirName) mtime
+    mtime' <- case change of
+      Nothing -> return mtime
+      Just mtime' -> do
+        -- directory modification time changed:
+        -- a matching file may have been added or deleted
+        matches <- return . filter (matchGlob glob)
+               =<< liftIO (getDirectoryContents (root </> dirName))
+
+        mapM_ probeMergeResult $
+              mergeBy (\(path1,_) path2 -> compare path1 path2)
+                      children
+                      (sort matches)
+        return mtime'
+
+    -- Check that none of the children have changed
+    forM_ children $ \(file, status) ->
+      probeMonitorStateFileStatus root (dirName </> file) status
+
+
+    return (MonitorStateGlobFiles glob mtime' children)
+    -- Again, we don't force a cache rewrite with 'cacheChanged', but we do use
+    -- the new mtime' if any.
+  where
+    probeMergeResult :: MergeResult (FilePath, MonitorStateFileStatus) FilePath
+                     -> ChangedM ()
+    probeMergeResult mr = case mr of
+      InBoth _ _            -> return ()
+      -- this is just to be able to accurately report which file changed:
+      OnlyInLeft  (path, _) -> somethingChanged (dirName </> path)
+      OnlyInRight  path     -> somethingChanged (dirName </> path)
+
+probeMonitorStateGlobRel _ _ _ _ MonitorStateGlobDirTrailing =
+    return MonitorStateGlobDirTrailing
+
+------------------------------------------------------------------------------
+
+-- | Update the input value and the set of files monitored by the
+-- 'FileMonitor', plus the cached value that may be returned in future.
+--
+-- This takes a snapshot of the state of the monitored files right now, so
+-- 'checkFileMonitorChanged' will look for file system changes relative to
+-- this snapshot.
+--
+-- This is typically done once the action has been completed successfully and
+-- we have the action's result and we know what files it looked at. See
+-- 'FileMonitor' for a full explanation.
+--
+-- If we do take the snapshot after the action has completed then we have a
+-- problem. The problem is that files might have changed /while/ the action was
+-- running but /after/ the action read them. If we take the snapshot after the
+-- action completes then we will miss these changes. The solution is to record
+-- a timestamp before beginning execution of the action and then we make the
+-- conservative assumption that any file that has changed since then has
+-- already changed, i.e. the file monitor state for these files will be such
+-- that 'checkFileMonitorChanged' will report that they have changed.
+--
+-- So if you do use 'updateFileMonitor' after the action (so you can discover
+-- the files used rather than predicting them in advance) then use
+-- 'beginUpdateFileMonitor' to get a timestamp and pass that. Alternatively,
+-- if you take the snapshot in advance of the action, or you're not monitoring
+-- any files then you can use @Nothing@ for the timestamp parameter.
+--
+updateFileMonitor
+  :: (Binary a, Binary b)
+  => FileMonitor a b          -- ^ cache file path
+  -> FilePath                 -- ^ root directory
+  -> Maybe MonitorTimestamp   -- ^ timestamp when the update action started
+  -> [MonitorFilePath]        -- ^ files of interest relative to root
+  -> a                        -- ^ the current key value
+  -> b                        -- ^ the current result value
+  -> IO ()
+updateFileMonitor monitor root startTime monitorFiles
+                  cachedKey cachedResult = do
+    hashcache <- readCacheFileHashes monitor
+    msfs <- buildMonitorStateFileSet startTime hashcache root monitorFiles
+    rewriteCacheFile monitor msfs cachedKey cachedResult
+
+-- | A timestamp to help with the problem of file changes during actions.
+-- See 'updateFileMonitor' for details.
+--
+newtype MonitorTimestamp = MonitorTimestamp ModTime
+
+-- | Record a timestamp at the beginning of an action, and when the action
+-- completes call 'updateFileMonitor' passing it the timestamp.
+-- See 'updateFileMonitor' for details.
+--
+beginUpdateFileMonitor :: IO MonitorTimestamp
+beginUpdateFileMonitor = MonitorTimestamp <$> getCurTime
+
+-- | Take the snapshot of the monitored files. That is, given the
+-- specification of the set of files we need to monitor, inspect the state
+-- of the file system now and collect the information we'll need later to
+-- determine if anything has changed.
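+--
+-- For example (a sketch; the root path and monitored files are illustrative):
+--
+-- > msfs <- buildMonitorStateFileSet Nothing Map.empty "/path/to/project"
+-- >           [ monitorFileHashed "example.cabal"
+-- >           , monitorNonExistentFile "overrides.config" ]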
+--
+buildMonitorStateFileSet :: Maybe MonitorTimestamp -- ^ optional: timestamp
+                                                   -- of the start of the action
+                         -> FileHashCache          -- ^ existing file hashes
+                         -> FilePath               -- ^ root directory
+                         -> [MonitorFilePath]      -- ^ patterns of interest
+                                                   --   relative to root
+                         -> IO MonitorStateFileSet
+buildMonitorStateFileSet mstartTime hashcache root =
+    go Map.empty []
+  where
+    go :: Map FilePath MonitorStateFile -> [MonitorStateGlob]
+       -> [MonitorFilePath] -> IO MonitorStateFileSet
+    go !singlePaths !globPaths [] =
+      return (MonitorStateFileSet singlePaths globPaths)
+
+    go !singlePaths !globPaths
+       (MonitorFile kindfile kinddir path : monitors) = do
+      monitorState <- MonitorStateFile kindfile kinddir
+                  <$> buildMonitorStateFile mstartTime hashcache
+                                            kindfile kinddir root path
+      go (Map.insert path monitorState singlePaths) globPaths monitors
+
+    go !singlePaths !globPaths
+       (MonitorFileGlob kindfile kinddir globPath : monitors) = do
+      monitorState <- buildMonitorStateGlob mstartTime hashcache
+                                            kindfile kinddir root globPath
+      go singlePaths (monitorState : globPaths) monitors
+
+
+buildMonitorStateFile :: Maybe MonitorTimestamp -- ^ start time of update
+                      -> FileHashCache          -- ^ existing file hashes
+                      -> MonitorKindFile -> MonitorKindDir
+                      -> FilePath               -- ^ the root directory
+                      -> FilePath
+                      -> IO MonitorStateFileStatus
+buildMonitorStateFile mstartTime hashcache kindfile kinddir root path = do
+    let abspath = root </> path
+    isFile <- doesFileExist abspath
+    isDir  <- doesDirectoryExist abspath
+    case (isFile, kindfile, isDir, kinddir) of
+      (_, FileNotExists, _, DirNotExists) ->
+        -- we don't need to care if it exists now, since we check at probe time
+        return MonitorStateNonExistent
+
+      (False, _, False, _) ->
+        return MonitorStateAlreadyChanged
+
+      (True, FileExists, _, _) ->
+        return MonitorStateFileExists
+
+      (True, FileModTime, _, _) ->
+        handleIOException MonitorStateAlreadyChanged $ do
+          mtime <- getModTime abspath
+          if changedDuringUpdate mstartTime mtime
+            then return MonitorStateAlreadyChanged
+            else return (MonitorStateFileModTime mtime)
+
+      (True, FileHashed, _, _) ->
+        handleIOException MonitorStateAlreadyChanged $ do
+          mtime <- getModTime abspath
+          if changedDuringUpdate mstartTime mtime
+            then return MonitorStateAlreadyChanged
+            else do hash <- getFileHash hashcache abspath abspath mtime
+                    return (MonitorStateFileHashed mtime hash)
+
+      (_, _, True, DirExists) ->
+        return MonitorStateDirExists
+
+      (_, _, True, DirModTime) ->
+        handleIOException MonitorStateAlreadyChanged $ do
+          mtime <- getModTime abspath
+          if changedDuringUpdate mstartTime mtime
+            then return MonitorStateAlreadyChanged
+            else return (MonitorStateDirModTime mtime)
+
+      (False, _, True, DirNotExists)  -> return MonitorStateAlreadyChanged
+      (True, FileNotExists, False, _) -> return MonitorStateAlreadyChanged
+
+-- | If we have a timestamp for the beginning of the update, then any file
+-- mtime later than this means that it changed during the update and we ought
+-- to consider the file as already changed.
+--
+changedDuringUpdate :: Maybe MonitorTimestamp -> ModTime -> Bool
+changedDuringUpdate (Just (MonitorTimestamp startTime)) mtime
+  = mtime > startTime
+changedDuringUpdate _ _ = False
+
+-- | Much like 'buildMonitorStateFileSet' but for the somewhat complicated case
+-- of a file glob.
+--
+-- This gets used both by 'buildMonitorStateFileSet' when we're taking the
+-- file system snapshot, but also by 'probeMonitorStateGlobRel' as part of
+-- checking the monitored (globbed) files for changes when we find a whole new
+-- subtree.
+--
+buildMonitorStateGlob :: Maybe MonitorTimestamp -- ^ start time of update
+                      -> FileHashCache          -- ^ existing file hashes
+                      -> MonitorKindFile -> MonitorKindDir
+                      -> FilePath               -- ^ the root directory
+                      -> FilePathGlob           -- ^ the matching glob
+                      -> IO MonitorStateGlob
+buildMonitorStateGlob mstartTime hashcache kindfile kinddir relroot
+                      (FilePathGlob globroot globPath) = do
+    root <- liftIO $ getFilePathRootDirectory globroot relroot
+    MonitorStateGlob kindfile kinddir globroot <$>
+      buildMonitorStateGlobRel
+        mstartTime hashcache kindfile kinddir root "." globPath
+
+buildMonitorStateGlobRel :: Maybe MonitorTimestamp -- ^ start time of update
+                         -> FileHashCache          -- ^ existing file hashes
+                         -> MonitorKindFile -> MonitorKindDir
+                         -> FilePath               -- ^ the root directory
+                         -> FilePath               -- ^ directory we are examining
+                                                   --   relative to the root
+                         -> FilePathGlobRel        -- ^ the matching glob
+                         -> IO MonitorStateGlobRel
+buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root
+                         dir globPath = do
+    let absdir = root </> dir
+    dirEntries <- getDirectoryContents absdir
+    dirMTime   <- getModTime absdir
+    case globPath of
+      GlobDir glob globPath' -> do
+        subdirs <- filterM (\subdir -> doesDirectoryExist (absdir </> subdir))
+                 $ filter (matchGlob glob) dirEntries
+        subdirStates <-
+          forM (sort subdirs) $ \subdir -> do
+            fstate <- buildMonitorStateGlobRel
+                        mstartTime hashcache kindfile kinddir root
+                        (dir </> subdir) globPath'
+            return (subdir, fstate)
+        return $! MonitorStateGlobDirs glob globPath' dirMTime subdirStates
+
+      GlobFile glob -> do
+        let files = filter (matchGlob glob) dirEntries
+        filesStates <-
+          forM (sort files) $ \file -> do
+            fstate <- buildMonitorStateFile
+                        mstartTime hashcache kindfile kinddir root
+                        (dir </> file)
+            return (file, fstate)
+        return $! MonitorStateGlobFiles glob dirMTime filesStates
+
+      GlobDirTrailing ->
+        return MonitorStateGlobDirTrailing
+
+
+-- | We really want to avoid re-hashing files all the time. We already make
+-- the assumption that if a file mtime has not changed then we don't need to
+-- bother checking if the content hash has changed. We can apply the same
+-- assumption when updating the file monitor state. In the typical case of
+-- updating a file monitor the set of files is the same or largely the same so
+-- we can grab the previously known content hashes with their corresponding
+-- mtimes.
+--
+type FileHashCache = Map FilePath (ModTime, Hash)
+
+-- | We declare it a cache hit if the mtime of a file is the same as before.
+--
+lookupFileHashCache :: FileHashCache -> FilePath -> ModTime -> Maybe Hash
+lookupFileHashCache hashcache file mtime = do
+    (mtime', hash) <- Map.lookup file hashcache
+    guard (mtime' == mtime)
+    return hash
+
+-- | Either get it from the cache or go read the file
+getFileHash :: FileHashCache -> FilePath -> FilePath -> ModTime -> IO Hash
+getFileHash hashcache relfile absfile mtime =
+    case lookupFileHashCache hashcache relfile mtime of
+      Just hash -> return hash
+      Nothing   -> readFileHash absfile
+
+-- | Build a 'FileHashCache' from the previous 'MonitorStateFileSet'. While
+-- in principle we could preserve the structure of the previous state, given
+-- that the set of files to monitor can change then it's simpler just to throw
+-- away the structure and use a finite map.
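+--
+-- This is how 'updateFileMonitor' avoids re-hashing files whose mtime is
+-- unchanged when it takes a new snapshot:
+--
+-- > hashcache <- readCacheFileHashes monitor
+-- > msfs      <- buildMonitorStateFileSet startTime hashcache root monitorFiles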
+--
+readCacheFileHashes :: (Binary a, Binary b)
+                    => FileMonitor a b -> IO FileHashCache
+readCacheFileHashes monitor =
+    handleDoesNotExist Map.empty $
+    handleErrorCall    Map.empty $ do
+      res <- readCacheFile monitor
+      case res of
+        Left _             -> return Map.empty
+        Right (msfs, _, _) -> return (mkFileHashCache msfs)
+  where
+    mkFileHashCache :: MonitorStateFileSet -> FileHashCache
+    mkFileHashCache (MonitorStateFileSet singlePaths globPaths) =
+                    collectAllFileHashes singlePaths
+        `Map.union` collectAllGlobHashes globPaths
+
+    collectAllFileHashes =
+      Map.mapMaybe $ \(MonitorStateFile _ _ fstate) -> case fstate of
+                       MonitorStateFileHashed mtime hash -> Just (mtime, hash)
+                       _                                 -> Nothing
+
+    collectAllGlobHashes globPaths =
+      Map.fromList [ (fpath, hash)
+                   | MonitorStateGlob _ _ _ gstate <- globPaths
+                   , (fpath, hash) <- collectGlobHashes "" gstate ]
+
+    collectGlobHashes dir (MonitorStateGlobDirs _ _ _ entries) =
+      [ res
+      | (subdir, fstate) <- entries
+      , res <- collectGlobHashes (dir </> subdir) fstate ]
+
+    collectGlobHashes dir (MonitorStateGlobFiles _ _ entries) =
+      [ (dir </> fname, (mtime, hash))
+      | (fname, MonitorStateFileHashed mtime hash) <- entries ]
+
+    collectGlobHashes _dir MonitorStateGlobDirTrailing =
+      []
+
+
+------------------------------------------------------------------------------
+-- Utils
+--
+
+-- | Within the @root@ directory, check if @file@'s 'ModTime' is
+-- the same as @mtime@, short-circuiting if it is different.
+probeFileModificationTime :: FilePath -> FilePath -> ModTime -> ChangedM ()
+probeFileModificationTime root file mtime = do
+    unchanged <- liftIO $ checkModificationTimeUnchanged root file mtime
+    unless unchanged (somethingChanged file)
+
+-- | Within the @root@ directory, check if @file@'s 'ModTime' and
+-- 'Hash' are the same as @mtime@ and @hash@, short-circuiting if they are
+-- different.
+probeFileModificationTimeAndHash :: FilePath -> FilePath -> ModTime -> Hash
+                                 -> ChangedM ()
+probeFileModificationTimeAndHash root file mtime hash = do
+    unchanged <- liftIO $
+      checkFileModificationTimeAndHashUnchanged root file mtime hash
+    unless unchanged (somethingChanged file)
+
+-- | Within the @root@ directory, check if @file@ still exists as a file.
+-- If it *does not* exist, short-circuit.
+probeFileExistence :: FilePath -> FilePath -> ChangedM ()
+probeFileExistence root file = do
+    existsFile <- liftIO $ doesFileExist (root </> file)
+    unless existsFile (somethingChanged file)
+
+-- | Within the @root@ directory, check if @dir@ still exists.
+-- If it *does not* exist, short-circuit.
+probeDirExistence :: FilePath -> FilePath -> ChangedM ()
+probeDirExistence root dir = do
+    existsDir <- liftIO $ doesDirectoryExist (root </> dir)
+    unless existsDir (somethingChanged dir)
+
+-- | Within the @root@ directory, check if @file@ still does not exist.
+-- If it *does* exist, short-circuit.
+probeFileNonExistence :: FilePath -> FilePath -> ChangedM ()
+probeFileNonExistence root file = do
+    existsFile <- liftIO $ doesFileExist (root </> file)
+    existsDir  <- liftIO $ doesDirectoryExist (root </> file)
+    when (existsFile || existsDir) (somethingChanged file)
+
+-- | Returns @True@ if, inside the @root@ directory, @file@ has the same
+-- 'ModTime' as @mtime@.
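+--
+-- A hypothetical check (the root and file paths are illustrative):
+--
+-- > unchanged <- checkModificationTimeUnchanged "/path/to/project" "Setup.hs" mtime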
+checkModificationTimeUnchanged :: FilePath -> FilePath
+                               -> ModTime -> IO Bool
+checkModificationTimeUnchanged root file mtime =
+  handleIOException False $ do
+    mtime' <- getModTime (root </> file)
+    return (mtime == mtime')
+
+-- | Returns @True@ if, inside the @root@ directory, @file@ has the
+-- same 'ModTime' and 'Hash' as @mtime@ and @chash@.
+checkFileModificationTimeAndHashUnchanged :: FilePath -> FilePath
+                                          -> ModTime -> Hash -> IO Bool
+checkFileModificationTimeAndHashUnchanged root file mtime chash =
+  handleIOException False $ do
+    mtime' <- getModTime (root </> file)
+    if mtime == mtime'
+      then return True
+      else do
+        chash' <- readFileHash (root </> file)
+        return (chash == chash')
+
+-- | Read a non-cryptographic hash of a @file@.
+readFileHash :: FilePath -> IO Hash
+readFileHash file =
+    withBinaryFile file ReadMode $ \hnd ->
+      evaluate . Hashable.hash =<< BS.hGetContents hnd
+
+-- | Given a directory @dir@, return @Nothing@ if its 'ModTime'
+-- is the same as @mtime@, and the new 'ModTime' if it is not.
+checkDirectoryModificationTime :: FilePath -> ModTime -> IO (Maybe ModTime)
+checkDirectoryModificationTime dir mtime =
+  handleIOException Nothing $ do
+    mtime' <- getModTime dir
+    if mtime == mtime'
+      then return Nothing
+      else return (Just mtime')
+
+-- | Run an IO computation, returning @e@ if there is an 'error'
+-- call. ('ErrorCall')
+handleErrorCall :: a -> IO a -> IO a
+handleErrorCall e =
+    handle (\(ErrorCall _) -> return e)
+
+-- | Run an IO computation, returning @e@ if there is any 'IOException'.
+--
+-- This policy is OK in the file monitor code because it just causes the
+-- monitor to report that something changed, and then code reacting to that
+-- will normally encounter the same IO exception when it re-runs the action
+-- that uses the file.
+--
+handleIOException :: a -> IO a -> IO a
+handleIOException e =
+    handle (anyIOException e)
+  where
+    anyIOException :: a -> IOException -> IO a
+    anyIOException x _ = return x
+
+
+------------------------------------------------------------------------------
+-- Instances
+--
+
+instance Binary MonitorStateFileSet where
+  put (MonitorStateFileSet singlePaths globPaths) = do
+    put (1 :: Int) -- version
+    put singlePaths
+    put globPaths
+  get = do
+    ver <- get
+    if ver == (1 :: Int)
+      then do singlePaths <- get
+              globPaths   <- get
+              return $! MonitorStateFileSet singlePaths globPaths
+      else fail "MonitorStateFileSet: wrong version"
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Freeze.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Freeze.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Freeze.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Freeze.hs 2016-12-23 10:35:30.000000000 +0000
@@ -0,0 +1,259 @@
+{-# LANGUAGE CPP #-}
+-----------------------------------------------------------------------------
+-- |
+-- Module      :  Distribution.Client.Freeze
+-- Copyright   :  (c) David Himmelstrup 2005
+--                    Duncan Coutts 2011
+-- License     :  BSD-like
+--
+-- Maintainer  :  cabal-devel@gmail.com
+-- Stability   :  provisional
+-- Portability :  portable
+--
+-- The cabal freeze command
+-----------------------------------------------------------------------------
+module Distribution.Client.Freeze (
+    freeze, getFreezePkgs
+  ) where
+
+import Distribution.Client.Config ( SavedConfig(..)
) +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types + ( ConstraintSource(..), LabeledPackageConstraint(..) ) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.InstallPlan + ( InstallPlan, PlanPackage ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.PkgConfigDb + ( PkgConfigDb, readPkgConfigDb ) +import Distribution.Client.Setup + ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) + , RepoContext(..) ) +import Distribution.Client.Sandbox.PackageEnvironment + ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, + userPackageEnvironmentFile ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) ) + +import Distribution.Package + ( Package, packageId, packageName, packageVersion ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, PackageDBStack ) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Program + ( ProgramConfiguration ) +import Distribution.Simple.Setup + ( fromFlag, fromFlagOrDefault, flagToMaybe ) +import Distribution.Simple.Utils + ( die, notice, debug, writeFileAtomic ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) + +import Control.Monad + ( when ) +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( mempty ) +#endif +import Data.Version + ( showVersion ) +import Distribution.Version + ( thisVersion ) + +-- ------------------------------------------------------------ +-- * The freeze command +-- ------------------------------------------------------------ + +-- | Freeze all of the dependencies by writing a constraints section +-- constraining each dependency to an exact version. +-- +freeze :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO () +freeze verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo + globalFlags freezeFlags = do + + pkgs <- getFreezePkgs + verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo + globalFlags freezeFlags + + if null pkgs + then notice verbosity $ "No packages to be frozen. " + ++ "As this package has no dependencies." + else if dryRun + then notice verbosity $ unlines $ + "The following packages would be frozen:" + : formatPkgs pkgs + + else freezePackages verbosity globalFlags pkgs + + where + dryRun = fromFlag (freezeDryRun freezeFlags) + +-- | Get the list of packages whose versions would be frozen by the @freeze@ +-- command. 
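+--
+-- Both the @freeze@ and @gen-bounds@ commands call this in the same way,
+-- roughly (with arguments as already computed by the caller):
+--
+-- > pkgs <- getFreezePkgs verbosity packageDBs repoCtxt comp platform conf
+-- >                       mSandboxPkgInfo globalFlags freezeFlags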
+getFreezePkgs :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO [PlanPackage] +getFreezePkgs verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo + globalFlags freezeFlags = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity conf + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + [UserTargetLocalDir "."] + + sanityCheck pkgSpecifiers + planPackages + verbosity comp platform mSandboxPkgInfo freezeFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + where + sanityCheck pkgSpecifiers = do + when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ + die $ "internal error: 'resolveUserTargets' returned " + ++ "unexpected named package specifiers!" + when (length pkgSpecifiers /= 1) $ + die $ "internal error: 'resolveUserTargets' returned " + ++ "unexpected source package specifiers!" + +planPackages :: Verbosity + -> Compiler + -> Platform + -> Maybe SandboxPackageInfo + -> FreezeFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier SourcePackage] + -> IO [PlanPackage] +planPackages verbosity comp platform mSandboxPkgInfo freezeFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do + + solver <- chooseSolver verbosity + (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) + notice verbosity "Resolving dependencies..." + + installPlan <- foldProgress logMsg die return $ + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + return $ pruneInstallPlan installPlan pkgSpecifiers + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + . addConstraints + [ let pkg = pkgSpecifierTarget pkgSpecifier + pc = PackageConstraintStanzas pkg stanzas + in LabeledPackageConstraint pc ConstraintSourceFreeze + | pkgSpecifier <- pkgSpecifiers ] + + . maybe id applySandboxInstallPolicy mSandboxPkgInfo + + $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers + + logMsg message rest = debug verbosity message >> rest + + stanzas = [ TestStanzas | testsEnabled ] + ++ [ BenchStanzas | benchmarksEnabled ] + testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags + benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags + + reorderGoals = fromFlag (freezeReorderGoals freezeFlags) + independentGoals = fromFlag (freezeIndependentGoals freezeFlags) + shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) + strongFlags = fromFlag (freezeStrongFlags freezeFlags) + maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) + + +-- | Remove all unneeded packages from an install plan. +-- +-- A package is unneeded if it is either +-- +-- 1) the package that we are freezing, or +-- +-- 2) not a dependency (directly or transitively) of the package we are +-- freezing. This is useful for removing previously installed packages +-- which are no longer required from the install plan. 
+pruneInstallPlan :: InstallPlan + -> [PackageSpecifier SourcePackage] + -> [PlanPackage] +pruneInstallPlan installPlan pkgSpecifiers = + removeSelf pkgIds $ + InstallPlan.dependencyClosure installPlan (map fakeUnitId pkgIds) + where + pkgIds = [ packageId pkg + | SpecificSourcePackage pkg <- pkgSpecifiers ] + removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) + removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " + ++ "unexpected package specifiers!" + + +freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () +freezePackages verbosity globalFlags pkgs = do + + pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ + loadUserConfig verbosity "" (flagToMaybe . globalConstraintsFile $ globalFlags) + writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv + where + addFrozenConstraints config = + config { + savedConfigureExFlags = (savedConfigureExFlags config) { + configExConstraints = map constraint pkgs + } + } + constraint pkg = + (pkgIdToConstraint $ packageId pkg, ConstraintSourceUserConfig userPackageEnvironmentFile) + where + pkgIdToConstraint pkgId = + UserConstraintVersion (packageName pkgId) + (thisVersion $ packageVersion pkgId) + createPkgEnv config = mempty { pkgEnvSavedConfig = config } + showPkgEnv = BS.Char8.pack . showPackageEnvironment + + +formatPkgs :: Package pkg => [pkg] -> [String] +formatPkgs = map $ showPkg . packageId + where + showPkg pid = name pid ++ " == " ++ version pid + name = display . packageName + version = showVersion . packageVersion diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GenBounds.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GenBounds.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GenBounds.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GenBounds.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,159 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.GenBounds +-- Copyright : (c) Doug Beardsley 2015 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- The cabal gen-bounds command for generating PVP-compliant version bounds. +----------------------------------------------------------------------------- +module Distribution.Client.GenBounds ( + genBounds + ) where + +import Data.Version + ( Version(..), showVersion ) +import Distribution.Client.Init + ( incVersion ) +import Distribution.Client.Freeze + ( getFreezePkgs ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) ) +import Distribution.Client.Setup + ( GlobalFlags(..), FreezeFlags(..), RepoContext ) +import Distribution.Package + ( Package(..), Dependency(..), PackageName(..) 
+ , packageName, packageVersion ) +import Distribution.PackageDescription + ( buildDepends ) +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription ) +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.Simple.Compiler + ( Compiler, PackageDBStack, compilerInfo ) +import Distribution.Simple.Program + ( ProgramConfiguration ) +import Distribution.Simple.Utils + ( tryFindPackageDesc ) +import Distribution.System + ( Platform ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Version + ( LowerBound(..), UpperBound(..), VersionRange(..), asVersionIntervals + , orLaterVersion, earlierVersion, intersectVersionRanges ) +import System.Directory + ( getCurrentDirectory ) + +-- | Does this version range have an upper bound? +hasUpperBound :: VersionRange -> Bool +hasUpperBound vr = + case asVersionIntervals vr of + [] -> False + is -> if snd (last is) == NoUpperBound then False else True + +-- | Given a version, return an API-compatible (according to PVP) version range. +-- +-- Example: @0.4.1.2@ produces the version range @>= 0.4.1 && < 0.5@. +-- +-- This version is slightly different than the one in +-- 'Distribution.Client.Init'. This one uses a.b.c as the lower bound because +-- the user could be using a new function introduced in a.b.c which would make +-- ">= a.b" incorrect. +pvpize :: Version -> VersionRange +pvpize v = orLaterVersion (vn 3) + `intersectVersionRanges` + earlierVersion (incVersion 1 (vn 2)) + where + vn n = (v { versionBranch = take n (versionBranch v) }) + +-- | Show the PVP-mandated version range for this package. The @padTo@ parameter +-- specifies the width of the package name column. +showBounds :: Package pkg => Int -> pkg -> String +showBounds padTo p = unwords $ + (padAfter padTo $ unPackageName $ packageName p) : + map showInterval (asVersionIntervals $ pvpize $ packageVersion p) + where + padAfter :: Int -> String -> String + padAfter n str = str ++ replicate (n - length str) ' ' + + showInterval :: (LowerBound, UpperBound) -> String + showInterval (LowerBound _ _, NoUpperBound) = + error "Error: expected upper bound...this should never happen!" + showInterval (LowerBound l _, UpperBound u _) = + unwords [">=", showVersion l, "&& <", showVersion u] + +-- | Entry point for the @gen-bounds@ command. +genBounds + :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO () +genBounds verbosity packageDBs repoCtxt comp platform conf mSandboxPkgInfo + globalFlags freezeFlags = do + + let cinfo = compilerInfo comp + + cwd <- getCurrentDirectory + path <- tryFindPackageDesc cwd + gpd <- readPackageDescription verbosity path + let epd = finalizePackageDescription [] (const True) platform cinfo [] gpd + case epd of + Left _ -> putStrLn "finalizePackageDescription failed" + Right (pd,_) -> do + let needBounds = filter (not . hasUpperBound . depVersion) $ + buildDepends pd + + if (null needBounds) + then putStrLn + "Congratulations, all your dependencies have upper bounds!" + else go needBounds + where + go needBounds = do + pkgs <- getFreezePkgs + verbosity packageDBs repoCtxt comp platform conf + mSandboxPkgInfo globalFlags freezeFlags + + putStrLn boundsNeededMsg + + let isNeeded pkg = unPackageName (packageName pkg) + `elem` map depName needBounds + let thePkgs = filter isNeeded pkgs + + let padTo = maximum $ map (length . unPackageName . 
packageName) pkgs + mapM_ (putStrLn . (++",") . showBounds padTo) thePkgs + + depName :: Dependency -> String + depName (Dependency (PackageName nm) _) = nm + + depVersion :: Dependency -> VersionRange + depVersion (Dependency _ vr) = vr + +-- | The message printed when some dependencies are found to be lacking proper +-- PVP-mandated bounds. +boundsNeededMsg :: String +boundsNeededMsg = unlines + [ "" + , "The following packages need bounds and here is a suggested starting point." + , "You can copy and paste this into the build-depends section in your .cabal" + , "file and it should work (with the appropriate removal of commas)." + , "" + , "Note that version bounds are a statement that you've successfully built and" + , "tested your package and expect it to work with any of the specified package" + , "versions (PROVIDED that those packages continue to conform with the PVP)." + , "Therefore, the version bounds generated here are the most conservative" + , "based on the versions that you are currently building with. If you know" + , "your package will work with versions outside the ranges generated here," + , "feel free to widen them." + , "" + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Get.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Get.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Get.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Get.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,355 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Get +-- Copyright : (c) Andrea Vezzosi 2008 +-- Duncan Coutts 2011 +-- John Millikin 2012 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- The 'cabal get' command. +----------------------------------------------------------------------------- + +module Distribution.Client.Get ( + get + ) where + +import Distribution.Package + ( PackageId, packageId, packageName ) +import Distribution.Simple.Setup + ( Flag(..), fromFlag, fromFlagOrDefault ) +import Distribution.Simple.Utils + ( notice, die, info, writeFileAtomic ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Text(display) +import qualified Distribution.PackageDescription as PD + +import Distribution.Client.Setup + ( GlobalFlags(..), GetFlags(..), RepoContext(..) 
) +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.Dependency +import Distribution.Client.FetchUtils +import qualified Distribution.Client.Tar as Tar (extractTarGzFile) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages ) +import Distribution.Client.Compat.Process + ( readProcessWithExitCode ) +import Distribution.Compat.Exception + ( catchIO ) + +import Control.Exception + ( finally ) +import Control.Monad + ( filterM, forM_, unless, when ) +import Data.List + ( sortBy ) +import qualified Data.Map +import Data.Maybe + ( listToMaybe, mapMaybe ) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( mempty ) +#endif +import Data.Ord + ( comparing ) +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist + , getCurrentDirectory, setCurrentDirectory + ) +import System.Exit + ( ExitCode(..) ) +import System.FilePath + ( (), (<.>), addTrailingPathSeparator ) +import System.Process + ( rawSystem ) + + +-- | Entry point for the 'cabal get' command. +get :: Verbosity + -> RepoContext + -> GlobalFlags + -> GetFlags + -> [UserTarget] + -> IO () +get verbosity _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." + +get verbosity repoCtxt globalFlags getFlags userTargets = do + let useFork = case (getSourceRepository getFlags) of + NoFlag -> False + _ -> True + + unless useFork $ + mapM_ checkTarget userTargets + + sourcePkgDb <- getSourcePackages verbosity repoCtxt + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + + pkgs <- either (die . unlines . map show) return $ + resolveWithoutDependencies + (resolverParams sourcePkgDb pkgSpecifiers) + + unless (null prefix) $ + createDirectoryIfMissing True prefix + + if useFork + then fork pkgs + else unpack pkgs + + where + resolverParams sourcePkgDb pkgSpecifiers = + --TODO: add command-line constraint and preference args for unpack + standardInstallPolicy mempty sourcePkgDb pkgSpecifiers + + prefix = fromFlagOrDefault "" (getDestDir getFlags) + + fork :: [SourcePackage] -> IO () + fork pkgs = do + let kind = fromFlag . getSourceRepository $ getFlags + branchers <- findUsableBranchers + mapM_ (forkPackage verbosity branchers prefix kind) pkgs + + unpack :: [SourcePackage] -> IO () + unpack pkgs = do + forM_ pkgs $ \pkg -> do + location <- fetchPackage verbosity repoCtxt (packageSource pkg) + let pkgid = packageId pkg + descOverride | usePristine = Nothing + | otherwise = packageDescrOverride pkg + case location of + LocalTarballPackage tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + RemoteTarballPackage _tarballURL tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + RepoTarballPackage _repo _pkgid tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + LocalUnpackedPackage _ -> + error "Distribution.Client.Get.unpack: the impossible happened." + where + usePristine = fromFlagOrDefault False (getPristine getFlags) + +checkTarget :: UserTarget -> IO () +checkTarget target = case target of + UserTargetLocalDir dir -> die (notTarball dir) + UserTargetLocalCabalFile file -> die (notTarball file) + _ -> return () + where + notTarball t = + "The 'get' command is for tarball packages. " + ++ "The target '" ++ t ++ "' is not a tarball." 
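-- Editorial illustration (not part of the upstream patch): 'get' runs
-- 'checkTarget' only on the tarball-unpacking path (the "unless useFork"
-- guard above). Assuming the usual 'UserTarget' constructors exported from
-- Distribution.Client.Targets, and with 'dep' a placeholder Dependency,
-- targets behave roughly like this:
--
--   checkTarget (UserTargetLocalDir "./foo")            -- dies: not a tarball
--   checkTarget (UserTargetLocalCabalFile "foo.cabal")  -- dies: not a tarball
--   checkTarget (UserTargetNamed dep)                   -- accepted; fetched
--                                                       -- from the repository
--
-- With --source-repository the fork path is taken instead and this check is
-- skipped entirely.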
+ +-- ------------------------------------------------------------ +-- * Unpacking the source tarball +-- ------------------------------------------------------------ + +unpackPackage :: Verbosity -> FilePath -> PackageId + -> PackageDescriptionOverride + -> FilePath -> IO () +unpackPackage verbosity prefix pkgid descOverride pkgPath = do + let pkgdirname = display pkgid + pkgdir = prefix pkgdirname + pkgdir' = addTrailingPathSeparator pkgdir + existsDir <- doesDirectoryExist pkgdir + when existsDir $ die $ + "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking." + existsFile <- doesFileExist pkgdir + when existsFile $ die $ + "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." + notice verbosity $ "Unpacking to " ++ pkgdir' + Tar.extractTarGzFile prefix pkgdirname pkgPath + + case descOverride of + Nothing -> return () + Just pkgtxt -> do + let descFilePath = pkgdir display (packageName pkgid) <.> "cabal" + info verbosity $ + "Updating " ++ descFilePath + ++ " with the latest revision from the index." + writeFileAtomic descFilePath pkgtxt + + +-- ------------------------------------------------------------ +-- * Forking the source repository +-- ------------------------------------------------------------ + +data BranchCmd = BranchCmd (Verbosity -> FilePath -> IO ExitCode) + +data Brancher = Brancher + { brancherBinary :: String + , brancherBuildCmd :: PD.SourceRepo -> Maybe BranchCmd + } + +-- | The set of all supported branch drivers. +allBranchers :: [(PD.RepoType, Brancher)] +allBranchers = + [ (PD.Bazaar, branchBzr) + , (PD.Darcs, branchDarcs) + , (PD.Git, branchGit) + , (PD.Mercurial, branchHg) + , (PD.SVN, branchSvn) + ] + +-- | Find which usable branch drivers (selected from 'allBranchers') are +-- available and usable on the local machine. +-- +-- Each driver's main command is run with @--help@, and if the child process +-- exits successfully, that brancher is considered usable. +findUsableBranchers :: IO (Data.Map.Map PD.RepoType Brancher) +findUsableBranchers = do + let usable (_, brancher) = flip catchIO (const (return False)) $ do + let cmd = brancherBinary brancher + (exitCode, _, _) <- readProcessWithExitCode cmd ["--help"] "" + return (exitCode == ExitSuccess) + pairs <- filterM usable allBranchers + return (Data.Map.fromList pairs) + +-- | Fork a single package from a remote source repository to the local +-- file system. +forkPackage :: Verbosity + -> Data.Map.Map PD.RepoType Brancher + -- ^ Branchers supported by the local machine. + -> FilePath + -- ^ The directory in which new branches or repositories will + -- be created. + -> (Maybe PD.RepoKind) + -- ^ Which repo to choose. + -> SourcePackage + -- ^ The package to fork. 
+ -> IO () +forkPackage verbosity branchers prefix kind src = do + let desc = PD.packageDescription (packageDescription src) + pkgid = display (packageId src) + pkgname = display (packageName src) + destdir = prefix pkgname + + destDirExists <- doesDirectoryExist destdir + when destDirExists $ do + die ("The directory " ++ show destdir ++ " already exists, not forking.") + + destFileExists <- doesFileExist destdir + when destFileExists $ do + die ("A file " ++ show destdir ++ " is in the way, not forking.") + + let repos = PD.sourceRepos desc + case findBranchCmd branchers repos kind of + Just (BranchCmd io) -> do + exitCode <- io verbosity destdir + case exitCode of + ExitSuccess -> return () + ExitFailure _ -> die ("Couldn't fork package " ++ pkgid) + Nothing -> case repos of + [] -> die ("Package " ++ pkgid + ++ " does not have any source repositories.") + _ -> die ("Package " ++ pkgid + ++ " does not have any usable source repositories.") + +-- | Given a set of possible branchers, and a set of possible source +-- repositories, find a repository that is both 1) likely to be specific to +-- this source version and 2) is supported by the local machine. +findBranchCmd :: Data.Map.Map PD.RepoType Brancher -> [PD.SourceRepo] + -> (Maybe PD.RepoKind) -> Maybe BranchCmd +findBranchCmd branchers allRepos maybeKind = cmd where + -- Sort repositories by kind, from This to Head to Unknown. Repositories + -- with equivalent kinds are selected based on the order they appear in + -- the Cabal description file. + repos' = sortBy (comparing thisFirst) allRepos + thisFirst r = case PD.repoKind r of + PD.RepoThis -> 0 :: Int + PD.RepoHead -> case PD.repoTag r of + -- If the type is 'head' but the author specified a tag, they + -- probably meant to create a 'this' repository but screwed up. + Just _ -> 0 + Nothing -> 1 + PD.RepoKindUnknown _ -> 2 + + -- If the user has specified the repo kind, filter out the repositories + -- she's not interested in. + repos = maybe repos' (\k -> filter ((==) k . PD.repoKind) repos') maybeKind + + repoBranchCmd repo = do + t <- PD.repoType repo + brancher <- Data.Map.lookup t branchers + brancherBuildCmd brancher repo + + cmd = listToMaybe (mapMaybe repoBranchCmd repos) + +-- | Branch driver for Bazaar. +branchBzr :: Brancher +branchBzr = Brancher "bzr" $ \repo -> do + src <- PD.repoLocation repo + let args dst = case PD.repoTag repo of + Just tag -> ["branch", src, dst, "-r", "tag:" ++ tag] + Nothing -> ["branch", src, dst] + return $ BranchCmd $ \verbosity dst -> do + notice verbosity ("bzr: branch " ++ show src) + rawSystem "bzr" (args dst) + +-- | Branch driver for Darcs. +branchDarcs :: Brancher +branchDarcs = Brancher "darcs" $ \repo -> do + src <- PD.repoLocation repo + let args dst = case PD.repoTag repo of + Just tag -> ["get", src, dst, "-t", tag] + Nothing -> ["get", src, dst] + return $ BranchCmd $ \verbosity dst -> do + notice verbosity ("darcs: get " ++ show src) + rawSystem "darcs" (args dst) + +-- | Branch driver for Git. 
+branchGit :: Brancher +branchGit = Brancher "git" $ \repo -> do + src <- PD.repoLocation repo + let branchArgs = case PD.repoBranch repo of + Just b -> ["--branch", b] + Nothing -> [] + let postClone dst = case PD.repoTag repo of + Just t -> do + cwd <- getCurrentDirectory + setCurrentDirectory dst + finally + (rawSystem "git" (["checkout", t] ++ branchArgs)) + (setCurrentDirectory cwd) + Nothing -> return ExitSuccess + return $ BranchCmd $ \verbosity dst -> do + notice verbosity ("git: clone " ++ show src) + code <- rawSystem "git" (["clone", src, dst] ++ branchArgs) + case code of + ExitFailure _ -> return code + ExitSuccess -> postClone dst + +-- | Branch driver for Mercurial. +branchHg :: Brancher +branchHg = Brancher "hg" $ \repo -> do + src <- PD.repoLocation repo + let branchArgs = case PD.repoBranch repo of + Just b -> ["--branch", b] + Nothing -> [] + let tagArgs = case PD.repoTag repo of + Just t -> ["--rev", t] + Nothing -> [] + let args dst = ["clone", src, dst] ++ branchArgs ++ tagArgs + return $ BranchCmd $ \verbosity dst -> do + notice verbosity ("hg: clone " ++ show src) + rawSystem "hg" (args dst) + +-- | Branch driver for Subversion. +branchSvn :: Brancher +branchSvn = Brancher "svn" $ \repo -> do + src <- PD.repoLocation repo + let args dst = ["checkout", src, dst] + return $ BranchCmd $ \verbosity dst -> do + notice verbosity ("svn: checkout " ++ show src) + rawSystem "svn" (args dst) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GlobalFlags.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GlobalFlags.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GlobalFlags.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GlobalFlags.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,267 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.GlobalFlags ( + GlobalFlags(..) + , defaultGlobalFlags + , RepoContext(..) + , withRepoContext + , withRepoContext' + ) where + +import Distribution.Client.Types + ( Repo(..), RemoteRepo(..) 
) +import Distribution.Compat.Semigroup +import Distribution.Simple.Setup + ( Flag(..), fromFlag, flagToMaybe ) +import Distribution.Utils.NubList + ( NubList, fromNubList ) +import Distribution.Client.HttpUtils + ( HttpTransport, configureTransport ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Simple.Utils + ( info ) + +import Data.Maybe + ( fromMaybe ) +import Control.Concurrent + ( MVar, newMVar, modifyMVar ) +import Control.Exception + ( throwIO ) +import Control.Monad + ( when ) +import System.FilePath + ( () ) +import Network.URI + ( uriScheme, uriPath ) +import Data.Map + ( Map ) +import qualified Data.Map as Map +import GHC.Generics ( Generic ) + +import qualified Hackage.Security.Client as Sec +import qualified Hackage.Security.Util.Path as Sec +import qualified Hackage.Security.Util.Pretty as Sec +import qualified Hackage.Security.Client.Repository.Cache as Sec +import qualified Hackage.Security.Client.Repository.Local as Sec.Local +import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote +import qualified Distribution.Client.Security.HTTP as Sec.HTTP + +-- ------------------------------------------------------------ +-- * Global flags +-- ------------------------------------------------------------ + +-- | Flags that apply at the top level, not to any sub-command. +data GlobalFlags = GlobalFlags { + globalVersion :: Flag Bool, + globalNumericVersion :: Flag Bool, + globalConfigFile :: Flag FilePath, + globalSandboxConfigFile :: Flag FilePath, + globalConstraintsFile :: Flag FilePath, + globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. + globalCacheDir :: Flag FilePath, + globalLocalRepos :: NubList FilePath, + globalLogsDir :: Flag FilePath, + globalWorldFile :: Flag FilePath, + globalRequireSandbox :: Flag Bool, + globalIgnoreSandbox :: Flag Bool, + globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates + globalHttpTransport :: Flag String + } deriving Generic + +defaultGlobalFlags :: GlobalFlags +defaultGlobalFlags = GlobalFlags { + globalVersion = Flag False, + globalNumericVersion = Flag False, + globalConfigFile = mempty, + globalSandboxConfigFile = mempty, + globalConstraintsFile = mempty, + globalRemoteRepos = mempty, + globalCacheDir = mempty, + globalLocalRepos = mempty, + globalLogsDir = mempty, + globalWorldFile = mempty, + globalRequireSandbox = Flag False, + globalIgnoreSandbox = Flag False, + globalIgnoreExpiry = Flag False, + globalHttpTransport = mempty + } + +instance Monoid GlobalFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup GlobalFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Repo context +-- ------------------------------------------------------------ + +-- | Access to repositories +data RepoContext = RepoContext { + -- | All user-specified repositories + repoContextRepos :: [Repo] + + -- | Get the HTTP transport + -- + -- The transport will be initialized on the first call to this function. + -- + -- NOTE: It is important that we don't eagerly initialize the transport. + -- Initializing the transport is not free, and especially in contexts where + -- we don't know a-priori whether or not we need the transport (for instance + -- when using cabal in "nix mode") incurring the overhead of transport + -- initialization on _every_ invocation (eg @cabal build@) is undesirable. 
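-- (Editorial note expanding on the point above, not from the original source:
-- because the transport is obtained through this IO action rather than stored
-- eagerly, a command that never performs network access simply never runs it.
-- In 'withRepoContext'' below the action is backed by an MVar, so
-- 'configureTransport' executes on the first call only and the result is
-- cached for subsequent calls.)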
+ , repoContextGetTransport :: IO HttpTransport + + -- | Get the (initialized) secure repo + -- + -- (the 'Repo' type itself is stateless and must remain so, because it + -- must be serializable) + , repoContextWithSecureRepo :: forall a. + Repo + -> (forall down. Sec.Repository down -> IO a) + -> IO a + + -- | Should we ignore expiry times (when checking security)? + , repoContextIgnoreExpiry :: Bool + } + +-- | Wrapper around 'Repository', hiding the type argument +data SecureRepo = forall down. SecureRepo (Sec.Repository down) + +withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a +withRepoContext verbosity globalFlags = + withRepoContext' + verbosity + (fromNubList (globalRemoteRepos globalFlags)) + (fromNubList (globalLocalRepos globalFlags)) + (fromFlag (globalCacheDir globalFlags)) + (flagToMaybe (globalHttpTransport globalFlags)) + (flagToMaybe (globalIgnoreExpiry globalFlags)) + +withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] + -> FilePath -> Maybe String -> Maybe Bool + -> (RepoContext -> IO a) + -> IO a +withRepoContext' verbosity remoteRepos localRepos + sharedCacheDir httpTransport ignoreExpiry = \callback -> do + transportRef <- newMVar Nothing + let httpLib = Sec.HTTP.transportAdapter + verbosity + (getTransport transportRef) + initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> + callback RepoContext { + repoContextRepos = allRemoteRepos + ++ map RepoLocal localRepos + , repoContextGetTransport = getTransport transportRef + , repoContextWithSecureRepo = withSecureRepo secureRepos' + , repoContextIgnoreExpiry = fromMaybe False ignoreExpiry + } + where + secureRemoteRepos = + [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] + allRemoteRepos = + [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir + | remote <- remoteRepos + , let cacheDir = sharedCacheDir remoteRepoName remote + isSecure = remoteRepoSecure remote == Just True + ] + + getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport + getTransport transportRef = + modifyMVar transportRef $ \mTransport -> do + transport <- case mTransport of + Just tr -> return tr + Nothing -> configureTransport verbosity httpTransport + return (Just transport, transport) + + withSecureRepo :: Map Repo SecureRepo + -> Repo + -> (forall down. Sec.Repository down -> IO a) + -> IO a + withSecureRepo secureRepos repo callback = + case Map.lookup repo secureRepos of + Just (SecureRepo secureRepo) -> callback secureRepo + Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" + +-- | Initialize the provided secure repositories +-- +-- Assumed invariant: `remoteRepoSecure` should be set for all these repos. +initSecureRepos :: forall a. Verbosity + -> Sec.HTTP.HttpLib + -> [(RemoteRepo, FilePath)] + -> (Map Repo SecureRepo -> IO a) + -> IO a +initSecureRepos verbosity httpLib repos callback = go Map.empty repos + where + go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a + go !acc [] = callback acc + go !acc ((r,cacheDir):rs) = do + cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir + initSecureRepo verbosity httpLib r cachePath $ \r' -> + go (Map.insert (RepoSecure r cacheDir) r' acc) rs + +-- | Initialize the given secure repo +-- +-- The security library has its own concept of a "local" repository, distinct +-- from @cabal-install@'s; these are secure repositories, but live in the local +-- file system. 
We use the convention that these repositories are identified by +-- URLs of the form @file:/path/to/local/repo@. +initSecureRepo :: Verbosity + -> Sec.HTTP.HttpLib + -> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed) + -> Sec.Path Sec.Absolute -- ^ Cache dir + -> (SecureRepo -> IO a) -- ^ Callback + -> IO a +initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do + withRepo $ \r -> do + requiresBootstrap <- Sec.requiresBootstrap r + when requiresBootstrap $ Sec.uncheckClientErrors $ + Sec.bootstrap r + (map Sec.KeyId remoteRepoRootKeys) + (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold)) + callback $ SecureRepo r + where + -- Initialize local or remote repo depending on the URI + withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a + withRepo callback | uriScheme remoteRepoURI == "file:" = do + dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI) + Sec.Local.withRepository dir + cache + Sec.hackageRepoLayout + Sec.hackageIndexLayout + logTUF + callback + withRepo callback = + Sec.Remote.withRepository httpLib + [remoteRepoURI] + Sec.Remote.defaultRepoOpts + cache + Sec.hackageRepoLayout + Sec.hackageIndexLayout + logTUF + callback + + cache :: Sec.Cache + cache = Sec.Cache { + cacheRoot = cachePath + , cacheLayout = Sec.cabalCacheLayout { + Sec.cacheLayoutIndexTar = cacheFn "01-index.tar" + , Sec.cacheLayoutIndexIdx = cacheFn "01-index.tar.idx" + , Sec.cacheLayoutIndexTarGz = cacheFn "01-index.tar.gz" + } + } + + cacheFn :: FilePath -> Sec.CachePath + cacheFn = Sec.rootPath . Sec.fragment + + -- We display any TUF progress only in verbose mode, including any transient + -- verification errors. If verification fails, then the final exception that + -- is thrown will of course be shown. + logTUF :: Sec.LogMessage -> IO () + logTUF = info verbosity . Sec.pretty diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Glob.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Glob.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Glob.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,269 @@ +{-# LANGUAGE CPP, DeriveGeneric #-} + +--TODO: [code cleanup] plausibly much of this module should be merged with +-- similar functionality in Cabal. +module Distribution.Client.Glob + ( FilePathGlob(..) + , FilePathRoot(..) + , FilePathGlobRel(..) + , Glob + , GlobPiece(..) 
+ , matchFileGlob + , matchFileGlobRel + , matchGlob + , isTrivialFilePathGlob + , getFilePathRootDirectory + ) where + +import Data.Char (toUpper) +import Data.List (stripPrefix) +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import Distribution.Compat.Binary +import GHC.Generics (Generic) + +import Distribution.Text +import Distribution.Compat.ReadP (ReadP, (<++), (+++)) +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +import System.FilePath +import System.Directory + + +-- | A file path specified by globbing +-- +data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel + deriving (Eq, Show, Generic) + +data FilePathGlobRel + = GlobDir !Glob !FilePathGlobRel + | GlobFile !Glob + | GlobDirTrailing -- ^ trailing dir, a glob ending in @/@ + deriving (Eq, Show, Generic) + +-- | A single directory or file component of a globbed path +type Glob = [GlobPiece] + +-- | A piece of a globbing pattern +data GlobPiece = WildCard + | Literal String + | Union [Glob] + deriving (Eq, Show, Generic) + +data FilePathRoot + = FilePathRelative + | FilePathRoot FilePath -- ^ e.g. @"/"@, @"c:\"@ or result of 'takeDrive' + | FilePathHomeDir + deriving (Eq, Show, Generic) + +instance Binary FilePathGlob +instance Binary FilePathRoot +instance Binary FilePathGlobRel +instance Binary GlobPiece + + +-- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and +-- is in fact equivalent to a non-glob 'FilePath'. +-- +-- If it is trivial in this sense then the result is the equivalent constant +-- 'FilePath'. On the other hand if it is not trivial (so could in principle +-- match more than one file) then the result is @Nothing@. +-- +isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath +isTrivialFilePathGlob (FilePathGlob root pathglob) = + case root of + FilePathRelative -> go [] pathglob + FilePathRoot root' -> go [root'] pathglob + FilePathHomeDir -> Nothing + where + go paths (GlobDir [Literal path] globs) = go (path:paths) globs + go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths))) + go paths GlobDirTrailing = Just (addTrailingPathSeparator + (joinPath (reverse paths))) + go _ _ = Nothing + +-- | Get the 'FilePath' corresponding to a 'FilePathRoot'. +-- +-- The 'FilePath' argument is required to supply the path for the +-- 'FilePathRelative' case. +-- +getFilePathRootDirectory :: FilePathRoot + -> FilePath -- ^ root for relative paths + -> IO FilePath +getFilePathRootDirectory FilePathRelative root = return root +getFilePathRootDirectory (FilePathRoot root) _ = return root +getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory + + +------------------------------------------------------------------------------ +-- Matching +-- + +-- | Match a 'FilePathGlob' against the file system, starting from a given +-- root directory for relative paths. The results of relative globs are +-- relative to the given root. Matches for absolute globs are absolute. +-- +matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath] +matchFileGlob relroot (FilePathGlob globroot glob) = do + root <- getFilePathRootDirectory globroot relroot + matches <- matchFileGlobRel root glob + case globroot of + FilePathRelative -> return matches + _ -> return (map (root ) matches) + +-- | Match a 'FilePathGlobRel' against the file system, starting from a +-- given root directory. The results are all relative to the given root. 
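-- For example (an editorial illustration, not part of the original comment):
-- matching the relative glob @src/*.hs@ under a root directory containing
-- @src/A.hs@ and @src/B.txt@ yields @["src/A.hs"]@, i.e. the results are
-- reported relative to the given root rather than as absolute paths.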
+-- +matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath] +matchFileGlobRel root glob0 = go glob0 "" + where + go (GlobFile glob) dir = do + entries <- getDirectoryContents (root dir) + let files = filter (matchGlob glob) entries + return (map (dir ) files) + + go (GlobDir glob globPath) dir = do + entries <- getDirectoryContents (root dir) + subdirs <- filterM (\subdir -> doesDirectoryExist + (root dir subdir)) + $ filter (matchGlob glob) entries + concat <$> mapM (\subdir -> go globPath (dir subdir)) subdirs + + go GlobDirTrailing dir = return [dir] + + +-- | Match a globbing pattern against a file path component +-- +matchGlob :: Glob -> String -> Bool +matchGlob = goStart + where + -- From the man page, glob(7): + -- "If a filename starts with a '.', this character must be + -- matched explicitly." + + go, goStart :: [GlobPiece] -> String -> Bool + + goStart (WildCard:_) ('.':_) = False + goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs) + globs + goStart rest cs = go rest cs + + go [] "" = True + go (Literal lit:rest) cs + | Just cs' <- stripPrefix lit cs + = go rest cs' + | otherwise = False + go [WildCard] "" = True + go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs + go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs + go [] (_:_) = False + go (_:_) "" = False + + +------------------------------------------------------------------------------ +-- Parsing & printing +-- + +instance Text FilePathGlob where + disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob + parse = + parse >>= \root -> + (FilePathGlob root <$> parse) + <++ (when (root == FilePathRelative) Parse.pfail >> + return (FilePathGlob root GlobDirTrailing)) + +instance Text FilePathRoot where + disp FilePathRelative = Disp.empty + disp (FilePathRoot root) = Disp.text root + disp FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/' + + parse = + ( (Parse.char '/' >> return (FilePathRoot "/")) + +++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir) + +++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z')) + _ <- Parse.char ':' + _ <- Parse.char '/' +++ Parse.char '\\' + return (FilePathRoot (toUpper drive : ":\\"))) + ) + <++ return FilePathRelative + +instance Text FilePathGlobRel where + disp (GlobDir glob pathglob) = dispGlob glob + Disp.<> Disp.char '/' + Disp.<> disp pathglob + disp (GlobFile glob) = dispGlob glob + disp GlobDirTrailing = Disp.empty + + parse = parsePath + where + parsePath :: ReadP r FilePathGlobRel + parsePath = + parseGlob >>= \globpieces -> + asDir globpieces + <++ asTDir globpieces + <++ asFile globpieces + + asDir glob = do dirSep + globs <- parsePath + return (GlobDir glob globs) + asTDir glob = do dirSep + return (GlobDir glob GlobDirTrailing) + asFile glob = return (GlobFile glob) + + dirSep = (Parse.char '/' >> return ()) + +++ (do _ <- Parse.char '\\' + -- check this isn't an escape code + following <- Parse.look + case following of + (c:_) | isGlobEscapedChar c -> Parse.pfail + _ -> return ()) + + +dispGlob :: Glob -> Disp.Doc +dispGlob = Disp.hcat . 
map dispPiece + where + dispPiece WildCard = Disp.char '*' + dispPiece (Literal str) = Disp.text (escape str) + dispPiece (Union globs) = Disp.braces + (Disp.hcat (Disp.punctuate + (Disp.char ',') + (map dispGlob globs))) + escape [] = [] + escape (c:cs) + | isGlobEscapedChar c = '\\' : c : escape cs + | otherwise = c : escape cs + +parseGlob :: ReadP r Glob +parseGlob = Parse.many1 parsePiece + where + parsePiece = literal +++ wildcard +++ union + + wildcard = Parse.char '*' >> return WildCard + + union = Parse.between (Parse.char '{') (Parse.char '}') $ + fmap Union (Parse.sepBy1 parseGlob (Parse.char ',')) + + literal = Literal `fmap` litchars1 + + litchar = normal +++ escape + + normal = Parse.satisfy (\c -> not (isGlobEscapedChar c) + && c /= '/' && c /= '\\') + escape = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar + + litchars1 :: ReadP r [Char] + litchars1 = liftM2 (:) litchar litchars + + litchars :: ReadP r [Char] + litchars = litchars1 <++ return [] + +isGlobEscapedChar :: Char -> Bool +isGlobEscapedChar '*' = True +isGlobEscapedChar '{' = True +isGlobEscapedChar '}' = True +isGlobEscapedChar ',' = True +isGlobEscapedChar _ = False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GZipUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GZipUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/GZipUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,86 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.GZipUtils +-- Copyright : (c) Dmitry Astapov 2010 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Provides a convenience functions for working with files that may or may not +-- be zipped. +----------------------------------------------------------------------------- +module Distribution.Client.GZipUtils ( + maybeDecompress, + ) where + +import Codec.Compression.Zlib.Internal +import Data.ByteString.Lazy.Internal as BS (ByteString(Empty, Chunk)) + +#if MIN_VERSION_zlib(0,6,0) +import Control.Exception (throw) +import Control.Monad (liftM) +import Control.Monad.ST.Lazy (ST, runST) +import qualified Data.ByteString as Strict +#endif + +-- | Attempts to decompress the `bytes' under the assumption that +-- "data format" error at the very beginning of the stream means +-- that it is already decompressed. Caller should make sanity checks +-- to verify that it is not, in fact, garbage. +-- +-- This is to deal with http proxies that lie to us and transparently +-- decompress without removing the content-encoding header. See: +-- +-- +maybeDecompress :: ByteString -> ByteString +#if MIN_VERSION_zlib(0,6,0) +maybeDecompress bytes = runST (go bytes decompressor) + where + decompressor :: DecompressStream (ST s) + decompressor = decompressST gzipOrZlibFormat defaultDecompressParams + + -- DataError at the beginning of the stream probably means that stream is + -- not compressed, so we return it as-is. + -- TODO: alternatively, we might consider looking for the two magic bytes + -- at the beginning of the gzip header. (not an option for zlib, though.) 
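-- A rough editorial sketch of the resulting behaviour (here 'compress' and
-- 'plainBytes' are hypothetical, standing for a gzip compressor such as
-- Codec.Compression.GZip.compress and some uncompressed input):
--
--   maybeDecompress (compress plainBytes)  -- decompresses back to plainBytes
--   maybeDecompress plainBytes             -- DataError on the first chunk,
--                                          -- so the input is returned as-is
--
-- A stream that starts to decompress and only then fails still raises the
-- error; see go' below.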
+ go :: Monad m => ByteString -> DecompressStream m -> m ByteString + go cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k + go _ (DecompressStreamEnd _bs ) = return Empty + go _ (DecompressStreamError _err ) = return bytes + go cs (DecompressInputRequired k) = go cs' =<< k c + where + (c, cs') = uncons cs + + -- Once we have received any output though we regard errors as actual errors + -- and we throw them (as pure exceptions). + -- TODO: We could (and should) avoid these pure exceptions. + go' :: Monad m => ByteString -> DecompressStream m -> m ByteString + go' cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k + go' _ (DecompressStreamEnd _bs ) = return Empty + go' _ (DecompressStreamError err ) = throw err + go' cs (DecompressInputRequired k) = go' cs' =<< k c + where + (c, cs') = uncons cs + + uncons :: ByteString -> (Strict.ByteString, ByteString) + uncons Empty = (Strict.empty, Empty) + uncons (Chunk c cs) = (c, cs) +#else +maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes + where + -- DataError at the beginning of the stream probably means that stream is not compressed. + -- Returning it as-is. + -- TODO: alternatively, we might consider looking for the two magic bytes + -- at the beginning of the gzip header. + foldStream (StreamError _ _) = bytes + foldStream somethingElse = doFold somethingElse + + doFold StreamEnd = BS.Empty + doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) + doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Haddock.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Haddock.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Haddock.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,69 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Haddock +-- Copyright : (c) Andrea Vezzosi 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Interfacing with Haddock +-- +----------------------------------------------------------------------------- +module Distribution.Client.Haddock + ( + regenerateHaddockIndex + ) + where + +import Data.List (maximumBy) +import Data.Foldable (forM_) +import System.Directory (createDirectoryIfMissing, renameFile) +import System.FilePath ((), splitFileName) +import Distribution.Package + ( packageVersion ) +import Distribution.Simple.Haddock (haddockPackagePaths) +import Distribution.Simple.Program (haddockProgram, ProgramConfiguration + , rawSystemProgram, requireProgramVersion) +import Distribution.Version (Version(Version), orLaterVersion) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, allPackagesByName ) +import Distribution.Simple.Utils + ( comparing, debug, installDirectoryContents, withTempDirectory ) +import Distribution.InstalledPackageInfo as InstalledPackageInfo + ( InstalledPackageInfo(exposed) ) + +regenerateHaddockIndex :: Verbosity + -> InstalledPackageIndex -> ProgramConfiguration + -> FilePath + -> IO () +regenerateHaddockIndex verbosity pkgs conf index = 
do + (paths, warns) <- haddockPackagePaths pkgs' Nothing + let paths' = [ (interface, html) | (interface, Just html) <- paths] + forM_ warns (debug verbosity) + + (confHaddock, _, _) <- + requireProgramVersion verbosity haddockProgram + (orLaterVersion (Version [0,6] [])) conf + + createDirectoryIfMissing True destDir + + withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do + + let flags = [ "--gen-contents" + , "--gen-index" + , "--odir=" ++ tempDir + , "--title=Haskell modules on this system" ] + ++ [ "--read-interface=" ++ html ++ "," ++ interface + | (interface, html) <- paths' ] + rawSystemProgram verbosity confHaddock flags + renameFile (tempDir "index.html") (tempDir destFile) + installDirectoryContents verbosity tempDir destDir + + where + (destDir,destFile) = splitFileName index + pkgs' = [ maximumBy (comparing packageVersion) pkgvers' + | (_pname, pkgvers) <- allPackagesByName pkgs + , let pkgvers' = filter exposed pkgvers + , not (null pkgvers') ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/HttpUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/HttpUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/HttpUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/HttpUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,794 @@ +{-# LANGUAGE CPP, BangPatterns #-} +----------------------------------------------------------------------------- +-- | Separate module for HTTP actions, using a proxy server if one exists. +----------------------------------------------------------------------------- +module Distribution.Client.HttpUtils ( + DownloadResult(..), + configureTransport, + HttpTransport(..), + HttpCode, + downloadURI, + transportCheckHttps, + remoteRepoCheckHttps, + remoteRepoTryUpgradeToHttps, + isOldHackageURI + ) where + +import Network.HTTP + ( Request (..), Response (..), RequestMethod (..) + , Header(..), HeaderName(..), lookupHeader ) +import Network.HTTP.Proxy ( Proxy(..), fetchProxy) +import Network.URI + ( URI (..), URIAuth (..), uriToString ) +import Network.Browser + ( browse, setOutHandler, setErrHandler, setProxy + , setAuthorityGen, request, setAllowBasicAuth, setUserAgent ) +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import qualified Control.Exception as Exception +import Control.Monad + ( when, guard ) +import qualified Data.ByteString.Lazy.Char8 as BS +import Data.List + ( isPrefixOf, find, intercalate ) +import Data.Maybe + ( listToMaybe, maybeToList, fromMaybe ) +import qualified Paths_cabal_install (version) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils + ( die, info, warn, debug, notice, writeFileAtomic + , copyFileVerbose, withTempFile + , rawSystemStdInOut, toUTF8, fromUTF8, normaliseLineEndings ) +import Distribution.Client.Utils + ( readMaybe, withTempFileName ) +import Distribution.Client.Types + ( RemoteRepo(..) 
) +import Distribution.System + ( buildOS, buildArch ) +import Distribution.Text + ( display ) +import Data.Char + ( isSpace ) +import qualified System.FilePath.Posix as FilePath.Posix + ( splitDirectories ) +import System.FilePath + ( (<.>) ) +import System.Directory + ( doesFileExist, renameFile ) +import System.IO.Error + ( isDoesNotExistError ) +import Distribution.Simple.Program + ( Program, simpleProgram, ConfiguredProgram, programPath + , ProgramInvocation(..), programInvocation + , getProgramInvocationOutput ) +import Distribution.Simple.Program.Db + ( ProgramDb, emptyProgramDb, addKnownPrograms + , configureAllKnownPrograms + , requireProgram, lookupProgram ) +import Distribution.Simple.Program.Run + ( IOEncoding(..), getEffectiveEnvironment ) +import Numeric (showHex) +import System.Directory (canonicalizePath) +import System.IO (hClose) +import System.FilePath (takeFileName, takeDirectory) +import System.Random (randomRIO) +import System.Exit (ExitCode(..)) + + +------------------------------------------------------------------------------ +-- Downloading a URI, given an HttpTransport +-- + +data DownloadResult = FileAlreadyInCache + | FileDownloaded FilePath + deriving (Eq) + +downloadURI :: HttpTransport + -> Verbosity + -> URI -- ^ What to download + -> FilePath -- ^ Where to put it + -> IO DownloadResult +downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do + copyFileVerbose verbosity (uriPath uri) path + return (FileDownloaded path) + -- Can we store the hash of the file so we can safely return path when the + -- hash matches to avoid unnecessary computation? + +downloadURI transport verbosity uri path = do + + let etagPath = path <.> "etag" + targetExists <- doesFileExist path + etagPathExists <- doesFileExist etagPath + -- In rare cases the target file doesn't exist, but the etag does. + etag <- if targetExists && etagPathExists + then Just <$> readFile etagPath + else return Nothing + + -- Only use the external http transports if we actually have to + -- (or have been told to do so) + let transport' + | uriScheme uri == "http:" + , not (transportManuallySelected transport) + = plainHttpTransport + + | otherwise + = transport + + withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do + result <- getHttp transport' verbosity uri etag tmpFile [] + + -- Only write the etag if we get a 200 response code. + -- A 304 still sends us an etag header. + case result of + (200, Just newEtag) -> writeFile etagPath newEtag + _ -> return () + + case fst result of + 200 -> do + info verbosity ("Downloaded to " ++ path) + renameFile tmpFile path + return (FileDownloaded path) + 304 -> do + notice verbosity "Skipping download: local and remote files match." 
+ return FileAlreadyInCache + errCode -> die $ "Failed to download " ++ show uri + ++ " : HTTP code " ++ show errCode + +------------------------------------------------------------------------------ +-- Utilities for repo url management +-- + +remoteRepoCheckHttps :: HttpTransport -> RemoteRepo -> IO () +remoteRepoCheckHttps transport repo + | uriScheme (remoteRepoURI repo) == "https:" + , not (transportSupportsHttps transport) + = die $ "The remote repository '" ++ remoteRepoName repo + ++ "' specifies a URL that " ++ requiresHttpsErrorMessage + | otherwise = return () + +transportCheckHttps :: HttpTransport -> URI -> IO () +transportCheckHttps transport uri + | uriScheme uri == "https:" + , not (transportSupportsHttps transport) + = die $ "The URL " ++ show uri + ++ " " ++ requiresHttpsErrorMessage + | otherwise = return () + +requiresHttpsErrorMessage :: String +requiresHttpsErrorMessage = + "requires HTTPS however the built-in HTTP implementation " + ++ "does not support HTTPS. The transport implementations with HTTPS " + ++ "support are " ++ intercalate ", " + [ name | (name, _, True, _ ) <- supportedTransports ] + ++ ". One of these will be selected automatically if the corresponding " + ++ "external program is available, or one can be selected specifically " + ++ "with the global flag --http-transport=" + +remoteRepoTryUpgradeToHttps :: HttpTransport -> RemoteRepo -> IO RemoteRepo +remoteRepoTryUpgradeToHttps transport repo + | remoteRepoShouldTryHttps repo + , uriScheme (remoteRepoURI repo) == "http:" + , not (transportSupportsHttps transport) + , not (transportManuallySelected transport) + = die $ "The builtin HTTP implementation does not support HTTPS, but using " + ++ "HTTPS for authenticated uploads is recommended. " + ++ "The transport implementations with HTTPS support are " + ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] + ++ "but they require the corresponding external program to be " + ++ "available. You can either make one available or use plain HTTP by " + ++ "using the global flag --http-transport=plain-http (or putting the " + ++ "equivalent in the config file). With plain HTTP, your password " + ++ "is sent using HTTP digest authentication so it cannot be easily " + ++ "intercepted, but it is not as secure as using HTTPS." + + | remoteRepoShouldTryHttps repo + , uriScheme (remoteRepoURI repo) == "http:" + , transportSupportsHttps transport + = return repo { + remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" } + } + + | otherwise + = return repo + +-- | Utility function for legacy support. +isOldHackageURI :: URI -> Bool +isOldHackageURI uri + = case uriAuthority uri of + Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> + FilePath.Posix.splitDirectories (uriPath uri) + == ["/","packages","archive"] + _ -> False + + +------------------------------------------------------------------------------ +-- Setting up a HttpTransport +-- + +data HttpTransport = HttpTransport { + -- | GET a URI, with an optional ETag (to do a conditional fetch), + -- write the resource to the given file and return the HTTP status code, + -- and optional ETag. + getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath -> [Header] + -> IO (HttpCode, Maybe ETag), + + -- | POST a resource to a URI, with optional auth (username, password) + -- and return the HTTP status code and any redirect URL. 
+ postHttp :: Verbosity -> URI -> String -> Maybe Auth + -> IO (HttpCode, String), + + -- | POST a file resource to a URI using multipart\/form-data encoding, + -- with optional auth (username, password) and return the HTTP status + -- code and any error string. + postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth + -> IO (HttpCode, String), + + -- | PUT a file resource to a URI, with optional auth + -- (username, password), extra headers and return the HTTP status code + -- and any error string. + putHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> [Header] + -> IO (HttpCode, String), + + -- | Whether this transport supports https or just http. + transportSupportsHttps :: Bool, + + -- | Whether this transport implementation was specifically chosen by + -- the user via configuration, or whether it was automatically selected. + -- Strictly speaking this is not a property of the transport itself but + -- about how it was chosen. Nevertheless it's convenient to keep here. + transportManuallySelected :: Bool + } + --TODO: why does postHttp return a redirect, but postHttpFile return errors? + +type HttpCode = Int +type ETag = String +type Auth = (String, String) + +noPostYet :: Verbosity -> URI -> String -> Maybe (String, String) + -> IO (Int, String) +noPostYet _ _ _ _ = die "Posting (for report upload) is not implemented yet" + +supportedTransports :: [(String, Maybe Program, Bool, + ProgramDb -> Maybe HttpTransport)] +supportedTransports = + [ let prog = simpleProgram "curl" in + ( "curl", Just prog, True + , \db -> curlTransport <$> lookupProgram prog db ) + + , let prog = simpleProgram "wget" in + ( "wget", Just prog, True + , \db -> wgetTransport <$> lookupProgram prog db ) + + , let prog = simpleProgram "powershell" in + ( "powershell", Just prog, True + , \db -> powershellTransport <$> lookupProgram prog db ) + + , ( "plain-http", Nothing, False + , \_ -> Just plainHttpTransport ) + ] + +configureTransport :: Verbosity -> Maybe String -> IO HttpTransport + +configureTransport verbosity (Just name) = + -- the user secifically selected a transport by name so we'll try and + -- configure that one + + case find (\(name',_,_,_) -> name' == name) supportedTransports of + Just (_, mprog, _tls, mkTrans) -> do + + progdb <- case mprog of + Nothing -> return emptyProgramDb + Just prog -> snd <$> requireProgram verbosity prog emptyProgramDb + -- ^^ if it fails, it'll fail here + + let Just transport = mkTrans progdb + return transport { transportManuallySelected = True } + + Nothing -> die $ "Unknown HTTP transport specified: " ++ name + ++ ". 
The supported transports are " + ++ intercalate ", " + [ name' | (name', _, _, _ ) <- supportedTransports ] + +configureTransport verbosity Nothing = do + -- the user hasn't selected a transport, so we'll pick the first one we + -- can configure successfully, provided that it supports tls + + -- for all the transports except plain-http we need to try and find + -- their external executable + progdb <- configureAllKnownPrograms verbosity $ + addKnownPrograms + [ prog | (_, Just prog, _, _) <- supportedTransports ] + emptyProgramDb + + let availableTransports = + [ (name, transport) + | (name, _, _, mkTrans) <- supportedTransports + , transport <- maybeToList (mkTrans progdb) ] + -- there's always one because the plain one is last and never fails + let (name, transport) = head availableTransports + debug verbosity $ "Selected http transport implementation: " ++ name + + return transport { transportManuallySelected = False } + + +------------------------------------------------------------------------------ +-- The HttpTransports based on external programs +-- + +curlTransport :: ConfiguredProgram -> HttpTransport +curlTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + withTempFile (takeDirectory destPath) + "curl-headers.txt" $ \tmpFile tmpHandle -> do + hClose tmpHandle + let args = [ show uri + , "--output", destPath + , "--location" + , "--write-out", "%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--dump-header", tmpFile ] + ++ concat + [ ["--header", "If-None-Match: " ++ t] + | t <- maybeToList etag ] + ++ concat + [ ["--header", show name ++ ": " ++ value] + | Header name value <- reqHeaders ] + + resp <- getProgramInvocationOutput verbosity + (programInvocation prog args) + headers <- readFile tmpFile + (code, _err, etag') <- parseResponse uri resp headers + return (code, etag') + + posthttp = noPostYet + + addAuthConfig auth progInvocation = progInvocation + { progInvokeInput = do + (uname, passwd) <- auth + return $ unlines + [ "--digest" + , "--user " ++ uname ++ ":" ++ passwd + ] + , progInvokeArgs = ["--config", "-"] ++ progInvokeArgs progInvocation + } + + posthttpfile verbosity uri path auth = do + let args = [ show uri + , "--form", "package=@"++path + , "--write-out", "\n%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--header", "Accept: text/plain" + , "--location" + ] + resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth + (programInvocation prog args) + (code, err, _etag) <- parseResponse uri resp "" + return (code, err) + + puthttpfile verbosity uri path auth headers = do + let args = [ show uri + , "--request", "PUT", "--data-binary", "@"++path + , "--write-out", "\n%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--location" + , "--header", "Accept: text/plain" + ] + ++ concat + [ ["--header", show name ++ ": " ++ value] + | Header name value <- headers ] + resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth + (programInvocation prog args) + (code, err, _etag) <- parseResponse uri resp "" + return (code, err) + + -- on success these curl involcations produces an output like "200" + -- and on failure it has the server error response first + parseResponse uri resp headers = + let codeerr = + case reverse (lines resp) of + (codeLine:rerrLines) -> + case readMaybe (trim codeLine) of + Just i -> let errstr = mkErrstr rerrLines + in Just (i, errstr) + 
Nothing -> Nothing + [] -> Nothing + + mkErrstr = unlines . reverse . dropWhile (all isSpace) + + mb_etag :: Maybe ETag + mb_etag = listToMaybe $ reverse + [ etag + | ["ETag:", etag] <- map words (lines headers) ] + + in case codeerr of + Just (i, err) -> return (i, err, mb_etag) + _ -> statusParseFail uri resp + + +wgetTransport :: ConfiguredProgram -> HttpTransport +wgetTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + resp <- runWGet verbosity uri args + (code, etag') <- parseOutput uri resp + return (code, etag') + where + args = [ "--output-document=" ++ destPath + , "--user-agent=" ++ userAgent + , "--tries=5" + , "--timeout=15" + , "--server-response" ] + ++ concat + [ ["--header", "If-None-Match: " ++ t] + | t <- maybeToList etag ] + ++ [ "--header=" ++ show name ++ ": " ++ value + | Header name value <- reqHeaders ] + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = + withTempFile (takeDirectory path) + (takeFileName path) $ \tmpFile tmpHandle -> + withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do + hClose responseHandle + (body, boundary) <- generateMultipartBody path + BS.hPut tmpHandle body + hClose tmpHandle + let args = [ "--post-file=" ++ tmpFile + , "--user-agent=" ++ userAgent + , "--server-response" + , "--output-document=" ++ responseFile + , "--header=Accept: text/plain" + , "--header=Content-type: multipart/form-data; " ++ + "boundary=" ++ boundary ] + out <- runWGet verbosity (addUriAuth auth uri) args + (code, _etag) <- parseOutput uri out + resp <- readFile responseFile + return (code, resp) + + puthttpfile verbosity uri path auth headers = + withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do + hClose responseHandle + let args = [ "--method=PUT", "--body-file="++path + , "--user-agent=" ++ userAgent + , "--server-response" + , "--output-document=" ++ responseFile + , "--header=Accept: text/plain" ] + ++ [ "--header=" ++ show name ++ ": " ++ value + | Header name value <- headers ] + + out <- runWGet verbosity (addUriAuth auth uri) args + (code, _etag) <- parseOutput uri out + resp <- readFile responseFile + return (code, resp) + + addUriAuth Nothing uri = uri + addUriAuth (Just (user, pass)) uri = uri + { uriAuthority = Just a { uriUserInfo = user ++ ":" ++ pass ++ "@" } + } + where + a = fromMaybe (URIAuth "" "" "") (uriAuthority uri) + + runWGet verbosity uri args = do + -- We pass the URI via STDIN because it contains the users' credentials + -- and sensitive data should not be passed via command line arguments. + let + invocation = (programInvocation prog ("--input-file=-" : args)) + { progInvokeInput = Just (uriToString id uri "") + } + + -- wget returns its output on stderr rather than stdout + (_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity + invocation + -- wget returns exit code 8 for server "errors" like "304 not modified" + if exitCode == ExitSuccess || exitCode == ExitFailure 8 + then return resp + else die $ "'" ++ programPath prog + ++ "' exited with an error:\n" ++ resp + + -- With the --server-response flag, wget produces output with the full + -- http server response with all headers, we want to find a line like + -- "HTTP/1.1 200 OK", but only the last one, since we can have multiple + -- requests due to redirects. 
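-- An editorial sketch of the input being parsed (not taken from a real run):
-- with --server-response and a redirect, the tail of wget's stderr looks
-- roughly like
--
--   HTTP/1.1 301 Moved Permanently
--   ...
--   HTTP/1.1 200 OK
--   ETag: "d41d8cd9"
--
-- parseOutput walks the lines in reverse, so it reports the last status code
-- (200 here) together with any "ETag:" header it can find.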
+ parseOutput uri resp = + let parsedCode = listToMaybe + [ code + | (protocol:codestr:_err) <- map words (reverse (lines resp)) + , "HTTP/" `isPrefixOf` protocol + , code <- maybeToList (readMaybe codestr) ] + mb_etag :: Maybe ETag + mb_etag = listToMaybe + [ etag + | ["ETag:", etag] <- map words (reverse (lines resp)) ] + in case parsedCode of + Just i -> return (i, mb_etag) + _ -> statusParseFail uri resp + + +powershellTransport :: ConfiguredProgram -> HttpTransport +powershellTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + resp <- runPowershellScript verbosity $ + webclientScript + (setupHeaders ((useragentHeader : etagHeader) ++ reqHeaders)) + [ "$wc.DownloadFile(" ++ escape (show uri) + ++ "," ++ escape destPath ++ ");" + , "Write-Host \"200\";" + , "Write-Host $wc.ResponseHeaders.Item(\"ETag\");" + ] + parseResponse resp + where + parseResponse x = case readMaybe . unlines . take 1 . lines $ trim x of + Just i -> return (i, Nothing) -- TODO extract real etag + Nothing -> statusParseFail uri x + etagHeader = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = + withTempFile (takeDirectory path) + (takeFileName path) $ \tmpFile tmpHandle -> do + (body, boundary) <- generateMultipartBody path + BS.hPut tmpHandle body + hClose tmpHandle + fullPath <- canonicalizePath tmpFile + + let contentHeader = Header HdrContentType + ("multipart/form-data; boundary=" ++ boundary) + resp <- runPowershellScript verbosity $ webclientScript + (setupHeaders (contentHeader : extraHeaders) ++ setupAuth auth) + (uploadFileAction "POST" uri fullPath) + parseUploadResponse uri resp + + puthttpfile verbosity uri path auth headers = do + fullPath <- canonicalizePath path + resp <- runPowershellScript verbosity $ webclientScript + (setupHeaders (extraHeaders ++ headers) ++ setupAuth auth) + (uploadFileAction "PUT" uri fullPath) + parseUploadResponse uri resp + + runPowershellScript verbosity script = do + let args = + [ "-InputFormat", "None" + -- the default execution policy doesn't allow running + -- unsigned scripts, so we need to tell powershell to bypass it + , "-ExecutionPolicy", "bypass" + , "-NoProfile", "-NonInteractive" + , "-Command", "-" + ] + getProgramInvocationOutput verbosity (programInvocation prog args) + { progInvokeInput = Just (script ++ "\nExit(0);") + } + + escape = show + + useragentHeader = Header HdrUserAgent userAgent + extraHeaders = [Header HdrAccept "text/plain", useragentHeader] + + setupHeaders headers = + [ "$wc.Headers.Add(" ++ escape (show name) ++ "," ++ escape value ++ ");" + | Header name value <- headers + ] + + setupAuth auth = + [ "$wc.Credentials = new-object System.Net.NetworkCredential(" + ++ escape uname ++ "," ++ escape passwd ++ ",\"\");" + | (uname,passwd) <- maybeToList auth + ] + + uploadFileAction method uri fullPath = + [ "$fileBytes = [System.IO.File]::ReadAllBytes(" ++ escape fullPath ++ ");" + , "$bodyBytes = $wc.UploadData(" ++ escape (show uri) ++ "," + ++ show method ++ ", $fileBytes);" + , "Write-Host \"200\";" + , "Write-Host (-join [System.Text.Encoding]::UTF8.GetChars($bodyBytes));" + ] + + parseUploadResponse uri resp = case lines (trim resp) of + (codeStr : message) + | Just code <- readMaybe codeStr -> return (code, unlines message) + _ -> statusParseFail uri resp + + webclientScript setup action = unlines + [ "$wc = new-object system.net.webclient;" + , unlines setup + , "Try {" 
+ , unlines (map (" " ++) action) + , "} Catch [System.Net.WebException] {" + , " $exception = $_.Exception;" + , " If ($exception.Status -eq " + ++ "[System.Net.WebExceptionStatus]::ProtocolError) {" + , " $response = $exception.Response -as [System.Net.HttpWebResponse];" + , " $reader = new-object " + ++ "System.IO.StreamReader($response.GetResponseStream());" + , " Write-Host ($response.StatusCode -as [int]);" + , " Write-Host $reader.ReadToEnd();" + , " } Else {" + , " Write-Host $exception.Message;" + , " }" + , "} Catch {" + , " Write-Host $_.Exception.Message;" + , "}" + ] + + +------------------------------------------------------------------------------ +-- The builtin plain HttpTransport +-- + +plainHttpTransport :: HttpTransport +plainHttpTransport = + HttpTransport gethttp posthttp posthttpfile puthttpfile False False + where + gethttp verbosity uri etag destPath reqHeaders = do + let req = Request{ + rqURI = uri, + rqMethod = GET, + rqHeaders = [ Header HdrIfNoneMatch t + | t <- maybeToList etag ] + ++ reqHeaders, + rqBody = BS.empty + } + (_, resp) <- cabalBrowse verbosity Nothing (request req) + let code = convertRspCode (rspCode resp) + etag' = lookupHeader HdrETag (rspHeaders resp) + when (code==200 || code==206) $ + writeFileAtomic destPath $ rspBody resp + return (code, etag') + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = do + (body, boundary) <- generateMultipartBody path + let headers = [ Header HdrContentType + ("multipart/form-data; boundary="++boundary) + , Header HdrContentLength (show (BS.length body)) + , Header HdrAccept ("text/plain") + ] + req = Request { + rqURI = uri, + rqMethod = POST, + rqHeaders = headers, + rqBody = body + } + (_, resp) <- cabalBrowse verbosity auth (request req) + return (convertRspCode (rspCode resp), rspErrorString resp) + + puthttpfile verbosity uri path auth headers = do + body <- BS.readFile path + let req = Request { + rqURI = uri, + rqMethod = PUT, + rqHeaders = Header HdrContentLength (show (BS.length body)) + : Header HdrAccept "text/plain" + : headers, + rqBody = body + } + (_, resp) <- cabalBrowse verbosity auth (request req) + return (convertRspCode (rspCode resp), rspErrorString resp) + + convertRspCode (a,b,c) = a*100 + b*10 + c + + rspErrorString resp = + case lookupHeader HdrContentType (rspHeaders resp) of + Just contenttype + | takeWhile (/= ';') contenttype == "text/plain" + -> BS.unpack (rspBody resp) + _ -> rspReason resp + + cabalBrowse verbosity auth act = do + p <- fixupEmptyProxy <$> fetchProxy True + Exception.handleJust + (guard . isDoesNotExistError) + (const . die $ "Couldn't establish HTTP connection. " + ++ "Possible cause: HTTP proxy server is down.") $ + browse $ do + setProxy p + setErrHandler (warn verbosity . ("http error: "++)) + setOutHandler (debug verbosity) + setUserAgent userAgent + setAllowBasicAuth False + setAuthorityGen (\_ _ -> return auth) + act + + fixupEmptyProxy (Proxy uri _) | null uri = NoProxy + fixupEmptyProxy p = p + + +------------------------------------------------------------------------------ +-- Common stuff used by multiple transport impls +-- + +userAgent :: String +userAgent = concat [ "cabal-install/", display Paths_cabal_install.version + , " (", display buildOS, "; ", display buildArch, ")" + ] + +statusParseFail :: URI -> String -> IO a +statusParseFail uri r = + die $ "Failed to download " ++ show uri ++ " : " + ++ "No Status Code could be parsed from response: " ++ r + +-- Trim +trim :: String -> String +trim = f . f + where f = reverse . 
dropWhile isSpace + + +------------------------------------------------------------------------------ +-- Multipart stuff partially taken from cgi package. +-- + +generateMultipartBody :: FilePath -> IO (BS.ByteString, String) +generateMultipartBody path = do + content <- BS.readFile path + boundary <- genBoundary + let !body = formatBody content (BS.pack boundary) + return (body, boundary) + where + formatBody content boundary = + BS.concat $ + [ crlf, dd, boundary, crlf ] + ++ [ BS.pack (show header) | header <- headers ] + ++ [ crlf + , content + , crlf, dd, boundary, dd, crlf ] + + headers = + [ Header (HdrCustom "Content-disposition") + ("form-data; name=package; " ++ + "filename=\"" ++ takeFileName path ++ "\"") + , Header HdrContentType "application/x-gzip" + ] + + crlf = BS.pack "\r\n" + dd = BS.pack "--" + +genBoundary :: IO String +genBoundary = do + i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer + return $ showHex i "" + +------------------------------------------------------------------------------ +-- Compat utils + +-- TODO: This is only here temporarily so we can release without also requiring +-- the latest Cabal lib. The function is also included in Cabal now. + +getProgramInvocationOutputAndErrors :: Verbosity -> ProgramInvocation + -> IO (String, String, ExitCode) +getProgramInvocationOutputAndErrors verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokeCwd = mcwd, + progInvokeInput = minputStr, + progInvokeOutputEncoding = encoding + } = do + let utf8 = case encoding of IOEncodingUTF8 -> True; _ -> False + decode | utf8 = fromUTF8 . normaliseLineEndings + | otherwise = id + menv <- getEffectiveEnvironment envOverrides + (output, errors, exitCode) <- rawSystemStdInOut verbosity + path args + mcwd menv + input utf8 + return (decode output, decode errors, exitCode) + where + input = + case minputStr of + Nothing -> Nothing + Just inputStr -> Just $ + case encoding of + IOEncodingText -> (inputStr, False) + IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for utf8 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/IndexUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/IndexUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/IndexUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/IndexUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,647 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE GADTs #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.IndexUtils +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Extra utils related to the package indexes. 
+-----------------------------------------------------------------------------
+module Distribution.Client.IndexUtils (
+  getIndexFileAge,
+  getInstalledPackages,
+  Configure.getInstalledPackagesMonitorFiles,
+  getSourcePackages,
+  getSourcePackagesMonitorFiles,
+
+  Index(..),
+  PackageEntry(..),
+  parsePackageIndex,
+  updateRepoIndexCache,
+  updatePackageIndexCacheFile,
+  readCacheStrict,
+
+  BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType
+  ) where
+
+import qualified Codec.Archive.Tar as Tar
+import qualified Codec.Archive.Tar.Entry as Tar
+import qualified Codec.Archive.Tar.Index as Tar
+import qualified Distribution.Client.Tar as Tar
+import Distribution.Client.Types
+
+import Distribution.Package
+         ( PackageId, PackageIdentifier(..), PackageName(..)
+         , Package(..), packageVersion, packageName
+         , Dependency(Dependency) )
+import Distribution.Client.PackageIndex (PackageIndex)
+import qualified Distribution.Client.PackageIndex as PackageIndex
+import Distribution.Simple.PackageIndex (InstalledPackageIndex)
+import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse
+import Distribution.PackageDescription
+         ( GenericPackageDescription )
+import Distribution.PackageDescription.Parse
+         ( parsePackageDescription )
+import Distribution.Simple.Compiler
+         ( Compiler, PackageDBStack )
+import Distribution.Simple.Program
+         ( ProgramConfiguration )
+import qualified Distribution.Simple.Configure as Configure
+         ( getInstalledPackages, getInstalledPackagesMonitorFiles )
+import Distribution.ParseUtils
+         ( ParseResult(..) )
+import Distribution.Version
+         ( Version(Version), intersectVersionRanges )
+import Distribution.Text
+         ( display, simpleParse )
+import Distribution.Verbosity
+         ( Verbosity, normal, lessVerbose )
+import Distribution.Simple.Utils
+         ( die, warn, info, fromUTF8, ignoreBOM )
+import Distribution.Client.Setup
+         ( RepoContext(..) )
+
+import Data.Char (isAlphaNum)
+import Data.Maybe (mapMaybe, catMaybes, maybeToList)
+import Data.List (isPrefixOf)
+#if !MIN_VERSION_base(4,8,0)
+import Data.Monoid (Monoid(..))
+#endif
+import qualified Data.Map as Map
+import Control.Monad (when, liftM)
+import Control.Exception (evaluate)
+import qualified Data.ByteString.Lazy as BS
+import qualified Data.ByteString.Lazy.Char8 as BS.Char8
+import qualified Data.ByteString.Char8 as BSS
+import Data.ByteString.Lazy (ByteString)
+import Distribution.Client.GZipUtils (maybeDecompress)
+import Distribution.Client.Utils ( byteStringToFilePath
+                                 , tryFindAddSourcePackageDesc )
+import Distribution.Compat.Exception (catchIO)
+import Distribution.Client.Compat.Time (getFileAge, getModTime)
+import System.Directory (doesFileExist, doesDirectoryExist)
+import System.FilePath
+         ( (</>), (<.>), takeExtension, replaceExtension, splitDirectories, normalise )
+import System.FilePath.Posix as FilePath.Posix
+         ( takeFileName )
+import System.IO
+import System.IO.Unsafe (unsafeInterleaveIO)
+import System.IO.Error (isDoesNotExistError)
+
+import qualified Hackage.Security.Client as Sec
+import qualified Hackage.Security.Util.Some as Sec
+
+-- | Reduced-verbosity version of 'Configure.getInstalledPackages'
+getInstalledPackages :: Verbosity -> Compiler
+                     -> PackageDBStack -> ProgramConfiguration
+                     -> IO InstalledPackageIndex
+getInstalledPackages verbosity comp packageDbs conf =
+    Configure.getInstalledPackages verbosity' comp packageDbs conf
+  where
+    verbosity' = lessVerbose verbosity
+
+
+-- | Get filename base (i.e. without file extension) for index-related files
+--
+-- /Secure/ cabal repositories use a new extended & incremental
+-- @01-index.tar@. In order to avoid issues resulting from clobbering
+-- new/old-style index data, we save them locally to different names.
+--
+-- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the
+-- @00-index.tar.gz@/@01-index.tar.gz@ file.
+indexBaseName :: Repo -> FilePath
+indexBaseName repo = repoLocalDir repo </> fn
+  where
+    fn = case repo of
+           RepoSecure {} -> "01-index"
+           RepoRemote {} -> "00-index"
+           RepoLocal  {} -> "00-index"
+
+------------------------------------------------------------------------
+-- Reading the source package index
+--
+
+-- | Read a repository index from disk, from the local files specified by
+-- a list of 'Repo's.
+--
+-- All the 'SourcePackage's are marked as having come from the appropriate
+-- 'Repo'.
+--
+-- This is a higher level wrapper used internally in cabal-install.
+getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb
+getSourcePackages verbosity repoCtxt | null (repoContextRepos repoCtxt) = do
+  warn verbosity $ "No remote package servers have been specified. Usually "
+                ++ "you would have one specified in the config file."
+  return SourcePackageDb {
+    packageIndex       = mempty,
+    packagePreferences = mempty
+  }
+getSourcePackages verbosity repoCtxt = do
+  info verbosity "Reading available packages..."
+  pkgss <- mapM (\r -> readRepoIndex verbosity repoCtxt r) (repoContextRepos repoCtxt)
+  let (pkgs, prefs) = mconcat pkgss
+      prefs' = Map.fromListWith intersectVersionRanges
+                 [ (name, range) | Dependency name range <- prefs ]
+  _ <- evaluate pkgs
+  _ <- evaluate prefs'
+  return SourcePackageDb {
+    packageIndex       = pkgs,
+    packagePreferences = prefs'
+  }
+
+readCacheStrict :: Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency])
+readCacheStrict verbosity index mkPkg = do
+    updateRepoIndexCache verbosity index
+    cache <- liftM readIndexCache $ BSS.readFile (cacheFile index)
+    withFile (indexFile index) ReadMode $ \indexHnd ->
+      packageListFromCache mkPkg indexHnd cache ReadPackageIndexStrict
+
+-- | Read a repository index from disk, from the local file specified by
+-- the 'Repo'.
+--
+-- All the 'SourcePackage's are marked as having come from the given 'Repo'.
+--
+-- This is a higher level wrapper used internally in cabal-install.
+--
+readRepoIndex :: Verbosity -> RepoContext -> Repo
+              -> IO (PackageIndex SourcePackage, [Dependency])
+readRepoIndex verbosity repoCtxt repo =
+  handleNotFound $ do
+    warnIfIndexIsOld =<< getIndexFileAge repo
+    updateRepoIndexCache verbosity (RepoIndex repoCtxt repo)
+    readPackageIndexCacheFile mkAvailablePackage (RepoIndex repoCtxt repo)
+
+  where
+    mkAvailablePackage pkgEntry =
+      SourcePackage {
+        packageInfoId      = pkgid,
+        packageDescription = packageDesc pkgEntry,
+        packageSource      = case pkgEntry of
+          NormalPackage _ _ _ _      -> RepoTarballPackage repo pkgid Nothing
+          BuildTreeRef  _ _ _ path _ -> LocalUnpackedPackage path,
+        packageDescrOverride = case pkgEntry of
+          NormalPackage _ _ pkgtxt _ -> Just pkgtxt
+          _                          -> Nothing
+      }
+      where
+        pkgid = packageId pkgEntry
+
+    handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e
+      then do
+        case repo of
+          RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote
+          RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote
+          RepoLocal{..}  -> warn verbosity $
+               "The package list for the local repo '" ++ repoLocalDir
+            ++ "' is missing. The repo is invalid."
+ return mempty + else ioError e + + isOldThreshold = 15 --days + warnIfIndexIsOld dt = do + when (dt >= isOldThreshold) $ case repo of + RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt + RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt + RepoLocal{..} -> return () + + errMissingPackageList repoRemote = + "The package list for '" ++ remoteRepoName repoRemote + ++ "' does not exist. Run 'cabal update' to download it." + errOutdatedPackageList repoRemote dt = + "The package list for '" ++ remoteRepoName repoRemote + ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " + ++ "'cabal update' to get the latest list of available packages." + +-- | Return the age of the index file in days (as a Double). +getIndexFileAge :: Repo -> IO Double +getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar" + +-- | A set of files (or directories) that can be monitored to detect when +-- there might have been a change in the source packages. +-- +getSourcePackagesMonitorFiles :: [Repo] -> [FilePath] +getSourcePackagesMonitorFiles repos = + [ indexBaseName repo <.> "cache" | repo <- repos ] + +-- | It is not necessary to call this, as the cache will be updated when the +-- index is read normally. However you can do the work earlier if you like. +-- +updateRepoIndexCache :: Verbosity -> Index -> IO () +updateRepoIndexCache verbosity index = + whenCacheOutOfDate index $ do + updatePackageIndexCacheFile verbosity index + +whenCacheOutOfDate :: Index -> IO () -> IO () +whenCacheOutOfDate index action = do + exists <- doesFileExist $ cacheFile index + if not exists + then action + else do + indexTime <- getModTime $ indexFile index + cacheTime <- getModTime $ cacheFile index + when (indexTime > cacheTime) action + +------------------------------------------------------------------------ +-- Reading the index file +-- + +-- | An index entry is either a normal package, or a local build tree reference. +data PackageEntry = + NormalPackage PackageId GenericPackageDescription ByteString BlockNo + | BuildTreeRef BuildTreeRefType + PackageId GenericPackageDescription FilePath BlockNo + +-- | A build tree reference is either a link or a snapshot. +data BuildTreeRefType = SnapshotRef | LinkRef + deriving Eq + +refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType +refTypeFromTypeCode t + | t == Tar.buildTreeRefTypeCode = LinkRef + | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef + | otherwise = + error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" + +typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode +typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode +typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode + +instance Package PackageEntry where + packageId (NormalPackage pkgid _ _ _) = pkgid + packageId (BuildTreeRef _ pkgid _ _ _) = pkgid + +packageDesc :: PackageEntry -> GenericPackageDescription +packageDesc (NormalPackage _ descr _ _) = descr +packageDesc (BuildTreeRef _ _ descr _ _) = descr + +-- | Parse an uncompressed \"00-index.tar\" repository index file represented +-- as a 'ByteString'. +-- + +data PackageOrDep = Pkg PackageEntry | Dep Dependency + +-- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files +-- +-- We read the index using 'Tar.read', which gives us a lazily constructed +-- 'TarEntries'. 
We translate it to a list of entries using 'tarEntriesList', +-- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a +-- function over this to translate it to a list of IO actions returning +-- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of +-- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. +parsePackageIndex :: ByteString -> [IO (Maybe PackageOrDep)] +parsePackageIndex = concatMap (uncurry extract) . tarEntriesList . Tar.read + where + extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] + extract blockNo entry = tryExtractPkg ++ tryExtractPrefs + where + tryExtractPkg = do + mkPkgEntry <- maybeToList $ extractPkg entry blockNo + return $ fmap (fmap Pkg) mkPkgEntry + + tryExtractPrefs = do + prefs' <- maybeToList $ extractPrefs entry + fmap (return . Just . Dep) prefs' + +-- | Turn the 'Entries' data structure from the @tar@ package into a list, +-- and pair each entry with its block number. +-- +-- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read +-- as far as the list is evaluated. +tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)] +tarEntriesList = go 0 + where + go !_ Tar.Done = [] + go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e) + go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es' + +extractPkg :: Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) +extractPkg entry blockNo = case Tar.entryContent entry of + Tar.NormalFile content _ + | takeExtension fileName == ".cabal" + -> case splitDirectories (normalise fileName) of + [pkgname,vers,_] -> case simpleParse vers of + Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) + where + pkgid = PackageIdentifier (PackageName pkgname) ver + parsed = parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack + $ content + descr = case parsed of + ParseOk _ d -> d + _ -> error $ "Couldn't read cabal file " + ++ show fileName + _ -> Nothing + _ -> Nothing + + Tar.OtherEntryType typeCode content _ + | Tar.isBuildTreeRefTypeCode typeCode -> + Just $ do + let path = byteStringToFilePath content + dirExists <- doesDirectoryExist path + result <- if not dirExists then return Nothing + else do + cabalFile <- tryFindAddSourcePackageDesc path "Error reading package index." + descr <- PackageDesc.Parse.readPackageDescription normal cabalFile + return . Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) + descr path blockNo + return result + + _ -> Nothing + + where + fileName = Tar.entryPath entry + +extractPrefs :: Tar.Entry -> Maybe [Dependency] +extractPrefs entry = case Tar.entryContent entry of + Tar.NormalFile content _ + | takeFileName entrypath == "preferred-versions" + -> Just prefs + where + entrypath = Tar.entryPath entry + prefs = parsePreferredVersions content + _ -> Nothing + +parsePreferredVersions :: ByteString -> [Dependency] +parsePreferredVersions = mapMaybe simpleParse + . filter (not . isPrefixOf "--") + . lines + . BS.Char8.unpack -- TODO: Are we sure no unicode? + +------------------------------------------------------------------------ +-- Reading and updating the index cache +-- + +-- | Variation on 'sequence' which evaluates the actions lazily +-- +-- Pattern matching on the result list will execute just the first action; +-- more generally pattern matching on the first @n@ '(:)' nodes will execute +-- the first @n@ actions. +lazySequence :: [IO a] -> IO [a] +lazySequence = unsafeInterleaveIO . 
go + where + go [] = return [] + go (x:xs) = do x' <- x + xs' <- lazySequence xs + return (x' : xs') + +-- | Which index do we mean? +data Index = + -- | The main index for the specified repository + RepoIndex RepoContext Repo + + -- | A sandbox-local repository + -- Argument is the location of the index file + | SandboxIndex FilePath + +indexFile :: Index -> FilePath +indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar" +indexFile (SandboxIndex index) = index + +cacheFile :: Index -> FilePath +cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache" +cacheFile (SandboxIndex index) = index `replaceExtension` "cache" + +updatePackageIndexCacheFile :: Verbosity -> Index -> IO () +updatePackageIndexCacheFile verbosity index = do + info verbosity ("Updating index cache file " ++ cacheFile index) + withIndexEntries index $ \entries -> do + let cache = Cache { cacheEntries = entries } + writeFile (cacheFile index) (showIndexCache cache) + +-- | Read the index (for the purpose of building a cache) +-- +-- The callback is provided with list of cache entries, which is guaranteed to +-- be lazily constructed. This list must ONLY be used in the scope of the +-- callback; when the callback is terminated the file handle to the index will +-- be closed and further attempts to read from the list will result in (pure) +-- I/O exceptions. +-- +-- In the construction of the index for a secure repo we take advantage of the +-- index built by the @hackage-security@ library to avoid reading the @.tar@ +-- file as much as possible (we need to read it only to extract preferred +-- versions). This helps performance, but is also required for correctness: +-- the new @01-index.tar.gz@ may have multiple versions of preferred-versions +-- files, and 'parsePackageIndex' does not correctly deal with that (see #2956); +-- by reading the already-built cache from the security library we will be sure +-- to only read the latest versions of all files. +-- +-- TODO: It would be nicer if we actually incrementally updated @cabal@'s +-- cache, rather than reconstruct it from zero on each update. However, this +-- would require a change in the cache format. 
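+--
+-- (Editorial illustration, not part of the upstream comment; the helper name
+-- 'countEntries' is made up, everything else is defined in this module. The
+-- entry list must be consumed inside the callback, e.g.
+--
+-- > countEntries :: Index -> IO Int
+-- > countEntries idx = withIndexEntries idx (evaluate . length)
+--
+-- whereas returning the list unevaluated and forcing it after the callback
+-- has returned would read from a handle that is already closed.)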
+withIndexEntries :: Index -> ([IndexCacheEntry] -> IO a) -> IO a +withIndexEntries (RepoIndex repoCtxt repo@RepoSecure{..}) callback = + repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> + Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do + let mk :: (Sec.DirectoryEntry, fp, Maybe (Sec.Some Sec.IndexFile)) + -> IO [IndexCacheEntry] + mk (_, _fp, Nothing) = + return [] -- skip unrecognized file + mk (_, _fp, Just (Sec.Some (Sec.IndexPkgMetadata _pkgId))) = + return [] -- skip metadata + mk (dirEntry, _fp, Just (Sec.Some (Sec.IndexPkgCabal pkgId))) = do + let blockNo = fromIntegral (Sec.directoryEntryBlockNo dirEntry) + return [CachePackageId pkgId blockNo] + mk (dirEntry, _fp, Just (Sec.Some file@(Sec.IndexPkgPrefs _pkgName))) = do + content <- Sec.indexEntryContent `fmap` indexLookupFileEntry dirEntry file + return $ map CachePreference (parsePreferredVersions content) + entriess <- lazySequence $ map mk (Sec.directoryEntries indexDirectory) + callback $ concat entriess +withIndexEntries index callback = do + withFile (indexFile index) ReadMode $ \h -> do + bs <- maybeDecompress `fmap` BS.hGetContents h + pkgsOrPrefs <- lazySequence $ parsePackageIndex bs + callback $ map toCache (catMaybes pkgsOrPrefs) + where + toCache :: PackageOrDep -> IndexCacheEntry + toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo + toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo + toCache (Dep d) = CachePreference d + +data ReadPackageIndexMode = ReadPackageIndexStrict + | ReadPackageIndexLazyIO + +readPackageIndexCacheFile :: Package pkg + => (PackageEntry -> pkg) + -> Index + -> IO (PackageIndex pkg, [Dependency]) +readPackageIndexCacheFile mkPkg index = do + cache <- liftM readIndexCache $ BSS.readFile (cacheFile index) + indexHnd <- openFile (indexFile index) ReadMode + packageIndexFromCache mkPkg indexHnd cache ReadPackageIndexLazyIO + +packageIndexFromCache :: Package pkg + => (PackageEntry -> pkg) + -> Handle + -> Cache + -> ReadPackageIndexMode + -> IO (PackageIndex pkg, [Dependency]) +packageIndexFromCache mkPkg hnd cache mode = do + (pkgs, prefs) <- packageListFromCache mkPkg hnd cache mode + pkgIndex <- evaluate $ PackageIndex.fromList pkgs + return (pkgIndex, prefs) + +-- | Read package list +-- +-- The result packages (though not the preferences) are guaranteed to be listed +-- in the same order as they are in the tar file (because later entries in a tar +-- file mask earlier ones). +packageListFromCache :: (PackageEntry -> pkg) + -> Handle + -> Cache + -> ReadPackageIndexMode + -> IO ([pkg], [Dependency]) +packageListFromCache mkPkg hnd Cache{..} mode = accum mempty [] cacheEntries + where + accum srcpkgs prefs [] = return (reverse srcpkgs, prefs) + + accum srcpkgs prefs (CachePackageId pkgid blockno : entries) = do + -- Given the cache entry, make a package index entry. + -- The magic here is that we use lazy IO to read the .cabal file + -- from the index tarball if it turns out that we need it. + -- Most of the time we only need the package id. 
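+      -- (Editorial sketch, not upstream text: 'hnd' and 'cache' below stand
+      -- for an open index handle and its parsed cache. In the lazy mode a
+      -- caller that only looks at package ids never touches the tar file,
+      -- e.g.
+      --
+      -- > (pkgs, _prefs) <- packageListFromCache id hnd cache ReadPackageIndexLazyIO
+      -- > mapM_ (putStrLn . display . packageId) pkgs
+      --
+      -- since the id comes from the cache; only forcing the description or
+      -- the package text triggers 'getEntryContent'.)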
+ ~(pkg, pkgtxt) <- unsafeInterleaveIO $ do + pkgtxt <- getEntryContent blockno + pkg <- readPackageDescription pkgtxt + return (pkg, pkgtxt) + let srcpkg = case mode of + ReadPackageIndexLazyIO -> + mkPkg (NormalPackage pkgid pkg pkgtxt blockno) + ReadPackageIndexStrict -> + pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg + pkgtxt blockno) + accum (srcpkg:srcpkgs) prefs entries + + accum srcpkgs prefs (CacheBuildTreeRef refType blockno : entries) = do + -- We have to read the .cabal file eagerly here because we can't cache the + -- package id for build tree references - the user might edit the .cabal + -- file after the reference was added to the index. + path <- liftM byteStringToFilePath . getEntryContent $ blockno + pkg <- do let err = "Error reading package index from cache." + file <- tryFindAddSourcePackageDesc path err + PackageDesc.Parse.readPackageDescription normal file + let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) + accum (srcpkg:srcpkgs) prefs entries + + accum srcpkgs prefs (CachePreference pref : entries) = + accum srcpkgs (pref:prefs) entries + + getEntryContent :: BlockNo -> IO ByteString + getEntryContent blockno = do + entry <- Tar.hReadEntry hnd blockno + case Tar.entryContent entry of + Tar.NormalFile content _size -> return content + Tar.OtherEntryType typecode content _size + | Tar.isBuildTreeRefTypeCode typecode + -> return content + _ -> interror "unexpected tar entry type" + + readPackageDescription :: ByteString -> IO GenericPackageDescription + readPackageDescription content = + case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of + ParseOk _ d -> return d + _ -> interror "failed to parse .cabal file" + + interror msg = die $ "internal error when reading package index: " ++ msg + ++ "The package index or index cache is probably " + ++ "corrupt. Running cabal update might fix it." + +------------------------------------------------------------------------ +-- Index cache data structure +-- + +-- | Tar files are block structured with 512 byte blocks. Every header and file +-- content starts on a block boundary. 
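+--
+-- (Editorial illustration, not part of the upstream comment: the cache file
+-- written for a repository is plain text with one entry per line, using the
+-- key strings defined below. With a made-up package and block offset, two
+-- representative lines are
+--
+-- > pkg: async 2.1.0 b# 1234
+-- > pref-ver: base >=4.3 && <4.10
+--
+-- which 'readIndexCacheEntry' turns back into 'CachePackageId' and
+-- 'CachePreference' values.)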
+-- +type BlockNo = Tar.TarEntryOffset + +data IndexCacheEntry = CachePackageId PackageId BlockNo + | CacheBuildTreeRef BuildTreeRefType BlockNo + | CachePreference Dependency + deriving (Eq) + +installedUnitId, blocknoKey, buildTreeRefKey, preferredVersionKey :: String +installedUnitId = "pkg:" +blocknoKey = "b#" +buildTreeRefKey = "build-tree-ref:" +preferredVersionKey = "pref-ver:" + +readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry +readIndexCacheEntry = \line -> + case BSS.words line of + [key, pkgnamestr, pkgverstr, sep, blocknostr] + | key == BSS.pack installedUnitId && sep == BSS.pack blocknoKey -> + case (parseName pkgnamestr, parseVer pkgverstr [], + parseBlockNo blocknostr) of + (Just pkgname, Just pkgver, Just blockno) + -> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno) + _ -> Nothing + [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> + case (parseRefType typecodestr, parseBlockNo blocknostr) of + (Just refType, Just blockno) + -> Just (CacheBuildTreeRef refType blockno) + _ -> Nothing + + (key: remainder) | key == BSS.pack preferredVersionKey -> + fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder))) + _ -> Nothing + where + parseName str + | BSS.all (\c -> isAlphaNum c || c == '-') str + = Just (PackageName (BSS.unpack str)) + | otherwise = Nothing + + parseVer str vs = + case BSS.readInt str of + Nothing -> Nothing + Just (v, str') -> case BSS.uncons str' of + Just ('.', str'') -> parseVer str'' (v:vs) + Just _ -> Nothing + Nothing -> Just (Version (reverse (v:vs)) []) + + parseBlockNo str = + case BSS.readInt str of + Just (blockno, remainder) + | BSS.null remainder -> Just (fromIntegral blockno) + _ -> Nothing + + parseRefType str = + case BSS.uncons str of + Just (typeCode, remainder) + | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode + -> Just (refTypeFromTypeCode typeCode) + _ -> Nothing + +showIndexCacheEntry :: IndexCacheEntry -> String +showIndexCacheEntry entry = unwords $ case entry of + CachePackageId pkgid b -> [ installedUnitId + , display (packageName pkgid) + , display (packageVersion pkgid) + , blocknoKey + , show b + ] + CacheBuildTreeRef t b -> [ buildTreeRefKey + , [typeCodeFromRefType t] + , show b + ] + CachePreference dep -> [ preferredVersionKey + , display dep + ] + +-- | Cabal caches various information about the Hackage index +data Cache = Cache { + cacheEntries :: [IndexCacheEntry] + } + +readIndexCache :: BSS.ByteString -> Cache +readIndexCache bs = Cache { + cacheEntries = mapMaybe readIndexCacheEntry $ BSS.lines bs + } + +showIndexCache :: Cache -> String +showIndexCache Cache{..} = unlines $ map showIndexCacheEntry cacheEntries diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Heuristics.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Heuristics.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Heuristics.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Heuristics.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,391 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init.Heuristics +-- Copyright : (c) Benedikt Huber 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : 
provisional
+-- Portability :  portable
+--
+-- Heuristics for creating initial cabal files.
+--
+-----------------------------------------------------------------------------
+module Distribution.Client.Init.Heuristics (
+    guessPackageName,
+    scanForModules, SourceFileEntry(..),
+    neededBuildPrograms,
+    guessMainFileCandidates,
+    guessAuthorNameMail,
+    knownCategories,
+) where
+import Distribution.Text (simpleParse)
+import Distribution.Simple.Setup (Flag(..), flagToMaybe)
+import Distribution.ModuleName
+    ( ModuleName, toFilePath )
+import Distribution.Client.PackageIndex
+    ( allPackagesByName )
+import qualified Distribution.Package as P
+import qualified Distribution.PackageDescription as PD
+    ( category, packageDescription )
+import Distribution.Simple.Utils
+    ( intercalate )
+import Distribution.Client.Utils
+    ( tryCanonicalizePath )
+import Language.Haskell.Extension ( Extension )
+
+import Distribution.Client.Types ( packageDescription, SourcePackageDb(..) )
+#if !MIN_VERSION_base(4,8,0)
+import Control.Applicative ( pure, (<$>), (<*>) )
+import Data.Monoid ( mempty, mappend, mconcat )
+#endif
+import Control.Arrow ( first )
+import Control.Monad ( liftM )
+import Data.Char ( isAlphaNum, isNumber, isUpper, isLower, isSpace )
+import Data.Either ( partitionEithers )
+import Data.List ( isInfixOf, isPrefixOf, isSuffixOf, sortBy )
+import Data.Maybe ( mapMaybe, catMaybes, maybeToList )
+import Data.Ord ( comparing )
+import qualified Data.Set as Set ( fromList, toList )
+import System.Directory ( getCurrentDirectory, getDirectoryContents,
+                          doesDirectoryExist, doesFileExist, getHomeDirectory, )
+import Distribution.Compat.Environment ( getEnvironment )
+import System.FilePath ( takeExtension, takeBaseName, dropExtension,
+                         (</>), (<.>), splitDirectories, makeRelative )
+
+import Distribution.Client.Init.Types ( InitFlags(..) )
+import Distribution.Client.Compat.Process ( readProcessWithExitCode )
+import System.Exit ( ExitCode(..) )
+
+-- | Return a list of candidate main files for this executable: top-level
+-- modules including the word 'Main' in the file name. The list is sorted in
+-- order of preference, shorter file names are preferred. 'Right's are existing
+-- candidates and 'Left's are those that do not yet exist.
+guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath]
+guessMainFileCandidates flags = do
+  dir <-
+    maybe getCurrentDirectory return (flagToMaybe $ packageDir flags)
+  files <- getDirectoryContents dir
+  let existingCandidates = filter isMain files
+      -- We always want to give the user at least one default choice. If either
+      -- Main.hs or Main.lhs has already been created, then we don't want to
+      -- suggest the other; however, if neither has been created, then we
+      -- suggest both.
+      newCandidates =
+        if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"]
+        then []
+        else ["Main.hs", "Main.lhs"]
+      candidates =
+        sortBy (\x y -> comparing (length . either id id) x y
+                        `mappend` compare x y)
+               (map Left newCandidates ++ map Right existingCandidates)
+  return candidates
+
+  where
+    isMain f = (isInfixOf "Main" f || isInfixOf "main" f)
+               && (isSuffixOf ".hs" f || isSuffixOf ".lhs" f)
+
+-- | Guess the package name based on the given root directory.
+guessPackageName :: FilePath -> IO P.PackageName
+guessPackageName = liftM (P.PackageName . repair . last . splitDirectories)
+                 . tryCanonicalizePath
+  where
+    -- Treat each span of non-alphanumeric characters as a hyphen. Each
+    -- hyphenated component of a package name must contain at least one
+    -- alphabetic character. An arbitrary character ('x') will be prepended if
+    -- this is not the case for the first component, and subsequent components
+    -- will simply be run together. For example, "1+2_foo-3" will become
+    -- "x12-foo3".
+    repair = repair' ('x' :) id
+    repair' invalid valid x = case dropWhile (not . isAlphaNum) x of
+        "" -> repairComponent ""
+        x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x'
+               in c ++ repairRest r
+      where
+        repairComponent c | all isNumber c = invalid c
+                          | otherwise      = valid c
+        repairRest = repair' id ('-' :)
+
+-- |Data type of source files found in the working directory
+data SourceFileEntry = SourceFileEntry
+    { relativeSourcePath :: FilePath
+    , moduleName         :: ModuleName
+    , fileExtension      :: String
+    , imports            :: [ModuleName]
+    , extensions         :: [Extension]
+    } deriving Show
+
+sfToFileName :: FilePath -> SourceFileEntry -> FilePath
+sfToFileName projectRoot (SourceFileEntry relPath m ext _ _)
+  = projectRoot </> relPath </> toFilePath m <.> ext
+
+-- |Search for source files in the given directory
+-- and return pairs of guessed Haskell source path and
+-- module names.
+scanForModules :: FilePath -> IO [SourceFileEntry]
+scanForModules rootDir = scanForModulesIn rootDir rootDir
+
+scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry]
+scanForModulesIn projectRoot srcRoot = scan srcRoot []
+  where
+    scan dir hierarchy = do
+        entries <- getDirectoryContents (projectRoot </> dir)
+        (files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries)
+        let modules = catMaybes [ guessModuleName hierarchy file
+                                | file <- files
+                                , isUpper (head file) ]
+        modules' <- mapM (findImportsAndExts projectRoot) modules
+        recMods <- mapM (scanRecursive dir hierarchy) dirs
+        return $ concat (modules' : recMods)
+    tagIsDir parent entry = do
+        isDir <- doesDirectoryExist (parent </> entry)
+        return $ (if isDir then Right else Left) entry
+    guessModuleName hierarchy entry
+        | takeBaseName entry == "Setup" = Nothing
+        | ext `elem` sourceExtensions =
+            SourceFileEntry <$> pure relRoot <*> modName <*> pure ext <*> pure [] <*> pure []
+        | otherwise = Nothing
+      where
+        relRoot = makeRelative projectRoot srcRoot
+        unqualModName = dropExtension entry
+        modName = simpleParse
+                $ intercalate "." . reverse $ (unqualModName : hierarchy)
+        ext = case takeExtension entry of '.':e -> e; e -> e
+    scanRecursive parent hierarchy entry
+      | isUpper (head entry) = scan (parent </> entry) (entry : hierarchy)
+      | isLower (head entry) && not (ignoreDir entry) =
+          scanForModulesIn projectRoot $ foldl (</>) srcRoot (reverse (entry : hierarchy))
+      | otherwise = return []
+    ignoreDir ('.':_) = True
+    ignoreDir dir     = dir `elem` ["dist", "_darcs"]
+
+findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry
+findImportsAndExts projectRoot sf = do
+    s <- readFile (sfToFileName projectRoot sf)
+
+    let modules = mapMaybe
+                  ( getModName
+                  . drop 1
+                  . filter (not . null)
+                  . dropWhile (/= "import")
+                  . words
+                  )
+                . filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering
+                . lines
+                $ s
+
+        -- TODO: We should probably make a better attempt at parsing
+        -- comments above. Unfortunately we can't use a full-fledged
+        -- Haskell parser since cabal's dependencies must be kept at a
+        -- minimum.
+
+        -- A poor man's LANGUAGE pragma parser.
+        exts = mapMaybe simpleParse
+             . concatMap getPragmas
+             . filter isLANGUAGEPragma
+             . map fst
+             . drop 1
+             . takeWhile (not . null . snd)
+             . iterate (takeBraces . snd)
+             $ ("",s)
+
+        takeBraces = break (== '}') . dropWhile (/= '{')
+
+        isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`)
+
+        getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13
+
+        splitCommas "" = []
+        splitCommas xs = x : splitCommas (drop 1 y)
+          where (x,y) = break (==',') xs
+
+    return sf { imports    = modules
+              , extensions = exts
+              }
+
+  where getModName :: [String] -> Maybe ModuleName
+        getModName []               = Nothing
+        getModName ("qualified":ws) = getModName ws
+        getModName (ms:_)           = simpleParse ms
+
+
+
+-- Unfortunately we cannot use the version exported by Distribution.Simple.Program
+knownSuffixHandlers :: [(String,String)]
+knownSuffixHandlers =
+  [ ("gc",     "greencard")
+  , ("chs",    "chs")
+  , ("hsc",    "hsc2hs")
+  , ("x",      "alex")
+  , ("y",      "happy")
+  , ("ly",     "happy")
+  , ("cpphs",  "cpp")
+  ]
+
+sourceExtensions :: [String]
+sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers
+
+neededBuildPrograms :: [SourceFileEntry] -> [String]
+neededBuildPrograms entries =
+  [ handler
+  | ext <- nubSet (map fileExtension entries)
+  , handler <- maybeToList (lookup ext knownSuffixHandlers)
+  ]
+
+-- | Guess author and email using darcs and git configuration options. Use
+-- the following in decreasing order of preference:
+--
+-- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*)
+-- 2. Local repo configs
+-- 3. Global vcs configs
+-- 4. The generic $EMAIL
+--
+-- Name and email are processed separately, so the guess might end up being
+-- a name from DARCS_EMAIL and an email from git config.
+--
+-- Darcs has preference, for tradition's sake.
+guessAuthorNameMail :: IO (Flag String, Flag String)
+guessAuthorNameMail = fmap authorGuessPure authorGuessIO
+
+-- Ordered in increasing preference, since Flag-as-monoid is identical to
+-- Last.
+authorGuessPure :: AuthorGuessIO -> AuthorGuess
+authorGuessPure (AuthorGuessIO env darcsLocalF darcsGlobalF gitLocal gitGlobal)
+    = mconcat
+        [ emailEnv env
+        , gitGlobal
+        , darcsCfg darcsGlobalF
+        , gitLocal
+        , darcsCfg darcsLocalF
+        , gitEnv env
+        , darcsEnv env
+        ]
+
+authorGuessIO :: IO AuthorGuessIO
+authorGuessIO = AuthorGuessIO
+    <$> getEnvironment
+    <*> (maybeReadFile $ "_darcs" </> "prefs" </> "author")
+    <*> (maybeReadFile =<< liftM (</> (".darcs" </> "author")) getHomeDirectory)
+    <*> gitCfg Local
+    <*> gitCfg Global
+
+-- Types and functions used for guessing the author are now defined:
+
+type AuthorGuess   = (Flag String, Flag String)
+type Enviro        = [(String, String)]
+data GitLoc        = Local | Global
+data AuthorGuessIO = AuthorGuessIO
+    Enviro         -- ^ Environment lookup table
+    (Maybe String) -- ^ Contents of local darcs author info
+    (Maybe String) -- ^ Contents of global darcs author info
+    AuthorGuess    -- ^ Git config --local
+    AuthorGuess    -- ^ Git config --global
+
+darcsEnv :: Enviro -> AuthorGuess
+darcsEnv = maybe mempty nameAndMail . lookup "DARCS_EMAIL"
+
+gitEnv :: Enviro -> AuthorGuess
+gitEnv env = (name, mail)
+  where
+    name = maybeFlag "GIT_AUTHOR_NAME" env
+    mail = maybeFlag "GIT_AUTHOR_EMAIL" env
+
+darcsCfg :: Maybe String -> AuthorGuess
+darcsCfg = maybe mempty nameAndMail
+
+emailEnv :: Enviro -> AuthorGuess
+emailEnv env = (mempty, mail)
+  where
+    mail = maybeFlag "EMAIL" env
+
+gitCfg :: GitLoc -> IO AuthorGuess
+gitCfg which = do
+  name <- gitVar which "user.name"
+  mail <- gitVar which "user.email"
+  return (name, mail)
+
+gitVar :: GitLoc -> String -> IO (Flag String)
+gitVar which = fmap happyOutput .
gitConfigQuery which + +happyOutput :: (ExitCode, a, t) -> Flag a +happyOutput v = case v of + (ExitSuccess, s, _) -> Flag s + _ -> mempty + +gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String) +gitConfigQuery which key = + fmap trim' $ readProcessWithExitCode "git" ["config", w, key] "" + where + w = case which of + Local -> "--local" + Global -> "--global" + trim' (a, b, c) = (a, trim b, c) + +maybeFlag :: String -> Enviro -> Flag String +maybeFlag k = maybe mempty Flag . lookup k + +-- | Read the first non-comment, non-trivial line of a file, if it exists +maybeReadFile :: String -> IO (Maybe String) +maybeReadFile f = do + exists <- doesFileExist f + if exists + then fmap getFirstLine $ readFile f + else return Nothing + where + getFirstLine content = + let nontrivialLines = dropWhile (\l -> (null l) || ("#" `isPrefixOf` l)) . lines $ content + in case nontrivialLines of + [] -> Nothing + (l:_) -> Just l + +-- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached +knownCategories :: SourcePackageDb -> [String] +knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet + [ cat | pkg <- map head (allPackagesByName sourcePkgIndex) + , let catList = (PD.category . PD.packageDescription . packageDescription) pkg + , cat <- splitString ',' catList + ] + +-- Parse name and email, from darcs pref files or environment variable +nameAndMail :: String -> (Flag String, Flag String) +nameAndMail str + | all isSpace nameOrEmail = mempty + | null erest = (mempty, Flag $ trim nameOrEmail) + | otherwise = (Flag $ trim nameOrEmail, Flag mail) + where + (nameOrEmail,erest) = break (== '<') str + (mail,_) = break (== '>') (tail erest) + +trim :: String -> String +trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse + where + removeLeadingSpace = dropWhile isSpace + +-- split string at given character, and remove whitespace +splitString :: Char -> String -> [String] +splitString sep str = go str where + go s = if null s' then [] else tok : go rest where + s' = dropWhile (\c -> c == sep || isSpace c) s + (tok,rest) = break (==sep) s' + +nubSet :: (Ord a) => [a] -> [a] +nubSet = Set.toList . Set.fromList + +{- +test db testProjectRoot = do + putStrLn "Guessed package name" + (guessPackageName >=> print) testProjectRoot + putStrLn "Guessed name and email" + guessAuthorNameMail >>= print + + mods <- scanForModules testProjectRoot + + putStrLn "Guessed modules" + mapM_ print mods + putStrLn "Needed build programs" + print (neededBuildPrograms mods) + + putStrLn "List of known categories" + print $ knownCategories db +-} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Licenses.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Licenses.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Licenses.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Licenses.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,3065 @@ +module Distribution.Client.Init.Licenses + ( License + , bsd2 + , bsd3 + , gplv2 + , gplv3 + , lgpl21 + , lgpl3 + , agplv3 + , apache20 + , mit + , mpl20 + , isc + ) where + +type License = String + +bsd2 :: String -> String -> License +bsd2 authors year = unlines + [ "Copyright (c) " ++ year ++ ", " ++ authors + , "All rights reserved." 
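+  -- (Illustrative aside, not part of the upstream source: each generator in
+  -- this module simply returns the license body as a 'String', so the
+  -- cabal-init code it serves can do e.g.
+  --
+  -- > writeFile "LICENSE" (bsd2 "Jane Doe" "2016")
+  --
+  -- where the author name and year are hypothetical.)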
+ , "" + , "Redistribution and use in source and binary forms, with or without" + , "modification, are permitted provided that the following conditions are" + , "met:" + , "" + , "1. Redistributions of source code must retain the above copyright" + , " notice, this list of conditions and the following disclaimer." + , "" + , "2. Redistributions in binary form must reproduce the above copyright" + , " notice, this list of conditions and the following disclaimer in the" + , " documentation and/or other materials provided with the" + , " distribution." + , "" + , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" + , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" + , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" + , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" + , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," + , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" + , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," + , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" + , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" + , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" + , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." + ] + +bsd3 :: String -> String -> License +bsd3 authors year = unlines + [ "Copyright (c) " ++ year ++ ", " ++ authors + , "" + , "All rights reserved." + , "" + , "Redistribution and use in source and binary forms, with or without" + , "modification, are permitted provided that the following conditions are met:" + , "" + , " * Redistributions of source code must retain the above copyright" + , " notice, this list of conditions and the following disclaimer." + , "" + , " * Redistributions in binary form must reproduce the above" + , " copyright notice, this list of conditions and the following" + , " disclaimer in the documentation and/or other materials provided" + , " with the distribution." + , "" + , " * Neither the name of " ++ authors ++ " nor the names of other" + , " contributors may be used to endorse or promote products derived" + , " from this software without specific prior written permission." + , "" + , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" + , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" + , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" + , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" + , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," + , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" + , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," + , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" + , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" + , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" + , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." + ] + +gplv2 :: License +gplv2 = unlines + [ " GNU GENERAL PUBLIC LICENSE" + , " Version 2, June 1991" + , "" + , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." 
+ , "" + , " Preamble" + , "" + , " The licenses for most software are designed to take away your" + , "freedom to share and change it. By contrast, the GNU General Public" + , "License is intended to guarantee your freedom to share and change free" + , "software--to make sure the software is free for all its users. This" + , "General Public License applies to most of the Free Software" + , "Foundation's software and to any other program whose authors commit to" + , "using it. (Some other Free Software Foundation software is covered by" + , "the GNU Lesser General Public License instead.) You can apply it to" + , "your programs, too." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "this service if you wish), that you receive source code or can get it" + , "if you want it, that you can change the software or use pieces of it" + , "in new free programs; and that you know you can do these things." + , "" + , " To protect your rights, we need to make restrictions that forbid" + , "anyone to deny you these rights or to ask you to surrender the rights." + , "These restrictions translate to certain responsibilities for you if you" + , "distribute copies of the software, or if you modify it." + , "" + , " For example, if you distribute copies of such a program, whether" + , "gratis or for a fee, you must give the recipients all the rights that" + , "you have. You must make sure that they, too, receive or can get the" + , "source code. And you must show them these terms so they know their" + , "rights." + , "" + , " We protect your rights with two steps: (1) copyright the software, and" + , "(2) offer you this license which gives you legal permission to copy," + , "distribute and/or modify the software." + , "" + , " Also, for each author's protection and ours, we want to make certain" + , "that everyone understands that there is no warranty for this free" + , "software. If the software is modified by someone else and passed on, we" + , "want its recipients to know that what they have is not the original, so" + , "that any problems introduced by others will not reflect on the original" + , "authors' reputations." + , "" + , " Finally, any free program is threatened constantly by software" + , "patents. We wish to avoid the danger that redistributors of a free" + , "program will individually obtain patent licenses, in effect making the" + , "program proprietary. To prevent this, we have made it clear that any" + , "patent must be licensed for everyone's free use or not licensed at all." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " GNU GENERAL PUBLIC LICENSE" + , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" + , "" + , " 0. This License applies to any program or other work which contains" + , "a notice placed by the copyright holder saying it may be distributed" + , "under the terms of this General Public License. The \"Program\", below," + , "refers to any such program or work, and a \"work based on the Program\"" + , "means either the Program or any derivative work under copyright law:" + , "that is to say, a work containing the Program or a portion of it," + , "either verbatim or with modifications and/or translated into another" + , "language. 
(Hereinafter, translation is included without limitation in" + , "the term \"modification\".) Each licensee is addressed as \"you\"." + , "" + , "Activities other than copying, distribution and modification are not" + , "covered by this License; they are outside its scope. The act of" + , "running the Program is not restricted, and the output from the Program" + , "is covered only if its contents constitute a work based on the" + , "Program (independent of having been made by running the Program)." + , "Whether that is true depends on what the Program does." + , "" + , " 1. You may copy and distribute verbatim copies of the Program's" + , "source code as you receive it, in any medium, provided that you" + , "conspicuously and appropriately publish on each copy an appropriate" + , "copyright notice and disclaimer of warranty; keep intact all the" + , "notices that refer to this License and to the absence of any warranty;" + , "and give any other recipients of the Program a copy of this License" + , "along with the Program." + , "" + , "You may charge a fee for the physical act of transferring a copy, and" + , "you may at your option offer warranty protection in exchange for a fee." + , "" + , " 2. You may modify your copy or copies of the Program or any portion" + , "of it, thus forming a work based on the Program, and copy and" + , "distribute such modifications or work under the terms of Section 1" + , "above, provided that you also meet all of these conditions:" + , "" + , " a) You must cause the modified files to carry prominent notices" + , " stating that you changed the files and the date of any change." + , "" + , " b) You must cause any work that you distribute or publish, that in" + , " whole or in part contains or is derived from the Program or any" + , " part thereof, to be licensed as a whole at no charge to all third" + , " parties under the terms of this License." + , "" + , " c) If the modified program normally reads commands interactively" + , " when run, you must cause it, when started running for such" + , " interactive use in the most ordinary way, to print or display an" + , " announcement including an appropriate copyright notice and a" + , " notice that there is no warranty (or else, saying that you provide" + , " a warranty) and that users may redistribute the program under" + , " these conditions, and telling the user how to view a copy of this" + , " License. (Exception: if the Program itself is interactive but" + , " does not normally print such an announcement, your work based on" + , " the Program is not required to print an announcement.)" + , "" + , "These requirements apply to the modified work as a whole. If" + , "identifiable sections of that work are not derived from the Program," + , "and can be reasonably considered independent and separate works in" + , "themselves, then this License, and its terms, do not apply to those" + , "sections when you distribute them as separate works. But when you" + , "distribute the same sections as part of a whole which is a work based" + , "on the Program, the distribution of the whole must be on the terms of" + , "this License, whose permissions for other licensees extend to the" + , "entire whole, and thus to each and every part regardless of who wrote it." 
+ , "" + , "Thus, it is not the intent of this section to claim rights or contest" + , "your rights to work written entirely by you; rather, the intent is to" + , "exercise the right to control the distribution of derivative or" + , "collective works based on the Program." + , "" + , "In addition, mere aggregation of another work not based on the Program" + , "with the Program (or with a work based on the Program) on a volume of" + , "a storage or distribution medium does not bring the other work under" + , "the scope of this License." + , "" + , " 3. You may copy and distribute the Program (or a work based on it," + , "under Section 2) in object code or executable form under the terms of" + , "Sections 1 and 2 above provided that you also do one of the following:" + , "" + , " a) Accompany it with the complete corresponding machine-readable" + , " source code, which must be distributed under the terms of Sections" + , " 1 and 2 above on a medium customarily used for software interchange; or," + , "" + , " b) Accompany it with a written offer, valid for at least three" + , " years, to give any third party, for a charge no more than your" + , " cost of physically performing source distribution, a complete" + , " machine-readable copy of the corresponding source code, to be" + , " distributed under the terms of Sections 1 and 2 above on a medium" + , " customarily used for software interchange; or," + , "" + , " c) Accompany it with the information you received as to the offer" + , " to distribute corresponding source code. (This alternative is" + , " allowed only for noncommercial distribution and only if you" + , " received the program in object code or executable form with such" + , " an offer, in accord with Subsection b above.)" + , "" + , "The source code for a work means the preferred form of the work for" + , "making modifications to it. For an executable work, complete source" + , "code means all the source code for all modules it contains, plus any" + , "associated interface definition files, plus the scripts used to" + , "control compilation and installation of the executable. However, as a" + , "special exception, the source code distributed need not include" + , "anything that is normally distributed (in either source or binary" + , "form) with the major components (compiler, kernel, and so on) of the" + , "operating system on which the executable runs, unless that component" + , "itself accompanies the executable." + , "" + , "If distribution of executable or object code is made by offering" + , "access to copy from a designated place, then offering equivalent" + , "access to copy the source code from the same place counts as" + , "distribution of the source code, even though third parties are not" + , "compelled to copy the source along with the object code." + , "" + , " 4. You may not copy, modify, sublicense, or distribute the Program" + , "except as expressly provided under this License. Any attempt" + , "otherwise to copy, modify, sublicense or distribute the Program is" + , "void, and will automatically terminate your rights under this License." + , "However, parties who have received copies, or rights, from you under" + , "this License will not have their licenses terminated so long as such" + , "parties remain in full compliance." + , "" + , " 5. You are not required to accept this License, since you have not" + , "signed it. However, nothing else grants you permission to modify or" + , "distribute the Program or its derivative works. 
These actions are" + , "prohibited by law if you do not accept this License. Therefore, by" + , "modifying or distributing the Program (or any work based on the" + , "Program), you indicate your acceptance of this License to do so, and" + , "all its terms and conditions for copying, distributing or modifying" + , "the Program or works based on it." + , "" + , " 6. Each time you redistribute the Program (or any work based on the" + , "Program), the recipient automatically receives a license from the" + , "original licensor to copy, distribute or modify the Program subject to" + , "these terms and conditions. You may not impose any further" + , "restrictions on the recipients' exercise of the rights granted herein." + , "You are not responsible for enforcing compliance by third parties to" + , "this License." + , "" + , " 7. If, as a consequence of a court judgment or allegation of patent" + , "infringement or for any other reason (not limited to patent issues)," + , "conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. If you cannot" + , "distribute so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you" + , "may not distribute the Program at all. For example, if a patent" + , "license would not permit royalty-free redistribution of the Program by" + , "all those who receive copies directly or indirectly through you, then" + , "the only way you could satisfy both it and this License would be to" + , "refrain entirely from distribution of the Program." + , "" + , "If any portion of this section is held invalid or unenforceable under" + , "any particular circumstance, the balance of the section is intended to" + , "apply and the section as a whole is intended to apply in other" + , "circumstances." + , "" + , "It is not the purpose of this section to induce you to infringe any" + , "patents or other property right claims or to contest validity of any" + , "such claims; this section has the sole purpose of protecting the" + , "integrity of the free software distribution system, which is" + , "implemented by public license practices. Many people have made" + , "generous contributions to the wide range of software distributed" + , "through that system in reliance on consistent application of that" + , "system; it is up to the author/donor to decide if he or she is willing" + , "to distribute software through any other system and a licensee cannot" + , "impose that choice." + , "" + , "This section is intended to make thoroughly clear what is believed to" + , "be a consequence of the rest of this License." + , "" + , " 8. If the distribution and/or use of the Program is restricted in" + , "certain countries either by patents or by copyrighted interfaces, the" + , "original copyright holder who places the Program under this License" + , "may add an explicit geographical distribution limitation excluding" + , "those countries, so that distribution is permitted only in or among" + , "countries not thus excluded. In such case, this License incorporates" + , "the limitation as if written in the body of this License." + , "" + , " 9. The Free Software Foundation may publish revised and/or new versions" + , "of the General Public License from time to time. 
Such new versions will" + , "be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , "Each version is given a distinguishing version number. If the Program" + , "specifies a version number of this License which applies to it and \"any" + , "later version\", you have the option of following the terms and conditions" + , "either of that version or of any later version published by the Free" + , "Software Foundation. If the Program does not specify a version number of" + , "this License, you may choose any version ever published by the Free Software" + , "Foundation." + , "" + , " 10. If you wish to incorporate parts of the Program into other free" + , "programs whose distribution conditions are different, write to the author" + , "to ask for permission. For software which is copyrighted by the Free" + , "Software Foundation, write to the Free Software Foundation; we sometimes" + , "make exceptions for this. Our decision will be guided by the two goals" + , "of preserving the free status of all derivatives of our free software and" + , "of promoting the sharing and reuse of software generally." + , "" + , " NO WARRANTY" + , "" + , " 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" + , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN" + , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" + , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" + , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" + , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" + , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" + , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," + , "REPAIR OR CORRECTION." + , "" + , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" + , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," + , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" + , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" + , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" + , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" + , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" + , "POSSIBILITY OF SUCH DAMAGES." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. It is safest" + , "to attach them to the start of each source file to most effectively" + , "convey the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." + , "" + , " " + , " Copyright (C) " + , "" + , " This program is free software; you can redistribute it and/or modify" + , " it under the terms of the GNU General Public License as published by" + , " the Free Software Foundation; either version 2 of the License, or" + , " (at your option) any later version." 
+ , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU General Public License for more details." + , "" + , " You should have received a copy of the GNU General Public License along" + , " with this program; if not, write to the Free Software Foundation, Inc.," + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , "If the program is interactive, make it output a short notice like this" + , "when it starts in an interactive mode:" + , "" + , " Gnomovision version 69, Copyright (C) year name of author" + , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." + , " This is free software, and you are welcome to redistribute it" + , " under certain conditions; type `show c' for details." + , "" + , "The hypothetical commands `show w' and `show c' should show the appropriate" + , "parts of the General Public License. Of course, the commands you use may" + , "be called something other than `show w' and `show c'; they could even be" + , "mouse-clicks or menu items--whatever suits your program." + , "" + , "You should also get your employer (if you work as a programmer) or your" + , "school, if any, to sign a \"copyright disclaimer\" for the program, if" + , "necessary. Here is a sample; alter the names:" + , "" + , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" + , " `Gnomovision' (which makes passes at compilers) written by James Hacker." + , "" + , " , 1 April 1989" + , " Ty Coon, President of Vice" + , "" + , "This General Public License does not permit incorporating your program into" + , "proprietary programs. If your program is a subroutine library, you may" + , "consider it more useful to permit linking proprietary applications with the" + , "library. If this is what you want to do, use the GNU Lesser General" + , "Public License instead of this License." + ] + +gplv3 :: License +gplv3 = unlines + [ " GNU GENERAL PUBLIC LICENSE" + , " Version 3, 29 June 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. " + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , " Preamble" + , "" + , " The GNU General Public License is a free, copyleft license for" + , "software and other kinds of works." + , "" + , " The licenses for most software and other practical works are designed" + , "to take away your freedom to share and change the works. By contrast," + , "the GNU General Public License is intended to guarantee your freedom to" + , "share and change all versions of a program--to make sure it remains free" + , "software for all its users. We, the Free Software Foundation, use the" + , "GNU General Public License for most of our software; it applies also to" + , "any other work released this way by its authors. You can apply it to" + , "your programs, too." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. 
Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "them if you wish), that you receive source code or can get it if you" + , "want it, that you can change the software or use pieces of it in new" + , "free programs, and that you know you can do these things." + , "" + , " To protect your rights, we need to prevent others from denying you" + , "these rights or asking you to surrender the rights. Therefore, you have" + , "certain responsibilities if you distribute copies of the software, or if" + , "you modify it: responsibilities to respect the freedom of others." + , "" + , " For example, if you distribute copies of such a program, whether" + , "gratis or for a fee, you must pass on to the recipients the same" + , "freedoms that you received. You must make sure that they, too, receive" + , "or can get the source code. And you must show them these terms so they" + , "know their rights." + , "" + , " Developers that use the GNU GPL protect your rights with two steps:" + , "(1) assert copyright on the software, and (2) offer you this License" + , "giving you legal permission to copy, distribute and/or modify it." + , "" + , " For the developers' and authors' protection, the GPL clearly explains" + , "that there is no warranty for this free software. For both users' and" + , "authors' sake, the GPL requires that modified versions be marked as" + , "changed, so that their problems will not be attributed erroneously to" + , "authors of previous versions." + , "" + , " Some devices are designed to deny users access to install or run" + , "modified versions of the software inside them, although the manufacturer" + , "can do so. This is fundamentally incompatible with the aim of" + , "protecting users' freedom to change the software. The systematic" + , "pattern of such abuse occurs in the area of products for individuals to" + , "use, which is precisely where it is most unacceptable. Therefore, we" + , "have designed this version of the GPL to prohibit the practice for those" + , "products. If such problems arise substantially in other domains, we" + , "stand ready to extend this provision to those domains in future versions" + , "of the GPL, as needed to protect the freedom of users." + , "" + , " Finally, every program is threatened constantly by software patents." + , "States should not allow patents to restrict development and use of" + , "software on general-purpose computers, but in those that do, we wish to" + , "avoid the special danger that patents applied to a free program could" + , "make it effectively proprietary. To prevent this, the GPL assures that" + , "patents cannot be used to render the program non-free." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " TERMS AND CONDITIONS" + , "" + , " 0. Definitions." + , "" + , " \"This License\" refers to version 3 of the GNU General Public License." + , "" + , " \"Copyright\" also means copyright-like laws that apply to other kinds of" + , "works, such as semiconductor masks." + , "" + , " \"The Program\" refers to any copyrightable work licensed under this" + , "License. Each licensee is addressed as \"you\". \"Licensees\" and" + , "\"recipients\" may be individuals or organizations." + , "" + , " To \"modify\" a work means to copy from or adapt all or part of the work" + , "in a fashion requiring copyright permission, other than the making of an" + , "exact copy. 
The resulting work is called a \"modified version\" of the" + , "earlier work or a work \"based on\" the earlier work." + , "" + , " A \"covered work\" means either the unmodified Program or a work based" + , "on the Program." + , "" + , " To \"propagate\" a work means to do anything with it that, without" + , "permission, would make you directly or secondarily liable for" + , "infringement under applicable copyright law, except executing it on a" + , "computer or modifying a private copy. Propagation includes copying," + , "distribution (with or without modification), making available to the" + , "public, and in some countries other activities as well." + , "" + , " To \"convey\" a work means any kind of propagation that enables other" + , "parties to make or receive copies. Mere interaction with a user through" + , "a computer network, with no transfer of a copy, is not conveying." + , "" + , " An interactive user interface displays \"Appropriate Legal Notices\"" + , "to the extent that it includes a convenient and prominently visible" + , "feature that (1) displays an appropriate copyright notice, and (2)" + , "tells the user that there is no warranty for the work (except to the" + , "extent that warranties are provided), that licensees may convey the" + , "work under this License, and how to view a copy of this License. If" + , "the interface presents a list of user commands or options, such as a" + , "menu, a prominent item in the list meets this criterion." + , "" + , " 1. Source Code." + , "" + , " The \"source code\" for a work means the preferred form of the work" + , "for making modifications to it. \"Object code\" means any non-source" + , "form of a work." + , "" + , " A \"Standard Interface\" means an interface that either is an official" + , "standard defined by a recognized standards body, or, in the case of" + , "interfaces specified for a particular programming language, one that" + , "is widely used among developers working in that language." + , "" + , " The \"System Libraries\" of an executable work include anything, other" + , "than the work as a whole, that (a) is included in the normal form of" + , "packaging a Major Component, but which is not part of that Major" + , "Component, and (b) serves only to enable use of the work with that" + , "Major Component, or to implement a Standard Interface for which an" + , "implementation is available to the public in source code form. A" + , "\"Major Component\", in this context, means a major essential component" + , "(kernel, window system, and so on) of the specific operating system" + , "(if any) on which the executable work runs, or a compiler used to" + , "produce the work, or an object code interpreter used to run it." + , "" + , " The \"Corresponding Source\" for a work in object code form means all" + , "the source code needed to generate, install, and (for an executable" + , "work) run the object code and to modify the work, including scripts to" + , "control those activities. However, it does not include the work's" + , "System Libraries, or general-purpose tools or generally available free" + , "programs which are used unmodified in performing those activities but" + , "which are not part of the work. 
For example, Corresponding Source" + , "includes interface definition files associated with source files for" + , "the work, and the source code for shared libraries and dynamically" + , "linked subprograms that the work is specifically designed to require," + , "such as by intimate data communication or control flow between those" + , "subprograms and other parts of the work." + , "" + , " The Corresponding Source need not include anything that users" + , "can regenerate automatically from other parts of the Corresponding" + , "Source." + , "" + , " The Corresponding Source for a work in source code form is that" + , "same work." + , "" + , " 2. Basic Permissions." + , "" + , " All rights granted under this License are granted for the term of" + , "copyright on the Program, and are irrevocable provided the stated" + , "conditions are met. This License explicitly affirms your unlimited" + , "permission to run the unmodified Program. The output from running a" + , "covered work is covered by this License only if the output, given its" + , "content, constitutes a covered work. This License acknowledges your" + , "rights of fair use or other equivalent, as provided by copyright law." + , "" + , " You may make, run and propagate covered works that you do not" + , "convey, without conditions so long as your license otherwise remains" + , "in force. You may convey covered works to others for the sole purpose" + , "of having them make modifications exclusively for you, or provide you" + , "with facilities for running those works, provided that you comply with" + , "the terms of this License in conveying all material for which you do" + , "not control copyright. Those thus making or running the covered works" + , "for you must do so exclusively on your behalf, under your direction" + , "and control, on terms that prohibit them from making any copies of" + , "your copyrighted material outside their relationship with you." + , "" + , " Conveying under any other circumstances is permitted solely under" + , "the conditions stated below. Sublicensing is not allowed; section 10" + , "makes it unnecessary." + , "" + , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." + , "" + , " No covered work shall be deemed part of an effective technological" + , "measure under any applicable law fulfilling obligations under article" + , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" + , "similar laws prohibiting or restricting circumvention of such" + , "measures." + , "" + , " When you convey a covered work, you waive any legal power to forbid" + , "circumvention of technological measures to the extent such circumvention" + , "is effected by exercising rights under this License with respect to" + , "the covered work, and you disclaim any intention to limit operation or" + , "modification of the work as a means of enforcing, against the work's" + , "users, your or third parties' legal rights to forbid circumvention of" + , "technological measures." + , "" + , " 4. Conveying Verbatim Copies." 
+ , "" + , " You may convey verbatim copies of the Program's source code as you" + , "receive it, in any medium, provided that you conspicuously and" + , "appropriately publish on each copy an appropriate copyright notice;" + , "keep intact all notices stating that this License and any" + , "non-permissive terms added in accord with section 7 apply to the code;" + , "keep intact all notices of the absence of any warranty; and give all" + , "recipients a copy of this License along with the Program." + , "" + , " You may charge any price or no price for each copy that you convey," + , "and you may offer support or warranty protection for a fee." + , "" + , " 5. Conveying Modified Source Versions." + , "" + , " You may convey a work based on the Program, or the modifications to" + , "produce it from the Program, in the form of source code under the" + , "terms of section 4, provided that you also meet all of these conditions:" + , "" + , " a) The work must carry prominent notices stating that you modified" + , " it, and giving a relevant date." + , "" + , " b) The work must carry prominent notices stating that it is" + , " released under this License and any conditions added under section" + , " 7. This requirement modifies the requirement in section 4 to" + , " \"keep intact all notices\"." + , "" + , " c) You must license the entire work, as a whole, under this" + , " License to anyone who comes into possession of a copy. This" + , " License will therefore apply, along with any applicable section 7" + , " additional terms, to the whole of the work, and all its parts," + , " regardless of how they are packaged. This License gives no" + , " permission to license the work in any other way, but it does not" + , " invalidate such permission if you have separately received it." + , "" + , " d) If the work has interactive user interfaces, each must display" + , " Appropriate Legal Notices; however, if the Program has interactive" + , " interfaces that do not display Appropriate Legal Notices, your" + , " work need not make them do so." + , "" + , " A compilation of a covered work with other separate and independent" + , "works, which are not by their nature extensions of the covered work," + , "and which are not combined with it such as to form a larger program," + , "in or on a volume of a storage or distribution medium, is called an" + , "\"aggregate\" if the compilation and its resulting copyright are not" + , "used to limit the access or legal rights of the compilation's users" + , "beyond what the individual works permit. Inclusion of a covered work" + , "in an aggregate does not cause this License to apply to the other" + , "parts of the aggregate." + , "" + , " 6. Conveying Non-Source Forms." + , "" + , " You may convey a covered work in object code form under the terms" + , "of sections 4 and 5, provided that you also convey the" + , "machine-readable Corresponding Source under the terms of this License," + , "in one of these ways:" + , "" + , " a) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by the" + , " Corresponding Source fixed on a durable physical medium" + , " customarily used for software interchange." 
+ , "" + , " b) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by a" + , " written offer, valid for at least three years and valid for as" + , " long as you offer spare parts or customer support for that product" + , " model, to give anyone who possesses the object code either (1) a" + , " copy of the Corresponding Source for all the software in the" + , " product that is covered by this License, on a durable physical" + , " medium customarily used for software interchange, for a price no" + , " more than your reasonable cost of physically performing this" + , " conveying of source, or (2) access to copy the" + , " Corresponding Source from a network server at no charge." + , "" + , " c) Convey individual copies of the object code with a copy of the" + , " written offer to provide the Corresponding Source. This" + , " alternative is allowed only occasionally and noncommercially, and" + , " only if you received the object code with such an offer, in accord" + , " with subsection 6b." + , "" + , " d) Convey the object code by offering access from a designated" + , " place (gratis or for a charge), and offer equivalent access to the" + , " Corresponding Source in the same way through the same place at no" + , " further charge. You need not require recipients to copy the" + , " Corresponding Source along with the object code. If the place to" + , " copy the object code is a network server, the Corresponding Source" + , " may be on a different server (operated by you or a third party)" + , " that supports equivalent copying facilities, provided you maintain" + , " clear directions next to the object code saying where to find the" + , " Corresponding Source. Regardless of what server hosts the" + , " Corresponding Source, you remain obligated to ensure that it is" + , " available for as long as needed to satisfy these requirements." + , "" + , " e) Convey the object code using peer-to-peer transmission, provided" + , " you inform other peers where the object code and Corresponding" + , " Source of the work are being offered to the general public at no" + , " charge under subsection 6d." + , "" + , " A separable portion of the object code, whose source code is excluded" + , "from the Corresponding Source as a System Library, need not be" + , "included in conveying the object code work." + , "" + , " A \"User Product\" is either (1) a \"consumer product\", which means any" + , "tangible personal property which is normally used for personal, family," + , "or household purposes, or (2) anything designed or sold for incorporation" + , "into a dwelling. In determining whether a product is a consumer product," + , "doubtful cases shall be resolved in favor of coverage. For a particular" + , "product received by a particular user, \"normally used\" refers to a" + , "typical or common use of that class of product, regardless of the status" + , "of the particular user or of the way in which the particular user" + , "actually uses, or expects or is expected to use, the product. A product" + , "is a consumer product regardless of whether the product has substantial" + , "commercial, industrial or non-consumer uses, unless such uses represent" + , "the only significant mode of use of the product." 
+ , "" + , " \"Installation Information\" for a User Product means any methods," + , "procedures, authorization keys, or other information required to install" + , "and execute modified versions of a covered work in that User Product from" + , "a modified version of its Corresponding Source. The information must" + , "suffice to ensure that the continued functioning of the modified object" + , "code is in no case prevented or interfered with solely because" + , "modification has been made." + , "" + , " If you convey an object code work under this section in, or with, or" + , "specifically for use in, a User Product, and the conveying occurs as" + , "part of a transaction in which the right of possession and use of the" + , "User Product is transferred to the recipient in perpetuity or for a" + , "fixed term (regardless of how the transaction is characterized), the" + , "Corresponding Source conveyed under this section must be accompanied" + , "by the Installation Information. But this requirement does not apply" + , "if neither you nor any third party retains the ability to install" + , "modified object code on the User Product (for example, the work has" + , "been installed in ROM)." + , "" + , " The requirement to provide Installation Information does not include a" + , "requirement to continue to provide support service, warranty, or updates" + , "for a work that has been modified or installed by the recipient, or for" + , "the User Product in which it has been modified or installed. Access to a" + , "network may be denied when the modification itself materially and" + , "adversely affects the operation of the network or violates the rules and" + , "protocols for communication across the network." + , "" + , " Corresponding Source conveyed, and Installation Information provided," + , "in accord with this section must be in a format that is publicly" + , "documented (and with an implementation available to the public in" + , "source code form), and must require no special password or key for" + , "unpacking, reading or copying." + , "" + , " 7. Additional Terms." + , "" + , " \"Additional permissions\" are terms that supplement the terms of this" + , "License by making exceptions from one or more of its conditions." + , "Additional permissions that are applicable to the entire Program shall" + , "be treated as though they were included in this License, to the extent" + , "that they are valid under applicable law. If additional permissions" + , "apply only to part of the Program, that part may be used separately" + , "under those permissions, but the entire Program remains governed by" + , "this License without regard to the additional permissions." + , "" + , " When you convey a copy of a covered work, you may at your option" + , "remove any additional permissions from that copy, or from any part of" + , "it. (Additional permissions may be written to require their own" + , "removal in certain cases when you modify the work.) You may place" + , "additional permissions on material, added by you to a covered work," + , "for which you have or can give appropriate copyright permission." 
+ , "" + , " Notwithstanding any other provision of this License, for material you" + , "add to a covered work, you may (if authorized by the copyright holders of" + , "that material) supplement the terms of this License with terms:" + , "" + , " a) Disclaiming warranty or limiting liability differently from the" + , " terms of sections 15 and 16 of this License; or" + , "" + , " b) Requiring preservation of specified reasonable legal notices or" + , " author attributions in that material or in the Appropriate Legal" + , " Notices displayed by works containing it; or" + , "" + , " c) Prohibiting misrepresentation of the origin of that material, or" + , " requiring that modified versions of such material be marked in" + , " reasonable ways as different from the original version; or" + , "" + , " d) Limiting the use for publicity purposes of names of licensors or" + , " authors of the material; or" + , "" + , " e) Declining to grant rights under trademark law for use of some" + , " trade names, trademarks, or service marks; or" + , "" + , " f) Requiring indemnification of licensors and authors of that" + , " material by anyone who conveys the material (or modified versions of" + , " it) with contractual assumptions of liability to the recipient, for" + , " any liability that these contractual assumptions directly impose on" + , " those licensors and authors." + , "" + , " All other non-permissive additional terms are considered \"further" + , "restrictions\" within the meaning of section 10. If the Program as you" + , "received it, or any part of it, contains a notice stating that it is" + , "governed by this License along with a term that is a further" + , "restriction, you may remove that term. If a license document contains" + , "a further restriction but permits relicensing or conveying under this" + , "License, you may add to a covered work material governed by the terms" + , "of that license document, provided that the further restriction does" + , "not survive such relicensing or conveying." + , "" + , " If you add terms to a covered work in accord with this section, you" + , "must place, in the relevant source files, a statement of the" + , "additional terms that apply to those files, or a notice indicating" + , "where to find the applicable terms." + , "" + , " Additional terms, permissive or non-permissive, may be stated in the" + , "form of a separately written license, or stated as exceptions;" + , "the above requirements apply either way." + , "" + , " 8. Termination." + , "" + , " You may not propagate or modify a covered work except as expressly" + , "provided under this License. Any attempt otherwise to propagate or" + , "modify it is void, and will automatically terminate your rights under" + , "this License (including any patent licenses granted under the third" + , "paragraph of section 11)." + , "" + , " However, if you cease all violation of this License, then your" + , "license from a particular copyright holder is reinstated (a)" + , "provisionally, unless and until the copyright holder explicitly and" + , "finally terminates your license, and (b) permanently, if the copyright" + , "holder fails to notify you of the violation by some reasonable means" + , "prior to 60 days after the cessation." 
+ , "" + , " Moreover, your license from a particular copyright holder is" + , "reinstated permanently if the copyright holder notifies you of the" + , "violation by some reasonable means, this is the first time you have" + , "received notice of violation of this License (for any work) from that" + , "copyright holder, and you cure the violation prior to 30 days after" + , "your receipt of the notice." + , "" + , " Termination of your rights under this section does not terminate the" + , "licenses of parties who have received copies or rights from you under" + , "this License. If your rights have been terminated and not permanently" + , "reinstated, you do not qualify to receive new licenses for the same" + , "material under section 10." + , "" + , " 9. Acceptance Not Required for Having Copies." + , "" + , " You are not required to accept this License in order to receive or" + , "run a copy of the Program. Ancillary propagation of a covered work" + , "occurring solely as a consequence of using peer-to-peer transmission" + , "to receive a copy likewise does not require acceptance. However," + , "nothing other than this License grants you permission to propagate or" + , "modify any covered work. These actions infringe copyright if you do" + , "not accept this License. Therefore, by modifying or propagating a" + , "covered work, you indicate your acceptance of this License to do so." + , "" + , " 10. Automatic Licensing of Downstream Recipients." + , "" + , " Each time you convey a covered work, the recipient automatically" + , "receives a license from the original licensors, to run, modify and" + , "propagate that work, subject to this License. You are not responsible" + , "for enforcing compliance by third parties with this License." + , "" + , " An \"entity transaction\" is a transaction transferring control of an" + , "organization, or substantially all assets of one, or subdividing an" + , "organization, or merging organizations. If propagation of a covered" + , "work results from an entity transaction, each party to that" + , "transaction who receives a copy of the work also receives whatever" + , "licenses to the work the party's predecessor in interest had or could" + , "give under the previous paragraph, plus a right to possession of the" + , "Corresponding Source of the work from the predecessor in interest, if" + , "the predecessor has it or can get it with reasonable efforts." + , "" + , " You may not impose any further restrictions on the exercise of the" + , "rights granted or affirmed under this License. For example, you may" + , "not impose a license fee, royalty, or other charge for exercise of" + , "rights granted under this License, and you may not initiate litigation" + , "(including a cross-claim or counterclaim in a lawsuit) alleging that" + , "any patent claim is infringed by making, using, selling, offering for" + , "sale, or importing the Program or any portion of it." + , "" + , " 11. Patents." + , "" + , " A \"contributor\" is a copyright holder who authorizes use under this" + , "License of the Program or a work on which the Program is based. The" + , "work thus licensed is called the contributor's \"contributor version\"." 
+ , "" + , " A contributor's \"essential patent claims\" are all patent claims" + , "owned or controlled by the contributor, whether already acquired or" + , "hereafter acquired, that would be infringed by some manner, permitted" + , "by this License, of making, using, or selling its contributor version," + , "but do not include claims that would be infringed only as a" + , "consequence of further modification of the contributor version. For" + , "purposes of this definition, \"control\" includes the right to grant" + , "patent sublicenses in a manner consistent with the requirements of" + , "this License." + , "" + , " Each contributor grants you a non-exclusive, worldwide, royalty-free" + , "patent license under the contributor's essential patent claims, to" + , "make, use, sell, offer for sale, import and otherwise run, modify and" + , "propagate the contents of its contributor version." + , "" + , " In the following three paragraphs, a \"patent license\" is any express" + , "agreement or commitment, however denominated, not to enforce a patent" + , "(such as an express permission to practice a patent or covenant not to" + , "sue for patent infringement). To \"grant\" such a patent license to a" + , "party means to make such an agreement or commitment not to enforce a" + , "patent against the party." + , "" + , " If you convey a covered work, knowingly relying on a patent license," + , "and the Corresponding Source of the work is not available for anyone" + , "to copy, free of charge and under the terms of this License, through a" + , "publicly available network server or other readily accessible means," + , "then you must either (1) cause the Corresponding Source to be so" + , "available, or (2) arrange to deprive yourself of the benefit of the" + , "patent license for this particular work, or (3) arrange, in a manner" + , "consistent with the requirements of this License, to extend the patent" + , "license to downstream recipients. \"Knowingly relying\" means you have" + , "actual knowledge that, but for the patent license, your conveying the" + , "covered work in a country, or your recipient's use of the covered work" + , "in a country, would infringe one or more identifiable patents in that" + , "country that you have reason to believe are valid." + , "" + , " If, pursuant to or in connection with a single transaction or" + , "arrangement, you convey, or propagate by procuring conveyance of, a" + , "covered work, and grant a patent license to some of the parties" + , "receiving the covered work authorizing them to use, propagate, modify" + , "or convey a specific copy of the covered work, then the patent license" + , "you grant is automatically extended to all recipients of the covered" + , "work and works based on it." + , "" + , " A patent license is \"discriminatory\" if it does not include within" + , "the scope of its coverage, prohibits the exercise of, or is" + , "conditioned on the non-exercise of one or more of the rights that are" + , "specifically granted under this License. 
You may not convey a covered" + , "work if you are a party to an arrangement with a third party that is" + , "in the business of distributing software, under which you make payment" + , "to the third party based on the extent of your activity of conveying" + , "the work, and under which the third party grants, to any of the" + , "parties who would receive the covered work from you, a discriminatory" + , "patent license (a) in connection with copies of the covered work" + , "conveyed by you (or copies made from those copies), or (b) primarily" + , "for and in connection with specific products or compilations that" + , "contain the covered work, unless you entered into that arrangement," + , "or that patent license was granted, prior to 28 March 2007." + , "" + , " Nothing in this License shall be construed as excluding or limiting" + , "any implied license or other defenses to infringement that may" + , "otherwise be available to you under applicable patent law." + , "" + , " 12. No Surrender of Others' Freedom." + , "" + , " If conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. If you cannot convey a" + , "covered work so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you may" + , "not convey it at all. For example, if you agree to terms that obligate you" + , "to collect a royalty for further conveying from those to whom you convey" + , "the Program, the only way you could satisfy both those terms and this" + , "License would be to refrain entirely from conveying the Program." + , "" + , " 13. Use with the GNU Affero General Public License." + , "" + , " Notwithstanding any other provision of this License, you have" + , "permission to link or combine any covered work with a work licensed" + , "under version 3 of the GNU Affero General Public License into a single" + , "combined work, and to convey the resulting work. The terms of this" + , "License will continue to apply to the part which is the covered work," + , "but the special requirements of the GNU Affero General Public License," + , "section 13, concerning interaction through a network will apply to the" + , "combination as such." + , "" + , " 14. Revised Versions of this License." + , "" + , " The Free Software Foundation may publish revised and/or new versions of" + , "the GNU General Public License from time to time. Such new versions will" + , "be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Program specifies that a certain numbered version of the GNU General" + , "Public License \"or any later version\" applies to it, you have the" + , "option of following the terms and conditions either of that numbered" + , "version or of any later version published by the Free Software" + , "Foundation. If the Program does not specify a version number of the" + , "GNU General Public License, you may choose any version ever published" + , "by the Free Software Foundation." + , "" + , " If the Program specifies that a proxy can decide which future" + , "versions of the GNU General Public License can be used, that proxy's" + , "public statement of acceptance of a version permanently authorizes you" + , "to choose that version for the Program." 
+ , "" + , " Later license versions may give you additional or different" + , "permissions. However, no additional obligations are imposed on any" + , "author or copyright holder as a result of your choosing to follow a" + , "later version." + , "" + , " 15. Disclaimer of Warranty." + , "" + , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" + , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" + , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" + , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," + , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" + , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" + , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. Limitation of Liability." + , "" + , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" + , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" + , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" + , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" + , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" + , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," + , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" + , "SUCH DAMAGES." + , "" + , " 17. Interpretation of Sections 15 and 16." + , "" + , " If the disclaimer of warranty and limitation of liability provided" + , "above cannot be given local legal effect according to their terms," + , "reviewing courts shall apply local law that most closely approximates" + , "an absolute waiver of all civil liability in connection with the" + , "Program, unless a warranty or assumption of liability accompanies a" + , "copy of the Program in return for a fee." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. It is safest" + , "to attach them to the start of each source file to most effectively" + , "state the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." + , "" + , " " + , " Copyright (C) " + , "" + , " This program is free software: you can redistribute it and/or modify" + , " it under the terms of the GNU General Public License as published by" + , " the Free Software Foundation, either version 3 of the License, or" + , " (at your option) any later version." + , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU General Public License for more details." + , "" + , " You should have received a copy of the GNU General Public License" + , " along with this program. If not, see ." + , "" + , "Also add information on how to contact you by electronic and paper mail." 
+ , "" + , " If the program does terminal interaction, make it output a short" + , "notice like this when it starts in an interactive mode:" + , "" + , " Copyright (C) " + , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." + , " This is free software, and you are welcome to redistribute it" + , " under certain conditions; type `show c' for details." + , "" + , "The hypothetical commands `show w' and `show c' should show the appropriate" + , "parts of the General Public License. Of course, your program's commands" + , "might be different; for a GUI interface, you would use an \"about box\"." + , "" + , " You should also get your employer (if you work as a programmer) or school," + , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." + , "For more information on this, and how to apply and follow the GNU GPL, see" + , "." + , "" + , " The GNU General Public License does not permit incorporating your program" + , "into proprietary programs. If your program is a subroutine library, you" + , "may consider it more useful to permit linking proprietary applications with" + , "the library. If this is what you want to do, use the GNU Lesser General" + , "Public License instead of this License. But first, please read" + , "." + ] + +agplv3 :: License +agplv3 = unlines + [ " GNU AFFERO GENERAL PUBLIC LICENSE" + , " Version 3, 19 November 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. " + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , " Preamble" + , "" + , " The GNU Affero General Public License is a free, copyleft license for" + , "software and other kinds of works, specifically designed to ensure" + , "cooperation with the community in the case of network server software." + , "" + , " The licenses for most software and other practical works are designed" + , "to take away your freedom to share and change the works. By contrast," + , "our General Public Licenses are intended to guarantee your freedom to" + , "share and change all versions of a program--to make sure it remains free" + , "software for all its users." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "them if you wish), that you receive source code or can get it if you" + , "want it, that you can change the software or use pieces of it in new" + , "free programs, and that you know you can do these things." + , "" + , " Developers that use our General Public Licenses protect your rights" + , "with two steps: (1) assert copyright on the software, and (2) offer" + , "you this License which gives you legal permission to copy, distribute" + , "and/or modify the software." + , "" + , " A secondary benefit of defending all users' freedom is that" + , "improvements made in alternate versions of the program, if they" + , "receive widespread use, become available for other developers to" + , "incorporate. Many developers of free software are heartened and" + , "encouraged by the resulting cooperation. However, in the case of" + , "software used on network servers, this result may fail to come about." + , "The GNU General Public License permits making a modified version and" + , "letting the public access it on a server without ever releasing its" + , "source code to the public." 
+ , "" + , " The GNU Affero General Public License is designed specifically to" + , "ensure that, in such cases, the modified source code becomes available" + , "to the community. It requires the operator of a network server to" + , "provide the source code of the modified version running there to the" + , "users of that server. Therefore, public use of a modified version, on" + , "a publicly accessible server, gives the public access to the source" + , "code of the modified version." + , "" + , " An older license, called the Affero General Public License and" + , "published by Affero, was designed to accomplish similar goals. This is" + , "a different license, not a version of the Affero GPL, but Affero has" + , "released a new version of the Affero GPL which permits relicensing under" + , "this license." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " TERMS AND CONDITIONS" + , "" + , " 0. Definitions." + , "" + , " \"This License\" refers to version 3 of the GNU Affero General Public License." + , "" + , " \"Copyright\" also means copyright-like laws that apply to other kinds of" + , "works, such as semiconductor masks." + , "" + , " \"The Program\" refers to any copyrightable work licensed under this" + , "License. Each licensee is addressed as \"you\". \"Licensees\" and" + , "\"recipients\" may be individuals or organizations." + , "" + , " To \"modify\" a work means to copy from or adapt all or part of the work" + , "in a fashion requiring copyright permission, other than the making of an" + , "exact copy. The resulting work is called a \"modified version\" of the" + , "earlier work or a work \"based on\" the earlier work." + , "" + , " A \"covered work\" means either the unmodified Program or a work based" + , "on the Program." + , "" + , " To \"propagate\" a work means to do anything with it that, without" + , "permission, would make you directly or secondarily liable for" + , "infringement under applicable copyright law, except executing it on a" + , "computer or modifying a private copy. Propagation includes copying," + , "distribution (with or without modification), making available to the" + , "public, and in some countries other activities as well." + , "" + , " To \"convey\" a work means any kind of propagation that enables other" + , "parties to make or receive copies. Mere interaction with a user through" + , "a computer network, with no transfer of a copy, is not conveying." + , "" + , " An interactive user interface displays \"Appropriate Legal Notices\"" + , "to the extent that it includes a convenient and prominently visible" + , "feature that (1) displays an appropriate copyright notice, and (2)" + , "tells the user that there is no warranty for the work (except to the" + , "extent that warranties are provided), that licensees may convey the" + , "work under this License, and how to view a copy of this License. If" + , "the interface presents a list of user commands or options, such as a" + , "menu, a prominent item in the list meets this criterion." + , "" + , " 1. Source Code." + , "" + , " The \"source code\" for a work means the preferred form of the work" + , "for making modifications to it. \"Object code\" means any non-source" + , "form of a work." 
+ , "" + , " A \"Standard Interface\" means an interface that either is an official" + , "standard defined by a recognized standards body, or, in the case of" + , "interfaces specified for a particular programming language, one that" + , "is widely used among developers working in that language." + , "" + , " The \"System Libraries\" of an executable work include anything, other" + , "than the work as a whole, that (a) is included in the normal form of" + , "packaging a Major Component, but which is not part of that Major" + , "Component, and (b) serves only to enable use of the work with that" + , "Major Component, or to implement a Standard Interface for which an" + , "implementation is available to the public in source code form. A" + , "\"Major Component\", in this context, means a major essential component" + , "(kernel, window system, and so on) of the specific operating system" + , "(if any) on which the executable work runs, or a compiler used to" + , "produce the work, or an object code interpreter used to run it." + , "" + , " The \"Corresponding Source\" for a work in object code form means all" + , "the source code needed to generate, install, and (for an executable" + , "work) run the object code and to modify the work, including scripts to" + , "control those activities. However, it does not include the work's" + , "System Libraries, or general-purpose tools or generally available free" + , "programs which are used unmodified in performing those activities but" + , "which are not part of the work. For example, Corresponding Source" + , "includes interface definition files associated with source files for" + , "the work, and the source code for shared libraries and dynamically" + , "linked subprograms that the work is specifically designed to require," + , "such as by intimate data communication or control flow between those" + , "subprograms and other parts of the work." + , "" + , " The Corresponding Source need not include anything that users" + , "can regenerate automatically from other parts of the Corresponding" + , "Source." + , "" + , " The Corresponding Source for a work in source code form is that" + , "same work." + , "" + , " 2. Basic Permissions." + , "" + , " All rights granted under this License are granted for the term of" + , "copyright on the Program, and are irrevocable provided the stated" + , "conditions are met. This License explicitly affirms your unlimited" + , "permission to run the unmodified Program. The output from running a" + , "covered work is covered by this License only if the output, given its" + , "content, constitutes a covered work. This License acknowledges your" + , "rights of fair use or other equivalent, as provided by copyright law." + , "" + , " You may make, run and propagate covered works that you do not" + , "convey, without conditions so long as your license otherwise remains" + , "in force. You may convey covered works to others for the sole purpose" + , "of having them make modifications exclusively for you, or provide you" + , "with facilities for running those works, provided that you comply with" + , "the terms of this License in conveying all material for which you do" + , "not control copyright. Those thus making or running the covered works" + , "for you must do so exclusively on your behalf, under your direction" + , "and control, on terms that prohibit them from making any copies of" + , "your copyrighted material outside their relationship with you." 
+ , "" + , " Conveying under any other circumstances is permitted solely under" + , "the conditions stated below. Sublicensing is not allowed; section 10" + , "makes it unnecessary." + , "" + , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." + , "" + , " No covered work shall be deemed part of an effective technological" + , "measure under any applicable law fulfilling obligations under article" + , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" + , "similar laws prohibiting or restricting circumvention of such" + , "measures." + , "" + , " When you convey a covered work, you waive any legal power to forbid" + , "circumvention of technological measures to the extent such circumvention" + , "is effected by exercising rights under this License with respect to" + , "the covered work, and you disclaim any intention to limit operation or" + , "modification of the work as a means of enforcing, against the work's" + , "users, your or third parties' legal rights to forbid circumvention of" + , "technological measures." + , "" + , " 4. Conveying Verbatim Copies." + , "" + , " You may convey verbatim copies of the Program's source code as you" + , "receive it, in any medium, provided that you conspicuously and" + , "appropriately publish on each copy an appropriate copyright notice;" + , "keep intact all notices stating that this License and any" + , "non-permissive terms added in accord with section 7 apply to the code;" + , "keep intact all notices of the absence of any warranty; and give all" + , "recipients a copy of this License along with the Program." + , "" + , " You may charge any price or no price for each copy that you convey," + , "and you may offer support or warranty protection for a fee." + , "" + , " 5. Conveying Modified Source Versions." + , "" + , " You may convey a work based on the Program, or the modifications to" + , "produce it from the Program, in the form of source code under the" + , "terms of section 4, provided that you also meet all of these conditions:" + , "" + , " a) The work must carry prominent notices stating that you modified" + , " it, and giving a relevant date." + , "" + , " b) The work must carry prominent notices stating that it is" + , " released under this License and any conditions added under section" + , " 7. This requirement modifies the requirement in section 4 to" + , " \"keep intact all notices\"." + , "" + , " c) You must license the entire work, as a whole, under this" + , " License to anyone who comes into possession of a copy. This" + , " License will therefore apply, along with any applicable section 7" + , " additional terms, to the whole of the work, and all its parts," + , " regardless of how they are packaged. This License gives no" + , " permission to license the work in any other way, but it does not" + , " invalidate such permission if you have separately received it." + , "" + , " d) If the work has interactive user interfaces, each must display" + , " Appropriate Legal Notices; however, if the Program has interactive" + , " interfaces that do not display Appropriate Legal Notices, your" + , " work need not make them do so." 
+ , "" + , " A compilation of a covered work with other separate and independent" + , "works, which are not by their nature extensions of the covered work," + , "and which are not combined with it such as to form a larger program," + , "in or on a volume of a storage or distribution medium, is called an" + , "\"aggregate\" if the compilation and its resulting copyright are not" + , "used to limit the access or legal rights of the compilation's users" + , "beyond what the individual works permit. Inclusion of a covered work" + , "in an aggregate does not cause this License to apply to the other" + , "parts of the aggregate." + , "" + , " 6. Conveying Non-Source Forms." + , "" + , " You may convey a covered work in object code form under the terms" + , "of sections 4 and 5, provided that you also convey the" + , "machine-readable Corresponding Source under the terms of this License," + , "in one of these ways:" + , "" + , " a) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by the" + , " Corresponding Source fixed on a durable physical medium" + , " customarily used for software interchange." + , "" + , " b) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by a" + , " written offer, valid for at least three years and valid for as" + , " long as you offer spare parts or customer support for that product" + , " model, to give anyone who possesses the object code either (1) a" + , " copy of the Corresponding Source for all the software in the" + , " product that is covered by this License, on a durable physical" + , " medium customarily used for software interchange, for a price no" + , " more than your reasonable cost of physically performing this" + , " conveying of source, or (2) access to copy the" + , " Corresponding Source from a network server at no charge." + , "" + , " c) Convey individual copies of the object code with a copy of the" + , " written offer to provide the Corresponding Source. This" + , " alternative is allowed only occasionally and noncommercially, and" + , " only if you received the object code with such an offer, in accord" + , " with subsection 6b." + , "" + , " d) Convey the object code by offering access from a designated" + , " place (gratis or for a charge), and offer equivalent access to the" + , " Corresponding Source in the same way through the same place at no" + , " further charge. You need not require recipients to copy the" + , " Corresponding Source along with the object code. If the place to" + , " copy the object code is a network server, the Corresponding Source" + , " may be on a different server (operated by you or a third party)" + , " that supports equivalent copying facilities, provided you maintain" + , " clear directions next to the object code saying where to find the" + , " Corresponding Source. Regardless of what server hosts the" + , " Corresponding Source, you remain obligated to ensure that it is" + , " available for as long as needed to satisfy these requirements." + , "" + , " e) Convey the object code using peer-to-peer transmission, provided" + , " you inform other peers where the object code and Corresponding" + , " Source of the work are being offered to the general public at no" + , " charge under subsection 6d." 
+ , "" + , " A separable portion of the object code, whose source code is excluded" + , "from the Corresponding Source as a System Library, need not be" + , "included in conveying the object code work." + , "" + , " A \"User Product\" is either (1) a \"consumer product\", which means any" + , "tangible personal property which is normally used for personal, family," + , "or household purposes, or (2) anything designed or sold for incorporation" + , "into a dwelling. In determining whether a product is a consumer product," + , "doubtful cases shall be resolved in favor of coverage. For a particular" + , "product received by a particular user, \"normally used\" refers to a" + , "typical or common use of that class of product, regardless of the status" + , "of the particular user or of the way in which the particular user" + , "actually uses, or expects or is expected to use, the product. A product" + , "is a consumer product regardless of whether the product has substantial" + , "commercial, industrial or non-consumer uses, unless such uses represent" + , "the only significant mode of use of the product." + , "" + , " \"Installation Information\" for a User Product means any methods," + , "procedures, authorization keys, or other information required to install" + , "and execute modified versions of a covered work in that User Product from" + , "a modified version of its Corresponding Source. The information must" + , "suffice to ensure that the continued functioning of the modified object" + , "code is in no case prevented or interfered with solely because" + , "modification has been made." + , "" + , " If you convey an object code work under this section in, or with, or" + , "specifically for use in, a User Product, and the conveying occurs as" + , "part of a transaction in which the right of possession and use of the" + , "User Product is transferred to the recipient in perpetuity or for a" + , "fixed term (regardless of how the transaction is characterized), the" + , "Corresponding Source conveyed under this section must be accompanied" + , "by the Installation Information. But this requirement does not apply" + , "if neither you nor any third party retains the ability to install" + , "modified object code on the User Product (for example, the work has" + , "been installed in ROM)." + , "" + , " The requirement to provide Installation Information does not include a" + , "requirement to continue to provide support service, warranty, or updates" + , "for a work that has been modified or installed by the recipient, or for" + , "the User Product in which it has been modified or installed. Access to a" + , "network may be denied when the modification itself materially and" + , "adversely affects the operation of the network or violates the rules and" + , "protocols for communication across the network." + , "" + , " Corresponding Source conveyed, and Installation Information provided," + , "in accord with this section must be in a format that is publicly" + , "documented (and with an implementation available to the public in" + , "source code form), and must require no special password or key for" + , "unpacking, reading or copying." + , "" + , " 7. Additional Terms." + , "" + , " \"Additional permissions\" are terms that supplement the terms of this" + , "License by making exceptions from one or more of its conditions." 
+ , "Additional permissions that are applicable to the entire Program shall" + , "be treated as though they were included in this License, to the extent" + , "that they are valid under applicable law. If additional permissions" + , "apply only to part of the Program, that part may be used separately" + , "under those permissions, but the entire Program remains governed by" + , "this License without regard to the additional permissions." + , "" + , " When you convey a copy of a covered work, you may at your option" + , "remove any additional permissions from that copy, or from any part of" + , "it. (Additional permissions may be written to require their own" + , "removal in certain cases when you modify the work.) You may place" + , "additional permissions on material, added by you to a covered work," + , "for which you have or can give appropriate copyright permission." + , "" + , " Notwithstanding any other provision of this License, for material you" + , "add to a covered work, you may (if authorized by the copyright holders of" + , "that material) supplement the terms of this License with terms:" + , "" + , " a) Disclaiming warranty or limiting liability differently from the" + , " terms of sections 15 and 16 of this License; or" + , "" + , " b) Requiring preservation of specified reasonable legal notices or" + , " author attributions in that material or in the Appropriate Legal" + , " Notices displayed by works containing it; or" + , "" + , " c) Prohibiting misrepresentation of the origin of that material, or" + , " requiring that modified versions of such material be marked in" + , " reasonable ways as different from the original version; or" + , "" + , " d) Limiting the use for publicity purposes of names of licensors or" + , " authors of the material; or" + , "" + , " e) Declining to grant rights under trademark law for use of some" + , " trade names, trademarks, or service marks; or" + , "" + , " f) Requiring indemnification of licensors and authors of that" + , " material by anyone who conveys the material (or modified versions of" + , " it) with contractual assumptions of liability to the recipient, for" + , " any liability that these contractual assumptions directly impose on" + , " those licensors and authors." + , "" + , " All other non-permissive additional terms are considered \"further" + , "restrictions\" within the meaning of section 10. If the Program as you" + , "received it, or any part of it, contains a notice stating that it is" + , "governed by this License along with a term that is a further" + , "restriction, you may remove that term. If a license document contains" + , "a further restriction but permits relicensing or conveying under this" + , "License, you may add to a covered work material governed by the terms" + , "of that license document, provided that the further restriction does" + , "not survive such relicensing or conveying." + , "" + , " If you add terms to a covered work in accord with this section, you" + , "must place, in the relevant source files, a statement of the" + , "additional terms that apply to those files, or a notice indicating" + , "where to find the applicable terms." + , "" + , " Additional terms, permissive or non-permissive, may be stated in the" + , "form of a separately written license, or stated as exceptions;" + , "the above requirements apply either way." + , "" + , " 8. Termination." + , "" + , " You may not propagate or modify a covered work except as expressly" + , "provided under this License. 
Any attempt otherwise to propagate or" + , "modify it is void, and will automatically terminate your rights under" + , "this License (including any patent licenses granted under the third" + , "paragraph of section 11)." + , "" + , " However, if you cease all violation of this License, then your" + , "license from a particular copyright holder is reinstated (a)" + , "provisionally, unless and until the copyright holder explicitly and" + , "finally terminates your license, and (b) permanently, if the copyright" + , "holder fails to notify you of the violation by some reasonable means" + , "prior to 60 days after the cessation." + , "" + , " Moreover, your license from a particular copyright holder is" + , "reinstated permanently if the copyright holder notifies you of the" + , "violation by some reasonable means, this is the first time you have" + , "received notice of violation of this License (for any work) from that" + , "copyright holder, and you cure the violation prior to 30 days after" + , "your receipt of the notice." + , "" + , " Termination of your rights under this section does not terminate the" + , "licenses of parties who have received copies or rights from you under" + , "this License. If your rights have been terminated and not permanently" + , "reinstated, you do not qualify to receive new licenses for the same" + , "material under section 10." + , "" + , " 9. Acceptance Not Required for Having Copies." + , "" + , " You are not required to accept this License in order to receive or" + , "run a copy of the Program. Ancillary propagation of a covered work" + , "occurring solely as a consequence of using peer-to-peer transmission" + , "to receive a copy likewise does not require acceptance. However," + , "nothing other than this License grants you permission to propagate or" + , "modify any covered work. These actions infringe copyright if you do" + , "not accept this License. Therefore, by modifying or propagating a" + , "covered work, you indicate your acceptance of this License to do so." + , "" + , " 10. Automatic Licensing of Downstream Recipients." + , "" + , " Each time you convey a covered work, the recipient automatically" + , "receives a license from the original licensors, to run, modify and" + , "propagate that work, subject to this License. You are not responsible" + , "for enforcing compliance by third parties with this License." + , "" + , " An \"entity transaction\" is a transaction transferring control of an" + , "organization, or substantially all assets of one, or subdividing an" + , "organization, or merging organizations. If propagation of a covered" + , "work results from an entity transaction, each party to that" + , "transaction who receives a copy of the work also receives whatever" + , "licenses to the work the party's predecessor in interest had or could" + , "give under the previous paragraph, plus a right to possession of the" + , "Corresponding Source of the work from the predecessor in interest, if" + , "the predecessor has it or can get it with reasonable efforts." + , "" + , " You may not impose any further restrictions on the exercise of the" + , "rights granted or affirmed under this License. 
For example, you may" + , "not impose a license fee, royalty, or other charge for exercise of" + , "rights granted under this License, and you may not initiate litigation" + , "(including a cross-claim or counterclaim in a lawsuit) alleging that" + , "any patent claim is infringed by making, using, selling, offering for" + , "sale, or importing the Program or any portion of it." + , "" + , " 11. Patents." + , "" + , " A \"contributor\" is a copyright holder who authorizes use under this" + , "License of the Program or a work on which the Program is based. The" + , "work thus licensed is called the contributor's \"contributor version\"." + , "" + , " A contributor's \"essential patent claims\" are all patent claims" + , "owned or controlled by the contributor, whether already acquired or" + , "hereafter acquired, that would be infringed by some manner, permitted" + , "by this License, of making, using, or selling its contributor version," + , "but do not include claims that would be infringed only as a" + , "consequence of further modification of the contributor version. For" + , "purposes of this definition, \"control\" includes the right to grant" + , "patent sublicenses in a manner consistent with the requirements of" + , "this License." + , "" + , " Each contributor grants you a non-exclusive, worldwide, royalty-free" + , "patent license under the contributor's essential patent claims, to" + , "make, use, sell, offer for sale, import and otherwise run, modify and" + , "propagate the contents of its contributor version." + , "" + , " In the following three paragraphs, a \"patent license\" is any express" + , "agreement or commitment, however denominated, not to enforce a patent" + , "(such as an express permission to practice a patent or covenant not to" + , "sue for patent infringement). To \"grant\" such a patent license to a" + , "party means to make such an agreement or commitment not to enforce a" + , "patent against the party." + , "" + , " If you convey a covered work, knowingly relying on a patent license," + , "and the Corresponding Source of the work is not available for anyone" + , "to copy, free of charge and under the terms of this License, through a" + , "publicly available network server or other readily accessible means," + , "then you must either (1) cause the Corresponding Source to be so" + , "available, or (2) arrange to deprive yourself of the benefit of the" + , "patent license for this particular work, or (3) arrange, in a manner" + , "consistent with the requirements of this License, to extend the patent" + , "license to downstream recipients. \"Knowingly relying\" means you have" + , "actual knowledge that, but for the patent license, your conveying the" + , "covered work in a country, or your recipient's use of the covered work" + , "in a country, would infringe one or more identifiable patents in that" + , "country that you have reason to believe are valid." + , "" + , " If, pursuant to or in connection with a single transaction or" + , "arrangement, you convey, or propagate by procuring conveyance of, a" + , "covered work, and grant a patent license to some of the parties" + , "receiving the covered work authorizing them to use, propagate, modify" + , "or convey a specific copy of the covered work, then the patent license" + , "you grant is automatically extended to all recipients of the covered" + , "work and works based on it." 
+ , "" + , " A patent license is \"discriminatory\" if it does not include within" + , "the scope of its coverage, prohibits the exercise of, or is" + , "conditioned on the non-exercise of one or more of the rights that are" + , "specifically granted under this License. You may not convey a covered" + , "work if you are a party to an arrangement with a third party that is" + , "in the business of distributing software, under which you make payment" + , "to the third party based on the extent of your activity of conveying" + , "the work, and under which the third party grants, to any of the" + , "parties who would receive the covered work from you, a discriminatory" + , "patent license (a) in connection with copies of the covered work" + , "conveyed by you (or copies made from those copies), or (b) primarily" + , "for and in connection with specific products or compilations that" + , "contain the covered work, unless you entered into that arrangement," + , "or that patent license was granted, prior to 28 March 2007." + , "" + , " Nothing in this License shall be construed as excluding or limiting" + , "any implied license or other defenses to infringement that may" + , "otherwise be available to you under applicable patent law." + , "" + , " 12. No Surrender of Others' Freedom." + , "" + , " If conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. If you cannot convey a" + , "covered work so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you may" + , "not convey it at all. For example, if you agree to terms that obligate you" + , "to collect a royalty for further conveying from those to whom you convey" + , "the Program, the only way you could satisfy both those terms and this" + , "License would be to refrain entirely from conveying the Program." + , "" + , " 13. Remote Network Interaction; Use with the GNU General Public License." + , "" + , " Notwithstanding any other provision of this License, if you modify the" + , "Program, your modified version must prominently offer all users" + , "interacting with it remotely through a computer network (if your version" + , "supports such interaction) an opportunity to receive the Corresponding" + , "Source of your version by providing access to the Corresponding Source" + , "from a network server at no charge, through some standard or customary" + , "means of facilitating copying of software. This Corresponding Source" + , "shall include the Corresponding Source for any work covered by version 3" + , "of the GNU General Public License that is incorporated pursuant to the" + , "following paragraph." + , "" + , " Notwithstanding any other provision of this License, you have" + , "permission to link or combine any covered work with a work licensed" + , "under version 3 of the GNU General Public License into a single" + , "combined work, and to convey the resulting work. The terms of this" + , "License will continue to apply to the part which is the covered work," + , "but the work with which it is combined will remain governed by version" + , "3 of the GNU General Public License." + , "" + , " 14. Revised Versions of this License." + , "" + , " The Free Software Foundation may publish revised and/or new versions of" + , "the GNU Affero General Public License from time to time. 
Such new versions" + , "will be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Program specifies that a certain numbered version of the GNU Affero General" + , "Public License \"or any later version\" applies to it, you have the" + , "option of following the terms and conditions either of that numbered" + , "version or of any later version published by the Free Software" + , "Foundation. If the Program does not specify a version number of the" + , "GNU Affero General Public License, you may choose any version ever published" + , "by the Free Software Foundation." + , "" + , " If the Program specifies that a proxy can decide which future" + , "versions of the GNU Affero General Public License can be used, that proxy's" + , "public statement of acceptance of a version permanently authorizes you" + , "to choose that version for the Program." + , "" + , " Later license versions may give you additional or different" + , "permissions. However, no additional obligations are imposed on any" + , "author or copyright holder as a result of your choosing to follow a" + , "later version." + , "" + , " 15. Disclaimer of Warranty." + , "" + , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" + , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" + , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" + , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," + , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" + , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" + , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. Limitation of Liability." + , "" + , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" + , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" + , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" + , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" + , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" + , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," + , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" + , "SUCH DAMAGES." + , "" + , " 17. Interpretation of Sections 15 and 16." + , "" + , " If the disclaimer of warranty and limitation of liability provided" + , "above cannot be given local legal effect according to their terms," + , "reviewing courts shall apply local law that most closely approximates" + , "an absolute waiver of all civil liability in connection with the" + , "Program, unless a warranty or assumption of liability accompanies a" + , "copy of the Program in return for a fee." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. 
It is safest" + , "to attach them to the start of each source file to most effectively" + , "state the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." + , "" + , " <one line to give the program's name and a brief idea of what it does.>" + , " Copyright (C) <year> <name of author>" + , "" + , " This program is free software: you can redistribute it and/or modify" + , " it under the terms of the GNU Affero General Public License as published by" + , " the Free Software Foundation, either version 3 of the License, or" + , " (at your option) any later version." + , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU Affero General Public License for more details." + , "" + , " You should have received a copy of the GNU Affero General Public License" + , " along with this program. If not, see <http://www.gnu.org/licenses/>." + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , " If your software can interact with users remotely through a computer" + , "network, you should also make sure that it provides a way for users to" + , "get its source. For example, if your program is a web application, its" + , "interface could display a \"Source\" link that leads users to an archive" + , "of the code. There are many ways you could offer source, and different" + , "solutions will be better for different programs; see section 13 for the" + , "specific requirements." + , "" + , " You should also get your employer (if you work as a programmer) or school," + , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." + , "For more information on this, and how to apply and follow the GNU AGPL, see" + , "<http://www.gnu.org/licenses/>." + ] + +lgpl21 :: License +lgpl21 = unlines + [ " GNU LESSER GENERAL PUBLIC LICENSE" + , " Version 2.1, February 1999" + , "" + , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , "[This is the first released version of the Lesser GPL. It also counts" + , " as the successor of the GNU Library Public License, version 2, hence" + , " the version number 2.1.]" + , "" + , " Preamble" + , "" + , " The licenses for most software are designed to take away your" + , "freedom to share and change it. By contrast, the GNU General Public" + , "Licenses are intended to guarantee your freedom to share and change" + , "free software--to make sure the software is free for all its users." + , "" + , " This license, the Lesser General Public License, applies to some" + , "specially designated software packages--typically libraries--of the" + , "Free Software Foundation and other authors who decide to use it. You" + , "can use it too, but we suggest you first think carefully about whether" + , "this license or the ordinary General Public License is the better" + , "strategy to use in any particular case, based on the explanations below." + , "" + , " When we speak of free software, we are referring to freedom of use," + , "not price. 
Our General Public Licenses are designed to make sure that" + , "you have the freedom to distribute copies of free software (and charge" + , "for this service if you wish); that you receive source code or can get" + , "it if you want it; that you can change the software and use pieces of" + , "it in new free programs; and that you are informed that you can do" + , "these things." + , "" + , " To protect your rights, we need to make restrictions that forbid" + , "distributors to deny you these rights or to ask you to surrender these" + , "rights. These restrictions translate to certain responsibilities for" + , "you if you distribute copies of the library or if you modify it." + , "" + , " For example, if you distribute copies of the library, whether gratis" + , "or for a fee, you must give the recipients all the rights that we gave" + , "you. You must make sure that they, too, receive or can get the source" + , "code. If you link other code with the library, you must provide" + , "complete object files to the recipients, so that they can relink them" + , "with the library after making changes to the library and recompiling" + , "it. And you must show them these terms so they know their rights." + , "" + , " We protect your rights with a two-step method: (1) we copyright the" + , "library, and (2) we offer you this license, which gives you legal" + , "permission to copy, distribute and/or modify the library." + , "" + , " To protect each distributor, we want to make it very clear that" + , "there is no warranty for the free library. Also, if the library is" + , "modified by someone else and passed on, the recipients should know" + , "that what they have is not the original version, so that the original" + , "author's reputation will not be affected by problems that might be" + , "introduced by others." + , "" + , " Finally, software patents pose a constant threat to the existence of" + , "any free program. We wish to make sure that a company cannot" + , "effectively restrict the users of a free program by obtaining a" + , "restrictive license from a patent holder. Therefore, we insist that" + , "any patent license obtained for a version of the library must be" + , "consistent with the full freedom of use specified in this license." + , "" + , " Most GNU software, including some libraries, is covered by the" + , "ordinary GNU General Public License. This license, the GNU Lesser" + , "General Public License, applies to certain designated libraries, and" + , "is quite different from the ordinary General Public License. We use" + , "this license for certain libraries in order to permit linking those" + , "libraries into non-free programs." + , "" + , " When a program is linked with a library, whether statically or using" + , "a shared library, the combination of the two is legally speaking a" + , "combined work, a derivative of the original library. The ordinary" + , "General Public License therefore permits such linking only if the" + , "entire combination fits its criteria of freedom. The Lesser General" + , "Public License permits more lax criteria for linking other code with" + , "the library." + , "" + , " We call this license the \"Lesser\" General Public License because it" + , "does Less to protect the user's freedom than the ordinary General" + , "Public License. It also provides other free software developers Less" + , "of an advantage over competing non-free programs. These disadvantages" + , "are the reason we use the ordinary General Public License for many" + , "libraries. 
However, the Lesser license provides advantages in certain" + , "special circumstances." + , "" + , " For example, on rare occasions, there may be a special need to" + , "encourage the widest possible use of a certain library, so that it becomes" + , "a de-facto standard. To achieve this, non-free programs must be" + , "allowed to use the library. A more frequent case is that a free" + , "library does the same job as widely used non-free libraries. In this" + , "case, there is little to gain by limiting the free library to free" + , "software only, so we use the Lesser General Public License." + , "" + , " In other cases, permission to use a particular library in non-free" + , "programs enables a greater number of people to use a large body of" + , "free software. For example, permission to use the GNU C Library in" + , "non-free programs enables many more people to use the whole GNU" + , "operating system, as well as its variant, the GNU/Linux operating" + , "system." + , "" + , " Although the Lesser General Public License is Less protective of the" + , "users' freedom, it does ensure that the user of a program that is" + , "linked with the Library has the freedom and the wherewithal to run" + , "that program using a modified version of the Library." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow. Pay close attention to the difference between a" + , "\"work based on the library\" and a \"work that uses the library\". The" + , "former contains code derived from the library, whereas the latter must" + , "be combined with the library in order to run." + , "" + , " GNU LESSER GENERAL PUBLIC LICENSE" + , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" + , "" + , " 0. This License Agreement applies to any software library or other" + , "program which contains a notice placed by the copyright holder or" + , "other authorized party saying it may be distributed under the terms of" + , "this Lesser General Public License (also called \"this License\")." + , "Each licensee is addressed as \"you\"." + , "" + , " A \"library\" means a collection of software functions and/or data" + , "prepared so as to be conveniently linked with application programs" + , "(which use some of those functions and data) to form executables." + , "" + , " The \"Library\", below, refers to any such software library or work" + , "which has been distributed under these terms. A \"work based on the" + , "Library\" means either the Library or any derivative work under" + , "copyright law: that is to say, a work containing the Library or a" + , "portion of it, either verbatim or with modifications and/or translated" + , "straightforwardly into another language. (Hereinafter, translation is" + , "included without limitation in the term \"modification\".)" + , "" + , " \"Source code\" for a work means the preferred form of the work for" + , "making modifications to it. For a library, complete source code means" + , "all the source code for all modules it contains, plus any associated" + , "interface definition files, plus the scripts used to control compilation" + , "and installation of the library." + , "" + , " Activities other than copying, distribution and modification are not" + , "covered by this License; they are outside its scope. 
The act of" + , "running a program using the Library is not restricted, and output from" + , "such a program is covered only if its contents constitute a work based" + , "on the Library (independent of the use of the Library in a tool for" + , "writing it). Whether that is true depends on what the Library does" + , "and what the program that uses the Library does." + , "" + , " 1. You may copy and distribute verbatim copies of the Library's" + , "complete source code as you receive it, in any medium, provided that" + , "you conspicuously and appropriately publish on each copy an" + , "appropriate copyright notice and disclaimer of warranty; keep intact" + , "all the notices that refer to this License and to the absence of any" + , "warranty; and distribute a copy of this License along with the" + , "Library." + , "" + , " You may charge a fee for the physical act of transferring a copy," + , "and you may at your option offer warranty protection in exchange for a" + , "fee." + , "" + , " 2. You may modify your copy or copies of the Library or any portion" + , "of it, thus forming a work based on the Library, and copy and" + , "distribute such modifications or work under the terms of Section 1" + , "above, provided that you also meet all of these conditions:" + , "" + , " a) The modified work must itself be a software library." + , "" + , " b) You must cause the files modified to carry prominent notices" + , " stating that you changed the files and the date of any change." + , "" + , " c) You must cause the whole of the work to be licensed at no" + , " charge to all third parties under the terms of this License." + , "" + , " d) If a facility in the modified Library refers to a function or a" + , " table of data to be supplied by an application program that uses" + , " the facility, other than as an argument passed when the facility" + , " is invoked, then you must make a good faith effort to ensure that," + , " in the event an application does not supply such function or" + , " table, the facility still operates, and performs whatever part of" + , " its purpose remains meaningful." + , "" + , " (For example, a function in a library to compute square roots has" + , " a purpose that is entirely well-defined independent of the" + , " application. Therefore, Subsection 2d requires that any" + , " application-supplied function or table used by this function must" + , " be optional: if the application does not supply it, the square" + , " root function must still compute square roots.)" + , "" + , "These requirements apply to the modified work as a whole. If" + , "identifiable sections of that work are not derived from the Library," + , "and can be reasonably considered independent and separate works in" + , "themselves, then this License, and its terms, do not apply to those" + , "sections when you distribute them as separate works. But when you" + , "distribute the same sections as part of a whole which is a work based" + , "on the Library, the distribution of the whole must be on the terms of" + , "this License, whose permissions for other licensees extend to the" + , "entire whole, and thus to each and every part regardless of who wrote" + , "it." + , "" + , "Thus, it is not the intent of this section to claim rights or contest" + , "your rights to work written entirely by you; rather, the intent is to" + , "exercise the right to control the distribution of derivative or" + , "collective works based on the Library." 
+ , "" + , "In addition, mere aggregation of another work not based on the Library" + , "with the Library (or with a work based on the Library) on a volume of" + , "a storage or distribution medium does not bring the other work under" + , "the scope of this License." + , "" + , " 3. You may opt to apply the terms of the ordinary GNU General Public" + , "License instead of this License to a given copy of the Library. To do" + , "this, you must alter all the notices that refer to this License, so" + , "that they refer to the ordinary GNU General Public License, version 2," + , "instead of to this License. (If a newer version than version 2 of the" + , "ordinary GNU General Public License has appeared, then you can specify" + , "that version instead if you wish.) Do not make any other change in" + , "these notices." + , "" + , " Once this change is made in a given copy, it is irreversible for" + , "that copy, so the ordinary GNU General Public License applies to all" + , "subsequent copies and derivative works made from that copy." + , "" + , " This option is useful when you wish to copy part of the code of" + , "the Library into a program that is not a library." + , "" + , " 4. You may copy and distribute the Library (or a portion or" + , "derivative of it, under Section 2) in object code or executable form" + , "under the terms of Sections 1 and 2 above provided that you accompany" + , "it with the complete corresponding machine-readable source code, which" + , "must be distributed under the terms of Sections 1 and 2 above on a" + , "medium customarily used for software interchange." + , "" + , " If distribution of object code is made by offering access to copy" + , "from a designated place, then offering equivalent access to copy the" + , "source code from the same place satisfies the requirement to" + , "distribute the source code, even though third parties are not" + , "compelled to copy the source along with the object code." + , "" + , " 5. A program that contains no derivative of any portion of the" + , "Library, but is designed to work with the Library by being compiled or" + , "linked with it, is called a \"work that uses the Library\". Such a" + , "work, in isolation, is not a derivative work of the Library, and" + , "therefore falls outside the scope of this License." + , "" + , " However, linking a \"work that uses the Library\" with the Library" + , "creates an executable that is a derivative of the Library (because it" + , "contains portions of the Library), rather than a \"work that uses the" + , "library\". The executable is therefore covered by this License." + , "Section 6 states terms for distribution of such executables." + , "" + , " When a \"work that uses the Library\" uses material from a header file" + , "that is part of the Library, the object code for the work may be a" + , "derivative work of the Library even though the source code is not." + , "Whether this is true is especially significant if the work can be" + , "linked without the Library, or if the work is itself a library. The" + , "threshold for this to be true is not precisely defined by law." + , "" + , " If such an object file uses only numerical parameters, data" + , "structure layouts and accessors, and small macros and small inline" + , "functions (ten lines or less in length), then the use of the object" + , "file is unrestricted, regardless of whether it is legally a derivative" + , "work. 
(Executables containing this object code plus portions of the" + , "Library will still fall under Section 6.)" + , "" + , " Otherwise, if the work is a derivative of the Library, you may" + , "distribute the object code for the work under the terms of Section 6." + , "Any executables containing that work also fall under Section 6," + , "whether or not they are linked directly with the Library itself." + , "" + , " 6. As an exception to the Sections above, you may also combine or" + , "link a \"work that uses the Library\" with the Library to produce a" + , "work containing portions of the Library, and distribute that work" + , "under terms of your choice, provided that the terms permit" + , "modification of the work for the customer's own use and reverse" + , "engineering for debugging such modifications." + , "" + , " You must give prominent notice with each copy of the work that the" + , "Library is used in it and that the Library and its use are covered by" + , "this License. You must supply a copy of this License. If the work" + , "during execution displays copyright notices, you must include the" + , "copyright notice for the Library among them, as well as a reference" + , "directing the user to the copy of this License. Also, you must do one" + , "of these things:" + , "" + , " a) Accompany the work with the complete corresponding" + , " machine-readable source code for the Library including whatever" + , " changes were used in the work (which must be distributed under" + , " Sections 1 and 2 above); and, if the work is an executable linked" + , " with the Library, with the complete machine-readable \"work that" + , " uses the Library\", as object code and/or source code, so that the" + , " user can modify the Library and then relink to produce a modified" + , " executable containing the modified Library. (It is understood" + , " that the user who changes the contents of definitions files in the" + , " Library will not necessarily be able to recompile the application" + , " to use the modified definitions.)" + , "" + , " b) Use a suitable shared library mechanism for linking with the" + , " Library. A suitable mechanism is one that (1) uses at run time a" + , " copy of the library already present on the user's computer system," + , " rather than copying library functions into the executable, and (2)" + , " will operate properly with a modified version of the library, if" + , " the user installs one, as long as the modified version is" + , " interface-compatible with the version that the work was made with." + , "" + , " c) Accompany the work with a written offer, valid for at" + , " least three years, to give the same user the materials" + , " specified in Subsection 6a, above, for a charge no more" + , " than the cost of performing this distribution." + , "" + , " d) If distribution of the work is made by offering access to copy" + , " from a designated place, offer equivalent access to copy the above" + , " specified materials from the same place." + , "" + , " e) Verify that the user has already received a copy of these" + , " materials or that you have already sent this user a copy." + , "" + , " For an executable, the required form of the \"work that uses the" + , "Library\" must include any data and utility programs needed for" + , "reproducing the executable from it. 
However, as a special exception," + , "the materials to be distributed need not include anything that is" + , "normally distributed (in either source or binary form) with the major" + , "components (compiler, kernel, and so on) of the operating system on" + , "which the executable runs, unless that component itself accompanies" + , "the executable." + , "" + , " It may happen that this requirement contradicts the license" + , "restrictions of other proprietary libraries that do not normally" + , "accompany the operating system. Such a contradiction means you cannot" + , "use both them and the Library together in an executable that you" + , "distribute." + , "" + , " 7. You may place library facilities that are a work based on the" + , "Library side-by-side in a single library together with other library" + , "facilities not covered by this License, and distribute such a combined" + , "library, provided that the separate distribution of the work based on" + , "the Library and of the other library facilities is otherwise" + , "permitted, and provided that you do these two things:" + , "" + , " a) Accompany the combined library with a copy of the same work" + , " based on the Library, uncombined with any other library" + , " facilities. This must be distributed under the terms of the" + , " Sections above." + , "" + , " b) Give prominent notice with the combined library of the fact" + , " that part of it is a work based on the Library, and explaining" + , " where to find the accompanying uncombined form of the same work." + , "" + , " 8. You may not copy, modify, sublicense, link with, or distribute" + , "the Library except as expressly provided under this License. Any" + , "attempt otherwise to copy, modify, sublicense, link with, or" + , "distribute the Library is void, and will automatically terminate your" + , "rights under this License. However, parties who have received copies," + , "or rights, from you under this License will not have their licenses" + , "terminated so long as such parties remain in full compliance." + , "" + , " 9. You are not required to accept this License, since you have not" + , "signed it. However, nothing else grants you permission to modify or" + , "distribute the Library or its derivative works. These actions are" + , "prohibited by law if you do not accept this License. Therefore, by" + , "modifying or distributing the Library (or any work based on the" + , "Library), you indicate your acceptance of this License to do so, and" + , "all its terms and conditions for copying, distributing or modifying" + , "the Library or works based on it." + , "" + , " 10. Each time you redistribute the Library (or any work based on the" + , "Library), the recipient automatically receives a license from the" + , "original licensor to copy, distribute, link with or modify the Library" + , "subject to these terms and conditions. You may not impose any further" + , "restrictions on the recipients' exercise of the rights granted herein." + , "You are not responsible for enforcing compliance by third parties with" + , "this License." + , "" + , " 11. If, as a consequence of a court judgment or allegation of patent" + , "infringement or for any other reason (not limited to patent issues)," + , "conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. 
If you cannot" + , "distribute so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you" + , "may not distribute the Library at all. For example, if a patent" + , "license would not permit royalty-free redistribution of the Library by" + , "all those who receive copies directly or indirectly through you, then" + , "the only way you could satisfy both it and this License would be to" + , "refrain entirely from distribution of the Library." + , "" + , "If any portion of this section is held invalid or unenforceable under any" + , "particular circumstance, the balance of the section is intended to apply," + , "and the section as a whole is intended to apply in other circumstances." + , "" + , "It is not the purpose of this section to induce you to infringe any" + , "patents or other property right claims or to contest validity of any" + , "such claims; this section has the sole purpose of protecting the" + , "integrity of the free software distribution system which is" + , "implemented by public license practices. Many people have made" + , "generous contributions to the wide range of software distributed" + , "through that system in reliance on consistent application of that" + , "system; it is up to the author/donor to decide if he or she is willing" + , "to distribute software through any other system and a licensee cannot" + , "impose that choice." + , "" + , "This section is intended to make thoroughly clear what is believed to" + , "be a consequence of the rest of this License." + , "" + , " 12. If the distribution and/or use of the Library is restricted in" + , "certain countries either by patents or by copyrighted interfaces, the" + , "original copyright holder who places the Library under this License may add" + , "an explicit geographical distribution limitation excluding those countries," + , "so that distribution is permitted only in or among countries not thus" + , "excluded. In such case, this License incorporates the limitation as if" + , "written in the body of this License." + , "" + , " 13. The Free Software Foundation may publish revised and/or new" + , "versions of the Lesser General Public License from time to time." + , "Such new versions will be similar in spirit to the present version," + , "but may differ in detail to address new problems or concerns." + , "" + , "Each version is given a distinguishing version number. If the Library" + , "specifies a version number of this License which applies to it and" + , "\"any later version\", you have the option of following the terms and" + , "conditions either of that version or of any later version published by" + , "the Free Software Foundation. If the Library does not specify a" + , "license version number, you may choose any version ever published by" + , "the Free Software Foundation." + , "" + , " 14. If you wish to incorporate parts of the Library into other free" + , "programs whose distribution conditions are incompatible with these," + , "write to the author to ask for permission. For software which is" + , "copyrighted by the Free Software Foundation, write to the Free" + , "Software Foundation; we sometimes make exceptions for this. Our" + , "decision will be guided by the two goals of preserving the free status" + , "of all derivatives of our free software and of promoting the sharing" + , "and reuse of software generally." + , "" + , " NO WARRANTY" + , "" + , " 15. 
BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" + , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." + , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" + , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" + , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" + , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" + , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" + , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" + , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" + , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" + , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" + , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" + , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" + , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" + , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" + , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" + , "DAMAGES." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Libraries" + , "" + , " If you develop a new library, and you want it to be of the greatest" + , "possible use to the public, we recommend making it free software that" + , "everyone can redistribute and change. You can do so by permitting" + , "redistribution under these terms (or, alternatively, under the terms of the" + , "ordinary General Public License)." + , "" + , " To apply these terms, attach the following notices to the library. It is" + , "safest to attach them to the start of each source file to most effectively" + , "convey the exclusion of warranty; and each file should have at least the" + , "\"copyright\" line and a pointer to where the full notice is found." + , "" + , " <one line to give the library's name and a brief idea of what it does.>" + , " Copyright (C) <year> <name of author>" + , "" + , " This library is free software; you can redistribute it and/or" + , " modify it under the terms of the GNU Lesser General Public" + , " License as published by the Free Software Foundation; either" + , " version 2.1 of the License, or (at your option) any later version." + , "" + , " This library is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU" + , " Lesser General Public License for more details." + , "" + , " You should have received a copy of the GNU Lesser General Public" + , " License along with this library; if not, write to the Free Software" + , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , "You should also get your employer (if you work as a programmer) or your" + , "school, if any, to sign a \"copyright disclaimer\" for the library, if" + , "necessary. Here is a sample; alter the names:" + , "" + , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" + , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." + , "" + , " <signature of Ty Coon>, 1 April 1990" + , " Ty Coon, President of Vice" + , "" + , "That's all there is to it!" 
+ ] + +lgpl3 :: License +lgpl3 = unlines + [ " GNU LESSER GENERAL PUBLIC LICENSE" + , " Version 3, 29 June 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>" + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , "" + , " This version of the GNU Lesser General Public License incorporates" + , "the terms and conditions of version 3 of the GNU General Public" + , "License, supplemented by the additional permissions listed below." + , "" + , " 0. Additional Definitions." + , "" + , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" + , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" + , "General Public License." + , "" + , " \"The Library\" refers to a covered work governed by this License," + , "other than an Application or a Combined Work as defined below." + , "" + , " An \"Application\" is any work that makes use of an interface provided" + , "by the Library, but which is not otherwise based on the Library." + , "Defining a subclass of a class defined by the Library is deemed a mode" + , "of using an interface provided by the Library." + , "" + , " A \"Combined Work\" is a work produced by combining or linking an" + , "Application with the Library. The particular version of the Library" + , "with which the Combined Work was made is also called the \"Linked" + , "Version\"." + , "" + , " The \"Minimal Corresponding Source\" for a Combined Work means the" + , "Corresponding Source for the Combined Work, excluding any source code" + , "for portions of the Combined Work that, considered in isolation, are" + , "based on the Application, and not on the Linked Version." + , "" + , " The \"Corresponding Application Code\" for a Combined Work means the" + , "object code and/or source code for the Application, including any data" + , "and utility programs needed for reproducing the Combined Work from the" + , "Application, but excluding the System Libraries of the Combined Work." + , "" + , " 1. Exception to Section 3 of the GNU GPL." + , "" + , " You may convey a covered work under sections 3 and 4 of this License" + , "without being bound by section 3 of the GNU GPL." + , "" + , " 2. Conveying Modified Versions." + , "" + , " If you modify a copy of the Library, and, in your modifications, a" + , "facility refers to a function or data to be supplied by an Application" + , "that uses the facility (other than as an argument passed when the" + , "facility is invoked), then you may convey a copy of the modified" + , "version:" + , "" + , " a) under this License, provided that you make a good faith effort to" + , " ensure that, in the event an Application does not supply the" + , " function or data, the facility still operates, and performs" + , " whatever part of its purpose remains meaningful, or" + , "" + , " b) under the GNU GPL, with none of the additional permissions of" + , " this License applicable to that copy." + , "" + , " 3. Object Code Incorporating Material from Library Header Files." + , "" + , " The object code form of an Application may incorporate material from" + , "a header file that is part of the Library. 
You may convey such object" + , "code under terms of your choice, provided that, if the incorporated" + , "material is not limited to numerical parameters, data structure" + , "layouts and accessors, or small macros, inline functions and templates" + , "(ten or fewer lines in length), you do both of the following:" + , "" + , " a) Give prominent notice with each copy of the object code that the" + , " Library is used in it and that the Library and its use are" + , " covered by this License." + , "" + , " b) Accompany the object code with a copy of the GNU GPL and this license" + , " document." + , "" + , " 4. Combined Works." + , "" + , " You may convey a Combined Work under terms of your choice that," + , "taken together, effectively do not restrict modification of the" + , "portions of the Library contained in the Combined Work and reverse" + , "engineering for debugging such modifications, if you also do each of" + , "the following:" + , "" + , " a) Give prominent notice with each copy of the Combined Work that" + , " the Library is used in it and that the Library and its use are" + , " covered by this License." + , "" + , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" + , " document." + , "" + , " c) For a Combined Work that displays copyright notices during" + , " execution, include the copyright notice for the Library among" + , " these notices, as well as a reference directing the user to the" + , " copies of the GNU GPL and this license document." + , "" + , " d) Do one of the following:" + , "" + , " 0) Convey the Minimal Corresponding Source under the terms of this" + , " License, and the Corresponding Application Code in a form" + , " suitable for, and under terms that permit, the user to" + , " recombine or relink the Application with a modified version of" + , " the Linked Version to produce a modified Combined Work, in the" + , " manner specified by section 6 of the GNU GPL for conveying" + , " Corresponding Source." + , "" + , " 1) Use a suitable shared library mechanism for linking with the" + , " Library. A suitable mechanism is one that (a) uses at run time" + , " a copy of the Library already present on the user's computer" + , " system, and (b) will operate properly with a modified version" + , " of the Library that is interface-compatible with the Linked" + , " Version." + , "" + , " e) Provide Installation Information, but only if you would otherwise" + , " be required to provide such information under section 6 of the" + , " GNU GPL, and only to the extent that such information is" + , " necessary to install and execute a modified version of the" + , " Combined Work produced by recombining or relinking the" + , " Application with a modified version of the Linked Version. (If" + , " you use option 4d0, the Installation Information must accompany" + , " the Minimal Corresponding Source and Corresponding Application" + , " Code. If you use option 4d1, you must provide the Installation" + , " Information in the manner specified by section 6 of the GNU GPL" + , " for conveying Corresponding Source.)" + , "" + , " 5. Combined Libraries." 
+ , "" + , " You may place library facilities that are a work based on the" + , "Library side by side in a single library together with other library" + , "facilities that are not Applications and are not covered by this" + , "License, and convey such a combined library under terms of your" + , "choice, if you do both of the following:" + , "" + , " a) Accompany the combined library with a copy of the same work based" + , " on the Library, uncombined with any other library facilities," + , " conveyed under the terms of this License." + , "" + , " b) Give prominent notice with the combined library that part of it" + , " is a work based on the Library, and explaining where to find the" + , " accompanying uncombined form of the same work." + , "" + , " 6. Revised Versions of the GNU Lesser General Public License." + , "" + , " The Free Software Foundation may publish revised and/or new versions" + , "of the GNU Lesser General Public License from time to time. Such new" + , "versions will be similar in spirit to the present version, but may" + , "differ in detail to address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Library as you received it specifies that a certain numbered version" + , "of the GNU Lesser General Public License \"or any later version\"" + , "applies to it, you have the option of following the terms and" + , "conditions either of that published version or of any later version" + , "published by the Free Software Foundation. If the Library as you" + , "received it does not specify a version number of the GNU Lesser" + , "General Public License, you may choose any version of the GNU Lesser" + , "General Public License ever published by the Free Software Foundation." + , "" + , " If the Library as you received it specifies that a proxy can decide" + , "whether future versions of the GNU Lesser General Public License shall" + , "apply, that proxy's public statement of acceptance of any version is" + , "permanent authorization for you to choose that version for the" + , "Library." + ] + +apache20 :: License +apache20 = unlines + [ "" + , " Apache License" + , " Version 2.0, January 2004" + , " http://www.apache.org/licenses/" + , "" + , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" + , "" + , " 1. Definitions." + , "" + , " \"License\" shall mean the terms and conditions for use, reproduction," + , " and distribution as defined by Sections 1 through 9 of this document." + , "" + , " \"Licensor\" shall mean the copyright owner or entity authorized by" + , " the copyright owner that is granting the License." + , "" + , " \"Legal Entity\" shall mean the union of the acting entity and all" + , " other entities that control, are controlled by, or are under common" + , " control with that entity. For the purposes of this definition," + , " \"control\" means (i) the power, direct or indirect, to cause the" + , " direction or management of such entity, whether by contract or" + , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" + , " outstanding shares, or (iii) beneficial ownership of such entity." + , "" + , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" + , " exercising permissions granted by this License." + , "" + , " \"Source\" form shall mean the preferred form for making modifications," + , " including but not limited to software source code, documentation" + , " source, and configuration files." 
+ , "" + , " \"Object\" form shall mean any form resulting from mechanical" + , " transformation or translation of a Source form, including but" + , " not limited to compiled object code, generated documentation," + , " and conversions to other media types." + , "" + , " \"Work\" shall mean the work of authorship, whether in Source or" + , " Object form, made available under the License, as indicated by a" + , " copyright notice that is included in or attached to the work" + , " (an example is provided in the Appendix below)." + , "" + , " \"Derivative Works\" shall mean any work, whether in Source or Object" + , " form, that is based on (or derived from) the Work and for which the" + , " editorial revisions, annotations, elaborations, or other modifications" + , " represent, as a whole, an original work of authorship. For the purposes" + , " of this License, Derivative Works shall not include works that remain" + , " separable from, or merely link (or bind by name) to the interfaces of," + , " the Work and Derivative Works thereof." + , "" + , " \"Contribution\" shall mean any work of authorship, including" + , " the original version of the Work and any modifications or additions" + , " to that Work or Derivative Works thereof, that is intentionally" + , " submitted to Licensor for inclusion in the Work by the copyright owner" + , " or by an individual or Legal Entity authorized to submit on behalf of" + , " the copyright owner. For the purposes of this definition, \"submitted\"" + , " means any form of electronic, verbal, or written communication sent" + , " to the Licensor or its representatives, including but not limited to" + , " communication on electronic mailing lists, source code control systems," + , " and issue tracking systems that are managed by, or on behalf of, the" + , " Licensor for the purpose of discussing and improving the Work, but" + , " excluding communication that is conspicuously marked or otherwise" + , " designated in writing by the copyright owner as \"Not a Contribution.\"" + , "" + , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" + , " on behalf of whom a Contribution has been received by Licensor and" + , " subsequently incorporated within the Work." + , "" + , " 2. Grant of Copyright License. Subject to the terms and conditions of" + , " this License, each Contributor hereby grants to You a perpetual," + , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" + , " copyright license to reproduce, prepare Derivative Works of," + , " publicly display, publicly perform, sublicense, and distribute the" + , " Work and such Derivative Works in Source or Object form." + , "" + , " 3. Grant of Patent License. Subject to the terms and conditions of" + , " this License, each Contributor hereby grants to You a perpetual," + , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" + , " (except as stated in this section) patent license to make, have made," + , " use, offer to sell, sell, import, and otherwise transfer the Work," + , " where such license applies only to those patent claims licensable" + , " by such Contributor that are necessarily infringed by their" + , " Contribution(s) alone or by combination of their Contribution(s)" + , " with the Work to which such Contribution(s) was submitted. 
If You" + , " institute patent litigation against any entity (including a" + , " cross-claim or counterclaim in a lawsuit) alleging that the Work" + , " or a Contribution incorporated within the Work constitutes direct" + , " or contributory patent infringement, then any patent licenses" + , " granted to You under this License for that Work shall terminate" + , " as of the date such litigation is filed." + , "" + , " 4. Redistribution. You may reproduce and distribute copies of the" + , " Work or Derivative Works thereof in any medium, with or without" + , " modifications, and in Source or Object form, provided that You" + , " meet the following conditions:" + , "" + , " (a) You must give any other recipients of the Work or" + , " Derivative Works a copy of this License; and" + , "" + , " (b) You must cause any modified files to carry prominent notices" + , " stating that You changed the files; and" + , "" + , " (c) You must retain, in the Source form of any Derivative Works" + , " that You distribute, all copyright, patent, trademark, and" + , " attribution notices from the Source form of the Work," + , " excluding those notices that do not pertain to any part of" + , " the Derivative Works; and" + , "" + , " (d) If the Work includes a \"NOTICE\" text file as part of its" + , " distribution, then any Derivative Works that You distribute must" + , " include a readable copy of the attribution notices contained" + , " within such NOTICE file, excluding those notices that do not" + , " pertain to any part of the Derivative Works, in at least one" + , " of the following places: within a NOTICE text file distributed" + , " as part of the Derivative Works; within the Source form or" + , " documentation, if provided along with the Derivative Works; or," + , " within a display generated by the Derivative Works, if and" + , " wherever such third-party notices normally appear. The contents" + , " of the NOTICE file are for informational purposes only and" + , " do not modify the License. You may add Your own attribution" + , " notices within Derivative Works that You distribute, alongside" + , " or as an addendum to the NOTICE text from the Work, provided" + , " that such additional attribution notices cannot be construed" + , " as modifying the License." + , "" + , " You may add Your own copyright statement to Your modifications and" + , " may provide additional or different license terms and conditions" + , " for use, reproduction, or distribution of Your modifications, or" + , " for any such Derivative Works as a whole, provided Your use," + , " reproduction, and distribution of the Work otherwise complies with" + , " the conditions stated in this License." + , "" + , " 5. Submission of Contributions. Unless You explicitly state otherwise," + , " any Contribution intentionally submitted for inclusion in the Work" + , " by You to the Licensor shall be under the terms and conditions of" + , " this License, without any additional terms or conditions." + , " Notwithstanding the above, nothing herein shall supersede or modify" + , " the terms of any separate license agreement you may have executed" + , " with Licensor regarding such Contributions." + , "" + , " 6. Trademarks. This License does not grant permission to use the trade" + , " names, trademarks, service marks, or product names of the Licensor," + , " except as required for reasonable and customary use in describing the" + , " origin of the Work and reproducing the content of the NOTICE file." + , "" + , " 7. Disclaimer of Warranty. 
Unless required by applicable law or" + , " agreed to in writing, Licensor provides the Work (and each" + , " Contributor provides its Contributions) on an \"AS IS\" BASIS," + , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" + , " implied, including, without limitation, any warranties or conditions" + , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" + , " PARTICULAR PURPOSE. You are solely responsible for determining the" + , " appropriateness of using or redistributing the Work and assume any" + , " risks associated with Your exercise of permissions under this License." + , "" + , " 8. Limitation of Liability. In no event and under no legal theory," + , " whether in tort (including negligence), contract, or otherwise," + , " unless required by applicable law (such as deliberate and grossly" + , " negligent acts) or agreed to in writing, shall any Contributor be" + , " liable to You for damages, including any direct, indirect, special," + , " incidental, or consequential damages of any character arising as a" + , " result of this License or out of the use or inability to use the" + , " Work (including but not limited to damages for loss of goodwill," + , " work stoppage, computer failure or malfunction, or any and all" + , " other commercial damages or losses), even if such Contributor" + , " has been advised of the possibility of such damages." + , "" + , " 9. Accepting Warranty or Additional Liability. While redistributing" + , " the Work or Derivative Works thereof, You may choose to offer," + , " and charge a fee for, acceptance of support, warranty, indemnity," + , " or other liability obligations and/or rights consistent with this" + , " License. However, in accepting such obligations, You may act only" + , " on Your own behalf and on Your sole responsibility, not on behalf" + , " of any other Contributor, and only if You agree to indemnify," + , " defend, and hold each Contributor harmless for any liability" + , " incurred by, or claims asserted against, such Contributor by reason" + , " of your accepting any such warranty or additional liability." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " APPENDIX: How to apply the Apache License to your work." + , "" + , " To apply the Apache License to your work, attach the following" + , " boilerplate notice, with the fields enclosed by brackets \"[]\"" + , " replaced with your own identifying information. (Don't include" + , " the brackets!) The text should be enclosed in the appropriate" + , " comment syntax for the file format. We also recommend that a" + , " file or class name and description of purpose be included on the" + , " same \"printed page\" as the copyright notice for easier" + , " identification within third-party archives." + , "" + , " Copyright [yyyy] [name of copyright owner]" + , "" + , " Licensed under the Apache License, Version 2.0 (the \"License\");" + , " you may not use this file except in compliance with the License." + , " You may obtain a copy of the License at" + , "" + , " http://www.apache.org/licenses/LICENSE-2.0" + , "" + , " Unless required by applicable law or agreed to in writing, software" + , " distributed under the License is distributed on an \"AS IS\" BASIS," + , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." + , " See the License for the specific language governing permissions and" + , " limitations under the License." 
+ ] + +mit :: String -> String -> License +mit authors year = unlines + [ "Copyright (c) " ++ year ++ " " ++ authors + , "" + , "Permission is hereby granted, free of charge, to any person obtaining" + , "a copy of this software and associated documentation files (the" + , "\"Software\"), to deal in the Software without restriction, including" + , "without limitation the rights to use, copy, modify, merge, publish," + , "distribute, sublicense, and/or sell copies of the Software, and to" + , "permit persons to whom the Software is furnished to do so, subject to" + , "the following conditions:" + , "" + , "The above copyright notice and this permission notice shall be included" + , "in all copies or substantial portions of the Software." + , "" + , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," + , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" + , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." + , "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" + , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," + , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" + , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." + ] + +mpl20 :: License +mpl20 = unlines + [ "Mozilla Public License Version 2.0" + , "==================================" + , "" + , "1. Definitions" + , "--------------" + , "" + , "1.1. \"Contributor\"" + , " means each individual or legal entity that creates, contributes to" + , " the creation of, or owns Covered Software." + , "" + , "1.2. \"Contributor Version\"" + , " means the combination of the Contributions of others (if any) used" + , " by a Contributor and that particular Contributor's Contribution." + , "" + , "1.3. \"Contribution\"" + , " means Covered Software of a particular Contributor." + , "" + , "1.4. \"Covered Software\"" + , " means Source Code Form to which the initial Contributor has attached" + , " the notice in Exhibit A, the Executable Form of such Source Code" + , " Form, and Modifications of such Source Code Form, in each case" + , " including portions thereof." + , "" + , "1.5. \"Incompatible With Secondary Licenses\"" + , " means" + , "" + , " (a) that the initial Contributor has attached the notice described" + , " in Exhibit B to the Covered Software; or" + , "" + , " (b) that the Covered Software was made available under the terms of" + , " version 1.1 or earlier of the License, but not also under the" + , " terms of a Secondary License." + , "" + , "1.6. \"Executable Form\"" + , " means any form of the work other than Source Code Form." + , "" + , "1.7. \"Larger Work\"" + , " means a work that combines Covered Software with other material, in" + , " a separate file or files, that is not Covered Software." + , "" + , "1.8. \"License\"" + , " means this document." + , "" + , "1.9. \"Licensable\"" + , " means having the right to grant, to the maximum extent possible," + , " whether at the time of the initial grant or subsequently, any and" + , " all of the rights conveyed by this License." + , "" + , "1.10. \"Modifications\"" + , " means any of the following:" + , "" + , " (a) any file in Source Code Form that results from an addition to," + , " deletion from, or modification of the contents of Covered" + , " Software; or" + , "" + , " (b) any new file in Source Code Form that contains any Covered" + , " Software." + , "" + , "1.11. 
\"Patent Claims\" of a Contributor" + , " means any patent claim(s), including without limitation, method," + , " process, and apparatus claims, in any patent Licensable by such" + , " Contributor that would be infringed, but for the grant of the" + , " License, by the making, using, selling, offering for sale, having" + , " made, import, or transfer of either its Contributions or its" + , " Contributor Version." + , "" + , "1.12. \"Secondary License\"" + , " means either the GNU General Public License, Version 2.0, the GNU" + , " Lesser General Public License, Version 2.1, the GNU Affero General" + , " Public License, Version 3.0, or any later versions of those" + , " licenses." + , "" + , "1.13. \"Source Code Form\"" + , " means the form of the work preferred for making modifications." + , "" + , "1.14. \"You\" (or \"Your\")" + , " means an individual or a legal entity exercising rights under this" + , " License. For legal entities, \"You\" includes any entity that" + , " controls, is controlled by, or is under common control with You. For" + , " purposes of this definition, \"control\" means (a) the power, direct" + , " or indirect, to cause the direction or management of such entity," + , " whether by contract or otherwise, or (b) ownership of more than" + , " fifty percent (50%) of the outstanding shares or beneficial" + , " ownership of such entity." + , "" + , "2. License Grants and Conditions" + , "--------------------------------" + , "" + , "2.1. Grants" + , "" + , "Each Contributor hereby grants You a world-wide, royalty-free," + , "non-exclusive license:" + , "" + , "(a) under intellectual property rights (other than patent or trademark)" + , " Licensable by such Contributor to use, reproduce, make available," + , " modify, display, perform, distribute, and otherwise exploit its" + , " Contributions, either on an unmodified basis, with Modifications, or" + , " as part of a Larger Work; and" + , "" + , "(b) under Patent Claims of such Contributor to make, use, sell, offer" + , " for sale, have made, import, and otherwise transfer either its" + , " Contributions or its Contributor Version." + , "" + , "2.2. Effective Date" + , "" + , "The licenses granted in Section 2.1 with respect to any Contribution" + , "become effective for each Contribution on the date the Contributor first" + , "distributes such Contribution." + , "" + , "2.3. Limitations on Grant Scope" + , "" + , "The licenses granted in this Section 2 are the only rights granted under" + , "this License. No additional rights or licenses will be implied from the" + , "distribution or licensing of Covered Software under this License." + , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" + , "Contributor:" + , "" + , "(a) for any code that a Contributor has removed from Covered Software;" + , " or" + , "" + , "(b) for infringements caused by: (i) Your and any other third party's" + , " modifications of Covered Software, or (ii) the combination of its" + , " Contributions with other software (except as part of its Contributor" + , " Version); or" + , "" + , "(c) under Patent Claims infringed by Covered Software in the absence of" + , " its Contributions." + , "" + , "This License does not grant any rights in the trademarks, service marks," + , "or logos of any Contributor (except as may be necessary to comply with" + , "the notice requirements in Section 3.4)." + , "" + , "2.4. 
Subsequent Licenses" + , "" + , "No Contributor makes additional grants as a result of Your choice to" + , "distribute the Covered Software under a subsequent version of this" + , "License (see Section 10.2) or under the terms of a Secondary License (if" + , "permitted under the terms of Section 3.3)." + , "" + , "2.5. Representation" + , "" + , "Each Contributor represents that the Contributor believes its" + , "Contributions are its original creation(s) or it has sufficient rights" + , "to grant the rights to its Contributions conveyed by this License." + , "" + , "2.6. Fair Use" + , "" + , "This License is not intended to limit any rights You have under" + , "applicable copyright doctrines of fair use, fair dealing, or other" + , "equivalents." + , "" + , "2.7. Conditions" + , "" + , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" + , "in Section 2.1." + , "" + , "3. Responsibilities" + , "-------------------" + , "" + , "3.1. Distribution of Source Form" + , "" + , "All distribution of Covered Software in Source Code Form, including any" + , "Modifications that You create or to which You contribute, must be under" + , "the terms of this License. You must inform recipients that the Source" + , "Code Form of the Covered Software is governed by the terms of this" + , "License, and how they can obtain a copy of this License. You may not" + , "attempt to alter or restrict the recipients' rights in the Source Code" + , "Form." + , "" + , "3.2. Distribution of Executable Form" + , "" + , "If You distribute Covered Software in Executable Form then:" + , "" + , "(a) such Covered Software must also be made available in Source Code" + , " Form, as described in Section 3.1, and You must inform recipients of" + , " the Executable Form how they can obtain a copy of such Source Code" + , " Form by reasonable means in a timely manner, at a charge no more" + , " than the cost of distribution to the recipient; and" + , "" + , "(b) You may distribute such Executable Form under the terms of this" + , " License, or sublicense it under different terms, provided that the" + , " license for the Executable Form does not attempt to limit or alter" + , " the recipients' rights in the Source Code Form under this License." + , "" + , "3.3. Distribution of a Larger Work" + , "" + , "You may create and distribute a Larger Work under terms of Your choice," + , "provided that You also comply with the requirements of this License for" + , "the Covered Software. If the Larger Work is a combination of Covered" + , "Software with a work governed by one or more Secondary Licenses, and the" + , "Covered Software is not Incompatible With Secondary Licenses, this" + , "License permits You to additionally distribute such Covered Software" + , "under the terms of such Secondary License(s), so that the recipient of" + , "the Larger Work may, at their option, further distribute the Covered" + , "Software under the terms of either this License or such Secondary" + , "License(s)." + , "" + , "3.4. Notices" + , "" + , "You may not remove or alter the substance of any license notices" + , "(including copyright notices, patent notices, disclaimers of warranty," + , "or limitations of liability) contained within the Source Code Form of" + , "the Covered Software, except that You may alter any license notices to" + , "the extent required to remedy known factual inaccuracies." + , "" + , "3.5. 
Application of Additional Terms" + , "" + , "You may choose to offer, and to charge a fee for, warranty, support," + , "indemnity or liability obligations to one or more recipients of Covered" + , "Software. However, You may do so only on Your own behalf, and not on" + , "behalf of any Contributor. You must make it absolutely clear that any" + , "such warranty, support, indemnity, or liability obligation is offered by" + , "You alone, and You hereby agree to indemnify every Contributor for any" + , "liability incurred by such Contributor as a result of warranty, support," + , "indemnity or liability terms You offer. You may include additional" + , "disclaimers of warranty and limitations of liability specific to any" + , "jurisdiction." + , "" + , "4. Inability to Comply Due to Statute or Regulation" + , "---------------------------------------------------" + , "" + , "If it is impossible for You to comply with any of the terms of this" + , "License with respect to some or all of the Covered Software due to" + , "statute, judicial order, or regulation then You must: (a) comply with" + , "the terms of this License to the maximum extent possible; and (b)" + , "describe the limitations and the code they affect. Such description must" + , "be placed in a text file included with all distributions of the Covered" + , "Software under this License. Except to the extent prohibited by statute" + , "or regulation, such description must be sufficiently detailed for a" + , "recipient of ordinary skill to be able to understand it." + , "" + , "5. Termination" + , "--------------" + , "" + , "5.1. The rights granted under this License will terminate automatically" + , "if You fail to comply with any of its terms. However, if You become" + , "compliant, then the rights granted under this License from a particular" + , "Contributor are reinstated (a) provisionally, unless and until such" + , "Contributor explicitly and finally terminates Your grants, and (b) on an" + , "ongoing basis, if such Contributor fails to notify You of the" + , "non-compliance by some reasonable means prior to 60 days after You have" + , "come back into compliance. Moreover, Your grants from a particular" + , "Contributor are reinstated on an ongoing basis if such Contributor" + , "notifies You of the non-compliance by some reasonable means, this is the" + , "first time You have received notice of non-compliance with this License" + , "from such Contributor, and You become compliant prior to 30 days after" + , "Your receipt of the notice." + , "" + , "5.2. If You initiate litigation against any entity by asserting a patent" + , "infringement claim (excluding declaratory judgment actions," + , "counter-claims, and cross-claims) alleging that a Contributor Version" + , "directly or indirectly infringes any patent, then the rights granted to" + , "You by any and all Contributors for the Covered Software under Section" + , "2.1 of this License shall terminate." + , "" + , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" + , "end user license agreements (excluding distributors and resellers) which" + , "have been validly granted by You or Your distributors under this License" + , "prior to termination shall survive termination." + , "" + , "************************************************************************" + , "* *" + , "* 6. 
Disclaimer of Warranty *" + , "* ------------------------- *" + , "* *" + , "* Covered Software is provided under this License on an \"as is\" *" + , "* basis, without warranty of any kind, either expressed, implied, or *" + , "* statutory, including, without limitation, warranties that the *" + , "* Covered Software is free of defects, merchantable, fit for a *" + , "* particular purpose or non-infringing. The entire risk as to the *" + , "* quality and performance of the Covered Software is with You. *" + , "* Should any Covered Software prove defective in any respect, You *" + , "* (not any Contributor) assume the cost of any necessary servicing, *" + , "* repair, or correction. This disclaimer of warranty constitutes an *" + , "* essential part of this License. No use of any Covered Software is *" + , "* authorized under this License except under this disclaimer. *" + , "* *" + , "************************************************************************" + , "" + , "************************************************************************" + , "* *" + , "* 7. Limitation of Liability *" + , "* -------------------------- *" + , "* *" + , "* Under no circumstances and under no legal theory, whether tort *" + , "* (including negligence), contract, or otherwise, shall any *" + , "* Contributor, or anyone who distributes Covered Software as *" + , "* permitted above, be liable to You for any direct, indirect, *" + , "* special, incidental, or consequential damages of any character *" + , "* including, without limitation, damages for lost profits, loss of *" + , "* goodwill, work stoppage, computer failure or malfunction, or any *" + , "* and all other commercial damages or losses, even if such party *" + , "* shall have been informed of the possibility of such damages. This *" + , "* limitation of liability shall not apply to liability for death or *" + , "* personal injury resulting from such party's negligence to the *" + , "* extent applicable law prohibits such limitation. Some *" + , "* jurisdictions do not allow the exclusion or limitation of *" + , "* incidental or consequential damages, so this exclusion and *" + , "* limitation may not apply to You. *" + , "* *" + , "************************************************************************" + , "" + , "8. Litigation" + , "-------------" + , "" + , "Any litigation relating to this License may be brought only in the" + , "courts of a jurisdiction where the defendant maintains its principal" + , "place of business and such litigation shall be governed by laws of that" + , "jurisdiction, without reference to its conflict-of-law provisions." + , "Nothing in this Section shall prevent a party's ability to bring" + , "cross-claims or counter-claims." + , "" + , "9. Miscellaneous" + , "----------------" + , "" + , "This License represents the complete agreement concerning the subject" + , "matter hereof. If any provision of this License is held to be" + , "unenforceable, such provision shall be reformed only to the extent" + , "necessary to make it enforceable. Any law or regulation which provides" + , "that the language of a contract shall be construed against the drafter" + , "shall not be used to construe this License against a Contributor." + , "" + , "10. Versions of the License" + , "---------------------------" + , "" + , "10.1. New Versions" + , "" + , "Mozilla Foundation is the license steward. 
Except as provided in Section" + , "10.3, no one other than the license steward has the right to modify or" + , "publish new versions of this License. Each version will be given a" + , "distinguishing version number." + , "" + , "10.2. Effect of New Versions" + , "" + , "You may distribute the Covered Software under the terms of the version" + , "of the License under which You originally received the Covered Software," + , "or under the terms of any subsequent version published by the license" + , "steward." + , "" + , "10.3. Modified Versions" + , "" + , "If you create software not governed by this License, and you want to" + , "create a new license for such software, you may create and use a" + , "modified version of this License if you rename the license and remove" + , "any references to the name of the license steward (except to note that" + , "such modified license differs from this License)." + , "" + , "10.4. Distributing Source Code Form that is Incompatible With Secondary" + , "Licenses" + , "" + , "If You choose to distribute Source Code Form that is Incompatible With" + , "Secondary Licenses under the terms of this version of the License, the" + , "notice described in Exhibit B of this License must be attached." + , "" + , "Exhibit A - Source Code Form License Notice" + , "-------------------------------------------" + , "" + , " This Source Code Form is subject to the terms of the Mozilla Public" + , " License, v. 2.0. If a copy of the MPL was not distributed with this" + , " file, You can obtain one at http://mozilla.org/MPL/2.0/." + , "" + , "If it is not possible or desirable to put the notice in a particular" + , "file, then You may include the notice in a location (such as a LICENSE" + , "file in a relevant directory) where a recipient would be likely to look" + , "for such a notice." + , "" + , "You may add additional accurate notices of copyright ownership." + , "" + , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" + , "---------------------------------------------------------" + , "" + , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" + , " defined by the Mozilla Public License, v. 2.0." + ] + +isc :: String -> String -> License +isc authors year = unlines + [ "Copyright (c) " ++ year ++ " " ++ authors + , "" + , "Permission to use, copy, modify, and/or distribute this software for any purpose" + , "with or without fee is hereby granted, provided that the above copyright notice" + , "and this permission notice appear in all copies." + , "" + , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" + , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" + , "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," + , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" + , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" + , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" + , "THIS SOFTWARE." 
+ ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init/Types.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,118 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init.Types +-- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Some types used by the 'cabal init' command. +-- +----------------------------------------------------------------------------- +module Distribution.Client.Init.Types where + +import Distribution.Simple.Setup + ( Flag(..) ) + +import Distribution.Compat.Semigroup +import Distribution.Version +import Distribution.Verbosity +import qualified Distribution.Package as P +import Distribution.License +import Distribution.ModuleName +import Language.Haskell.Extension ( Language(..), Extension ) + +import qualified Text.PrettyPrint as Disp +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Text + +import GHC.Generics ( Generic ) + +-- | InitFlags is really just a simple type to represent certain +-- portions of a .cabal file. Rather than have a flag for EVERY +-- possible field, we just have one for each field that the user is +-- likely to want and/or that we are likely to be able to +-- intelligently guess. +data InitFlags = + InitFlags { nonInteractive :: Flag Bool + , quiet :: Flag Bool + , packageDir :: Flag FilePath + , noComments :: Flag Bool + , minimal :: Flag Bool + + , packageName :: Flag P.PackageName + , version :: Flag Version + , cabalVersion :: Flag VersionRange + , license :: Flag License + , author :: Flag String + , email :: Flag String + , homepage :: Flag String + + , synopsis :: Flag String + , category :: Flag (Either String Category) + , extraSrc :: Maybe [String] + + , packageType :: Flag PackageType + , mainIs :: Flag FilePath + , language :: Flag Language + + , exposedModules :: Maybe [ModuleName] + , otherModules :: Maybe [ModuleName] + , otherExts :: Maybe [Extension] + + , dependencies :: Maybe [P.Dependency] + , sourceDirs :: Maybe [String] + , buildTools :: Maybe [String] + + , initVerbosity :: Flag Verbosity + , overwrite :: Flag Bool + } + deriving (Show, Generic) + + -- the Monoid instance for Flag has later values override earlier + -- ones, which is why we want Maybe [foo] for collecting foo values, + -- not Flag [foo]. + +data PackageType = Library | Executable + deriving (Show, Read, Eq) + +instance Text PackageType where + disp = Disp.text . show + parse = Parse.choice $ map (fmap read . Parse.string . show) [Library, Executable] + +instance Monoid InitFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InitFlags where + (<>) = gmappend + +-- | Some common package categories. 
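As an aside (not part of the upstream patch), the comment above notes that the Flag monoid keeps the later value while Maybe-wrapped lists accumulate; a minimal standalone sketch of that difference, assuming a GHC where Semigroup is in the Prelude (Flag' below is a stand-in defined only for this example, the real Flag lives in Distribution.Simple.Setup):

    -- Flag'-style merging: the later value wins.
    data Flag' a = NoFlag' | Flag' a deriving Show

    instance Semigroup (Flag' a) where
      f <> NoFlag' = f      -- keep the earlier value only when no later one is given
      _ <> g       = g      -- otherwise the later value overrides

    instance Monoid (Flag' a) where
      mempty = NoFlag'

    main :: IO ()
    main = do
      print (Flag' "0.1.0.0" <> Flag' "0.2.0.0")  -- Flag' "0.2.0.0": later value wins
      print (Just ["src"] <> Just ["test"])       -- Just ["src","test"]: list values accumulate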
+data Category + = Codec + | Concurrency + | Control + | Data + | Database + | Development + | Distribution + | Game + | Graphics + | Language + | Math + | Network + | Sound + | System + | Testing + | Text + | Web + deriving (Read, Show, Eq, Ord, Bounded, Enum) + +instance Text Category where + disp = Disp.text . show + parse = Parse.choice $ map (fmap read . Parse.string . show) [Codec .. ] + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Init.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Init.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,965 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init +-- Copyright : (c) Brent Yorgey 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Implementation of the 'cabal init' command, which creates an initial .cabal +-- file for a project. +-- +----------------------------------------------------------------------------- + +module Distribution.Client.Init ( + + -- * Commands + initCabal + , pvpize + , incVersion + + ) where + +import System.IO + ( hSetBuffering, stdout, BufferMode(..) ) +import System.Directory + ( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile + , getDirectoryContents, createDirectoryIfMissing ) +import System.FilePath + ( (), (<.>), takeBaseName ) +import Data.Time + ( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone ) + +import Data.Char + ( toUpper ) +import Data.List + ( intercalate, nub, groupBy, (\\) ) +import Data.Maybe + ( fromMaybe, isJust, catMaybes, listToMaybe ) +import Data.Function + ( on ) +import qualified Data.Map as M +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative + ( (<$>) ) +import Data.Traversable + ( traverse ) +#endif +import Control.Monad + ( when, unless, (>=>), join, forM_ ) +import Control.Arrow + ( (&&&), (***) ) + +import Text.PrettyPrint hiding (mode, cat) + +import Data.Version + ( Version(..) ) +import Distribution.Version + ( orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.ModuleName + ( ModuleName, fromString ) -- And for the Text instance +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo, sourcePackageId, exposed ) +import qualified Distribution.Package as P +import Language.Haskell.Extension ( Language(..) ) + +import Distribution.Client.Init.Types + ( InitFlags(..), PackageType(..), Category(..) 
) +import Distribution.Client.Init.Licenses + ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) +import Distribution.Client.Init.Heuristics + ( guessPackageName, guessAuthorNameMail, guessMainFileCandidates, + SourceFileEntry(..), + scanForModules, neededBuildPrograms ) + +import Distribution.License + ( License(..), knownLicenses ) + +import Distribution.ReadE + ( runReadE, readP_to_E ) +import Distribution.Simple.Setup + ( Flag(..), flagToMaybe ) +import Distribution.Simple.Configure + ( getInstalledPackages ) +import Distribution.Simple.Compiler + ( PackageDBStack, Compiler ) +import Distribution.Simple.Program + ( ProgramConfiguration ) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, moduleNameIndex ) +import Distribution.Text + ( display, Text(..) ) + +import Distribution.Client.PackageIndex + ( elemByPackageName ) +import Distribution.Client.IndexUtils + ( getSourcePackages ) +import Distribution.Client.Types + ( SourcePackageDb(..) ) +import Distribution.Client.Setup + ( RepoContext(..) ) + +initCabal :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramConfiguration + -> InitFlags + -> IO () +initCabal verbosity packageDBs repoCtxt comp conf initFlags = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + + hSetBuffering stdout NoBuffering + + initFlags' <- extendFlags installedPkgIndex sourcePkgDb initFlags + + case license initFlags' of + Flag PublicDomain -> return () + _ -> writeLicense initFlags' + writeSetupFile initFlags' + writeChangeLog initFlags' + createSourceDirectories initFlags' + createMainHs initFlags' + success <- writeCabalFile initFlags' + + when success $ generateWarnings initFlags' + +--------------------------------------------------------------------------- +-- Flag acquisition ----------------------------------------------------- +--------------------------------------------------------------------------- + +-- | Fill in more details by guessing, discovering, or prompting the +-- user. +extendFlags :: InstalledPackageIndex -> SourcePackageDb -> InitFlags -> IO InitFlags +extendFlags pkgIx sourcePkgDb = + getPackageName sourcePkgDb + >=> getVersion + >=> getLicense + >=> getAuthorInfo + >=> getHomepage + >=> getSynopsis + >=> getCategory + >=> getExtraSourceFiles + >=> getLibOrExec + >=> getSrcDir + >=> getLanguage + >=> getGenComments + >=> getModulesBuildToolsAndDeps pkgIx + +-- | Combine two actions which may return a value, preferring the first. That +-- is, run the second action only if the first doesn't return a value. +infixr 1 ?>> +(?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a) +f ?>> g = do + ma <- f + if isJust ma + then return ma + else g + +-- | Witness the isomorphism between Maybe and Flag. +maybeToFlag :: Maybe a -> Flag a +maybeToFlag = maybe NoFlag Flag + +-- | Get the package name: use the package directory (supplied, or the current +-- directory by default) as a guess. It looks at the SourcePackageDb to avoid +-- using an existing package name. 
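As an aside (not part of the upstream patch), the ?>> fallback combinator defined above drives the whole extendFlags chain: each alternative runs only if no earlier one produced a value. A minimal standalone sketch of that behaviour:

    import Data.Maybe (isJust)

    -- Same combinator as in the patch: prefer the first action's result,
    -- run the second only when the first returns Nothing.
    infixr 1 ?>>
    (?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a)
    f ?>> g = do
      ma <- f
      if isJust ma then return ma else g

    main :: IO ()
    main = do
      r1 <- return (Just "from-flag") ?>> (putStrLn "prompting..." >> return (Just "typed"))
      print r1    -- Just "from-flag"; the prompt action never runs
      r2 <- return Nothing ?>> (putStrLn "prompting..." >> return (Just "typed"))
      print r2    -- prints "prompting..." first, then Just "typed"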
+getPackageName :: SourcePackageDb -> InitFlags -> IO InitFlags +getPackageName sourcePkgDb flags = do + guess <- traverse guessPackageName (flagToMaybe $ packageDir flags) + ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName) + + let guess' | isPkgRegistered guess = Nothing + | otherwise = guess + + pkgName' <- return (flagToMaybe $ packageName flags) + ?>> maybePrompt flags (prompt "Package name" guess') + ?>> return guess' + + chooseAgain <- if isPkgRegistered pkgName' + then promptYesNo promptOtherNameMsg (Just True) + else return False + + if chooseAgain + then getPackageName sourcePkgDb flags + else return $ flags { packageName = maybeToFlag pkgName' } + + where + isPkgRegistered (Just pkg) = elemByPackageName (packageIndex sourcePkgDb) pkg + isPkgRegistered Nothing = False + + promptOtherNameMsg = "This package name is already used by another " ++ + "package on hackage. Do you want to choose a " ++ + "different name" + +-- | Package version: use 0.1.0.0 as a last resort, but try prompting the user +-- if possible. +getVersion :: InitFlags -> IO InitFlags +getVersion flags = do + let v = Just $ Version [0,1,0,0] [] + v' <- return (flagToMaybe $ version flags) + ?>> maybePrompt flags (prompt "Package version" v) + ?>> return v + return $ flags { version = maybeToFlag v' } + +-- | Choose a license. +getLicense :: InitFlags -> IO InitFlags +getLicense flags = do + lic <- return (flagToMaybe $ license flags) + ?>> fmap (fmap (either UnknownLicense id)) + (maybePrompt flags + (promptList "Please choose a license" listedLicenses (Just BSD3) display True)) + return $ flags { license = maybeToFlag lic } + where + listedLicenses = + knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing + , Apache Nothing, OtherLicense] + +-- | The author's name and email. Prompt, or try to guess from an existing +-- darcs repo. +getAuthorInfo :: InitFlags -> IO InitFlags +getAuthorInfo flags = do + (authorName, authorEmail) <- + (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail + authorName' <- return (flagToMaybe $ author flags) + ?>> maybePrompt flags (promptStr "Author name" authorName) + ?>> return authorName + + authorEmail' <- return (flagToMaybe $ email flags) + ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail) + ?>> return authorEmail + + return $ flags { author = maybeToFlag authorName' + , email = maybeToFlag authorEmail' + } + +-- | Prompt for a homepage URL. +getHomepage :: InitFlags -> IO InitFlags +getHomepage flags = do + hp <- queryHomepage + hp' <- return (flagToMaybe $ homepage flags) + ?>> maybePrompt flags (promptStr "Project homepage URL" hp) + ?>> return hp + + return $ flags { homepage = maybeToFlag hp' } + +-- | Right now this does nothing, but it could be changed to do some +-- intelligent guessing. +queryHomepage :: IO (Maybe String) +queryHomepage = return Nothing -- get default remote darcs repo? + +-- | Prompt for a project synopsis. +getSynopsis :: InitFlags -> IO InitFlags +getSynopsis flags = do + syn <- return (flagToMaybe $ synopsis flags) + ?>> maybePrompt flags (promptStr "Project synopsis" Nothing) + + return $ flags { synopsis = maybeToFlag syn } + +-- | Prompt for a package category. +-- Note that it should be possible to do some smarter guessing here too, i.e. +-- look at the name of the top level source directory. 
+getCategory :: InitFlags -> IO InitFlags +getCategory flags = do + cat <- return (flagToMaybe $ category flags) + ?>> fmap join (maybePrompt flags + (promptListOptional "Project category" [Codec ..])) + return $ flags { category = maybeToFlag cat } + +-- | Try to guess extra source files (don't prompt the user). +getExtraSourceFiles :: InitFlags -> IO InitFlags +getExtraSourceFiles flags = do + extraSrcFiles <- return (extraSrc flags) + ?>> Just `fmap` guessExtraSourceFiles flags + + return $ flags { extraSrc = extraSrcFiles } + +defaultChangeLog :: FilePath +defaultChangeLog = "ChangeLog.md" + +-- | Try to guess things to include in the extra-source-files field. +-- For now, we just look for things in the root directory named +-- 'readme', 'changes', or 'changelog', with any sort of +-- capitalization and any extension. +guessExtraSourceFiles :: InitFlags -> IO [FilePath] +guessExtraSourceFiles flags = do + dir <- + maybe getCurrentDirectory return . flagToMaybe $ packageDir flags + files <- getDirectoryContents dir + let extraFiles = filter isExtra files + if any isLikeChangeLog extraFiles + then return extraFiles + else return (defaultChangeLog : extraFiles) + + where + isExtra = likeFileNameBase ("README" : changeLogLikeBases) + isLikeChangeLog = likeFileNameBase changeLogLikeBases + likeFileNameBase candidates = (`elem` candidates) . map toUpper . takeBaseName + changeLogLikeBases = ["CHANGES", "CHANGELOG"] + +-- | Ask whether the project builds a library or executable. +getLibOrExec :: InitFlags -> IO InitFlags +getLibOrExec flags = do + isLib <- return (flagToMaybe $ packageType flags) + ?>> maybePrompt flags (either (const Library) id `fmap` + promptList "What does the package build" + [Library, Executable] + Nothing display False) + ?>> return (Just Library) + mainFile <- if isLib /= Just Executable then return Nothing else + getMainFile flags + + return $ flags { packageType = maybeToFlag isLib + , mainIs = maybeToFlag mainFile + } + +-- | Try to guess the main file of the executable, and prompt the user to choose +-- one of them. Top-level modules including the word 'Main' in the file name +-- will be candidates, and shorter filenames will be preferred. +getMainFile :: InitFlags -> IO (Maybe FilePath) +getMainFile flags = + return (flagToMaybe $ mainIs flags) + ?>> do + candidates <- guessMainFileCandidates flags + let showCandidate = either (++" (does not yet exist, but will be created)") id + defaultFile = listToMaybe candidates + maybePrompt flags (either id (either id id) `fmap` + promptList "What is the main module of the executable" + candidates + defaultFile showCandidate True) + ?>> return (fmap (either id id) defaultFile) + +-- | Ask for the base language of the package. +getLanguage :: InitFlags -> IO InitFlags +getLanguage flags = do + lang <- return (flagToMaybe $ language flags) + ?>> maybePrompt flags + (either UnknownLanguage id `fmap` + promptList "What base language is the package written in" + [Haskell2010, Haskell98] + (Just Haskell2010) display True) + ?>> return (Just Haskell2010) + + return $ flags { language = maybeToFlag lang } + +-- | Ask whether to generate explanatory comments. 
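As an aside (not part of the upstream patch), the case-insensitive base-name test that guessExtraSourceFiles above uses to spot README/changelog candidates can be exercised on its own; a standalone sketch reusing the same likeFileNameBase definition:

    import Data.Char (toUpper)
    import System.FilePath (takeBaseName)

    -- Match a file whose base name, upper-cased, is one of the candidates.
    likeFileNameBase :: [String] -> FilePath -> Bool
    likeFileNameBase candidates = (`elem` candidates) . map toUpper . takeBaseName

    main :: IO ()
    main = mapM_ (print . likeFileNameBase ["README", "CHANGES", "CHANGELOG"])
      [ "readme.markdown"   -- True
      , "ChangeLog.md"      -- True
      , "Setup.hs"          -- False
      ]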
+getGenComments :: InitFlags -> IO InitFlags +getGenComments flags = do + genComments <- return (not <$> flagToMaybe (noComments flags)) + ?>> maybePrompt flags (promptYesNo promptMsg (Just False)) + ?>> return (Just False) + return $ flags { noComments = maybeToFlag (fmap not genComments) } + where + promptMsg = "Add informative comments to each field in the cabal file (y/n)" + +-- | Ask for the source root directory. +getSrcDir :: InitFlags -> IO InitFlags +getSrcDir flags = do + srcDirs <- return (sourceDirs flags) + ?>> fmap (:[]) `fmap` guessSourceDir flags + ?>> fmap (>>= fmap ((:[]) . either id id)) (maybePrompt + flags + (promptListOptional' "Source directory" ["src"] id)) + + return $ flags { sourceDirs = srcDirs } + +-- | Try to guess source directory. Could try harder; for the +-- moment just looks to see whether there is a directory called 'src'. +guessSourceDir :: InitFlags -> IO (Maybe String) +guessSourceDir flags = do + dir <- + maybe getCurrentDirectory return . flagToMaybe $ packageDir flags + srcIsDir <- doesDirectoryExist (dir "src") + return $ if srcIsDir + then Just "src" + else Nothing + +-- | Get the list of exposed modules and extra tools needed to build them. +getModulesBuildToolsAndDeps :: InstalledPackageIndex -> InitFlags -> IO InitFlags +getModulesBuildToolsAndDeps pkgIx flags = do + dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags + + -- TODO: really should use guessed source roots. + sourceFiles <- scanForModules dir + + Just mods <- return (exposedModules flags) + ?>> (return . Just . map moduleName $ sourceFiles) + + tools <- return (buildTools flags) + ?>> (return . Just . neededBuildPrograms $ sourceFiles) + + deps <- return (dependencies flags) + ?>> Just <$> importsToDeps flags + (fromString "Prelude" : -- to ensure we get base as a dep + ( nub -- only need to consider each imported package once + . filter (`notElem` mods) -- don't consider modules from + -- this package itself + . concatMap imports + $ sourceFiles + ) + ) + pkgIx + + exts <- return (otherExts flags) + ?>> (return . Just . nub . concatMap extensions $ sourceFiles) + + return $ flags { exposedModules = Just mods + , buildTools = tools + , dependencies = deps + , otherExts = exts + } + +importsToDeps :: InitFlags -> [ModuleName] -> InstalledPackageIndex -> IO [P.Dependency] +importsToDeps flags mods pkgIx = do + + let modMap :: M.Map ModuleName [InstalledPackageInfo] + modMap = M.map (filter exposed) $ moduleNameIndex pkgIx + + modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])] + modDeps = map (id &&& flip M.lookup modMap) mods + + message flags "\nGuessing dependencies..." + nub . catMaybes <$> mapM (chooseDep flags) modDeps + +-- Given a module and a list of installed packages providing it, +-- choose a dependency (i.e. package + version range) to use for that +-- module. +chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo]) + -> IO (Maybe P.Dependency) + +chooseDep flags (m, Nothing) + = message flags ("\nWarning: no package found providing " ++ display m ++ ".") + >> return Nothing + +chooseDep flags (m, Just []) + = message flags ("\nWarning: no package found providing " ++ display m ++ ".") + >> return Nothing + + -- We found some packages: group them by name. +chooseDep flags (m, Just ps) + = case pkgGroups of + -- if there's only one group, i.e. multiple versions of a single package, + -- we make it into a dependency, choosing the latest-ish version (see toDep). 
+ [grp] -> Just <$> toDep grp + -- otherwise, we refuse to choose between different packages and make the user + -- do it. + grps -> do message flags ("\nWarning: multiple packages found providing " + ++ display m + ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps)) + message flags "You will need to pick one and manually add it to the Build-depends: field." + return Nothing + where + pkgGroups = groupBy ((==) `on` P.pkgName) (map sourcePackageId ps) + + -- Given a list of available versions of the same package, pick a dependency. + toDep :: [P.PackageIdentifier] -> IO P.Dependency + + -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* + toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize . P.pkgVersion $ pid) + + -- Otherwise, choose the latest version and issue a warning. + toDep pids = do + message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.") + return $ P.Dependency (P.pkgName . head $ pids) + (pvpize . maximum . map P.pkgVersion $ pids) + +-- | Given a version, return an API-compatible (according to PVP) version range. +-- +-- Example: @0.4.1@ produces the version range @>= 0.4 && < 0.5@ (which is the +-- same as @0.4.*@). +pvpize :: Version -> VersionRange +pvpize v = orLaterVersion v' + `intersectVersionRanges` + earlierVersion (incVersion 1 v') + where v' = (v { versionBranch = take 2 (versionBranch v) }) + +-- | Increment the nth version component (counting from 0). +incVersion :: Int -> Version -> Version +incVersion n (Version vlist tags) = Version (incVersion' n vlist) tags + where + incVersion' 0 [] = [1] + incVersion' 0 (v:_) = [v+1] + incVersion' m [] = replicate m 0 ++ [1] + incVersion' m (v:vs) = v : incVersion' (m-1) vs + +--------------------------------------------------------------------------- +-- Prompting/user interaction ------------------------------------------- +--------------------------------------------------------------------------- + +-- | Run a prompt or not based on the nonInteractive flag of the +-- InitFlags structure. +maybePrompt :: InitFlags -> IO t -> IO (Maybe t) +maybePrompt flags p = + case nonInteractive flags of + Flag True -> return Nothing + _ -> Just `fmap` p + +-- | Create a prompt with optional default value that returns a +-- String. +promptStr :: String -> Maybe String -> IO String +promptStr = promptDefault' Just id + +-- | Create a yes/no prompt with optional default value. +-- +promptYesNo :: String -> Maybe Bool -> IO Bool +promptYesNo = + promptDefault' recogniseYesNo showYesNo + where + recogniseYesNo s | s == "y" || s == "Y" = Just True + | s == "n" || s == "N" = Just False + | otherwise = Nothing + showYesNo True = "y" + showYesNo False = "n" + +-- | Create a prompt with optional default value that returns a value +-- of some Text instance. +prompt :: Text t => String -> Maybe t -> IO t +prompt = promptDefault' + (either (const Nothing) Just . runReadE (readP_to_E id parse)) + display + +-- | Create a prompt with an optional default value. +promptDefault' :: (String -> Maybe t) -- ^ parser + -> (t -> String) -- ^ pretty-printer + -> String -- ^ prompt message + -> Maybe t -- ^ optional default value + -> IO t +promptDefault' parser pretty pr def = do + putStr $ mkDefPrompt pr (pretty `fmap` def) + inp <- getLine + case (inp, def) of + ("", Just d) -> return d + _ -> case parser inp of + Just t -> return t + Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!" 
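As an aside (not part of the upstream patch), the PVP helpers above are easy to check by hand: per the pvpize comment, 0.4.1 is truncated to 0.4 and bounded above by incVersion 1 of that, giving >= 0.4 && < 0.5. A standalone sketch of incVersion (copied from the patch; the pvpize range itself is only described in comments here, to avoid a Cabal library dependency in the example):

    import Data.Version (Version(..), showVersion)

    -- Increment the nth version component, padding with zeros if needed.
    incVersion :: Int -> Version -> Version
    incVersion n (Version vlist tags) = Version (incVersion' n vlist) tags
      where
        incVersion' 0 []     = [1]
        incVersion' 0 (v:_)  = [v+1]
        incVersion' m []     = replicate m 0 ++ [1]
        incVersion' m (v:vs) = v : incVersion' (m-1) vs

    main :: IO ()
    main = do
      putStrLn (showVersion (incVersion 1 (Version [0,4] [])))    -- "0.5": the upper bound for 0.4.*
      putStrLn (showVersion (incVersion 2 (Version [1,2,3] [])))  -- "1.2.4"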
+ promptDefault' parser pretty pr def + +-- | Create a prompt from a prompt string and a String representation +-- of an optional default value. +mkDefPrompt :: String -> Maybe String -> String +mkDefPrompt pr def = pr ++ "?" ++ defStr def + where defStr Nothing = " " + defStr (Just s) = " [default: " ++ s ++ "] " + +promptListOptional :: (Text t, Eq t) + => String -- ^ prompt + -> [t] -- ^ choices + -> IO (Maybe (Either String t)) +promptListOptional pr choices = promptListOptional' pr choices display + +promptListOptional' :: Eq t + => String -- ^ prompt + -> [t] -- ^ choices + -> (t -> String) -- ^ show an item + -> IO (Maybe (Either String t)) +promptListOptional' pr choices displayItem = + fmap rearrange + $ promptList pr (Nothing : map Just choices) (Just Nothing) + (maybe "(none)" displayItem) True + where + rearrange = either (Just . Left) (fmap Right) + +-- | Create a prompt from a list of items. +promptList :: Eq t + => String -- ^ prompt + -> [t] -- ^ choices + -> Maybe t -- ^ optional default value + -> (t -> String) -- ^ show an item + -> Bool -- ^ whether to allow an 'other' option + -> IO (Either String t) +promptList pr choices def displayItem other = do + putStrLn $ pr ++ ":" + let options1 = map (\c -> (Just c == def, displayItem c)) choices + options2 = zip ([1..]::[Int]) + (options1 ++ [(False, "Other (specify)") | other]) + mapM_ (putStrLn . \(n,(i,s)) -> showOption n i ++ s) options2 + promptList' displayItem (length options2) choices def other + where showOption n i | n < 10 = " " ++ star i ++ " " ++ rest + | otherwise = " " ++ star i ++ rest + where rest = show n ++ ") " + star True = "*" + star False = " " + +promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t) +promptList' displayItem numChoices choices def other = do + putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def) + inp <- getLine + case (inp, def) of + ("", Just d) -> return $ Right d + _ -> case readMaybe inp of + Nothing -> invalidChoice inp + Just n -> getChoice n + where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice." + promptList' displayItem numChoices choices def other + getChoice n | n < 1 || n > numChoices = invalidChoice (show n) + | n < numChoices || + (n == numChoices && not other) + = return . Right $ choices !! (n-1) + | otherwise = Left `fmap` promptStr "Please specify" Nothing + +readMaybe :: (Read a) => String -> Maybe a +readMaybe s = case reads s of + [(a,"")] -> Just a + _ -> Nothing + +--------------------------------------------------------------------------- +-- File generation ------------------------------------------------------ +--------------------------------------------------------------------------- + +writeLicense :: InitFlags -> IO () +writeLicense flags = do + message flags "\nGenerating LICENSE..." + year <- show <$> getYear + let authors = fromMaybe "???" . flagToMaybe . 
author $ flags + let licenseFile = + case license flags of + Flag BSD2 + -> Just $ bsd2 authors year + + Flag BSD3 + -> Just $ bsd3 authors year + + Flag (GPL (Just (Version {versionBranch = [2]}))) + -> Just gplv2 + + Flag (GPL (Just (Version {versionBranch = [3]}))) + -> Just gplv3 + + Flag (LGPL (Just (Version {versionBranch = [2, 1]}))) + -> Just lgpl21 + + Flag (LGPL (Just (Version {versionBranch = [3]}))) + -> Just lgpl3 + + Flag (AGPL (Just (Version {versionBranch = [3]}))) + -> Just agplv3 + + Flag (Apache (Just (Version {versionBranch = [2, 0]}))) + -> Just apache20 + + Flag MIT + -> Just $ mit authors year + + Flag (MPL (Version {versionBranch = [2, 0]})) + -> Just mpl20 + + Flag ISC + -> Just $ isc authors year + + _ -> Nothing + + case licenseFile of + Just licenseText -> writeFileSafe flags "LICENSE" licenseText + Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself." + +getYear :: IO Integer +getYear = do + u <- getCurrentTime + z <- getCurrentTimeZone + let l = utcToLocalTime z u + (y, _, _) = toGregorian $ localDay l + return y + +writeSetupFile :: InitFlags -> IO () +writeSetupFile flags = do + message flags "Generating Setup.hs..." + writeFileSafe flags "Setup.hs" setupFile + where + setupFile = unlines + [ "import Distribution.Simple" + , "main = defaultMain" + ] + +writeChangeLog :: InitFlags -> IO () +writeChangeLog flags = when (any (== defaultChangeLog) $ maybe [] id (extraSrc flags)) $ do + message flags ("Generating "++ defaultChangeLog ++"...") + writeFileSafe flags defaultChangeLog changeLog + where + changeLog = unlines + [ "# Revision history for " ++ pname + , "" + , "## " ++ pver ++ " -- YYYY-mm-dd" + , "" + , "* First version. Released on an unsuspecting world." + ] + pname = maybe "" display $ flagToMaybe $ packageName flags + pver = maybe "" display $ flagToMaybe $ version flags + + + +writeCabalFile :: InitFlags -> IO Bool +writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do + message flags "Error: no package name provided." + return False +writeCabalFile flags@(InitFlags{packageName = Flag p}) = do + let cabalFileName = display p ++ ".cabal" + message flags $ "Generating " ++ cabalFileName ++ "..." + writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags) + return True + +-- | Write a file \"safely\", backing up any existing version (unless +-- the overwrite flag is set). +writeFileSafe :: InitFlags -> FilePath -> String -> IO () +writeFileSafe flags fileName content = do + moveExistingFile flags fileName + writeFile fileName content + +-- | Create source directories, if they were given. +createSourceDirectories :: InitFlags -> IO () +createSourceDirectories flags = case sourceDirs flags of + Just dirs -> forM_ dirs (createDirectoryIfMissing True) + Nothing -> return () + +-- | Create Main.hs, but only if we are init'ing an executable and +-- the mainIs flag has been provided. +createMainHs :: InitFlags -> IO () +createMainHs flags@InitFlags{ sourceDirs = Just (srcPath:_) + , packageType = Flag Executable + , mainIs = Flag mainFile } = + writeMainHs flags (srcPath mainFile) +createMainHs flags@InitFlags{ sourceDirs = _ + , packageType = Flag Executable + , mainIs = Flag mainFile } = + writeMainHs flags mainFile +createMainHs _ = return () + +-- | Write a main file if it doesn't already exist. 
+writeMainHs :: InitFlags -> FilePath -> IO ()
+writeMainHs flags mainPath = do
+  dir <- maybe getCurrentDirectory return (flagToMaybe $ packageDir flags)
+  let mainFullPath = dir </> mainPath
+  exists <- doesFileExist mainFullPath
+  unless exists $ do
+      message flags $ "Generating " ++ mainPath ++ "..."
+      writeFileSafe flags mainFullPath mainHs
+
+-- | Default Main.hs file. Used when no Main.hs exists.
+mainHs :: String
+mainHs = unlines
+  [ "module Main where"
+  , ""
+  , "main :: IO ()"
+  , "main = putStrLn \"Hello, Haskell!\""
+  ]
+
+-- | Move an existing file, if there is one, and the overwrite flag is
+--   not set.
+moveExistingFile :: InitFlags -> FilePath -> IO ()
+moveExistingFile flags fileName =
+  unless (overwrite flags == Flag True) $ do
+    e <- doesFileExist fileName
+    when e $ do
+      newName <- findNewName fileName
+      message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName
+      copyFile fileName newName
+
+findNewName :: FilePath -> IO FilePath
+findNewName oldName = findNewName' 0
+  where
+    findNewName' :: Integer -> IO FilePath
+    findNewName' n = do
+      let newName = oldName <.> ("save" ++ show n)
+      e <- doesFileExist newName
+      if e then findNewName' (n+1) else return newName
+
+-- | Generate a .cabal file from an InitFlags structure. NOTE: this
+--   is rather ad-hoc! What we would REALLY like is to have a
+--   standard low-level AST type representing .cabal files, which
+--   preserves things like comments, and to write an *inverse*
+--   parser/pretty-printer pair between .cabal files and this AST.
+--   Then instead of this ad-hoc code we could just map an InitFlags
+--   structure onto a low-level AST structure and use the existing
+--   pretty-printing code to generate the file.
+generateCabalFile :: String -> InitFlags -> String
+generateCabalFile fileName c =
+  (++ "\n") .
+  renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $
+  (if minimal c /= Flag True
+    then showComment (Just $ "Initial " ++ fileName ++ " generated by cabal "
+                          ++ "init. For further documentation, see "
+                          ++ "http://haskell.org/cabal/users-guide/")
+         $$ text ""
+    else empty)
+  $$
+  vcat [ field  "name" (packageName c)
+                (Just "The name of the package.")
+                True
+
+       , field  "version" (version c)
+                (Just $ "The package version. 
See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttps://wiki.haskell.org/Package_versioning_policy\n" + ++ "PVP summary: +-+------- breaking API changes\n" + ++ " | | +----- non-breaking API additions\n" + ++ " | | | +--- code changes with no API change") + True + + , fieldS "synopsis" (synopsis c) + (Just "A short (one-line) description of the package.") + True + + , fieldS "description" NoFlag + (Just "A longer description of the package.") + True + + , fieldS "homepage" (homepage c) + (Just "URL for the project homepage or repository.") + False + + , fieldS "bug-reports" NoFlag + (Just "A URL where users can report bugs.") + False + + , field "license" (license c) + (Just "The license under which the package is released.") + True + + , case (license c) of + Flag PublicDomain -> empty + _ -> fieldS "license-file" (Flag "LICENSE") + (Just "The file containing the license text.") + True + + , fieldS "author" (author c) + (Just "The package author(s).") + True + + , fieldS "maintainer" (email c) + (Just "An email address to which users can send suggestions, bug reports, and patches.") + True + + , case (license c) of + Flag PublicDomain -> empty + _ -> fieldS "copyright" NoFlag + (Just "A copyright notice.") + True + + , fieldS "category" (either id display `fmap` category c) + Nothing + True + + , fieldS "build-type" (Flag "Simple") + Nothing + True + + , fieldS "extra-source-files" (listFieldS (extraSrc c)) + (Just "Extra files to be distributed with the package, such as examples or a README.") + True + + , field "cabal-version" (Flag $ orLaterVersion (Version [1,10] [])) + (Just "Constraint on the version of Cabal needed to build this package.") + False + + , case packageType c of + Flag Executable -> + text "\nexecutable" <+> + text (maybe "" display . flagToMaybe $ packageName c) $$ + nest 2 (vcat + [ fieldS "main-is" (mainIs c) (Just ".hs or .lhs file containing the Main module.") True + + , generateBuildInfo Executable c + ]) + Flag Library -> text "\nlibrary" $$ nest 2 (vcat + [ fieldS "exposed-modules" (listField (exposedModules c)) + (Just "Modules exported by the library.") + True + + , generateBuildInfo Library c + ]) + _ -> empty + ] + where + generateBuildInfo :: PackageType -> InitFlags -> Doc + generateBuildInfo pkgtype c' = vcat + [ fieldS "other-modules" (listField (otherModules c')) + (Just $ case pkgtype of + Library -> "Modules included in this library but not exported." + Executable -> "Modules included in this executable, other than Main.") + True + + , fieldS "other-extensions" (listField (otherExts c')) + (Just "LANGUAGE extensions used by modules in this package.") + True + + , fieldS "build-depends" (listField (dependencies c')) + (Just "Other library packages from which modules are imported.") + True + + , fieldS "hs-source-dirs" (listFieldS (sourceDirs c')) + (Just "Directories containing source files.") + True + + , fieldS "build-tools" (listFieldS (buildTools c')) + (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.") + False + + , field "default-language" (language c') + (Just "Base language which the package is written in.") + True + ] + + listField :: Text s => Maybe [s] -> Flag String + listField = listFieldS . fmap (map display) + + listFieldS :: Maybe [String] -> Flag String + listFieldS = Flag . 
maybe "" (intercalate ", ") + + field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc + field s f = fieldS s (fmap display f) + + fieldS :: String -- ^ Name of the field + -> Flag String -- ^ Field contents + -> Maybe String -- ^ Comment to explain the field + -> Bool -- ^ Should the field be included (commented out) even if blank? + -> Doc + fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty + fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty + fieldS s f com _ = case (isJust com, noComments c, minimal c) of + (_, _, Flag True) -> id + (_, Flag True, _) -> id + (True, _, _) -> (showComment com $$) . ($$ text "") + (False, _, _) -> ($$ text "") + $ + comment f <> text s <> colon + <> text (replicate (20 - length s) ' ') + <> text (fromMaybe "" . flagToMaybe $ f) + comment NoFlag = text "-- " + comment (Flag "") = text "-- " + comment _ = text "" + + showComment :: Maybe String -> Doc + showComment (Just t) = vcat + . map (text . ("-- "++)) . lines + . renderStyle style { + lineLength = 76, + ribbonsPerLine = 1.05 + } + . vcat + . map (fcat . map text . breakLine) + . lines + $ t + showComment Nothing = text "" + + breakLine [] = [] + breakLine cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs' + breakLine' [] = [] + breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs' + +-- | Generate warnings for missing fields etc. +generateWarnings :: InitFlags -> IO () +generateWarnings flags = do + message flags "" + when (synopsis flags `elem` [NoFlag, Flag ""]) + (message flags "Warning: no synopsis given. You should edit the .cabal file and add one.") + + message flags "You may want to edit the .cabal file and add a Description field." + +-- | Possibly generate a message to stdout, taking into account the +-- --quiet flag. +message :: InitFlags -> String -> IO () +message (InitFlags{quiet = Flag True}) _ = return () +message _ s = putStrLn s diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Install.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Install.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Install.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Install.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,1641 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Install +-- Copyright : (c) 2005 David Himmelstrup +-- 2007 Bjorn Bringert +-- 2007-2010 Duncan Coutts +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- High level interface to package installation. 
+-----------------------------------------------------------------------------
+module Distribution.Client.Install (
+    -- * High-level interface
+    install,
+
+    -- * Lower-level interface that allows to manipulate the install plan
+    makeInstallContext,
+    makeInstallPlan,
+    processInstallPlan,
+    InstallArgs,
+    InstallContext,
+
+    -- * Prune certain packages from the install plan
+    pruneInstallPlan
+  ) where
+
+import Data.Foldable
+         ( traverse_ )
+import Data.List
+         ( isPrefixOf, unfoldr, nub, sort, (\\) )
+import qualified Data.Map as Map
+import qualified Data.Set as S
+import Data.Maybe
+         ( catMaybes, isJust, isNothing, fromMaybe, mapMaybe )
+import Control.Exception as Exception
+         ( Exception(toException), bracket, catches
+         , Handler(Handler), handleJust, IOException, SomeException )
+#ifndef mingw32_HOST_OS
+import Control.Exception as Exception
+         ( Exception(fromException) )
+#endif
+import System.Exit
+         ( ExitCode(..) )
+import Distribution.Compat.Exception
+         ( catchIO, catchExit )
+#if !MIN_VERSION_base(4,8,0)
+import Control.Applicative
+         ( (<$>) )
+import Data.Traversable
+         ( traverse )
+#endif
+import Control.Monad
+         ( filterM, forM_, when, unless )
+import System.Directory
+         ( getTemporaryDirectory, doesDirectoryExist, doesFileExist,
+           createDirectoryIfMissing, removeFile, renameDirectory )
+import System.FilePath
+         ( (</>), (<.>), equalFilePath, takeDirectory )
+import System.IO
+         ( openFile, IOMode(AppendMode), hClose )
+import System.IO.Error
+         ( isDoesNotExistError, ioeGetFileName )
+
+import Distribution.Client.Targets
+import Distribution.Client.Configure
+         ( chooseCabalVersion, configureSetupScript, checkConfigExFlags )
+import Distribution.Client.Dependency
+import Distribution.Client.Dependency.Types
+         ( Solver(..), ConstraintSource(..), LabeledPackageConstraint(..) )
+import Distribution.Client.FetchUtils
+import Distribution.Client.HttpUtils
+         ( HttpTransport (..) )
+import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex)
+import Distribution.Client.IndexUtils as IndexUtils
+         ( getSourcePackages, getInstalledPackages )
+import qualified Distribution.Client.InstallPlan as InstallPlan
+import Distribution.Client.InstallPlan (InstallPlan)
+import Distribution.Client.Setup
+         ( GlobalFlags(..), RepoContext(..)
+         , ConfigFlags(..), configureCommand, filterConfigureFlags
+         , ConfigExFlags(..), InstallFlags(..) )
+import Distribution.Client.Config
+         ( defaultCabalDir, defaultUserInstall )
+import Distribution.Client.Sandbox.Timestamp
+         ( withUpdateTimestamps )
+import Distribution.Client.Sandbox.Types
+         ( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox
+         , whenUsingSandbox )
+import Distribution.Client.Tar (extractTarGzFile)
+import Distribution.Client.Types as Source
+import Distribution.Client.BuildReports.Types
+         ( ReportLevel(..) 
) +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import qualified Distribution.Client.BuildReports.Anonymous as BuildReports +import qualified Distribution.Client.BuildReports.Storage as BuildReports + ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) +import qualified Distribution.Client.InstallSymlink as InstallSymlink + ( symlinkBinaries ) +import qualified Distribution.Client.PackageIndex as SourcePackageIndex +import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade +import qualified Distribution.Client.World as World +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Client.Compat.ExecutablePath +import Distribution.Client.JobControl +import qualified Distribution.Client.ComponentDeps as CD + +import Distribution.Utils.NubList +import Distribution.Simple.Compiler + ( CompilerId(..), Compiler(compilerId), compilerFlavor + , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) +import Distribution.Simple.Program (ProgramConfiguration, + defaultProgramConfiguration) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.LocalBuildInfo (ComponentName(CLibName)) +import qualified Distribution.Simple.Configure as Configure +import Distribution.Simple.Setup + ( haddockCommand, HaddockFlags(..) + , buildCommand, BuildFlags(..), emptyBuildFlags + , AllowNewer(..) + , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) +import qualified Distribution.Simple.Setup as Cabal + ( Flag(..) + , copyCommand, CopyFlags(..), emptyCopyFlags + , registerCommand, RegisterFlags(..), emptyRegisterFlags + , testCommand, TestFlags(..), emptyTestFlags ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, rawSystemExit, comparing + , writeFileAtomic, withTempFile , withUTF8FileContents ) +import Distribution.Simple.InstallDirs as InstallDirs + ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate + , initialPathTemplateEnv, installDirsTemplateEnv ) +import Distribution.Package + ( PackageIdentifier(..), PackageId, packageName, packageVersion + , Package(..) + , Dependency(..), thisPackageVersion + , UnitId(..), mkUnitId + , HasUnitId(..) ) +import qualified Distribution.PackageDescription as PackageDescription +import Distribution.PackageDescription + ( PackageDescription, GenericPackageDescription(..), Flag(..) + , FlagName(..), FlagAssignment ) +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription ) +import Distribution.Client.PkgConfigDb + ( PkgConfigDb, readPkgConfigDb ) +import Distribution.ParseUtils + ( showPWarning ) +import Distribution.Version + ( Version, VersionRange, foldVersionRange ) +import Distribution.Simple.Utils as Utils + ( notice, info, warn, debug, debugNoWrap, die + , intercalate, withTempDirectory ) +import Distribution.Client.Utils + ( determineNumJobs, inDir, logDirChange, mergeBy, MergeResult(..) 
+ , tryCanonicalizePath ) +import Distribution.System + ( Platform, OS(Windows), buildOS ) +import Distribution.Text + ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity, showForCabal, normal, verbose ) +import Distribution.Simple.BuildPaths ( exeExtension ) + +--TODO: +-- * assign flags to packages individually +-- * complain about flags that do not apply to any package given as target +-- so flags do not apply to dependencies, only listed, can use flag +-- constraints for dependencies +-- * only record applicable flags in world file +-- * allow flag constraints +-- * allow installed constraints +-- * allow flag and installed preferences +-- * change world file to use cabal section syntax +-- * allow persistent configure flags for each package individually + +-- ------------------------------------------------------------ +-- * Top level user actions +-- ------------------------------------------------------------ + +-- | Installs the packages needed to satisfy a list of dependencies. +-- +install + :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramConfiguration + -> UseSandbox + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> ConfigFlags + -> ConfigExFlags + -> InstallFlags + -> HaddockFlags + -> [UserTarget] + -> IO () +install verbosity packageDBs repos comp platform conf useSandbox mSandboxPkgInfo + globalFlags configFlags configExFlags installFlags haddockFlags + userTargets0 = do + + installContext <- makeInstallContext verbosity args (Just userTargets0) + planResult <- foldProgress logMsg (return . Left) (return . Right) =<< + makeInstallPlan verbosity args installContext + + case planResult of + Left message -> do + reportPlanningFailure verbosity args installContext message + die' message + Right installPlan -> + processInstallPlan verbosity args installContext installPlan + where + args :: InstallArgs + args = (packageDBs, repos, comp, platform, conf, useSandbox, mSandboxPkgInfo, + globalFlags, configFlags, configExFlags, installFlags, + haddockFlags) + + die' message = die (message ++ if isUseSandbox useSandbox + then installFailedInSandbox else []) + -- TODO: use a better error message, remove duplication. + installFailedInSandbox = + "\nNote: when using a sandbox, all packages are required to have " + ++ "consistent dependencies. " + ++ "Try reinstalling/unregistering the offending packages or " + ++ "recreating the sandbox." + logMsg message rest = debugNoWrap verbosity message >> rest + +-- TODO: Make InstallContext a proper data type with documented fields. +-- | Common context for makeInstallPlan and processInstallPlan. +type InstallContext = ( InstalledPackageIndex, SourcePackageDb + , PkgConfigDb + , [UserTarget], [PackageSpecifier SourcePackage] + , HttpTransport ) + +-- TODO: Make InstallArgs a proper data type with documented fields or just get +-- rid of it completely. +-- | Initial arguments given to 'install' or 'makeInstallContext'. +type InstallArgs = ( PackageDBStack + , RepoContext + , Compiler + , Platform + , ProgramConfiguration + , UseSandbox + , Maybe SandboxPackageInfo + , GlobalFlags + , ConfigFlags + , ConfigExFlags + , InstallFlags + , HaddockFlags ) + +-- | Make an install context given install arguments. 
+makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] + -> IO InstallContext +makeInstallContext verbosity + (packageDBs, repoCtxt, comp, _, conf,_,_, + globalFlags, _, configExFlags, _, _) mUserTargets = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity conf + + checkConfigExFlags verbosity installedPkgIndex + (packageIndex sourcePkgDb) configExFlags + transport <- repoContextGetTransport repoCtxt + + (userTargets, pkgSpecifiers) <- case mUserTargets of + Nothing -> + -- We want to distinguish between the case where the user has given an + -- empty list of targets on the command-line and the case where we + -- specifically want to have an empty list of targets. + return ([], []) + Just userTargets0 -> do + -- For install, if no target is given it means we use the current + -- directory as the single target. + let userTargets | null userTargets0 = [UserTargetLocalDir "."] + | otherwise = userTargets0 + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + return (userTargets, pkgSpecifiers) + + return (installedPkgIndex, sourcePkgDb, pkgConfigDb, userTargets + ,pkgSpecifiers, transport) + +-- | Make an install plan given install context and install arguments. +makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext + -> IO (Progress String String InstallPlan) +makeInstallPlan verbosity + (_, _, comp, platform, _, _, mSandboxPkgInfo, + _, configFlags, configExFlags, installFlags, + _) + (installedPkgIndex, sourcePkgDb, pkgConfigDb, + _, pkgSpecifiers, _) = do + + solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags)) + (compilerInfo comp) + notice verbosity "Resolving dependencies..." + return $ planPackages comp platform mSandboxPkgInfo solver + configFlags configExFlags installFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + +-- | Given an install plan, perform the actual installations. 
+processInstallPlan :: Verbosity -> InstallArgs -> InstallContext + -> InstallPlan + -> IO () +processInstallPlan verbosity + args@(_,_, _, _, _, _, _, _, _, _, installFlags, _) + (installedPkgIndex, sourcePkgDb, _, + userTargets, pkgSpecifiers, _) installPlan = do + checkPrintPlan verbosity installedPkgIndex installPlan sourcePkgDb + installFlags pkgSpecifiers + + unless (dryRun || nothingToInstall) $ do + installPlan' <- performInstallations verbosity + args installedPkgIndex installPlan + postInstallActions verbosity args userTargets installPlan' + where + dryRun = fromFlag (installDryRun installFlags) + nothingToInstall = null (InstallPlan.ready installPlan) + +-- ------------------------------------------------------------ +-- * Installation planning +-- ------------------------------------------------------------ + +planPackages :: Compiler + -> Platform + -> Maybe SandboxPackageInfo + -> Solver + -> ConfigFlags + -> ConfigExFlags + -> InstallFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier SourcePackage] + -> Progress String String InstallPlan +planPackages comp platform mSandboxPkgInfo solver + configFlags configExFlags installFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = + + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setAvoidReinstalls avoidReinstalls + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + . setPreferenceDefault (if upgradeDeps then PreferAllLatest + else PreferLatestForSelected) + + . removeUpperBounds allowNewer + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- configPreferences configExFlags ] + + . addConstraints + -- version constraints from the config file or command line + [ LabeledPackageConstraint (userToPackageConstraint pc) src + | (pc, src) <- configExConstraints configExFlags ] + + . addConstraints + --FIXME: this just applies all flags to all targets which + -- is silly. We should check if the flags are appropriate + [ let pc = PackageConstraintFlags + (pkgSpecifierTarget pkgSpecifier) flags + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + | let flags = configConfigurationsFlags configFlags + , not (null flags) + , pkgSpecifier <- pkgSpecifiers ] + + . addConstraints + [ let pc = PackageConstraintStanzas + (pkgSpecifierTarget pkgSpecifier) stanzas + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + | pkgSpecifier <- pkgSpecifiers ] + + . maybe id applySandboxInstallPolicy mSandboxPkgInfo + + . 
(if reinstall then reinstallTargets else id) + + $ standardInstallPolicy + installedPkgIndex sourcePkgDb pkgSpecifiers + + stanzas = [ TestStanzas | testsEnabled ] + ++ [ BenchStanzas | benchmarksEnabled ] + testsEnabled = fromFlagOrDefault False $ configTests configFlags + benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags + + reinstall = fromFlag (installOverrideReinstall installFlags) || + fromFlag (installReinstall installFlags) + reorderGoals = fromFlag (installReorderGoals installFlags) + independentGoals = fromFlag (installIndependentGoals installFlags) + avoidReinstalls = fromFlag (installAvoidReinstalls installFlags) + shadowPkgs = fromFlag (installShadowPkgs installFlags) + strongFlags = fromFlag (installStrongFlags installFlags) + maxBackjumps = fromFlag (installMaxBackjumps installFlags) + upgradeDeps = fromFlag (installUpgradeDeps installFlags) + onlyDeps = fromFlag (installOnlyDeps installFlags) + allowNewer = fromMaybe AllowNewerNone (configAllowNewer configFlags) + +-- | Remove the provided targets from the install plan. +pruneInstallPlan :: Package targetpkg + => [PackageSpecifier targetpkg] + -> InstallPlan + -> Progress String String InstallPlan +pruneInstallPlan pkgSpecifiers = + -- TODO: this is a general feature and should be moved to D.C.Dependency + -- Also, the InstallPlan.remove should return info more precise to the + -- problem, rather than the very general PlanProblem type. + either (Fail . explain) Done + . InstallPlan.remove (\pkg -> packageName pkg `elem` targetnames) + where + explain :: [InstallPlan.PlanProblem ipkg srcpkg iresult ifailure] -> String + explain problems = + "Cannot select only the dependencies (as requested by the " + ++ "'--only-dependencies' flag), " + ++ (case pkgids of + [pkgid] -> "the package " ++ display pkgid ++ " is " + _ -> "the packages " + ++ intercalate ", " (map display pkgids) ++ " are ") + ++ "required by a dependency of one of the other targets." + where + pkgids = + nub [ depid + | InstallPlan.PackageMissingDeps _ depids <- problems + , depid <- depids + , packageName depid `elem` targetnames ] + + targetnames = map pkgSpecifierTarget pkgSpecifiers + +-- ------------------------------------------------------------ +-- * Informational messages +-- ------------------------------------------------------------ + +-- | Perform post-solver checks of the install plan and print it if +-- either requested or needed. +checkPrintPlan :: Verbosity + -> InstalledPackageIndex + -> InstallPlan + -> SourcePackageDb + -> InstallFlags + -> [PackageSpecifier SourcePackage] + -> IO () +checkPrintPlan verbosity installed installPlan sourcePkgDb + installFlags pkgSpecifiers = do + + -- User targets that are already installed. + let preExistingTargets = + [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers, + InstallPlan.PreExisting p <- InstallPlan.toList installPlan, + packageName p `elem` tgts ] + + -- If there's nothing to install, we print the already existing + -- target packages as an explanation. + when nothingToInstall $ + notice verbosity $ unlines $ + "All the requested packages are already installed:" + : map (display . packageId) preExistingTargets + ++ ["Use --reinstall if you want to reinstall anyway."] + + let lPlan = linearizeInstallPlan installed installPlan + -- Are any packages classified as reinstalls? + let reinstalledPkgs = concatMap (extractReinstalls . snd) lPlan + -- Packages that are already broken. + let oldBrokenPkgs = + map Installed.installedUnitId + . 
PackageIndex.reverseDependencyClosure installed + . map (Installed.installedUnitId . fst) + . PackageIndex.brokenPackages + $ installed + let excluded = reinstalledPkgs ++ oldBrokenPkgs + -- Packages that are reverse dependencies of replaced packages are very + -- likely to be broken. We exclude packages that are already broken. + let newBrokenPkgs = + filter (\ p -> not (Installed.installedUnitId p `elem` excluded)) + (PackageIndex.reverseDependencyClosure installed reinstalledPkgs) + let containsReinstalls = not (null reinstalledPkgs) + let breaksPkgs = not (null newBrokenPkgs) + + let adaptedVerbosity + | containsReinstalls && not overrideReinstall = verbosity `max` verbose + | otherwise = verbosity + + -- We print the install plan if we are in a dry-run or if we are confronted + -- with a dangerous install plan. + when (dryRun || containsReinstalls && not overrideReinstall) $ + printPlan (dryRun || breaksPkgs && not overrideReinstall) + adaptedVerbosity lPlan sourcePkgDb + + -- If the install plan is dangerous, we print various warning messages. In + -- particular, if we can see that packages are likely to be broken, we even + -- bail out (unless installation has been forced with --force-reinstalls). + when containsReinstalls $ do + if breaksPkgs + then do + (if dryRun || overrideReinstall then warn verbosity else die) $ unlines $ + "The following packages are likely to be broken by the reinstalls:" + : map (display . Installed.sourcePackageId) newBrokenPkgs + ++ if overrideReinstall + then if dryRun then [] else + ["Continuing even though " ++ + "the plan contains dangerous reinstalls."] + else + ["Use --force-reinstalls if you want to install anyway."] + else unless dryRun $ warn verbosity + "Note that reinstalls are always dangerous. Continuing anyway..." + + -- If we are explicitly told to not download anything, check that all packages + -- are already fetched. + let offline = fromFlagOrDefault False (installOfflineMode installFlags) + when offline $ do + let pkgs = [ sourcePkg + | InstallPlan.Configured (ConfiguredPackage sourcePkg _ _ _) + <- InstallPlan.toList installPlan ] + notFetched <- fmap (map packageInfoId) + . filterM (fmap isNothing . checkFetched . packageSource) + $ pkgs + unless (null notFetched) $ + die $ "Can't download packages in offline mode. " + ++ "Must download the following packages to proceed:\n" + ++ intercalate ", " (map display notFetched) + ++ "\nTry using 'cabal fetch'." + + where + nothingToInstall = null (InstallPlan.ready installPlan) + + dryRun = fromFlag (installDryRun installFlags) + overrideReinstall = fromFlag (installOverrideReinstall installFlags) + +--TODO: this type is too specific +linearizeInstallPlan :: InstalledPackageIndex + -> InstallPlan + -> [(ReadyPackage, PackageStatus)] +linearizeInstallPlan installedPkgIndex plan = + unfoldr next plan + where + next plan' = case InstallPlan.ready plan' of + [] -> Nothing + (pkg:_) -> Just ((pkg, status), plan'') + where + pkgid = installedUnitId pkg + status = packageStatus installedPkgIndex pkg + ipkg = Installed.emptyInstalledPackageInfo { + Installed.sourcePackageId = packageId pkg, + Installed.installedUnitId = pkgid + } + plan'' = InstallPlan.completed pkgid (Just ipkg) + (BuildOk DocsNotTried TestsNotTried (Just ipkg)) + (InstallPlan.processing [pkg] plan') + --FIXME: This is a bit of a hack, + -- pretending that each package is installed + -- It's doubly a hack because the installed package ID + -- didn't get updated... 
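-- [Illustrative aside, not part of the upstream patch.] 'linearizeInstallPlan'
-- above pairs each ready package with a 'PackageStatus' (defined just below),
-- and 'printPlan' further down renders each pair with a "(new package)",
-- "(new version)" or "(reinstall)" suffix. A minimal, self-contained sketch of
-- that rendering idea; 'PkgId', 'Status', 'showEntry' and 'summarise' are
-- hypothetical stand-ins for the module's real types and code:

type PkgId = String                  -- stand-in for PackageIdentifier

data Status = New | NewVer | Reinst  -- stand-in for PackageStatus

-- Annotate one linearized entry, mirroring the suffixes printPlan uses.
showEntry :: (PkgId, Status) -> String
showEntry (pid, st) = pid ++ case st of
  New    -> " (new package)"
  NewVer -> " (new version)"
  Reinst -> " (reinstall)"

-- e.g. putStr (summarise [("foo-1.0", New), ("bar-2.1", Reinst)])
summarise :: [(PkgId, Status)] -> String
summarise entries = unlines
  ("In order, the following will be installed:" : map showEntry entries)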
+ +data PackageStatus = NewPackage + | NewVersion [Version] + | Reinstall [UnitId] [PackageChange] + +type PackageChange = MergeResult PackageIdentifier PackageIdentifier + +extractReinstalls :: PackageStatus -> [UnitId] +extractReinstalls (Reinstall ipids _) = ipids +extractReinstalls _ = [] + +packageStatus :: InstalledPackageIndex + -> ReadyPackage + -> PackageStatus +packageStatus installedPkgIndex cpkg = + case PackageIndex.lookupPackageName installedPkgIndex + (packageName cpkg) of + [] -> NewPackage + ps -> case filter ((== packageId cpkg) + . Installed.sourcePackageId) (concatMap snd ps) of + [] -> NewVersion (map fst ps) + pkgs@(pkg:_) -> Reinstall (map Installed.installedUnitId pkgs) + (changes pkg cpkg) + + where + + changes :: Installed.InstalledPackageInfo + -> ReadyPackage + -> [MergeResult PackageIdentifier PackageIdentifier] + changes pkg pkg' = filter changed $ + mergeBy (comparing packageName) + -- deps of installed pkg + (resolveInstalledIds $ Installed.depends pkg) + -- deps of configured pkg + (resolveInstalledIds $ CD.nonSetupDeps (depends pkg')) + + -- convert to source pkg ids via index + resolveInstalledIds :: [UnitId] -> [PackageIdentifier] + resolveInstalledIds = + nub + . sort + . map Installed.sourcePackageId + . catMaybes + . map (PackageIndex.lookupUnitId installedPkgIndex) + + changed (InBoth pkgid pkgid') = pkgid /= pkgid' + changed _ = True + +printPlan :: Bool -- is dry run + -> Verbosity + -> [(ReadyPackage, PackageStatus)] + -> SourcePackageDb + -> IO () +printPlan dryRun verbosity plan sourcePkgDb = case plan of + [] -> return () + pkgs + | verbosity >= Verbosity.verbose -> putStr $ unlines $ + ("In order, the following " ++ wouldWill ++ " be installed:") + : map showPkgAndReason pkgs + | otherwise -> notice verbosity $ unlines $ + ("In order, the following " ++ wouldWill + ++ " be installed (use -v for more details):") + : map showPkg pkgs + where + wouldWill | dryRun = "would" + | otherwise = "will" + + showPkg (pkg, _) = display (packageId pkg) ++ + showLatest (pkg) + + showPkgAndReason (ReadyPackage pkg' _, pr) = display (packageId pkg') ++ + showLatest pkg' ++ + showFlagAssignment (nonDefaultFlags pkg') ++ + showStanzas (stanzas pkg') ++ + showDep pkg' ++ + case pr of + NewPackage -> " (new package)" + NewVersion _ -> " (new version)" + Reinstall _ cs -> " (reinstall)" ++ case cs of + [] -> "" + diff -> " (changes: " ++ intercalate ", " (map change diff) + ++ ")" + + showLatest :: Package srcpkg => srcpkg -> String + showLatest pkg = case mLatestVersion of + Just latestVersion -> + if packageVersion pkg < latestVersion + then (" (latest: " ++ display latestVersion ++ ")") + else "" + Nothing -> "" + where + mLatestVersion :: Maybe Version + mLatestVersion = case SourcePackageIndex.lookupPackageName + (packageIndex sourcePkgDb) + (packageName pkg) of + [] -> Nothing + x -> Just $ packageVersion $ last x + + toFlagAssignment :: [Flag] -> FlagAssignment + toFlagAssignment = map (\ f -> (flagName f, flagDefault f)) + + nonDefaultFlags :: ConfiguredPackage -> FlagAssignment + nonDefaultFlags (ConfiguredPackage spkg fa _ _) = + let defaultAssignment = + toFlagAssignment + (genPackageFlags (Source.packageDescription spkg)) + in fa \\ defaultAssignment + + stanzas :: ConfiguredPackage -> [OptionalStanza] + stanzas (ConfiguredPackage _ _ sts _) = sts + + showStanzas :: [OptionalStanza] -> String + showStanzas = concatMap ((' ' :) . 
showStanza) + showStanza TestStanzas = "*test" + showStanza BenchStanzas = "*bench" + + showFlagAssignment :: FlagAssignment -> String + showFlagAssignment = concatMap ((' ' :) . showFlagValue) + showFlagValue (f, True) = '+' : showFlagName f + showFlagValue (f, False) = '-' : showFlagName f + showFlagName (FlagName f) = f + + change (OnlyInLeft pkgid) = display pkgid ++ " removed" + change (InBoth pkgid pkgid') = display pkgid ++ " -> " + ++ display (packageVersion pkgid') + change (OnlyInRight pkgid') = display pkgid' ++ " added" + + showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps + = " (via: " ++ unwords (map display rdeps) ++ ")" + | otherwise = "" + + revDepGraphEdges :: [(PackageId, PackageId)] + revDepGraphEdges = [ (rpid, packageId pkg) + | (pkg@(ReadyPackage _ deps), _) <- plan + , rpid <- Installed.sourcePackageId <$> CD.flatDeps deps ] + + revDeps :: Map.Map PackageId [PackageId] + revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges) + +-- ------------------------------------------------------------ +-- * Post installation stuff +-- ------------------------------------------------------------ + +-- | Report a solver failure. This works slightly differently to +-- 'postInstallActions', as (by definition) we don't have an install plan. +reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String + -> IO () +reportPlanningFailure verbosity + (_, _, comp, platform, _, _, _ + ,_, configFlags, _, installFlags, _) + (_, sourcePkgDb, _, _, pkgSpecifiers, _) + message = do + + when reportFailure $ do + + -- Only create reports for explicitly named packages + let pkgids = filter + (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $ + mapMaybe theSpecifiedPackage pkgSpecifiers + + buildReports = BuildReports.fromPlanningFailure platform + (compilerId comp) pkgids + (configConfigurationsFlags configFlags) + + when (not (null buildReports)) $ + info verbosity $ + "Solver failure will be reported for " + ++ intercalate "," (map display pkgids) + + -- Save reports + BuildReports.storeLocal (compilerInfo comp) + (fromNubList $ installSummaryFile installFlags) + buildReports platform + + -- Save solver log + case logFile of + Nothing -> return () + Just template -> forM_ pkgids $ \pkgid -> + let env = initialPathTemplateEnv pkgid dummyIpid + (compilerInfo comp) platform + path = fromPathTemplate $ substPathTemplate env template + in writeFile path message + + where + reportFailure = fromFlag (installReportPlanningFailure installFlags) + logFile = flagToMaybe (installLogFile installFlags) + + -- A IPID is calculated from the transitive closure of + -- dependencies, but when the solver fails we don't have that. + -- So we fail. + dummyIpid = error "reportPlanningFailure: installed package ID not available" + +-- | If a 'PackageSpecifier' refers to a single package, return Just that +-- package. +theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId +theSpecifiedPackage pkgSpec = + case pkgSpec of + NamedPackage name [PackageConstraintVersion name' version] + | name == name' -> PackageIdentifier name <$> trivialRange version + NamedPackage _ _ -> Nothing + SpecificSourcePackage pkg -> Just $ packageId pkg + where + -- | If a range includes only a single version, return Just that version. 
+    trivialRange :: VersionRange -> Maybe Version
+    trivialRange = foldVersionRange
+        Nothing
+        Just     -- "== v"
+        (\_ -> Nothing)
+        (\_ -> Nothing)
+        (\_ _ -> Nothing)
+        (\_ _ -> Nothing)
+
+-- | Various stuff we do after successful or unsuccessfully installing a bunch
+-- of packages. This includes:
+--
+--  * build reporting, local and remote
+--  * symlinking binaries
+--  * updating indexes
+--  * updating world file
+--  * error reporting
+--
+postInstallActions :: Verbosity
+                   -> InstallArgs
+                   -> [UserTarget]
+                   -> InstallPlan
+                   -> IO ()
+postInstallActions verbosity
+  (packageDBs, _, comp, platform, conf, useSandbox, mSandboxPkgInfo
+  ,globalFlags, configFlags, _, installFlags, _)
+  targets installPlan = do
+
+  unless oneShot $
+    World.insert verbosity worldFile
+      --FIXME: does not handle flags
+      [ World.WorldPkgInfo dep []
+      | UserTargetNamed dep <- targets ]
+
+  let buildReports = BuildReports.fromInstallPlan platform (compilerId comp)
+                       installPlan
+  BuildReports.storeLocal (compilerInfo comp)
+                          (fromNubList $ installSummaryFile installFlags)
+                          buildReports
+                          platform
+  when (reportingLevel >= AnonymousReports) $
+    BuildReports.storeAnonymous buildReports
+  when (reportingLevel == DetailedReports) $
+    storeDetailedBuildReports verbosity logsDir buildReports
+
+  regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
+                         configFlags installFlags installPlan
+
+  symlinkBinaries verbosity platform comp configFlags installFlags installPlan
+
+  printBuildFailures installPlan
+
+  updateSandboxTimestampsFile useSandbox mSandboxPkgInfo
+                              comp platform installPlan
+
+  where
+    reportingLevel = fromFlag (installBuildReports installFlags)
+    logsDir        = fromFlag (globalLogsDir globalFlags)
+    oneShot        = fromFlag (installOneShot installFlags)
+    worldFile      = fromFlag $ globalWorldFile globalFlags
+
+storeDetailedBuildReports :: Verbosity -> FilePath
+                          -> [(BuildReports.BuildReport, Maybe Repo)] -> IO ()
+storeDetailedBuildReports verbosity logsDir reports = sequence_
+  [ do dotCabal <- defaultCabalDir
+       let logFileName = display (BuildReports.package report) <.> "log"
+           logFile     = logsDir </> logFileName
+           reportsDir  = dotCabal </> "reports" </> remoteRepoName remoteRepo
+           reportFile  = reportsDir </> logFileName
+
+       handleMissingLogFile $ do
+         buildLog <- readFile logFile
+         createDirectoryIfMissing True reportsDir -- FIXME
+         writeFile reportFile (show (BuildReports.show report, buildLog))
+
+  | (report, Just repo) <- reports
+  , Just remoteRepo <- [maybeRepoRemote repo]
+  , isLikelyToHaveLogFile (BuildReports.installOutcome report) ]
+
+  where
+    isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True
+    isLikelyToHaveLogFile BuildReports.BuildFailed     {} = True
+    isLikelyToHaveLogFile BuildReports.InstallFailed   {} = True
+    isLikelyToHaveLogFile BuildReports.InstallOk       {} = True
+    isLikelyToHaveLogFile _                               = False
+
+    handleMissingLogFile = Exception.handleJust missingFile $ \ioe ->
+      warn verbosity $ "Missing log file for build report: "
+                    ++ fromMaybe "" (ioeGetFileName ioe)
+
+    missingFile ioe
+      | isDoesNotExistError ioe = Just ioe
+    missingFile _ = Nothing
+
+
+regenerateHaddockIndex :: Verbosity
+                       -> [PackageDB]
+                       -> Compiler
+                       -> Platform
+                       -> ProgramConfiguration
+                       -> UseSandbox
+                       -> ConfigFlags
+                       -> InstallFlags
+                       -> InstallPlan
+                       -> IO ()
+regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
+                       configFlags installFlags installPlan
+  | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do
+
+  defaultDirs <- InstallDirs.defaultInstallDirs
+                   (compilerFlavor comp)
+                   (fromFlag 
(configUserInstall configFlags)) + True + let indexFileTemplate = fromFlag (installHaddockIndex installFlags) + indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate + + notice verbosity $ + "Updating documentation index " ++ indexFile + + --TODO: might be nice if the install plan gave us the new InstalledPackageInfo + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + Haddock.regenerateHaddockIndex verbosity installedPkgIndex conf indexFile + + | otherwise = return () + where + haddockIndexFileIsRequested = + fromFlag (installDocumentation installFlags) + && isJust (flagToMaybe (installHaddockIndex installFlags)) + + -- We want to regenerate the index if some new documentation was actually + -- installed. Since the index can be only per-user or per-sandbox (see + -- #1337), we don't do it for global installs or special cases where we're + -- installing into a specific db. + shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall) + && someDocsWereInstalled installPlan + where + someDocsWereInstalled = any installedDocs . InstallPlan.toList + normalUserInstall = (UserPackageDB `elem` packageDBs) + && all (not . isSpecificPackageDB) packageDBs + + installedDocs (InstallPlan.Installed _ _ (BuildOk DocsOk _ _)) = True + installedDocs _ = False + isSpecificPackageDB (SpecificPackageDB _) = True + isSpecificPackageDB _ = False + + substHaddockIndexFileName defaultDirs = fromPathTemplate + . substPathTemplate env + where + env = env0 ++ installDirsTemplateEnv absoluteDirs + env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp) + ++ InstallDirs.platformTemplateEnv platform + ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform + absoluteDirs = InstallDirs.substituteInstallDirTemplates + env0 templateDirs + templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + + +symlinkBinaries :: Verbosity + -> Platform -> Compiler + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> IO () +symlinkBinaries verbosity platform comp configFlags installFlags plan = do + failed <- InstallSymlink.symlinkBinaries platform comp + configFlags installFlags + plan + case failed of + [] -> return () + [(_, exe, path)] -> + warn verbosity $ + "could not create a symlink in " ++ bindir ++ " for " + ++ exe ++ " because the file exists there already but is not " + ++ "managed by cabal. You can create a symlink for this executable " + ++ "manually if you wish. The executable file has been installed at " + ++ path + exes -> + warn verbosity $ + "could not create symlinks in " ++ bindir ++ " for " + ++ intercalate ", " [ exe | (_, exe, _) <- exes ] + ++ " because the files exist there already and are not " + ++ "managed by cabal. You can create symlinks for these executables " + ++ "manually if you wish. The executable files have been installed at " + ++ intercalate ", " [ path | (_, _, path) <- exes ] + where + bindir = fromFlag (installSymlinkBinDir installFlags) + + +printBuildFailures :: InstallPlan + -> IO () +printBuildFailures plan = + case [ (pkg, reason) + | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of + [] -> return () + failed -> die . unlines + $ "Error: some packages failed to install:" + : [ display (packageId pkg) ++ printFailureReason reason + | (pkg, reason) <- failed ] + where + printFailureReason reason = case reason of + DependentFailed pkgid -> " depends on " ++ display pkgid + ++ " which failed to install." 
+ DownloadFailed e -> " failed while downloading the package." + ++ showException e + UnpackFailed e -> " failed while unpacking the package." + ++ showException e + ConfigureFailed e -> " failed during the configure step." + ++ showException e + BuildFailed e -> " failed during the building phase." + ++ showException e + TestsFailed e -> " failed during the tests phase." + ++ showException e + InstallFailed e -> " failed during the final install step." + ++ showException e + + -- This will never happen, but we include it for completeness + PlanningFailed -> " failed during the planning phase." + + showException e = " The exception was:\n " ++ show e ++ maybeOOM e +#ifdef mingw32_HOST_OS + maybeOOM _ = "" +#else + maybeOOM e = maybe "" onExitFailure (fromException e) + onExitFailure (ExitFailure n) + | n == 9 || n == -9 = + "\nThis may be due to an out-of-memory condition." + onExitFailure _ = "" +#endif + + +-- | If we're working inside a sandbox and some add-source deps were installed, +-- update the timestamps of those deps. +updateSandboxTimestampsFile :: UseSandbox -> Maybe SandboxPackageInfo + -> Compiler -> Platform + -> InstallPlan + -> IO () +updateSandboxTimestampsFile (UseSandbox sandboxDir) + (Just (SandboxPackageInfo _ _ _ allAddSourceDeps)) + comp platform installPlan = + withUpdateTimestamps sandboxDir (compilerId comp) platform $ \_ -> do + let allInstalled = [ pkg | InstallPlan.Installed pkg _ _ + <- InstallPlan.toList installPlan ] + allSrcPkgs = [ pkg | ReadyPackage (ConfiguredPackage pkg _ _ _) _ + <- allInstalled ] + allPaths = [ pth | LocalUnpackedPackage pth + <- map packageSource allSrcPkgs] + allPathsCanonical <- mapM tryCanonicalizePath allPaths + return $! filter (`S.member` allAddSourceDeps) allPathsCanonical + +updateSandboxTimestampsFile _ _ _ _ _ = return () + +-- ------------------------------------------------------------ +-- * Actually do the installations +-- ------------------------------------------------------------ + +data InstallMisc = InstallMisc { + rootCmd :: Maybe FilePath, + libVersion :: Maybe Version + } + +-- | If logging is enabled, contains location of the log file and the verbosity +-- level for logging. +type UseLogFile = Maybe (PackageIdentifier -> UnitId -> FilePath, Verbosity) + +performInstallations :: Verbosity + -> InstallArgs + -> InstalledPackageIndex + -> InstallPlan + -> IO InstallPlan +performInstallations verbosity + (packageDBs, repoCtxt, comp, platform, conf, useSandbox, _, + globalFlags, configFlags, configExFlags, installFlags, haddockFlags) + installedPkgIndex installPlan = do + + -- With 'install -j' it can be a bit hard to tell whether a sandbox is used. 
+ whenUsingSandbox useSandbox $ \sandboxDir -> + when parallelInstall $ + notice verbosity $ "Notice: installing into a sandbox located at " + ++ sandboxDir + + jobControl <- if parallelInstall then newParallelJobControl + else newSerialJobControl + buildLimit <- newJobLimit numJobs + fetchLimit <- newJobLimit (min numJobs numFetchJobs) + installLock <- newLock -- serialise installation + cacheLock <- newLock -- serialise access to setup exe cache + + executeInstallPlan verbosity comp jobControl useLogFile installPlan $ \rpkg -> + installReadyPackage platform cinfo configFlags + rpkg $ \configFlags' src pkg pkgoverride -> + fetchSourcePackage verbosity repoCtxt fetchLimit src $ \src' -> + installLocalPackage verbosity buildLimit + (packageId pkg) src' distPref $ \mpath -> + installUnpackedPackage verbosity buildLimit installLock numJobs + (setupScriptOptions installedPkgIndex + cacheLock rpkg) + miscOptions configFlags' + installFlags haddockFlags + cinfo platform pkg rpkg pkgoverride mpath useLogFile + + where + cinfo = compilerInfo comp + + numJobs = determineNumJobs (installNumJobs installFlags) + numFetchJobs = 2 + parallelInstall = numJobs >= 2 + distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) + (configDistPref configFlags) + + setupScriptOptions index lock rpkg = + configureSetupScript + packageDBs + comp + platform + conf + distPref + (chooseCabalVersion configFlags (libVersion miscOptions)) + (Just lock) + parallelInstall + index + (Just rpkg) + + reportingLevel = fromFlag (installBuildReports installFlags) + logsDir = fromFlag (globalLogsDir globalFlags) + + -- Should the build output be written to a log file instead of stdout? + useLogFile :: UseLogFile + useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName) + logFileTemplate + where + installLogFile' = flagToMaybe $ installLogFile installFlags + defaultTemplate = toPathTemplate $ logsDir "$pkgid" <.> "log" + + -- If the user has specified --remote-build-reporting=detailed, use the + -- default log file location. If the --build-log option is set, use the + -- provided location. Otherwise don't use logging, unless building in + -- parallel (in which case the default location is used). + logFileTemplate :: Maybe PathTemplate + logFileTemplate + | useDefaultTemplate = Just defaultTemplate + | otherwise = installLogFile' + + -- If the user has specified --remote-build-reporting=detailed or + -- --build-log, use more verbose logging. + loggingVerbosity :: Verbosity + loggingVerbosity | overrideVerbosity = max Verbosity.verbose verbosity + | otherwise = verbosity + + useDefaultTemplate :: Bool + useDefaultTemplate + | reportingLevel == DetailedReports = True + | isJust installLogFile' = False + | parallelInstall = True + | otherwise = False + + overrideVerbosity :: Bool + overrideVerbosity + | reportingLevel == DetailedReports = True + | isJust installLogFile' = True + | parallelInstall = False + | otherwise = False + + substLogFileName :: PathTemplate -> PackageIdentifier -> UnitId -> FilePath + substLogFileName template pkg ipid = fromPathTemplate + . substPathTemplate env + $ template + where env = initialPathTemplateEnv (packageId pkg) + ipid + (compilerInfo comp) platform + + miscOptions = InstallMisc { + rootCmd = if fromFlag (configUserInstall configFlags) + || (isUseSandbox useSandbox) + then Nothing -- ignore --root-cmd if --user + -- or working inside a sandbox. 
+ else flagToMaybe (installRootCmd installFlags), + libVersion = flagToMaybe (configCabalVersion configExFlags) + } + + +executeInstallPlan :: Verbosity + -> Compiler + -> JobControl IO (PackageId, UnitId, BuildResult) + -> UseLogFile + -> InstallPlan + -> (ReadyPackage -> IO BuildResult) + -> IO InstallPlan +executeInstallPlan verbosity _comp jobCtl useLogFile plan0 installPkg = + tryNewTasks 0 plan0 + where + tryNewTasks taskCount plan = do + case InstallPlan.ready plan of + [] | taskCount == 0 -> return plan + | otherwise -> waitForTasks taskCount plan + pkgs -> do + sequence_ + [ do info verbosity $ "Ready to install " ++ display pkgid + spawnJob jobCtl $ do + buildResult <- installPkg pkg + let ipid = case buildResult of + Right (BuildOk _ _ (Just ipi)) -> + Installed.installedUnitId ipi + _ -> mkUnitId (display (packageId pkg)) + return (packageId pkg, ipid, buildResult) + | pkg <- pkgs + , let pkgid = packageId pkg ] + + let taskCount' = taskCount + length pkgs + plan' = InstallPlan.processing pkgs plan + waitForTasks taskCount' plan' + + waitForTasks taskCount plan = do + info verbosity $ "Waiting for install task to finish..." + (pkgid, ipid, buildResult) <- collectJob jobCtl + printBuildResult pkgid ipid buildResult + let taskCount' = taskCount-1 + plan' = updatePlan pkgid buildResult plan + tryNewTasks taskCount' plan' + + updatePlan :: PackageIdentifier -> BuildResult -> InstallPlan + -> InstallPlan + updatePlan pkgid (Right buildSuccess@(BuildOk _ _ mipkg)) = + InstallPlan.completed (Source.fakeUnitId pkgid) + mipkg buildSuccess + + updatePlan pkgid (Left buildFailure) = + InstallPlan.failed (Source.fakeUnitId pkgid) + buildFailure depsFailure + where + depsFailure = DependentFailed pkgid + -- So this first pkgid failed for whatever reason (buildFailure). + -- All the other packages that depended on this pkgid, which we + -- now cannot build, we mark as failing due to 'DependentFailed' + -- which kind of means it was not their fault. + + -- Print build log if something went wrong, and 'Installed $PKGID' + -- otherwise. + printBuildResult :: PackageId -> UnitId -> BuildResult -> IO () + printBuildResult pkgid ipid buildResult = case buildResult of + (Right _) -> notice verbosity $ "Installed " ++ display pkgid + (Left _) -> do + notice verbosity $ "Failed to install " ++ display pkgid + when (verbosity >= normal) $ + case useLogFile of + Nothing -> return () + Just (mkLogFileName, _) -> do + let logName = mkLogFileName pkgid ipid + putStr $ "Build log ( " ++ logName ++ " ):\n" + printFile logName + + printFile :: FilePath -> IO () + printFile path = readFile path >>= putStr + +-- | Call an installer for an 'SourcePackage' but override the configure +-- flags with the ones given by the 'ReadyPackage'. In particular the +-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly +-- versioned package dependencies. So we ignore any previous partial flag +-- assignment or dependency constraints and use the new ones. +-- +-- NB: when updating this function, don't forget to also update +-- 'configurePackage' in D.C.Configure. 
+installReadyPackage :: Platform -> CompilerInfo + -> ConfigFlags + -> ReadyPackage + -> (ConfigFlags -> PackageLocation (Maybe FilePath) + -> PackageDescription + -> PackageDescriptionOverride + -> a) + -> a +installReadyPackage platform cinfo configFlags + (ReadyPackage (ConfiguredPackage + (SourcePackage _ gpkg source pkgoverride) + flags stanzas _) + deps) + installPkg = + installPkg configFlags { + configConfigurationsFlags = flags, + -- We generate the legacy constraints as well as the new style precise deps. + -- In the end only one set gets passed to Setup.hs configure, depending on + -- the Cabal version we are talking to. + configConstraints = [ thisPackageVersion (packageId deppkg) + | deppkg <- CD.nonSetupDeps deps ], + configDependencies = [ (packageName (Installed.sourcePackageId deppkg), + Installed.installedUnitId deppkg) + | deppkg <- CD.nonSetupDeps deps ], + -- Use '--exact-configuration' if supported. + configExactConfiguration = toFlag True, + configBenchmarks = toFlag False, + configTests = toFlag (TestStanzas `elem` stanzas) + } source pkg pkgoverride + where + pkg = case finalizePackageDescription flags + (const True) + platform cinfo [] (enableStanzas stanzas gpkg) of + Left _ -> error "finalizePackageDescription ReadyPackage failed" + Right (desc, _) -> desc + +fetchSourcePackage + :: Verbosity + -> RepoContext + -> JobLimit + -> PackageLocation (Maybe FilePath) + -> (PackageLocation FilePath -> IO BuildResult) + -> IO BuildResult +fetchSourcePackage verbosity repoCtxt fetchLimit src installPkg = do + fetched <- checkFetched src + case fetched of + Just src' -> installPkg src' + Nothing -> onFailure DownloadFailed $ do + loc <- withJobLimit fetchLimit $ + fetchPackage verbosity repoCtxt src + installPkg loc + + +installLocalPackage + :: Verbosity + -> JobLimit + -> PackageIdentifier -> PackageLocation FilePath -> FilePath + -> (Maybe FilePath -> IO BuildResult) + -> IO BuildResult +installLocalPackage verbosity jobLimit pkgid location distPref installPkg = + + case location of + + LocalUnpackedPackage dir -> + installPkg (Just dir) + + LocalTarballPackage tarballPath -> + installLocalTarballPackage verbosity jobLimit + pkgid tarballPath distPref installPkg + + RemoteTarballPackage _ tarballPath -> + installLocalTarballPackage verbosity jobLimit + pkgid tarballPath distPref installPkg + + RepoTarballPackage _ _ tarballPath -> + installLocalTarballPackage verbosity jobLimit + pkgid tarballPath distPref installPkg + + +installLocalTarballPackage + :: Verbosity + -> JobLimit + -> PackageIdentifier -> FilePath -> FilePath + -> (Maybe FilePath -> IO BuildResult) + -> IO BuildResult +installLocalTarballPackage verbosity jobLimit pkgid + tarballPath distPref installPkg = do + tmp <- getTemporaryDirectory + withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath -> + onFailure UnpackFailed $ do + let relUnpackedPath = display pkgid + absUnpackedPath = tmpDirPath relUnpackedPath + descFilePath = absUnpackedPath + display (packageName pkgid) <.> "cabal" + withJobLimit jobLimit $ do + info verbosity $ "Extracting " ++ tarballPath + ++ " to " ++ tmpDirPath ++ "..." + extractTarGzFile tmpDirPath relUnpackedPath tarballPath + exists <- doesFileExist descFilePath + when (not exists) $ + die $ "Package .cabal file not found: " ++ show descFilePath + maybeRenameDistDir absUnpackedPath + + installPkg (Just absUnpackedPath) + + where + -- 'cabal sdist' puts pre-generated files in the 'dist' + -- directory. 
This fails when a nonstandard build directory name + -- is used (as is the case with sandboxes), so we need to rename + -- the 'dist' dir here. + -- + -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still + -- fails even with this workaround. We probably can live with that. + maybeRenameDistDir :: FilePath -> IO () + maybeRenameDistDir absUnpackedPath = do + let distDirPath = absUnpackedPath defaultDistPref + distDirPathTmp = absUnpackedPath (defaultDistPref ++ "-tmp") + distDirPathNew = absUnpackedPath distPref + distDirExists <- doesDirectoryExist distDirPath + when (distDirExists + && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do + -- NB: we need to handle the case when 'distDirPathNew' is a + -- subdirectory of 'distDirPath' (e.g. the former is + -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist'). + debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '" + ++ distDirPathTmp ++ "'." + renameDirectory distDirPath distDirPathTmp + when (distDirPath `isPrefixOf` distDirPathNew) $ + createDirectoryIfMissingVerbose verbosity False distDirPath + debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '" + ++ distDirPathNew ++ "'." + renameDirectory distDirPathTmp distDirPathNew + +installUnpackedPackage + :: Verbosity + -> JobLimit + -> Lock + -> Int + -> SetupScriptOptions + -> InstallMisc + -> ConfigFlags + -> InstallFlags + -> HaddockFlags + -> CompilerInfo + -> Platform + -> PackageDescription + -> ReadyPackage + -> PackageDescriptionOverride + -> Maybe FilePath -- ^ Directory to change to before starting the installation. + -> UseLogFile -- ^ File to log output to (if any) + -> IO BuildResult +installUnpackedPackage verbosity buildLimit installLock numJobs + scriptOptions miscOptions + configFlags installFlags haddockFlags + cinfo platform pkg rpkg pkgoverride workingDir useLogFile = do + + -- Override the .cabal file if necessary + case pkgoverride of + Nothing -> return () + Just pkgtxt -> do + let descFilePath = fromMaybe "." workingDir + display (packageName pkgid) <.> "cabal" + info verbosity $ + "Updating " ++ display (packageName pkgid) <.> "cabal" + ++ " with the latest revision from the index." + writeFileAtomic descFilePath pkgtxt + + -- Compute the IPID + let flags (ReadyPackage (ConfiguredPackage _ x _ _) _) = x + cid = Configure.computeComponentId (PackageDescription.package pkg) CLibName + (map (\(SimpleUnitId cid0) -> cid0) (CD.libraryDeps (depends rpkg))) (flags rpkg) + ipid = SimpleUnitId cid + + -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if + -- the setup script was compiled against an old version of the Cabal lib). + configFlags' <- addDefaultInstallDirs ipid configFlags + -- Filter out flags not supported by the old versions of the Cabal lib. + let configureFlags :: Version -> ConfigFlags + configureFlags = filterConfigureFlags configFlags' { + configVerbosity = toFlag verbosity' + } + + -- Path to the optional log file. + mLogPath <- maybeLogPath ipid + + logDirChange (maybe putStr appendFile mLogPath) workingDir $ do + -- Configure phase + onFailure ConfigureFailed $ withJobLimit buildLimit $ do + when (numJobs > 1) $ notice verbosity $ + "Configuring " ++ display pkgid ++ "..." + setup configureCommand configureFlags mLogPath + + -- Build phase + onFailure BuildFailed $ do + when (numJobs > 1) $ notice verbosity $ + "Building " ++ display pkgid ++ "..." 
+ setup buildCommand' buildFlags mLogPath + + -- Doc generation phase + docsResult <- if shouldHaddock + then (do setup haddockCommand haddockFlags' mLogPath + return DocsOk) + `catchIO` (\_ -> return DocsFailed) + `catchExit` (\_ -> return DocsFailed) + else return DocsNotTried + + -- Tests phase + onFailure TestsFailed $ do + when (testsEnabled && PackageDescription.hasTests pkg) $ + setup Cabal.testCommand testFlags mLogPath + + let testsResult | testsEnabled = TestsOk + | otherwise = TestsNotTried + + -- Install phase + onFailure InstallFailed $ criticalSection installLock $ do + -- Capture installed package configuration file + maybePkgConf <- maybeGenPkgConf mLogPath + + -- Actual installation + withWin32SelfUpgrade verbosity ipid configFlags + cinfo platform pkg $ do + case rootCmd miscOptions of + (Just cmd) -> reexec cmd + Nothing -> do + setup Cabal.copyCommand copyFlags mLogPath + when shouldRegister $ do + setup Cabal.registerCommand registerFlags mLogPath + return (Right (BuildOk docsResult testsResult maybePkgConf)) + + where + pkgid = packageId pkg + buildCommand' = buildCommand defaultProgramConfiguration + buildFlags _ = emptyBuildFlags { + buildDistPref = configDistPref configFlags, + buildVerbosity = toFlag verbosity' + } + shouldHaddock = fromFlag (installDocumentation installFlags) + haddockFlags' _ = haddockFlags { + haddockVerbosity = toFlag verbosity', + haddockDistPref = configDistPref configFlags + } + testsEnabled = fromFlag (configTests configFlags) + && fromFlagOrDefault False (installRunTests installFlags) + testFlags _ = Cabal.emptyTestFlags { + Cabal.testDistPref = configDistPref configFlags + } + copyFlags _ = Cabal.emptyCopyFlags { + Cabal.copyDistPref = configDistPref configFlags, + Cabal.copyDest = toFlag InstallDirs.NoCopyDest, + Cabal.copyVerbosity = toFlag verbosity' + } + shouldRegister = PackageDescription.hasLibs pkg + registerFlags _ = Cabal.emptyRegisterFlags { + Cabal.regDistPref = configDistPref configFlags, + Cabal.regVerbosity = toFlag verbosity' + } + verbosity' = maybe verbosity snd useLogFile + tempTemplate name = name ++ "-" ++ display pkgid + + addDefaultInstallDirs :: UnitId -> ConfigFlags -> IO ConfigFlags + addDefaultInstallDirs ipid configFlags' = do + defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False + return $ configFlags' { + configInstallDirs = fmap Cabal.Flag . 
+ InstallDirs.substituteInstallDirTemplates env $ + InstallDirs.combineInstallDirs fromFlagOrDefault + defInstallDirs (configInstallDirs configFlags) + } + where + CompilerId flavor _ = compilerInfoId cinfo + env = initialPathTemplateEnv pkgid ipid cinfo platform + userInstall = fromFlagOrDefault defaultUserInstall + (configUserInstall configFlags') + + maybeGenPkgConf :: Maybe FilePath + -> IO (Maybe Installed.InstalledPackageInfo) + maybeGenPkgConf mLogPath = + if shouldRegister then do + tmp <- getTemporaryDirectory + withTempFile tmp (tempTemplate "pkgConf") $ \pkgConfFile handle -> do + hClose handle + let registerFlags' version = (registerFlags version) { + Cabal.regGenPkgConf = toFlag (Just pkgConfFile) + } + setup Cabal.registerCommand registerFlags' mLogPath + withUTF8FileContents pkgConfFile $ \pkgConfText -> + case Installed.parseInstalledPackageInfo pkgConfText of + Installed.ParseFailed perror -> pkgConfParseFailed perror + Installed.ParseOk warns pkgConf -> do + unless (null warns) $ + warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) + return (Just pkgConf) + else return Nothing + + pkgConfParseFailed :: Installed.PError -> IO a + pkgConfParseFailed perror = + die $ "Couldn't parse the output of 'setup register --gen-pkg-config':" + ++ show perror + + maybeLogPath :: UnitId -> IO (Maybe FilePath) + maybeLogPath ipid = + case useLogFile of + Nothing -> return Nothing + Just (mkLogFileName, _) -> do + let logFileName = mkLogFileName (packageId pkg) ipid + logDir = takeDirectory logFileName + unless (null logDir) $ createDirectoryIfMissing True logDir + logFileExists <- doesFileExist logFileName + when logFileExists $ removeFile logFileName + return (Just logFileName) + + setup cmd flags mLogPath = + Exception.bracket + (traverse (\path -> openFile path AppendMode) mLogPath) + (traverse_ hClose) + (\logFileHandle -> + setupWrapper verbosity + scriptOptions { useLoggingHandle = logFileHandle + , useWorkingDir = workingDir } + (Just pkg) + cmd flags []) + + reexec cmd = do + -- look for our own executable file and re-exec ourselves using a helper + -- program like sudo to elevate privileges: + self <- getExecutablePath + weExist <- doesFileExist self + if weExist + then inDir workingDir $ + rawSystemExit verbosity cmd + [self, "install", "--only" + ,"--verbose=" ++ showForCabal verbosity] + else die $ "Unable to find cabal executable at: " ++ self + + +-- helper +onFailure :: (SomeException -> BuildFailure) -> IO BuildResult -> IO BuildResult +onFailure result action = + action `catches` + [ Handler $ \ioe -> handler (ioe :: IOException) + , Handler $ \exit -> handler (exit :: ExitCode) + ] + where + handler :: Exception e => e -> IO BuildResult + handler = return . Left . result . 
toException + + +-- ------------------------------------------------------------ +-- * Weird windows hacks +-- ------------------------------------------------------------ + +withWin32SelfUpgrade :: Verbosity + -> UnitId + -> ConfigFlags + -> CompilerInfo + -> Platform + -> PackageDescription + -> IO a -> IO a +withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action +withWin32SelfUpgrade verbosity ipid configFlags cinfo platform pkg action = do + + defaultDirs <- InstallDirs.defaultInstallDirs + compFlavor + (fromFlag (configUserInstall configFlags)) + (PackageDescription.hasLibs pkg) + + Win32SelfUpgrade.possibleSelfUpgrade verbosity + (exeInstallPaths defaultDirs) action + + where + pkgid = packageId pkg + (CompilerId compFlavor _) = compilerInfoId cinfo + + exeInstallPaths defaultDirs = + [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension + | exe <- PackageDescription.executables pkg + , PackageDescription.buildable (PackageDescription.buildInfo exe) + , let exeName = prefix ++ PackageDescription.exeName exe ++ suffix + prefix = substTemplate prefixTemplate + suffix = substTemplate suffixTemplate ] + where + fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") + prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) + suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) + templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + absoluteDirs = InstallDirs.absoluteInstallDirs + pkgid ipid + cinfo InstallDirs.NoCopyDest + platform templateDirs + substTemplate = InstallDirs.fromPathTemplate + . InstallDirs.substPathTemplate env + where env = InstallDirs.initialPathTemplateEnv pkgid ipid + cinfo platform diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/InstallPlan.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/InstallPlan.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/InstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/InstallPlan.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,788 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.InstallPlan +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Package installation plan +-- +----------------------------------------------------------------------------- +module Distribution.Client.InstallPlan ( + InstallPlan, + GenericInstallPlan, + PlanPackage, + GenericPlanPackage(..), + + -- * Operations on 'InstallPlan's + new, + toList, + mapPreservingGraph, + + ready, + processing, + completed, + failed, + remove, + preexisting, + preinstalled, + + showPlanIndex, + showInstallPlan, + + -- * Checking validity of plans + valid, + closed, + consistent, + acyclic, + + -- ** Details on invalid plans + PlanProblem(..), + showPlanProblem, + problems, + + -- ** Querying the install plan + dependencyClosure, + reverseDependencyClosure, + topologicalOrder, + reverseTopologicalOrder, + ) where + +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo ) +import Distribution.Package + ( PackageIdentifier(..), 
PackageName(..), Package(..) + , HasUnitId(..), UnitId(..) ) +import Distribution.Client.Types + ( BuildSuccess, BuildFailure + , PackageFixedDeps(..), ConfiguredPackage + , GenericReadyPackage(..), fakeUnitId ) +import Distribution.Version + ( Version ) +import Distribution.Client.ComponentDeps (ComponentDeps) +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Simple.PackageIndex + ( PackageIndex ) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Client.PlanIndex + ( FakeMap ) +import qualified Distribution.Client.PlanIndex as PlanIndex +import Distribution.Text + ( display ) + +import Data.List + ( foldl', intercalate ) +import Data.Maybe + ( fromMaybe, catMaybes ) +import qualified Data.Graph as Graph +import Data.Graph (Graph) +import qualified Data.Tree as Tree +import Distribution.Compat.Binary (Binary(..)) +import GHC.Generics +import Control.Exception + ( assert ) +import qualified Data.Map as Map +import qualified Data.Traversable as T + + +-- When cabal tries to install a number of packages, including all their +-- dependencies it has a non-trivial problem to solve. +-- +-- The Problem: +-- +-- In general we start with a set of installed packages and a set of source +-- packages. +-- +-- Installed packages have fixed dependencies. They have already been built and +-- we know exactly what packages they were built against, including their exact +-- versions. +-- +-- Source package have somewhat flexible dependencies. They are specified as +-- version ranges, though really they're predicates. To make matters worse they +-- have conditional flexible dependencies. Configuration flags can affect which +-- packages are required and can place additional constraints on their +-- versions. +-- +-- These two sets of package can and usually do overlap. There can be installed +-- packages that are also available as source packages which means they could +-- be re-installed if required, though there will also be packages which are +-- not available as source and cannot be re-installed. Very often there will be +-- extra versions available than are installed. Sometimes we may like to prefer +-- installed packages over source ones or perhaps always prefer the latest +-- available version whether installed or not. +-- +-- The goal is to calculate an installation plan that is closed, acyclic and +-- consistent and where every configured package is valid. +-- +-- An installation plan is a set of packages that are going to be used +-- together. It will consist of a mixture of installed packages and source +-- packages along with their exact version dependencies. An installation plan +-- is closed if for every package in the set, all of its dependencies are +-- also in the set. It is consistent if for every package in the set, all +-- dependencies which target that package have the same version. + +-- Note that plans do not necessarily compose. You might have a valid plan for +-- package A and a valid plan for package B. That does not mean the composition +-- is simultaneously valid for A and B. In particular you're most likely to +-- have problems with inconsistent dependencies. +-- On the other hand it is true that every closed sub plan is valid. + +-- | Packages in an install plan +-- +-- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage' +-- intentionally have no 'PackageInstalled' instance. 
This is important:
+-- PackageInstalled returns only library dependencies, but for packages that
+-- aren't yet installed we know many more kinds of dependencies (setup
+-- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on
+-- dependencies in cabal-install should consider what to do with these
+-- dependencies; if we give a 'PackageInstalled' instance it would be too easy
+-- to get this wrong (and, for instance, call graph traversal functions from
+-- Cabal rather than from cabal-install). Instead, see 'PackageFixedDeps'.
+data GenericPlanPackage ipkg srcpkg iresult ifailure
+   = PreExisting ipkg
+   | Configured srcpkg
+   | Processing (GenericReadyPackage srcpkg ipkg)
+   | Installed (GenericReadyPackage srcpkg ipkg) (Maybe ipkg) iresult
+   | Failed srcpkg ifailure
+  deriving (Eq, Show, Generic)
+
+instance (Binary ipkg, Binary srcpkg, Binary iresult, Binary ifailure)
+      => Binary (GenericPlanPackage ipkg srcpkg iresult ifailure)
+
+type PlanPackage = GenericPlanPackage
+                   InstalledPackageInfo ConfiguredPackage
+                   BuildSuccess BuildFailure
+
+instance (Package ipkg, Package srcpkg) =>
+         Package (GenericPlanPackage ipkg srcpkg iresult ifailure) where
+  packageId (PreExisting ipkg) = packageId ipkg
+  packageId (Configured spkg) = packageId spkg
+  packageId (Processing rpkg) = packageId rpkg
+  packageId (Installed rpkg _ _) = packageId rpkg
+  packageId (Failed spkg _) = packageId spkg
+
+instance (PackageFixedDeps srcpkg,
+          PackageFixedDeps ipkg, HasUnitId ipkg) =>
+         PackageFixedDeps (GenericPlanPackage ipkg srcpkg iresult ifailure) where
+  depends (PreExisting pkg) = depends pkg
+  depends (Configured pkg) = depends pkg
+  depends (Processing pkg) = depends pkg
+  depends (Installed pkg _ _) = depends pkg
+  depends (Failed pkg _) = depends pkg
+
+instance (HasUnitId ipkg, HasUnitId srcpkg) =>
+         HasUnitId
+         (GenericPlanPackage ipkg srcpkg iresult ifailure) where
+  installedUnitId (PreExisting ipkg ) = installedUnitId ipkg
+  installedUnitId (Configured spkg) = installedUnitId spkg
+  installedUnitId (Processing rpkg) = installedUnitId rpkg
+  -- NB: defer to the actual installed package info in this case
+  installedUnitId (Installed _ (Just ipkg) _) = installedUnitId ipkg
+  installedUnitId (Installed rpkg _ _) = installedUnitId rpkg
+  installedUnitId (Failed spkg _) = installedUnitId spkg
+
+
+data GenericInstallPlan ipkg srcpkg iresult ifailure = GenericInstallPlan {
+    planIndex      :: !(PlanIndex ipkg srcpkg iresult ifailure),
+    planFakeMap    :: !FakeMap,
+    planIndepGoals :: !Bool,
+
+    -- | Cached (lazily) graph
+    --
+    -- The 'Graph' representation works in terms of integer node ids, so we
+    -- have to keep mapping to and from our meaningful nodes, which of course
+    -- are package ids.
+    --
+    planGraph      :: Graph,
+    planGraphRev   :: Graph,                  -- ^ Reverse deps, transposed
+    planPkgIdOf    :: Graph.Vertex -> UnitId, -- ^ mapping back to package ids
+    planVertexOf   :: UnitId -> Graph.Vertex  -- ^ mapping into node ids
+  }
+
+-- | Much like 'planPkgIdOf', but mapping back to full packages.
+planPkgOf :: GenericInstallPlan ipkg srcpkg iresult ifailure
+          -> Graph.Vertex
+          -> GenericPlanPackage ipkg srcpkg iresult ifailure
+planPkgOf plan v =
+    case PackageIndex.lookupUnitId (planIndex plan)
+                                   (planPkgIdOf plan v) of
+      Just pkg -> pkg
+      Nothing  -> error "InstallPlan: internal error: planPkgOf lookup failed"
+
+
+-- | 'GenericInstallPlan' specialised to most commonly used types.
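As a rough, self-contained illustration of the life cycle encoded by the plan package states above (everything below uses invented names and is not part of this patch): a configured package is picked up for processing once it is ready, a processing package ends up installed or failed, and a configured package can also be failed outright when one of its dependencies fails.

-- Stand-ins for PreExisting / Configured / Processing / Installed / Failed.
data PkgState = PreExists | Config | Building | Done | Broken
  deriving (Eq, Show)

-- | The state changes a plan executor is allowed to make.
canStep :: PkgState -> PkgState -> Bool
canStep Config   Building = True  -- a ready package is handed to a build job
canStep Building Done     = True  -- build, copy and register succeeded
canStep Building Broken   = True  -- the build itself failed
canStep Config   Broken   = True  -- a dependency failed, so this one fails too
canStep _        _        = False

main :: IO ()
main = print [ canStep Config Building   -- True
             , canStep Building Done     -- True
             , canStep Done Building ]   -- False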
+type InstallPlan = GenericInstallPlan + InstalledPackageInfo ConfiguredPackage + BuildSuccess BuildFailure + +type PlanIndex ipkg srcpkg iresult ifailure = + PackageIndex (GenericPlanPackage ipkg srcpkg iresult ifailure) + +invariant :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => GenericInstallPlan ipkg srcpkg iresult ifailure -> Bool +invariant plan = + valid (planFakeMap plan) + (planIndepGoals plan) + (planIndex plan) + +-- | Smart constructor that deals with caching the 'Graph' representation. +-- +mkInstallPlan :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => PlanIndex ipkg srcpkg iresult ifailure + -> FakeMap + -> Bool + -> GenericInstallPlan ipkg srcpkg iresult ifailure +mkInstallPlan index fakeMap indepGoals = + GenericInstallPlan { + planIndex = index, + planFakeMap = fakeMap, + planIndepGoals = indepGoals, + + -- lazily cache the graph stuff: + planGraph = graph, + planGraphRev = Graph.transposeG graph, + planPkgIdOf = vertexToPkgId, + planVertexOf = fromMaybe noSuchPkgId . pkgIdToVertex + } + where + (graph, vertexToPkgId, pkgIdToVertex) = + PlanIndex.dependencyGraph fakeMap index + noSuchPkgId = internalError "package is not in the graph" + +internalError :: String -> a +internalError msg = error $ "InstallPlan: internal error: " ++ msg + +instance (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg, + Binary ipkg, Binary srcpkg, Binary iresult, Binary ifailure) + => Binary (GenericInstallPlan ipkg srcpkg iresult ifailure) where + put GenericInstallPlan { + planIndex = index, + planFakeMap = fakeMap, + planIndepGoals = indepGoals + } = put (index, fakeMap, indepGoals) + + get = do + (index, fakeMap, indepGoals) <- get + return $! mkInstallPlan index fakeMap indepGoals + +showPlanIndex :: (HasUnitId ipkg, HasUnitId srcpkg) + => PlanIndex ipkg srcpkg iresult ifailure -> String +showPlanIndex index = + intercalate "\n" (map showPlanPackage (PackageIndex.allPackages index)) + where showPlanPackage p = + showPlanPackageTag p ++ " " + ++ display (packageId p) ++ " (" + ++ display (installedUnitId p) ++ ")" + +showInstallPlan :: (HasUnitId ipkg, HasUnitId srcpkg) + => GenericInstallPlan ipkg srcpkg iresult ifailure -> String +showInstallPlan plan = + showPlanIndex (planIndex plan) ++ "\n" ++ + "fake map:\n " ++ + intercalate "\n " (map showKV (Map.toList (planFakeMap plan))) + where showKV (k,v) = display k ++ " -> " ++ display v + +showPlanPackageTag :: GenericPlanPackage ipkg srcpkg iresult ifailure -> String +showPlanPackageTag (PreExisting _) = "PreExisting" +showPlanPackageTag (Configured _) = "Configured" +showPlanPackageTag (Processing _) = "Processing" +showPlanPackageTag (Installed _ _ _) = "Installed" +showPlanPackageTag (Failed _ _) = "Failed" + +-- | Build an installation plan from a valid set of resolved packages. +-- +new :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => Bool + -> PlanIndex ipkg srcpkg iresult ifailure + -> Either [PlanProblem ipkg srcpkg iresult ifailure] + (GenericInstallPlan ipkg srcpkg iresult ifailure) +new indepGoals index = + -- NB: Need to pre-initialize the fake-map with pre-existing + -- packages + let isPreExisting (PreExisting _) = True + isPreExisting _ = False + fakeMap = Map.fromList + . map (\p -> (fakeUnitId (packageId p) + ,installedUnitId p)) + . 
filter isPreExisting + $ PackageIndex.allPackages index in + case problems fakeMap indepGoals index of + [] -> Right (mkInstallPlan index fakeMap indepGoals) + probs -> Left probs + +toList :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] +toList = PackageIndex.allPackages . planIndex + +-- | Remove packages from the install plan. This will result in an +-- error if there are remaining packages that depend on any matching +-- package. This is primarily useful for obtaining an install plan for +-- the dependencies of a package or set of packages without actually +-- installing the package itself, as when doing development. +-- +remove :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => (GenericPlanPackage ipkg srcpkg iresult ifailure -> Bool) + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> Either [PlanProblem ipkg srcpkg iresult ifailure] + (GenericInstallPlan ipkg srcpkg iresult ifailure) +remove shouldRemove plan = + new (planIndepGoals plan) newIndex + where + newIndex = PackageIndex.fromList $ + filter (not . shouldRemove) (toList plan) + +-- | The packages that are ready to be installed. That is they are in the +-- configured state and have all their dependencies installed already. +-- The plan is complete if the result is @[]@. +-- +ready :: forall ipkg srcpkg iresult ifailure. PackageFixedDeps srcpkg + => GenericInstallPlan ipkg srcpkg iresult ifailure + -> [GenericReadyPackage srcpkg ipkg] +ready plan = assert check readyPackages + where + check = if null readyPackages && null processingPackages + then null configuredPackages + else True + configuredPackages = [ pkg | Configured pkg <- toList plan ] + processingPackages = [ pkg | Processing pkg <- toList plan] + + readyPackages :: [GenericReadyPackage srcpkg ipkg] + readyPackages = catMaybes (map (lookupReadyPackage plan) configuredPackages) + +lookupReadyPackage :: forall ipkg srcpkg iresult ifailure. + PackageFixedDeps srcpkg + => GenericInstallPlan ipkg srcpkg iresult ifailure + -> srcpkg + -> Maybe (GenericReadyPackage srcpkg ipkg) +lookupReadyPackage plan pkg = do + deps <- hasAllInstalledDeps pkg + return (ReadyPackage pkg deps) + where + + hasAllInstalledDeps :: srcpkg -> Maybe (ComponentDeps [ipkg]) + hasAllInstalledDeps = T.mapM (mapM isInstalledDep) . depends + + isInstalledDep :: UnitId -> Maybe ipkg + isInstalledDep pkgid = + -- NB: Need to check if the ID has been updated in planFakeMap, in which + -- case we might be dealing with an old pointer + case PlanIndex.fakeLookupUnitId + (planFakeMap plan) (planIndex plan) pkgid + of + Just (PreExisting ipkg) -> Just ipkg + Just (Configured _) -> Nothing + Just (Processing _) -> Nothing + Just (Installed _ (Just ipkg) _) -> Just ipkg + Just (Installed _ Nothing _) -> internalError depOnNonLib + Just (Failed _ _) -> internalError depOnFailed + Nothing -> internalError incomplete + incomplete = "install plan is not closed" + depOnFailed = "configured package depends on failed package" + depOnNonLib = "configured package depends on a non-library package" + +-- | Marks packages in the graph as currently processing (e.g. building). +-- +-- * The package must exist in the graph and be in the configured state. 
+-- +processing :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => [GenericReadyPackage srcpkg ipkg] + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg srcpkg iresult ifailure +processing pkgs plan = assert (invariant plan') plan' + where + plan' = plan { + planIndex = PackageIndex.merge (planIndex plan) processingPkgs + } + processingPkgs = PackageIndex.fromList [Processing pkg | pkg <- pkgs] + +-- | Marks a package in the graph as completed. Also saves the build result for +-- the completed package in the plan. +-- +-- * The package must exist in the graph and be in the processing state. +-- * The package must have had no uninstalled dependent packages. +-- +completed :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => UnitId + -> Maybe ipkg -> iresult + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg srcpkg iresult ifailure +completed pkgid mipkg buildResult plan = assert (invariant plan') plan' + where + plan' = plan { + -- NB: installation can change the IPID, so better + -- record it in the fake mapping... + planFakeMap = insert_fake_mapping mipkg + $ planFakeMap plan, + planIndex = PackageIndex.insert installed + . PackageIndex.deleteUnitId pkgid + $ planIndex plan + } + -- ...but be sure to use the *old* IPID for the lookup for the + -- preexisting record + installed = Installed (lookupProcessingPackage plan pkgid) mipkg buildResult + insert_fake_mapping (Just ipkg) = Map.insert pkgid (installedUnitId ipkg) + insert_fake_mapping _ = id + +-- | Marks a package in the graph as having failed. It also marks all the +-- packages that depended on it as having failed. +-- +-- * The package must exist in the graph and be in the processing +-- state. +-- +failed :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => UnitId -- ^ The id of the package that failed to install + -> ifailure -- ^ The build result to use for the failed package + -> ifailure -- ^ The build result to use for its dependencies + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg srcpkg iresult ifailure +failed pkgid buildResult buildResult' plan = assert (invariant plan') plan' + where + -- NB: failures don't update IPIDs + plan' = plan { + planIndex = PackageIndex.merge (planIndex plan) failures + } + ReadyPackage srcpkg _deps = lookupProcessingPackage plan pkgid + failures = PackageIndex.fromList + $ Failed srcpkg buildResult + : [ Failed pkg' buildResult' + | Just pkg' <- map checkConfiguredPackage + $ packagesThatDependOn plan pkgid ] + +-- | Lookup the reachable packages in the reverse dependency graph. +-- +packagesThatDependOn :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> UnitId + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] +packagesThatDependOn plan pkgid = map (planPkgOf plan) + . tail + . Graph.reachable (planGraphRev plan) + . planVertexOf plan + $ Map.findWithDefault pkgid pkgid (planFakeMap plan) + +-- | Lookup a package that we expect to be in the processing state. 
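To make the readiness condition used by 'ready' above concrete, here is a small self-contained sketch with plain Strings standing in for unit ids (readyPkgs and the example graph are invented, not this module's API): a configured package counts as ready once all of its direct dependencies are installed, so every completed build can unlock further packages.

import qualified Data.Map as Map
import qualified Data.Set as Set

-- | A dependency graph: package -> direct dependencies.
type DepGraph = Map.Map String [String]

-- | Packages that are not yet installed but whose dependencies all are.
readyPkgs :: DepGraph -> Set.Set String -> [String]
readyPkgs graph installed =
  [ pkg
  | (pkg, deps) <- Map.toList graph
  , not (pkg `Set.member` installed)
  , all (`Set.member` installed) deps ]

main :: IO ()
main = do
  let graph = Map.fromList
        [ ("base", []), ("stm", ["base"]), ("async", ["base", "stm"]) ]
  print (readyPkgs graph (Set.fromList ["base"]))         -- ["stm"]
  print (readyPkgs graph (Set.fromList ["base", "stm"]))  -- ["async"]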
+-- +lookupProcessingPackage :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> UnitId + -> GenericReadyPackage srcpkg ipkg +lookupProcessingPackage plan pkgid = + -- NB: processing packages are guaranteed to not indirect through + -- planFakeMap + case PackageIndex.lookupUnitId (planIndex plan) pkgid of + Just (Processing pkg) -> pkg + _ -> internalError $ "not in processing state or no such pkg " ++ + display pkgid + +-- | Check a package that we expect to be in the configured or failed state. +-- +checkConfiguredPackage :: (Package srcpkg, Package ipkg) + => GenericPlanPackage ipkg srcpkg iresult ifailure + -> Maybe srcpkg +checkConfiguredPackage (Configured pkg) = Just pkg +checkConfiguredPackage (Failed _ _) = Nothing +checkConfiguredPackage pkg = + internalError $ "not configured or no such pkg " ++ display (packageId pkg) + +-- | Replace a ready package with a pre-existing one. The pre-existing one +-- must have exactly the same dependencies as the source one was configured +-- with. +-- +preexisting :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => UnitId + -> ipkg + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg srcpkg iresult ifailure +preexisting pkgid ipkg plan = assert (invariant plan') plan' + where + plan' = plan { + -- NB: installation can change the IPID, so better + -- record it in the fake mapping... + planFakeMap = Map.insert pkgid + (installedUnitId ipkg) + (planFakeMap plan), + planIndex = PackageIndex.insert (PreExisting ipkg) + -- ...but be sure to use the *old* IPID for the lookup for + -- the preexisting record + . PackageIndex.deleteUnitId pkgid + $ planIndex plan + } + +-- | Replace a ready package with an installed one. The installed one +-- must have exactly the same dependencies as the source one was configured +-- with. +-- +preinstalled :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => UnitId + -> Maybe ipkg -> iresult + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg srcpkg iresult ifailure +preinstalled pkgid mipkg buildResult plan = assert (invariant plan') plan' + where + plan' = plan { planIndex = PackageIndex.insert installed (planIndex plan) } + Just installed = do + Configured pkg <- PackageIndex.lookupUnitId (planIndex plan) pkgid + rpkg <- lookupReadyPackage plan pkg + return (Installed rpkg mipkg buildResult) + +-- | Transform an install plan by mapping a function over all the packages in +-- the plan. It can consistently change the 'UnitId' of all the packages, +-- while preserving the same overall graph structure. +-- +-- The mapping function has a few constraints on it for correct operation. +-- The mapping function /may/ change the 'UnitId' of the package, but it +-- /must/ also remap the 'UnitId's of its dependencies using ths supplied +-- remapping function. Apart from this consistent remapping it /may not/ +-- change the structure of the dependencies. 
+-- +mapPreservingGraph :: (HasUnitId ipkg, + HasUnitId srcpkg, + HasUnitId ipkg', PackageFixedDeps ipkg', + HasUnitId srcpkg', PackageFixedDeps srcpkg') + => ( (UnitId -> UnitId) + -> GenericPlanPackage ipkg srcpkg iresult ifailure + -> GenericPlanPackage ipkg' srcpkg' iresult' ifailure') + -> GenericInstallPlan ipkg srcpkg iresult ifailure + -> GenericInstallPlan ipkg' srcpkg' iresult' ifailure' +mapPreservingGraph f plan = + mkInstallPlan (PackageIndex.fromList pkgs') + Map.empty -- empty fakeMap + (planIndepGoals plan) + where + -- The package mapping function may change the UnitId. So we + -- walk over the packages in dependency order keeping track of these + -- package id changes and use it to supply the correct set of package + -- dependencies as an extra input to the package mapping function. + -- + -- Having fully remapped all the deps this also means we can use an empty + -- FakeMap for the resulting install plan. + + (_, pkgs') = foldl' f' (Map.empty, []) (reverseTopologicalOrder plan) + + f' (ipkgidMap, pkgs) pkg = (ipkgidMap', pkg' : pkgs) + where + pkg' = f (mapDep ipkgidMap) pkg + + ipkgidMap' + | ipkgid /= ipkgid' = Map.insert ipkgid ipkgid' ipkgidMap + | otherwise = ipkgidMap + where + ipkgid = installedUnitId pkg + ipkgid' = installedUnitId pkg' + + mapDep ipkgidMap ipkgid = Map.findWithDefault ipkgid ipkgid ipkgidMap + + +-- ------------------------------------------------------------ +-- * Checking validity of plans +-- ------------------------------------------------------------ + +-- | A valid installation plan is a set of packages that is 'acyclic', +-- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the +-- plan has to have a valid configuration (see 'configuredPackageValid'). +-- +-- * if the result is @False@ use 'problems' to get a detailed list. 
+-- +valid :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => FakeMap -> Bool + -> PlanIndex ipkg srcpkg iresult ifailure + -> Bool +valid fakeMap indepGoals index = + null $ problems fakeMap indepGoals index + +data PlanProblem ipkg srcpkg iresult ifailure = + PackageMissingDeps (GenericPlanPackage ipkg srcpkg iresult ifailure) + [PackageIdentifier] + | PackageCycle [GenericPlanPackage ipkg srcpkg iresult ifailure] + | PackageInconsistency PackageName [(PackageIdentifier, Version)] + | PackageStateInvalid (GenericPlanPackage ipkg srcpkg iresult ifailure) + (GenericPlanPackage ipkg srcpkg iresult ifailure) + +showPlanProblem :: (Package ipkg, Package srcpkg) + => PlanProblem ipkg srcpkg iresult ifailure -> String +showPlanProblem (PackageMissingDeps pkg missingDeps) = + "Package " ++ display (packageId pkg) + ++ " depends on the following packages which are missing from the plan: " + ++ intercalate ", " (map display missingDeps) + +showPlanProblem (PackageCycle cycleGroup) = + "The following packages are involved in a dependency cycle " + ++ intercalate ", " (map (display.packageId) cycleGroup) + +showPlanProblem (PackageInconsistency name inconsistencies) = + "Package " ++ display name + ++ " is required by several packages," + ++ " but they require inconsistent versions:\n" + ++ unlines [ " package " ++ display pkg ++ " requires " + ++ display (PackageIdentifier name ver) + | (pkg, ver) <- inconsistencies ] + +showPlanProblem (PackageStateInvalid pkg pkg') = + "Package " ++ display (packageId pkg) + ++ " is in the " ++ showPlanState pkg + ++ " state but it depends on package " ++ display (packageId pkg') + ++ " which is in the " ++ showPlanState pkg' + ++ " state" + where + showPlanState (PreExisting _) = "pre-existing" + showPlanState (Configured _) = "configured" + showPlanState (Processing _) = "processing" + showPlanState (Installed _ _ _) = "installed" + showPlanState (Failed _ _) = "failed" + +-- | For an invalid plan, produce a detailed list of problems as human readable +-- error messages. This is mainly intended for debugging purposes. +-- Use 'showPlanProblem' for a human readable explanation. +-- +problems :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => FakeMap -> Bool + -> PlanIndex ipkg srcpkg iresult ifailure + -> [PlanProblem ipkg srcpkg iresult ifailure] +problems fakeMap indepGoals index = + + [ PackageMissingDeps pkg + (catMaybes + (map + (fmap packageId . PlanIndex.fakeLookupUnitId fakeMap index) + missingDeps)) + | (pkg, missingDeps) <- PlanIndex.brokenPackages fakeMap index ] + + ++ [ PackageCycle cycleGroup + | cycleGroup <- PlanIndex.dependencyCycles fakeMap index ] + + ++ [ PackageInconsistency name inconsistencies + | (name, inconsistencies) <- + PlanIndex.dependencyInconsistencies fakeMap indepGoals index ] + + ++ [ PackageStateInvalid pkg pkg' + | pkg <- PackageIndex.allPackages index + , Just pkg' <- map (PlanIndex.fakeLookupUnitId fakeMap index) + (CD.flatDeps (depends pkg)) + , not (stateDependencyRelation pkg pkg') ] + +-- | The graph of packages (nodes) and dependencies (edges) must be acyclic. +-- +-- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out +-- which packages are involved in dependency cycles. +-- +acyclic :: (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool +acyclic fakeMap = null . 
PlanIndex.dependencyCycles fakeMap
+
+-- | An installation plan is closed if for every package in the set, all of
+-- its dependencies are also in the set. That is, the set is closed under the
+-- dependency relation.
+--
+-- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out
+-- which packages depend on packages not in the index.
+--
+closed :: (HasUnitId ipkg, PackageFixedDeps ipkg,
+           PackageFixedDeps srcpkg)
+       => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool
+closed fakeMap = null . PlanIndex.brokenPackages fakeMap
+
+-- | An installation plan is consistent if all dependencies that target a
+-- single package name target the same version.
+--
+-- This is slightly subtle. It is not the same as requiring that there be at
+-- most one version of any package in the set. It only requires that of
+-- packages which have more than one other package depending on them. We could
+-- actually make the condition even more precise and say that different
+-- versions are OK so long as they are not both in the transitive closure of
+-- any other package (or equivalently that their inverse closures do not
+-- intersect). The point is we do not want to have any packages depending
+-- directly or indirectly on two different versions of the same package. The
+-- current definition is just a safe approximation of that.
+--
+-- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to
+-- find out which packages are inconsistent.
+--
+consistent :: (HasUnitId ipkg, PackageFixedDeps ipkg,
+               HasUnitId srcpkg, PackageFixedDeps srcpkg)
+           => FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool
+consistent fakeMap = null . PlanIndex.dependencyInconsistencies fakeMap False
+
+-- | The states of packages that depend on each other must respect
+-- this relation. That is, for every case where package @a@ depends on
+-- package @b@ we require that @dependencyStatesOk a b = True@.
+--
+stateDependencyRelation :: GenericPlanPackage ipkg srcpkg iresult ifailure
+                        -> GenericPlanPackage ipkg srcpkg iresult ifailure
+                        -> Bool
+stateDependencyRelation (PreExisting _) (PreExisting _) = True
+
+stateDependencyRelation (Configured _) (PreExisting _) = True
+stateDependencyRelation (Configured _) (Configured _) = True
+stateDependencyRelation (Configured _) (Processing _) = True
+stateDependencyRelation (Configured _) (Installed _ _ _) = True
+
+stateDependencyRelation (Processing _) (PreExisting _) = True
+stateDependencyRelation (Processing _) (Installed _ _ _) = True
+
+stateDependencyRelation (Installed _ _ _) (PreExisting _) = True
+stateDependencyRelation (Installed _ _ _) (Installed _ _ _) = True
+
+stateDependencyRelation (Failed _ _) (PreExisting _) = True
+-- failed can depend on configured because a package can depend on
+-- several other packages and if one of the deps fails then we fail
+-- but we still depend on the other ones that did not fail:
+stateDependencyRelation (Failed _ _) (Configured _) = True
+stateDependencyRelation (Failed _ _) (Processing _) = True
+stateDependencyRelation (Failed _ _) (Installed _ _ _) = True
+stateDependencyRelation (Failed _ _) (Failed _ _) = True
+
+stateDependencyRelation _ _ = False
+
+
+-- | Compute the dependency closure of a package in an install plan
+--
+dependencyClosure :: GenericInstallPlan ipkg srcpkg iresult ifailure
+                  -> [UnitId]
+                  -> [GenericPlanPackage ipkg srcpkg iresult ifailure]
+dependencyClosure plan =
+    map (planPkgOf plan)
+  . concatMap Tree.flatten
+  . Graph.dfs (planGraph plan)
+  .
map (planVertexOf plan) + + +reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> [UnitId] + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] +reverseDependencyClosure plan = + map (planPkgOf plan) + . concatMap Tree.flatten + . Graph.dfs (planGraphRev plan) + . map (planVertexOf plan) + + +topologicalOrder :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] +topologicalOrder plan = + map (planPkgOf plan) + . Graph.topSort + $ planGraph plan + + +reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg iresult ifailure + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] +reverseTopologicalOrder plan = + map (planPkgOf plan) + . Graph.topSort + $ planGraphRev plan diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/InstallSymlink.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/InstallSymlink.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/InstallSymlink.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/InstallSymlink.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,251 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.InstallSymlink +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Managing installing binaries with symlinks. +----------------------------------------------------------------------------- +module Distribution.Client.InstallSymlink ( + symlinkBinaries, + symlinkBinary, + ) where + +#if mingw32_HOST_OS + +import Distribution.Package (PackageIdentifier) +import Distribution.Client.InstallPlan (InstallPlan) +import Distribution.Client.Setup (InstallFlags) +import Distribution.Simple.Setup (ConfigFlags) +import Distribution.Simple.Compiler +import Distribution.System + +symlinkBinaries :: Platform -> Compiler + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> IO [(PackageIdentifier, String, FilePath)] +symlinkBinaries _ _ _ _ _ = return [] + +symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool +symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows" + +#else + +import Distribution.Client.Types + ( SourcePackage(..) + , GenericReadyPackage(..), ReadyPackage, enableStanzas + , ConfiguredPackage(..) , fakeUnitId) +import Distribution.Client.Setup + ( InstallFlags(installSymlinkBinDir) ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan (InstallPlan) + +import Distribution.Package + ( PackageIdentifier, Package(packageId), UnitId(..) ) +import Distribution.Compiler + ( CompilerId(..) ) +import qualified Distribution.PackageDescription as PackageDescription +import Distribution.PackageDescription + ( PackageDescription ) +import Distribution.PackageDescription.Configuration + ( finalizePackageDescription ) +import Distribution.Simple.Setup + ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, CompilerInfo(..) 
) +import Distribution.System + ( Platform ) + +import System.Posix.Files + ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink + , removeLink ) +import System.Directory + ( canonicalizePath ) +import System.FilePath + ( (), splitPath, joinPath, isAbsolute ) + +import Prelude hiding (ioError) +import System.IO.Error + ( isDoesNotExistError, ioError ) +import Distribution.Compat.Exception ( catchIO ) +import Control.Exception + ( assert ) +import Data.Maybe + ( catMaybes ) + +-- | We would like by default to install binaries into some location that is on +-- the user's PATH. For per-user installations on Unix systems that basically +-- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ +-- directory will be on the user's PATH. However some people are a bit nervous +-- about letting a package manager install programs into @~/bin/@. +-- +-- A compromise solution is that instead of installing binaries directly into +-- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ +-- and then create symlinks in @~/bin/@. We can be careful when setting up the +-- symlinks that we do not overwrite any binary that the user installed. We can +-- check if it was a symlink we made because it would point to the private dir +-- where we install our binaries. This means we can install normally without +-- worrying and in a later phase set up symlinks, and if that fails then we +-- report it to the user, but even in this case the package is still in an OK +-- installed state. +-- +-- This is an optional feature that users can choose to use or not. It is +-- controlled from the config file. Of course it only works on POSIX systems +-- with symlinks so is not available to Windows users. +-- +symlinkBinaries :: Platform -> Compiler + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> IO [(PackageIdentifier, String, FilePath)] +symlinkBinaries platform comp configFlags installFlags plan = + case flagToMaybe (installSymlinkBinDir installFlags) of + Nothing -> return [] + Just symlinkBinDir + | null exes -> return [] + | otherwise -> do + publicBinDir <- canonicalizePath symlinkBinDir +-- TODO: do we want to do this here? : +-- createDirectoryIfMissing True publicBinDir + fmap catMaybes $ sequence + [ do privateBinDir <- pkgBinDir pkg ipid + ok <- symlinkBinary + publicBinDir privateBinDir + publicExeName privateExeName + if ok + then return Nothing + else return (Just (pkgid, publicExeName, + privateBinDir privateExeName)) + | (ReadyPackage (ConfiguredPackage _ _flags _ _) _, pkg, exe) <- exes + , let pkgid = packageId pkg + -- This is a bit dodgy; probably won't work for Backpack packages + ipid = fakeUnitId pkgid + publicExeName = PackageDescription.exeName exe + privateExeName = prefix ++ publicExeName ++ suffix + prefix = substTemplate pkgid ipid prefixTemplate + suffix = substTemplate pkgid ipid suffixTemplate ] + where + exes = + [ (cpkg, pkg, exe) + | InstallPlan.Installed cpkg _ _ <- InstallPlan.toList plan + , let pkg = pkgDescription cpkg + , exe <- PackageDescription.executables pkg + , PackageDescription.buildable (PackageDescription.buildInfo exe) ] + + pkgDescription :: ReadyPackage -> PackageDescription + pkgDescription (ReadyPackage (ConfiguredPackage + (SourcePackage _ pkg _ _) + flags stanzas _) + _) = + case finalizePackageDescription flags + (const True) + platform cinfo [] (enableStanzas stanzas pkg) of + Left _ -> error "finalizePackageDescription ReadyPackage failed" + Right (desc, _) -> desc + + -- This is sadly rather complicated. 
We're kind of re-doing part of the + -- configuration for the package. :-( + pkgBinDir :: PackageDescription -> UnitId -> IO FilePath + pkgBinDir pkg ipid = do + defaultDirs <- InstallDirs.defaultInstallDirs + compilerFlavor + (fromFlag (configUserInstall configFlags)) + (PackageDescription.hasLibs pkg) + let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + absoluteDirs = InstallDirs.absoluteInstallDirs + (packageId pkg) ipid + cinfo InstallDirs.NoCopyDest + platform templateDirs + canonicalizePath (InstallDirs.bindir absoluteDirs) + + substTemplate pkgid ipid = InstallDirs.fromPathTemplate + . InstallDirs.substPathTemplate env + where env = InstallDirs.initialPathTemplateEnv pkgid ipid + cinfo platform + + fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") + prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) + suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) + cinfo = compilerInfo comp + (CompilerId compilerFlavor _) = compilerInfoId cinfo + +symlinkBinary :: FilePath -- ^ The canonical path of the public bin dir + -- eg @/home/user/bin@ + -> FilePath -- ^ The canonical path of the private bin dir + -- eg @/home/user/.cabal/bin@ + -> String -- ^ The name of the executable to go in the public + -- bin dir, eg @foo@ + -> String -- ^ The name of the executable to in the private bin + -- dir, eg @foo-1.0@ + -> IO Bool -- ^ If creating the symlink was successful. @False@ + -- if there was another file there already that we + -- did not own. Other errors like permission errors + -- just propagate as exceptions. +symlinkBinary publicBindir privateBindir publicName privateName = do + ok <- targetOkToOverwrite (publicBindir publicName) + (privateBindir privateName) + case ok of + NotOurFile -> return False + NotExists -> mkLink >> return True + OkToOverwrite -> rmLink >> mkLink >> return True + where + relativeBindir = makeRelative publicBindir privateBindir + mkLink = createSymbolicLink (relativeBindir privateName) + (publicBindir publicName) + rmLink = removeLink (publicBindir publicName) + +-- | Check a file path of a symlink that we would like to create to see if it +-- is OK. For it to be OK to overwrite it must either not already exist yet or +-- be a symlink to our target (in which case we can assume ownership). +-- +targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private + -- binary that we would like to create + -> FilePath -- ^ The canonical path of the private binary. + -- Use 'canonicalizePath' to make this. + -> IO SymlinkStatus +targetOkToOverwrite symlink target = handleNotExist $ do + status <- getSymbolicLinkStatus symlink + if not (isSymbolicLink status) + then return NotOurFile + else do target' <- canonicalizePath symlink + -- This relies on canonicalizePath handling symlinks + if target == target' + then return OkToOverwrite + else return NotOurFile + + where + handleNotExist action = catchIO action $ \ioexception -> + -- If the target doesn't exist then there's no problem overwriting it! + if isDoesNotExistError ioexception + then return NotExists + else ioError ioexception + +data SymlinkStatus + = NotExists -- ^ The file doesn't exist so we can make a symlink. + | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll + -- have to delete it first before we make a new symlink. 
+ | NotOurFile -- ^ A file already exists and it is not one of our existing + -- symlinks (either because it is not a symlink or because + -- it points somewhere other than our managed space). + deriving Show + +-- | Take two canonical paths and produce a relative path to get from the first +-- to the second, even if it means adding @..@ path components. +-- +makeRelative :: FilePath -> FilePath -> FilePath +makeRelative a b = assert (isAbsolute a && isAbsolute b) $ + let as = splitPath a + bs = splitPath b + commonLen = length $ takeWhile id $ zipWith (==) as bs + in joinPath $ [ ".." | _ <- drop commonLen as ] + ++ drop commonLen bs + +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/JobControl.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/JobControl.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/JobControl.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/JobControl.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,89 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.JobControl +-- Copyright : (c) Duncan Coutts 2012 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- A job control concurrency abstraction +----------------------------------------------------------------------------- +module Distribution.Client.JobControl ( + JobControl, + newSerialJobControl, + newParallelJobControl, + spawnJob, + collectJob, + + JobLimit, + newJobLimit, + withJobLimit, + + Lock, + newLock, + criticalSection + ) where + +import Control.Monad +import Control.Concurrent hiding (QSem, newQSem, waitQSem, signalQSem) +import Control.Exception (SomeException, bracket_, mask, throw, try) +import Distribution.Client.Compat.Semaphore + +data JobControl m a = JobControl { + spawnJob :: m a -> m (), + collectJob :: m a + } + + +newSerialJobControl :: IO (JobControl IO a) +newSerialJobControl = do + queue <- newChan + return JobControl { + spawnJob = spawn queue, + collectJob = collect queue + } + where + spawn :: Chan (IO a) -> IO a -> IO () + spawn = writeChan + + collect :: Chan (IO a) -> IO a + collect = join . 
readChan + +newParallelJobControl :: IO (JobControl IO a) +newParallelJobControl = do + resultVar <- newEmptyMVar + return JobControl { + spawnJob = spawn resultVar, + collectJob = collect resultVar + } + where + spawn :: MVar (Either SomeException a) -> IO a -> IO () + spawn resultVar job = + mask $ \restore -> + forkIO (do res <- try (restore job) + putMVar resultVar res) + >> return () + + collect :: MVar (Either SomeException a) -> IO a + collect resultVar = + takeMVar resultVar >>= either throw return + +data JobLimit = JobLimit QSem + +newJobLimit :: Int -> IO JobLimit +newJobLimit n = + fmap JobLimit (newQSem n) + +withJobLimit :: JobLimit -> IO a -> IO a +withJobLimit (JobLimit sem) = + bracket_ (waitQSem sem) (signalQSem sem) + +newtype Lock = Lock (MVar ()) + +newLock :: IO Lock +newLock = fmap Lock $ newMVar () + +criticalSection :: Lock -> IO a -> IO a +criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/List.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/List.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/List.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/List.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,599 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.List +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2008-2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- +-- Search for and print information about packages +----------------------------------------------------------------------------- +module Distribution.Client.List ( + list, info + ) where + +import Distribution.Package + ( PackageName(..), Package(..), packageName, packageVersion + , Dependency(..), simplifyDependency + , UnitId ) +import Distribution.ModuleName (ModuleName) +import Distribution.License (License) +import qualified Distribution.InstalledPackageInfo as Installed +import qualified Distribution.PackageDescription as Source +import Distribution.PackageDescription + ( Flag(..), FlagName(..) ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) + +import Distribution.Simple.Compiler + ( Compiler, PackageDBStack ) +import Distribution.Simple.Program (ProgramConfiguration) +import Distribution.Simple.Utils + ( equating, comparing, die, notice ) +import Distribution.Simple.Setup (fromFlag) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import qualified Distribution.Client.PackageIndex as PackageIndex +import Distribution.Version + ( Version(..), VersionRange, withinRange, anyVersion + , intersectVersionRanges, simplifyVersionRange ) +import Distribution.Verbosity (Verbosity) +import Distribution.Text + ( Text(disp), display ) + +import Distribution.Client.Types + ( SourcePackage(..), SourcePackageDb(..) ) +import Distribution.Client.Dependency.Types + ( PackageConstraint(..) ) +import Distribution.Client.Targets + ( UserTarget, resolveUserTargets, PackageSpecifier(..) ) +import Distribution.Client.Setup + ( GlobalFlags(..), ListFlags(..), InfoFlags(..) + , RepoContext(..) ) +import Distribution.Client.Utils + ( mergeBy, MergeResult(..) 
) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.FetchUtils + ( isFetched ) + +import Data.List + ( sortBy, groupBy, sort, nub, intersperse, maximumBy, partition ) +import Data.Maybe + ( listToMaybe, fromJust, fromMaybe, isJust ) +import qualified Data.Map as Map +import Data.Tree as Tree +import Control.Monad + ( MonadPlus(mplus), join ) +import Control.Exception + ( assert ) +import Text.PrettyPrint as Disp +import System.Directory + ( doesDirectoryExist ) + + +-- | Return a list of packages matching given search strings. +getPkgList :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramConfiguration + -> ListFlags + -> [String] + -> IO [PackageDisplayInfo] +getPkgList verbosity packageDBs repoCtxt comp conf listFlags pats = do + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + let sourcePkgIndex = packageIndex sourcePkgDb + prefs name = fromMaybe anyVersion + (Map.lookup name (packagePreferences sourcePkgDb)) + + pkgsInfo :: + [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] + pkgsInfo + -- gather info for all packages + | null pats = mergePackages + (InstalledPackageIndex.allPackages installedPkgIndex) + ( PackageIndex.allPackages sourcePkgIndex) + + -- gather info for packages matching search term + | otherwise = pkgsInfoMatching + + pkgsInfoMatching :: + [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] + pkgsInfoMatching = + let matchingInstalled = matchingPackages + InstalledPackageIndex.searchByNameSubstring + installedPkgIndex + matchingSource = matchingPackages + (\ idx n -> + concatMap snd + (PackageIndex.searchByNameSubstring idx n)) + sourcePkgIndex + in mergePackages matchingInstalled matchingSource + + matches :: [PackageDisplayInfo] + matches = [ mergePackageInfo pref + installedPkgs sourcePkgs selectedPkg False + | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo + , not onlyInstalled || not (null installedPkgs) + , let pref = prefs pkgname + selectedPkg = latestWithPref pref sourcePkgs ] + return matches + where + onlyInstalled = fromFlag (listInstalled listFlags) + matchingPackages search index = + [ pkg + | pat <- pats + , pkg <- search index pat ] + + +-- | Show information about packages. +list :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramConfiguration + -> ListFlags + -> [String] + -> IO () +list verbosity packageDBs repos comp conf listFlags pats = do + matches <- getPkgList verbosity packageDBs repos comp conf listFlags pats + + if simpleOutput + then putStr $ unlines + [ display (pkgName pkg) ++ " " ++ display version + | pkg <- matches + , version <- if onlyInstalled + then installedVersions pkg + else nub . sort $ installedVersions pkg + ++ sourceVersions pkg ] + -- Note: this only works because for 'list', one cannot currently + -- specify any version constraints, so listing all installed + -- and source ones works. + else + if null matches + then notice verbosity "No matches found." + else putStr $ unlines (map showPackageSummaryInfo matches) + where + onlyInstalled = fromFlag (listInstalled listFlags) + simpleOutput = fromFlag (listSimpleOutput listFlags) + +info :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramConfiguration + -> GlobalFlags + -> InfoFlags + -> [UserTarget] + -> IO () +info verbosity _ _ _ _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." 
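[Editor's note: the --simple-output branch of 'list' above prints one "name version" pair per line over the sorted, de-duplicated union of installed and source versions. The following is a minimal standalone sketch of just that formatting step, using plain strings in place of the real PackageName and Version types; the names simpleOutput and the example data are illustrative only and are not part of the patch.]

import Data.List (nub, sort)

-- One "name version" pair per line, over the sorted, de-duplicated
-- union of installed and source versions (mirrors the list comprehension above).
simpleOutput :: String -> [String] -> [String] -> String
simpleOutput name installedVs sourceVs =
  unlines [ name ++ " " ++ v | v <- nub (sort (installedVs ++ sourceVs)) ]

main :: IO ()
main = putStr (simpleOutput "foo" ["1.0"] ["1.0", "1.1"])
-- prints:
-- foo 1.0
-- foo 1.1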
+ +info verbosity packageDBs repoCtxt comp conf + globalFlags _listFlags userTargets = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf + sourcePkgDb <- getSourcePackages verbosity repoCtxt + let sourcePkgIndex = packageIndex sourcePkgDb + prefs name = fromMaybe anyVersion + (Map.lookup name (packagePreferences sourcePkgDb)) + + -- Users may specify names of packages that are only installed, not + -- just available source packages, so we must resolve targets using + -- the combination of installed and source packages. + let sourcePkgs' = PackageIndex.fromList + $ map packageId + (InstalledPackageIndex.allPackages installedPkgIndex) + ++ map packageId + (PackageIndex.allPackages sourcePkgIndex) + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + sourcePkgs' userTargets + + pkgsinfo <- sequence + [ do pkginfo <- either die return $ + gatherPkgInfo prefs + installedPkgIndex sourcePkgIndex + pkgSpecifier + updateFileSystemPackageDetails pkginfo + | pkgSpecifier <- pkgSpecifiers ] + + putStr $ unlines (map showPackageDetailedInfo pkgsinfo) + + where + gatherPkgInfo :: (PackageName -> VersionRange) -> + InstalledPackageIndex -> + PackageIndex.PackageIndex SourcePackage -> + PackageSpecifier SourcePackage -> + Either String PackageDisplayInfo + gatherPkgInfo prefs installedPkgIndex sourcePkgIndex + (NamedPackage name constraints) + | null (selectedInstalledPkgs) && null (selectedSourcePkgs) + = Left $ "There is no available version of " ++ display name + ++ " that satisfies " + ++ display (simplifyVersionRange verConstraint) + + | otherwise + = Right $ mergePackageInfo pref installedPkgs + sourcePkgs selectedSourcePkg' + showPkgVersion + where + (pref, installedPkgs, sourcePkgs) = + sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex + + selectedInstalledPkgs = InstalledPackageIndex.lookupDependency + installedPkgIndex + (Dependency name verConstraint) + selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex + (Dependency name verConstraint) + selectedSourcePkg' = latestWithPref pref selectedSourcePkgs + + -- display a specific package version if the user + -- supplied a non-trivial version constraint + showPkgVersion = not (null verConstraints) + verConstraint = foldr intersectVersionRanges anyVersion verConstraints + verConstraints = [ vr | PackageConstraintVersion _ vr <- constraints ] + + gatherPkgInfo prefs installedPkgIndex sourcePkgIndex + (SpecificSourcePackage pkg) = + Right $ mergePackageInfo pref installedPkgs sourcePkgs + selectedPkg True + where + name = packageName pkg + selectedPkg = Just pkg + (pref, installedPkgs, sourcePkgs) = + sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex + +sourcePkgsInfo :: + (PackageName -> VersionRange) + -> PackageName + -> InstalledPackageIndex + -> PackageIndex.PackageIndex SourcePackage + -> (VersionRange, [Installed.InstalledPackageInfo], [SourcePackage]) +sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = + (pref, installedPkgs, sourcePkgs) + where + pref = prefs name + installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName + installedPkgIndex name) + sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name + + +-- | The info that we can display for each package. It is information per +-- package name and covers all installed and available versions. 
+-- +data PackageDisplayInfo = PackageDisplayInfo { + pkgName :: PackageName, + selectedVersion :: Maybe Version, + selectedSourcePkg :: Maybe SourcePackage, + installedVersions :: [Version], + sourceVersions :: [Version], + preferredVersions :: VersionRange, + homepage :: String, + bugReports :: String, + sourceRepo :: String, + synopsis :: String, + description :: String, + category :: String, + license :: License, + author :: String, + maintainer :: String, + dependencies :: [ExtDependency], + flags :: [Flag], + hasLib :: Bool, + hasExe :: Bool, + executables :: [String], + modules :: [ModuleName], + haddockHtml :: FilePath, + haveTarball :: Bool + } + +-- | Covers source dependencies and installed dependencies in +-- one type. +data ExtDependency = SourceDependency Dependency + | InstalledDependency UnitId + +showPackageSummaryInfo :: PackageDisplayInfo -> String +showPackageSummaryInfo pkginfo = + renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ + char '*' <+> disp (pkgName pkginfo) + $+$ + (nest 4 $ vcat [ + maybeShow (synopsis pkginfo) "Synopsis:" reflowParagraphs + , text "Default available version:" <+> + case selectedSourcePkg pkginfo of + Nothing -> text "[ Not available from any configured repository ]" + Just pkg -> disp (packageVersion pkg) + , text "Installed versions:" <+> + case installedVersions pkginfo of + [] | hasLib pkginfo -> text "[ Not installed ]" + | otherwise -> text "[ Unknown ]" + versions -> dispTopVersions 4 + (preferredVersions pkginfo) versions + , maybeShow (homepage pkginfo) "Homepage:" text + , text "License: " <+> text (display (license pkginfo)) + ]) + $+$ text "" + where + maybeShow [] _ _ = empty + maybeShow l s f = text s <+> (f l) + +showPackageDetailedInfo :: PackageDisplayInfo -> String +showPackageDetailedInfo pkginfo = + renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ + char '*' <+> disp (pkgName pkginfo) + <> maybe empty (\v -> char '-' <> disp v) (selectedVersion pkginfo) + <+> text (replicate (16 - length (display (pkgName pkginfo))) ' ') + <> parens pkgkind + $+$ + (nest 4 $ vcat [ + entry "Synopsis" synopsis hideIfNull reflowParagraphs + , entry "Versions available" sourceVersions + (altText null "[ Not available from server ]") + (dispTopVersions 9 (preferredVersions pkginfo)) + , entry "Versions installed" installedVersions + (altText null (if hasLib pkginfo then "[ Not installed ]" + else "[ Unknown ]")) + (dispTopVersions 4 (preferredVersions pkginfo)) + , entry "Homepage" homepage orNotSpecified text + , entry "Bug reports" bugReports orNotSpecified text + , entry "Description" description hideIfNull reflowParagraphs + , entry "Category" category hideIfNull text + , entry "License" license alwaysShow disp + , entry "Author" author hideIfNull reflowLines + , entry "Maintainer" maintainer hideIfNull reflowLines + , entry "Source repo" sourceRepo orNotSpecified text + , entry "Executables" executables hideIfNull (commaSep text) + , entry "Flags" flags hideIfNull (commaSep dispFlag) + , entry "Dependencies" dependencies hideIfNull (commaSep dispExtDep) + , entry "Documentation" haddockHtml showIfInstalled text + , entry "Cached" haveTarball alwaysShow dispYesNo + , if not (hasLib pkginfo) then empty else + text "Modules:" $+$ nest 4 (vcat (map disp . sort . 
modules $ pkginfo)) + ]) + $+$ text "" + where + entry fname field cond format = case cond (field pkginfo) of + Nothing -> label <+> format (field pkginfo) + Just Nothing -> empty + Just (Just other) -> label <+> text other + where + label = text fname <> char ':' <> padding + padding = text (replicate (13 - length fname ) ' ') + + normal = Nothing + hide = Just Nothing + replace msg = Just (Just msg) + + alwaysShow = const normal + hideIfNull v = if null v then hide else normal + showIfInstalled v + | not isInstalled = hide + | null v = replace "[ Not installed ]" + | otherwise = normal + altText nul msg v = if nul v then replace msg else normal + orNotSpecified = altText null "[ Not specified ]" + + commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f + dispFlag f = case flagName f of FlagName n -> text n + dispYesNo True = text "Yes" + dispYesNo False = text "No" + + dispExtDep (SourceDependency dep) = disp dep + dispExtDep (InstalledDependency dep) = disp dep + + isInstalled = not (null (installedVersions pkginfo)) + hasExes = length (executables pkginfo) >= 2 + --TODO: exclude non-buildable exes + pkgkind | hasLib pkginfo && hasExes = text "programs and library" + | hasLib pkginfo && hasExe pkginfo = text "program and library" + | hasLib pkginfo = text "library" + | hasExes = text "programs" + | hasExe pkginfo = text "program" + | otherwise = empty + + +reflowParagraphs :: String -> Doc +reflowParagraphs = + vcat + . intersperse (text "") -- re-insert blank lines + . map (fsep . map text . concatMap words) -- reflow paragraphs + . filter (/= [""]) + . groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines + . lines + +reflowLines :: String -> Doc +reflowLines = vcat . map text . lines + +-- | We get the 'PackageDisplayInfo' by combining the info for the installed +-- and available versions of a package. +-- +-- * We're building info about a various versions of a single named package so +-- the input package info records are all supposed to refer to the same +-- package name. +-- +mergePackageInfo :: VersionRange + -> [Installed.InstalledPackageInfo] + -> [SourcePackage] + -> Maybe SourcePackage + -> Bool + -> PackageDisplayInfo +mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = + assert (length installedPkgs + length sourcePkgs > 0) $ + PackageDisplayInfo { + pkgName = combine packageName source + packageName installed, + selectedVersion = if showVer then fmap packageVersion selectedPkg + else Nothing, + selectedSourcePkg = sourceSelected, + installedVersions = map packageVersion installedPkgs, + sourceVersions = map packageVersion sourcePkgs, + preferredVersions = versionPref, + + license = combine Source.license source + Installed.license installed, + maintainer = combine Source.maintainer source + Installed.maintainer installed, + author = combine Source.author source + Installed.author installed, + homepage = combine Source.homepage source + Installed.homepage installed, + bugReports = maybe "" Source.bugReports source, + sourceRepo = fromMaybe "" . join + . fmap (uncons Nothing Source.repoLocation + . sortBy (comparing Source.repoKind) + . 
Source.sourceRepos) + $ source, + --TODO: installed package info is missing synopsis + synopsis = maybe "" Source.synopsis source, + description = combine Source.description source + Installed.description installed, + category = combine Source.category source + Installed.category installed, + flags = maybe [] Source.genPackageFlags sourceGeneric, + hasLib = isJust installed + || fromMaybe False + (fmap (isJust . Source.condLibrary) sourceGeneric), + hasExe = fromMaybe False + (fmap (not . null . Source.condExecutables) sourceGeneric), + executables = map fst (maybe [] Source.condExecutables sourceGeneric), + modules = combine (map Installed.exposedName . Installed.exposedModules) + installed + (maybe [] getListOfExposedModules . Source.library) + source, + dependencies = + combine (map (SourceDependency . simplifyDependency) + . Source.buildDepends) source + (map InstalledDependency . Installed.depends) installed, + haddockHtml = fromMaybe "" . join + . fmap (listToMaybe . Installed.haddockHTMLs) + $ installed, + haveTarball = False + } + where + combine f x g y = fromJust (fmap f x `mplus` fmap g y) + installed :: Maybe Installed.InstalledPackageInfo + installed = latestWithPref versionPref installedPkgs + + getListOfExposedModules lib = Source.exposedModules lib + ++ map Source.moduleReexportName + (Source.reexportedModules lib) + + sourceSelected + | isJust selectedPkg = selectedPkg + | otherwise = latestWithPref versionPref sourcePkgs + sourceGeneric = fmap packageDescription sourceSelected + source = fmap flattenPackageDescription sourceGeneric + + uncons :: b -> (a -> b) -> [a] -> b + uncons z _ [] = z + uncons _ f (x:_) = f x + + +-- | Not all the info is pure. We have to check if the docs really are +-- installed, because the registered package info lies. Similarly we have to +-- check if the tarball has indeed been fetched. +-- +updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo +updateFileSystemPackageDetails pkginfo = do + fetched <- maybe (return False) (isFetched . packageSource) + (selectedSourcePkg pkginfo) + docsExist <- doesDirectoryExist (haddockHtml pkginfo) + return pkginfo { + haveTarball = fetched, + haddockHtml = if docsExist then haddockHtml pkginfo else "" + } + +latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg +latestWithPref _ [] = Nothing +latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) + where + prefThenVersion pkg = let ver = packageVersion pkg + in (withinRange ver pref, ver) + + +-- | Rearrange installed and source packages into groups referring to the +-- same package by name. In the result pairs, the lists are guaranteed to not +-- both be empty. +-- +mergePackages :: [Installed.InstalledPackageInfo] + -> [SourcePackage] + -> [( PackageName + , [Installed.InstalledPackageInfo] + , [SourcePackage] )] +mergePackages installedPkgs sourcePkgs = + map collect + $ mergeBy (\i a -> fst i `compare` fst a) + (groupOn packageName installedPkgs) + (groupOn packageName sourcePkgs) + where + collect (OnlyInLeft (name,is) ) = (name, is, []) + collect ( InBoth (_,is) (name,as)) = (name, is, as) + collect (OnlyInRight (name,as)) = (name, [], as) + +groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] +groupOn key = map (\xs -> (key (head xs), xs)) + . groupBy (equating key) + . sortBy (comparing key) + +dispTopVersions :: Int -> VersionRange -> [Version] -> Doc +dispTopVersions n pref vs = + (Disp.fsep . Disp.punctuate (Disp.char ',') + . 
map (\ver -> if ispref ver then disp ver else parens (disp ver)) + . sort . take n . interestingVersions ispref + $ vs) + <+> trailingMessage + + where + ispref ver = withinRange ver pref + extra = length vs - n + trailingMessage + | extra <= 0 = Disp.empty + | otherwise = Disp.parens $ Disp.text "and" + <+> Disp.int (length vs - n) + <+> if extra == 1 then Disp.text "other" + else Disp.text "others" + +-- | Reorder a bunch of versions to put the most interesting / significant +-- versions first. A preferred version range is taken into account. +-- +-- This may be used in a user interface to select a small number of versions +-- to present to the user, e.g. +-- +-- > let selectVersions = sort . take 5 . interestingVersions pref +-- +interestingVersions :: (Version -> Bool) -> [Version] -> [Version] +interestingVersions pref = + map ((\ns -> Version ns []) . fst) . filter snd + . concat . Tree.levels + . swizzleTree + . reorderTree (\(Node (v,_) _) -> pref (Version v [])) + . reverseTree + . mkTree + . map versionBranch + + where + swizzleTree = unfoldTree (spine []) + where + spine ts' (Node x []) = (x, ts') + spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t + + reorderTree _ (Node x []) = Node x [] + reorderTree p (Node x ts) = Node x (ts' ++ ts'') + where + (ts',ts'') = partition p (map (reorderTree p) ts) + + reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) + + mkTree xs = unfoldTree step (False, [], xs) + where + step (node,ns,vs) = + ( (reverse ns, node) + , [ (any null vs', n:ns, filter (not . null) vs') + | (n, vs') <- groups vs ] + ) + groups = map (\g -> (head (head g), map tail g)) + . groupBy (equating head) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Manpage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Manpage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Manpage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Manpage.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,171 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Manpage +-- Copyright : (c) Maciek Makowski 2015 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Functions for building the manual page. + +module Distribution.Client.Manpage + ( -- * Manual page generation + manpage + ) where + +import Distribution.Simple.Command +import Distribution.Client.Setup (globalCommand) + +import Data.Char (toUpper) +import Data.List (intercalate) + +data FileInfo = FileInfo String String -- ^ path, description + +-- | A list of files that should be documented in the manual page. +files :: [FileInfo] +files = + [ (FileInfo "~/.cabal/config" "The defaults that can be overridden with command-line options.") + , (FileInfo "~/.cabal/world" "A list of all packages whose installation has been explicitly requested.") + ] + +-- | Produces a manual page with @troff@ markup. +manpage :: String -> [CommandSpec a] -> String +manpage pname commands = unlines $ + [ ".TH " ++ map toUpper pname ++ " 1" + , ".SH NAME" + , pname ++ " \\- a system for building and packaging Haskell libraries and programs" + , ".SH SYNOPSIS" + , ".B " ++ pname + , ".I command" + , ".RI < arguments |[ options ]>..." 
+ , "" + , "Where the" + , ".I commands" + , "are" + , "" + ] ++ + concatMap (commandSynopsisLines pname) commands ++ + [ ".SH DESCRIPTION" + , "Cabal is the standard package system for Haskell software. It helps people to configure, " + , "build and install Haskell software and to distribute it easily to other users and developers." + , "" + , "The command line " ++ pname ++ " tool (also referred to as cabal-install) helps with " + , "installing existing packages and developing new packages. " + , "It can be used to work with local packages or to install packages from online package archives, " + , "including automatically installing dependencies. By default it is configured to use Hackage, " + , "which is Haskell’s central package archive that contains thousands of libraries and applications " + , "in the Cabal package format." + , ".SH OPTIONS" + , "Global options:" + , "" + ] ++ + optionsLines (globalCommand []) ++ + [ ".SH COMMANDS" + ] ++ + concatMap (commandDetailsLines pname) commands ++ + [ ".SH FILES" + ] ++ + concatMap fileLines files ++ + [ ".SH BUGS" + , "To browse the list of known issues or report a new one please see " + , "https://github.com/haskell/cabal/labels/cabal-install." + ] + +commandSynopsisLines :: String -> CommandSpec action -> [String] +commandSynopsisLines pname (CommandSpec ui _ NormalCommand) = + [ ".B " ++ pname ++ " " ++ (commandName ui) + , ".R - " ++ commandSynopsis ui + , ".br" + ] +commandSynopsisLines _ (CommandSpec _ _ HiddenCommand) = [] + +commandDetailsLines :: String -> CommandSpec action -> [String] +commandDetailsLines pname (CommandSpec ui _ NormalCommand) = + [ ".B " ++ pname ++ " " ++ (commandName ui) + , "" + , commandUsage ui pname + , "" + ] ++ + optional commandDescription ++ + optional commandNotes ++ + [ "Flags:" + , ".RS" + ] ++ + optionsLines ui ++ + [ ".RE" + , "" + ] + where + optional field = + case field ui of + Just text -> [text pname, ""] + Nothing -> [] +commandDetailsLines _ (CommandSpec _ _ HiddenCommand) = [] + +optionsLines :: CommandUI flags -> [String] +optionsLines command = concatMap optionLines (concatMap optionDescr (commandOptions command ParseArgs)) + +data ArgumentRequired = Optional | Required +type OptionArg = (ArgumentRequired, ArgPlaceHolder) + +optionLines :: OptDescr flags -> [String] +optionLines (ReqArg description (optionChars, optionStrings) placeHolder _ _) = + argOptionLines description optionChars optionStrings (Required, placeHolder) +optionLines (OptArg description (optionChars, optionStrings) placeHolder _ _ _) = + argOptionLines description optionChars optionStrings (Optional, placeHolder) +optionLines (BoolOpt description (trueChars, trueStrings) (falseChars, falseStrings) _ _) = + optionLinesIfPresent trueChars trueStrings ++ + optionLinesIfPresent falseChars falseStrings ++ + optionDescriptionLines description +optionLines (ChoiceOpt options) = + concatMap choiceLines options + where + choiceLines (description, (optionChars, optionStrings), _, _) = + [ optionsLine optionChars optionStrings ] ++ + optionDescriptionLines description + +argOptionLines :: String -> [Char] -> [String] -> OptionArg -> [String] +argOptionLines description optionChars optionStrings arg = + [ optionsLine optionChars optionStrings + , optionArgLine arg + ] ++ + optionDescriptionLines description + +optionLinesIfPresent :: [Char] -> [String] -> [String] +optionLinesIfPresent optionChars optionStrings = + if null optionChars && null optionStrings then [] + else [ optionsLine optionChars optionStrings, ".br" ] + 
+optionDescriptionLines :: String -> [String] +optionDescriptionLines description = + [ ".RS" + , description + , ".RE" + , "" + ] + +optionsLine :: [Char] -> [String] -> String +optionsLine optionChars optionStrings = + intercalate ", " (shortOptions optionChars ++ longOptions optionStrings) + +shortOptions :: [Char] -> [String] +shortOptions = map (\c -> "\\-" ++ [c]) + +longOptions :: [String] -> [String] +longOptions = map (\s -> "\\-\\-" ++ s) + +optionArgLine :: OptionArg -> String +optionArgLine (Required, placeHolder) = ".I " ++ placeHolder +optionArgLine (Optional, placeHolder) = ".RI [ " ++ placeHolder ++ " ]" + +fileLines :: FileInfo -> [String] +fileLines (FileInfo path description) = + [ path + , ".RS" + , description + , ".RE" + , "" + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageHash.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageHash.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageHash.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageHash.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,305 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns, GeneralizedNewtypeDeriving #-} + +-- | Functions to calculate nix-style hashes for package ids. +-- +-- The basic idea is simple, hash the combination of: +-- +-- * the package tarball +-- * the ids of all the direct dependencies +-- * other local configuration (flags, profiling, etc) +-- +module Distribution.Client.PackageHash ( + -- * Calculating package hashes + PackageHashInputs(..), + PackageHashConfigInputs(..), + PackageSourceHash, + hashedInstalledPackageId, + hashPackageHashInputs, + renderPackageHashInputs, + -- ** Platform-specific variations + hashedInstalledPackageIdLong, + hashedInstalledPackageIdShort, + + -- * Low level hash choice + HashValue, + hashValue, + showHashValue, + readFileHashValue, + hashFromTUF, + ) where + +import Distribution.Package + ( PackageId, PackageIdentifier(..), mkUnitId ) +import Distribution.System + ( Platform, OS(Windows), buildOS ) +import Distribution.PackageDescription + ( FlagName(..), FlagAssignment ) +import Distribution.Simple.Compiler + ( CompilerId, OptimisationLevel(..), DebugInfoLevel(..) + , ProfDetailLevel(..), showProfDetailLevel ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate ) +import Distribution.Text + ( display ) +import Distribution.Client.Types + ( InstalledPackageId ) + +import qualified Hackage.Security.Client as Sec + +import qualified Crypto.Hash.SHA256 as SHA256 +import qualified Data.ByteString.Base16 as Base16 +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Lazy.Char8 as LBS +import qualified Data.Set as Set +import Data.Set (Set) + +import Data.Maybe (catMaybes) +import Data.List (sortBy, intercalate) +import Data.Function (on) +import Distribution.Compat.Binary (Binary(..)) +import Control.Exception (evaluate) +import System.IO (withBinaryFile, IOMode(..)) + + +------------------------------- +-- Calculating package hashes +-- + +-- | Calculate a 'InstalledPackageId' for a package using our nix-style +-- inputs hashing method. +-- +-- Note that due to path length limitations on Windows, this function uses +-- a different method on Windows that produces shorted package ids. 
+-- See 'hashedInstalledPackageIdLong' vs 'hashedInstalledPackageIdShort'. +-- +hashedInstalledPackageId :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageId + | buildOS == Windows = hashedInstalledPackageIdShort + | otherwise = hashedInstalledPackageIdLong + +-- | Calculate a 'InstalledPackageId' for a package using our nix-style +-- inputs hashing method. +-- +-- This produces large ids with big hashes. It is only suitable for systems +-- without significant path length limitations (ie not Windows). +-- +hashedInstalledPackageIdLong :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageIdLong pkghashinputs@PackageHashInputs{pkgHashPkgId} = + mkUnitId $ + display pkgHashPkgId -- to be a bit user friendly + ++ "-" + ++ showHashValue (hashPackageHashInputs pkghashinputs) + +-- | On Windows we have serious problems with path lengths. Windows imposes a +-- maximum path length of 260 chars, and even if we can use the windows long +-- path APIs ourselves, we cannot guarantee that ghc, gcc, ld, ar, etc etc all +-- do so too. +-- +-- So our only choice is to limit the lengths of the paths, and the only real +-- way to do that is to limit the size of the 'InstalledPackageId's that we +-- generate. We do this by truncating the package names and versions and also +-- by truncating the hash sizes. +-- +-- Truncating the package names and versions is technically ok because they are +-- just included for human convenience, the full source package id is included +-- in the hash. +-- +-- Truncating the hash size is disappointing but also technically ok. We +-- rely on the hash primarily for collision avoidance not for any securty +-- properties (at least for now). +-- +hashedInstalledPackageIdShort :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = + mkUnitId $ + intercalate "-" + -- max length now 64 + [ truncateStr 14 (display name) + , truncateStr 8 (display version) + , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) + ] + where + PackageIdentifier name version = pkgHashPkgId + + -- Truncate a 32 byte SHA256 hash to 160bits, 20 bytes :-( + -- It'll render as 40 hex chars. + truncateHash (HashValue h) = HashValue (BS.take 20 h) + + -- Truncate a string, with a visual indication that it is truncated. + truncateStr n s | length s <= n = s + | otherwise = take (n-1) s ++ "_" + +-- | All the information that contribues to a package's hash, and thus its +-- 'InstalledPackageId'. +-- +data PackageHashInputs = PackageHashInputs { + pkgHashPkgId :: PackageId, + pkgHashSourceHash :: PackageSourceHash, + pkgHashDirectDeps :: Set InstalledPackageId, + pkgHashOtherConfig :: PackageHashConfigInputs + } + +type PackageSourceHash = HashValue + +-- | Those parts of the package configuration that contribute to the +-- package hash. 
+-- +data PackageHashConfigInputs = PackageHashConfigInputs { + pkgHashCompilerId :: CompilerId, + pkgHashPlatform :: Platform, + pkgHashFlagAssignment :: FlagAssignment, -- complete not partial + pkgHashConfigureScriptArgs :: [String], -- just ./configure for build-type Configure + pkgHashVanillaLib :: Bool, + pkgHashSharedLib :: Bool, + pkgHashDynExe :: Bool, + pkgHashGHCiLib :: Bool, + pkgHashProfLib :: Bool, + pkgHashProfExe :: Bool, + pkgHashProfLibDetail :: ProfDetailLevel, + pkgHashProfExeDetail :: ProfDetailLevel, + pkgHashCoverage :: Bool, + pkgHashOptimization :: OptimisationLevel, + pkgHashSplitObjs :: Bool, + pkgHashStripLibs :: Bool, + pkgHashStripExes :: Bool, + pkgHashDebugInfo :: DebugInfoLevel, + pkgHashExtraLibDirs :: [FilePath], + pkgHashExtraFrameworkDirs :: [FilePath], + pkgHashExtraIncludeDirs :: [FilePath], + pkgHashProgPrefix :: Maybe PathTemplate, + pkgHashProgSuffix :: Maybe PathTemplate + +-- TODO: [required eventually] extra program options +-- TODO: [required eventually] pkgHashToolsVersions ? +-- TODO: [required eventually] pkgHashToolsExtraOptions ? +-- TODO: [research required] and what about docs? + } + deriving Show + + +-- | Calculate the overall hash to be used for an 'InstalledPackageId'. +-- +hashPackageHashInputs :: PackageHashInputs -> HashValue +hashPackageHashInputs = hashValue . renderPackageHashInputs + +-- | Render a textual representation of the 'PackageHashInputs'. +-- +-- The 'hashValue' of this text is the overall package hash. +-- +renderPackageHashInputs :: PackageHashInputs -> LBS.ByteString +renderPackageHashInputs PackageHashInputs{ + pkgHashPkgId, + pkgHashSourceHash, + pkgHashDirectDeps, + pkgHashOtherConfig = + PackageHashConfigInputs{..} + } = + -- The purpose of this somewhat laboured rendering (e.g. why not just + -- use show?) is so that existing package hashes do not change + -- unnecessarily when new configuration inputs are added into the hash. + + -- In particular, the assumption is that when a new configuration input + -- is included into the hash, that existing packages will typically get + -- the default value for that feature. So if we avoid adding entries with + -- the default value then most of the time adding new features will not + -- change the hashes of existing packages and so fewer packages will need + -- to be rebuilt. + + --TODO: [nice to have] ultimately we probably want to put this config info + -- into the ghc-pkg db. At that point this should probably be changed to + -- use the config file infrastructure so it can be read back in again. + LBS.pack $ unlines $ catMaybes + [ entry "pkgid" display pkgHashPkgId + , entry "src" showHashValue pkgHashSourceHash + , entry "deps" (intercalate ", " . map display + . 
Set.toList) pkgHashDirectDeps + -- and then all the config + , entry "compilerid" display pkgHashCompilerId + , entry "platform" display pkgHashPlatform + , opt "flags" [] showFlagAssignment pkgHashFlagAssignment + , opt "configure-script" [] unwords pkgHashConfigureScriptArgs + , opt "vanilla-lib" True display pkgHashVanillaLib + , opt "shared-lib" False display pkgHashSharedLib + , opt "dynamic-exe" False display pkgHashDynExe + , opt "ghci-lib" False display pkgHashGHCiLib + , opt "prof-lib" False display pkgHashProfLib + , opt "prof-exe" False display pkgHashProfExe + , opt "prof-lib-detail" ProfDetailDefault showProfDetailLevel pkgHashProfLibDetail + , opt "prof-exe-detail" ProfDetailDefault showProfDetailLevel pkgHashProfExeDetail + , opt "hpc" False display pkgHashCoverage + , opt "optimisation" NormalOptimisation (show . fromEnum) pkgHashOptimization + , opt "split-objs" False display pkgHashSplitObjs + , opt "stripped-lib" False display pkgHashStripLibs + , opt "stripped-exe" True display pkgHashStripExes + , opt "debug-info" NormalDebugInfo (show . fromEnum) pkgHashDebugInfo + , opt "extra-lib-dirs" [] unwords pkgHashExtraLibDirs + , opt "extra-framework-dirs" [] unwords pkgHashExtraFrameworkDirs + , opt "extra-include-dirs" [] unwords pkgHashExtraIncludeDirs + , opt "prog-prefix" Nothing (maybe "" fromPathTemplate) pkgHashProgPrefix + , opt "prog-suffix" Nothing (maybe "" fromPathTemplate) pkgHashProgSuffix + ] + where + entry key format value = Just (key ++ ": " ++ format value) + opt key def format value + | value == def = Nothing + | otherwise = entry key format value + + showFlagAssignment = unwords . map showEntry . sortBy (compare `on` fst) + where + showEntry (FlagName name, False) = '-' : name + showEntry (FlagName name, True) = '+' : name + +----------------------------------------------- +-- The specific choice of hash implementation +-- + +-- Is a crypto hash necessary here? One thing to consider is who controls the +-- inputs and what's the result of a hash collision. Obviously we should not +-- install packages we don't trust because they can run all sorts of code, but +-- if I've checked there's no TH, no custom Setup etc, is there still a +-- problem? If someone provided us a tarball that hashed to the same value as +-- some other package and we installed it, we could end up re-using that +-- installed package in place of another one we wanted. So yes, in general +-- there is some value in preventing intentional hash collisions in installed +-- package ids. + +newtype HashValue = HashValue BS.ByteString + deriving (Eq, Show) + +instance Binary HashValue where + put (HashValue digest) = put digest + get = do + digest <- get + -- Cannot do any sensible validation here. Although we use SHA256 + -- for stuff we hash ourselves, we can also get hashes from TUF + -- and that can in principle use different hash functions in future. + return (HashValue digest) + +-- | Hash some data. Currently uses SHA256. +-- +hashValue :: LBS.ByteString -> HashValue +hashValue = HashValue . SHA256.hashlazy + +showHashValue :: HashValue -> String +showHashValue (HashValue digest) = BS.unpack (Base16.encode digest) + +-- | Hash the content of a file. Uses SHA256. +-- +readFileHashValue :: FilePath -> IO HashValue +readFileHashValue tarball = + withBinaryFile tarball ReadMode $ \hnd -> + evaluate . hashValue =<< LBS.hGetContents hnd + +-- | Convert a hash from TUF metadata into a 'PackageSourceHash'. 
+-- +-- Note that TUF hashes don't neessarily have to be SHA256, since it can +-- support new algorithms in future. +-- +hashFromTUF :: Sec.Hash -> HashValue +hashFromTUF (Sec.Hash hashstr) = + --TODO: [code cleanup] either we should get TUF to use raw bytestrings or + -- perhaps we should also just use a base16 string as the internal rep. + case Base16.decode (BS.pack hashstr) of + (hash, trailing) | not (BS.null hash) && BS.null trailing + -> HashValue hash + _ -> error "hashFromTUF: cannot decode base16 hash" + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageIndex.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,318 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.PackageIndex +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007, +-- Duncan Coutts 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- An index of packages. +-- +module Distribution.Client.PackageIndex ( + -- * Package index data type + PackageIndex, + + -- * Creating an index + fromList, + + -- * Updates + merge, + insert, + deletePackageName, + deletePackageId, + deleteDependency, + + -- * Queries + + -- ** Precise lookups + elemByPackageId, + elemByPackageName, + lookupPackageName, + lookupPackageId, + lookupDependency, + + -- ** Case-insensitive searches + searchByName, + SearchResult(..), + searchByNameSubstring, + + -- ** Bulk queries + allPackages, + allPackagesByName, + ) where + +import Prelude hiding (lookup) +import Control.Exception (assert) +import qualified Data.Map as Map +import Data.Map (Map) +import Data.List (groupBy, sortBy, isInfixOf) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid (Monoid(..)) +#endif +import Data.Maybe (isJust, fromMaybe) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup (Semigroup((<>))) + +import Distribution.Package + ( PackageName(..), PackageIdentifier(..) + , Package(..), packageName, packageVersion + , Dependency(Dependency) ) +import Distribution.Version + ( withinRange ) +import Distribution.Simple.Utils + ( lowercase, comparing ) + + +-- | The collection of information about packages from one or more 'PackageDB's. +-- +-- It can be searched efficiently by package name and version. +-- +newtype PackageIndex pkg = PackageIndex + -- This index package names to all the package records matching that package + -- name case-sensitively. It includes all versions. + -- + -- This allows us to find all versions satisfying a dependency. + -- Most queries are a map lookup followed by a linear scan of the bucket. 
+ -- + (Map PackageName [pkg]) + + deriving (Eq, Show, Read, Functor, Generic) +--FIXME: the Functor instance here relies on no package id changes + +instance Package pkg => Semigroup (PackageIndex pkg) where + (<>) = merge + +instance Package pkg => Monoid (PackageIndex pkg) where + mempty = PackageIndex Map.empty + mappend = (<>) + --save one mappend with empty in the common case: + mconcat [] = mempty + mconcat xs = foldr1 mappend xs + +instance Binary pkg => Binary (PackageIndex pkg) + +invariant :: Package pkg => PackageIndex pkg -> Bool +invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) + where + goodBucket _ [] = False + goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 + where + check pkgid [] = packageName pkgid == name + check pkgid (pkg':pkgs) = packageName pkgid == name + && pkgid < pkgid' + && check pkgid' pkgs + where pkgid' = packageId pkg' + +-- +-- * Internal helpers +-- + +mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg +mkPackageIndex index = assert (invariant (PackageIndex index)) + (PackageIndex index) + +internalError :: String -> a +internalError name = error ("PackageIndex." ++ name ++ ": internal error") + +-- | Lookup a name in the index to get all packages that match that name +-- case-sensitively. +-- +lookup :: PackageIndex pkg -> PackageName -> [pkg] +lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m + +-- +-- * Construction +-- + +-- | Build an index out of a bunch of packages. +-- +-- If there are duplicates, later ones mask earlier ones. +-- +fromList :: Package pkg => [pkg] -> PackageIndex pkg +fromList pkgs = mkPackageIndex + . Map.map fixBucket + . Map.fromListWith (++) + $ [ (packageName pkg, [pkg]) + | pkg <- pkgs ] + where + fixBucket = -- out of groups of duplicates, later ones mask earlier ones + -- but Map.fromListWith (++) constructs groups in reverse order + map head + -- Eq instance for PackageIdentifier is wrong, so use Ord: + . groupBy (\a b -> EQ == comparing packageId a b) + -- relies on sortBy being a stable sort so we + -- can pick consistently among duplicates + . sortBy (comparing packageId) + +-- +-- * Updates +-- + +-- | Merge two indexes. +-- +-- Packages from the second mask packages of the same exact name +-- (case-sensitively) from the first. +-- +merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg +merge i1@(PackageIndex m1) i2@(PackageIndex m2) = + assert (invariant i1 && invariant i2) $ + mkPackageIndex (Map.unionWith mergeBuckets m1 m2) + +-- | Elements in the second list mask those in the first. +mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg] +mergeBuckets [] ys = ys +mergeBuckets xs [] = xs +mergeBuckets xs@(x:xs') ys@(y:ys') = + case packageId x `compare` packageId y of + GT -> y : mergeBuckets xs ys' + EQ -> y : mergeBuckets xs' ys' + LT -> x : mergeBuckets xs' ys + +-- | Inserts a single package into the index. +-- +-- This is equivalent to (but slightly quicker than) using 'mappend' or +-- 'merge' with a singleton index. +-- +insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg +insert pkg (PackageIndex index) = mkPackageIndex $ + Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index + where + pkgid = packageId pkg + insertNoDup [] = [pkg] + insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of + LT -> pkg : pkgs + EQ -> pkg : pkgs' + GT -> pkg' : insertNoDup pkgs' + +-- | Internal delete helper. 
+-- +delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg +delete name p (PackageIndex index) = mkPackageIndex $ + Map.update filterBucket name index + where + filterBucket = deleteEmptyBucket + . filter (not . p) + deleteEmptyBucket [] = Nothing + deleteEmptyBucket remaining = Just remaining + +-- | Removes a single package from the index. +-- +deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg +deletePackageId pkgid = + delete (packageName pkgid) (\pkg -> packageId pkg == pkgid) + +-- | Removes all packages with this (case-sensitive) name from the index. +-- +deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg +deletePackageName name = + delete name (\pkg -> packageName pkg == name) + +-- | Removes all packages satisfying this dependency from the index. +-- +deleteDependency :: Package pkg => Dependency -> PackageIndex pkg -> PackageIndex pkg +deleteDependency (Dependency name verstionRange) = + delete name (\pkg -> packageVersion pkg `withinRange` verstionRange) + +-- +-- * Bulk queries +-- + +-- | Get all the packages from the index. +-- +allPackages :: PackageIndex pkg -> [pkg] +allPackages (PackageIndex m) = concat (Map.elems m) + +-- | Get all the packages from the index. +-- +-- They are grouped by package name, case-sensitively. +-- +allPackagesByName :: PackageIndex pkg -> [[pkg]] +allPackagesByName (PackageIndex m) = Map.elems m + +-- +-- * Lookups +-- + +elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool +elemByPackageId index = isJust . lookupPackageId index + +elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool +elemByPackageName index = not . null . lookupPackageName index + + +-- | Does a lookup by package id (name & version). +-- +-- Since multiple package DBs mask each other case-sensitively by package name, +-- then we get back at most one package. +-- +lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg +lookupPackageId index pkgid = + case [ pkg | pkg <- lookup index (packageName pkgid) + , packageId pkg == pkgid ] of + [] -> Nothing + [pkg] -> Just pkg + _ -> internalError "lookupPackageIdentifier" + +-- | Does a case-sensitive search by package name. +-- +lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] +lookupPackageName index name = + [ pkg | pkg <- lookup index name + , packageName pkg == name ] + +-- | Does a case-sensitive search by package name and a range of versions. +-- +-- We get back any number of versions of the specified package name, all +-- satisfying the version range constraint. +-- +lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg] +lookupDependency index (Dependency name versionRange) = + [ pkg | pkg <- lookup index name + , packageName pkg == name + , packageVersion pkg `withinRange` versionRange ] + +-- +-- * Case insensitive name lookups +-- + +-- | Does a case-insensitive search by package name. +-- +-- If there is only one package that compares case-insensitively to this name +-- then the search is unambiguous and we get back all versions of that package. +-- If several match case-insensitively but one matches exactly then it is also +-- unambiguous. +-- +-- If however several match case-insensitively and none match exactly then we +-- have an ambiguous result, and we get back all the versions of all the +-- packages. The list of ambiguous results is split by exact package name. 
So +-- it is a non-empty list of non-empty lists. +-- +searchByName :: PackageIndex pkg + -> String -> [(PackageName, [pkg])] +searchByName (PackageIndex m) name = + [ pkgs + | pkgs@(PackageName name',_) <- Map.toList m + , lowercase name' == lname ] + where + lname = lowercase name + +data SearchResult a = None | Unambiguous a | Ambiguous [a] + +-- | Does a case-insensitive substring search by package name. +-- +-- That is, all packages that contain the given string in their name. +-- +searchByNameSubstring :: PackageIndex pkg + -> String -> [(PackageName, [pkg])] +searchByNameSubstring (PackageIndex m) searchterm = + [ pkgs + | pkgs@(PackageName name, _) <- Map.toList m + , lsearchterm `isInfixOf` lowercase name ] + where + lsearchterm = lowercase searchterm diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PackageUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PackageUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,34 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.PackageUtils +-- Copyright : (c) Duncan Coutts 2010 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Various package description utils that should be in the Cabal lib +----------------------------------------------------------------------------- +module Distribution.Client.PackageUtils ( + externalBuildDepends, + ) where + +import Distribution.Package + ( packageVersion, packageName, Dependency(..) ) +import Distribution.PackageDescription + ( PackageDescription(..) ) +import Distribution.Version + ( withinRange ) + +-- | The list of dependencies that refer to external packages +-- rather than internal package components. +-- +externalBuildDepends :: PackageDescription -> [Dependency] +externalBuildDepends pkg = filter (not . internal) (buildDepends pkg) + where + -- True if this dependency is an internal one (depends on a library + -- defined in the same package). + internal (Dependency depName versionRange) = + depName == packageName pkg && + packageVersion pkg `withinRange` versionRange diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ParseUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ParseUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ParseUtils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,279 @@ +{-# LANGUAGE ExistentialQuantification, NamedFieldPuns #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.ParseUtils +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Parsing utilities. 
+----------------------------------------------------------------------------- + +module Distribution.Client.ParseUtils ( + + -- * Fields and field utilities + FieldDescr(..), + liftField, + liftFields, + filterFields, + mapFieldNames, + commandOptionToField, + commandOptionsToFields, + + -- * Sections and utilities + SectionDescr(..), + liftSection, + + -- * Parsing and printing flat config + parseFields, + ppFields, + ppSection, + + -- * Parsing and printing config with sections and subsections + parseFieldsAndSections, + ppFieldsAndSections, + + -- ** Top level of config files + parseConfig, + showConfig, + ) + where + +import Distribution.ParseUtils + ( FieldDescr(..), ParseResult(..), warning, LineNo, lineNo + , Field(..), liftField, readFieldsFlat ) +import Distribution.Simple.Command + ( OptionField, viewAsFieldDescr ) + +import Control.Monad ( foldM ) +import Text.PrettyPrint ( (<>), (<+>), ($+$) ) +import qualified Data.Map as Map +import qualified Text.PrettyPrint as Disp + ( Doc, text, colon, vcat, empty, isEmpty, nest ) + + +------------------------- +-- FieldDescr utilities +-- + +liftFields :: (b -> a) + -> (a -> b -> b) + -> [FieldDescr a] + -> [FieldDescr b] +liftFields get set = map (liftField get set) + + +-- | Given a collection of field descriptions, keep only a given list of them, +-- identified by name. +-- +filterFields :: [String] -> [FieldDescr a] -> [FieldDescr a] +filterFields includeFields = filter ((`elem` includeFields) . fieldName) + +-- | Apply a name mangling function to the field names of all the field +-- descriptions. The typical use case is to apply some prefix. +-- +mapFieldNames :: (String -> String) -> [FieldDescr a] -> [FieldDescr a] +mapFieldNames mangleName = + map (\descr -> descr { fieldName = mangleName (fieldName descr) }) + + +-- | Reuse a command line 'OptionField' as a config file 'FieldDescr'. +-- +commandOptionToField :: OptionField a -> FieldDescr a +commandOptionToField = viewAsFieldDescr + +-- | Reuse a bunch of command line 'OptionField's as config file 'FieldDescr's. +-- +commandOptionsToFields :: [OptionField a] -> [FieldDescr a] +commandOptionsToFields = map viewAsFieldDescr + + +------------------------------------------ +-- SectionDescr definition and utilities +-- + +-- | The description of a section in a config file. It can contain both +-- fields and optionally further subsections. See also 'FieldDescr'. +-- +data SectionDescr a = forall b. SectionDescr { + sectionName :: String, + sectionFields :: [FieldDescr b], + sectionSubsections :: [SectionDescr b], + sectionGet :: a -> [(String, b)], + sectionSet :: LineNo -> String -> b -> a -> ParseResult a, + sectionEmpty :: b + } + +-- | To help construction of config file descriptions in a modular way it is +-- useful to define fields and sections on local types and then hoist them +-- into the parent types when combining them in bigger descriptions. +-- +-- This is essentially a lens operation for 'SectionDescr' to help embedding +-- one inside another. +-- +liftSection :: (b -> a) + -> (a -> b -> b) + -> SectionDescr a + -> SectionDescr b +liftSection get' set' (SectionDescr name fields sections get set empty) = + let sectionGet' = get . get' + sectionSet' lineno param x y = do + x' <- set lineno param x (get' y) + return (set' x' y) + in SectionDescr name fields sections sectionGet' sectionSet' empty + + +------------------------------------- +-- Parsing and printing flat config +-- + +-- | Parse a bunch of semi-parsed 'Field's according to a set of field +-- descriptions. 
It accumulates the result on top of a given initial value. +-- +-- This only covers the case of flat configuration without subsections. See +-- also 'parseFieldsAndSections'. +-- +parseFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a +parseFields fieldDescrs = + foldM setField + where + fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] + + setField accum (F line name value) = + case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value accum + Nothing -> do + warning $ "Unrecognized field " ++ name ++ " on line " ++ show line + return accum + + setField accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +-- | This is a customised version of the functions from Distribution.ParseUtils +-- that also optionally print default values for empty fields as comments. +-- +ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc +ppFields fields def cur = + Disp.vcat [ ppField name (fmap getter def) (getter cur) + | FieldDescr name getter _ <- fields] + +ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc +ppField name mdef cur + | Disp.isEmpty cur = maybe Disp.empty + (\def -> Disp.text "--" <+> Disp.text name + <> Disp.colon <+> def) mdef + | otherwise = Disp.text name <> Disp.colon <+> cur + +-- | Pretty print a section. +-- +-- Since 'ppFields' does not cover subsections you can use this to add them. +-- Or alternatively use a 'SectionDescr' and use 'ppFieldsAndSections'. +-- +ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc +ppSection name arg fields def cur + | Disp.isEmpty fieldsDoc = Disp.empty + | otherwise = Disp.text name <+> argDoc + $+$ (Disp.nest 2 fieldsDoc) + where + fieldsDoc = ppFields fields def cur + argDoc | arg == "" = Disp.empty + | otherwise = Disp.text arg + + +----------------------------------------- +-- Parsing and printing non-flat config +-- + +-- | Much like 'parseFields' but it also allows subsections. The permitted +-- subsections are given by a list of 'SectionDescr's. +-- +parseFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a + -> [Field] -> ParseResult a +parseFieldsAndSections fieldDescrs sectionDescrs = + foldM setField + where + fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] + sectionMap = Map.fromList [ (sectionName s, s) | s <- sectionDescrs ] + + setField a (F line name value) = + case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value a + Nothing -> do + warning $ "Unrecognized field '" ++ name + ++ "' on line " ++ show line + return a + + setField a (Section line name param fields) = + case Map.lookup name sectionMap of + Just (SectionDescr _ fieldDescrs' sectionDescrs' _ set sectionEmpty) -> do + b <- parseFieldsAndSections fieldDescrs' sectionDescrs' sectionEmpty fields + set line param b a + Nothing -> do + warning $ "Unrecognized section '" ++ name + ++ "' on line " ++ show line + return a + + setField accum (block@IfBlock {}) = do + warning $ "Unrecognized stanza on line " ++ show (lineNo block) + return accum + +-- | Much like 'ppFields' but also pretty prints any subsections. Subsection +-- are only shown if they are non-empty. +-- +-- Note that unlike 'ppFields', at present it does not support printing +-- default values. If needed, adding such support would be quite reasonable. 
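-- As a small illustration of the 'FieldDescr' plumbing this module builds on
-- (the @Foo@ type and its field below are hypothetical, not part of the
-- module; the constructor shape matches the pattern matches used above):
--
-- > data Foo = Foo { fooName :: String }
-- >
-- > fooNameField :: FieldDescr Foo
-- > fooNameField =
-- >   FieldDescr "name"
-- >              (Disp.text . fooName)
-- >              (\_line val foo -> return foo { fooName = val })
-- >
-- > -- ppFields [fooNameField] Nothing (Foo "bar") renders as:  name: bar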
+-- +ppFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +ppFieldsAndSections fieldDescrs sectionDescrs val = + ppFields fieldDescrs Nothing val + $+$ + Disp.vcat + [ Disp.text "" $+$ sectionDoc + | SectionDescr { + sectionName, sectionGet, + sectionFields, sectionSubsections + } <- sectionDescrs + , (param, x) <- sectionGet val + , let sectionDoc = ppSectionAndSubsections + sectionName param + sectionFields sectionSubsections x + , not (Disp.isEmpty sectionDoc) + ] + +-- | Unlike 'ppSection' which has to be called directly, this gets used via +-- 'ppFieldsAndSections' and so does not need to be exported. +-- +ppSectionAndSubsections :: String -> String + -> [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +ppSectionAndSubsections name arg fields sections cur + | Disp.isEmpty fieldsDoc = Disp.empty + | otherwise = Disp.text name <+> argDoc + $+$ (Disp.nest 2 fieldsDoc) + where + fieldsDoc = showConfig fields sections cur + argDoc | arg == "" = Disp.empty + | otherwise = Disp.text arg + + +----------------------------------------------- +-- Top level config file parsing and printing +-- + +-- | Parse a string in the config file syntax into a value, based on a +-- description of the configuration file in terms of its fields and sections. +-- +-- It accumulates the result on top of a given initial (typically empty) value. +-- +parseConfig :: [FieldDescr a] -> [SectionDescr a] -> a + -> String -> ParseResult a +parseConfig fieldDescrs sectionDescrs empty str = + parseFieldsAndSections fieldDescrs sectionDescrs empty + =<< readFieldsFlat str + +-- | Render a value in the config file syntax, based on a description of the +-- configuration file in terms of its fields and sections. +-- +showConfig :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +showConfig = ppFieldsAndSections + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PkgConfigDb.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PkgConfigDb.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PkgConfigDb.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PkgConfigDb.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,146 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.PkgConfigDb +-- Copyright : (c) Iñaki García Etxebarria 2016 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Read the list of packages available to pkg-config. +----------------------------------------------------------------------------- +module Distribution.Client.PkgConfigDb + ( PkgConfigDb + , readPkgConfigDb + , pkgConfigDbFromList + , pkgConfigPkgIsPresent + , getPkgConfigDbDirs + ) where + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative ((<$>), (<*>)) +#endif + +import Control.Exception (IOException, handle) +import Data.Char (isSpace) +import qualified Data.Map as M +import Data.Version (parseVersion) +import Text.ParserCombinators.ReadP (readP_to_S) +import System.FilePath (splitSearchPath) + +import Distribution.Package + ( PackageName(..) 
) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Version + ( Version, VersionRange, withinRange ) + +import Distribution.Compat.Environment + ( lookupEnv ) +import Distribution.Simple.Program + ( ProgramConfiguration, pkgConfigProgram, getProgramOutput, + requireProgram ) +import Distribution.Simple.Utils + ( info ) + +-- | The list of packages installed in the system visible to +-- @pkg-config@. This is an opaque datatype, to be constructed with +-- `readPkgConfigDb` and queried with `pkgConfigPkgPresent`. +data PkgConfigDb = PkgConfigDb (M.Map PackageName (Maybe Version)) + -- ^ If an entry is `Nothing`, this means that the + -- package seems to be present, but we don't know the + -- exact version (because parsing of the version + -- number failed). + | NoPkgConfigDb + -- ^ For when we could not run pkg-config successfully. + deriving (Show) + +-- | Query pkg-config for the list of installed packages, together +-- with their versions. Return a `PkgConfigDb` encapsulating this +-- information. +readPkgConfigDb :: Verbosity -> ProgramConfiguration -> IO PkgConfigDb +readPkgConfigDb verbosity conf = handle ioErrorHandler $ do + (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram conf + pkgList <- lines <$> getProgramOutput verbosity pkgConfig ["--list-all"] + -- The output of @pkg-config --list-all@ also includes a description + -- for each package, which we do not need. + let pkgNames = map (takeWhile (not . isSpace)) pkgList + pkgVersions <- lines <$> getProgramOutput verbosity pkgConfig + ("--modversion" : pkgNames) + (return . pkgConfigDbFromList . zip pkgNames) pkgVersions + where + -- For when pkg-config invocation fails (possibly because of a + -- too long command line). + ioErrorHandler :: IOException -> IO PkgConfigDb + ioErrorHandler e = do + info verbosity ("Failed to query pkg-config, Cabal will continue" + ++ " without solving for pkg-config constraints: " + ++ show e) + return NoPkgConfigDb + +-- | Create a `PkgConfigDb` from a list of @(packageName, version)@ pairs. +pkgConfigDbFromList :: [(String, String)] -> PkgConfigDb +pkgConfigDbFromList pairs = (PkgConfigDb . M.fromList . map convert) pairs + where + convert :: (String, String) -> (PackageName, Maybe Version) + convert (n,vs) = (PackageName n, + case (reverse . readP_to_S parseVersion) vs of + (v, "") : _ -> Just v + _ -> Nothing -- Version not (fully) + -- understood. + ) + +-- | Check whether a given package range is satisfiable in the given +-- @pkg-config@ database. +pkgConfigPkgIsPresent :: PkgConfigDb -> PackageName -> VersionRange -> Bool +pkgConfigPkgIsPresent (PkgConfigDb db) pn vr = + case M.lookup pn db of + Nothing -> False -- Package not present in the DB. + Just Nothing -> True -- Package present, but version unknown. + Just (Just v) -> withinRange v vr +-- If we could not read the pkg-config database successfully we allow +-- the check to succeed. The plan found by the solver may fail to be +-- executed later on, but we have no grounds for rejecting the plan at +-- this stage. +pkgConfigPkgIsPresent NoPkgConfigDb _ _ = True + + +-- | Query pkg-config for the locations of pkg-config's package files. Use this +-- to monitor for changes in the pkg-config DB. 
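-- As a quick illustration of the two functions above (the entries are made
-- up; 'anyVersion' is from Distribution.Version and not imported here):
--
-- > let db = pkgConfigDbFromList [("zlib", "1.2.8"), ("odd", "no.parse")]
-- > pkgConfigPkgIsPresent db (PackageName "zlib")   anyVersion  -- True
-- > pkgConfigPkgIsPresent db (PackageName "odd")    anyVersion  -- True (version unknown)
-- > pkgConfigPkgIsPresent db (PackageName "absent") anyVersion  -- False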
+-- +getPkgConfigDbDirs :: Verbosity -> ProgramConfiguration -> IO [FilePath] +getPkgConfigDbDirs verbosity conf = + (++) <$> getEnvPath <*> getDefPath + where + -- According to @man pkg-config@: + -- + -- PKG_CONFIG_PATH + -- A colon-separated (on Windows, semicolon-separated) list of directories + -- to search for .pc files. The default directory will always be searched + -- after searching the path + -- + getEnvPath = maybe [] parseSearchPath + <$> lookupEnv "PKG_CONFIG_PATH" + + -- Again according to @man pkg-config@: + -- + -- pkg-config can be used to query itself for the default search path, + -- version number and other information, for instance using: + -- + -- > pkg-config --variable pc_path pkg-config + -- + getDefPath = handle ioErrorHandler $ do + (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram conf + parseSearchPath <$> + getProgramOutput verbosity pkgConfig + ["--variable", "pc_path", "pkg-config"] + + parseSearchPath str = + case lines str of + [p] | not (null p) -> splitSearchPath p + _ -> [] + + ioErrorHandler :: IOException -> IO [FilePath] + ioErrorHandler _e = return [] + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PlanIndex.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PlanIndex.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/PlanIndex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/PlanIndex.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,289 @@ +-- | These graph traversal functions mirror the ones in Cabal, but work with +-- the more complete (and fine-grained) set of dependencies provided by +-- PackageFixedDeps rather than only the library dependencies provided by +-- PackageInstalled. +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE CPP #-} +module Distribution.Client.PlanIndex ( + -- * FakeMap and related operations + FakeMap + , fakeDepends + , fakeLookupUnitId + -- * Graph traversal functions + , brokenPackages + , dependencyCycles + , dependencyGraph + , dependencyInconsistencies + ) where + +import Prelude hiding (lookup) +import qualified Data.Map as Map +import qualified Data.Graph as Graph +import Data.Array ((!)) +import Data.Map (Map) +import Data.Maybe (isNothing) +import Data.Either (rights) + +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid (Monoid(..)) +#endif + +import Distribution.Package + ( PackageName(..), PackageIdentifier(..), UnitId(..) + , Package(..), packageName, packageVersion + ) +import Distribution.Version + ( Version ) + +import Distribution.Client.ComponentDeps (ComponentDeps) +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Client.Types + ( PackageFixedDeps(..) ) +import Distribution.Simple.PackageIndex + ( PackageIndex, allPackages, insert, lookupUnitId ) +import Distribution.Package + ( HasUnitId(..), PackageId ) + +-- Note [FakeMap] +----------------- +-- We'd like to use the PackageIndex defined in this module for cabal-install's +-- InstallPlan. However, at the moment, this data structure is indexed by +-- UnitId, which we don't know until after we've compiled a package +-- (whereas InstallPlan needs to store not-compiled packages in the index.) 
+-- Eventually, an UnitId will be calculatable prior to actually building +-- the package, but at the moment, the "fake installed package ID map" is a +-- workaround to solve this problem while reusing PackageIndex. The basic idea +-- is that, since we don't know what an UnitId is beforehand, we just fake +-- up one based on the package ID (it only needs to be unique for the particular +-- install plan), and fill it out with the actual generated UnitId after +-- the package is successfully compiled. +-- +-- However, there is a problem: in the index there may be references using the +-- old package ID, which are now dangling if we update the UnitId. We +-- could map over the entire index to update these pointers as well (a costly +-- operation), but instead, we've chosen to parametrize a variety of important +-- functions by a FakeMap, which records what a fake installed package ID was +-- actually resolved to post-compilation. If we do a lookup, we first check and +-- see if it's a fake ID in the FakeMap. +-- +-- It's a bit grungy, but we expect this to only be temporary anyway. (Another +-- possible workaround would have been to *not* update the installed package ID, +-- but I decided this would be hard to understand.) + +-- | Map from fake package keys to real ones. See Note [FakeMap] +type FakeMap = Map UnitId UnitId + +-- | Variant of `depends` which accepts a `FakeMap` +-- +-- Analogous to `fakeInstalledDepends`. See Note [FakeMap]. +fakeDepends :: PackageFixedDeps pkg => FakeMap -> pkg -> ComponentDeps [UnitId] +fakeDepends fakeMap = fmap (map resolveFakeId) . depends + where + resolveFakeId :: UnitId -> UnitId + resolveFakeId ipid = Map.findWithDefault ipid ipid fakeMap + +--- | Variant of 'lookupUnitId' which accepts a 'FakeMap'. See Note +--- [FakeMap]. +fakeLookupUnitId :: FakeMap -> PackageIndex a -> UnitId + -> Maybe a +fakeLookupUnitId fakeMap index pkg = + lookupUnitId index (Map.findWithDefault pkg pkg fakeMap) + +-- | All packages that have dependencies that are not in the index. +-- +-- Returns such packages along with the dependencies that they're missing. +-- +brokenPackages :: (PackageFixedDeps pkg) + => FakeMap + -> PackageIndex pkg + -> [(pkg, [UnitId])] +brokenPackages fakeMap index = + [ (pkg, missing) + | pkg <- allPackages index + , let missing = + [ pkg' | pkg' <- CD.flatDeps (depends pkg) + , isNothing (fakeLookupUnitId fakeMap index pkg') ] + , not (null missing) ] + +-- | Compute all roots of the install plan, and verify that the transitive +-- plans from those roots are all consistent. +-- +-- NOTE: This does not check for dependency cycles. Moreover, dependency cycles +-- may be absent from the subplans even if the larger plan contains a dependency +-- cycle. Such cycles may or may not be an issue; either way, we don't check +-- for them here. +dependencyInconsistencies :: forall pkg. (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap + -> Bool + -> PackageIndex pkg + -> [(PackageName, [(PackageIdentifier, Version)])] +dependencyInconsistencies fakeMap indepGoals index = + concatMap (dependencyInconsistencies' fakeMap) subplans + where + subplans :: [PackageIndex pkg] + subplans = rights $ + map (dependencyClosure fakeMap index) + (rootSets fakeMap indepGoals index) + +-- | Compute the root sets of a plan +-- +-- A root set is a set of packages whose dependency closure must be consistent. 
+-- This is the set of all top-level library roots (taken together normally, or +-- as singletons sets if we are considering them as independent goals), along +-- with all setup dependencies of all packages. +rootSets :: (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap -> Bool -> PackageIndex pkg -> [[UnitId]] +rootSets fakeMap indepGoals index = + if indepGoals then map (:[]) libRoots else [libRoots] + ++ setupRoots index + where + libRoots = libraryRoots fakeMap index + +-- | Compute the library roots of a plan +-- +-- The library roots are the set of packages with no reverse dependencies +-- (no reverse library dependencies but also no reverse setup dependencies). +libraryRoots :: (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap -> PackageIndex pkg -> [UnitId] +libraryRoots fakeMap index = + map toPkgId roots + where + (graph, toPkgId, _) = dependencyGraph fakeMap index + indegree = Graph.indegree graph + roots = filter isRoot (Graph.vertices graph) + isRoot v = indegree ! v == 0 + +-- | The setup dependencies of each package in the plan +setupRoots :: PackageFixedDeps pkg => PackageIndex pkg -> [[UnitId]] +setupRoots = filter (not . null) + . map (CD.setupDeps . depends) + . allPackages + +-- | Given a package index where we assume we want to use all the packages +-- (use 'dependencyClosure' if you need to get such a index subset) find out +-- if the dependencies within it use consistent versions of each package. +-- Return all cases where multiple packages depend on different versions of +-- some other package. +-- +-- Each element in the result is a package name along with the packages that +-- depend on it and the versions they require. These are guaranteed to be +-- distinct. +-- +dependencyInconsistencies' :: forall pkg. + (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap + -> PackageIndex pkg + -> [(PackageName, [(PackageIdentifier, Version)])] +dependencyInconsistencies' fakeMap index = + [ (name, [ (pid,packageVersion dep) | (dep,pids) <- uses, pid <- pids]) + | (name, ipid_map) <- Map.toList inverseIndex + , let uses = Map.elems ipid_map + , reallyIsInconsistent (map fst uses) + ] + where + -- For each package name (of a dependency, somewhere) + -- and each installed ID of that that package + -- the associated package instance + -- and a list of reverse dependencies (as source IDs) + inverseIndex :: Map PackageName (Map UnitId (pkg, [PackageId])) + inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) + [ (packageName dep, Map.fromList [(ipid,(dep,[packageId pkg]))]) + | -- For each package @pkg@ + pkg <- allPackages index + -- Find out which @ipid@ @pkg@ depends on + , ipid <- CD.nonSetupDeps (fakeDepends fakeMap pkg) + -- And look up those @ipid@ (i.e., @ipid@ is the ID of @dep@) + , Just dep <- [fakeLookupUnitId fakeMap index ipid] + ] + + -- If, in a single install plan, we depend on more than one version of a + -- package, then this is ONLY okay in the (rather special) case that we + -- depend on precisely two versions of that package, and one of them + -- depends on the other. This is necessary for example for the base where + -- we have base-3 depending on base-4. 
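-- As a toy model of that rule, over plain (id, [dependency id]) pairs rather
-- than real packages, the tolerated two-version case is:
--
-- > okPair (id1, deps1) (id2, deps2) = id1 `elem` deps2 || id2 `elem` deps1
--
-- and the check below flags the two-element case exactly when neither
-- package lists the other among its (non-setup) dependencies.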
+ reallyIsInconsistent :: [pkg] -> Bool + reallyIsInconsistent [] = False + reallyIsInconsistent [_p] = False + reallyIsInconsistent [p1, p2] = + let pid1 = installedUnitId p1 + pid2 = installedUnitId p2 + in Map.findWithDefault pid1 pid1 fakeMap `notElem` CD.nonSetupDeps (fakeDepends fakeMap p2) + && Map.findWithDefault pid2 pid2 fakeMap `notElem` CD.nonSetupDeps (fakeDepends fakeMap p1) + reallyIsInconsistent _ = True + + + +-- | Find if there are any cycles in the dependency graph. If there are no +-- cycles the result is @[]@. +-- +-- This actually computes the strongly connected components. So it gives us a +-- list of groups of packages where within each group they all depend on each +-- other, directly or indirectly. +-- +dependencyCycles :: (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap + -> PackageIndex pkg + -> [[pkg]] +dependencyCycles fakeMap index = + [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] + where + adjacencyList = [ (pkg, installedUnitId pkg, + CD.flatDeps (fakeDepends fakeMap pkg)) + | pkg <- allPackages index ] + + +-- | Tries to take the transitive closure of the package dependencies. +-- +-- If the transitive closure is complete then it returns that subset of the +-- index. Otherwise it returns the broken packages as in 'brokenPackages'. +-- +-- * Note that if the result is @Right []@ it is because at least one of +-- the original given 'PackageIdentifier's do not occur in the index. +dependencyClosure :: (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap + -> PackageIndex pkg + -> [UnitId] + -> Either [(pkg, [UnitId])] + (PackageIndex pkg) +dependencyClosure fakeMap index pkgids0 = case closure mempty [] pkgids0 of + (completed, []) -> Right completed + (completed, _) -> Left (brokenPackages fakeMap completed) + where + closure completed failed [] = (completed, failed) + closure completed failed (pkgid:pkgids) = + case fakeLookupUnitId fakeMap index pkgid of + Nothing -> closure completed (pkgid:failed) pkgids + Just pkg -> + case fakeLookupUnitId fakeMap completed + (installedUnitId pkg) of + Just _ -> closure completed failed pkgids + Nothing -> closure completed' failed pkgids' + where completed' = insert pkg completed + pkgids' = CD.nonSetupDeps (depends pkg) ++ pkgids + + +-- | Builds a graph of the package dependencies. +-- +-- Dependencies on other packages that are not in the index are discarded. +-- You can check if there are any such dependencies with 'brokenPackages'. 
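-- The @resolve@ step in the function below, like the other FakeMap lookups in
-- this module, is a plain 'Map.findWithDefault' (the ids here are hypothetical):
--
-- > let fakeMap = Map.fromList [(fakeId, realId)]
-- > Map.findWithDefault fakeId  fakeId  fakeMap   -- realId:  fake ids are rewritten
-- > Map.findWithDefault otherId otherId fakeMap   -- otherId: unknown ids pass through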
+-- +dependencyGraph :: (PackageFixedDeps pkg, HasUnitId pkg) + => FakeMap + -> PackageIndex pkg + -> (Graph.Graph, + Graph.Vertex -> UnitId, + UnitId -> Maybe Graph.Vertex) +dependencyGraph fakeMap index = (graph, vertexToPkg, idToVertex) + where + (graph, vertexToPkg', idToVertex) = Graph.graphFromEdges edges + vertexToPkg v = case vertexToPkg' v of + ((), pkgid, _targets) -> pkgid + + pkgs = allPackages index + edges = map edgesFrom pkgs + + resolve pid = Map.findWithDefault pid pid fakeMap + edgesFrom pkg = ( () + , resolve (installedUnitId pkg) + , CD.flatDeps (fakeDepends fakeMap pkg) + ) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectBuilding.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectBuilding.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectBuilding.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectBuilding.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,1292 @@ +{-# LANGUAGE CPP, BangPatterns, RecordWildCards, NamedFieldPuns, + DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, + ScopedTypeVariables #-} + +-- | +-- +module Distribution.Client.ProjectBuilding ( + BuildStatus(..), + BuildStatusMap, + BuildStatusRebuild(..), + BuildReason(..), + MonitorChangedReason(..), + rebuildTargetsDryRun, + rebuildTargets + ) where + +import Distribution.Client.PackageHash (renderPackageHashInputs) +import Distribution.Client.RebuildMonad +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanning + +import Distribution.Client.Types + ( PackageLocation(..), GenericReadyPackage(..) + , PackageFixedDeps(..) 
+ , InstalledPackageId, installedPackageId ) +import Distribution.Client.InstallPlan + ( GenericInstallPlan, GenericPlanPackage ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Client.ComponentDeps (ComponentDeps) +import Distribution.Client.DistDirLayout +import Distribution.Client.FileMonitor +import Distribution.Client.SetupWrapper +import Distribution.Client.JobControl +import Distribution.Client.FetchUtils +import Distribution.Client.GlobalFlags (RepoContext) +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.Setup (filterConfigureFlags) +import Distribution.Client.SrcDist (allPackageSourceFiles) +import Distribution.Client.Utils (removeExistingFile) + +import Distribution.Package hiding (InstalledPackageId, installedPackageId) +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Simple.Program +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Command (CommandUI) +import qualified Distribution.Simple.Register as Cabal +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.LocalBuildInfo (ComponentName) + +import Distribution.Simple.Utils hiding (matchFileGlob) +import Distribution.Version +import Distribution.Verbosity +import Distribution.Text +import Distribution.ParseUtils ( showPWarning ) + +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.ByteString.Lazy as LBS + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import Control.Exception +import Control.Concurrent.Async +import Control.Concurrent.MVar +import Data.List +import Data.Maybe + +import System.FilePath +import System.IO +import System.Directory +import System.Exit (ExitCode) + + +------------------------------------------------------------------------------ +-- * Overall building strategy. +------------------------------------------------------------------------------ +-- +-- We start with an 'ElaboratedInstallPlan' that has already been improved by +-- reusing packages from the store. So the remaining packages in the +-- 'InstallPlan.Configured' state are ones we either need to build or rebuild. +-- +-- First, we do a preliminary dry run phase where we work out which packages +-- we really need to (re)build, and for the ones we do need to build which +-- build phase to start at. + + +------------------------------------------------------------------------------ +-- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute? +------------------------------------------------------------------------------ + +-- We split things like this for a couple reasons. Firstly we need to be able +-- to do dry runs, and these need to be reasonably accurate in terms of +-- letting users know what (and why) things are going to be (re)built. +-- +-- Given that we need to be able to do dry runs, it would not be great if +-- we had to repeat all the same work when we do it for real. Not only is +-- it duplicate work, but it's duplicate code which is likely to get out of +-- sync. So we do things only once. We preserve info we discover in the dry +-- run phase and rely on it later when we build things for real. This also +-- somewhat simplifies the build phase. 
So this way the dry run can't so +-- easily drift out of sync with the real thing since we're relying on the +-- info it produces. +-- +-- An additional advantage is that it makes it easier to debug rebuild +-- errors (ie rebuilding too much or too little), since all the rebuild +-- decisions are made without making any state changes at the same time +-- (that would make it harder to reproduce the problem sitation). + + +-- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan' +-- +type BuildStatusMap = Map InstalledPackageId BuildStatus + +-- | The build status for an individual package. That is, the state that the +-- package is in prior to initiating a (re)build. +-- +-- It serves two purposes: +-- +-- * For dry-run output, it lets us explain to the user if and why a package +-- is going to be (re)built. +-- +-- * It tell us what step to start or resume building from, and carries +-- enough information for us to be able to do so. +-- +data BuildStatus = + + -- | The package is in the 'InstallPlan.PreExisting' state, so does not + -- need building. + BuildStatusPreExisting + + -- | The package has not been downloaded yet, so it will have to be + -- downloaded, unpacked and built. + | BuildStatusDownload + + -- | The package has not been unpacked yet, so it will have to be + -- unpacked and built. + | BuildStatusUnpack FilePath + + -- | The package exists in a local dir already, and just needs building + -- or rebuilding. So this can only happen for 'BuildInplaceOnly' style + -- packages. + | BuildStatusRebuild FilePath BuildStatusRebuild + + -- | The package exists in a local dir already, and is fully up to date. + -- So this package can be put into the 'InstallPlan.Installed' state + -- and it does not need to be built. + | BuildStatusUpToDate (Maybe InstalledPackageInfo) BuildSuccess + +-- | For a package that is going to be built or rebuilt, the state it's in now. +-- +-- So again, this tells us why a package needs to be rebuilt and what build +-- phases need to be run. The 'MonitorChangedReason' gives us details like +-- which file changed, which is mainly for high verbosity debug output. +-- +data BuildStatusRebuild = + + -- | The package configuration changed, so the configure and build phases + -- needs to be (re)run. + BuildStatusConfigure (MonitorChangedReason ()) + + -- | The configuration has not changed but the build phase needs to be + -- rerun. We record the reason the (re)build is needed. + -- + -- The optional registration info here tells us if we've registered the + -- package already, or if we stil need to do that after building. + -- + | BuildStatusBuild (Maybe (Maybe InstalledPackageInfo)) BuildReason + +data BuildReason = + -- | The depencencies of this package have been (re)built so the build + -- phase needs to be rerun. + -- + -- The optional registration info here tells us if we've registered the + -- package already, or if we stil need to do that after building. + -- + BuildReasonDepsRebuilt + + -- | Changes in files within the package (or first run or corrupt cache) + | BuildReasonFilesChanged (MonitorChangedReason ()) + + -- | An important special case is that no files have changed but the + -- set of components the /user asked to build/ has changed. We track the + -- set of components /we have built/, which of course only grows (until + -- some other change resets it). + -- + -- The @Set 'ComponentName'@ is the set of components we have built + -- previously. 
When we update the monitor we take the union of the ones + -- we have built previously with the ones the user has asked for this + -- time and save those. See 'updatePackageBuildFileMonitor'. + -- + | BuildReasonExtraTargets (Set ComponentName) + + -- | Although we're not going to build any additional targets as a whole, + -- we're going to build some part of a component or run a repl or any + -- other action that does not result in additional persistent artifacts. + -- + | BuildReasonEphemeralTargets + +-- | Which 'BuildStatus' values indicate we'll have to do some build work of +-- some sort. In particular we use this as part of checking if any of a +-- package's deps have changed. +-- +buildStatusRequiresBuild :: BuildStatus -> Bool +buildStatusRequiresBuild BuildStatusPreExisting = False +buildStatusRequiresBuild BuildStatusUpToDate {} = False +buildStatusRequiresBuild _ = True + +-- | Do the dry run pass. This is a prerequisite of 'rebuildTargets'. +-- +-- It gives us the 'BuildStatusMap' and also gives us an improved version of +-- the 'ElaboratedInstallPlan' with packages switched to the +-- 'InstallPlan.Installed' state when we find that they're already up to date. +-- +rebuildTargetsDryRun :: DistDirLayout + -> ElaboratedInstallPlan + -> IO (ElaboratedInstallPlan, BuildStatusMap) +rebuildTargetsDryRun distDirLayout@DistDirLayout{..} = \installPlan -> do + + -- Do the various checks to work out the 'BuildStatus' of each package + pkgsBuildStatus <- foldMInstallPlanDepOrder installPlan dryRunPkg + + -- For 'BuildStatusUpToDate' packages, improve the plan by marking them as + -- 'InstallPlan.Installed'. + let installPlan' = improveInstallPlanWithUpToDatePackages + installPlan pkgsBuildStatus + + return (installPlan', pkgsBuildStatus) + where + dryRunPkg :: ElaboratedPlanPackage + -> ComponentDeps [BuildStatus] + -> IO BuildStatus + dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus = + return BuildStatusPreExisting + + dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do + mloc <- checkFetched (pkgSourceLocation pkg) + case mloc of + Nothing -> return BuildStatusDownload + + Just (LocalUnpackedPackage srcdir) -> + -- For the case of a user-managed local dir, irrespective of the + -- build style, we build from that directory and put build + -- artifacts under the shared dist directory. + dryRunLocalPkg pkg depsBuildStatus srcdir + + -- The three tarball cases are handled the same as each other, + -- though depending on the build style. 
+ Just (LocalTarballPackage tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + Just (RemoteTarballPackage _ tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + Just (RepoTarballPackage _ _ tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + dryRunPkg (InstallPlan.Processing {}) _ = unexpectedState + dryRunPkg (InstallPlan.Installed {}) _ = unexpectedState + dryRunPkg (InstallPlan.Failed {}) _ = unexpectedState + + unexpectedState = error "rebuildTargetsDryRun: unexpected package state" + + dryRunTarballPkg :: ElaboratedConfiguredPackage + -> ComponentDeps [BuildStatus] + -> FilePath + -> IO BuildStatus + dryRunTarballPkg pkg depsBuildStatus tarball = + case pkgBuildStyle pkg of + BuildAndInstall -> return (BuildStatusUnpack tarball) + BuildInplaceOnly -> do + -- TODO: [nice to have] use a proper file monitor rather than this dir exists test + exists <- doesDirectoryExist srcdir + if exists + then dryRunLocalPkg pkg depsBuildStatus srcdir + else return (BuildStatusUnpack tarball) + where + srcdir = distUnpackedSrcDirectory (packageId pkg) + + dryRunLocalPkg :: ElaboratedConfiguredPackage + -> ComponentDeps [BuildStatus] + -> FilePath + -> IO BuildStatus + dryRunLocalPkg pkg depsBuildStatus srcdir = do + -- Go and do lots of I/O, reading caches and probing files to work out + -- if anything has changed + change <- checkPackageFileMonitorChanged + packageFileMonitor pkg srcdir depsBuildStatus + case change of + -- It did change, giving us 'BuildStatusRebuild' info on why + Left rebuild -> + return (BuildStatusRebuild srcdir rebuild) + + -- No changes, the package is up to date. Use the saved build results. + Right (mipkg, buildSuccess) -> + return (BuildStatusUpToDate mipkg buildSuccess) + where + packageFileMonitor = + newPackageFileMonitor distDirLayout (packageId pkg) + + +-- | A specialised traversal over the packages in an install plan. +-- +-- The packages are visited in dependency order, starting with packages with no +-- depencencies. The result for each package is accumulated into a 'Map' and +-- returned as the final result. In addition, when visting a package, the +-- visiting function is passed the results for all the immediate package +-- depencencies. This can be used to propagate information from depencencies. +-- +foldMInstallPlanDepOrder + :: forall m ipkg srcpkg iresult ifailure b. 
+ (Monad m, + HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => GenericInstallPlan ipkg srcpkg iresult ifailure + -> (GenericPlanPackage ipkg srcpkg iresult ifailure -> + ComponentDeps [b] -> m b) + -> m (Map InstalledPackageId b) +foldMInstallPlanDepOrder plan0 visit = + go Map.empty (InstallPlan.reverseTopologicalOrder plan0) + where + go :: Map InstalledPackageId b + -> [GenericPlanPackage ipkg srcpkg iresult ifailure] + -> m (Map InstalledPackageId b) + go !results [] = return results + + go !results (pkg : pkgs) = do + -- we go in the right order so the results map has entries for all deps + let depresults :: ComponentDeps [b] + depresults = + fmap (map (\ipkgid -> let Just result = Map.lookup ipkgid results + in result)) + (depends pkg) + result <- visit pkg depresults + let results' = Map.insert (installedPackageId pkg) result results + go results' pkgs + +improveInstallPlanWithUpToDatePackages :: ElaboratedInstallPlan + -> BuildStatusMap + -> ElaboratedInstallPlan +improveInstallPlanWithUpToDatePackages installPlan pkgsBuildStatus = + replaceWithPreInstalled installPlan + [ (installedPackageId pkg, mipkg, buildSuccess) + | InstallPlan.Configured pkg + <- InstallPlan.reverseTopologicalOrder installPlan + , let ipkgid = installedPackageId pkg + Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus + , BuildStatusUpToDate mipkg buildSuccess <- [pkgBuildStatus] + ] + where + replaceWithPreInstalled = + foldl' (\plan (ipkgid, mipkg, buildSuccess) -> + InstallPlan.preinstalled ipkgid mipkg buildSuccess plan) + + +----------------------------- +-- Package change detection +-- + +-- | As part of the dry run for local unpacked packages we have to check if the +-- package config or files have changed. That is the purpose of +-- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'. +-- +-- When a package is (re)built, the monitor must be updated to reflect the new +-- state of the package. Because we sometimes build without reconfiguring the +-- state updates are split into two, one for package config changes and one +-- for other changes. This is the purpose of 'updatePackageConfigFileMonitor' +-- and 'updatePackageBuildFileMonitor'. +-- +data PackageFileMonitor = PackageFileMonitor { + pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (), + pkgFileMonitorBuild :: FileMonitor (Set ComponentName) BuildSuccess, + pkgFileMonitorReg :: FileMonitor () (Maybe InstalledPackageInfo) + } + +newPackageFileMonitor :: DistDirLayout -> PackageId -> PackageFileMonitor +newPackageFileMonitor DistDirLayout{distPackageCacheFile} pkgid = + PackageFileMonitor { + pkgFileMonitorConfig = + newFileMonitor (distPackageCacheFile pkgid "config"), + + pkgFileMonitorBuild = + FileMonitor { + fileMonitorCacheFile = distPackageCacheFile pkgid "build", + fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt -> + componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt, + fileMonitorCheckIfOnlyValueChanged = True + }, + + pkgFileMonitorReg = + newFileMonitor (distPackageCacheFile pkgid "registration") + } + +-- | Helper function for 'checkPackageFileMonitorChanged', +-- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'. +-- +-- It selects the info from a 'ElaboratedConfiguredPackage' that are used by +-- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes. 
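-- For the build monitor constructed above, "key valid" means that the set of
-- components we want to build now is a subset of the set recorded as already
-- built, e.g. (the component names are illustrative):
--
-- > Set.fromList [CLibName]
-- >   `Set.isSubsetOf` Set.fromList [CLibName, CExeName "foo"]  -- True, no rebuild
-- > Set.fromList [CTestName "tests"]
-- >   `Set.isSubsetOf` Set.fromList [CLibName]                  -- False, rebuild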
+-- +packageFileMonitorKeyValues :: ElaboratedConfiguredPackage + -> (ElaboratedConfiguredPackage, Set ComponentName) +packageFileMonitorKeyValues pkg = + (pkgconfig, buildComponents) + where + -- The first part is the value used to guard (re)configuring the package. + -- That is, if this value changes then we will reconfigure. + -- The ElaboratedConfiguredPackage consists mostly (but not entirely) of + -- information that affects the (re)configure step. But those parts that + -- do not affect the configure step need to be nulled out. Those parts are + -- the specific targets that we're going to build. + -- + pkgconfig = pkg { + pkgBuildTargets = [], + pkgReplTarget = Nothing, + pkgBuildHaddocks = False + } + + -- The second part is the value used to guard the build step. So this is + -- more or less the opposite of the first part, as it's just the info about + -- what targets we're going to build. + -- + buildComponents = pkgBuildTargetWholeComponents pkg + +-- | Do all the checks on whether a package has changed and thus needs either +-- rebuilding or reconfiguring and rebuilding. +-- +checkPackageFileMonitorChanged :: PackageFileMonitor + -> ElaboratedConfiguredPackage + -> FilePath + -> ComponentDeps [BuildStatus] + -> IO (Either BuildStatusRebuild + (Maybe InstalledPackageInfo, + BuildSuccess)) +checkPackageFileMonitorChanged PackageFileMonitor{..} + pkg srcdir depsBuildStatus = do + --TODO: [nice to have] some debug-level message about file changes, like rerunIfChanged + configChanged <- checkFileMonitorChanged + pkgFileMonitorConfig srcdir pkgconfig + case configChanged of + MonitorChanged monitorReason -> + return (Left (BuildStatusConfigure monitorReason')) + where + monitorReason' = fmap (const ()) monitorReason + + MonitorUnchanged () _ + -- The configChanged here includes the identity of the dependencies, + -- so depsBuildStatus is just needed for the changes in the content + -- of depencencies. + | any buildStatusRequiresBuild (CD.flatDeps depsBuildStatus) -> do + regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir () + let mreg = changedToMaybe regChanged + return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt)) + + | otherwise -> do + buildChanged <- checkFileMonitorChanged + pkgFileMonitorBuild srcdir buildComponents + regChanged <- checkFileMonitorChanged + pkgFileMonitorReg srcdir () + let mreg = changedToMaybe regChanged + case (buildChanged, regChanged) of + (MonitorChanged (MonitoredValueChanged prevBuildComponents), _) -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonExtraTargets prevBuildComponents + + (MonitorChanged monitorReason, _) -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonFilesChanged monitorReason' + monitorReason' = fmap (const ()) monitorReason + + (MonitorUnchanged _ _, MonitorChanged monitorReason) -> + -- this should only happen if the file is corrupt or been + -- manually deleted. We don't want to bother with another + -- phase just for this, so we'll reregister by doing a build. 
+ return (Left (BuildStatusBuild Nothing buildReason)) + where + buildReason = BuildReasonFilesChanged monitorReason' + monitorReason' = fmap (const ()) monitorReason + + (MonitorUnchanged _ _, MonitorUnchanged _ _) + | pkgHasEphemeralBuildTargets pkg -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonEphemeralTargets + + (MonitorUnchanged buildSuccess _, MonitorUnchanged mipkg _) -> + return (Right (mipkg, buildSuccess)) + where + (pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg + changedToMaybe (MonitorChanged _) = Nothing + changedToMaybe (MonitorUnchanged x _) = Just x + + +updatePackageConfigFileMonitor :: PackageFileMonitor + -> FilePath + -> ElaboratedConfiguredPackage + -> IO () +updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig} + srcdir pkg = + updateFileMonitor pkgFileMonitorConfig srcdir Nothing + [] pkgconfig () + where + (pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg + +updatePackageBuildFileMonitor :: PackageFileMonitor + -> FilePath + -> MonitorTimestamp + -> ElaboratedConfiguredPackage + -> BuildStatusRebuild + -> [FilePath] + -> BuildSuccess + -> IO () +updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild} + srcdir timestamp pkg pkgBuildStatus + allSrcFiles buildSuccess = + updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp) + (map monitorFileHashed allSrcFiles) + buildComponents' buildSuccess + where + (_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg + + -- If the only thing that's changed is that we're now building extra + -- components, then we can avoid later unnecessary rebuilds by saving the + -- total set of components that have been built, namely the union of the + -- existing ones plus the new ones. If files also changed this would be + -- the wrong thing to do. Note that we rely on the + -- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee + -- that it's /only/ the value that changed not any files that changed. + buildComponents' = + case pkgBuildStatus of + BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents) + -> buildComponents `Set.union` prevBuildComponents + _ -> buildComponents + +updatePackageRegFileMonitor :: PackageFileMonitor + -> FilePath + -> Maybe InstalledPackageInfo + -> IO () +updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} + srcdir mipkg = + updateFileMonitor pkgFileMonitorReg srcdir Nothing + [] () mipkg + +invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO () +invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} = + removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg) + + +------------------------------------------------------------------------------ +-- * Doing it: executing an 'ElaboratedInstallPlan' +------------------------------------------------------------------------------ + + +-- | Build things for real. +-- +-- It requires the 'BuildStatusMap' gatthered by 'rebuildTargetsDryRun'. +-- +rebuildTargets :: Verbosity + -> DistDirLayout + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> BuildStatusMap + -> BuildTimeSettings + -> IO ElaboratedInstallPlan +rebuildTargets verbosity + distDirLayout@DistDirLayout{..} + installPlan + sharedPackageConfig + pkgsBuildStatus + buildSettings@BuildTimeSettings{buildSettingNumJobs} = do + + -- Concurrency control: create the job controller and concurrency limits + -- for downloading, building and installing. 
+ jobControl <- if isParallelBuild then newParallelJobControl + else newSerialJobControl + buildLimit <- newJobLimit buildSettingNumJobs + installLock <- newLock -- serialise installation + cacheLock <- newLock -- serialise access to setup exe cache + --TODO: [code cleanup] eliminate setup exe cache + + createDirectoryIfMissingVerbose verbosity False distBuildRootDirectory + createDirectoryIfMissingVerbose verbosity False distTempDirectory + + -- Before traversing the install plan, pre-emptively find all packages that + -- will need to be downloaded and start downloading them. + asyncDownloadPackages verbosity withRepoCtx + installPlan pkgsBuildStatus $ \downloadMap -> + + -- For each package in the plan, in dependency order, but in parallel... + executeInstallPlan verbosity jobControl installPlan $ \pkg -> + handle (return . BuildFailure) $ --TODO: review exception handling + + let ipkgid = installedPackageId pkg + Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus in + + rebuildTarget + verbosity + distDirLayout + buildSettings downloadMap + buildLimit installLock cacheLock + sharedPackageConfig + pkg + pkgBuildStatus + where + isParallelBuild = buildSettingNumJobs >= 2 + withRepoCtx = projectConfigWithBuilderRepoContext verbosity + buildSettings + +-- | Given all the context and resources, (re)build an individual package. +-- +rebuildTarget :: Verbosity + -> DistDirLayout + -> BuildTimeSettings + -> AsyncDownloadMap + -> JobLimit -> Lock -> Lock + -> ElaboratedSharedConfig + -> ElaboratedReadyPackage + -> BuildStatus + -> IO BuildResult +rebuildTarget verbosity + distDirLayout@DistDirLayout{distBuildDirectory} + buildSettings downloadMap + buildLimit installLock cacheLock + sharedPackageConfig + rpkg@(ReadyPackage pkg _) + pkgBuildStatus = + + -- We rely on the 'BuildStatus' to decide which phase to start from: + case pkgBuildStatus of + BuildStatusDownload -> downloadPhase + BuildStatusUnpack tarball -> unpackTarballPhase tarball + BuildStatusRebuild srcdir status -> rebuildPhase status srcdir + + -- TODO: perhaps re-nest the types to make these impossible + BuildStatusPreExisting {} -> unexpectedState + BuildStatusUpToDate {} -> unexpectedState + where + unexpectedState = error "rebuildTarget: unexpected package status" + + downloadPhase = do + downsrcloc <- waitAsyncPackageDownload verbosity downloadMap pkg + case downsrcloc of + DownloadedTarball tarball -> unpackTarballPhase tarball + --TODO: [nice to have] git/darcs repos etc + + + unpackTarballPhase tarball = + withJobLimit buildLimit $ + withTarballLocalDirectory + verbosity distDirLayout tarball + (packageId pkg) (pkgBuildStyle pkg) + (pkgDescriptionOverride pkg) $ + + case pkgBuildStyle pkg of + BuildAndInstall -> buildAndInstall + BuildInplaceOnly -> buildInplace buildStatus + where + buildStatus = BuildStatusConfigure MonitorFirstRun + + -- Note that this really is rebuild, not build. It can only happen for + -- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages + -- would only start from download or unpack phases. 
+ -- + rebuildPhase buildStatus srcdir = + assert (pkgBuildStyle pkg == BuildInplaceOnly) $ + + withJobLimit buildLimit $ + buildInplace buildStatus srcdir builddir + where + builddir = distBuildDirectory (packageId pkg) + + buildAndInstall srcdir builddir = + buildAndInstallUnpackedPackage + verbosity distDirLayout + buildSettings installLock cacheLock + sharedPackageConfig + rpkg + srcdir builddir' + where + builddir' = makeRelative srcdir builddir + --TODO: [nice to have] ^^ do this relative stuff better + + buildInplace buildStatus srcdir builddir = + --TODO: [nice to have] use a relative build dir rather than absolute + buildInplaceUnpackedPackage + verbosity distDirLayout + buildSettings cacheLock + sharedPackageConfig + rpkg + buildStatus + srcdir builddir + +--TODO: [nice to have] do we need to use a with-style for the temp files for downloading http +-- packages, or are we going to cache them persistently? + +type AsyncDownloadMap = Map (PackageLocation (Maybe FilePath)) + (MVar DownloadedSourceLocation) + +data DownloadedSourceLocation = DownloadedTarball FilePath + --TODO: [nice to have] git/darcs repos etc + +downloadedSourceLocation :: PackageLocation FilePath + -> Maybe DownloadedSourceLocation +downloadedSourceLocation pkgloc = + case pkgloc of + RemoteTarballPackage _ tarball -> Just (DownloadedTarball tarball) + RepoTarballPackage _ _ tarball -> Just (DownloadedTarball tarball) + _ -> Nothing + +-- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the +-- packages we have to download and fork off an async action to download them. +-- We download them in dependency order so that the one's we'll need +-- first are the ones we will start downloading first. +-- +-- The body action is passed a map from those packages (identified by their +-- location) to a completion var for that package. So the body action should +-- lookup the location and use 'waitAsyncPackageDownload' to get the result. +-- +asyncDownloadPackages :: Verbosity + -> ((RepoContext -> IO ()) -> IO ()) + -> ElaboratedInstallPlan + -> BuildStatusMap + -> (AsyncDownloadMap -> IO a) + -> IO a +asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body + | null pkgsToDownload = body Map.empty + | otherwise = do + --TODO: [research required] use parallel downloads? if so, use the fetchLimit + + asyncDownloadVars <- mapM (\loc -> (,) loc <$> newEmptyMVar) pkgsToDownload + + let downloadAction :: IO () + downloadAction = + withRepoCtx $ \repoctx -> + forM_ asyncDownloadVars $ \(pkgloc, var) -> do + Just scrloc <- downloadedSourceLocation <$> + fetchPackage verbosity repoctx pkgloc + putMVar var scrloc + + withAsync downloadAction $ \_ -> + body (Map.fromList asyncDownloadVars) + where + pkgsToDownload = + [ pkgSourceLocation pkg + | InstallPlan.Configured pkg + <- InstallPlan.reverseTopologicalOrder installPlan + , let ipkgid = installedPackageId pkg + Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus + , BuildStatusDownload <- [pkgBuildStatus] + ] + + +-- | Check if a package needs downloading, and if so expect to find a download +-- in progress in the given 'AsyncDownloadMap' and wait on it to finish. 
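-- For reference, 'downloadedSourceLocation' above only recognises tarball
-- locations (the values below are hypothetical):
--
-- > downloadedSourceLocation (RemoteTarballPackage uri "/tmp/foo-1.0.tar.gz")
-- >   -- Just (DownloadedTarball "/tmp/foo-1.0.tar.gz")
-- > downloadedSourceLocation (LocalUnpackedPackage "/home/user/foo")
-- >   -- Nothing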
+-- +waitAsyncPackageDownload :: Verbosity + -> AsyncDownloadMap + -> ElaboratedConfiguredPackage + -> IO DownloadedSourceLocation +waitAsyncPackageDownload verbosity downloadMap pkg = + case Map.lookup (pkgSourceLocation pkg) downloadMap of + Just hnd -> do + debug verbosity $ + "Waiting for download of " ++ display (packageId pkg) ++ " to finish" + --TODO: [required eventually] do the exception handling on download stuff + takeMVar hnd + Nothing -> + fail "waitAsyncPackageDownload: package not being download" + + +executeInstallPlan + :: forall ipkg srcpkg iresult. + (HasUnitId ipkg, PackageFixedDeps ipkg, + HasUnitId srcpkg, PackageFixedDeps srcpkg) + => Verbosity + -> JobControl IO ( GenericReadyPackage srcpkg ipkg + , GenericBuildResult ipkg iresult BuildFailure ) + -> GenericInstallPlan ipkg srcpkg iresult BuildFailure + -> ( GenericReadyPackage srcpkg ipkg + -> IO (GenericBuildResult ipkg iresult BuildFailure)) + -> IO (GenericInstallPlan ipkg srcpkg iresult BuildFailure) +executeInstallPlan verbosity jobCtl plan0 installPkg = + tryNewTasks 0 plan0 + where + tryNewTasks taskCount plan = do + case InstallPlan.ready plan of + [] | taskCount == 0 -> return plan + | otherwise -> waitForTasks taskCount plan + pkgs -> do + sequence_ + [ do debug verbosity $ "Ready to install " ++ display pkgid + spawnJob jobCtl $ do + buildResult <- installPkg pkg + return (pkg, buildResult) + | pkg <- pkgs + , let pkgid = packageId pkg + ] + + let taskCount' = taskCount + length pkgs + plan' = InstallPlan.processing pkgs plan + waitForTasks taskCount' plan' + + waitForTasks taskCount plan = do + debug verbosity $ "Waiting for install task to finish..." + (pkg, buildResult) <- collectJob jobCtl + let taskCount' = taskCount-1 + plan' = updatePlan pkg buildResult plan + tryNewTasks taskCount' plan' + + updatePlan :: GenericReadyPackage srcpkg ipkg + -> GenericBuildResult ipkg iresult BuildFailure + -> GenericInstallPlan ipkg srcpkg iresult BuildFailure + -> GenericInstallPlan ipkg srcpkg iresult BuildFailure + updatePlan pkg (BuildSuccess mipkg buildSuccess) = + InstallPlan.completed (installedPackageId pkg) mipkg buildSuccess + + updatePlan pkg (BuildFailure buildFailure) = + InstallPlan.failed (installedPackageId pkg) buildFailure depsFailure + where + depsFailure = DependentFailed (packageId pkg) + -- So this first pkgid failed for whatever reason (buildFailure). + -- All the other packages that depended on this pkgid, which we + -- now cannot build, we mark as failing due to 'DependentFailed' + -- which kind of means it was not their fault. + + +-- | Ensure that the package is unpacked in an appropriate directory, either +-- a temporary one or a persistent one under the shared dist directory. +-- +withTarballLocalDirectory + :: Verbosity + -> DistDirLayout + -> FilePath + -> PackageId + -> BuildStyle + -> Maybe CabalFileText + -> (FilePath -> FilePath -> IO a) + -> IO a +withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..} + tarball pkgid buildstyle pkgTextOverride + buildPkg = + case buildstyle of + -- In this case we make a temp dir, unpack the tarball to there and + -- build and install it from that temp dir. 
+ BuildAndInstall -> + withTempDirectory verbosity distTempDirectory + (display (packageName pkgid)) $ \tmpdir -> do + unpackPackageTarball verbosity tarball tmpdir + pkgid pkgTextOverride + let srcdir = tmpdir display pkgid + builddir = srcdir "dist" + buildPkg srcdir builddir + + -- In this case we make sure the tarball has been unpacked to the + -- appropriate location under the shared dist dir, and then build it + -- inplace there + BuildInplaceOnly -> do + let srcrootdir = distUnpackedSrcRootDirectory + srcdir = distUnpackedSrcDirectory pkgid + builddir = distBuildDirectory pkgid + -- TODO: [nice to have] use a proper file monitor rather than this dir exists test + exists <- doesDirectoryExist srcdir + unless exists $ do + createDirectoryIfMissingVerbose verbosity False srcrootdir + unpackPackageTarball verbosity tarball srcrootdir + pkgid pkgTextOverride + moveTarballShippedDistDirectory verbosity distDirLayout + srcrootdir pkgid + buildPkg srcdir builddir + + +unpackPackageTarball :: Verbosity -> FilePath -> FilePath + -> PackageId -> Maybe CabalFileText + -> IO () +unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride = + --TODO: [nice to have] switch to tar package and catch tar exceptions + annotateFailure UnpackFailed $ do + + -- Unpack the tarball + -- + info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..." + Tar.extractTarGzFile parentdir pkgsubdir tarball + + -- Sanity check + -- + exists <- doesFileExist cabalFile + when (not exists) $ + die $ "Package .cabal file not found in the tarball: " ++ cabalFile + + -- Overwrite the .cabal with the one from the index, when appropriate + -- + case pkgTextOverride of + Nothing -> return () + Just pkgtxt -> do + info verbosity $ "Updating " ++ display pkgname <.> "cabal" + ++ " with the latest revision from the index." + writeFileAtomic cabalFile pkgtxt + + where + cabalFile = parentdir pkgsubdir + display pkgname <.> "cabal" + pkgsubdir = display pkgid + pkgname = packageName pkgid + + +-- | This is a bit of a hacky workaround. A number of packages ship +-- pre-processed .hs files in a dist directory inside the tarball. We don't +-- use the standard 'dist' location so unless we move this dist dir to the +-- right place then we'll miss the shipped pre-procssed files. This hacky +-- approach to shipped pre-procssed files ought to be replaced by a proper +-- system, though we'll still need to keep this hack for older packages. 
+-- | This is a bit of a hacky workaround. A number of packages ship
+-- pre-processed .hs files in a dist directory inside the tarball. We don't
+-- use the standard 'dist' location so unless we move this dist dir to the
+-- right place then we'll miss the shipped pre-processed files. This hacky
+-- approach to shipped pre-processed files ought to be replaced by a proper
+-- system, though we'll still need to keep this hack for older packages.
+--
+moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout
+                                -> FilePath -> PackageId -> IO ()
+moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory}
+                                parentdir pkgid = do
+    distDirExists <- doesDirectoryExist tarballDistDir
+    when distDirExists $ do
+      debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '"
+                                   ++ targetDistDir ++ "'"
+      --TODO: [nice to have] or perhaps better to copy, and use a file monitor
+      renameDirectory tarballDistDir targetDistDir
+  where
+    tarballDistDir = parentdir </> display pkgid </> "dist"
+    targetDistDir  = distBuildDirectory pkgid
+
+
+buildAndInstallUnpackedPackage :: Verbosity
+                               -> DistDirLayout
+                               -> BuildTimeSettings -> Lock -> Lock
+                               -> ElaboratedSharedConfig
+                               -> ElaboratedReadyPackage
+                               -> FilePath -> FilePath
+                               -> IO BuildResult
+buildAndInstallUnpackedPackage verbosity
+                               DistDirLayout{distTempDirectory}
+                               BuildTimeSettings {
+                                 buildSettingNumJobs,
+                                 buildSettingLogFile
+                               }
+                               installLock cacheLock
+                               pkgshared@ElaboratedSharedConfig {
+                                 pkgConfigPlatform      = platform,
+                                 pkgConfigCompiler      = compiler,
+                                 pkgConfigCompilerProgs = progdb
+                               }
+                               rpkg@(ReadyPackage pkg _deps)
+                               srcdir builddir = do
+
+    createDirectoryIfMissingVerbose verbosity False builddir
+    initLogFile
+
+    --TODO: [code cleanup] deal consistently with talking to older Setup.hs versions, much like
+    -- we do for ghc, with a proper options type and rendering step
+    -- which will also let us call directly into the lib, rather than always
+    -- going via the lib's command line interface, which would also allow
+    -- passing data like installed packages, compiler, and program db for a
+    -- quicker configure.
+
+    --TODO: [required feature] docs and tests
+    --TODO: [required feature] sudo re-exec
+
+    -- Configure phase
+    when isParallelBuild $
+      notice verbosity $ "Configuring " ++ display pkgid ++ "..."
+    annotateFailure ConfigureFailed $
+      setup configureCommand configureFlags
+
+    -- Build phase
+    when isParallelBuild $
+      notice verbosity $ "Building " ++ display pkgid ++ "..."
+    annotateFailure BuildFailed $
+      setup buildCommand buildFlags
+
+    -- Install phase
+    mipkg <-
+      criticalSection installLock $
+      annotateFailure InstallFailed $ do
+      --TODO: [research required] do we need the installLock for copying? can we not do that in
+      -- parallel? Isn't it just registering that we have to lock for?
+
+      --TODO: [required eventually] need to lock installing this ipkg so other processes don't
+      -- stomp on our files, since we don't have ABI compat, not safe to replace
+
+      -- TODO: [required eventually] note that for nix-style installations it is not necessary to do
+      -- the 'withWin32SelfUpgrade' dance, but it would be necessary for a
+      -- shared bin dir.
+
+      -- Actual installation
+      setup Cabal.copyCommand copyFlags
+
+      LBS.writeFile
+        (InstallDirs.prefix (pkgInstallDirs pkg) </> "cabal-hash.txt") $
+        (renderPackageHashInputs (packageHashInputs pkgshared pkg))
+
+      -- here's where we could keep track of the installed files ourselves if
+      -- we wanted by calling copy to an image dir and then we would make a
+      -- manifest and move it to its final location
+
+      --TODO: [nice to have] we should actually have it make an image in store/incoming and
+      -- then when it's done, move it to its final location, to reduce problems
+      -- with installs failing half-way. Could also register and then move.
+ + -- For libraries, grab the package configuration file + -- and register it ourselves + if pkgRequiresRegistration pkg + then do + ipkg <- generateInstalledPackageInfo + -- We register ourselves rather than via Setup.hs. We need to + -- grab and modify the InstalledPackageInfo. We decide what + -- the installed package id is, not the build system. + let ipkg' = ipkg { Installed.installedUnitId = ipkgid } + Cabal.registerPackage verbosity compiler progdb + True -- multi-instance, nix style + (pkgRegisterPackageDBStack pkg) ipkg' + return (Just ipkg') + else return Nothing + + --TODO: [required feature] docs and test phases + let docsResult = DocsNotTried + testsResult = TestsNotTried + + return (BuildSuccess mipkg (BuildOk docsResult testsResult)) + + where + pkgid = packageId rpkg + ipkgid = installedPackageId rpkg + + isParallelBuild = buildSettingNumJobs >= 2 + + configureCommand = Cabal.configureCommand defaultProgramConfiguration + configureFlags v = flip filterConfigureFlags v $ + setupHsConfigureFlags rpkg pkgshared + verbosity builddir + + buildCommand = Cabal.buildCommand defaultProgramConfiguration + buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir + + generateInstalledPackageInfo :: IO InstalledPackageInfo + generateInstalledPackageInfo = + withTempInstalledPackageInfoFile + verbosity distTempDirectory $ \pkgConfFile -> do + -- make absolute since setup changes dir + pkgConfFile' <- canonicalizePath pkgConfFile + let registerFlags _ = setupHsRegisterFlags + pkg pkgshared + verbosity builddir + pkgConfFile' + setup Cabal.registerCommand registerFlags + + copyFlags _ = setupHsCopyFlags pkg pkgshared verbosity builddir + + scriptOptions = setupHsScriptOptions rpkg pkgshared srcdir builddir + isParallelBuild cacheLock + + setup :: CommandUI flags -> (Version -> flags) -> IO () + setup cmd flags = + withLogging $ \mLogFileHandle -> + setupWrapper + verbosity + scriptOptions { useLoggingHandle = mLogFileHandle } + (Just (pkgDescription pkg)) + cmd flags [] + + mlogFile = + case buildSettingLogFile of + Nothing -> Nothing + Just mkLogFile -> Just (mkLogFile compiler platform pkgid ipkgid) + + initLogFile = + case mlogFile of + Nothing -> return () + Just logFile -> do + createDirectoryIfMissing True (takeDirectory logFile) + exists <- doesFileExist logFile + when exists $ removeFile logFile + + withLogging action = + case mlogFile of + Nothing -> action Nothing + Just logFile -> withFile logFile AppendMode (action . 
Just) + + +buildInplaceUnpackedPackage :: Verbosity + -> DistDirLayout + -> BuildTimeSettings -> Lock + -> ElaboratedSharedConfig + -> ElaboratedReadyPackage + -> BuildStatusRebuild + -> FilePath -> FilePath + -> IO BuildResult +buildInplaceUnpackedPackage verbosity + distDirLayout@DistDirLayout { + distTempDirectory, + distPackageCacheDirectory + } + BuildTimeSettings{buildSettingNumJobs} + cacheLock + pkgshared@ElaboratedSharedConfig { + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = progdb + } + rpkg@(ReadyPackage pkg _deps) + buildStatus + srcdir builddir = do + + --TODO: [code cleanup] there is duplication between the distdirlayout and the builddir here + -- builddir is not enough, we also need the per-package cachedir + createDirectoryIfMissingVerbose verbosity False builddir + createDirectoryIfMissingVerbose verbosity False (distPackageCacheDirectory pkgid) + createPackageDBIfMissing verbosity compiler progdb (pkgBuildPackageDBStack pkg) + + -- Configure phase + -- + whenReConfigure $ do + annotateFailure ConfigureFailed $ + setup configureCommand configureFlags [] + invalidatePackageRegFileMonitor packageFileMonitor + updatePackageConfigFileMonitor packageFileMonitor srcdir pkg + + -- Build phase + -- + let docsResult = DocsNotTried + testsResult = TestsNotTried + + buildSuccess :: BuildSuccess + buildSuccess = BuildOk docsResult testsResult + + whenRebuild $ do + timestamp <- beginUpdateFileMonitor + annotateFailure BuildFailed $ + setup buildCommand buildFlags buildArgs + + --TODO: [required eventually] this doesn't track file + --non-existence, so we could fail to rebuild if someone + --adds a new file which changes behavior. + allSrcFiles <- allPackageSourceFiles verbosity srcdir + + updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp + pkg buildStatus + allSrcFiles buildSuccess + + mipkg <- whenReRegister $ annotateFailure InstallFailed $ do + -- Register locally + mipkg <- if pkgRequiresRegistration pkg + then do + ipkg <- generateInstalledPackageInfo + -- We register ourselves rather than via Setup.hs. We need to + -- grab and modify the InstalledPackageInfo. We decide what + -- the installed package id is, not the build system. 
+ let ipkg' = ipkg { Installed.installedUnitId = ipkgid } + Cabal.registerPackage verbosity compiler progdb False + (pkgRegisterPackageDBStack pkg) + ipkg' + return (Just ipkg') + + else return Nothing + + updatePackageRegFileMonitor packageFileMonitor srcdir mipkg + return mipkg + + -- Repl phase + -- + whenRepl $ + annotateFailure BuildFailed $ + setup replCommand replFlags replArgs + + -- Haddock phase + whenHaddock $ + annotateFailure BuildFailed $ + setup haddockCommand haddockFlags [] + + return (BuildSuccess mipkg buildSuccess) + + where + pkgid = packageId rpkg + ipkgid = installedPackageId rpkg + + isParallelBuild = buildSettingNumJobs >= 2 + + packageFileMonitor = newPackageFileMonitor distDirLayout pkgid + + whenReConfigure action = case buildStatus of + BuildStatusConfigure _ -> action + _ -> return () + + whenRebuild action + | null (pkgBuildTargets pkg) = return () + | otherwise = action + + whenRepl action + | isNothing (pkgReplTarget pkg) = return () + | otherwise = action + + whenHaddock action + | pkgBuildHaddocks pkg = action + | otherwise = return () + + whenReRegister action = case buildStatus of + BuildStatusConfigure _ -> action + BuildStatusBuild Nothing _ -> action + BuildStatusBuild (Just mipkg) _ -> return mipkg + + configureCommand = Cabal.configureCommand defaultProgramConfiguration + configureFlags v = flip filterConfigureFlags v $ + setupHsConfigureFlags rpkg pkgshared + verbosity builddir + + buildCommand = Cabal.buildCommand defaultProgramConfiguration + buildFlags _ = setupHsBuildFlags pkg pkgshared + verbosity builddir + buildArgs = setupHsBuildArgs pkg + + replCommand = Cabal.replCommand defaultProgramConfiguration + replFlags _ = setupHsReplFlags pkg pkgshared + verbosity builddir + replArgs = setupHsReplArgs pkg + + haddockCommand = Cabal.haddockCommand + haddockFlags _ = setupHsHaddockFlags pkg pkgshared + verbosity builddir + + scriptOptions = setupHsScriptOptions rpkg pkgshared + srcdir builddir + isParallelBuild cacheLock + + setup :: CommandUI flags -> (Version -> flags) -> [String] -> IO () + setup cmd flags args = + setupWrapper verbosity + scriptOptions + (Just (pkgDescription pkg)) + cmd flags args + + generateInstalledPackageInfo :: IO InstalledPackageInfo + generateInstalledPackageInfo = + withTempInstalledPackageInfoFile + verbosity distTempDirectory $ \pkgConfFile -> do + -- make absolute since setup changes dir + pkgConfFile' <- canonicalizePath pkgConfFile + let registerFlags _ = setupHsRegisterFlags + pkg pkgshared + verbosity builddir + pkgConfFile' + setup Cabal.registerCommand registerFlags [] + + +-- helper +annotateFailure :: (String -> BuildFailure) -> IO a -> IO a +annotateFailure annotate action = + action `catches` + [ Handler $ \ioe -> handler (ioe :: IOException) + , Handler $ \exit -> handler (exit :: ExitCode) + ] + where + handler :: Exception e => e -> IO a + handler = throwIO . annotate +#if MIN_VERSION_base(4,8,0) + . displayException +#else + . 
show +#endif + + +withTempInstalledPackageInfoFile :: Verbosity -> FilePath + -> (FilePath -> IO ()) + -> IO InstalledPackageInfo +withTempInstalledPackageInfoFile verbosity tempdir action = + withTempFile tempdir "package-registration-" $ \pkgConfFile hnd -> do + hClose hnd + action pkgConfFile + + (warns, ipkg) <- withUTF8FileContents pkgConfFile $ \pkgConfStr -> + case Installed.parseInstalledPackageInfo pkgConfStr of + Installed.ParseFailed perror -> pkgConfParseFailed perror + Installed.ParseOk warns ipkg -> return (warns, ipkg) + + unless (null warns) $ + warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) + + return ipkg + where + pkgConfParseFailed :: Installed.PError -> IO a + pkgConfParseFailed perror = + die $ "Couldn't parse the output of 'setup register --gen-pkg-config':" + ++ show perror + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Legacy.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Legacy.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Legacy.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Legacy.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,1259 @@ +{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveGeneric #-} + +-- | Project configuration, implementation in terms of legacy types. +-- +module Distribution.Client.ProjectConfig.Legacy ( + + -- * Project config in terms of legacy types + LegacyProjectConfig, + parseLegacyProjectConfig, + showLegacyProjectConfig, + + -- * Conversion to and from legacy config types + commandLineFlagsToProjectConfig, + convertLegacyProjectConfig, + convertLegacyGlobalConfig, + convertToLegacyProjectConfig, + + -- * Internals, just for tests + parsePackageLocationTokenQ, + renderPackageLocationToken, + ) where + +import Distribution.Client.ProjectConfig.Types +import Distribution.Client.Types + ( RemoteRepo(..), emptyRemoteRepo ) +import Distribution.Client.Dependency.Types + ( ConstraintSource(..) ) +import Distribution.Client.Config + ( SavedConfig(..), remoteRepoFields ) + +import Distribution.Package +import Distribution.PackageDescription + ( SourceRepo(..), RepoKind(..) ) +import Distribution.PackageDescription.Parse + ( sourceRepoFieldDescrs ) +import Distribution.Simple.Compiler + ( OptimisationLevel(..), DebugInfoLevel(..) ) +import Distribution.Simple.Setup + ( Flag(Flag), toFlag, fromFlagOrDefault + , ConfigFlags(..), configureOptions + , HaddockFlags(..), haddockOptions, defaultHaddockFlags + , programConfigurationPaths', splitArgs + , AllowNewer(..) 
) +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand + , ConfigExFlags(..), configureExOptions, defaultConfigExFlags + , InstallFlags(..), installOptions, defaultInstallFlags ) +import Distribution.Simple.Program + ( programName, knownPrograms ) +import Distribution.Simple.Program.Db + ( ProgramDb, defaultProgramDb ) +import Distribution.Client.Targets + ( dispFlagAssignment, parseFlagAssignment ) +import Distribution.Simple.Utils + ( lowercase ) +import Distribution.Utils.NubList + ( toNubList, fromNubList, overNubList ) +import Distribution.Simple.LocalBuildInfo + ( toPathTemplate, fromPathTemplate ) + +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( ReadP, (+++), (<++) ) +import qualified Text.Read as Read +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint + ( Doc, ($+$) ) +import qualified Distribution.ParseUtils as ParseUtils (field) +import Distribution.ParseUtils + ( ParseResult(..), PError(..), syntaxError, PWarning(..), warning + , simpleField, commaNewLineListField + , showToken ) +import Distribution.Client.ParseUtils +import Distribution.Simple.Command + ( CommandUI(commandOptions), ShowOrParseArgs(..) + , OptionField, option, reqArg' ) + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import qualified Data.Map as Map +import Data.Char (isSpace) +import Distribution.Compat.Semigroup +import GHC.Generics (Generic) + +------------------------------------------------------------------ +-- Representing the project config file in terms of legacy types +-- + +-- | We already have parsers\/pretty-printers for almost all the fields in the +-- project config file, but they're in terms of the types used for the command +-- line flags for Setup.hs or cabal commands. We don't want to redefine them +-- all, at least not yet so for the moment we use the parsers at the old types +-- and use conversion functions. +-- +-- Ultimately if\/when this project-based approach becomes the default then we +-- can redefine the parsers directly for the new types. 
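The Legacy*Config records that follow are combined field by field: every field can be empty, and appending configuration from a later source either overrides or extends what an earlier source set. A tiny stand-alone sketch of that pattern, using Last from base as a stand-in for Cabal's Flag type; the real instances below are derived generically (gmempty/gmappend) rather than written out by hand, and the record here is purely illustrative.

import Data.Monoid (Last(..))
import Data.Semigroup (Semigroup(..))   -- redundant on newer GHCs, needed on 8.0

-- A two-field stand-in for one of the Legacy*Config records.
data MiniConfig = MiniConfig
  { cfgCompilerPath :: Last FilePath    -- last definition wins
  , cfgExtraLibDirs :: [FilePath]       -- definitions accumulate
  } deriving Show

instance Semigroup MiniConfig where
  a <> b = MiniConfig
    { cfgCompilerPath = cfgCompilerPath a <> cfgCompilerPath b
    , cfgExtraLibDirs = cfgExtraLibDirs a <> cfgExtraLibDirs b
    }

instance Monoid MiniConfig where
  mempty  = MiniConfig mempty mempty
  mappend = (<>)

main :: IO ()
main = do
  let userWide    = MiniConfig (Last (Just "/usr/bin/ghc")) ["/usr/local/lib"]
      projectFile = MiniConfig (Last Nothing)               ["/opt/foo/lib"]
      commandLine = MiniConfig (Last (Just "/usr/local/bin/ghc-8.0.1")) []
  -- Later sources are appended on the right, so the command line wins for the
  -- compiler path while the extra lib dirs from both sources are kept.
  print (userWide <> projectFile <> commandLine)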
+-- +data LegacyProjectConfig = LegacyProjectConfig { + legacyPackages :: [String], + legacyPackagesOptional :: [String], + legacyPackagesRepo :: [SourceRepo], + legacyPackagesNamed :: [Dependency], + + legacySharedConfig :: LegacySharedConfig, + legacyLocalConfig :: LegacyPackageConfig, + legacySpecificConfig :: MapMappend PackageName LegacyPackageConfig + } deriving Generic + +instance Monoid LegacyProjectConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacyProjectConfig where + (<>) = gmappend + +data LegacyPackageConfig = LegacyPackageConfig { + legacyConfigureFlags :: ConfigFlags, + legacyInstallPkgFlags :: InstallFlags, + legacyHaddockFlags :: HaddockFlags + } deriving Generic + +instance Monoid LegacyPackageConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacyPackageConfig where + (<>) = gmappend + +data LegacySharedConfig = LegacySharedConfig { + legacyGlobalFlags :: GlobalFlags, + legacyConfigureShFlags :: ConfigFlags, + legacyConfigureExFlags :: ConfigExFlags, + legacyInstallFlags :: InstallFlags + } deriving Generic + +instance Monoid LegacySharedConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacySharedConfig where + (<>) = gmappend + + +------------------------------------------------------------------ +-- Converting from and to the legacy types +-- + +-- | Convert configuration from the @cabal configure@ or @cabal build@ command +-- line into a 'ProjectConfig' value that can combined with configuration from +-- other sources. +-- +-- At the moment this uses the legacy command line flag types. See +-- 'LegacyProjectConfig' for an explanation. +-- +commandLineFlagsToProjectConfig :: GlobalFlags + -> ConfigFlags -> ConfigExFlags + -> InstallFlags -> HaddockFlags + -> ProjectConfig +commandLineFlagsToProjectConfig globalFlags configFlags configExFlags + installFlags haddockFlags = + mempty { + projectConfigBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configFlags + installFlags haddockFlags, + projectConfigShared = convertLegacyAllPackageFlags + globalFlags configFlags + configExFlags installFlags, + projectConfigLocalPackages = convertLegacyPerPackageFlags + configFlags installFlags haddockFlags + } + + +-- | Convert from the types currently used for the user-wide @~/.cabal/config@ +-- file into the 'ProjectConfig' type. +-- +-- Only a subset of the 'ProjectConfig' can be represented in the user-wide +-- config. In particular it does not include packages that are in the project, +-- and it also doesn't support package-specific configuration (only +-- configuration that applies to all packages). +-- +convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig +convertLegacyGlobalConfig + SavedConfig { + savedGlobalFlags = globalFlags, + savedInstallFlags = installFlags, + savedConfigureFlags = configFlags, + savedConfigureExFlags = configExFlags, + savedUserInstallDirs = _, + savedGlobalInstallDirs = _, + savedUploadFlags = _, + savedReportFlags = _, + savedHaddockFlags = haddockFlags + } = + mempty { + projectConfigShared = configAllPackages, + projectConfigLocalPackages = configLocalPackages, + projectConfigBuildOnly = configBuildOnly + } + where + --TODO: [code cleanup] eliminate use of default*Flags here and specify the + -- defaults in the various resolve functions in terms of the new types. 
+ configExFlags' = defaultConfigExFlags <> configExFlags + installFlags' = defaultInstallFlags <> installFlags + haddockFlags' = defaultHaddockFlags <> haddockFlags + + configLocalPackages = convertLegacyPerPackageFlags + configFlags installFlags' haddockFlags' + configAllPackages = convertLegacyAllPackageFlags + globalFlags configFlags + configExFlags' installFlags' + configBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configFlags + installFlags' haddockFlags' + + +-- | Convert the project config from the legacy types to the 'ProjectConfig' +-- and associated types. See 'LegacyProjectConfig' for an explanation of the +-- approach. +-- +convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig +convertLegacyProjectConfig + LegacyProjectConfig { + legacyPackages, + legacyPackagesOptional, + legacyPackagesRepo, + legacyPackagesNamed, + legacySharedConfig = LegacySharedConfig globalFlags configShFlags + configExFlags installSharedFlags, + legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags + haddockFlags, + legacySpecificConfig + } = + + ProjectConfig { + projectPackages = legacyPackages, + projectPackagesOptional = legacyPackagesOptional, + projectPackagesRepo = legacyPackagesRepo, + projectPackagesNamed = legacyPackagesNamed, + + projectConfigBuildOnly = configBuildOnly, + projectConfigShared = configAllPackages, + projectConfigLocalPackages = configLocalPackages, + projectConfigSpecificPackage = fmap perPackage legacySpecificConfig + } + where + configLocalPackages = convertLegacyPerPackageFlags + configFlags installPerPkgFlags haddockFlags + configAllPackages = convertLegacyAllPackageFlags + globalFlags (configFlags <> configShFlags) + configExFlags installSharedFlags + configBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configShFlags + installSharedFlags haddockFlags + + perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags + perPkgHaddockFlags) = + convertLegacyPerPackageFlags + perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags + + +-- | Helper used by other conversion functions that returns the +-- 'ProjectConfigShared' subset of the 'ProjectConfig'. +-- +convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags + -> ConfigExFlags -> InstallFlags + -> ProjectConfigShared +convertLegacyAllPackageFlags globalFlags configFlags + configExFlags installFlags = + ProjectConfigShared{..} + where + GlobalFlags { + globalConfigFile = _, -- TODO: [required feature] + globalSandboxConfigFile = _, -- ?? 
+ globalRemoteRepos = projectConfigRemoteRepos, + globalLocalRepos = projectConfigLocalRepos + } = globalFlags + + ConfigFlags { + configHcFlavor = projectConfigHcFlavor, + configHcPath = projectConfigHcPath, + configHcPkg = projectConfigHcPkg, + --configInstallDirs = projectConfigInstallDirs, + --configUserInstall = projectConfigUserInstall, + --configPackageDBs = projectConfigPackageDBs, + configAllowNewer = projectConfigAllowNewer + } = configFlags + + ConfigExFlags { + configCabalVersion = projectConfigCabalVersion, + configExConstraints = projectConfigConstraints, + configPreferences = projectConfigPreferences, + configSolver = projectConfigSolver + } = configExFlags + + InstallFlags { + installHaddockIndex = projectConfigHaddockIndex, + --installReinstall = projectConfigReinstall, + --installAvoidReinstalls = projectConfigAvoidReinstalls, + --installOverrideReinstall = projectConfigOverrideReinstall, + installMaxBackjumps = projectConfigMaxBackjumps, + --installUpgradeDeps = projectConfigUpgradeDeps, + installReorderGoals = projectConfigReorderGoals, + --installIndependentGoals = projectConfigIndependentGoals, + --installShadowPkgs = projectConfigShadowPkgs, + installStrongFlags = projectConfigStrongFlags + } = installFlags + + + +-- | Helper used by other conversion functions that returns the +-- 'PackageConfig' subset of the 'ProjectConfig'. +-- +convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags + -> PackageConfig +convertLegacyPerPackageFlags configFlags installFlags haddockFlags = + PackageConfig{..} + where + ConfigFlags { + configProgramPaths, + configProgramArgs, + configProgramPathExtra = packageConfigProgramPathExtra, + configVanillaLib = packageConfigVanillaLib, + configProfLib = packageConfigProfLib, + configSharedLib = packageConfigSharedLib, + configDynExe = packageConfigDynExe, + configProfExe = packageConfigProfExe, + configProf = packageConfigProf, + configProfDetail = packageConfigProfDetail, + configProfLibDetail = packageConfigProfLibDetail, + configConfigureArgs = packageConfigConfigureArgs, + configOptimization = packageConfigOptimization, + configProgPrefix = packageConfigProgPrefix, + configProgSuffix = packageConfigProgSuffix, + configGHCiLib = packageConfigGHCiLib, + configSplitObjs = packageConfigSplitObjs, + configStripExes = packageConfigStripExes, + configStripLibs = packageConfigStripLibs, + configExtraLibDirs = packageConfigExtraLibDirs, + configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, + configExtraIncludeDirs = packageConfigExtraIncludeDirs, + configConfigurationsFlags = packageConfigFlagAssignment, + configTests = packageConfigTests, + configBenchmarks = packageConfigBenchmarks, + configCoverage = coverage, + configLibCoverage = libcoverage, --deprecated + configDebugInfo = packageConfigDebugInfo, + configRelocatable = packageConfigRelocatable + } = configFlags + packageConfigProgramPaths = MapLast (Map.fromList configProgramPaths) + packageConfigProgramArgs = MapMappend (Map.fromList configProgramArgs) + + packageConfigCoverage = coverage <> libcoverage + --TODO: defer this merging to the resolve phase + + InstallFlags { + installDocumentation = packageConfigDocumentation, + installRunTests = packageConfigRunTests + } = installFlags + + HaddockFlags { + haddockHoogle = packageConfigHaddockHoogle, + haddockHtml = packageConfigHaddockHtml, + haddockHtmlLocation = packageConfigHaddockHtmlLocation, + haddockExecutables = packageConfigHaddockExecutables, + haddockTestSuites = packageConfigHaddockTestSuites, + 
haddockBenchmarks = packageConfigHaddockBenchmarks, + haddockInternal = packageConfigHaddockInternal, + haddockCss = packageConfigHaddockCss, + haddockHscolour = packageConfigHaddockHscolour, + haddockHscolourCss = packageConfigHaddockHscolourCss, + haddockContents = packageConfigHaddockContents + } = haddockFlags + + + +-- | Helper used by other conversion functions that returns the +-- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'. +-- +convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags + -> InstallFlags -> HaddockFlags + -> ProjectConfigBuildOnly +convertLegacyBuildOnlyFlags globalFlags configFlags + installFlags haddockFlags = + ProjectConfigBuildOnly{..} + where + GlobalFlags { + globalCacheDir = projectConfigCacheDir, + globalLogsDir = projectConfigLogsDir, + globalWorldFile = projectConfigWorldFile, + globalHttpTransport = projectConfigHttpTransport, + globalIgnoreExpiry = projectConfigIgnoreExpiry + } = globalFlags + + ConfigFlags { + configVerbosity = projectConfigVerbosity + } = configFlags + + InstallFlags { + installDryRun = projectConfigDryRun, + installOnly = _, + installOnlyDeps = projectConfigOnlyDeps, + installRootCmd = projectConfigRootCmd, + installSummaryFile = projectConfigSummaryFile, + installLogFile = projectConfigLogFile, + installBuildReports = projectConfigBuildReports, + installReportPlanningFailure = projectConfigReportPlanningFailure, + installSymlinkBinDir = projectConfigSymlinkBinDir, + installOneShot = projectConfigOneShot, + installNumJobs = projectConfigNumJobs, + installOfflineMode = projectConfigOfflineMode + } = installFlags + + HaddockFlags { + haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere + } = haddockFlags + + +convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig +convertToLegacyProjectConfig + projectConfig@ProjectConfig { + projectPackages, + projectPackagesOptional, + projectPackagesRepo, + projectPackagesNamed, + projectConfigLocalPackages, + projectConfigSpecificPackage + } = + LegacyProjectConfig { + legacyPackages = projectPackages, + legacyPackagesOptional = projectPackagesOptional, + legacyPackagesRepo = projectPackagesRepo, + legacyPackagesNamed = projectPackagesNamed, + legacySharedConfig = convertToLegacySharedConfig projectConfig, + legacyLocalConfig = convertToLegacyAllPackageConfig projectConfig + <> convertToLegacyPerPackageConfig + projectConfigLocalPackages, + legacySpecificConfig = fmap convertToLegacyPerPackageConfig + projectConfigSpecificPackage + } + +convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig +convertToLegacySharedConfig + ProjectConfig { + projectConfigBuildOnly = ProjectConfigBuildOnly {..}, + projectConfigShared = ProjectConfigShared {..} + } = + + LegacySharedConfig { + legacyGlobalFlags = globalFlags, + legacyConfigureShFlags = configFlags, + legacyConfigureExFlags = configExFlags, + legacyInstallFlags = installFlags + } + where + globalFlags = GlobalFlags { + globalVersion = mempty, + globalNumericVersion = mempty, + globalConfigFile = mempty, + globalSandboxConfigFile = mempty, + globalConstraintsFile = mempty, + globalRemoteRepos = projectConfigRemoteRepos, + globalCacheDir = projectConfigCacheDir, + globalLocalRepos = projectConfigLocalRepos, + globalLogsDir = projectConfigLogsDir, + globalWorldFile = projectConfigWorldFile, + globalRequireSandbox = mempty, + globalIgnoreSandbox = mempty, + globalIgnoreExpiry = projectConfigIgnoreExpiry, + globalHttpTransport = projectConfigHttpTransport + } + + configFlags = mempty { + 
configVerbosity = projectConfigVerbosity, + configAllowNewer = projectConfigAllowNewer + } + + configExFlags = ConfigExFlags { + configCabalVersion = projectConfigCabalVersion, + configExConstraints = projectConfigConstraints, + configPreferences = projectConfigPreferences, + configSolver = projectConfigSolver + } + + installFlags = InstallFlags { + installDocumentation = mempty, + installHaddockIndex = projectConfigHaddockIndex, + installDryRun = projectConfigDryRun, + installReinstall = mempty, --projectConfigReinstall, + installAvoidReinstalls = mempty, --projectConfigAvoidReinstalls, + installOverrideReinstall = mempty, --projectConfigOverrideReinstall, + installMaxBackjumps = projectConfigMaxBackjumps, + installUpgradeDeps = mempty, --projectConfigUpgradeDeps, + installReorderGoals = projectConfigReorderGoals, + installIndependentGoals = mempty, --projectConfigIndependentGoals, + installShadowPkgs = mempty, --projectConfigShadowPkgs, + installStrongFlags = projectConfigStrongFlags, + installOnly = mempty, + installOnlyDeps = projectConfigOnlyDeps, + installRootCmd = projectConfigRootCmd, + installSummaryFile = projectConfigSummaryFile, + installLogFile = projectConfigLogFile, + installBuildReports = projectConfigBuildReports, + installReportPlanningFailure = projectConfigReportPlanningFailure, + installSymlinkBinDir = projectConfigSymlinkBinDir, + installOneShot = projectConfigOneShot, + installNumJobs = projectConfigNumJobs, + installRunTests = mempty, + installOfflineMode = projectConfigOfflineMode + } + + +convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig +convertToLegacyAllPackageConfig + ProjectConfig { + projectConfigBuildOnly = ProjectConfigBuildOnly {..}, + projectConfigShared = ProjectConfigShared {..} + } = + + LegacyPackageConfig { + legacyConfigureFlags = configFlags, + legacyInstallPkgFlags= mempty, + legacyHaddockFlags = haddockFlags + } + where + configFlags = ConfigFlags { + configPrograms_ = mempty, + configProgramPaths = mempty, + configProgramArgs = mempty, + configProgramPathExtra = mempty, + configHcFlavor = projectConfigHcFlavor, + configHcPath = projectConfigHcPath, + configHcPkg = projectConfigHcPkg, + configVanillaLib = mempty, + configProfLib = mempty, + configSharedLib = mempty, + configDynExe = mempty, + configProfExe = mempty, + configProf = mempty, + configProfDetail = mempty, + configProfLibDetail = mempty, + configConfigureArgs = mempty, + configOptimization = mempty, + configProgPrefix = mempty, + configProgSuffix = mempty, + configInstallDirs = mempty, + configScratchDir = mempty, + configDistPref = mempty, + configVerbosity = mempty, + configUserInstall = mempty, --projectConfigUserInstall, + configPackageDBs = mempty, --projectConfigPackageDBs, + configGHCiLib = mempty, + configSplitObjs = mempty, + configStripExes = mempty, + configStripLibs = mempty, + configExtraLibDirs = mempty, + configExtraFrameworkDirs = mempty, + configConstraints = mempty, + configDependencies = mempty, + configExtraIncludeDirs = mempty, + configIPID = mempty, + configConfigurationsFlags = mempty, + configTests = mempty, + configCoverage = mempty, --TODO: don't merge + configLibCoverage = mempty, --TODO: don't merge + configExactConfiguration = mempty, + configBenchmarks = mempty, + configFlagError = mempty, --TODO: ??? 
+ configRelocatable = mempty, + configDebugInfo = mempty, + configAllowNewer = mempty + } + + haddockFlags = mempty { + haddockKeepTempFiles = projectConfigKeepTempFiles + } + + +convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig +convertToLegacyPerPackageConfig PackageConfig {..} = + LegacyPackageConfig { + legacyConfigureFlags = configFlags, + legacyInstallPkgFlags = installFlags, + legacyHaddockFlags = haddockFlags + } + where + configFlags = ConfigFlags { + configPrograms_ = configPrograms_ mempty, + configProgramPaths = Map.toList (getMapLast packageConfigProgramPaths), + configProgramArgs = Map.toList (getMapMappend packageConfigProgramArgs), + configProgramPathExtra = packageConfigProgramPathExtra, + configHcFlavor = mempty, + configHcPath = mempty, + configHcPkg = mempty, + configVanillaLib = packageConfigVanillaLib, + configProfLib = packageConfigProfLib, + configSharedLib = packageConfigSharedLib, + configDynExe = packageConfigDynExe, + configProfExe = packageConfigProfExe, + configProf = packageConfigProf, + configProfDetail = packageConfigProfDetail, + configProfLibDetail = packageConfigProfLibDetail, + configConfigureArgs = packageConfigConfigureArgs, + configOptimization = packageConfigOptimization, + configProgPrefix = packageConfigProgPrefix, + configProgSuffix = packageConfigProgSuffix, + configInstallDirs = mempty, + configScratchDir = mempty, + configDistPref = mempty, + configVerbosity = mempty, + configUserInstall = mempty, + configPackageDBs = mempty, + configGHCiLib = packageConfigGHCiLib, + configSplitObjs = packageConfigSplitObjs, + configStripExes = packageConfigStripExes, + configStripLibs = packageConfigStripLibs, + configExtraLibDirs = packageConfigExtraLibDirs, + configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, + configConstraints = mempty, + configDependencies = mempty, + configExtraIncludeDirs = packageConfigExtraIncludeDirs, + configIPID = mempty, + configConfigurationsFlags = packageConfigFlagAssignment, + configTests = packageConfigTests, + configCoverage = packageConfigCoverage, --TODO: don't merge + configLibCoverage = packageConfigCoverage, --TODO: don't merge + configExactConfiguration = mempty, + configBenchmarks = packageConfigBenchmarks, + configFlagError = mempty, --TODO: ??? 
+ configRelocatable = packageConfigRelocatable, + configDebugInfo = packageConfigDebugInfo, + configAllowNewer = mempty + } + + installFlags = mempty { + installDocumentation = packageConfigDocumentation, + installRunTests = packageConfigRunTests + } + + haddockFlags = HaddockFlags { + haddockProgramPaths = mempty, + haddockProgramArgs = mempty, + haddockHoogle = packageConfigHaddockHoogle, + haddockHtml = packageConfigHaddockHtml, + haddockHtmlLocation = packageConfigHaddockHtmlLocation, + haddockForHackage = mempty, --TODO: added recently + haddockExecutables = packageConfigHaddockExecutables, + haddockTestSuites = packageConfigHaddockTestSuites, + haddockBenchmarks = packageConfigHaddockBenchmarks, + haddockInternal = packageConfigHaddockInternal, + haddockCss = packageConfigHaddockCss, + haddockHscolour = packageConfigHaddockHscolour, + haddockHscolourCss = packageConfigHaddockHscolourCss, + haddockContents = packageConfigHaddockContents, + haddockDistPref = mempty, + haddockKeepTempFiles = mempty, + haddockVerbosity = mempty + } + + +------------------------------------------------ +-- Parsing and showing the project config file +-- + +parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig +parseLegacyProjectConfig = + parseConfig legacyProjectConfigFieldDescrs + legacyPackageConfigSectionDescrs + mempty + +showLegacyProjectConfig :: LegacyProjectConfig -> String +showLegacyProjectConfig config = + Disp.render $ + showConfig legacyProjectConfigFieldDescrs + legacyPackageConfigSectionDescrs + config + $+$ + Disp.text "" + + +legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig] +legacyProjectConfigFieldDescrs = + + [ newLineListField "packages" + (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ + legacyPackages + (\v flags -> flags { legacyPackages = v }) + , newLineListField "optional-packages" + (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ + legacyPackagesOptional + (\v flags -> flags { legacyPackagesOptional = v }) + , commaNewLineListField "extra-packages" + disp parse + legacyPackagesNamed + (\v flags -> flags { legacyPackagesNamed = v }) + ] + + ++ map (liftField + legacySharedConfig + (\flags conf -> conf { legacySharedConfig = flags })) + legacySharedConfigFieldDescrs + + ++ map (liftField + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags })) + legacyPackageConfigFieldDescrs + +-- | This is a bit tricky since it has to cover globs which have embedded @,@ +-- chars. But we don't just want to parse strictly as a glob since we want to +-- allow http urls which don't parse as globs, and possibly some +-- system-dependent file paths. So we parse fairly liberally as a token, but +-- we allow @,@ inside matched @{}@ braces. 
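Before the real definition below, here is a stripped-down, stand-alone illustration of the same idea using Text.ParserCombinators.ReadP from base (the module below uses Distribution.Compat.ReadP, whose ReadP carries an extra type parameter). This toy parser only allows commas inside a single level of braces and skips the Haskell-string case, both of which the real parser handles.

import Text.ParserCombinators.ReadP
import Data.Char (isSpace)

-- One "package location" token: chunks of ordinary characters and/or
-- {...} groups, where commas are only allowed inside the braces.
pLocationToken :: ReadP String
pLocationToken = fmap concat (many1 (plain +++ braced))
  where
    plain  = munch1 (\c -> not (isSpace c) && c `notElem` "{},")
    braced = do
      _     <- char '{'
      inner <- munch (\c -> c /= '{' && c /= '}')
      _     <- char '}'
      return ('{' : inner ++ "}")

main :: IO ()
main =
  mapM_ (print . readP_to_S (pLocationToken <* eof))
    [ "pkgs/*.cabal"
    , "https://example.invalid/foo-1.0.tar.gz"
    , "{foo,bar}/*.cabal"
    ]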
+-- +parsePackageLocationTokenQ :: ReadP r String +parsePackageLocationTokenQ = parseHaskellString + Parse.<++ parsePackageLocationToken + where + parsePackageLocationToken :: ReadP r String + parsePackageLocationToken = fmap fst (Parse.gather outerTerm) + where + outerTerm = alternateEither1 outerToken (braces innerTerm) + innerTerm = alternateEither innerToken (braces innerTerm) + outerToken = Parse.munch1 outerChar >> return () + innerToken = Parse.munch1 innerChar >> return () + outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',') + innerChar c = not (isSpace c || c == '{' || c == '}') + braces = Parse.between (Parse.char '{') (Parse.char '}') + + alternateEither, alternateEither1, + alternatePQs, alternate1PQs, alternateQsP, alternate1QsP + :: ReadP r () -> ReadP r () -> ReadP r () + + alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p + alternateEither p q = alternateEither1 p q +++ return () + alternate1PQs p q = p >> alternateQsP q p + alternatePQs p q = alternate1PQs p q +++ return () + alternate1QsP q p = Parse.many1 q >> alternatePQs p q + alternateQsP q p = alternate1QsP q p +++ return () + +renderPackageLocationToken :: String -> String +renderPackageLocationToken s | needsQuoting = show s + | otherwise = s + where + needsQuoting = not (ok 0 s) + || s == "." -- . on its own on a line has special meaning + || take 2 s == "--" -- on its own line is comment syntax + --TODO: [code cleanup] these "." and "--" escaping issues + -- ought to be dealt with systematically in ParseUtils. + ok :: Int -> String -> Bool + ok n [] = n == 0 + ok _ ('"':_) = False + ok n ('{':cs) = ok (n+1) cs + ok n ('}':cs) = ok (n-1) cs + ok n (',':cs) = (n > 0) && ok n cs + ok _ (c:_) + | isSpace c = False + ok n (_ :cs) = ok n cs + + +legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig] +legacySharedConfigFieldDescrs = + + ( liftFields + legacyGlobalFlags + (\flags conf -> conf { legacyGlobalFlags = flags }) + . addFields + [ newLineListField "local-repo" + showTokenQ parseTokenQ + (fromNubList . globalLocalRepos) + (\v conf -> conf { globalLocalRepos = toNubList v }) + ] + . filterFields + [ "remote-repo-cache" + , "logs-dir", "world-file", "ignore-expiry", "http-transport" + ] + . commandOptionsToFields + ) (commandOptions (globalCommand []) ParseArgs) + ++ + ( liftFields + legacyConfigureShFlags + (\flags conf -> conf { legacyConfigureShFlags = flags }) + . addFields + [ simpleField "allow-newer" + (maybe mempty dispAllowNewer) (fmap Just parseAllowNewer) + configAllowNewer (\v conf -> conf { configAllowNewer = v }) + ] + . filterFields ["verbose"] + . commandOptionsToFields + ) (configureOptions ParseArgs) + ++ + ( liftFields + legacyConfigureExFlags + (\flags conf -> conf { legacyConfigureExFlags = flags }) + . addFields + [ commaNewLineListField "constraints" + (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse) + configExConstraints (\v conf -> conf { configExConstraints = v }) + + , commaNewLineListField "preferences" + disp parse + configPreferences (\v conf -> conf { configPreferences = v }) + ] + . filterFields + [ "cabal-lib-version", "solver" + -- not "constraint" or "preference", we use our own plural ones above + ] + . commandOptionsToFields + ) (configureExOptions ParseArgs constraintSrc) + ++ + ( liftFields + legacyInstallFlags + (\flags conf -> conf { legacyInstallFlags = flags }) + . addFields + [ newLineListField "build-summary" + (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) + (fromNubList . 
installSummaryFile) + (\v conf -> conf { installSummaryFile = toNubList v }) + ] + . filterFields + [ "doc-index-file" + , "root-cmd", "symlink-bindir" + , "build-log" + , "remote-build-reporting", "report-planning-failure" + , "one-shot", "jobs", "offline" + -- solver flags: + , "max-backjumps", "reorder-goals", "strong-flags" + ] + . commandOptionsToFields + ) (installOptions ParseArgs) + where + constraintSrc = ConstraintSourceProjectConfig "TODO" + +parseAllowNewer :: ReadP r AllowNewer +parseAllowNewer = + ((const AllowNewerNone <$> (Parse.string "none" +++ Parse.string "None")) + +++ (const AllowNewerAll <$> (Parse.string "all" +++ Parse.string "All"))) + <++ ( AllowNewerSome <$> parseOptCommaList parse) + +dispAllowNewer :: AllowNewer -> Doc +dispAllowNewer AllowNewerNone = Disp.text "None" +dispAllowNewer (AllowNewerSome pkgs) = Disp.fsep . Disp.punctuate Disp.comma + . map disp $ pkgs +dispAllowNewer AllowNewerAll = Disp.text "All" + + +legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig] +legacyPackageConfigFieldDescrs = + ( liftFields + legacyConfigureFlags + (\flags conf -> conf { legacyConfigureFlags = flags }) + . addFields + [ newLineListField "extra-include-dirs" + showTokenQ parseTokenQ + configExtraIncludeDirs + (\v conf -> conf { configExtraIncludeDirs = v }) + , newLineListField "extra-lib-dirs" + showTokenQ parseTokenQ + configExtraLibDirs + (\v conf -> conf { configExtraLibDirs = v }) + , newLineListField "extra-framework-dirs" + showTokenQ parseTokenQ + configExtraFrameworkDirs + (\v conf -> conf { configExtraFrameworkDirs = v }) + , newLineListField "extra-prog-path" + showTokenQ parseTokenQ + (fromNubList . configProgramPathExtra) + (\v conf -> conf { configProgramPathExtra = toNubList v }) + , newLineListField "configure-options" + showTokenQ parseTokenQ + configConfigureArgs + (\v conf -> conf { configConfigureArgs = v }) + , simpleField "flags" + dispFlagAssignment parseFlagAssignment + configConfigurationsFlags + (\v conf -> conf { configConfigurationsFlags = v }) + ] + . filterFields + [ "compiler", "with-compiler", "with-hc-pkg" + , "program-prefix", "program-suffix" + , "library-vanilla", "library-profiling" + , "shared", "executable-dynamic" + , "profiling", "executable-profiling" + , "profiling-detail", "library-profiling-detail" + , "optimization", "debug-info", "library-for-ghci", "split-objs" + , "executable-stripping", "library-stripping" + , "tests", "benchmarks" + , "coverage", "library-coverage" + , "relocatable" + -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs" + -- or "extra-prog-path". We use corrected ones above that parse + -- as list fields. + ] + . commandOptionsToFields + ) (configureOptions ParseArgs) + ++ + liftFields + legacyConfigureFlags + (\flags conf -> conf { legacyConfigureFlags = flags }) + [ overrideFieldCompiler + , overrideFieldOptimization + , overrideFieldDebugInfo + ] + ++ + ( liftFields + legacyInstallPkgFlags + (\flags conf -> conf { legacyInstallPkgFlags = flags }) + . filterFields + [ "documentation", "run-tests" + ] + . commandOptionsToFields + ) (installOptions ParseArgs) + ++ + ( liftFields + legacyHaddockFlags + (\flags conf -> conf { legacyHaddockFlags = flags }) + . mapFieldNames + ("haddock-"++) + . filterFields + [ "hoogle", "html", "html-location" + , "executables", "tests", "benchmarks", "all", "internal", "css" + , "hyperlink-source", "hscolour-css" + , "contents-location", "keep-temp-files" + ] + . 
commandOptionsToFields + ) (haddockOptions ParseArgs) + + where + overrideFieldCompiler = + simpleField "compiler" + (fromFlagOrDefault Disp.empty . fmap disp) + (Parse.option mempty (fmap toFlag parse)) + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + + + -- TODO: [code cleanup] The following is a hack. The "optimization" and + -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that. + -- Instead of a hand-written parser and printer, we should handle this case + -- properly in the library. + + overrideFieldOptimization = + liftField configOptimization + (\v flags -> flags { configOptimization = v }) $ + let name = "optimization" in + FieldDescr name + (\f -> case f of + Flag NoOptimisation -> Disp.text "False" + Flag NormalOptimisation -> Disp.text "True" + Flag MaximumOptimisation -> Disp.text "2" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoOptimisation) + | str == "True" -> ParseOk [] (Flag NormalOptimisation) + | str == "0" -> ParseOk [] (Flag NoOptimisation) + | str == "1" -> ParseOk [] (Flag NormalOptimisation) + | str == "2" -> ParseOk [] (Flag MaximumOptimisation) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") + + overrideFieldDebugInfo = + liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ + let name = "debug-info" in + FieldDescr name + (\f -> case f of + Flag NoDebugInfo -> Disp.text "False" + Flag MinimalDebugInfo -> Disp.text "1" + Flag NormalDebugInfo -> Disp.text "True" + Flag MaximalDebugInfo -> Disp.text "3" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) + | str == "True" -> ParseOk [] (Flag NormalDebugInfo) + | str == "0" -> ParseOk [] (Flag NoDebugInfo) + | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) + | str == "2" -> ParseOk [] (Flag NormalDebugInfo) + | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") + + +legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig] +legacyPackageConfigSectionDescrs = + [ packageRepoSectionDescr + , packageSpecificOptionsSectionDescr + , liftSection + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags }) + programOptionsSectionDescr + , liftSection + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags }) + programLocationsSectionDescr + , liftSection + legacySharedConfig + (\flags conf -> conf { legacySharedConfig = flags }) $ + liftSection + legacyGlobalFlags + (\flags conf -> conf { legacyGlobalFlags = flags }) + remoteRepoSectionDescr + ] + +packageRepoSectionDescr :: SectionDescr LegacyProjectConfig +packageRepoSectionDescr = + SectionDescr { + sectionName = "source-repository-package", + sectionFields = sourceRepoFieldDescrs, + sectionSubsections = [], + sectionGet = map (\x->("", x)) + . 
legacyPackagesRepo, + sectionSet = + \lineno unused pkgrepo projconf -> do + unless (null unused) $ + syntaxError lineno "the section 'source-repository-package' takes no arguments" + return projconf { + legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo] + }, + sectionEmpty = SourceRepo { + repoKind = RepoThis, -- hopefully unused + repoType = Nothing, + repoLocation = Nothing, + repoModule = Nothing, + repoBranch = Nothing, + repoTag = Nothing, + repoSubdir = Nothing + } + } + +packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig +packageSpecificOptionsSectionDescr = + SectionDescr { + sectionName = "package", + sectionFields = legacyPackageConfigFieldDescrs + ++ programOptionsFieldDescrs + (configProgramArgs . legacyConfigureFlags) + (\args pkgconf -> pkgconf { + legacyConfigureFlags = (legacyConfigureFlags pkgconf) { + configProgramArgs = args + } + } + ) + ++ liftFields + legacyConfigureFlags + (\flags pkgconf -> pkgconf { + legacyConfigureFlags = flags + } + ) + programLocationsFieldDescrs, + sectionSubsections = [], + sectionGet = \projconf -> + [ (display pkgname, pkgconf) + | (pkgname, pkgconf) <- + Map.toList . getMapMappend + . legacySpecificConfig $ projconf ], + sectionSet = + \lineno pkgnamestr pkgconf projconf -> do + pkgname <- case simpleParse pkgnamestr of + Just pkgname -> return pkgname + Nothing -> syntaxError lineno $ + "a 'package' section requires a package name " + ++ "as an argument" + return projconf { + legacySpecificConfig = + MapMappend $ + Map.insertWith mappend pkgname pkgconf + (getMapMappend $ legacySpecificConfig projconf) + }, + sectionEmpty = mempty + } + +programOptionsFieldDescrs :: (a -> [(String, [String])]) + -> ([(String, [String])] -> a -> a) + -> [FieldDescr a] +programOptionsFieldDescrs get set = + commandOptionsToFields + $ programConfigurationOptions + defaultProgramDb + ParseArgs get set + +programOptionsSectionDescr :: SectionDescr LegacyPackageConfig +programOptionsSectionDescr = + SectionDescr { + sectionName = "program-options", + sectionFields = programOptionsFieldDescrs + configProgramArgs + (\args conf -> conf { configProgramArgs = args }), + sectionSubsections = [], + sectionGet = (\x->[("", x)]) + . legacyConfigureFlags, + sectionSet = + \lineno unused confflags pkgconf -> do + unless (null unused) $ + syntaxError lineno "the section 'program-options' takes no arguments" + return pkgconf { + legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags + }, + sectionEmpty = mempty + } + +programLocationsFieldDescrs :: [FieldDescr ConfigFlags] +programLocationsFieldDescrs = + commandOptionsToFields + $ programConfigurationPaths' + (++ "-location") + defaultProgramDb + ParseArgs + configProgramPaths + (\paths conf -> conf { configProgramPaths = paths }) + +programLocationsSectionDescr :: SectionDescr LegacyPackageConfig +programLocationsSectionDescr = + SectionDescr { + sectionName = "program-locations", + sectionFields = programLocationsFieldDescrs, + sectionSubsections = [], + sectionGet = (\x->[("", x)]) + . legacyConfigureFlags, + sectionSet = + \lineno unused confflags pkgconf -> do + unless (null unused) $ + syntaxError lineno "the section 'program-locations' takes no arguments" + return pkgconf { + legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags + }, + sectionEmpty = mempty + } + + +-- | For each known program @PROG@ in 'progConf', produce a @PROG-options@ +-- 'OptionField'. 
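The PROG-options fields generated by the definition that follows keep their value as a single string: it is split into individual arguments with splitArgs when parsed and re-joined with quoting when printed (joinsArgs below). A naive stand-alone version of that split/escape pair, handling only double quotes around whole arguments; Cabal's real splitArgs copes with more cases, so treat this purely as an illustration.

import Data.Char (isSpace)

-- Split "-O2 \"-with spaces\" -threaded" into ["-O2","-with spaces","-threaded"].
splitArgsNaive :: String -> [String]
splitArgsNaive = go . dropWhile isSpace
  where
    go []         = []
    go ('"' : cs) = let (arg, rest) = break (== '"') cs
                    in arg : go (dropWhile isSpace (drop 1 rest))
    go cs         = let (arg, rest) = break isSpace cs
                    in arg : go (dropWhile isSpace rest)

-- Inverse direction, as used when pretty-printing a PROG-options field.
joinArgsNaive :: [String] -> String
joinArgsNaive = unwords . map escape
  where
    escape arg | any isSpace arg = "\"" ++ arg ++ "\""
               | otherwise       = arg

main :: IO ()
main = do
  let args = splitArgsNaive "-O2 \"-with spaces\" -threaded"
  print args
  putStrLn (joinArgsNaive args)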
+programConfigurationOptions + :: ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] +programConfigurationOptions progConf showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOptions "PROG"] + ParseArgs -> map (programOptions . programName . fst) + (knownPrograms progConf) + where + programOptions prog = + option "" [prog ++ "-options"] + ("give extra options to " ++ prog) + get set + (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) + (\progArgs -> [ joinsArgs args + | (prog', args) <- progArgs, prog==prog' ])) + + + joinsArgs = unwords . map escape + escape arg | any isSpace arg = "\"" ++ arg ++ "\"" + | otherwise = arg + + +remoteRepoSectionDescr :: SectionDescr GlobalFlags +remoteRepoSectionDescr = + SectionDescr { + sectionName = "repository", + sectionFields = remoteRepoFields, + sectionSubsections = [], + sectionGet = map (\x->(remoteRepoName x, x)) . fromNubList + . globalRemoteRepos, + sectionSet = + \lineno reponame repo0 conf -> do + when (null reponame) $ + syntaxError lineno $ "a 'repository' section requires the " + ++ "repository name as an argument" + let repo = repo0 { remoteRepoName = reponame } + when (remoteRepoKeyThreshold repo + > length (remoteRepoRootKeys repo)) $ + warning $ "'key-threshold' for repository " + ++ show (remoteRepoName repo) + ++ " higher than number of keys" + when (not (null (remoteRepoRootKeys repo)) + && remoteRepoSecure repo /= Just True) $ + warning $ "'root-keys' for repository " + ++ show (remoteRepoName repo) + ++ " non-empty, but 'secure' not set to True." + return conf { + globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf) + }, + sectionEmpty = emptyRemoteRepo "" + } + + +------------------------------- +-- Local field utils +-- + +--TODO: [code cleanup] all these utils should move to Distribution.ParseUtils +-- either augmenting or replacing the ones there + +--TODO: [code cleanup] this is a different definition from listField, like +-- commaNewLineListField it pretty prints on multiple lines +newLineListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +newLineListField = listFieldWithSep Disp.sep + +--TODO: [code cleanup] local copy purely so we can use the fixed version +-- of parseOptCommaList below +listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listFieldWithSep separator name showF readF get set = + liftField get set' $ + ParseUtils.field name showF' (parseOptCommaList readF) + where + set' xs b = set (get b ++ xs) b + showF' = separator . map showF + +--TODO: [code cleanup] local redefinition that should replace the version in +-- D.ParseUtils. This version avoid parse ambiguity for list element parsers +-- that have multiple valid parses of prefixes. +parseOptCommaList :: ReadP r a -> ReadP r [a] +parseOptCommaList p = Parse.sepBy p sep + where + -- The separator must not be empty or it introduces ambiguity + sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces) + +++ (Parse.satisfy isSpace >> Parse.skipSpaces) + +--TODO: [code cleanup] local redefinition that should replace the version in +-- D.ParseUtils called showFilePath. This version escapes "." and "--" which +-- otherwise are special syntax. 
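The point of parseOptCommaList above is that the separator must always consume at least one character; a separator that can match the empty string lets ReadP split the input in many ways once the element parsers themselves accept several prefixes. A small stand-alone demonstration with base's ReadP, where the separator is a comma with optional spaces or at least one space, so a field value parses to exactly one list; the element parser here is a trivial stand-in for the dependency and constraint parsers used in the real fields.

import Text.ParserCombinators.ReadP
import Data.Char (isAlphaNum, isSpace)

-- A trivial element parser standing in for the real field element parsers.
element :: ReadP String
element = munch1 (\c -> isAlphaNum c || c `elem` "-.")

-- A comma with optional surrounding spaces, or at least one space: the
-- separator always consumes input, mirroring the fixed parseOptCommaList.
separator :: ReadP ()
separator = (skipSpaces >> char ',' >> skipSpaces)
        +++ (satisfy isSpace >> skipSpaces)

main :: IO ()
main = print (readP_to_S (sepBy element separator <* eof) "alex, happy c2hs")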
+showTokenQ :: String -> Doc +showTokenQ "" = Disp.empty +showTokenQ x@('-':'-':_) = Disp.text (show x) +showTokenQ x@('.':[]) = Disp.text (show x) +showTokenQ x = showToken x + +-- This is just a copy of parseTokenQ, using the fixed parseHaskellString +parseTokenQ :: ReadP r String +parseTokenQ = parseHaskellString + <++ Parse.munch1 (\x -> not (isSpace x) && x /= ',') + +--TODO: [code cleanup] use this to replace the parseHaskellString in +-- Distribution.ParseUtils. It turns out Read instance for String accepts +-- the ['a', 'b'] syntax, which we do not want. In particular it messes +-- up any token starting with []. +parseHaskellString :: ReadP r String +parseHaskellString = + Parse.readS_to_P $ + Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0 + +-- Handy util +addFields :: [FieldDescr a] + -> ([FieldDescr a] -> [FieldDescr a]) +addFields = (++) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig/Types.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,366 @@ +{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} + +-- | Handling project configuration, types. +-- +module Distribution.Client.ProjectConfig.Types ( + + -- * Types for project config + ProjectConfig(..), + ProjectConfigBuildOnly(..), + ProjectConfigShared(..), + PackageConfig(..), + + -- * Resolving configuration + SolverSettings(..), + BuildTimeSettings(..), + + -- * Extra useful Monoids + MapLast(..), + MapMappend(..), + ) where + +import Distribution.Client.Types + ( RemoteRepo ) +import Distribution.Client.Dependency.Types + ( PreSolver, ConstraintSource ) +import Distribution.Client.Targets + ( UserConstraint ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) + +import Distribution.Package + ( PackageName, PackageId, UnitId, Dependency ) +import Distribution.Version + ( Version ) +import Distribution.System + ( Platform ) +import Distribution.PackageDescription + ( FlagAssignment, SourceRepo(..) ) +import Distribution.Simple.Compiler + ( Compiler, CompilerFlavor + , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) +import Distribution.Simple.Setup + ( Flag, AllowNewer(..) ) +import Distribution.Simple.InstallDirs + ( PathTemplate ) +import Distribution.Utils.NubList + ( NubList ) +import Distribution.Verbosity + ( Verbosity ) + +import Data.Map (Map) +import qualified Data.Map as Map +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup +import GHC.Generics (Generic) + + +------------------------------- +-- Project config types +-- + +-- | This type corresponds directly to what can be written in the +-- @cabal.project@ file. Other sources of configuration can also be injected +-- into this type, such as the user-wide @~/.cabal/config@ file and the +-- command line of @cabal configure@ or @cabal build@. +-- +-- Since it corresponds to the external project file it is an instance of +-- 'Monoid' and all the fields can be empty. This also means there has to +-- be a step where we resolve configuration. 
At a minimum resolving means +-- applying defaults but it can also mean merging information from multiple +-- sources. For example for package-specific configuration the project file +-- can specify configuration that applies to all local packages, and then +-- additional configuration for a specific package. +-- +-- Future directions: multiple profiles, conditionals. If we add these +-- features then the gap between configuration as written in the config file +-- and resolved settings we actually use will become even bigger. +-- +data ProjectConfig + = ProjectConfig { + + -- | Packages in this project, including local dirs, local .cabal files + -- local and remote tarballs. Where these are file globs, they must + -- match something. + projectPackages :: [String], + + -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that + -- file globs are allowed to match nothing. The primary use case for + -- this is to be able to say @optional-packages: */@ to automagically + -- pick up deps that we unpack locally. + projectPackagesOptional :: [String], + + -- | Packages in this project from remote source repositories. + projectPackagesRepo :: [SourceRepo], + + -- | Packages in this project from hackage repositories. + projectPackagesNamed :: [Dependency], + + projectConfigBuildOnly :: ProjectConfigBuildOnly, + projectConfigShared :: ProjectConfigShared, + projectConfigLocalPackages :: PackageConfig, + projectConfigSpecificPackage :: MapMappend PackageName PackageConfig + } + deriving (Eq, Show, Generic) + +-- | That part of the project configuration that only affects /how/ we build +-- and not the /value/ of the things we build. This means this information +-- does not need to be tracked for changes since it does not affect the +-- outcome. +-- +data ProjectConfigBuildOnly + = ProjectConfigBuildOnly { + projectConfigVerbosity :: Flag Verbosity, + projectConfigDryRun :: Flag Bool, + projectConfigOnlyDeps :: Flag Bool, + projectConfigSummaryFile :: NubList PathTemplate, + projectConfigLogFile :: Flag PathTemplate, + projectConfigBuildReports :: Flag ReportLevel, + projectConfigReportPlanningFailure :: Flag Bool, + projectConfigSymlinkBinDir :: Flag FilePath, + projectConfigOneShot :: Flag Bool, + projectConfigNumJobs :: Flag (Maybe Int), + projectConfigOfflineMode :: Flag Bool, + projectConfigKeepTempFiles :: Flag Bool, + projectConfigHttpTransport :: Flag String, + projectConfigIgnoreExpiry :: Flag Bool, + projectConfigCacheDir :: Flag FilePath, + projectConfigLogsDir :: Flag FilePath, + projectConfigWorldFile :: Flag FilePath, + projectConfigRootCmd :: Flag String + } + deriving (Eq, Show, Generic) + + +-- | Project configuration that is shared between all packages in the project. +-- In particular this includes configuration that affects the solver. +-- +data ProjectConfigShared + = ProjectConfigShared { + projectConfigHcFlavor :: Flag CompilerFlavor, + projectConfigHcPath :: Flag FilePath, + projectConfigHcPkg :: Flag FilePath, + projectConfigHaddockIndex :: Flag PathTemplate, + + -- Things that only make sense for manual mode, not --local mode + -- too much control! 
+ --projectConfigUserInstall :: Flag Bool, + --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), + --TODO: [required eventually] decide what to do with InstallDirs + -- currently we don't allow it to be specified in the config file + --projectConfigPackageDBs :: [Maybe PackageDB], + + -- configuration used both by the solver and other phases + projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. + projectConfigLocalRepos :: NubList FilePath, + + -- solver configuration + projectConfigConstraints :: [(UserConstraint, ConstraintSource)], + projectConfigPreferences :: [Dependency], + projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused + projectConfigSolver :: Flag PreSolver, + projectConfigAllowNewer :: Maybe AllowNewer, + projectConfigMaxBackjumps :: Flag Int, + projectConfigReorderGoals :: Flag Bool, + projectConfigStrongFlags :: Flag Bool + + -- More things that only make sense for manual mode, not --local mode + -- too much control! + --projectConfigIndependentGoals :: Flag Bool, + --projectConfigShadowPkgs :: Flag Bool, + --projectConfigReinstall :: Flag Bool, + --projectConfigAvoidReinstalls :: Flag Bool, + --projectConfigOverrideReinstall :: Flag Bool, + --projectConfigUpgradeDeps :: Flag Bool + } + deriving (Eq, Show, Generic) + + +-- | Project configuration that is specific to each package, that is where we +-- can in principle have different values for different packages in the same +-- project. +-- +data PackageConfig + = PackageConfig { + packageConfigProgramPaths :: MapLast String FilePath, + packageConfigProgramArgs :: MapMappend String [String], + packageConfigProgramPathExtra :: NubList FilePath, + packageConfigFlagAssignment :: FlagAssignment, + packageConfigVanillaLib :: Flag Bool, + packageConfigSharedLib :: Flag Bool, + packageConfigDynExe :: Flag Bool, + packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out + packageConfigProfLib :: Flag Bool, -- this duplication + packageConfigProfExe :: Flag Bool, -- and consistency + packageConfigProfDetail :: Flag ProfDetailLevel, + packageConfigProfLibDetail :: Flag ProfDetailLevel, + packageConfigConfigureArgs :: [String], + packageConfigOptimization :: Flag OptimisationLevel, + packageConfigProgPrefix :: Flag PathTemplate, + packageConfigProgSuffix :: Flag PathTemplate, + packageConfigExtraLibDirs :: [FilePath], + packageConfigExtraFrameworkDirs :: [FilePath], + packageConfigExtraIncludeDirs :: [FilePath], + packageConfigGHCiLib :: Flag Bool, + packageConfigSplitObjs :: Flag Bool, + packageConfigStripExes :: Flag Bool, + packageConfigStripLibs :: Flag Bool, + packageConfigTests :: Flag Bool, + packageConfigBenchmarks :: Flag Bool, + packageConfigCoverage :: Flag Bool, + packageConfigRelocatable :: Flag Bool, + packageConfigDebugInfo :: Flag DebugInfoLevel, + packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this + packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this + packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockInternal :: Flag Bool, 
--TODO: [required eventually] use this + packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this + packageConfigHaddockHscolour :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this + packageConfigHaddockContents :: Flag PathTemplate --TODO: [required eventually] use this + } + deriving (Eq, Show, Generic) + +instance Binary ProjectConfig +instance Binary ProjectConfigBuildOnly +instance Binary ProjectConfigShared +instance Binary PackageConfig + + +-- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that takes +-- the last value rather than the first value for overlapping keys. +newtype MapLast k v = MapLast { getMapLast :: Map k v } + deriving (Eq, Show, Functor, Generic, Binary) + +instance Ord k => Monoid (MapLast k v) where + mempty = MapLast Map.empty + mappend = (<>) + +instance Ord k => Semigroup (MapLast k v) where + MapLast a <> MapLast b = MapLast (flip Map.union a b) + -- rather than Map.union which is the normal Map monoid instance + + +-- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that +-- 'mappend's values of overlapping keys rather than taking the first. +newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } + deriving (Eq, Show, Functor, Generic, Binary) + +instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where + mempty = MapMappend Map.empty + mappend = (<>) + +instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where + MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) + -- rather than Map.union which is the normal Map monoid instance + + +instance Monoid ProjectConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfig where + (<>) = gmappend + + +instance Monoid ProjectConfigBuildOnly where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfigBuildOnly where + (<>) = gmappend + + +instance Monoid ProjectConfigShared where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfigShared where + (<>) = gmappend + + +instance Monoid PackageConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup PackageConfig where + (<>) = gmappend + +---------------------------------------- +-- Resolving configuration to settings +-- + +-- | Resolved configuration for the solver. The idea is that this is easier to +-- use than the raw configuration because in the raw configuration everything +-- is optional (monoidial). In the 'BuildTimeSettings' every field is filled +-- in, if only with the defaults. +-- +-- Use 'resolveSolverSettings' to make one from the project config (by +-- applying defaults etc). +-- +data SolverSettings + = SolverSettings { + solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. + solverSettingLocalRepos :: [FilePath], + solverSettingConstraints :: [(UserConstraint, ConstraintSource)], + solverSettingPreferences :: [Dependency], + solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages + solverSettingFlagAssignments :: Map PackageName FlagAssignment, + solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused + solverSettingSolver :: PreSolver, + solverSettingAllowNewer :: AllowNewer, + solverSettingMaxBackjumps :: Maybe Int, + solverSettingReorderGoals :: Bool, + solverSettingStrongFlags :: Bool + -- Things that only make sense for manual mode, not --local mode + -- too much control! 
+ --solverSettingIndependentGoals :: Bool, + --solverSettingShadowPkgs :: Bool, + --solverSettingReinstall :: Bool, + --solverSettingAvoidReinstalls :: Bool, + --solverSettingOverrideReinstall :: Bool, + --solverSettingUpgradeDeps :: Bool + } + deriving (Eq, Show, Generic) + +instance Binary SolverSettings + + +-- | Resolved configuration for things that affect how we build and not the +-- value of the things we build. The idea is that this is easier to use than +-- the raw configuration because in the raw configuration everything is +-- optional (monoidial). In the 'BuildTimeSettings' every field is filled in, +-- if only with the defaults. +-- +-- Use 'resolveBuildTimeSettings' to make one from the project config (by +-- applying defaults etc). +-- +data BuildTimeSettings + = BuildTimeSettings { + buildSettingDryRun :: Bool, + buildSettingOnlyDeps :: Bool, + buildSettingSummaryFile :: [PathTemplate], + buildSettingLogFile :: Maybe (Compiler -> Platform + -> PackageId -> UnitId + -> FilePath), + buildSettingLogVerbosity :: Verbosity, + buildSettingBuildReports :: ReportLevel, + buildSettingReportPlanningFailure :: Bool, + buildSettingSymlinkBinDir :: [FilePath], + buildSettingOneShot :: Bool, + buildSettingNumJobs :: Int, + buildSettingOfflineMode :: Bool, + buildSettingKeepTempFiles :: Bool, + buildSettingRemoteRepos :: [RemoteRepo], + buildSettingLocalRepos :: [FilePath], + buildSettingCacheDir :: FilePath, + buildSettingHttpTransport :: Maybe String, + buildSettingIgnoreExpiry :: Bool, + buildSettingRootCmd :: Maybe String + } + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectConfig.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,747 @@ +{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveDataTypeable #-} + +-- | Handling project configuration. +-- +module Distribution.Client.ProjectConfig ( + + -- * Types for project config + ProjectConfig(..), + ProjectConfigBuildOnly(..), + ProjectConfigShared(..), + PackageConfig(..), + MapLast(..), + MapMappend(..), + + -- * Project config files + findProjectRoot, + readProjectConfig, + writeProjectLocalExtraConfig, + writeProjectConfigFile, + commandLineFlagsToProjectConfig, + + -- * Packages within projects + ProjectPackageLocation(..), + BadPackageLocations(..), + BadPackageLocation(..), + BadPackageLocationMatch(..), + findProjectPackages, + readSourcePackage, + + -- * Resolving configuration + lookupLocalPackageConfig, + projectConfigWithBuilderRepoContext, + projectConfigWithSolverRepoContext, + SolverSettings(..), + resolveSolverSettings, + BuildTimeSettings(..), + resolveBuildTimeSettings, + + -- * Checking configuration + checkBadPerPackageCompilerPaths, + BadPerPackageCompilerPaths(..) + ) where + +import Distribution.Client.ProjectConfig.Types +import Distribution.Client.ProjectConfig.Legacy +import Distribution.Client.RebuildMonad +import Distribution.Client.Glob + ( isTrivialFilePathGlob ) + +import Distribution.Client.Types +import Distribution.Client.DistDirLayout + ( CabalDirLayout(..) 
) +import Distribution.Client.GlobalFlags + ( RepoContext(..), withRepoContext' ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) +import Distribution.Client.Config + ( loadConfig, defaultConfigFile ) + +import Distribution.Package + ( PackageName, PackageId, packageId, UnitId, Dependency ) +import Distribution.System + ( Platform ) +import Distribution.PackageDescription + ( SourceRepo(..) ) +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo ) +import Distribution.Simple.Program + ( ConfiguredProgram(..) ) +import Distribution.Simple.Setup + ( Flag(Flag), toFlag, flagToMaybe, flagToList + , fromFlag, AllowNewer(..) ) +import Distribution.Client.Setup + ( defaultSolver, defaultMaxBackjumps, ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate + , toPathTemplate, substPathTemplate, initialPathTemplateEnv ) +import Distribution.Simple.Utils + ( die, warn ) +import Distribution.Client.Utils + ( determineNumJobs ) +import Distribution.Utils.NubList + ( fromNubList ) +import Distribution.Verbosity + ( Verbosity, verbose ) +import Distribution.Text +import Distribution.ParseUtils + ( ParseResult(..), locatedErrorMsg, showPWarning ) + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import Control.Monad.Trans (liftIO) +import Control.Exception +import Data.Typeable +import Data.Maybe +import Data.Either +import qualified Data.Map as Map +import Data.Map (Map) +import qualified Data.Set as Set +import Distribution.Compat.Semigroup +import System.FilePath hiding (combine) +import System.Directory +import Network.URI (URI(..), URIAuth(..), parseAbsoluteURI) + + +---------------------------------------- +-- Resolving configuration to settings +-- + +-- | Look up a 'PackageConfig' field in the 'ProjectConfig' for a specific +-- 'PackageName'. This returns the configuration that applies to all local +-- packages plus any package-specific configuration for this package. +-- +lookupLocalPackageConfig :: (Semigroup a, Monoid a) + => (PackageConfig -> a) + -> ProjectConfig + -> PackageName -> a +lookupLocalPackageConfig field ProjectConfig { + projectConfigLocalPackages, + projectConfigSpecificPackage + } pkgname = + field projectConfigLocalPackages + <> maybe mempty field + (Map.lookup pkgname (getMapMappend projectConfigSpecificPackage)) + + +-- | Use a 'RepoContext' based on the 'BuildTimeSettings'. +-- +projectConfigWithBuilderRepoContext :: Verbosity + -> BuildTimeSettings + -> (RepoContext -> IO a) -> IO a +projectConfigWithBuilderRepoContext verbosity BuildTimeSettings{..} = + withRepoContext' + verbosity + buildSettingRemoteRepos + buildSettingLocalRepos + buildSettingCacheDir + buildSettingHttpTransport + (Just buildSettingIgnoreExpiry) + + +-- | Use a 'RepoContext', but only for the solver. The solver does not use the +-- full facilities of the 'RepoContext' so we can get away with making one +-- that doesn't have an http transport. 
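-- A standalone sketch (not from this patch) of the lookup pattern used by
-- lookupLocalPackageConfig above: the value for a package is the value that
-- applies to all local packages mappended with any package-specific entry.
-- lookupPkgField and the option lists are invented for the example.

import           Data.Map (Map)
import qualified Data.Map as Map

lookupPkgField :: Ord pkg => [opt] -> Map pkg [opt] -> pkg -> [opt]
lookupPkgField forAllLocalPkgs perPkg pkg =
    forAllLocalPkgs <> Map.findWithDefault [] pkg perPkg

main :: IO ()
main = do
  let common   = ["-Wall"]
      specific = Map.fromList [("foo", ["-O2"])]
  print (lookupPkgField common specific ("foo" :: String))  -- ["-Wall","-O2"]
  print (lookupPkgField common specific "bar")              -- ["-Wall"]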
And that avoids having to have access +-- to the 'BuildTimeSettings' +-- +projectConfigWithSolverRepoContext :: Verbosity + -> FilePath + -> ProjectConfigShared + -> ProjectConfigBuildOnly + -> (RepoContext -> IO a) -> IO a +projectConfigWithSolverRepoContext verbosity downloadCacheRootDir + ProjectConfigShared{..} + ProjectConfigBuildOnly{..} = + withRepoContext' + verbosity + (fromNubList projectConfigRemoteRepos) + (fromNubList projectConfigLocalRepos) + downloadCacheRootDir + (flagToMaybe projectConfigHttpTransport) + (flagToMaybe projectConfigIgnoreExpiry) + + +-- | Resolve the project configuration, with all its optional fields, into +-- 'SolverSettings' with no optional fields (by applying defaults). +-- +resolveSolverSettings :: ProjectConfig -> SolverSettings +resolveSolverSettings ProjectConfig{ + projectConfigShared, + projectConfigLocalPackages, + projectConfigSpecificPackage + } = + SolverSettings {..} + where + --TODO: [required eventually] some of these settings need validation, e.g. + -- the flag assignments need checking. + solverSettingRemoteRepos = fromNubList projectConfigRemoteRepos + solverSettingLocalRepos = fromNubList projectConfigLocalRepos + solverSettingConstraints = projectConfigConstraints + solverSettingPreferences = projectConfigPreferences + solverSettingFlagAssignment = packageConfigFlagAssignment projectConfigLocalPackages + solverSettingFlagAssignments = fmap packageConfigFlagAssignment + (getMapMappend projectConfigSpecificPackage) + solverSettingCabalVersion = flagToMaybe projectConfigCabalVersion + solverSettingSolver = fromFlag projectConfigSolver + solverSettingAllowNewer = fromJust projectConfigAllowNewer + solverSettingMaxBackjumps = case fromFlag projectConfigMaxBackjumps of + n | n < 0 -> Nothing + | otherwise -> Just n + solverSettingReorderGoals = fromFlag projectConfigReorderGoals + solverSettingStrongFlags = fromFlag projectConfigStrongFlags + --solverSettingIndependentGoals = fromFlag projectConfigIndependentGoals + --solverSettingShadowPkgs = fromFlag projectConfigShadowPkgs + --solverSettingReinstall = fromFlag projectConfigReinstall + --solverSettingAvoidReinstalls = fromFlag projectConfigAvoidReinstalls + --solverSettingOverrideReinstall = fromFlag projectConfigOverrideReinstall + --solverSettingUpgradeDeps = fromFlag projectConfigUpgradeDeps + + ProjectConfigShared {..} = defaults <> projectConfigShared + + defaults = mempty { + projectConfigSolver = Flag defaultSolver, + projectConfigAllowNewer = Just AllowNewerNone, + projectConfigMaxBackjumps = Flag defaultMaxBackjumps, + projectConfigReorderGoals = Flag False, + projectConfigStrongFlags = Flag False + --projectConfigIndependentGoals = Flag False, + --projectConfigShadowPkgs = Flag False, + --projectConfigReinstall = Flag False, + --projectConfigAvoidReinstalls = Flag False, + --projectConfigOverrideReinstall = Flag False, + --projectConfigUpgradeDeps = Flag False + } + + +-- | Resolve the project configuration, with all its optional fields, into +-- 'BuildTimeSettings' with no optional fields (by applying defaults). 
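-- A hedged sketch (not from this patch) of the "defaults <> userConfig, then
-- fromFlag" idiom that the resolve functions here rely on. SolverOpts and
-- resolveReorderGoals are invented; it assumes a Cabal version in which
-- Flag has Semigroup/Monoid instances where the last set flag wins.

import Distribution.Simple.Setup (Flag, fromFlag, toFlag)

newtype SolverOpts = SolverOpts { optReorderGoals :: Flag Bool }

instance Semigroup SolverOpts where
  a <> b = SolverOpts (optReorderGoals a <> optReorderGoals b)

instance Monoid SolverOpts where
  mempty = SolverOpts mempty

-- Defaults go on the left, so any user-supplied flag overrides them.
resolveReorderGoals :: SolverOpts -> Bool
resolveReorderGoals user = fromFlag (optReorderGoals (defaults <> user))
  where
    defaults = SolverOpts { optReorderGoals = toFlag False }

main :: IO ()
main = do
  print (resolveReorderGoals mempty)                                      -- False
  print (resolveReorderGoals SolverOpts { optReorderGoals = toFlag True }) -- True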
+--
+resolveBuildTimeSettings :: Verbosity
+                         -> CabalDirLayout
+                         -> ProjectConfigShared
+                         -> ProjectConfigBuildOnly
+                         -> ProjectConfigBuildOnly
+                         -> BuildTimeSettings
+resolveBuildTimeSettings verbosity
+                         CabalDirLayout {
+                           cabalLogsDirectory,
+                           cabalPackageCacheDirectory
+                         }
+                         ProjectConfigShared {
+                           projectConfigRemoteRepos,
+                           projectConfigLocalRepos
+                         }
+                         fromProjectFile
+                         fromCommandLine =
+    BuildTimeSettings {..}
+  where
+    buildSettingDryRun        = fromFlag    projectConfigDryRun
+    buildSettingOnlyDeps      = fromFlag    projectConfigOnlyDeps
+    buildSettingSummaryFile   = fromNubList projectConfigSummaryFile
+    --buildSettingLogFile       -- defined below, more complicated
+    --buildSettingLogVerbosity  -- defined below, more complicated
+    buildSettingBuildReports  = fromFlag    projectConfigBuildReports
+    buildSettingSymlinkBinDir = flagToList  projectConfigSymlinkBinDir
+    buildSettingOneShot       = fromFlag    projectConfigOneShot
+    buildSettingNumJobs       = determineNumJobs projectConfigNumJobs
+    buildSettingOfflineMode   = fromFlag    projectConfigOfflineMode
+    buildSettingKeepTempFiles = fromFlag    projectConfigKeepTempFiles
+    buildSettingRemoteRepos   = fromNubList projectConfigRemoteRepos
+    buildSettingLocalRepos    = fromNubList projectConfigLocalRepos
+    buildSettingCacheDir      = cabalPackageCacheDirectory
+    buildSettingHttpTransport = flagToMaybe projectConfigHttpTransport
+    buildSettingIgnoreExpiry  = fromFlag    projectConfigIgnoreExpiry
+    buildSettingReportPlanningFailure
+                              = fromFlag projectConfigReportPlanningFailure
+    buildSettingRootCmd       = flagToMaybe projectConfigRootCmd
+
+    ProjectConfigBuildOnly{..} = defaults
+                              <> fromProjectFile
+                              <> fromCommandLine
+
+    defaults = mempty {
+      projectConfigDryRun                = toFlag False,
+      projectConfigOnlyDeps              = toFlag False,
+      projectConfigBuildReports          = toFlag NoReports,
+      projectConfigReportPlanningFailure = toFlag False,
+      projectConfigOneShot               = toFlag False,
+      projectConfigOfflineMode           = toFlag False,
+      projectConfigKeepTempFiles         = toFlag False,
+      projectConfigIgnoreExpiry          = toFlag False
+    }
+
+    -- The logging logic: what log file to use and what verbosity.
+    --
+    -- If the user has specified --remote-build-reporting=detailed, use the
+    -- default log file location. If the --build-log option is set, use the
+    -- provided location. Otherwise don't use logging, unless building in
+    -- parallel (in which case the default location is used).
+    --
+    buildSettingLogFile :: Maybe (Compiler -> Platform
+                               -> PackageId -> UnitId -> FilePath)
+    buildSettingLogFile
+      | useDefaultTemplate = Just (substLogFileName defaultTemplate)
+      | otherwise          = fmap substLogFileName givenTemplate
+
+    defaultTemplate = toPathTemplate $
+                        cabalLogsDirectory </> "$pkgid" <.> "log"
+    givenTemplate   = flagToMaybe projectConfigLogFile
+
+    useDefaultTemplate
+      | buildSettingBuildReports == DetailedReports = True
+      | isJust givenTemplate                        = False
+      | isParallelBuild                             = True
+      | otherwise                                   = False
+
+    isParallelBuild = buildSettingNumJobs >= 2
+
+    substLogFileName :: PathTemplate
+                     -> Compiler -> Platform
+                     -> PackageId -> UnitId -> FilePath
+    substLogFileName template compiler platform pkgid uid =
+        fromPathTemplate (substPathTemplate env template)
+      where
+        env = initialPathTemplateEnv
+                pkgid uid (compilerInfo compiler) platform
+
+    -- If the user has specified --remote-build-reporting=detailed or
+    -- --build-log, use more verbose logging.
+    --
+    buildSettingLogVerbosity
+      | overrideVerbosity = max verbose verbosity
+      | otherwise         = verbosity
+
+    overrideVerbosity
+      | buildSettingBuildReports == DetailedReports = True
+      | isJust givenTemplate                        = True
+      | isParallelBuild                             = False
+      | otherwise                                   = False
+
+
+---------------------------------------------
+-- Reading and writing project config files
+--
+
+-- | Find the root of this project.
+--
+-- Searches for an explicit @cabal.project@ file, in the current directory or
+-- parent directories. If no project file is found then the current dir is the
+-- project root (and the project will use an implicit config).
+--
+findProjectRoot :: IO FilePath
+findProjectRoot = do
+
+    curdir  <- getCurrentDirectory
+    homedir <- getHomeDirectory
+
+    -- Search upwards. If we get to the user's home dir or the filesystem root,
+    -- then use the current dir
+    let probe dir | isDrive dir || dir == homedir
+                  = return curdir -- implicit project root
+        probe dir = do
+          exists <- doesFileExist (dir </> "cabal.project")
+          if exists
+            then return dir       -- explicit project root
+            else probe (takeDirectory dir)
+
+    probe curdir
+    --TODO: [nice to have] add compat support for old style sandboxes
+
+
+-- | Read all the config relevant for a project. This includes the project
+-- file if any, plus other global config.
+--
+readProjectConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig
+readProjectConfig verbosity projectRootDir = do
+    global <- readGlobalConfig           verbosity
+    local  <- readProjectLocalConfig      verbosity projectRootDir
+    extra  <- readProjectLocalExtraConfig verbosity projectRootDir
+    return (global <> local <> extra)
+
+
+-- | Reads an explicit @cabal.project@ file in the given project root dir,
+-- or returns the default project config for an implicitly defined project.
+--
+readProjectLocalConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig
+readProjectLocalConfig verbosity projectRootDir = do
+    usesExplicitProjectRoot <- liftIO $ doesFileExist projectFile
+    if usesExplicitProjectRoot
+      then do
+        monitorFiles [monitorFileHashed projectFile]
+        liftIO readProjectFile
+      else do
+        monitorFiles [monitorNonExistentFile projectFile]
+        return defaultImplicitProjectConfig
+
+  where
+    projectFile = projectRootDir </> "cabal.project"
+    readProjectFile =
+          reportParseResult verbosity "project file" projectFile
+        . parseProjectConfig
+      =<< readFile projectFile
+
+    defaultImplicitProjectConfig :: ProjectConfig
+    defaultImplicitProjectConfig =
+      mempty {
+        -- We expect a package in the current directory.
+        projectPackages         = [ "./*.cabal" ],
+
+        -- This is to automatically pick up deps that we unpack locally.
+        projectPackagesOptional = [ "./*/*.cabal" ]
+      }
+
+
+-- | Reads a @cabal.project.local@ file in the given project root dir,
+-- or returns empty. This file gets written by @cabal configure@, or in
+-- principle can be edited manually or by other tools.
+--
+readProjectLocalExtraConfig :: Verbosity -> FilePath -> Rebuild ProjectConfig
+readProjectLocalExtraConfig verbosity projectRootDir = do
+    hasExtraConfig <- liftIO $ doesFileExist projectExtraConfigFile
+    if hasExtraConfig
+      then do monitorFiles [monitorFileHashed projectExtraConfigFile]
+              liftIO readProjectExtraConfigFile
+      else do monitorFiles [monitorNonExistentFile projectExtraConfigFile]
+              return mempty
+  where
+    projectExtraConfigFile = projectRootDir </> "cabal.project.local"
+
+    readProjectExtraConfigFile =
+          reportParseResult verbosity "project local configuration file"
+                            projectExtraConfigFile
+        . parseProjectConfig
+      =<< readFile projectExtraConfigFile
+
+
+-- | Parse the 'ProjectConfig' format.
+--
+-- For the moment this is implemented in terms of parsers for legacy
+-- configuration types, plus a conversion.
+--
+parseProjectConfig :: String -> ParseResult ProjectConfig
+parseProjectConfig content =
+    convertLegacyProjectConfig <$>
+      parseLegacyProjectConfig content
+
+
+-- | Render the 'ProjectConfig' format.
+--
+-- For the moment this is implemented in terms of a pretty printer for the
+-- legacy configuration types, plus a conversion.
+--
+showProjectConfig :: ProjectConfig -> String
+showProjectConfig =
+    showLegacyProjectConfig . convertToLegacyProjectConfig
+
+
+-- | Write a @cabal.project.local@ file in the given project root dir.
+--
+writeProjectLocalExtraConfig :: FilePath -> ProjectConfig -> IO ()
+writeProjectLocalExtraConfig projectRootDir =
+    writeProjectConfigFile projectExtraConfigFile
+  where
+    projectExtraConfigFile = projectRootDir </> "cabal.project.local"
+
+
+-- | Write in the @cabal.project@ format to the given file.
+--
+writeProjectConfigFile :: FilePath -> ProjectConfig -> IO ()
+writeProjectConfigFile file =
+    writeFile file . showProjectConfig
+
+
+-- | Read the user's @~/.cabal/config@ file.
+--
+readGlobalConfig :: Verbosity -> Rebuild ProjectConfig
+readGlobalConfig verbosity = do
+    config     <- liftIO (loadConfig verbosity mempty)
+    configFile <- liftIO defaultConfigFile
+    monitorFiles [monitorFileHashed configFile]
+    return (convertLegacyGlobalConfig config)
+    --TODO: do this properly, there's several possible locations
+    -- and env vars, and flags for selecting the global config
+
+
+reportParseResult :: Verbosity -> String -> FilePath -> ParseResult a -> IO a
+reportParseResult verbosity _filetype filename (ParseOk warnings x) = do
+    unless (null warnings) $
+      let msg = unlines (map (showPWarning filename) warnings)
+       in warn verbosity msg
+    return x
+reportParseResult _verbosity filetype filename (ParseFailed err) =
+    let (line, msg) = locatedErrorMsg err
+     in die $ "Error parsing " ++ filetype ++ " " ++ filename
+           ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg
+
+
+---------------------------------------------
+-- Reading packages in the project
+--
+
+-- | The location of a package as part of a project. Local file paths are
+-- either absolute (if the user specified it as such) or they are relative
+-- to the project root.
+--
+data ProjectPackageLocation =
+     ProjectPackageLocalCabalFile FilePath
+   | ProjectPackageLocalDirectory FilePath FilePath -- dir and .cabal file
+   | ProjectPackageLocalTarball   FilePath
+   | ProjectPackageRemoteTarball  URI
+   | ProjectPackageRemoteRepo     SourceRepo
+   | ProjectPackageNamed          Dependency
+  deriving Show
+
+
+-- | Exception thrown by 'findProjectPackages'.
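-- A minimal standalone sketch (not from this patch) of the throw/catch pattern
-- used with the exception types below: define a newtype, give it an Exception
-- instance, throw it with throwIO and let the caller recover with try.
-- BadLocations and checkLocations are invented names for this example.

import Control.Exception (Exception, throwIO, try)

newtype BadLocations = BadLocations [String]
  deriving Show

instance Exception BadLocations

checkLocations :: [String] -> IO ()
checkLocations locs =
    case filter null locs of
      []   -> return ()
      bads -> throwIO (BadLocations bads)

main :: IO ()
main = do
  result <- try (checkLocations ["pkgs/foo", ""]) :: IO (Either BadLocations ())
  print result   -- Left (BadLocations [""])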
+-- +newtype BadPackageLocations = BadPackageLocations [BadPackageLocation] + deriving (Show, Typeable) + +instance Exception BadPackageLocations +--TODO: [required eventually] displayException for nice rendering +--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc + +data BadPackageLocation + = BadPackageLocationFile BadPackageLocationMatch + | BadLocGlobEmptyMatch String + | BadLocGlobBadMatches String [BadPackageLocationMatch] + | BadLocUnexpectedUriScheme String + | BadLocUnrecognisedUri String + | BadLocUnrecognised String + deriving Show + +data BadPackageLocationMatch + = BadLocUnexpectedFile String + | BadLocNonexistantFile String + | BadLocDirNoCabalFile String + | BadLocDirManyCabalFiles String + deriving Show + + +-- | Given the project config, +-- +-- Throws 'BadPackageLocations'. +-- +findProjectPackages :: FilePath -> ProjectConfig + -> Rebuild [ProjectPackageLocation] +findProjectPackages projectRootDir ProjectConfig{..} = do + + requiredPkgs <- findPackageLocations True projectPackages + optionalPkgs <- findPackageLocations False projectPackagesOptional + let repoPkgs = map ProjectPackageRemoteRepo projectPackagesRepo + namedPkgs = map ProjectPackageNamed projectPackagesNamed + + return (concat [requiredPkgs, optionalPkgs, repoPkgs, namedPkgs]) + where + findPackageLocations required pkglocstr = do + (problems, pkglocs) <- + partitionEithers <$> mapM (findPackageLocation required) pkglocstr + unless (null problems) $ + liftIO $ throwIO $ BadPackageLocations problems + return (concat pkglocs) + + + findPackageLocation :: Bool -> String + -> Rebuild (Either BadPackageLocation + [ProjectPackageLocation]) + findPackageLocation _required@True pkglocstr = + -- strategy: try first as a file:// or http(s):// URL. + -- then as a file glob (usually encompassing single file) + -- finally as a single file, for files that fail to parse as globs + checkIsUriPackage pkglocstr + `mplusMaybeT` checkIsFileGlobPackage pkglocstr + `mplusMaybeT` checkIsSingleFilePackage pkglocstr + >>= maybe (return (Left (BadLocUnrecognised pkglocstr))) return + + + findPackageLocation _required@False pkglocstr = do + -- just globs for optional case + res <- checkIsFileGlobPackage pkglocstr + case res of + Nothing -> return (Left (BadLocUnrecognised pkglocstr)) + Just (Left _) -> return (Right []) -- it's optional + Just (Right pkglocs) -> return (Right pkglocs) + + + checkIsUriPackage, checkIsFileGlobPackage, checkIsSingleFilePackage + :: String -> Rebuild (Maybe (Either BadPackageLocation + [ProjectPackageLocation])) + checkIsUriPackage pkglocstr = + return $! 
+        case parseAbsoluteURI pkglocstr of
+          Just uri@URI {
+              uriScheme    = scheme,
+              uriAuthority = Just URIAuth { uriRegName = host }
+            }
+            | recognisedScheme && not (null host) ->
+              Just (Right [ProjectPackageRemoteTarball uri])
+
+            | not recognisedScheme && not (null host) ->
+              Just (Left (BadLocUnexpectedUriScheme pkglocstr))
+
+            | recognisedScheme && null host ->
+              Just (Left (BadLocUnrecognisedUri pkglocstr))
+            where
+              recognisedScheme = scheme == "http:" || scheme == "https:"
+                              || scheme == "file:"
+
+          _ -> Nothing
+
+
+    checkIsFileGlobPackage pkglocstr =
+      case simpleParse pkglocstr of
+        Nothing   -> return Nothing
+        Just glob -> liftM Just $ do
+          matches <- matchFileGlob glob
+          case matches of
+            [] | isJust (isTrivialFilePathGlob glob)
+               -> return (Left (BadPackageLocationFile
+                                  (BadLocNonexistantFile pkglocstr)))
+
+            [] -> return (Left (BadLocGlobEmptyMatch pkglocstr))
+
+            _  -> do
+              (failures, pkglocs) <- partitionEithers <$>
+                                     mapM checkFilePackageMatch matches
+              if null pkglocs
+                then return (Left (BadLocGlobBadMatches pkglocstr failures))
+                else return (Right pkglocs)
+
+
+    checkIsSingleFilePackage pkglocstr = do
+      let filename = projectRootDir </> pkglocstr
+      isFile <- liftIO $ doesFileExist filename
+      isDir  <- liftIO $ doesDirectoryExist filename
+      if isFile || isDir
+        then checkFilePackageMatch pkglocstr
+         >>= either (return . Just . Left  . BadPackageLocationFile)
+                    (return . Just . Right . (\x->[x]))
+        else return Nothing
+
+
+    checkFilePackageMatch :: String -> Rebuild (Either BadPackageLocationMatch
+                                                       ProjectPackageLocation)
+    checkFilePackageMatch pkglocstr = do
+      -- The pkglocstr may be absolute or may be relative to the project root.
+      -- Either way, </> does the right thing here. We return relative paths if
+      -- they were relative in the first place.
+      let abspath = projectRootDir </> pkglocstr
+      isDir <- liftIO $ doesDirectoryExist abspath
+      parentDirExists <- case takeDirectory abspath of
+                           []  -> return False
+                           dir -> liftIO $ doesDirectoryExist dir
+      case () of
+        _ | isDir
+         -> do matches <- matchFileGlob (globStarDotCabal pkglocstr)
+               case matches of
+                 [cabalFile]
+                     -> return (Right (ProjectPackageLocalDirectory
+                                         pkglocstr cabalFile))
+                 []  -> return (Left (BadLocDirNoCabalFile pkglocstr))
+                 _   -> return (Left (BadLocDirManyCabalFiles pkglocstr))
+
+          | extensionIsTarGz pkglocstr
+         -> return (Right (ProjectPackageLocalTarball pkglocstr))
+
+          | takeExtension pkglocstr == ".cabal"
+         -> return (Right (ProjectPackageLocalCabalFile pkglocstr))
+
+          | parentDirExists
+         -> return (Left (BadLocNonexistantFile pkglocstr))
+
+          | otherwise
+         -> return (Left (BadLocUnexpectedFile pkglocstr))
+
+
+    extensionIsTarGz f = takeExtension f                 == ".gz"
+                      && takeExtension (dropExtension f) == ".tar"
+
+
+-- | A glob to find all the cabal files in a directory.
+--
+-- For a directory @some/dir/@, this is a glob of the form @some/dir/\*.cabal@.
+-- The directory part can be either absolute or relative.
+--
+globStarDotCabal :: FilePath -> FilePathGlob
+globStarDotCabal dir =
+    FilePathGlob
+      (if isAbsolute dir then FilePathRoot root else FilePathRelative)
+      (foldr (\d -> GlobDir [Literal d])
+             (GlobFile [WildCard, Literal ".cabal"]) dirComponents)
+  where
+    (root, dirComponents) = fmap splitDirectories (splitDrive dir)
+
+
+--TODO: [code cleanup] use sufficiently recent transformers package
+mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a)
+mplusMaybeT ma mb = do
+  mx <- ma
+  case mx of
+    Nothing -> mb
+    Just x  -> return (Just x)
+
+
+-- | Read the @.cabal@ file of the given package.
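-- A hedged aside (not from this patch): mplusMaybeT above is the usual
-- "first Just wins" combinator. With the transformers package it can be
-- written via MaybeT and (<|>); orElseMaybeT is an invented name.

import Control.Applicative ((<|>))
import Control.Monad.Trans.Maybe (MaybeT(..))

-- Same behaviour as mplusMaybeT: the second action runs only if the first
-- produced Nothing.
orElseMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a)
orElseMaybeT ma mb = runMaybeT (MaybeT ma <|> MaybeT mb)

main :: IO ()
main = do
  print =<< orElseMaybeT (return Nothing) (return (Just "file glob"))
  print =<< orElseMaybeT (return (Just "uri"))
                         (putStrLn "not reached" >> return Nothing)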
+--
+-- Note here is where we convert from project-root relative paths to absolute
+-- paths.
+--
+readSourcePackage :: Verbosity -> ProjectPackageLocation
+                  -> Rebuild SourcePackage
+readSourcePackage verbosity (ProjectPackageLocalCabalFile cabalFile) =
+    readSourcePackage verbosity (ProjectPackageLocalDirectory dir cabalFile)
+  where
+    dir = takeDirectory cabalFile
+
+readSourcePackage verbosity (ProjectPackageLocalDirectory dir cabalFile) = do
+    monitorFiles [monitorFileHashed cabalFile]
+    root <- askRoot
+    pkgdesc <- liftIO $ readPackageDescription verbosity (root </> cabalFile)
+    return SourcePackage {
+      packageInfoId        = packageId pkgdesc,
+      packageDescription   = pkgdesc,
+      packageSource        = LocalUnpackedPackage (root </> dir),
+      packageDescrOverride = Nothing
+    }
+readSourcePackage _verbosity _ =
+    fail $ "TODO: add support for fetching and reading local tarballs, remote "
+        ++ "tarballs, remote repos and passing named packages through"
+
+
+---------------------------------------------
+-- Checking configuration sanity
+--
+
+data BadPerPackageCompilerPaths
+   = BadPerPackageCompilerPaths [(PackageName, String)]
+  deriving (Show, Typeable)
+
+instance Exception BadPerPackageCompilerPaths
+--TODO: [required eventually] displayException for nice rendering
+--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc
+
+-- | The project configuration is not allowed to specify program locations for
+-- programs used by the compiler as these have to be the same for each set of
+-- packages.
+--
+-- We cannot check this until we know which programs the compiler uses, which
+-- in principle is not until we've configured the compiler.
+--
+-- Throws 'BadPerPackageCompilerPaths'
+--
+checkBadPerPackageCompilerPaths :: [ConfiguredProgram]
+                                -> Map PackageName PackageConfig
+                                -> IO ()
+checkBadPerPackageCompilerPaths compilerPrograms packagesConfig =
+    case [ (pkgname, progname)
+         | let compProgNames = Set.fromList (map programId compilerPrograms)
+         ,     (pkgname, pkgconf) <- Map.toList packagesConfig
+         , progname <- Map.keys (getMapLast (packageConfigProgramPaths pkgconf))
+         , progname `Set.member` compProgNames ] of
+      [] -> return ()
+      ps -> throwIO (BadPerPackageCompilerPaths ps)
+
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectOrchestration.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectOrchestration.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectOrchestration.hs	1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectOrchestration.hs	2016-12-23 10:35:30.000000000 +0000
@@ -0,0 +1,493 @@
+{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
+
+-- | This module deals with building and incrementally rebuilding a collection
+-- of packages. It is what backs the @cabal build@ and @configure@ commands,
+-- as well as being a core part of @run@, @test@, @bench@ and others.
+--
+-- The primary thing is in fact rebuilding (and trying to make that quick by
+-- not redoing unnecessary work), so building from scratch is just a special
+-- case.
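-- A standalone sketch (not from this patch) of the core of
-- checkBadPerPackageCompilerPaths above: intersect per-package program-path
-- overrides with the set of programs owned by the compiler. The function and
-- plain-String types here are invented for illustration.

import qualified Data.Map as Map
import qualified Data.Set as Set
import           Data.Map (Map)

-- Which (package, program) overrides clash with compiler-owned programs?
badCompilerPathOverrides :: [String]                         -- compiler programs
                         -> Map String (Map String FilePath) -- pkg -> prog -> path
                         -> [(String, String)]
badCompilerPathOverrides compilerProgs overrides =
    [ (pkg, prog)
    | let compProgNames = Set.fromList compilerProgs
    , (pkg, progPaths) <- Map.toList overrides
    , prog <- Map.keys progPaths
    , prog `Set.member` compProgNames ]

main :: IO ()
main = print (badCompilerPathOverrides ["ghc", "ghc-pkg"]
                (Map.fromList
                   [("foo", Map.fromList [ ("ghc",  "/opt/ghc/bin/ghc")
                                         , ("alex", "/usr/bin/alex") ])]))
  -- expected: [("foo","ghc")]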
+-- +-- The build process and the code can be understood by breaking it down into +-- three major parts: +-- +-- * The 'ElaboratedInstallPlan' type +-- +-- * The \"what to do\" phase, where we look at the all input configuration +-- (project files, .cabal files, command line etc) and produce a detailed +-- plan of what to do -- the 'ElaboratedInstallPlan'. +-- +-- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we +-- re-execute it. +-- +-- As far as possible, the \"what to do\" phase embodies all the policy, leaving +-- the \"do it\" phase policy free. The first phase contains more of the +-- complicated logic, but it is contained in code that is either pure or just +-- has read effects (except cache updates). Then the second phase does all the +-- actions to build packages, but as far as possible it just follows the +-- instructions and avoids any logic for deciding what to do (apart from +-- recompilation avoidance in executing the plan). +-- +-- This division helps us keep the code under control, making it easier to +-- understand, test and debug. So when you are extending these modules, please +-- think about which parts of your change belong in which part. It is +-- perfectly ok to extend the description of what to do (i.e. the +-- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the +-- first phase. Also, the second phase does not have direct access to any of +-- the input configuration anyway; all the information has to flow via the +-- 'ElaboratedInstallPlan'. +-- +module Distribution.Client.ProjectOrchestration ( + -- * Pre-build phase: decide what to do. + runProjectPreBuildPhase, + CliConfigFlags, + PreBuildHooks(..), + ProjectBuildContext(..), + + -- ** Adjusting the plan + selectTargets, + printPlan, + + -- * Build phase: now do it. + runProjectBuildPhase, + + -- * Post build actions + reportBuildFailures, + ) where + +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanning +import Distribution.Client.ProjectBuilding + +import Distribution.Client.Types + hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) + , DocsResult(..), TestsResult(..) ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.BuildTarget + ( UserBuildTarget, resolveUserBuildTargets + , BuildTarget(..), buildTargetPackage ) +import Distribution.Client.DistDirLayout +import Distribution.Client.Config (defaultCabalDir) +import Distribution.Client.Setup hiding (packageName) + +import Distribution.Package + hiding (InstalledPackageId, installedPackageId) +import qualified Distribution.PackageDescription as PD +import Distribution.PackageDescription (FlagAssignment) +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Simple.Setup (HaddockFlags) + +import Distribution.Simple.Utils (die, notice) +import Distribution.Verbosity +import Distribution.Text + +import qualified Data.Set as Set +import qualified Data.Map as Map +import Data.Map (Map) +import Data.List +import Data.Either +import System.Exit (exitFailure) + + +-- | Command line configuration flags. These are used to extend\/override the +-- project configuration. +-- +type CliConfigFlags = ( GlobalFlags + , ConfigFlags, ConfigExFlags + , InstallFlags, HaddockFlags ) + +-- | Hooks to alter the behaviour of 'runProjectPreBuildPhase'. +-- +-- For example the @configure@, @build@ and @repl@ commands use this to get +-- their different behaviour. 
+-- +data PreBuildHooks = PreBuildHooks { + hookPrePlanning :: FilePath + -> DistDirLayout + -> ProjectConfig + -> IO (), + hookSelectPlanSubset :: ElaboratedInstallPlan + -> IO ElaboratedInstallPlan + } + +-- | This holds the context between the pre-build and build phases. +-- +data ProjectBuildContext = ProjectBuildContext { + distDirLayout :: DistDirLayout, + elaboratedPlan :: ElaboratedInstallPlan, + elaboratedShared :: ElaboratedSharedConfig, + pkgsBuildStatus :: BuildStatusMap, + buildSettings :: BuildTimeSettings + } + + +-- | Pre-build phase: decide what to do. +-- +runProjectPreBuildPhase :: Verbosity + -> CliConfigFlags + -> PreBuildHooks + -> IO ProjectBuildContext +runProjectPreBuildPhase + verbosity + ( globalFlags + , configFlags, configExFlags + , installFlags, haddockFlags ) + PreBuildHooks{..} = do + + cabalDir <- defaultCabalDir + let cabalDirLayout = defaultCabalDirLayout cabalDir + + projectRootDir <- findProjectRoot + let distDirLayout = defaultDistDirLayout projectRootDir + + let cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + + hookPrePlanning + projectRootDir + distDirLayout + cliConfig + + -- Take the project configuration and make a plan for how to build + -- everything in the project. This is independent of any specific targets + -- the user has asked for. + -- + (elaboratedPlan, elaboratedShared, projectConfig) <- + rebuildInstallPlan verbosity + projectRootDir distDirLayout cabalDirLayout + cliConfig + + let buildSettings = resolveBuildTimeSettings + verbosity cabalDirLayout + (projectConfigShared projectConfig) + (projectConfigBuildOnly projectConfig) + (projectConfigBuildOnly cliConfig) + + -- The plan for what to do is represented by an 'ElaboratedInstallPlan' + + -- Now given the specific targets the user has asked for, decide + -- which bits of the plan we will want to execute. + -- + elaboratedPlan' <- hookSelectPlanSubset elaboratedPlan + + -- Check if any packages don't need rebuilding, and improve the plan. + -- This also gives us more accurate reasons for the --dry-run output. + -- + (elaboratedPlan'', pkgsBuildStatus) <- + rebuildTargetsDryRun distDirLayout + elaboratedPlan' + + return ProjectBuildContext { + distDirLayout, + elaboratedPlan = elaboratedPlan'', + elaboratedShared, + pkgsBuildStatus, + buildSettings + } + + +-- | Build phase: now do it. +-- +-- Execute all or parts of the description of what to do to build or +-- rebuild the various packages needed. +-- +runProjectBuildPhase :: Verbosity + -> ProjectBuildContext + -> IO ElaboratedInstallPlan +runProjectBuildPhase verbosity ProjectBuildContext {..} = + rebuildTargets verbosity + distDirLayout + elaboratedPlan + elaboratedShared + pkgsBuildStatus + buildSettings + + -- Note that it is a deliberate design choice that the 'buildTargets' is + -- not passed to phase 1, and the various bits of input config is not + -- passed to phase 2. + -- + -- We make the install plan without looking at the particular targets the + -- user asks us to build. The set of available things we can build is + -- discovered from the env and config and is used to make the install plan. + -- The targets just tell us which parts of the install plan to execute. + -- + -- Conversely, executing the plan does not directly depend on any of the + -- input config. The bits that are needed (or better, the decisions based + -- on it) all go into the install plan. 
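-- A hedged sketch (not from this patch) of how a command might drive the two
-- phases defined above, using no-op hooks. It assumes a module that imports
-- Distribution.Client.ProjectOrchestration and Distribution.Verbosity, and
-- that the five CLI flag types are Monoids (as they are in cabal-install),
-- so mempty stands for "no command-line overrides". buildEverything is an
-- invented name.

import Distribution.Verbosity (normal)

buildEverything :: IO ()
buildEverything = do
    let hooks = PreBuildHooks {
          hookPrePlanning      = \_projectRoot _distDirLayout _config -> return (),
          hookSelectPlanSubset = return   -- keep the whole plan
        }
        cliConfig = (mempty, mempty, mempty, mempty, mempty)
    buildCtx <- runProjectPreBuildPhase normal cliConfig hooks
    printPlan normal buildCtx
    plan <- runProjectBuildPhase normal buildCtx
    reportBuildFailures plan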
+ + -- Notionally, the 'BuildFlags' should be things that do not affect what + -- we build, just how we do it. These ones of course do + + +------------------------------------------------------------------------------ +-- Taking targets into account, selecting what to build +-- + +-- | Adjust an 'ElaboratedInstallPlan' by selecting just those parts of it +-- required to build the given user targets. +-- +-- How to get the 'PackageTarget's from the 'UserBuildTarget' is customisable. +-- +selectTargets :: PackageTarget + -> (ComponentTarget -> PackageTarget) + -> [UserBuildTarget] + -> ElaboratedInstallPlan + -> IO ElaboratedInstallPlan +selectTargets targetDefaultComponents targetSpecificComponent + userBuildTargets installPlan = do + + -- Match the user targets against the available targets. If no targets are + -- given this uses the package in the current directory, if any. + -- + buildTargets <- resolveUserBuildTargets localPackages userBuildTargets + --TODO: [required eventually] report something if there are no targets + + --TODO: [required eventually] + -- we cannot resolve names of packages other than those that are + -- directly in the current plan. We ought to keep a set of the known + -- hackage packages so we can resolve names to those. Though we don't + -- really need that until we can do something sensible with packages + -- outside of the project. + + -- Now check if those targets belong to the current project or not. + -- Ultimately we want to do something sensible for targets not in this + -- project, but for now we just bail. This gives us back the ipkgid from + -- the plan. + -- + buildTargets' <- either reportBuildTargetProblems return + $ resolveAndCheckTargets + targetDefaultComponents + targetSpecificComponent + installPlan + buildTargets + + -- Finally, prune the install plan to cover just those target packages + -- and their deps. + -- + return (pruneInstallPlanToTargets buildTargets' installPlan) + where + localPackages = + [ (pkgDescription pkg, pkgSourceLocation pkg) + | InstallPlan.Configured pkg <- InstallPlan.toList installPlan ] + --TODO: [code cleanup] is there a better way to identify local packages? + + + +resolveAndCheckTargets :: PackageTarget + -> (ComponentTarget -> PackageTarget) + -> ElaboratedInstallPlan + -> [BuildTarget PackageName] + -> Either [BuildTargetProblem] + (Map InstalledPackageId [PackageTarget]) +resolveAndCheckTargets targetDefaultComponents + targetSpecificComponent + installPlan targets = + case partitionEithers (map checkTarget targets) of + ([], targets') -> Right $ Map.fromListWith (++) + [ (ipkgid, [t]) | (ipkgid, t) <- targets' ] + (problems, _) -> Left problems + where + -- TODO [required eventually] currently all build targets refer to packages + -- inside the project. Ultimately this has to be generalised to allow + -- referring to other packages and targets. + + -- We can ask to build any whole package, project-local or a dependency + checkTarget (BuildTargetPackage pn) + | Just ipkgid <- Map.lookup pn projAllPkgs + = Right (ipkgid, targetDefaultComponents) + + -- But if we ask to build an individual component, then that component + -- had better be in a package that is local to the project. 
+ -- TODO: and if it's an optional stanza, then that stanza must be available + checkTarget t@(BuildTargetComponent pn cn) + | Just ipkgid <- Map.lookup pn projLocalPkgs + = Right (ipkgid, targetSpecificComponent + (ComponentTarget cn WholeComponent)) + + | Map.member pn projAllPkgs + = Left (BuildTargetComponentNotProjectLocal t) + + checkTarget t@(BuildTargetModule pn cn mn) + | Just ipkgid <- Map.lookup pn projLocalPkgs + = Right (ipkgid, BuildSpecificComponent (ComponentTarget cn (ModuleTarget mn))) + + | Map.member pn projAllPkgs + = Left (BuildTargetComponentNotProjectLocal t) + + checkTarget t@(BuildTargetFile pn cn fn) + | Just ipkgid <- Map.lookup pn projLocalPkgs + = Right (ipkgid, BuildSpecificComponent (ComponentTarget cn (FileTarget fn))) + + | Map.member pn projAllPkgs + = Left (BuildTargetComponentNotProjectLocal t) + + checkTarget t + = Left (BuildTargetNotInProject (buildTargetPackage t)) + + + projAllPkgs, projLocalPkgs :: Map PackageName InstalledPackageId + projAllPkgs = + Map.fromList + [ (packageName pkg, installedPackageId pkg) + | pkg <- InstallPlan.toList installPlan ] + + projLocalPkgs = + Map.fromList + [ (packageName pkg, installedPackageId pkg) + | InstallPlan.Configured pkg <- InstallPlan.toList installPlan + , case pkgSourceLocation pkg of + LocalUnpackedPackage _ -> True; _ -> False + --TODO: [code cleanup] is there a better way to identify local packages? + ] + + --TODO: [research required] what if the solution has multiple versions of this package? + -- e.g. due to setup deps or due to multiple independent sets of + -- packages being built (e.g. ghc + ghcjs in a project) + +data BuildTargetProblem + = BuildTargetNotInProject PackageName + | BuildTargetComponentNotProjectLocal (BuildTarget PackageName) + | BuildTargetOptionalStanzaDisabled Bool + -- ^ @True@: explicitly disabled by user + -- @False@: disabled by solver + +reportBuildTargetProblems :: [BuildTargetProblem] -> IO a +reportBuildTargetProblems = die . unlines . map reportBuildTargetProblem + +reportBuildTargetProblem :: BuildTargetProblem -> String +reportBuildTargetProblem (BuildTargetNotInProject pn) = + "Cannot build the package " ++ display pn ++ ", it is not in this project." + ++ "(either directly or indirectly). If you want to add it to the " + ++ "project then edit the cabal.project file." + +reportBuildTargetProblem (BuildTargetComponentNotProjectLocal t) = + "The package " ++ display (buildTargetPackage t) ++ " is in the " + ++ "project but it is not a locally unpacked package, so " + +reportBuildTargetProblem (BuildTargetOptionalStanzaDisabled _) = undefined + + +------------------------------------------------------------------------------ +-- Displaying what we plan to do +-- + +-- | Print a user-oriented presentation of the install plan, indicating what +-- will be built. 
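-- A minimal standalone version (not from this patch) of the validation
-- pattern used by resolveAndCheckTargets above: check every target, and
-- either report all problems at once or return the complete result.
-- checkAll and check are invented names.

import Data.Either (partitionEithers)

checkAll :: (t -> Either e r) -> [t] -> Either [e] [r]
checkAll check targets =
  case partitionEithers (map check targets) of
    ([], results) -> Right results
    (problems, _) -> Left problems

main :: IO ()
main = do
  let check n | n < 0     = Left ("negative: " ++ show n)
              | otherwise = Right (n * (2 :: Int))
  print (checkAll check [1, 2, 3])    -- Right [2,4,6]
  print (checkAll check [1, -2, -3])  -- Left ["negative: -2","negative: -3"]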
+-- +printPlan :: Verbosity -> ProjectBuildContext -> IO () +printPlan verbosity + ProjectBuildContext { + elaboratedPlan, + pkgsBuildStatus, + buildSettings = BuildTimeSettings{buildSettingDryRun} + } + + | null pkgs + = notice verbosity "Up to date" + + | verbosity >= verbose + = notice verbosity $ unlines $ + ("In order, the following " ++ wouldWill ++ " be built:") + : map showPkgAndReason pkgs + + | otherwise + = notice verbosity $ unlines $ + ("In order, the following " ++ wouldWill + ++ " be built (use -v for more details):") + : map showPkg pkgs + where + pkgs = linearizeInstallPlan elaboratedPlan + + wouldWill | buildSettingDryRun = "would" + | otherwise = "will" + + showPkg pkg = display (packageId pkg) + + showPkgAndReason :: ElaboratedReadyPackage -> String + showPkgAndReason (ReadyPackage pkg _) = + display (packageId pkg) ++ + showTargets pkg ++ + showFlagAssignment (nonDefaultFlags pkg) ++ + showStanzas pkg ++ + let buildStatus = pkgsBuildStatus Map.! installedPackageId pkg in + " (" ++ showBuildStatus buildStatus ++ ")" + + nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment + nonDefaultFlags pkg = pkgFlagAssignment pkg \\ pkgFlagDefaults pkg + + showStanzas pkg = concat + $ [ " *test" + | TestStanzas `Set.member` pkgStanzasEnabled pkg ] + ++ [ " *bench" + | BenchStanzas `Set.member` pkgStanzasEnabled pkg ] + + showTargets pkg + | null (pkgBuildTargets pkg) = "" + | otherwise + = " (" ++ unwords [ showComponentTarget pkg t | t <- pkgBuildTargets pkg ] + ++ ")" + + -- TODO: [code cleanup] this should be a proper function in a proper place + showFlagAssignment :: FlagAssignment -> String + showFlagAssignment = concatMap ((' ' :) . showFlagValue) + showFlagValue (f, True) = '+' : showFlagName f + showFlagValue (f, False) = '-' : showFlagName f + showFlagName (PD.FlagName f) = f + + showBuildStatus status = case status of + BuildStatusPreExisting -> "already installed" + BuildStatusDownload {} -> "requires download & build" + BuildStatusUnpack {} -> "requires build" + BuildStatusRebuild _ rebuild -> case rebuild of + BuildStatusConfigure + (MonitoredValueChanged _) -> "configuration changed" + BuildStatusConfigure mreason -> showMonitorChangedReason mreason + BuildStatusBuild _ buildreason -> case buildreason of + BuildReasonDepsRebuilt -> "dependency rebuilt" + BuildReasonFilesChanged + (MonitoredFileChanged _) -> "files changed" + BuildReasonFilesChanged + mreason -> showMonitorChangedReason mreason + BuildReasonExtraTargets _ -> "additional components to build" + BuildReasonEphemeralTargets -> "ephemeral targets" + BuildStatusUpToDate {} -> "up to date" -- doesn't happen + + showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file + showMonitorChangedReason (MonitoredValueChanged _) = "value changed" + showMonitorChangedReason MonitorFirstRun = "first run" + showMonitorChangedReason MonitorCorruptCache = "cannot read state cache" + +linearizeInstallPlan :: ElaboratedInstallPlan -> [ElaboratedReadyPackage] +linearizeInstallPlan = + unfoldr next + where + next plan = case InstallPlan.ready plan of + [] -> Nothing + (pkg:_) -> Just (pkg, plan') + where + ipkgid = installedPackageId pkg + ipkg = Installed.emptyInstalledPackageInfo { + Installed.sourcePackageId = packageId pkg, + Installed.installedUnitId = ipkgid + } + plan' = InstallPlan.completed ipkgid (Just ipkg) + (BuildOk DocsNotTried TestsNotTried) + (InstallPlan.processing [pkg] plan) + --TODO: [code cleanup] This is a bit of a hack, pretending that each package is installed + -- could we use 
InstallPlan.topologicalOrder? + + +reportBuildFailures :: ElaboratedInstallPlan -> IO () +reportBuildFailures plan = + + case [ (pkg, reason) + | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of + [] -> return () + _failed -> exitFailure + --TODO: [required eventually] see the old printBuildFailures for an example + -- of the kind of things we could report, but we want to handle the special + -- case of the current package better, since if you do "cabal build" then + -- you don't need a lot of context to explain where the ghc error message + -- comes from, and indeed extra noise would just be annoying. + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning/Types.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,369 @@ +{-# LANGUAGE DeriveGeneric, DeriveDataTypeable #-} + +-- | Types used while planning how to build everything in a project. +-- +-- Primarily this is the 'ElaboratedInstallPlan'. +-- +module Distribution.Client.ProjectPlanning.Types ( + SolverInstallPlan, + + -- * Elaborated install plan types + ElaboratedInstallPlan, + ElaboratedConfiguredPackage(..), + ElaboratedPlanPackage, + ElaboratedSharedConfig(..), + ElaboratedReadyPackage, + BuildStyle(..), + CabalFileText, + + -- * Types used in executing an install plan + --TODO: [code cleanup] these types should live with execution, not with + -- plan definition. Need to better separate InstallPlan definition. + GenericBuildResult(..), + BuildResult, + BuildSuccess(..), + BuildFailure(..), + DocsResult(..), + TestsResult(..), + + -- * Build targets + PackageTarget(..), + ComponentTarget(..), + SubComponentTarget(..), + + -- * Setup script + SetupScriptStyle(..), + ) where + +import Distribution.Client.PackageHash + +import Distribution.Client.Types + hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) + , DocsResult(..), TestsResult(..) ) +import Distribution.Client.InstallPlan + ( GenericInstallPlan, InstallPlan, GenericPlanPackage ) +import Distribution.Client.ComponentDeps (ComponentDeps) + +import Distribution.Package + hiding (InstalledPackageId, installedPackageId) +import Distribution.System +import qualified Distribution.PackageDescription as Cabal +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import Distribution.Simple.Compiler +import Distribution.Simple.Program.Db +import Distribution.ModuleName (ModuleName) +import Distribution.Simple.LocalBuildInfo (ComponentName(..)) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.InstallDirs (PathTemplate) +import Distribution.Version + +import Data.Map (Map) +import Data.Set (Set) +import qualified Data.ByteString.Lazy as LBS +import Distribution.Compat.Binary +import GHC.Generics (Generic) +import Data.Typeable (Typeable) +import Control.Exception + + + +-- | The type of install plan produced by the solver and used as the starting +-- point for the 'ElaboratedInstallPlan'. 
+-- +type SolverInstallPlan + = InstallPlan --TODO: [code cleanup] redefine locally or move def to solver interface + + +-- | The combination of an elaborated install plan plus a +-- 'ElaboratedSharedConfig' contains all the details necessary to be able +-- to execute the plan without having to make further policy decisions. +-- +-- It does not include dynamic elements such as resources (such as http +-- connections). +-- +type ElaboratedInstallPlan + = GenericInstallPlan InstalledPackageInfo + ElaboratedConfiguredPackage + BuildSuccess BuildFailure + +type ElaboratedPlanPackage + = GenericPlanPackage InstalledPackageInfo + ElaboratedConfiguredPackage + BuildSuccess BuildFailure + +--TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle +-- even platform and compiler could be different if we're building things +-- like a server + client with ghc + ghcjs +data ElaboratedSharedConfig + = ElaboratedSharedConfig { + + pkgConfigPlatform :: Platform, + pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo + -- | The programs that the compiler configured (e.g. for GHC, the progs + -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are + -- used. + pkgConfigCompilerProgs :: ProgramDb + } + deriving (Show, Generic) + --TODO: [code cleanup] no Eq instance + +instance Binary ElaboratedSharedConfig + +data ElaboratedConfiguredPackage + = ElaboratedConfiguredPackage { + + pkgInstalledId :: InstalledPackageId, + pkgSourceId :: PackageId, + + -- | TODO: [code cleanup] we don't need this, just a few bits from it: + -- build type, spec version + pkgDescription :: Cabal.PackageDescription, + + -- | A total flag assignment for the package + pkgFlagAssignment :: Cabal.FlagAssignment, + + -- | The original default flag assignment, used only for reporting. + pkgFlagDefaults :: Cabal.FlagAssignment, + + -- | The exact dependencies (on other plan packages) + -- + pkgDependencies :: ComponentDeps [ConfiguredId], + + -- | Which optional stanzas (ie testsuites, benchmarks) can be built. + -- This means the solver produced a plan that has them available. + -- This doesn't necessary mean we build them by default. + pkgStanzasAvailable :: Set OptionalStanza, + + -- | Which optional stanzas the user explicitly asked to enable or + -- to disable. This tells us which ones we build by default, and + -- helps with error messages when the user asks to build something + -- they explicitly disabled. + pkgStanzasRequested :: Map OptionalStanza Bool, + + -- | Which optional stanzas (ie testsuites, benchmarks) will actually + -- be enabled during the package configure step. + pkgStanzasEnabled :: Set OptionalStanza, + + -- | Where the package comes from, e.g. tarball, local dir etc. This + -- is not the same as where it may be unpacked to for the build. + pkgSourceLocation :: PackageLocation (Maybe FilePath), + + -- | The hash of the source, e.g. the tarball. We don't have this for + -- local source dir packages. + pkgSourceHash :: Maybe PackageSourceHash, + + --pkgSourceDir ? -- currently passed in later because they can use temp locations + --pkgBuildDir ? 
-- but could in principle still have it here, with optional instr to use temp loc + + pkgBuildStyle :: BuildStyle, + + pkgSetupPackageDBStack :: PackageDBStack, + pkgBuildPackageDBStack :: PackageDBStack, + pkgRegisterPackageDBStack :: PackageDBStack, + + -- | The package contains a library and so must be registered + pkgRequiresRegistration :: Bool, + pkgDescriptionOverride :: Maybe CabalFileText, + + pkgVanillaLib :: Bool, + pkgSharedLib :: Bool, + pkgDynExe :: Bool, + pkgGHCiLib :: Bool, + pkgProfLib :: Bool, + pkgProfExe :: Bool, + pkgProfLibDetail :: ProfDetailLevel, + pkgProfExeDetail :: ProfDetailLevel, + pkgCoverage :: Bool, + pkgOptimization :: OptimisationLevel, + pkgSplitObjs :: Bool, + pkgStripLibs :: Bool, + pkgStripExes :: Bool, + pkgDebugInfo :: DebugInfoLevel, + + pkgProgramPaths :: Map String FilePath, + pkgProgramArgs :: Map String [String], + pkgProgramPathExtra :: [FilePath], + pkgConfigureScriptArgs :: [String], + pkgExtraLibDirs :: [FilePath], + pkgExtraFrameworkDirs :: [FilePath], + pkgExtraIncludeDirs :: [FilePath], + pkgProgPrefix :: Maybe PathTemplate, + pkgProgSuffix :: Maybe PathTemplate, + + pkgInstallDirs :: InstallDirs.InstallDirs FilePath, + + pkgHaddockHoogle :: Bool, + pkgHaddockHtml :: Bool, + pkgHaddockHtmlLocation :: Maybe String, + pkgHaddockExecutables :: Bool, + pkgHaddockTestSuites :: Bool, + pkgHaddockBenchmarks :: Bool, + pkgHaddockInternal :: Bool, + pkgHaddockCss :: Maybe FilePath, + pkgHaddockHscolour :: Bool, + pkgHaddockHscolourCss :: Maybe FilePath, + pkgHaddockContents :: Maybe PathTemplate, + + -- Setup.hs related things: + + -- | One of four modes for how we build and interact with the Setup.hs + -- script, based on whether it's a build-type Custom, with or without + -- explicit deps and the cabal spec version the .cabal file needs. + pkgSetupScriptStyle :: SetupScriptStyle, + + -- | The version of the Cabal command line interface that we are using + -- for this package. This is typically the version of the Cabal lib + -- that the Setup.hs is built against. + pkgSetupScriptCliVersion :: Version, + + -- Build time related: + pkgBuildTargets :: [ComponentTarget], + pkgReplTarget :: Maybe ComponentTarget, + pkgBuildHaddocks :: Bool + } + deriving (Eq, Show, Generic) + +instance Binary ElaboratedConfiguredPackage + +instance Package ElaboratedConfiguredPackage where + packageId = pkgSourceId + +instance HasUnitId ElaboratedConfiguredPackage where + installedUnitId = pkgInstalledId + +instance PackageFixedDeps ElaboratedConfiguredPackage where + depends = fmap (map installedPackageId) . pkgDependencies + +-- | This is used in the install plan to indicate how the package will be +-- built. +-- +data BuildStyle = + -- | The classic approach where the package is built, then the files + -- installed into some location and the result registered in a package db. + -- + -- If the package came from a tarball then it's built in a temp dir and + -- the results discarded. + BuildAndInstall + + -- | The package is built, but the files are not installed anywhere, + -- rather the build dir is kept and the package is registered inplace. + -- + -- Such packages can still subsequently be installed. + -- + -- Typically 'BuildAndInstall' packages will only depend on other + -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones. 
+ -- + | BuildInplaceOnly + deriving (Eq, Show, Generic) + +instance Binary BuildStyle + +type CabalFileText = LBS.ByteString + +type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage + InstalledPackageInfo + +--TODO: [code cleanup] this duplicates the InstalledPackageInfo quite a bit in an install plan +-- because the same ipkg is used by many packages. So the binary file will be big. +-- Could we keep just (ipkgid, deps) instead of the whole InstalledPackageInfo? +-- or transform to a shared form when serialising / deserialising + +data GenericBuildResult ipkg iresult ifailure + = BuildFailure ifailure + | BuildSuccess (Maybe ipkg) iresult + deriving (Eq, Show, Generic) + +instance (Binary ipkg, Binary iresult, Binary ifailure) => + Binary (GenericBuildResult ipkg iresult ifailure) + +type BuildResult = GenericBuildResult InstalledPackageInfo + BuildSuccess BuildFailure + +data BuildSuccess = BuildOk DocsResult TestsResult + deriving (Eq, Show, Generic) + +data DocsResult = DocsNotTried | DocsFailed | DocsOk + deriving (Eq, Show, Generic) + +data TestsResult = TestsNotTried | TestsOk + deriving (Eq, Show, Generic) + +data BuildFailure = PlanningFailed --TODO: [required eventually] not yet used + | DependentFailed PackageId + | DownloadFailed String --TODO: [required eventually] not yet used + | UnpackFailed String --TODO: [required eventually] not yet used + | ConfigureFailed String + | BuildFailed String + | TestsFailed String --TODO: [required eventually] not yet used + | InstallFailed String + deriving (Eq, Show, Typeable, Generic) + +instance Exception BuildFailure + +instance Binary BuildFailure +instance Binary BuildSuccess +instance Binary DocsResult +instance Binary TestsResult + + +--------------------------- +-- Build targets +-- + +-- | The various targets within a package. This is more of a high level +-- specification than a elaborated prescription. +-- +data PackageTarget = + -- | Build the default components in this package. This usually means + -- just the lib and exes, but it can also mean the testsuites and + -- benchmarks if the user explicitly requested them. + BuildDefaultComponents + -- | Build a specific component in this package. + | BuildSpecificComponent ComponentTarget + | ReplDefaultComponent + | ReplSpecificComponent ComponentTarget + | HaddockDefaultComponents + deriving (Eq, Show, Generic) + +data ComponentTarget = ComponentTarget ComponentName SubComponentTarget + deriving (Eq, Show, Generic) + +data SubComponentTarget = WholeComponent + | ModuleTarget ModuleName + | FileTarget FilePath + deriving (Eq, Show, Generic) + +instance Binary PackageTarget +instance Binary ComponentTarget +instance Binary SubComponentTarget + + +--------------------------- +-- Setup.hs script policy +-- + +-- | There are four major cases for Setup.hs handling: +-- +-- 1. @build-type@ Custom with a @custom-setup@ section +-- 2. @build-type@ Custom without a @custom-setup@ section +-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ +-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ +-- +-- It's also worth noting that packages specifying @cabal-version: >= 1.23@ +-- or later that have @build-type@ Custom will always have a @custom-setup@ +-- section. Therefore in case 2, the specified @cabal-version@ will always be +-- less than 1.23. +-- +-- In cases 1 and 2 we obviously have to build an external Setup.hs script, +-- while in case 4 we can use the internal library API. 
In case 3 we also have +-- to build an external Setup.hs script because the package needs a later +-- Cabal lib version than we can support internally. +-- +data SetupScriptStyle = SetupCustomExplicitDeps + | SetupCustomImplicitDeps + | SetupNonCustomExternalLib + | SetupNonCustomInternalLib + deriving (Eq, Show, Generic) + +instance Binary SetupScriptStyle + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanning.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,2283 @@ +{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} + +-- | Planning how to build everything in a project. +-- +module Distribution.Client.ProjectPlanning ( + -- * elaborated install plan types + ElaboratedInstallPlan, + ElaboratedConfiguredPackage(..), + ElaboratedPlanPackage, + ElaboratedSharedConfig(..), + ElaboratedReadyPackage, + BuildStyle(..), + CabalFileText, + + --TODO: [code cleanup] these types should live with execution, not with + -- plan definition. Need to better separate InstallPlan definition. + GenericBuildResult(..), + BuildResult, + BuildSuccess(..), + BuildFailure(..), + DocsResult(..), + TestsResult(..), + + -- * Producing the elaborated install plan + rebuildInstallPlan, + + -- * Build targets + PackageTarget(..), + ComponentTarget(..), + SubComponentTarget(..), + showComponentTarget, + + -- * Selecting a plan subset + pruneInstallPlanToTargets, + + -- * Utils required for building + pkgHasEphemeralBuildTargets, + pkgBuildTargetWholeComponents, + + -- * Setup.hs CLI flags for building + setupHsScriptOptions, + setupHsConfigureFlags, + setupHsBuildFlags, + setupHsBuildArgs, + setupHsReplFlags, + setupHsReplArgs, + setupHsCopyFlags, + setupHsRegisterFlags, + setupHsHaddockFlags, + + packageHashInputs, + + -- TODO: [code cleanup] utils that should live in some shared place? + createPackageDBIfMissing + ) where + +import Distribution.Client.ProjectPlanning.Types +import Distribution.Client.PackageHash +import Distribution.Client.RebuildMonad +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanOutput + +import Distribution.Client.Types + hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) + , DocsResult(..), TestsResult(..) 
) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Client.ComponentDeps (ComponentDeps) +import qualified Distribution.Client.IndexUtils as IndexUtils +import qualified Distribution.Client.PackageIndex as SourcePackageIndex +import Distribution.Client.Targets (userToPackageConstraint) +import Distribution.Client.DistDirLayout +import Distribution.Client.SetupWrapper +import Distribution.Client.JobControl +import Distribution.Client.FetchUtils +import qualified Hackage.Security.Client as Sec +import Distribution.Client.PkgConfigDb +import Distribution.Client.Setup hiding (packageName, cabalVersion) +import Distribution.Utils.NubList + +import Distribution.Package hiding + (InstalledPackageId, installedPackageId) +import Distribution.System +import qualified Distribution.PackageDescription as Cabal +import qualified Distribution.PackageDescription as PD +import qualified Distribution.PackageDescription.Configuration as PD +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.Compiler hiding (Flag) +import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate +import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate +import Distribution.Simple.Program +import Distribution.Simple.Program.Db +import Distribution.Simple.Program.Find +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Setup + (Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault) +import qualified Distribution.Simple.Configure as Cabal +import qualified Distribution.Simple.LocalBuildInfo as Cabal +import Distribution.Simple.LocalBuildInfo (ComponentName(..)) +import qualified Distribution.Simple.Register as Cabal +import qualified Distribution.Simple.InstallDirs as InstallDirs +import qualified Distribution.Simple.BuildTarget as Cabal + +import Distribution.Simple.Utils hiding (matchFileGlob) +import Distribution.Version +import Distribution.Verbosity +import Distribution.Text + +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.Graph as Graph +import qualified Data.Tree as Tree +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad +import Control.Monad.State as State +import Control.Exception +import Data.List +import Data.Maybe +import Data.Either +import Data.Monoid +import Data.Function +import System.FilePath +import System.Directory (doesDirectoryExist) + + +------------------------------------------------------------------------------ +-- * Elaborated install plan +------------------------------------------------------------------------------ + +-- "Elaborated" -- worked out with great care and nicety of detail; +-- executed with great minuteness: elaborate preparations; +-- elaborate care. +-- +-- So here's the idea: +-- +-- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc +-- all passed in as separate args and which are then further selected, +-- transformed etc during the execution of the build. 
Instead we construct +-- an elaborated install plan that includes everything we will need, and then +-- during the execution of the plan we do as little transformation of this +-- info as possible. +-- +-- So we're trying to split the work into two phases: construction of the +-- elaborated install plan (which as far as possible should be pure) and +-- then simple execution of that plan without any smarts, just doing what the +-- plan says to do. +-- +-- So that means we need a representation of this fully elaborated install +-- plan. The representation consists of two parts: +-- +-- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a +-- representation of source packages that includes a lot more detail about +-- that package's individual configuration +-- +-- * A 'ElaboratedSharedConfig'. Some package configuration is the same for +-- every package in a plan. Rather than duplicate that info every entry in +-- the 'GenericInstallPlan' we keep that separately. +-- +-- The division between the shared and per-package config is /not set in stone +-- for all time/. For example if we wanted to generalise the install plan to +-- describe a situation where we want to build some packages with GHC and some +-- with GHCJS then the platform and compiler would no longer be shared between +-- all packages but would have to be per-package (probably with some sanity +-- condition on the graph structure). +-- + +-- Refer to ProjectPlanning.Types for details of these important types: + +-- type ElaboratedInstallPlan = ... +-- type ElaboratedPlanPackage = ... +-- data ElaboratedSharedConfig = ... +-- data ElaboratedConfiguredPackage = ... +-- data BuildStyle = + + +sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> Bool +sanityCheckElaboratedConfiguredPackage sharedConfig + pkg@ElaboratedConfiguredPackage{..} = + + pkgStanzasEnabled `Set.isSubsetOf` pkgStanzasAvailable + + -- the stanzas explicitly enabled should be available and enabled + && Map.keysSet (Map.filter id pkgStanzasRequested) + `Set.isSubsetOf` pkgStanzasEnabled + + -- the stanzas explicitly disabled should not be available + && Set.null (Map.keysSet (Map.filter not pkgStanzasRequested) + `Set.intersection` pkgStanzasAvailable) + + && (pkgBuildStyle == BuildInplaceOnly || + installedPackageId pkg == hashedInstalledPackageId + (packageHashInputs sharedConfig pkg)) + + && (pkgBuildStyle == BuildInplaceOnly || + Set.null pkgStanzasAvailable) + + +------------------------------------------------------------------------------ +-- * Deciding what to do: making an 'ElaboratedInstallPlan' +------------------------------------------------------------------------------ + +rebuildInstallPlan :: Verbosity + -> FilePath -> DistDirLayout -> CabalDirLayout + -> ProjectConfig + -> IO ( ElaboratedInstallPlan + , ElaboratedSharedConfig + , ProjectConfig ) +rebuildInstallPlan verbosity + projectRootDir + distDirLayout@DistDirLayout { + distDirectory, + distProjectCacheFile, + distProjectCacheDirectory + } + cabalDirLayout@CabalDirLayout { + cabalPackageCacheDirectory, + cabalStoreDirectory, + cabalStorePackageDB + } + cliConfig = + runRebuild projectRootDir $ do + progsearchpath <- liftIO $ getSystemSearchPath + let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty } + + -- The overall improved plan is cached + rerunIfChanged verbosity fileMonitorImprovedPlan + -- react to changes in command line args and the path + (cliConfigPersistent, progsearchpath) $ do + + -- And so is the 
elaborated plan that the improved plan is based on + (elaboratedPlan, elaboratedShared, + projectConfig) <- + rerunIfChanged verbosity fileMonitorElaboratedPlan + (cliConfigPersistent, progsearchpath) $ do + + (projectConfig, projectConfigTransient) <- phaseReadProjectConfig + localPackages <- phaseReadLocalPackages projectConfig + compilerEtc <- phaseConfigureCompiler projectConfig + _ <- phaseConfigurePrograms projectConfig compilerEtc + solverPlan <- phaseRunSolver projectConfigTransient + compilerEtc localPackages + (elaboratedPlan, + elaboratedShared) <- phaseElaboratePlan projectConfigTransient + compilerEtc + solverPlan localPackages + phaseMaintainPlanOutputs elaboratedPlan elaboratedShared + + return (elaboratedPlan, elaboratedShared, + projectConfig) + + -- The improved plan changes each time we install something, whereas + -- the underlying elaborated plan only changes when input config + -- changes, so it's worth caching them separately. + improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared + return (improvedPlan, elaboratedShared, projectConfig) + + where + fileMonitorCompiler = newFileMonitorInCacheDir "compiler" + fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" + fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" + fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" + fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" + + newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b + newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile + + -- Read the cabal.project (or implicit config) and combine it with + -- arguments from the command line + -- + phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig) + phaseReadProjectConfig = do + liftIO $ do + info verbosity "Project settings changed, reconfiguring..." + createDirectoryIfMissingVerbose verbosity False distDirectory + createDirectoryIfMissingVerbose verbosity False distProjectCacheDirectory + + projectConfig <- readProjectConfig verbosity projectRootDir + + -- The project config coming from the command line includes "build only" + -- flags that we don't cache persistently (because like all "build only" + -- flags they do not affect the value of the outcome) but that we do + -- sometimes use during planning (in particular the http transport) + let projectConfigTransient = projectConfig <> cliConfig + projectConfigPersistent = projectConfig + <> cliConfig { + projectConfigBuildOnly = mempty + } + liftIO $ writeProjectConfigFile (distProjectCacheFile "config") + projectConfigPersistent + return (projectConfigPersistent, projectConfigTransient) + + -- Look for all the cabal packages in the project + -- some of which may be local src dirs, tarballs etc + -- + phaseReadLocalPackages :: ProjectConfig + -> Rebuild [SourcePackage] + phaseReadLocalPackages projectConfig = do + + localCabalFiles <- findProjectPackages projectRootDir projectConfig + mapM (readSourcePackage verbosity) localCabalFiles + + + -- Configure the compiler we're using. + -- + -- This is moderately expensive and doesn't change that often so we cache + -- it independently. 
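Each of the phases below follows the same caching idiom already visible above: the phase's inputs are packed into a change key for a FileMonitor kept in the dist cache directory, and rerunIfChanged only re-runs the body when that key or a monitored file changes. A minimal sketch of the idiom, with hypothetical names (fileMonitorExample, someInput, expensiveComputation and ExampleResult are illustrative stand-ins, not names from this module):

    -- Sketch only: mirrors the rerunIfChanged / newFileMonitorInCacheDir usage above,
    -- with made-up inputs standing in for a real phase's inputs.
    phaseExample :: Rebuild ExampleResult
    phaseExample =
        rerunIfChanged verbosity fileMonitorExample
                       (someInput, progsearchpath) $ do   -- change key for the cache
          liftIO $ info verbosity "Example inputs changed, recomputing..."
          expensiveComputation someInput
      where
        fileMonitorExample = newFileMonitorInCacheDir "example-phase"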
+ -- + phaseConfigureCompiler :: ProjectConfig + -> Rebuild (Compiler, Platform, ProgramDb) + phaseConfigureCompiler ProjectConfig { + projectConfigShared = ProjectConfigShared { + projectConfigHcFlavor, + projectConfigHcPath, + projectConfigHcPkg + }, + projectConfigLocalPackages = PackageConfig { + packageConfigProgramPaths, + packageConfigProgramArgs, + packageConfigProgramPathExtra + } + } = do + progsearchpath <- liftIO $ getSystemSearchPath + rerunIfChanged verbosity fileMonitorCompiler + (hcFlavor, hcPath, hcPkg, progsearchpath, + packageConfigProgramPaths, + packageConfigProgramArgs, + packageConfigProgramPathExtra) $ do + + liftIO $ info verbosity "Compiler settings changed, reconfiguring..." + result@(_, _, progdb') <- liftIO $ + Cabal.configCompilerEx + hcFlavor hcPath hcPkg + progdb verbosity + + -- Note that we added the user-supplied program locations and args + -- for /all/ programs, not just those for the compiler prog and + -- compiler-related utils. In principle we don't know which programs + -- the compiler will configure (and it does vary between compilers). + -- We do know however that the compiler will only configure the + -- programs it cares about, and those are the ones we monitor here. + monitorFiles (programsMonitorFiles progdb') + + return result + where + hcFlavor = flagToMaybe projectConfigHcFlavor + hcPath = flagToMaybe projectConfigHcPath + hcPkg = flagToMaybe projectConfigHcPkg + progdb = + userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) + . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) + . modifyProgramSearchPath + (++ [ ProgramSearchPathDir dir + | dir <- fromNubList packageConfigProgramPathExtra ]) + $ defaultProgramDb + + + -- Configuring other programs. + -- + -- Having configred the compiler, now we configure all the remaining + -- programs. This is to check we can find them, and to monitor them for + -- changes. + -- + -- TODO: [required eventually] we don't actually do this yet. + -- + -- We rely on the fact that the previous phase added the program config for + -- all local packages, but that all the programs configured so far are the + -- compiler program or related util programs. + -- + phaseConfigurePrograms :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> Rebuild () + phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do + -- Users are allowed to specify program locations independently for + -- each package (e.g. to use a particular version of a pre-processor + -- for some packages). However they cannot do this for the compiler + -- itself as that's just not going to work. So we check for this. + liftIO $ checkBadPerPackageCompilerPaths + (configuredPrograms compilerprogdb) + (getMapMappend (projectConfigSpecificPackage projectConfig)) + + --TODO: [required eventually] find/configure other programs that the + -- user specifies. + + --TODO: [required eventually] find/configure all build-tools + -- but note that some of them may be built as part of the plan. + + + -- Run the solver to get the initial install plan. + -- This is expensive so we cache it independently. 
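The solver's result comes back wrapped in a Progress value, which the phase below consumes with foldProgress: one continuation for each intermediate log message, one for failure and one for success. As a rough sketch of that pattern (a simplified stand-alone helper, not the exact code that follows):

    -- Sketch: folding a Progress value into IO, mirroring the
    -- foldProgress logMsg die return call inside phaseRunSolver below.
    consumeProgress :: Verbosity -> Progress String String a -> IO a
    consumeProgress verb progress =
        foldProgress logStep die return progress
      where
        -- log each solver step, then carry on with the rest of the fold
        logStep msg rest = debugNoWrap verb msg >> rest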
+ -- + phaseRunSolver :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> [SourcePackage] + -> Rebuild (SolverInstallPlan, PackagesImplicitSetupDeps) + phaseRunSolver projectConfig@ProjectConfig { + projectConfigShared, + projectConfigBuildOnly + } + (compiler, platform, progdb) + localPackages = + rerunIfChanged verbosity fileMonitorSolverPlan + (solverSettings, cabalPackageCacheDirectory, + localPackages, localPackagesEnabledStanzas, + compiler, platform, programsDbSignature progdb) $ do + + installedPkgIndex <- getInstalledPackages verbosity + compiler progdb platform + corePackageDbs + sourcePkgDb <- getSourcePackages verbosity withRepoCtx + pkgConfigDB <- getPkgConfigDb verbosity progdb + + --TODO: [code cleanup] it'd be better if the Compiler contained the + -- ConfiguredPrograms that it needs, rather than relying on the progdb + -- since we don't need to depend on all the programs here, just the + -- ones relevant for the compiler. + + liftIO $ do + solver <- chooseSolver verbosity + (solverSettingSolver solverSettings) + (compilerInfo compiler) + + notice verbosity "Resolving dependencies..." + foldProgress logMsg die return $ + planPackages compiler platform solver solverSettings + installedPkgIndex sourcePkgDb pkgConfigDB + localPackages localPackagesEnabledStanzas + where + corePackageDbs = [GlobalPackageDB] + withRepoCtx = projectConfigWithSolverRepoContext verbosity + cabalPackageCacheDirectory + projectConfigShared + projectConfigBuildOnly + solverSettings = resolveSolverSettings projectConfig + logMsg message rest = debugNoWrap verbosity message >> rest + + localPackagesEnabledStanzas = + Map.fromList + [ (pkgname, stanzas) + | pkg <- localPackages + , let pkgname = packageName pkg + testsEnabled = lookupLocalPackageConfig + packageConfigTests + projectConfig pkgname + benchmarksEnabled = lookupLocalPackageConfig + packageConfigBenchmarks + projectConfig pkgname + stanzas = + Map.fromList $ + [ (TestStanzas, enabled) + | enabled <- flagToList testsEnabled ] + ++ [ (BenchStanzas , enabled) + | enabled <- flagToList benchmarksEnabled ] + ] + + -- Elaborate the solver's install plan to get a fully detailed plan. This + -- version of the plan has the final nix-style hashed ids. + -- + phaseElaboratePlan :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> (SolverInstallPlan, PackagesImplicitSetupDeps) + -> [SourcePackage] + -> Rebuild ( ElaboratedInstallPlan + , ElaboratedSharedConfig ) + phaseElaboratePlan ProjectConfig { + projectConfigShared, + projectConfigLocalPackages, + projectConfigSpecificPackage, + projectConfigBuildOnly + } + (compiler, platform, progdb) + (solverPlan, pkgsImplicitSetupDeps) + localPackages = do + + liftIO $ debug verbosity "Elaborating the install plan..." + + sourcePackageHashes <- + rerunIfChanged verbosity fileMonitorSourceHashes + (packageLocationsSignature solverPlan) $ + getPackageSourceHashes verbosity withRepoCtx solverPlan + + defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler + return $ + elaborateInstallPlan + platform compiler progdb + distDirLayout + cabalDirLayout + solverPlan + pkgsImplicitSetupDeps + localPackages + sourcePackageHashes + defaultInstallDirs + projectConfigShared + projectConfigLocalPackages + (getMapMappend projectConfigSpecificPackage) + where + withRepoCtx = projectConfigWithSolverRepoContext verbosity + cabalPackageCacheDirectory + projectConfigShared + projectConfigBuildOnly + + + -- Update the files we maintain that reflect our current build environment. 
+ -- In particular we maintain a JSON representation of the elaborated + -- install plan. + -- + -- TODO: [required eventually] maintain the ghc environment file reflecting + -- the libs available. This will need to be after plan improvement phase. + -- + phaseMaintainPlanOutputs :: ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> Rebuild () + phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = do + liftIO $ debug verbosity "Updating plan.json" + liftIO $ writePlanExternalRepresentation + distDirLayout + elaboratedPlan + elaboratedShared + + + -- Improve the elaborated install plan. The elaborated plan consists + -- mostly of source packages (with full nix-style hashed ids). Where + -- corresponding installed packages already exist in the store, replace + -- them in the plan. + -- + -- Note that we do monitor the store's package db here, so we will redo + -- this improvement phase when the db changes -- including as a result of + -- executing a plan and installing things. + -- + phaseImprovePlan :: ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> Rebuild ElaboratedInstallPlan + phaseImprovePlan elaboratedPlan elaboratedShared = do + + liftIO $ debug verbosity "Improving the install plan..." + recreateDirectory verbosity True storeDirectory + storePkgIndex <- getPackageDBContents verbosity + compiler progdb platform + storePackageDb + let improvedPlan = improveInstallPlanWithPreExistingPackages + storePkgIndex + elaboratedPlan + return improvedPlan + + where + storeDirectory = cabalStoreDirectory (compilerId compiler) + storePackageDb = cabalStorePackageDB (compilerId compiler) + ElaboratedSharedConfig { + pkgConfigPlatform = platform, + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = progdb + } = elaboratedShared + + +programsMonitorFiles :: ProgramDb -> [MonitorFilePath] +programsMonitorFiles progdb = + [ monitor + | prog <- configuredPrograms progdb + , monitor <- monitorFileSearchPath (programMonitorFiles prog) + (programPath prog) + ] + +-- | Select the bits of a 'ProgramDb' to monitor for value changes. +-- Use 'programsMonitorFiles' for the files to monitor. +-- +programsDbSignature :: ProgramDb -> [ConfiguredProgram] +programsDbSignature progdb = + [ prog { programMonitorFiles = [] + , programOverrideEnv = filter ((/="PATH") . fst) + (programOverrideEnv prog) } + | prog <- configuredPrograms progdb ] + +getInstalledPackages :: Verbosity + -> Compiler -> ProgramDb -> Platform + -> PackageDBStack + -> Rebuild InstalledPackageIndex +getInstalledPackages verbosity compiler progdb platform packagedbs = do + monitorFiles . map monitorFileOrDirectory + =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles + verbosity compiler + packagedbs progdb platform) + liftIO $ IndexUtils.getInstalledPackages + verbosity compiler + packagedbs progdb + +getPackageDBContents :: Verbosity + -> Compiler -> ProgramDb -> Platform + -> PackageDB + -> Rebuild InstalledPackageIndex +getPackageDBContents verbosity compiler progdb platform packagedb = do + monitorFiles . map monitorFileOrDirectory + =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles + verbosity compiler + [packagedb] progdb platform) + liftIO $ do + createPackageDBIfMissing verbosity compiler + progdb [packagedb] + Cabal.getPackageDBContents verbosity compiler + packagedb progdb + +getSourcePackages :: Verbosity -> (forall a. 
(RepoContext -> IO a) -> IO a) + -> Rebuild SourcePackageDb +getSourcePackages verbosity withRepoCtx = do + (sourcePkgDb, repos) <- + liftIO $ + withRepoCtx $ \repoctx -> do + sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoctx + return (sourcePkgDb, repoContextRepos repoctx) + + monitorFiles . map monitorFile + . IndexUtils.getSourcePackagesMonitorFiles + $ repos + return sourcePkgDb + +createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb + -> PackageDBStack -> IO () +createPackageDBIfMissing verbosity compiler progdb packageDbs = + case reverse packageDbs of + SpecificPackageDB dbPath : _ -> do + exists <- liftIO $ Cabal.doesPackageDBExist dbPath + unless exists $ do + createDirectoryIfMissingVerbose verbosity False (takeDirectory dbPath) + Cabal.createPackageDB verbosity compiler progdb False dbPath + _ -> return () + + +getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb +getPkgConfigDb verbosity progdb = do + dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb + -- Just monitor the dirs so we'll notice new .pc files. + -- Alternatively we could monitor all the .pc files too. + forM_ dirs $ \dir -> do + dirExists <- liftIO $ doesDirectoryExist dir + -- TODO: turn this into a utility function + monitorFiles [if dirExists + then monitorDirectory dir + else monitorNonExistentDirectory dir] + + liftIO $ readPkgConfigDb verbosity progdb + + +recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild () +recreateDirectory verbosity createParents dir = do + liftIO $ createDirectoryIfMissingVerbose verbosity createParents dir + monitorFiles [monitorDirectoryExistence dir] + + +-- | Select the config values to monitor for changes package source hashes. +packageLocationsSignature :: SolverInstallPlan + -> [(PackageId, PackageLocation (Maybe FilePath))] +packageLocationsSignature solverPlan = + [ (packageId pkg, packageSource pkg) + | InstallPlan.Configured + (ConfiguredPackage pkg _ _ _) <- InstallPlan.toList solverPlan + ] + + +-- | Get the 'HashValue' for all the source packages where we use hashes, +-- and download any packages required to do so. +-- +-- Note that we don't get hashes for local unpacked packages. +-- +getPackageSourceHashes :: Verbosity + -> (forall a. (RepoContext -> IO a) -> IO a) + -> SolverInstallPlan + -> Rebuild (Map PackageId PackageSourceHash) +getPackageSourceHashes verbosity withRepoCtx solverPlan = do + + -- Determine if and where to get the package's source hash from. + -- + let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))] + allPkgLocations = + [ (packageId pkg, packageSource pkg) + | InstallPlan.Configured + (ConfiguredPackage pkg _ _ _) <- InstallPlan.toList solverPlan ] + + -- Tarballs that were local in the first place. + -- We'll hash these tarball files directly. + localTarballPkgs :: [(PackageId, FilePath)] + localTarballPkgs = + [ (pkgid, tarball) + | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ] + + -- Tarballs from remote URLs. We must have downloaded these already + -- (since we extracted the .cabal file earlier) + --TODO: [required eventually] finish remote tarball functionality +-- allRemoteTarballPkgs = +-- [ (pkgid, ) +-- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ] + + -- Tarballs from repositories, either where the repository provides + -- hashes as part of the repo metadata, or where we will have to + -- download and hash the tarball. 
+ repoTarballPkgsWithMetadata :: [(PackageId, Repo)] + repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)] + (repoTarballPkgsWithMetadata, + repoTarballPkgsWithoutMetadata) = + partitionEithers + [ case repo of + RepoSecure{} -> Left (pkgid, repo) + _ -> Right (pkgid, repo) + | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ] + + -- For tarballs from repos that do not have hashes available we now have + -- to check if the packages were downloaded already. + -- + (repoTarballPkgsToDownload, + repoTarballPkgsDownloaded) + <- fmap partitionEithers $ + liftIO $ sequence + [ do mtarball <- checkRepoTarballFetched repo pkgid + case mtarball of + Nothing -> return (Left (pkgid, repo)) + Just tarball -> return (Right (pkgid, tarball)) + | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ] + + (hashesFromRepoMetadata, + repoTarballPkgsNewlyDownloaded) <- + -- Avoid having to initialise the repository (ie 'withRepoCtx') if we + -- don't have to. (The main cost is configuring the http client.) + if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata + then return (Map.empty, []) + else liftIO $ withRepoCtx $ \repoctx -> do + + -- For tarballs from repos that do have hashes available as part of the + -- repo metadata we now load up the index for each repo and retrieve + -- the hashes for the packages + -- + hashesFromRepoMetadata <- + Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions + fmap (Map.fromList . concat) $ + sequence + -- Reading the repo index is expensive so we group the packages by repo + [ repoContextWithSecureRepo repoctx repo $ \secureRepo -> + Sec.withIndex secureRepo $ \repoIndex -> + sequence + [ do hash <- Sec.trusted <$> -- strip off Trusted tag + Sec.indexLookupHash repoIndex pkgid + -- Note that hackage-security currently uses SHA256 + -- but this API could in principle give us some other + -- choice in future. + return (pkgid, hashFromTUF hash) + | pkgid <- pkgids ] + | (repo, pkgids) <- + map (\grp@((_,repo):_) -> (repo, map fst grp)) + . groupBy ((==) `on` (remoteRepoName . repoRemote . snd)) + . sortBy (compare `on` (remoteRepoName . repoRemote . snd)) + $ repoTarballPkgsWithMetadata + ] + + -- For tarballs from repos that do not have hashes available, download + -- the ones we previously determined we need. + -- + repoTarballPkgsNewlyDownloaded <- + sequence + [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid + return (pkgid, tarball) + | (pkgid, repo) <- repoTarballPkgsToDownload ] + + return (hashesFromRepoMetadata, + repoTarballPkgsNewlyDownloaded) + + -- Hash tarball files for packages where we have to do that. This includes + -- tarballs that were local in the first place, plus tarballs from repos, + -- either previously cached or freshly downloaded. + -- + let allTarballFilePkgs :: [(PackageId, FilePath)] + allTarballFilePkgs = localTarballPkgs + ++ repoTarballPkgsDownloaded + ++ repoTarballPkgsNewlyDownloaded + hashesFromTarballFiles <- liftIO $ + fmap Map.fromList $ + sequence + [ do srchash <- readFileHashValue tarball + return (pkgid, srchash) + | (pkgid, tarball) <- allTarballFilePkgs + ] + monitorFiles [ monitorFile tarball + | (_pkgid, tarball) <- allTarballFilePkgs ] + + -- Return the combination + return $! 
hashesFromRepoMetadata + <> hashesFromTarballFiles + + +-- ------------------------------------------------------------ +-- * Installation planning +-- ------------------------------------------------------------ + +planPackages :: Compiler + -> Platform + -> Solver -> SolverSettings + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [SourcePackage] + -> Map PackageName (Map OptionalStanza Bool) + -> Progress String String + (SolverInstallPlan, PackagesImplicitSetupDeps) +planPackages comp platform solver SolverSettings{..} + installedPkgIndex sourcePkgDb pkgConfigDB + localPackages pkgStanzasEnable = + + rememberImplicitSetupDeps (depResolverSourcePkgIndex stdResolverParams) <$> + + resolveDependencies + platform (compilerInfo comp) + pkgConfigDB solver + resolverParams + + where + + --TODO: [nice to have] disable multiple instances restriction in the solver, but then + -- make sure we can cope with that in the output. + resolverParams = + + setMaxBackjumps solverSettingMaxBackjumps + + --TODO: [required eventually] should only be configurable for custom installs + -- . setIndependentGoals solverSettingIndependentGoals + + . setReorderGoals solverSettingReorderGoals + + --TODO: [required eventually] should only be configurable for custom installs + -- . setAvoidReinstalls solverSettingAvoidReinstalls + + --TODO: [required eventually] should only be configurable for custom installs + -- . setShadowPkgs solverSettingShadowPkgs + + . setStrongFlags solverSettingStrongFlags + + --TODO: [required eventually] decide if we need to prefer installed for + -- global packages, or prefer latest even for global packages. Perhaps + -- should be configurable but with a different name than "upgrade-dependencies". + . setPreferenceDefault PreferLatestForSelected + {-(if solverSettingUpgradeDeps + then PreferAllLatest + else PreferLatestForSelected)-} + + . removeUpperBounds solverSettingAllowNewer + + . addDefaultSetupDependencies (defaultSetupDeps comp platform + . PD.packageDescription + . packageDescription) + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- solverSettingPreferences ] + + . addConstraints + -- version constraints from the config file or command line + [ LabeledPackageConstraint (userToPackageConstraint pc) src + | (pc, src) <- solverSettingConstraints ] + + . addPreferences + -- enable stanza preference where the user did not specify + [ PackageStanzasPreference pkgname stanzas + | pkg <- localPackages + , let pkgname = packageName pkg + stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable + stanzas = [ stanza | stanza <- [minBound..maxBound] + , Map.lookup stanza stanzaM == Nothing ] + , not (null stanzas) + ] + + . addConstraints + -- enable stanza constraints where the user asked to enable + [ LabeledPackageConstraint + (PackageConstraintStanzas pkgname stanzas) + ConstraintSourceConfigFlagOrTarget + | pkg <- localPackages + , let pkgname = packageName pkg + stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable + stanzas = [ stanza | stanza <- [minBound..maxBound] + , Map.lookup stanza stanzaM == Just True ] + , not (null stanzas) + ] + + . addConstraints + --TODO: [nice to have] should have checked at some point that the + -- package in question actually has these flags. + [ LabeledPackageConstraint + (PackageConstraintFlags pkgname flags) + ConstraintSourceConfigFlagOrTarget + | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ] + + . 
addConstraints + --TODO: [nice to have] we have user-supplied flags for unspecified + -- local packages (as well as specific per-package flags). For the + -- former we just apply all these flags to all local targets which + -- is silly. We should check if the flags are appropriate. + [ LabeledPackageConstraint + (PackageConstraintFlags pkgname flags) + ConstraintSourceConfigFlagOrTarget + | let flags = solverSettingFlagAssignment + , not (null flags) + , pkg <- localPackages + , let pkgname = packageName pkg ] + + $ stdResolverParams + + stdResolverParams = + standardInstallPolicy + installedPkgIndex sourcePkgDb + (map SpecificSourcePackage localPackages) + + +------------------------------------------------------------------------------ +-- * Install plan post-processing +------------------------------------------------------------------------------ + +-- This phase goes from the InstallPlan we get from the solver and has to +-- make an elaborated install plan. +-- +-- We go in two steps: +-- +-- 1. elaborate all the source packages that the solver has chosen. +-- 2. swap source packages for pre-existing installed packages wherever +-- possible. +-- +-- We do it in this order, elaborating and then replacing, because the easiest +-- way to calculate the installed package ids used for the replacement step is +-- from the elaborated configuration for each package. + + + + +------------------------------------------------------------------------------ +-- * Install plan elaboration +------------------------------------------------------------------------------ + +-- | Produce an elaborated install plan using the policy for local builds with +-- a nix-style shared store. +-- +-- In theory should be able to make an elaborated install plan with a policy +-- matching that of the classic @cabal install --user@ or @--global@ +-- +elaborateInstallPlan + :: Platform -> Compiler -> ProgramDb + -> DistDirLayout + -> CabalDirLayout + -> SolverInstallPlan + -> PackagesImplicitSetupDeps + -> [SourcePackage] + -> Map PackageId PackageSourceHash + -> InstallDirs.InstallDirTemplates + -> ProjectConfigShared + -> PackageConfig + -> Map PackageName PackageConfig + -> (ElaboratedInstallPlan, ElaboratedSharedConfig) +elaborateInstallPlan platform compiler compilerprogdb + DistDirLayout{..} + cabalDirLayout@CabalDirLayout{cabalStorePackageDB} + solverPlan pkgsImplicitSetupDeps localPackages + sourcePackageHashes + defaultInstallDirs + _sharedPackageConfig + localPackagesConfig + perPackageConfig = + (elaboratedInstallPlan, elaboratedSharedConfig) + where + elaboratedSharedConfig = + ElaboratedSharedConfig { + pkgConfigPlatform = platform, + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = compilerprogdb + } + + elaboratedInstallPlan = + flip InstallPlan.mapPreservingGraph solverPlan $ \mapDep planpkg -> + case planpkg of + InstallPlan.PreExisting pkg -> + InstallPlan.PreExisting pkg + + InstallPlan.Configured pkg -> + InstallPlan.Configured + (elaborateConfiguredPackage (fixupDependencies mapDep pkg)) + + _ -> error "elaborateInstallPlan: unexpected package state" + + -- remap the installed package ids of the direct deps, since we're + -- changing the installed package ids of all the packages to use the + -- final nix-style hashed ids. 
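For reference, the final nix-style hashed ids mentioned here are the ones computed for store packages in elaborateConfiguredPackage below: the installed package id of a non-inplace package is simply the hash of its packageHashInputs. A one-line sketch of that relationship (the helper name nixStyleInstalledId is illustrative only, not a function in this module):

    -- Sketch: how a store package's final id is derived; see pkgInstalledId below.
    nixStyleInstalledId :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> InstalledPackageId
    nixStyleInstalledId shared pkg = hashedInstalledPackageId (packageHashInputs shared pkg)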
+ fixupDependencies mapDep + (ConfiguredPackage pkg flags stanzas deps) = + ConfiguredPackage pkg flags stanzas deps' + where + deps' = fmap (map (\d -> d { confInstId = mapDep (confInstId d) })) deps + + elaborateConfiguredPackage :: ConfiguredPackage + -> ElaboratedConfiguredPackage + elaborateConfiguredPackage + pkg@(ConfiguredPackage (SourcePackage pkgid gdesc srcloc descOverride) + flags stanzas deps) = + elaboratedPackage + where + -- Knot tying: the final elaboratedPackage includes the + -- pkgInstalledId, which is calculated by hashing many + -- of the other fields of the elaboratedPackage. + -- + elaboratedPackage = ElaboratedConfiguredPackage {..} + + pkgInstalledId + | shouldBuildInplaceOnly pkg + = mkUnitId (display pkgid ++ "-inplace") + + | otherwise + = assert (isJust pkgSourceHash) $ + hashedInstalledPackageId + (packageHashInputs + elaboratedSharedConfig + elaboratedPackage) -- recursive use of elaboratedPackage + + | otherwise + = error $ "elaborateInstallPlan: non-inplace package " + ++ " is missing a source hash: " ++ display pkgid + + -- All the other fields of the ElaboratedConfiguredPackage + -- + pkgSourceId = pkgid + pkgDescription = let Right (desc, _) = + PD.finalizePackageDescription + flags (const True) + platform (compilerInfo compiler) + [] gdesc + in desc + pkgFlagAssignment = flags + pkgFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag) + | flag <- PD.genPackageFlags gdesc ] + pkgDependencies = deps + pkgStanzasAvailable = Set.fromList stanzas + pkgStanzasRequested = + Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests ] + ++ [ (BenchStanzas, v) | v <- maybeToList benchmarks ] + where + tests, benchmarks :: Maybe Bool + tests = perPkgOptionMaybe pkgid packageConfigTests + benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks + + -- These sometimes get adjusted later + pkgStanzasEnabled = Set.empty + pkgBuildTargets = [] + pkgReplTarget = Nothing + pkgBuildHaddocks = False + + pkgSourceLocation = srcloc + pkgSourceHash = Map.lookup pkgid sourcePackageHashes + pkgBuildStyle = if shouldBuildInplaceOnly pkg + then BuildInplaceOnly else BuildAndInstall + pkgBuildPackageDBStack = buildAndRegisterDbs + pkgRegisterPackageDBStack = buildAndRegisterDbs + pkgRequiresRegistration = isJust (Cabal.condLibrary gdesc) + + pkgSetupScriptStyle = packageSetupScriptStylePostSolver + pkgsImplicitSetupDeps pkg pkgDescription + pkgSetupScriptCliVersion = packageSetupScriptSpecVersion + pkgSetupScriptStyle pkgDescription deps + pkgSetupPackageDBStack = buildAndRegisterDbs + + buildAndRegisterDbs + | shouldBuildInplaceOnly pkg = inplacePackageDbs + | otherwise = storePackageDbs + + pkgDescriptionOverride = descOverride + + pkgVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively + pkgSharedLib = pkgid `Set.member` pkgsUseSharedLibrary + pkgDynExe = perPkgOptionFlag pkgid False packageConfigDynExe + pkgGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still + + pkgProfExe = perPkgOptionFlag pkgid False packageConfigProf + pkgProfLib = pkgid `Set.member` pkgsUseProfilingLibrary + + (pkgProfExeDetail, + pkgProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault + packageConfigProfDetail + packageConfigProfLibDetail + pkgCoverage = perPkgOptionFlag pkgid False packageConfigCoverage + + pkgOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization + pkgSplitObjs = 
perPkgOptionFlag pkgid False packageConfigSplitObjs + pkgStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs + pkgStripExes = perPkgOptionFlag pkgid False packageConfigStripExes + pkgDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo + + -- Combine the configured compiler prog settings with the user-supplied + -- config. For the compiler progs any user-supplied config was taken + -- into account earlier when configuring the compiler so its ok that + -- our configured settings for the compiler override the user-supplied + -- config here. + pkgProgramPaths = Map.fromList + [ (programId prog, programPath prog) + | prog <- configuredPrograms compilerprogdb ] + <> perPkgOptionMapLast pkgid packageConfigProgramPaths + pkgProgramArgs = Map.fromList + [ (programId prog, args) + | prog <- configuredPrograms compilerprogdb + , let args = programOverrideArgs prog + , not (null args) + ] + <> perPkgOptionMapMappend pkgid packageConfigProgramArgs + pkgProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra + pkgConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs + pkgExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs + pkgExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs + pkgExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs + pkgProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix + pkgProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix + + pkgInstallDirs + | shouldBuildInplaceOnly pkg + -- use the ordinary default install dirs + = (InstallDirs.absoluteInstallDirs + pkgid + (installedUnitId pkg) + (compilerInfo compiler) + InstallDirs.NoCopyDest + platform + defaultInstallDirs) { + + InstallDirs.libsubdir = "", -- absoluteInstallDirs sets these as + InstallDirs.datasubdir = "" -- 'undefined' but we have to use + } -- them as "Setup.hs configure" args + + | otherwise + -- use special simplified install dirs + = storePackageInstallDirs + cabalDirLayout + (compilerId compiler) + pkgInstalledId + + pkgHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle + pkgHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml + pkgHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation + pkgHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables + pkgHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites + pkgHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks + pkgHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal + pkgHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss + pkgHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour + pkgHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss + pkgHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents + + perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a + perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a + perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] + + perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) + perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) + perPkgOptionList pkgid f = lookupPerPkgOption pkgid f + perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f) + perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) + perPkgOptionMapMappend pkgid f = 
getMapMappend (lookupPerPkgOption pkgid f) + + perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) + where + exe = fromFlagOrDefault def bothflag + lib = fromFlagOrDefault def (bothflag <> libflag) + + bothflag = lookupPerPkgOption pkgid fboth + libflag = lookupPerPkgOption pkgid flib + + lookupPerPkgOption :: (Package pkg, Monoid m) + => pkg -> (PackageConfig -> m) -> m + lookupPerPkgOption pkg f + -- the project config specifies values that apply to packages local to + -- the project, and can specify per-package values for any package, + -- but by default non-local packages get all default config values + | isLocalToProject pkg = local <> perpkg + | otherwise = perpkg + where + local = f localPackagesConfig + perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig) + + inplacePackageDbs = storePackageDbs + ++ [ distPackageDB (compilerId compiler) ] + + storePackageDbs = [ GlobalPackageDB + , cabalStorePackageDB (compilerId compiler) ] + + -- For this local build policy, every package that lives in a local source + -- dir (as opposed to a tarball), or depends on such a package, will be + -- built inplace into a shared dist dir. Tarball packages that depend on + -- source dir packages will also get unpacked locally. + shouldBuildInplaceOnly :: HasUnitId pkg => pkg -> Bool + shouldBuildInplaceOnly pkg = Set.member (installedPackageId pkg) + pkgsToBuildInplaceOnly + + pkgsToBuildInplaceOnly :: Set InstalledPackageId + pkgsToBuildInplaceOnly = + Set.fromList + $ map installedPackageId + $ InstallPlan.reverseDependencyClosure + solverPlan + [ fakeUnitId (packageId pkg) + | pkg <- localPackages ] + + isLocalToProject :: Package pkg => pkg -> Bool + isLocalToProject pkg = Set.member (packageId pkg) + pkgsLocalToProject + + pkgsLocalToProject :: Set PackageId + pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ] + + pkgsUseSharedLibrary :: Set PackageId + pkgsUseSharedLibrary = + packagesWithDownwardClosedProperty needsSharedLib + where + needsSharedLib pkg = + fromMaybe compilerShouldUseSharedLibByDefault + (liftM2 (||) pkgSharedLib pkgDynExe) + where + pkgid = packageId pkg + pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib + pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe + + --TODO: [code cleanup] move this into the Cabal lib. It's currently open + -- coded in Distribution.Simple.Configure, but should be made a proper + -- function of the Compiler or CompilerInfo. + compilerShouldUseSharedLibByDefault = + case compilerFlavor compiler of + GHC -> GHC.isDynamic compiler + GHCJS -> GHCJS.isDynamic compiler + _ -> False + + pkgsUseProfilingLibrary :: Set PackageId + pkgsUseProfilingLibrary = + packagesWithDownwardClosedProperty needsProfilingLib + where + needsProfilingLib pkg = + fromFlagOrDefault False (profBothFlag <> profLibFlag) + where + pkgid = packageId pkg + profBothFlag = lookupPerPkgOption pkgid packageConfigProf + profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib + --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe + + packagesWithDownwardClosedProperty property = + Set.fromList + $ map packageId + $ InstallPlan.dependencyClosure + solverPlan + [ installedPackageId pkg + | pkg <- InstallPlan.toList solverPlan + , property pkg ] -- just the packages that satisfy the property + --TODO: [nice to have] this does not check the config consistency, + -- e.g. a package explicitly turning off profiling, but something + -- depending on it that needs profiling. 
+ + --TODO: [nice to have] config consistency checking: + -- * profiling libs & exes, exe needs lib, recursive + -- * shared libs & exes, exe needs lib, recursive + -- * vanilla libs & exes, exe needs lib, recursive + -- * ghci or shared lib needed by TH, recursive, ghc version dependent + + +--------------------------- +-- Build targets +-- + +-- Refer to ProjectPlanning.Types for details of these important types: + +-- data PackageTarget = ... +-- data ComponentTarget = ... +-- data SubComponentTarget = ... + + +--TODO: this needs to report some user target/config errors +elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget] + -> ([ComponentTarget], Maybe ComponentTarget, Bool) +elaboratePackageTargets ElaboratedConfiguredPackage{..} targets = + let buildTargets = nubComponentTargets + . map compatSubComponentTargets + . concatMap elaborateBuildTarget + $ targets + --TODO: instead of listToMaybe we should be reporting an error here + replTargets = listToMaybe + . nubComponentTargets + . map compatSubComponentTargets + . concatMap elaborateReplTarget + $ targets + buildHaddocks = HaddockDefaultComponents `elem` targets + + in (buildTargets, replTargets, buildHaddocks) + where + --TODO: need to report an error here if defaultComponents is empty + elaborateBuildTarget BuildDefaultComponents = pkgDefaultComponents + elaborateBuildTarget (BuildSpecificComponent t) = [t] + elaborateBuildTarget _ = [] + + --TODO: need to report an error here if defaultComponents is empty + elaborateReplTarget ReplDefaultComponent = take 1 pkgDefaultComponents + elaborateReplTarget (ReplSpecificComponent t) = [t] + elaborateReplTarget _ = [] + + pkgDefaultComponents = + [ ComponentTarget cname WholeComponent + | c <- Cabal.pkgComponents pkgDescription + , PD.buildable (Cabal.componentBuildInfo c) + , let cname = Cabal.componentName c + , enabledOptionalStanza cname + ] + where + enabledOptionalStanza cname = + case componentOptionalStanza cname of + Nothing -> True + Just stanza -> Map.lookup stanza pkgStanzasRequested + == Just True + + -- Not all Cabal Setup.hs versions support sub-component targets, so switch + -- them over to the whole component + compatSubComponentTargets :: ComponentTarget -> ComponentTarget + compatSubComponentTargets target@(ComponentTarget cname _subtarget) + | not setupHsSupportsSubComponentTargets + = ComponentTarget cname WholeComponent + | otherwise = target + + -- The reality is that no current version of Cabal's Setup.hs + -- build command actually supports building specific files or modules. + setupHsSupportsSubComponentTargets = False + -- TODO: when that changes, adjust this test, e.g. + -- | pkgSetupScriptCliVersion >= Version [x,y] [] + + nubComponentTargets :: [ComponentTarget] -> [ComponentTarget] + nubComponentTargets = + concatMap (wholeComponentOverrides . map snd) + . groupBy ((==) `on` fst) + . sortBy (compare `on` fst) + . map (\t@(ComponentTarget cname _) -> (cname, t)) + + -- If we're building the whole component then that's the only target we + -- need, otherwise we can have several targets within the component. + wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget] + wholeComponentOverrides ts = + case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of + (t:_) -> [t] + [] -> ts
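-- The target de-duplication in 'nubComponentTargets' above boils down to:
-- group the targets by component, and let a whole-component target subsume
-- any finer-grained targets in the same component. A minimal standalone
-- model of that rule; the 'Target' type and its constructors are
-- illustrative assumptions, not the real cabal-install types.

import Data.Function (on)
import Data.List (groupBy, sortBy)

data Target = Whole String          -- ^ build the whole named component
            | Module String String  -- ^ build one module of a component
  deriving (Eq, Show)

componentOf :: Target -> String
componentOf (Whole c)    = c
componentOf (Module c _) = c

-- | A whole-component target makes the finer targets in that component
-- redundant; otherwise keep the individual targets as they are.
nubTargets :: [Target] -> [Target]
nubTargets =
    concatMap collapse
  . groupBy ((==) `on` componentOf)
  . sortBy (compare `on` componentOf)
  where
    collapse ts = case [ t | t@(Whole _) <- ts ] of
                    (t:_) -> [t]
                    []    -> ts

-- e.g. nubTargets [Module "lib" "A", Whole "lib", Module "exe" "Main"]
--      gives [Module "exe" "Main", Whole "lib"].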
+ + +pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool +pkgHasEphemeralBuildTargets pkg = + isJust (pkgReplTarget pkg) + || (not . null) [ () | ComponentTarget _ subtarget <- pkgBuildTargets pkg + , subtarget /= WholeComponent ] + +-- | The components that we'll build all of, meaning that after they're built +-- we can skip building them again (unlike with building just some modules or +-- other files within a component). +-- +pkgBuildTargetWholeComponents :: ElaboratedConfiguredPackage + -> Set ComponentName +pkgBuildTargetWholeComponents pkg = + Set.fromList + [ cname | ComponentTarget cname WholeComponent <- pkgBuildTargets pkg ] + + +------------------------------------------------------------------------------ +-- * Install plan pruning +------------------------------------------------------------------------------ + +-- | Given a set of package targets (and optionally component targets within +-- those packages), take the subset of the install plan needed to build those +-- targets. Also, update the package config to specify which optional stanzas +-- to enable, and which targets within each package to build. +-- +pruneInstallPlanToTargets :: Map InstalledPackageId [PackageTarget] + -> ElaboratedInstallPlan -> ElaboratedInstallPlan +pruneInstallPlanToTargets perPkgTargetsMap = + either (\_ -> assert False undefined) id + . InstallPlan.new False + . PackageIndex.fromList + -- We have to do this in two passes + . pruneInstallPlanPass2 + . pruneInstallPlanPass1 perPkgTargetsMap + . InstallPlan.toList + +-- The first pass does three things: +-- +-- * Set the build targets based on the user targets (but not rev deps yet). +-- * A first go at determining which optional stanzas (testsuites, benchmarks) +-- are needed. We have a second go in the next pass. +-- * Take the dependency closure using pruned dependencies. We prune deps that +-- are used only by unneeded optional stanzas. These pruned deps are only +-- used for the dependency closure and are not persisted in this pass. +-- +pruneInstallPlanPass1 :: Map InstalledPackageId [PackageTarget] + -> [ElaboratedPlanPackage] + -> [ElaboratedPlanPackage] +pruneInstallPlanPass1 perPkgTargetsMap pkgs = + map fst $ + dependencyClosure + (installedPackageId . fst) -- the pkg id + snd -- the pruned deps + [ (pkg', pruneOptionalDependencies pkg') + | pkg <- pkgs + , let pkg' = mapConfiguredPackage + (pruneOptionalStanzas . setBuildTargets) pkg + ] + (Map.keys perPkgTargetsMap) + where + -- Elaborate and set the targets we'll build for this package. This is just + -- based on the targets from the user, not targets implied by reverse + -- dependencies. Those come in the second pass once we know the rev deps. + -- + setBuildTargets pkg = + pkg { + pkgBuildTargets = buildTargets, + pkgReplTarget = replTarget, + pkgBuildHaddocks = buildHaddocks + } + where + (buildTargets, replTarget, buildHaddocks) + = elaboratePackageTargets pkg targets + targets = fromMaybe [] + $ Map.lookup (installedPackageId pkg) perPkgTargetsMap + + -- Decide whether or not to enable testsuites and benchmarks + -- + -- The testsuite and benchmark targets are somewhat special in that we need + -- to configure the packages with them enabled, and we need to do that even + -- if we only want to build one of several testsuites. + -- + -- There are two cases in which we will enable the testsuites (or + -- benchmarks): if one of the targets is a testsuite, or if all of the + -- testsuite dependencies are already cached in the store. The rationale + -- for the latter is to minimise how often we have to reconfigure due to + -- the particular targets we choose to build. Otherwise choosing to build + -- a testsuite target, and then later choosing to build an exe target + -- would involve unnecessarily reconfiguring the package with testsuites + -- disabled. Technically this introduces a little bit of stateful + -- behaviour to make this "sticky", but it should be benign. + -- + pruneOptionalStanzas pkg = pkg { pkgStanzasEnabled = stanzas } + where + stanzas :: Set OptionalStanza + stanzas = optionalStanzasRequiredByTargets pkg + <> optionalStanzasRequestedByDefault pkg + <> optionalStanzasWithDepsAvailable availablePkgs pkg + + -- Calculate package dependencies but cut out those needed only by + -- optional stanzas that we've determined we will not enable. + -- These pruned deps are not persisted in this pass since they're based on + -- the optional stanzas and we'll make further tweaks to the optional + -- stanzas in the next pass. + -- + pruneOptionalDependencies :: ElaboratedPlanPackage -> [InstalledPackageId] + pruneOptionalDependencies (InstallPlan.Configured pkg) = + (CD.flatDeps . CD.filterDeps keepNeeded) (depends pkg) + where + keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas + keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas + keepNeeded _ _ = True + stanzas = pkgStanzasEnabled pkg + pruneOptionalDependencies pkg = + CD.flatDeps (depends pkg) + + optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage + -> Set OptionalStanza + optionalStanzasRequiredByTargets pkg = + Set.fromList + [ stanza + | ComponentTarget cname _ <- pkgBuildTargets pkg + ++ maybeToList (pkgReplTarget pkg) + , stanza <- maybeToList (componentOptionalStanza cname) + ] + + optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage + -> Set OptionalStanza + optionalStanzasRequestedByDefault = + Map.keysSet + . Map.filter (id :: Bool -> Bool) + . pkgStanzasRequested + + availablePkgs = + Set.fromList + [ installedPackageId pkg + | InstallPlan.PreExisting pkg <- pkgs ]
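-- The stanza selection defined just below ('optionalStanzasWithDepsAvailable')
-- follows a simple rule: a test or benchmark stanza is a candidate for
-- enabling when every dependency it needs is already available. A minimal
-- standalone model of that rule, using plain 'String' ids and a 'Map' from
-- stanza name to its extra deps; both are assumptions for this sketch, not
-- the real types.

import           Data.Map (Map)
import qualified Data.Map as Map
import           Data.Set (Set)
import qualified Data.Set as Set

stanzasWithDepsAvailable :: Set String          -- ^ packages already available
                         -> Map String [String] -- ^ stanza -> deps it needs
                         -> Set String          -- ^ stanzas we could enable
stanzasWithDepsAvailable available =
    Map.keysSet . Map.filter (all (`Set.member` available))

-- e.g. stanzasWithDepsAvailable (Set.fromList ["HUnit", "base"])
--        (Map.fromList [("tests", ["HUnit", "base"]), ("benchs", ["criterion"])])
--      gives Set.fromList ["tests"].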
+ +optionalStanzasWithDepsAvailable :: Set InstalledPackageId + -> ElaboratedConfiguredPackage + -> Set OptionalStanza +optionalStanzasWithDepsAvailable availablePkgs pkg = + Set.fromList + [ stanza + | stanza <- Set.toList (pkgStanzasAvailable pkg) + , let deps :: [InstalledPackageId] + deps = map installedPackageId + $ CD.select (optionalStanzaDeps stanza) + (pkgDependencies pkg) + , all (`Set.member` availablePkgs) deps + ] + where + optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True + optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True + optionalStanzaDeps _ _ = False + + +-- The second pass does three things: +-- +-- * A second go at deciding which optional stanzas to enable. +-- * Prune the dependencies based on the final choice of optional stanzas. +-- * Extend the targets within each package to build, now we know the reverse +-- dependencies, ie we know which libs are needed as deps by other packages. +-- +-- Achieving sticky behaviour with enabling\/disabling optional stanzas is +-- tricky. The first approximation was handled by the first pass above, but +-- it's not quite enough. That pass will enable stanzas if all of the deps +-- of the optional stanza are already installed /in the store/. That's important +-- but it does not account for dependencies that get built inplace as part of +-- the project. We cannot take those inplace build deps into account in the +-- pruning pass however because we don't yet know which ones we're going to +-- build.
Once we do know, we can have another go and enable stanzas that have +-- all their deps available. Now we can consider all packages in the pruned +-- plan to be available, including ones we already decided to build from +-- source. +-- +-- Deciding which targets to build depends on knowing which packages have +-- reverse dependencies (ie are needed). This requires the result of first +-- pass, which is another reason we have to split it into two passes. +-- +-- Note that just because we might enable testsuites or benchmarks (in the +-- first or second pass) doesn't mean that we build all (or even any) of them. +-- That depends on which targets we picked in the first pass. +-- +pruneInstallPlanPass2 :: [ElaboratedPlanPackage] + -> [ElaboratedPlanPackage] +pruneInstallPlanPass2 pkgs = + map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs + where + setStanzasDepsAndTargets pkg = + pkg { + pkgStanzasEnabled = stanzas, + pkgDependencies = CD.filterDeps keepNeeded (pkgDependencies pkg), + pkgBuildTargets = pkgBuildTargets pkg ++ targetsRequiredForRevDeps + } + where + stanzas :: Set OptionalStanza + stanzas = pkgStanzasEnabled pkg + <> optionalStanzasWithDepsAvailable availablePkgs pkg + + keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas + keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas + keepNeeded _ _ = True + + targetsRequiredForRevDeps = + [ ComponentTarget CLibName WholeComponent + -- if anything needs this pkg, build the library component + | installedPackageId pkg `Set.member` hasReverseLibDeps + ] + --TODO: also need to track build-tool rev-deps for exes + + availablePkgs :: Set InstalledPackageId + availablePkgs = Set.fromList (map installedPackageId pkgs) + + hasReverseLibDeps :: Set InstalledPackageId + hasReverseLibDeps = + Set.fromList [ depid | pkg <- pkgs + , depid <- CD.flatDeps (depends pkg) ] + + +mapConfiguredPackage :: (ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage) + -> ElaboratedPlanPackage + -> ElaboratedPlanPackage +mapConfiguredPackage f (InstallPlan.Configured pkg) = + InstallPlan.Configured (f pkg) +mapConfiguredPackage _ pkg = pkg + +componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza +componentOptionalStanza (Cabal.CTestName _) = Just TestStanzas +componentOptionalStanza (Cabal.CBenchName _) = Just BenchStanzas +componentOptionalStanza _ = Nothing + + +dependencyClosure :: (pkg -> InstalledPackageId) + -> (pkg -> [InstalledPackageId]) + -> [pkg] + -> [InstalledPackageId] + -> [pkg] +dependencyClosure pkgid deps allpkgs = + map vertexToPkg + . concatMap Tree.flatten + . Graph.dfs graph + . map pkgidToVertex + where + (graph, vertexToPkg, pkgidToVertex) = dependencyGraph pkgid deps allpkgs + +dependencyGraph :: (pkg -> InstalledPackageId) + -> (pkg -> [InstalledPackageId]) + -> [pkg] + -> (Graph.Graph, + Graph.Vertex -> pkg, + InstalledPackageId -> Graph.Vertex) +dependencyGraph pkgid deps pkgs = + (graph, vertexToPkg', pkgidToVertex') + where + (graph, vertexToPkg, pkgidToVertex) = + Graph.graphFromEdges [ ( pkg, pkgid pkg, deps pkg ) + | pkg <- pkgs ] + vertexToPkg' = (\(pkg,_,_) -> pkg) + . vertexToPkg + pkgidToVertex' = fromMaybe (error "dependencyGraph: lookup failure") + . pkgidToVertex + + +--------------------------- +-- Setup.hs script policy +-- + +-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the +-- solver phase, and part in the elaboration phase. 
We keep the helper +-- functions for both phases together here so at least you can see all of it +-- in one place. +-- +-- There are four major cases for Setup.hs handling: +-- +-- 1. @build-type@ Custom with a @custom-setup@ section +-- 2. @build-type@ Custom without a @custom-setup@ section +-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ +-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ +-- +-- It's also worth noting that packages specifying @cabal-version: >= 1.23@ +-- or later that have @build-type@ Custom will always have a @custom-setup@ +-- section. Therefore in case 2, the specified @cabal-version@ will always be +-- less than 1.23. +-- +-- In cases 1 and 2 we obviously have to build an external Setup.hs script, +-- while in case 4 we can use the internal library API. In case 3 we also have +-- to build an external Setup.hs script because the package needs a later +-- Cabal lib version than we can support internally. +-- +-- data SetupScriptStyle = ... -- see ProjectPlanning.Types + +-- | Work out the 'SetupScriptStyle' given the package description. +-- +-- This only works on original packages before we give them to the solver, +-- since after the solver some implicit setup deps are made explicit. +-- +-- See 'rememberImplicitSetupDeps' and 'packageSetupScriptStylePostSolver'. +-- +packageSetupScriptStylePreSolver :: PD.PackageDescription -> SetupScriptStyle +packageSetupScriptStylePreSolver pkg + | buildType == PD.Custom + , isJust (PD.setupBuildInfo pkg) + = SetupCustomExplicitDeps + + | buildType == PD.Custom + = SetupCustomImplicitDeps + + | PD.specVersion pkg > cabalVersion -- one cabal-install is built against + = SetupNonCustomExternalLib + + | otherwise + = SetupNonCustomInternalLib + where + buildType = fromMaybe PD.Custom (PD.buildType pkg) + + +-- | Part of our Setup.hs handling policy is implemented by getting the solver +-- to work out setup dependencies for packages. The solver already handles +-- packages that explicitly specify setup dependencies, but we can also tell +-- the solver to treat other packages as if they had setup dependencies. +-- That's what this function does, it gets called by the solver for all +-- packages that don't already have setup dependencies. +-- +-- The dependencies we want to add is different for each 'SetupScriptStyle'. +-- +-- Note that adding default deps means these deps are actually /added/ to the +-- packages that we get out of the solver in the 'SolverInstallPlan'. Making +-- implicit setup deps explicit is a problem in the post-solver stages because +-- we still need to distinguish the case of explicit and implict setup deps. +-- See 'rememberImplicitSetupDeps'. +-- +defaultSetupDeps :: Compiler -> Platform + -> PD.PackageDescription + -> Maybe [Dependency] +defaultSetupDeps compiler platform pkg = + case packageSetupScriptStylePreSolver pkg of + + -- For packages with build type custom that do not specify explicit + -- setup dependencies, we add a dependency on Cabal and a number + -- of other packages. + SetupCustomImplicitDeps -> + Just $ + [ Dependency depPkgname anyVersion + | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++ + [ Dependency cabalPkgname cabalConstraint + | packageName pkg /= cabalPkgname ] + where + -- The Cabal dep is slightly special: + -- * We omit the dep for the Cabal lib itself, since it bootstraps. 
+ -- * We constrain it to be >= 1.18 < 2 + -- + cabalConstraint = orLaterVersion cabalCompatMinVer + `intersectVersionRanges` + orLaterVersion (PD.specVersion pkg) + `intersectVersionRanges` + earlierVersion cabalCompatMaxVer + -- The idea here is that at some point we will make significant + -- breaking changes to the Cabal API that Setup.hs scripts use. + -- So for old custom Setup scripts that do not specify explicit + -- constraints, we constrain them to use a compatible Cabal version. + cabalCompatMaxVer = Version [1,25] [] + -- In principle we can talk to any old Cabal version, and we need to + -- be able to do that for custom Setup scripts that require older + -- Cabal lib versions. However in practice we have currently have + -- problems with Cabal-1.16. (1.16 does not know about build targets) + -- If this is fixed we can relax this constraint. + cabalCompatMinVer = Version [1,18] [] + + -- For other build types (like Simple) if we still need to compile an + -- external Setup.hs, it'll be one of the simple ones that only depends + -- on Cabal and base. + SetupNonCustomExternalLib -> + Just [ Dependency cabalPkgname cabalConstraint + , Dependency basePkgname anyVersion ] + where + cabalConstraint = orLaterVersion (PD.specVersion pkg) + + -- The internal setup wrapper method has no deps at all. + SetupNonCustomInternalLib -> Just [] + + SetupCustomExplicitDeps -> + error $ "defaultSetupDeps: called for a package with explicit " + ++ "setup deps: " ++ display (packageId pkg) + + +-- | See 'rememberImplicitSetupDeps' for details. +type PackagesImplicitSetupDeps = Set InstalledPackageId + +-- | A consequence of using 'defaultSetupDeps' in 'planPackages' is that by +-- making implicit setup deps explicit we loose track of which packages +-- originally had implicit setup deps. That's important because we do still +-- have different behaviour based on the setup style (in particular whether to +-- compile a Setup.hs script with version macros). +-- +-- So we remember the necessary information in an auxilliary set and use it +-- in 'packageSetupScriptStylePreSolver' to recover the full info. +-- +rememberImplicitSetupDeps :: SourcePackageIndex.PackageIndex SourcePackage + -> SolverInstallPlan + -> (SolverInstallPlan, PackagesImplicitSetupDeps) +rememberImplicitSetupDeps sourcePkgIndex plan = + (plan, pkgsImplicitSetupDeps) + where + pkgsImplicitSetupDeps = + Set.fromList + [ installedPackageId pkg + | InstallPlan.Configured + pkg@(ConfiguredPackage newpkg _ _ _) <- InstallPlan.toList plan + -- has explicit setup deps now + , hasExplicitSetupDeps newpkg + -- but originally had no setup deps + , let Just origpkg = SourcePackageIndex.lookupPackageId + sourcePkgIndex (packageId pkg) + , not (hasExplicitSetupDeps origpkg) + ] + + hasExplicitSetupDeps = + (SetupCustomExplicitDeps==) + . packageSetupScriptStylePreSolver + . PD.packageDescription . packageDescription + + +-- | Use the extra info saved by 'rememberImplicitSetupDeps' to let us work +-- out the correct 'SetupScriptStyle'. This should give the same result as +-- 'packageSetupScriptStylePreSolver' gave prior to munging the package info +-- through the solver. 
+-- +packageSetupScriptStylePostSolver :: Set InstalledPackageId + -> ConfiguredPackage + -> PD.PackageDescription + -> SetupScriptStyle +packageSetupScriptStylePostSolver pkgsImplicitSetupDeps pkg pkgDescription = + case packageSetupScriptStylePreSolver pkgDescription of + SetupCustomExplicitDeps + | Set.member (installedPackageId pkg) pkgsImplicitSetupDeps + -> SetupCustomImplicitDeps + other -> other + + +-- | Work out which version of the Cabal spec we will be using to talk to the +-- Setup.hs interface for this package. +-- +-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result +-- of what the solver picked for us, based on the explicit setup deps or the +-- ones added implicitly by 'defaultSetupDeps'. +-- +packageSetupScriptSpecVersion :: Package pkg + => SetupScriptStyle + -> PD.PackageDescription + -> ComponentDeps [pkg] + -> Version + +-- We're going to be using the internal Cabal library, so the spec version of +-- that is simply the version of the Cabal library that cabal-install has been +-- built with. +packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ = + cabalVersion + +-- If we happen to be building the Cabal lib itself then because that +-- bootstraps itself then we use the version of the lib we're building. +packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ + | packageName pkg == cabalPkgname + = packageVersion pkg + +-- In all other cases we have a look at what version of the Cabal lib the +-- solver picked. Or if it didn't depend on Cabal at all (which is very rare) +-- then we look at the .cabal file to see what spec version it declares. +packageSetupScriptSpecVersion _ pkg deps = + case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of + Just dep -> packageVersion dep + Nothing -> PD.specVersion pkg + + +cabalPkgname, basePkgname :: PackageName +cabalPkgname = PackageName "Cabal" +basePkgname = PackageName "base" + + +legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName] +legacyCustomSetupPkgs compiler (Platform _ os) = + map PackageName $ + [ "array", "base", "binary", "bytestring", "containers" + , "deepseq", "directory", "filepath", "old-time", "pretty" + , "process", "time", "transformers" ] + ++ [ "Win32" | os == Windows ] + ++ [ "unix" | os /= Windows ] + ++ [ "ghc-prim" | isGHC ] + ++ [ "template-haskell" | isGHC ] + where + isGHC = compilerCompatFlavor GHC compiler + + -- This util is copied here just in this branch to avoid requiring a new + -- Cabal version. The master branch already does the right thing. + compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool + compilerCompatFlavor flavor comp = + flavor == compilerFlavor comp + || flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ] + +-- The other aspects of our Setup.hs policy lives here where we decide on +-- the 'SetupScriptOptions'. +-- +-- Our current policy for the 'SetupCustomImplicitDeps' case is that we +-- try to make the implicit deps cover everything, and we don't allow the +-- compiler to pick up other deps. This may or may not be sustainable, and +-- we might have to allow the deps to be non-exclusive, but that itself would +-- be tricky since we would have to allow the Setup access to all the packages +-- in the store and local dbs. 
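-- A minimal standalone model of the Setup.hs policy described above: pick a
-- setup script style from the build-type, the presence of a custom-setup
-- section, and whether the package's cabal spec version exceeds the Cabal
-- library this tool was built against. The types and names below are
-- illustrative assumptions, not the real cabal-install definitions.

import Data.Version (Version, makeVersion)

data BuildKind = Custom | Simple | Configure | Make
  deriving (Eq, Show)

data SetupStyle = CustomWithExplicitDeps  -- case 1: custom-setup section given
                | CustomWithImplicitDeps  -- case 2: Custom, no custom-setup
                | ExternalSimpleSetup     -- case 3: needs a newer Cabal lib
                | InternalSetup           -- case 4: use the built-in library
  deriving (Eq, Show)

chooseSetupStyle :: Version   -- ^ Cabal lib version we were built against
                 -> BuildKind -- ^ the package's build-type
                 -> Bool      -- ^ package has a custom-setup section
                 -> Version   -- ^ the package's cabal spec version
                 -> SetupStyle
chooseSetupStyle ourCabal buildKind hasCustomSetup specVersion
  | buildKind == Custom, hasCustomSetup = CustomWithExplicitDeps
  | buildKind == Custom                 = CustomWithImplicitDeps
  | specVersion > ourCabal              = ExternalSimpleSetup
  | otherwise                           = InternalSetup

-- e.g. chooseSetupStyle (makeVersion [1,24]) Simple False (makeVersion [1,10])
--      gives InternalSetup, matching case 4 above.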
+ +setupHsScriptOptions :: ElaboratedReadyPackage + -> ElaboratedSharedConfig + -> FilePath + -> FilePath + -> Bool + -> Lock + -> SetupScriptOptions +setupHsScriptOptions (ReadyPackage ElaboratedConfiguredPackage{..} deps) + ElaboratedSharedConfig{..} srcdir builddir + isParallelBuild cacheLock = + SetupScriptOptions { + useCabalVersion = thisVersion pkgSetupScriptCliVersion, + useCabalSpecVersion = Just pkgSetupScriptCliVersion, + useCompiler = Just pkgConfigCompiler, + usePlatform = Just pkgConfigPlatform, + usePackageDB = pkgSetupPackageDBStack, + usePackageIndex = Nothing, + useDependencies = [ (installedPackageId ipkg, packageId ipkg) + | ipkg <- CD.setupDeps deps ], + useDependenciesExclusive = True, + useVersionMacros = pkgSetupScriptStyle == SetupCustomExplicitDeps, + useProgramConfig = pkgConfigCompilerProgs, + useDistPref = builddir, + useLoggingHandle = Nothing, -- this gets set later + useWorkingDir = Just srcdir, + useWin32CleanHack = False, --TODO: [required eventually] + forceExternalSetupMethod = isParallelBuild, + setupCacheLock = Just cacheLock + } + + +-- | To be used for the input for elaborateInstallPlan. +-- +-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure. +-- +userInstallDirTemplates :: Compiler + -> IO InstallDirs.InstallDirTemplates +userInstallDirTemplates compiler = do + InstallDirs.defaultInstallDirs + (compilerFlavor compiler) + True -- user install + False -- unused + +storePackageInstallDirs :: CabalDirLayout + -> CompilerId + -> InstalledPackageId + -> InstallDirs.InstallDirs FilePath +storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory} + compid ipkgid = + InstallDirs.InstallDirs {..} + where + prefix = cabalStorePackageDirectory compid ipkgid + bindir = prefix "bin" + libdir = prefix "lib" + libsubdir = "" + dynlibdir = libdir + libexecdir = prefix "libexec" + includedir = libdir "include" + datadir = prefix "share" + datasubdir = "" + docdir = datadir "doc" + mandir = datadir "man" + htmldir = docdir "html" + haddockdir = htmldir + sysconfdir = prefix "etc" + + +--TODO: [code cleanup] perhaps reorder this code +-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig, +-- make the various Setup.hs {configure,build,copy} flags + + +setupHsConfigureFlags :: ElaboratedReadyPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.ConfigFlags +setupHsConfigureFlags (ReadyPackage + pkg@ElaboratedConfiguredPackage{..} + pkgdeps) + sharedConfig@ElaboratedSharedConfig{..} + verbosity builddir = + assert (sanityCheckElaboratedConfiguredPackage sharedConfig pkg) + Cabal.ConfigFlags {..} + where + configDistPref = toFlag builddir + configVerbosity = toFlag verbosity + + configIPID = toFlag (display (installedUnitId pkg)) + + configProgramPaths = Map.toList pkgProgramPaths + configProgramArgs = Map.toList pkgProgramArgs + configProgramPathExtra = toNubList pkgProgramPathExtra + configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler) + configHcPath = mempty -- we use configProgramPaths instead + configHcPkg = mempty -- we use configProgramPaths instead + + configVanillaLib = toFlag pkgVanillaLib + configSharedLib = toFlag pkgSharedLib + configDynExe = toFlag pkgDynExe + configGHCiLib = toFlag pkgGHCiLib + configProfExe = mempty + configProfLib = toFlag pkgProfLib + configProf = toFlag pkgProfExe + + -- configProfDetail is for exe+lib, but overridden by configProfLibDetail + -- so we specify both so we can specify independently + configProfDetail = toFlag pkgProfExeDetail + configProfLibDetail = toFlag 
pkgProfLibDetail + + configCoverage = toFlag pkgCoverage + configLibCoverage = mempty + + configOptimization = toFlag pkgOptimization + configSplitObjs = toFlag pkgSplitObjs + configStripExes = toFlag pkgStripExes + configStripLibs = toFlag pkgStripLibs + configDebugInfo = toFlag pkgDebugInfo + configAllowNewer = mempty -- we use configExactConfiguration True + + configConfigurationsFlags = pkgFlagAssignment + configConfigureArgs = pkgConfigureScriptArgs + configExtraLibDirs = pkgExtraLibDirs + configExtraFrameworkDirs = pkgExtraFrameworkDirs + configExtraIncludeDirs = pkgExtraIncludeDirs + configProgPrefix = maybe mempty toFlag pkgProgPrefix + configProgSuffix = maybe mempty toFlag pkgProgSuffix + + configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate) + pkgInstallDirs + + -- we only use configDependencies, unless we're talking to an old Cabal + -- in which case we use configConstraints + configDependencies = [ (packageName (Installed.sourcePackageId deppkg), + Installed.installedUnitId deppkg) + | deppkg <- CD.nonSetupDeps pkgdeps ] + configConstraints = [ thisPackageVersion (packageId deppkg) + | deppkg <- CD.nonSetupDeps pkgdeps ] + + -- explicitly clear, then our package db stack + -- TODO: [required eventually] have to do this differently for older Cabal versions + configPackageDBs = Nothing : map Just pkgBuildPackageDBStack + + configTests = toFlag (TestStanzas `Set.member` pkgStanzasEnabled) + configBenchmarks = toFlag (BenchStanzas `Set.member` pkgStanzasEnabled) + + configExactConfiguration = toFlag True + configFlagError = mempty --TODO: [research required] appears not to be implemented + configRelocatable = mempty --TODO: [research required] ??? + configScratchDir = mempty -- never use + configUserInstall = mempty -- don't rely on defaults + configPrograms_ = mempty -- never use, shouldn't exist + + +setupHsBuildFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.BuildFlags +setupHsBuildFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = + Cabal.BuildFlags { + buildProgramPaths = mempty, --unused, set at configure time + buildProgramArgs = mempty, --unused, set at configure time + buildVerbosity = toFlag verbosity, + buildDistPref = toFlag builddir, + buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs), + buildArgs = mempty -- unused, passed via args not flags + } + + +setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String] +setupHsBuildArgs pkg = + map (showComponentTarget pkg) (pkgBuildTargets pkg) + + +showComponentTarget :: ElaboratedConfiguredPackage -> ComponentTarget -> String +showComponentTarget pkg = + showBuildTarget . 
toBuildTarget + where + showBuildTarget t = + Cabal.showBuildTarget (qlBuildTarget t) (packageId pkg) t + + qlBuildTarget Cabal.BuildTargetComponent{} = Cabal.QL2 + qlBuildTarget _ = Cabal.QL3 + + toBuildTarget :: ComponentTarget -> Cabal.BuildTarget + toBuildTarget (ComponentTarget cname subtarget) = + case subtarget of + WholeComponent -> Cabal.BuildTargetComponent cname + ModuleTarget mname -> Cabal.BuildTargetModule cname mname + FileTarget fname -> Cabal.BuildTargetFile cname fname + + +setupHsReplFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.ReplFlags +setupHsReplFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = + Cabal.ReplFlags { + replProgramPaths = mempty, --unused, set at configure time + replProgramArgs = mempty, --unused, set at configure time + replVerbosity = toFlag verbosity, + replDistPref = toFlag builddir, + replReload = mempty --only used as callback from repl + } + + +setupHsReplArgs :: ElaboratedConfiguredPackage -> [String] +setupHsReplArgs pkg = + maybe [] (\t -> [showComponentTarget pkg t]) (pkgReplTarget pkg) + --TODO: should be able to give multiple modules in one component + + +setupHsCopyFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.CopyFlags +setupHsCopyFlags _ _ verbosity builddir = + Cabal.CopyFlags { + --TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right + -- thing, but perhaps we ought really to copy into an image dir and do + -- some sanity checks and move into the final location ourselves + copyDest = toFlag InstallDirs.NoCopyDest, + copyDistPref = toFlag builddir, + copyVerbosity = toFlag verbosity + } + +setupHsRegisterFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> FilePath + -> Cabal.RegisterFlags +setupHsRegisterFlags ElaboratedConfiguredPackage {pkgBuildStyle} _ + verbosity builddir pkgConfFile = + Cabal.RegisterFlags { + regPackageDB = mempty, -- misfeature + regGenScript = mempty, -- never use + regGenPkgConf = toFlag (Just pkgConfFile), + regInPlace = case pkgBuildStyle of + BuildInplaceOnly -> toFlag True + _ -> toFlag False, + regPrintId = mempty, -- never use + regDistPref = toFlag builddir, + regVerbosity = toFlag verbosity + } + +setupHsHaddockFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.HaddockFlags +setupHsHaddockFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = + Cabal.HaddockFlags { + haddockProgramPaths = mempty, --unused, set at configure time + haddockProgramArgs = mempty, --unused, set at configure time + haddockHoogle = toFlag pkgHaddockHoogle, + haddockHtml = toFlag pkgHaddockHtml, + haddockHtmlLocation = maybe mempty toFlag pkgHaddockHtmlLocation, + haddockForHackage = mempty, --TODO: new flag + haddockExecutables = toFlag pkgHaddockExecutables, + haddockTestSuites = toFlag pkgHaddockTestSuites, + haddockBenchmarks = toFlag pkgHaddockBenchmarks, + haddockInternal = toFlag pkgHaddockInternal, + haddockCss = maybe mempty toFlag pkgHaddockCss, + haddockHscolour = toFlag pkgHaddockHscolour, + haddockHscolourCss = maybe mempty toFlag pkgHaddockHscolourCss, + haddockContents = maybe mempty toFlag pkgHaddockContents, + haddockDistPref = toFlag builddir, + haddockKeepTempFiles = mempty, --TODO: from build settings + haddockVerbosity = toFlag verbosity + } + +{- +setupHsTestFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> 
FilePath + -> Cabal.TestFlags +setupHsTestFlags _ _ verbosity builddir = + Cabal.TestFlags { + } +-} + +------------------------------------------------------------------------------ +-- * Sharing installed packages +------------------------------------------------------------------------------ + +-- +-- Nix style store management for tarball packages +-- +-- So here's our strategy: +-- +-- We use a per-user nix-style hashed store, but /only/ for tarball packages. +-- So that includes packages from hackage repos (and other http and local +-- tarballs). For packages in local directories we do not register them into +-- the shared store by default, we just build them locally inplace. +-- +-- The reason we do it like this is that it's easy to make stable hashes for +-- tarball packages, and these packages benefit most from sharing. By contrast +-- unpacked dir packages are harder to hash and they tend to change more +-- frequently so there's less benefit to sharing them. +-- +-- When using the nix store approach we have to run the solver *without* +-- looking at the packages installed in the store, just at the source packages +-- (plus core\/global installed packages). Then we do a post-processing pass +-- to replace configured packages in the plan with pre-existing ones, where +-- possible. Where possible of course means where the nix-style package hash +-- equals one that's already in the store. +-- +-- One extra wrinkle is that unless we know package tarball hashes upfront, we +-- will have to download the tarballs to find their hashes. So we have two +-- options: delay replacing source with pre-existing installed packages until +-- the point during the execution of the install plan where we have the +-- tarball, or try to do as much up-front as possible and then check again +-- during plan execution. The former isn't great because we would end up +-- telling users we're going to re-install loads of packages when in fact we +-- would just share them. It'd be better to give as accurate a prediction as +-- we can. The latter is better for users, but we do still have to check +-- during plan execution because it's important that we don't replace existing +-- installed packages even if they have the same package hash, because we +-- don't guarantee ABI stability. + +-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but +-- not replace installed packages with ghc-pkg. + +packageHashInputs :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> PackageHashInputs +packageHashInputs + pkgshared + pkg@ElaboratedConfiguredPackage{ + pkgSourceId, + pkgSourceHash = Just srchash, + pkgDependencies + } = + PackageHashInputs { + pkgHashPkgId = pkgSourceId, + pkgHashSourceHash = srchash, + pkgHashDirectDeps = Set.fromList + [ installedPackageId dep + | dep <- CD.select relevantDeps pkgDependencies ], + pkgHashOtherConfig = packageHashConfigInputs pkgshared pkg + } + where + -- Obviously the main deps are relevant + relevantDeps CD.ComponentLib = True + relevantDeps (CD.ComponentExe _) = True + -- Setup deps can affect the Setup.hs behaviour and thus what is built + relevantDeps CD.ComponentSetup = True + -- However testsuites and benchmarks do not get installed and should not + -- affect the result, so we do not include them. + relevantDeps (CD.ComponentTest _) = False + relevantDeps (CD.ComponentBench _) = False + +packageHashInputs _ pkg = + error $ "packageHashInputs: only for packages with source hashes. 
" + ++ display (packageId pkg) + +packageHashConfigInputs :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> PackageHashConfigInputs +packageHashConfigInputs + ElaboratedSharedConfig{..} + ElaboratedConfiguredPackage{..} = + + PackageHashConfigInputs { + pkgHashCompilerId = compilerId pkgConfigCompiler, + pkgHashPlatform = pkgConfigPlatform, + pkgHashFlagAssignment = pkgFlagAssignment, + pkgHashConfigureScriptArgs = pkgConfigureScriptArgs, + pkgHashVanillaLib = pkgVanillaLib, + pkgHashSharedLib = pkgSharedLib, + pkgHashDynExe = pkgDynExe, + pkgHashGHCiLib = pkgGHCiLib, + pkgHashProfLib = pkgProfLib, + pkgHashProfExe = pkgProfExe, + pkgHashProfLibDetail = pkgProfLibDetail, + pkgHashProfExeDetail = pkgProfExeDetail, + pkgHashCoverage = pkgCoverage, + pkgHashOptimization = pkgOptimization, + pkgHashSplitObjs = pkgSplitObjs, + pkgHashStripLibs = pkgStripLibs, + pkgHashStripExes = pkgStripExes, + pkgHashDebugInfo = pkgDebugInfo, + pkgHashExtraLibDirs = pkgExtraLibDirs, + pkgHashExtraFrameworkDirs = pkgExtraFrameworkDirs, + pkgHashExtraIncludeDirs = pkgExtraIncludeDirs, + pkgHashProgPrefix = pkgProgPrefix, + pkgHashProgSuffix = pkgProgSuffix + } + + +-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an +-- 'ElaboratedInstallPlan', replace configured source packages by pre-existing +-- installed packages whenever they exist. +-- +improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex + -> ElaboratedInstallPlan + -> ElaboratedInstallPlan +improveInstallPlanWithPreExistingPackages installedPkgIndex installPlan = + replaceWithPreExisting installPlan + [ ipkg + | InstallPlan.Configured pkg + <- InstallPlan.reverseTopologicalOrder installPlan + , ipkg <- maybeToList (canPackageBeImproved pkg) ] + where + --TODO: sanity checks: + -- * the installed package must have the expected deps etc + -- * the installed package must not be broken, valid dep closure + + --TODO: decide what to do if we encounter broken installed packages, + -- since overwriting is never safe. + + canPackageBeImproved pkg = + PackageIndex.lookupUnitId + installedPkgIndex (installedPackageId pkg) + + replaceWithPreExisting = + foldl' (\plan ipkg -> InstallPlan.preexisting + (installedPackageId ipkg) ipkg plan) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanOutput.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanOutput.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanOutput.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/ProjectPlanOutput.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,100 @@ +{-# LANGUAGE BangPatterns, RecordWildCards, NamedFieldPuns, + DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, + ScopedTypeVariables #-} + +-- | An experimental new UI for cabal for working with multiple packages +----------------------------------------------------------------------------- +module Distribution.Client.ProjectPlanOutput ( + writePlanExternalRepresentation, + ) where + +import Distribution.Client.ProjectPlanning.Types + ( ElaboratedInstallPlan, ElaboratedConfiguredPackage(..) + , ElaboratedSharedConfig(..) 
) +import Distribution.Client.DistDirLayout + +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.Utils.Json as J +import qualified Distribution.Client.ComponentDeps as ComponentDeps + +import Distribution.Package +import qualified Distribution.PackageDescription as PD +import Distribution.Text +import Distribution.Simple.Utils +import qualified Paths_cabal_install as Our (version) + +import Data.Monoid +import qualified Data.ByteString.Builder as BB + + +-- | Write out a representation of the elaborated install plan. +-- +-- This is for the benefit of debugging and external tools like editors. +-- +writePlanExternalRepresentation :: DistDirLayout + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> IO () +writePlanExternalRepresentation distDirLayout elaboratedInstallPlan + elaboratedSharedConfig = + writeFileAtomic (distProjectCacheFile distDirLayout "plan.json") $ + BB.toLazyByteString + . J.encodeToBuilder + $ encodePlanAsJson elaboratedInstallPlan elaboratedSharedConfig + +-- | Renders a subset of the elaborated install plan in a semi-stable JSON +-- format. +-- +encodePlanAsJson :: ElaboratedInstallPlan -> ElaboratedSharedConfig -> J.Value +encodePlanAsJson elaboratedInstallPlan _elaboratedSharedConfig = + --TODO: [nice to have] include all of the sharedPackageConfig and all of + -- the parts of the elaboratedInstallPlan + J.object [ "cabal-version" J..= jdisplay Our.version + , "cabal-lib-version" J..= jdisplay cabalVersion + , "install-plan" J..= jsonIPlan + ] + where + jsonIPlan = map toJ (InstallPlan.toList elaboratedInstallPlan) + + -- ipi :: InstalledPackageInfo + toJ (InstallPlan.PreExisting ipi) = + -- installed packages currently lack configuration information + -- such as their flag settings or non-lib components. + -- + -- TODO: how to find out whether package is "local"? + J.object + [ "type" J..= J.String "pre-existing" + , "id" J..= jdisplay (installedUnitId ipi) + , "components" J..= J.object + [ "lib" J..= J.object [ "depends" J..= map jdisplay (installedDepends ipi) ] ] + ] + + -- ecp :: ElaboratedConfiguredPackage + toJ (InstallPlan.Configured ecp) = + J.object + [ "type" J..= J.String "configured" + , "id" J..= (jdisplay . installedUnitId) ecp + , "components" J..= components + , "flags" J..= J.object [ fn J..= v + | (PD.FlagName fn,v) <- pkgFlagAssignment ecp ] + ] + where + components = J.object + [ comp2str c J..= J.object + [ "depends" J..= map (jdisplay . installedUnitId) v ] + | (c,v) <- ComponentDeps.toList (pkgDependencies ecp) ] + + toJ _ = error "encodePlanToJson: only expecting PreExisting and Configured" + + -- TODO: maybe move this helper to "ComponentDeps" module? + -- Or maybe define a 'Text' instance? + comp2str c = case c of + ComponentDeps.ComponentLib -> "lib" + ComponentDeps.ComponentExe s -> "exe:" <> s + ComponentDeps.ComponentTest s -> "test:" <> s + ComponentDeps.ComponentBench s -> "bench:" <> s + ComponentDeps.ComponentSetup -> "setup" + + jdisplay :: Text a => a -> J.Value + jdisplay = J.String . 
display + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/RebuildMonad.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/RebuildMonad.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/RebuildMonad.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/RebuildMonad.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,147 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +-- | An abstraction for re-running actions if values or files have changed. +-- +-- This is not a full-blown make-style incremental build system, it's a bit +-- more ad-hoc than that, but it's easier to integrate with existing code. +-- +-- It's a convenient interface to the "Distribution.Client.FileMonitor" +-- functions. +-- +module Distribution.Client.RebuildMonad ( + -- * Rebuild monad + Rebuild, + runRebuild, + askRoot, + + -- * Setting up file monitoring + monitorFiles, + MonitorFilePath, + monitorFile, + monitorFileHashed, + monitorNonExistentFile, + monitorDirectory, + monitorNonExistentDirectory, + monitorDirectoryExistence, + monitorFileOrDirectory, + monitorFileSearchPath, + monitorFileHashedSearchPath, + -- ** Monitoring file globs + monitorFileGlob, + monitorFileGlobExistence, + FilePathGlob(..), + FilePathRoot(..), + FilePathGlobRel(..), + GlobPiece(..), + + -- * Using a file monitor + FileMonitor(..), + newFileMonitor, + rerunIfChanged, + + -- * Utils + matchFileGlob, + ) where + +import Distribution.Client.FileMonitor +import Distribution.Client.Glob hiding (matchFileGlob) +import qualified Distribution.Client.Glob as Glob (matchFileGlob) + +import Distribution.Simple.Utils (debug) +import Distribution.Verbosity (Verbosity) + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad.State as State +import Control.Monad.Reader as Reader +import Distribution.Compat.Binary (Binary) +import System.FilePath (takeFileName) + + +-- | A monad layered on top of 'IO' to help with re-running actions when the +-- input files and values they depend on change. The crucial operations are +-- 'rerunIfChanged' and 'monitorFiles'. +-- +newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a) + deriving (Functor, Applicative, Monad, MonadIO) + +-- | Use this wihin the body action of 'rerunIfChanged' to declare that the +-- action depends on the given files. This can be based on what the action +-- actually did. It is these files that will be checked for changes next +-- time 'rerunIfChanged' is called for that 'FileMonitor'. +-- +-- Relative paths are interpreted as relative to an implicit root, ultimately +-- passed in to 'runRebuild'. +-- +monitorFiles :: [MonitorFilePath] -> Rebuild () +monitorFiles filespecs = Rebuild (State.modify (filespecs++)) + +-- | Run a 'Rebuild' IO action. +unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath]) +unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) [] + +-- | Run a 'Rebuild' IO action. +runRebuild :: FilePath -> Rebuild a -> IO a +runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) [] + +-- | The root that relative paths are interpreted as being relative to. 
+askRoot :: Rebuild FilePath +askRoot = Rebuild Reader.ask + +-- | This captures the standard use pattern for a 'FileMonitor': given a +-- monitor, an action and the input value the action depends on, either +-- re-run the action to get its output, or if the value and files the action +-- depends on have not changed then return a previously cached action result. +-- +-- The result is still in the 'Rebuild' monad, so these can be nested. +-- +-- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'. +-- +rerunIfChanged :: (Binary a, Binary b) + => Verbosity + -> FileMonitor a b + -> a + -> Rebuild b + -> Rebuild b +rerunIfChanged verbosity monitor key action = do + rootDir <- askRoot + changed <- liftIO $ checkFileMonitorChanged monitor rootDir key + case changed of + MonitorUnchanged result files -> do + liftIO $ debug verbosity $ "File monitor '" ++ monitorName + ++ "' unchanged." + monitorFiles files + return result + + MonitorChanged reason -> do + liftIO $ debug verbosity $ "File monitor '" ++ monitorName + ++ "' changed: " ++ showReason reason + startTime <- liftIO $ beginUpdateFileMonitor + (result, files) <- liftIO $ unRebuild rootDir action + liftIO $ updateFileMonitor monitor rootDir + (Just startTime) files key result + monitorFiles files + return result + where + monitorName = takeFileName (fileMonitorCacheFile monitor) + + showReason (MonitoredFileChanged file) = "file " ++ file + showReason (MonitoredValueChanged _) = "monitor value changed" + showReason MonitorFirstRun = "first run" + showReason MonitorCorruptCache = "invalid cache file" + + +-- | Utility to match a file glob against the file system, starting from a +-- given root directory. The results are all relative to the given root. +-- +-- Since this operates in the 'Rebuild' monad, it also monitors the given glob +-- for changes. +-- +matchFileGlob :: FilePathGlob -> Rebuild [FilePath] +matchFileGlob glob = do + root <- askRoot + monitorFiles [monitorFileGlobExistence glob] + liftIO $ Glob.matchFileGlob root glob + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Run.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Run.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Run.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Run.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,139 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Run +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of the 'run' command. 
+----------------------------------------------------------------------------- + +module Distribution.Client.Run ( run, splitRunArgs ) + where + +import Distribution.Client.Utils (tryCanonicalizePath) + +import Distribution.PackageDescription (Executable (..), + TestSuite(..), + Benchmark(..), + PackageDescription (..), + BuildInfo(buildable)) +import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) +import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) +import Distribution.Simple.BuildPaths (exeExtension) +import Distribution.Simple.LocalBuildInfo (ComponentName (..), + LocalBuildInfo (..), + getComponentLocalBuildInfo, + depLibraryPaths) +import Distribution.Simple.Utils (die, notice, warn, + rawSystemExitWithEnv, + addLibraryPath) +import Distribution.System (Platform (..)) +import Distribution.Verbosity (Verbosity) + +import qualified Distribution.Simple.GHCJS as GHCJS + +#if !MIN_VERSION_base(4,8,0) +import Data.Functor ((<$>)) +#endif +import Data.List (find) +import Data.Foldable (traverse_) +import System.Directory (getCurrentDirectory) +import Distribution.Compat.Environment (getEnvironment) +import System.FilePath ((<.>), ()) + + +-- | Return the executable to run and any extra arguments that should be +-- forwarded to it. Die in case of error. +splitRunArgs :: Verbosity -> LocalBuildInfo -> [String] + -> IO (Executable, [String]) +splitRunArgs verbosity lbi args = + case whichExecutable of -- Either err (wasManuallyChosen, exe, paramsRest) + Left err -> do + warn verbosity `traverse_` maybeWarning -- If there is a warning, print it. + die err + Right (True, exe, xs) -> return (exe, xs) + Right (False, exe, xs) -> do + let addition = " Interpreting all parameters to `run` as a parameter to" + ++ " the default executable." + -- If there is a warning, print it together with the addition. + warn verbosity `traverse_` fmap (++addition) maybeWarning + return (exe, xs) + where + pkg_descr = localPkgDescr lbi + whichExecutable :: Either String -- Error string. + ( Bool -- If it was manually chosen. + , Executable -- The executable. + , [String] -- The remaining parameters. + ) + whichExecutable = case (enabledExes, args) of + ([] , _) -> Left "Couldn't find any enabled executables." + ([exe], []) -> return (False, exe, []) + ([exe], (x:xs)) + | x == exeName exe -> return (True, exe, xs) + | otherwise -> return (False, exe, args) + (_ , []) -> Left + $ "This package contains multiple executables. " + ++ "You must pass the executable name as the first argument " + ++ "to 'cabal run'." + (_ , (x:xs)) -> + case find (\exe -> exeName exe == x) enabledExes of + Nothing -> Left $ "No executable named '" ++ x ++ "'." + Just exe -> return (True, exe, xs) + where + enabledExes = filter (buildable . buildInfo) (executables pkg_descr) + + maybeWarning :: Maybe String + maybeWarning = case args of + [] -> Nothing + (x:_) -> lookup x components + where + components :: [(String, String)] -- Component name, message. + components = + [ (name, "The executable '" ++ name ++ "' is disabled.") + | e <- executables pkg_descr + , not . buildable . buildInfo $ e, let name = exeName e] + + ++ [ (name, "There is a test-suite '" ++ name ++ "'," + ++ " but the `run` command is only for executables.") + | t <- testSuites pkg_descr + , let name = testName t] + + ++ [ (name, "There is a benchmark '" ++ name ++ "'," + ++ " but the `run` command is only for executables.") + | b <- benchmarks pkg_descr + , let name = benchmarkName b] + +-- | Run a given executable. 
+run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () +run verbosity lbi exe exeArgs = do + curDir <- getCurrentDirectory + let buildPref = buildDir lbi + pkg_descr = localPkgDescr lbi + dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", + curDir dataDir pkg_descr) + + (path, runArgs) <- + case compilerFlavor (compiler lbi) of + GHCJS -> do + let (script, cmd, cmdArgs) = + GHCJS.runCmd (withPrograms lbi) + (buildPref exeName exe exeName exe) + script' <- tryCanonicalizePath script + return (cmd, cmdArgs ++ [script']) + _ -> do + p <- tryCanonicalizePath $ + buildPref exeName exe (exeName exe <.> exeExtension) + return (p, []) + + env <- (dataDirEnvVar:) <$> getEnvironment + -- Add (DY)LD_LIBRARY_PATH if needed + env' <- if withDynExe lbi + then do let (Platform _ os) = hostPlatform lbi + clbi = getComponentLocalBuildInfo lbi + (CExeName (exeName exe)) + paths <- depLibraryPaths True False lbi clbi + return (addLibraryPath os paths env) + else return env + notice verbosity $ "Running " ++ exeName exe ++ "..." + rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Index.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Index.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Index.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Index.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,281 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.Index +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Querying and modifying local build tree references in the package index. +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.Index ( + createEmpty, + addBuildTreeRefs, + removeBuildTreeRefs, + ListIgnoredBuildTreeRefs(..), RefTypesToList(..), + DeleteSourceError(..), + listBuildTreeRefs, + validateIndexPath, + + defaultIndexFileName + ) where + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Tar.Index as Tar +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.IndexUtils ( BuildTreeRefType(..) + , refTypeFromTypeCode + , typeCodeFromRefType + , updatePackageIndexCacheFile + , readCacheStrict + , Index(..) 
) +import qualified Distribution.Client.IndexUtils as IndexUtils +import Distribution.Client.Utils ( byteStringToFilePath, filePathToByteString + , makeAbsoluteToCwd, tryCanonicalizePath + , tryFindAddSourcePackageDesc ) + +import Distribution.Simple.Utils ( die, debug ) +import Distribution.Compat.Exception ( tryIO ) +import Distribution.Verbosity ( Verbosity ) + +import qualified Data.ByteString.Lazy as BS +import Control.Exception ( evaluate, throw, Exception ) +import Control.Monad ( liftM, unless ) +import Control.Monad.Writer.Lazy (WriterT(..), runWriterT, tell) +import Data.List ( (\\), intersect, nub, find ) +import Data.Maybe ( catMaybes, fromMaybe ) +import Data.Either (partitionEithers) +import System.Directory ( createDirectoryIfMissing, + doesDirectoryExist, doesFileExist, + renameFile, canonicalizePath) +import System.FilePath ( (), (<.>), takeDirectory, takeExtension ) +import System.IO ( IOMode(..), withBinaryFile ) + +-- | A reference to a local build tree. +data BuildTreeRef = BuildTreeRef { + buildTreeRefType :: !BuildTreeRefType, + buildTreePath :: !FilePath + } + +defaultIndexFileName :: FilePath +defaultIndexFileName = "00-index.tar" + +-- | Given a path, ensure that it refers to a local build tree. +buildTreeRefFromPath :: BuildTreeRefType -> FilePath -> IO (Maybe BuildTreeRef) +buildTreeRefFromPath refType dir = do + dirExists <- doesDirectoryExist dir + unless dirExists $ + die $ "directory '" ++ dir ++ "' does not exist" + _ <- tryFindAddSourcePackageDesc dir "Error adding source reference." + return . Just $ BuildTreeRef refType dir + +-- | Given a tar archive entry, try to parse it as a local build tree reference. +readBuildTreeRef :: Tar.Entry -> Maybe BuildTreeRef +readBuildTreeRef entry = case Tar.entryContent entry of + (Tar.OtherEntryType typeCode bs size) + | (Tar.isBuildTreeRefTypeCode typeCode) + && (size == BS.length bs) -> Just $! BuildTreeRef + (refTypeFromTypeCode typeCode) + (byteStringToFilePath bs) + | otherwise -> Nothing + _ -> Nothing + +-- | Given a sequence of tar archive entries, extract all references to local +-- build trees. +readBuildTreeRefs :: Exception e => Tar.Entries e -> [BuildTreeRef] +readBuildTreeRefs = + catMaybes + . Tar.foldEntries (\e r -> readBuildTreeRef e : r) + [] throw + +-- | Given a path to a tar archive, extract all references to local build trees. +readBuildTreeRefsFromFile :: FilePath -> IO [BuildTreeRef] +readBuildTreeRefsFromFile = liftM (readBuildTreeRefs . Tar.read) . BS.readFile + +-- | Read build tree references from an index cache +readBuildTreeRefsFromCache :: Verbosity -> FilePath -> IO [BuildTreeRef] +readBuildTreeRefsFromCache verbosity indexPath = do + (mRefs, _prefs) <- readCacheStrict verbosity (SandboxIndex indexPath) buildTreeRef + return (catMaybes mRefs) + where + buildTreeRef pkgEntry = + case pkgEntry of + IndexUtils.NormalPackage _ _ _ _ -> Nothing + IndexUtils.BuildTreeRef typ _ _ path _ -> Just $ BuildTreeRef typ path + +-- | Given a local build tree ref, serialise it to a tar archive entry. +writeBuildTreeRef :: BuildTreeRef -> Tar.Entry +writeBuildTreeRef (BuildTreeRef refType path) = Tar.simpleEntry tarPath content + where + bs = filePathToByteString path + -- Provide a filename for tools that treat custom entries as ordinary files. + tarPath' = "local-build-tree-reference" + -- fromRight can't fail because the path is shorter than 255 characters. 
+ tarPath = fromRight $ Tar.toTarPath True tarPath' + content = Tar.OtherEntryType (typeCodeFromRefType refType) bs (BS.length bs) + + -- TODO: Move this to D.C.Utils? + fromRight (Left err) = error err + fromRight (Right a) = a + +-- | Check that the provided path is either an existing directory, or a tar +-- archive in an existing directory. +validateIndexPath :: FilePath -> IO FilePath +validateIndexPath path' = do + path <- makeAbsoluteToCwd path' + if (== ".tar") . takeExtension $ path + then return path + else do dirExists <- doesDirectoryExist path + unless dirExists $ + die $ "directory does not exist: '" ++ path ++ "'" + return $ path defaultIndexFileName + +-- | Create an empty index file. +createEmpty :: Verbosity -> FilePath -> IO () +createEmpty verbosity path = do + indexExists <- doesFileExist path + if indexExists + then debug verbosity $ "Package index already exists: " ++ path + else do + debug verbosity $ "Creating the index file '" ++ path ++ "'" + createDirectoryIfMissing True (takeDirectory path) + -- Equivalent to 'tar cvf empty.tar --files-from /dev/null'. + let zeros = BS.replicate (512*20) 0 + BS.writeFile path zeros + +-- | Add given local build tree references to the index. +addBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> BuildTreeRefType + -> IO () +addBuildTreeRefs _ _ [] _ = + error "Distribution.Client.Sandbox.Index.addBuildTreeRefs: unexpected" +addBuildTreeRefs verbosity path l' refType = do + checkIndexExists path + l <- liftM nub . mapM tryCanonicalizePath $ l' + treesInIndex <- fmap (map buildTreePath) (readBuildTreeRefsFromFile path) + -- Add only those paths that aren't already in the index. + treesToAdd <- mapM (buildTreeRefFromPath refType) (l \\ treesInIndex) + let entries = map writeBuildTreeRef (catMaybes treesToAdd) + unless (null entries) $ do + withBinaryFile path ReadWriteMode $ \h -> do + block <- Tar.hSeekEndEntryOffset h Nothing + debug verbosity $ "Writing at tar block: " ++ show block + BS.hPut h (Tar.write entries) + debug verbosity $ "Successfully appended to '" ++ path ++ "'" + updatePackageIndexCacheFile verbosity $ SandboxIndex path + +data DeleteSourceError = ErrNonregisteredSource { nrPath :: FilePath } + | ErrNonexistentSource { nePath :: FilePath } deriving Show + +-- | Remove given local build tree references from the index. +-- +-- Returns a tuple with either removed build tree refs or errors and a function +-- that converts from a provided build tree ref to corresponding full directory path. +removeBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] + -> IO ([Either DeleteSourceError FilePath], + (FilePath -> FilePath)) +removeBuildTreeRefs _ _ [] = + error "Distribution.Client.Sandbox.Index.removeBuildTreeRefs: unexpected" +removeBuildTreeRefs verbosity indexPath l = do + checkIndexExists indexPath + let tmpFile = indexPath <.> "tmp" + + canonRes <- mapM (\btr -> do res <- tryIO $ canonicalizePath btr + return $ case res of + Right pth -> Right (btr, pth) + Left _ -> Left $ ErrNonexistentSource btr) l + let (failures, convDict) = partitionEithers canonRes + allRefs = fmap snd convDict + + -- Performance note: on my system, it takes 'index --remove-source' + -- approx. 3,5s to filter a 65M file. Real-life indices are expected to be + -- much smaller. 
+ removedRefs <- doRemove convDict tmpFile + + renameFile tmpFile indexPath + debug verbosity $ "Successfully renamed '" ++ tmpFile + ++ "' to '" ++ indexPath ++ "'" + + unless (null removedRefs) $ + updatePackageIndexCacheFile verbosity $ SandboxIndex indexPath + + let results = fmap Right removedRefs + ++ fmap Left failures + ++ fmap (Left . ErrNonregisteredSource) + (fmap (convertWith convDict) (allRefs \\ removedRefs)) + + return (results, convertWith convDict) + + where + doRemove :: [(FilePath, FilePath)] -> FilePath -> IO [FilePath] + doRemove srcRefs tmpFile = do + (newIdx, changedPaths) <- + Tar.read `fmap` BS.readFile indexPath + >>= runWriterT . Tar.filterEntriesM (p $ fmap snd srcRefs) + BS.writeFile tmpFile . Tar.write . Tar.entriesToList $ newIdx + return changedPaths + + p :: [FilePath] -> Tar.Entry -> WriterT [FilePath] IO Bool + p refs entry = case readBuildTreeRef entry of + Nothing -> return True + -- FIXME: removing snapshot deps is done with `delete-source + -- .cabal-sandbox/snapshots/$SNAPSHOT_NAME`. Perhaps we also want to + -- support removing snapshots by providing the original path. + (Just (BuildTreeRef _ pth)) -> if pth `elem` refs + then tell [pth] >> return False + else return True + + convertWith dict pth = fromMaybe pth $ fmap fst $ find ((==pth) . snd) dict + +-- | A build tree ref can become ignored if the user later adds a build tree ref +-- with the same package ID. We display ignored build tree refs when the user +-- runs 'cabal sandbox list-sources', but do not look at their timestamps in +-- 'reinstallAddSourceDeps'. +data ListIgnoredBuildTreeRefs = ListIgnored | DontListIgnored + +-- | Which types of build tree refs should be listed? +data RefTypesToList = OnlySnapshots | OnlyLinks | LinksAndSnapshots + +-- | List the local build trees that are referred to from the index. +listBuildTreeRefs :: Verbosity -> ListIgnoredBuildTreeRefs -> RefTypesToList + -> FilePath + -> IO [FilePath] +listBuildTreeRefs verbosity listIgnored refTypesToList path = do + checkIndexExists path + buildTreeRefs <- + case listIgnored of + DontListIgnored -> do + paths <- listWithoutIgnored + case refTypesToList of + LinksAndSnapshots -> return paths + _ -> do + allPathsFiltered <- fmap (map buildTreePath . filter predicate) + listWithIgnored + _ <- evaluate (length allPathsFiltered) + return (paths `intersect` allPathsFiltered) + + ListIgnored -> fmap (map buildTreePath . filter predicate) listWithIgnored + + _ <- evaluate (length buildTreeRefs) + return buildTreeRefs + + where + predicate :: BuildTreeRef -> Bool + predicate = case refTypesToList of + OnlySnapshots -> (==) SnapshotRef . buildTreeRefType + OnlyLinks -> (==) LinkRef . buildTreeRefType + LinksAndSnapshots -> const True + + listWithIgnored :: IO [BuildTreeRef] + listWithIgnored = readBuildTreeRefsFromFile path + + listWithoutIgnored :: IO [FilePath] + listWithoutIgnored = fmap (map buildTreePath) + $ readBuildTreeRefsFromCache verbosity path + + +-- | Check that the package index file exists and exit with error if it does not. 
+checkIndexExists :: FilePath -> IO () +checkIndexExists path = do + indexExists <- doesFileExist path + unless indexExists $ + die $ "index does not exist: '" ++ path ++ "'" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/PackageEnvironment.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/PackageEnvironment.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/PackageEnvironment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/PackageEnvironment.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,557 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.PackageEnvironment +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Utilities for working with the package environment file. Patterned after +-- Distribution.Client.Config. +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.PackageEnvironment ( + PackageEnvironment(..) + , PackageEnvironmentType(..) + , classifyPackageEnvironment + , createPackageEnvironmentFile + , tryLoadSandboxPackageEnvironmentFile + , readPackageEnvironmentFile + , showPackageEnvironment + , showPackageEnvironmentWithComments + , setPackageDB + , sandboxPackageDBPath + , loadUserConfig + + , basePackageEnvironment + , initialPackageEnvironment + , commentPackageEnvironment + , sandboxPackageEnvironmentFile + , userPackageEnvironmentFile + ) where + +import Distribution.Client.Config ( SavedConfig(..), commentSavedConfig + , loadConfig, configFieldDescriptions + , haddockFlagsFields + , installDirsFields, withProgramsFields + , withProgramOptionsFields + , defaultCompiler ) +import Distribution.Client.Dependency.Types ( ConstraintSource (..) ) +import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) +import Distribution.Client.Setup ( GlobalFlags(..), ConfigExFlags(..) + , InstallFlags(..) + , defaultSandboxLocation ) +import Distribution.Utils.NubList ( toNubList ) +import Distribution.Simple.Compiler ( Compiler, PackageDB(..) + , compilerFlavor, showCompilerIdWithAbi ) +import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate + , defaultInstallDirs, combineInstallDirs + , fromPathTemplate, toPathTemplate ) +import Distribution.Simple.Setup ( Flag(..) + , ConfigFlags(..), HaddockFlags(..) + , fromFlagOrDefault, toFlag, flagToMaybe ) +import Distribution.Simple.Utils ( die, info, notice, warn ) +import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..) 
+ , commaListField, commaNewLineListField + , liftField, lineNo, locatedErrorMsg + , parseFilePathQ, readFields + , showPWarning, simpleField + , syntaxError, warning ) +import Distribution.System ( Platform ) +import Distribution.Verbosity ( Verbosity, normal ) +import Control.Monad ( foldM, liftM2, when, unless ) +import Data.List ( partition ) +import Data.Maybe ( isJust ) +import Distribution.Compat.Exception ( catchIO ) +import Distribution.Compat.Semigroup +import System.Directory ( doesDirectoryExist, doesFileExist + , renameFile ) +import System.FilePath ( (<.>), (), takeDirectory ) +import System.IO.Error ( isDoesNotExistError ) +import Text.PrettyPrint ( ($+$) ) + +import qualified Text.PrettyPrint as Disp +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) +import qualified Distribution.Text as Text +import GHC.Generics ( Generic ) + + +-- +-- * Configuration saved in the package environment file +-- + +-- TODO: would be nice to remove duplication between +-- D.C.Sandbox.PackageEnvironment and D.C.Config. +data PackageEnvironment = PackageEnvironment { + -- The 'inherit' feature is not used ATM, but could be useful in the future + -- for constructing nested sandboxes (see discussion in #1196). + pkgEnvInherit :: Flag FilePath, + pkgEnvSavedConfig :: SavedConfig +} deriving Generic + +instance Monoid PackageEnvironment where + mempty = gmempty + mappend = (<>) + +instance Semigroup PackageEnvironment where + (<>) = gmappend + +-- | The automatically-created package environment file that should not be +-- touched by the user. +sandboxPackageEnvironmentFile :: FilePath +sandboxPackageEnvironmentFile = "cabal.sandbox.config" + +-- | Optional package environment file that can be used to customize the default +-- settings. Created by the user. +userPackageEnvironmentFile :: FilePath +userPackageEnvironmentFile = "cabal.config" + +-- | Type of the current package environment. +data PackageEnvironmentType = + SandboxPackageEnvironment -- ^ './cabal.sandbox.config' + | UserPackageEnvironment -- ^ './cabal.config' + | AmbientPackageEnvironment -- ^ '~/.cabal/config' + +-- | Is there a 'cabal.sandbox.config' or 'cabal.config' in this +-- directory? +classifyPackageEnvironment :: FilePath -> Flag FilePath -> Flag Bool + -> IO PackageEnvironmentType +classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag = + do isSandbox <- liftM2 (||) (return forceSandboxConfig) + (configExists sandboxPackageEnvironmentFile) + isUser <- configExists userPackageEnvironmentFile + return (classify isSandbox isUser) + where + configExists fname = doesFileExist (pkgEnvDir fname) + ignoreSandbox = fromFlagOrDefault False ignoreSandboxFlag + forceSandboxConfig = isJust . flagToMaybe $ sandboxConfigFileFlag + + classify :: Bool -> Bool -> PackageEnvironmentType + classify True _ + | not ignoreSandbox = SandboxPackageEnvironment + classify _ True = UserPackageEnvironment + classify _ False = AmbientPackageEnvironment + +-- | Defaults common to 'initialPackageEnvironment' and +-- 'commentPackageEnvironment'. +commonPackageEnvironmentConfig :: FilePath -> SavedConfig +commonPackageEnvironmentConfig sandboxDir = + mempty { + savedConfigureFlags = mempty { + -- TODO: Currently, we follow cabal-dev and set 'user-install: False' in + -- the config file. In the future we may want to distinguish between + -- global, sandbox and user install types. 
+      configUserInstall = toFlag False,
+      configInstallDirs = installDirs
+    },
+    savedUserInstallDirs   = installDirs,
+    savedGlobalInstallDirs = installDirs,
+    savedGlobalFlags = mempty {
+      globalLogsDir = toFlag $ sandboxDir </> "logs",
+      -- Is this right? cabal-dev uses the global world file.
+      globalWorldFile = toFlag $ sandboxDir </> "world"
+    }
+  }
+  where
+    installDirs = sandboxInstallDirs sandboxDir
+
+-- | 'commonPackageEnvironmentConfig' wrapped inside a 'PackageEnvironment'.
+commonPackageEnvironment :: FilePath -> PackageEnvironment
+commonPackageEnvironment sandboxDir = mempty {
+  pkgEnvSavedConfig = commonPackageEnvironmentConfig sandboxDir
+  }
+
+-- | Given a path to a sandbox, return the corresponding InstallDirs record.
+sandboxInstallDirs :: FilePath -> InstallDirs (Flag PathTemplate)
+sandboxInstallDirs sandboxDir = mempty {
+  prefix = toFlag (toPathTemplate sandboxDir)
+  }
+
+-- | These are the absolute basic defaults, the fields that must be
+-- initialised. When we load the package environment from the file we layer the
+-- loaded values over these ones.
+basePackageEnvironment :: PackageEnvironment
+basePackageEnvironment =
+  mempty {
+    pkgEnvSavedConfig = mempty {
+      savedConfigureFlags = mempty {
+        configHcFlavor  = toFlag defaultCompiler,
+        configVerbosity = toFlag normal
+      }
+    }
+  }
+
+-- | Initial configuration that we write out to the package environment file if
+-- it does not exist. When the package environment gets loaded this
+-- configuration gets layered on top of 'basePackageEnvironment'.
+initialPackageEnvironment :: FilePath -> Compiler -> Platform
+                             -> IO PackageEnvironment
+initialPackageEnvironment sandboxDir compiler platform = do
+  defInstallDirs <- defaultInstallDirs (compilerFlavor compiler)
+                    {- userInstall= -} False {- _hasLibs= -} False
+  let initialConfig = commonPackageEnvironmentConfig sandboxDir
+      installDirs = combineInstallDirs (\d f -> Flag $ fromFlagOrDefault d f)
+                    defInstallDirs (savedUserInstallDirs initialConfig)
+  return $ mempty {
+    pkgEnvSavedConfig = initialConfig {
+      savedUserInstallDirs   = installDirs,
+      savedGlobalInstallDirs = installDirs,
+      savedGlobalFlags = (savedGlobalFlags initialConfig) {
+        globalLocalRepos = toNubList [sandboxDir </> "packages"]
+      },
+      savedConfigureFlags = setPackageDB sandboxDir compiler platform
+                            (savedConfigureFlags initialConfig),
+      savedInstallFlags = (savedInstallFlags initialConfig) {
+        installSummaryFile = toNubList [toPathTemplate (sandboxDir </>
+                                                        "logs" </> "build.log")]
+      }
+    }
+  }
+
+-- | Return the path to the sandbox package database.
+sandboxPackageDBPath :: FilePath -> Compiler -> Platform -> String
+sandboxPackageDBPath sandboxDir compiler platform =
+    sandboxDir
+         </> (Text.display platform ++ "-"
+              ++ showCompilerIdWithAbi compiler
+              ++ "-packages.conf.d")
+-- The path in sandboxPackageDBPath should be kept in sync with the
+-- path in the bootstrap.sh which is used to bootstrap cabal-install
+-- into a sandbox.
+
+-- | Use the package DB location specific for this compiler.
+setPackageDB :: FilePath -> Compiler -> Platform -> ConfigFlags -> ConfigFlags
+setPackageDB sandboxDir compiler platform configFlags =
+  configFlags {
+    configPackageDBs = [Just (SpecificPackageDB $ sandboxPackageDBPath
+                                                  sandboxDir
+                                                  compiler
+                                                  platform)]
+  }
+
+-- | Almost the same as 'savedConf `mappend` pkgEnv', but some settings are
+-- overridden instead of mappend'ed.
+overrideSandboxSettings :: PackageEnvironment -> PackageEnvironment -> + PackageEnvironment +overrideSandboxSettings pkgEnv0 pkgEnv = + pkgEnv { + pkgEnvSavedConfig = mappendedConf { + savedConfigureFlags = (savedConfigureFlags mappendedConf) { + configPackageDBs = configPackageDBs pkgEnvConfigureFlags + } + , savedInstallFlags = (savedInstallFlags mappendedConf) { + installSummaryFile = installSummaryFile pkgEnvInstallFlags + } + }, + pkgEnvInherit = pkgEnvInherit pkgEnv0 + } + where + pkgEnvConf = pkgEnvSavedConfig pkgEnv + mappendedConf = (pkgEnvSavedConfig pkgEnv0) `mappend` pkgEnvConf + pkgEnvConfigureFlags = savedConfigureFlags pkgEnvConf + pkgEnvInstallFlags = savedInstallFlags pkgEnvConf + +-- | Default values that get used if no value is given. Used here to include in +-- comments when we write out the initial package environment. +commentPackageEnvironment :: FilePath -> IO PackageEnvironment +commentPackageEnvironment sandboxDir = do + commentConf <- commentSavedConfig + let baseConf = commonPackageEnvironmentConfig sandboxDir + return $ mempty { + pkgEnvSavedConfig = commentConf `mappend` baseConf + } + +-- | If this package environment inherits from some other package environment, +-- return that package environment; otherwise return mempty. +inheritedPackageEnvironment :: Verbosity -> PackageEnvironment + -> IO PackageEnvironment +inheritedPackageEnvironment verbosity pkgEnv = do + case (pkgEnvInherit pkgEnv) of + NoFlag -> return mempty + confPathFlag@(Flag _) -> do + conf <- loadConfig verbosity confPathFlag + return $ mempty { pkgEnvSavedConfig = conf } + +-- | Load the user package environment if it exists (the optional "cabal.config" +-- file). If it does not exist locally, attempt to load an optional global one. +userPackageEnvironment :: Verbosity -> FilePath -> Maybe FilePath -> IO PackageEnvironment +userPackageEnvironment verbosity pkgEnvDir globalConfigLocation = do + let path = pkgEnvDir userPackageEnvironmentFile + minp <- readPackageEnvironmentFile (ConstraintSourceUserConfig path) mempty path + case (minp, globalConfigLocation) of + (Just parseRes, _) -> processConfigParse path parseRes + (_, Just globalLoc) -> maybe (warn verbosity ("no constraints file found at " ++ globalLoc) >> return mempty) (processConfigParse globalLoc) =<< readPackageEnvironmentFile (ConstraintSourceUserConfig globalLoc) mempty globalLoc + _ -> return mempty + where + processConfigParse path (ParseOk warns parseResult) = do + when (not $ null warns) $ warn verbosity $ + unlines (map (showPWarning path) warns) + return parseResult + processConfigParse path (ParseFailed err) = do + let (line, msg) = locatedErrorMsg err + warn verbosity $ "Error parsing package environment file " ++ path + ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg + return mempty + +-- | Same as @userPackageEnvironmentFile@, but returns a SavedConfig. +loadUserConfig :: Verbosity -> FilePath -> Maybe FilePath -> IO SavedConfig +loadUserConfig verbosity pkgEnvDir globalConfigLocation = + fmap pkgEnvSavedConfig $ userPackageEnvironment verbosity pkgEnvDir globalConfigLocation + +-- | Common error handling code used by 'tryLoadSandboxPackageEnvironment' and +-- 'updatePackageEnvironment'. 
+handleParseResult :: Verbosity -> FilePath + -> Maybe (ParseResult PackageEnvironment) + -> IO PackageEnvironment +handleParseResult verbosity path minp = + case minp of + Nothing -> die $ + "The package environment file '" ++ path ++ "' doesn't exist" + Just (ParseOk warns parseResult) -> do + when (not $ null warns) $ warn verbosity $ + unlines (map (showPWarning path) warns) + return parseResult + Just (ParseFailed err) -> do + let (line, msg) = locatedErrorMsg err + die $ "Error parsing package environment file " ++ path + ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg + +-- | Try to load the given package environment file, exiting with error if it +-- doesn't exist. Also returns the path to the sandbox directory. The path +-- parameter should refer to an existing file. +tryLoadSandboxPackageEnvironmentFile :: Verbosity -> FilePath -> (Flag FilePath) + -> IO (FilePath, PackageEnvironment) +tryLoadSandboxPackageEnvironmentFile verbosity pkgEnvFile configFileFlag = do + let pkgEnvDir = takeDirectory pkgEnvFile + minp <- readPackageEnvironmentFile + (ConstraintSourceSandboxConfig pkgEnvFile) mempty pkgEnvFile + pkgEnv <- handleParseResult verbosity pkgEnvFile minp + + -- Get the saved sandbox directory. + -- TODO: Use substPathTemplate with + -- compilerTemplateEnv ++ platformTemplateEnv ++ abiTemplateEnv. + let sandboxDir = fromFlagOrDefault defaultSandboxLocation + . fmap fromPathTemplate . prefix . savedUserInstallDirs + . pkgEnvSavedConfig $ pkgEnv + + -- Do some sanity checks + dirExists <- doesDirectoryExist sandboxDir + -- TODO: Also check for an initialised package DB? + unless dirExists $ + die ("No sandbox exists at " ++ sandboxDir) + info verbosity $ "Using a sandbox located at " ++ sandboxDir + + let base = basePackageEnvironment + let common = commonPackageEnvironment sandboxDir + user <- userPackageEnvironment verbosity pkgEnvDir Nothing --TODO + inherited <- inheritedPackageEnvironment verbosity user + + -- Layer the package environment settings over settings from ~/.cabal/config. + cabalConfig <- fmap unsetSymlinkBinDir $ loadConfig verbosity configFileFlag + return (sandboxDir, + updateInstallDirs $ + (base `mappend` (toPkgEnv cabalConfig) `mappend` + common `mappend` inherited `mappend` user) + `overrideSandboxSettings` pkgEnv) + where + toPkgEnv config = mempty { pkgEnvSavedConfig = config } + + updateInstallDirs pkgEnv = + let config = pkgEnvSavedConfig pkgEnv + configureFlags = savedConfigureFlags config + installDirs = savedUserInstallDirs config + in pkgEnv { + pkgEnvSavedConfig = config { + savedConfigureFlags = configureFlags { + configInstallDirs = installDirs + } + } + } + + -- We don't want to inherit the value of 'symlink-bindir' from + -- '~/.cabal/config'. See #1514. + unsetSymlinkBinDir config = + let installFlags = savedInstallFlags config + in config { + savedInstallFlags = installFlags { + installSymlinkBinDir = NoFlag + } + } + +-- | Create a new package environment file, replacing the existing one if it +-- exists. Note that the path parameters should point to existing directories. 
+createPackageEnvironmentFile :: Verbosity -> FilePath -> FilePath + -> Compiler + -> Platform + -> IO () +createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile compiler platform = do + notice verbosity $ "Writing a default package environment file to " ++ pkgEnvFile + initialPkgEnv <- initialPackageEnvironment sandboxDir compiler platform + writePackageEnvironmentFile pkgEnvFile initialPkgEnv + +-- | Descriptions of all fields in the package environment file. +pkgEnvFieldDescrs :: ConstraintSource -> [FieldDescr PackageEnvironment] +pkgEnvFieldDescrs src = [ + simpleField "inherit" + (fromFlagOrDefault Disp.empty . fmap Disp.text) (optional parseFilePathQ) + pkgEnvInherit (\v pkgEnv -> pkgEnv { pkgEnvInherit = v }) + + , commaNewLineListField "constraints" + (Text.disp . fst) ((\pc -> (pc, src)) `fmap` Text.parse) + (configExConstraints . savedConfigureExFlags . pkgEnvSavedConfig) + (\v pkgEnv -> updateConfigureExFlags pkgEnv + (\flags -> flags { configExConstraints = v })) + + , commaListField "preferences" + Text.disp Text.parse + (configPreferences . savedConfigureExFlags . pkgEnvSavedConfig) + (\v pkgEnv -> updateConfigureExFlags pkgEnv + (\flags -> flags { configPreferences = v })) + ] + ++ map toPkgEnv configFieldDescriptions' + where + optional = Parse.option mempty . fmap toFlag + + configFieldDescriptions' :: [FieldDescr SavedConfig] + configFieldDescriptions' = filter + (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") + (configFieldDescriptions src) + + toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment + toPkgEnv fieldDescr = + liftField pkgEnvSavedConfig + (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) + fieldDescr + + updateConfigureExFlags :: PackageEnvironment + -> (ConfigExFlags -> ConfigExFlags) + -> PackageEnvironment + updateConfigureExFlags pkgEnv f = pkgEnv { + pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { + savedConfigureExFlags = f . savedConfigureExFlags . pkgEnvSavedConfig + $ pkgEnv + } + } + +-- | Read the package environment file. +readPackageEnvironmentFile :: ConstraintSource -> PackageEnvironment -> FilePath + -> IO (Maybe (ParseResult PackageEnvironment)) +readPackageEnvironmentFile src initial file = + handleNotExists $ + fmap (Just . parsePackageEnvironment src initial) (readFile file) + where + handleNotExists action = catchIO action $ \ioe -> + if isDoesNotExistError ioe + then return Nothing + else ioError ioe + +-- | Parse the package environment file. 
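+--
+-- For orientation only, a small, purely illustrative input that this parser is
+-- meant to accept (the constraint and the path below are invented for the
+-- example and are not taken from the upstream sources):
+--
+-- > constraints: foo ==1.0
+-- > install-dirs
+-- >   prefix: /home/user/project/.cabal-sandbox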
+parsePackageEnvironment :: ConstraintSource -> PackageEnvironment -> String + -> ParseResult PackageEnvironment +parsePackageEnvironment src initial str = do + fields <- readFields str + let (knownSections, others) = partition isKnownSection fields + pkgEnv <- parse others + let config = pkgEnvSavedConfig pkgEnv + installDirs0 = savedUserInstallDirs config + (haddockFlags, installDirs, paths, args) <- + foldM parseSections + (savedHaddockFlags config, installDirs0, [], []) + knownSections + return pkgEnv { + pkgEnvSavedConfig = config { + savedConfigureFlags = (savedConfigureFlags config) { + configProgramPaths = paths, + configProgramArgs = args + }, + savedHaddockFlags = haddockFlags, + savedUserInstallDirs = installDirs, + savedGlobalInstallDirs = installDirs + } + } + + where + isKnownSection :: ParseUtils.Field -> Bool + isKnownSection (ParseUtils.Section _ "haddock" _ _) = True + isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True + isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True + isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True + isKnownSection _ = False + + parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment + parse = parseFields (pkgEnvFieldDescrs src) initial + + parseSections :: SectionsAccum -> ParseUtils.Field + -> ParseResult SectionsAccum + parseSections accum@(h,d,p,a) + (ParseUtils.Section _ "haddock" name fs) + | name == "" = do h' <- parseFields haddockFlagsFields h fs + return (h', d, p, a) + | otherwise = do + warning "The 'haddock' section should be unnamed" + return accum + parseSections (h,d,p,a) + (ParseUtils.Section line "install-dirs" name fs) + | name == "" = do d' <- parseFields installDirsFields d fs + return (h, d',p,a) + | otherwise = + syntaxError line $ + "Named 'install-dirs' section: '" ++ name + ++ "'. Note that named 'install-dirs' sections are not allowed in the '" + ++ userPackageEnvironmentFile ++ "' file." + parseSections accum@(h, d,p,a) + (ParseUtils.Section _ "program-locations" name fs) + | name == "" = do p' <- parseFields withProgramsFields p fs + return (h, d, p', a) + | otherwise = do + warning "The 'program-locations' section should be unnamed" + return accum + parseSections accum@(h, d, p, a) + (ParseUtils.Section _ "program-default-options" name fs) + | name == "" = do a' <- parseFields withProgramOptionsFields a fs + return (h, d, p, a') + | otherwise = do + warning "The 'program-default-options' section should be unnamed" + return accum + parseSections accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +-- | Accumulator type for 'parseSections'. +type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) + , [(String, FilePath)], [(String, [String])]) + +-- | Write out the package environment file. +writePackageEnvironmentFile :: FilePath -> PackageEnvironment -> IO () +writePackageEnvironmentFile path pkgEnv = do + let tmpPath = (path <.> "tmp") + writeFile tmpPath $ explanation ++ pkgEnvStr ++ "\n" + renameFile tmpPath path + where + pkgEnvStr = showPackageEnvironment pkgEnv + explanation = unlines + ["-- This is a Cabal package environment file." + ,"-- THIS FILE IS AUTO-GENERATED. DO NOT EDIT DIRECTLY." + ,"-- Please create a 'cabal.config' file in the same directory" + ,"-- if you want to change the default settings for this sandbox." + ,"","" + ] + +-- | Pretty-print the package environment. 
+showPackageEnvironment :: PackageEnvironment -> String +showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv + +-- | Pretty-print the package environment with default values for empty fields +-- commented out (just like the default ~/.cabal/config). +showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) + -> PackageEnvironment + -> String +showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ + ppFields (pkgEnvFieldDescrs ConstraintSourceUnknown) + mdefPkgEnv pkgEnv + $+$ Disp.text "" + $+$ ppSection "install-dirs" "" installDirsFields + (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) + where + installDirsSection = savedUserInstallDirs . pkgEnvSavedConfig diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Timestamp.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Timestamp.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,268 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.Timestamp +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Timestamp file handling (for add-source dependencies). +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.Timestamp ( + AddSourceTimestamp, + withAddTimestamps, + withUpdateTimestamps, + maybeAddCompilerTimestampRecord, + listModifiedDeps, + removeTimestamps, + + -- * For testing + TimestampFileRecord, + readTimestampFile, + writeTimestampFile + ) where + +import Control.Monad (filterM, forM, when) +import Data.Char (isSpace) +import Data.List (partition) +import System.Directory (renameFile) +import System.FilePath ((<.>), ()) +import qualified Data.Map as M + +import Distribution.Compiler (CompilerId) +import Distribution.Simple.Utils (debug, die, warn) +import Distribution.System (Platform) +import Distribution.Text (display) +import Distribution.Verbosity (Verbosity) + +import Distribution.Client.SrcDist (allPackageSourceFiles) +import Distribution.Client.Sandbox.Index + (ListIgnoredBuildTreeRefs (ListIgnored), RefTypesToList(OnlyLinks) + ,listBuildTreeRefs) + +import Distribution.Compat.Exception (catchIO) +import Distribution.Client.Compat.Time (ModTime, getCurTime, + getModTime, + posixSecondsToModTime) + + +-- | Timestamp of an add-source dependency. +type AddSourceTimestamp = (FilePath, ModTime) +-- | Timestamp file record - a string identifying the compiler & platform plus a +-- list of add-source timestamps. +type TimestampFileRecord = (String, [AddSourceTimestamp]) + +timestampRecordKey :: CompilerId -> Platform -> String +timestampRecordKey compId platform = display platform ++ "-" ++ display compId + +-- | The 'add-source-timestamps' file keeps the timestamps of all add-source +-- dependencies. It is initially populated by 'sandbox add-source' and kept +-- current by 'reinstallAddSourceDeps' and 'configure -w'. The user can install +-- add-source deps manually with 'cabal install' after having edited them, so we +-- can err on the side of caution sometimes. 
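+-- As a purely illustrative sketch of the on-disk format produced by
+-- 'writeTimestampFile' below (the platform/compiler key and the path are
+-- invented for the example, and the actual timestamp value is elided):
+--
+-- > 2
+-- > [("x86_64-linux-ghc-8.0.1",[("/home/user/src/foo",<mod-time>)])]
+--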
+-- FIXME: We should keep this info in the index file, together with build tree +-- refs. +timestampFileName :: FilePath +timestampFileName = "add-source-timestamps" + +-- | Read the timestamp file. Exits with error if the timestamp file is +-- corrupted. Returns an empty list if the file doesn't exist. +readTimestampFile :: FilePath -> IO [TimestampFileRecord] +readTimestampFile timestampFile = do + timestampString <- readFile timestampFile `catchIO` \_ -> return "[]" + case reads timestampString of + [(version, s)] + | version == (2::Int) -> + case reads s of + [(timestamps, s')] | all isSpace s' -> return timestamps + _ -> dieCorrupted + | otherwise -> dieWrongFormat + + -- Old format (timestamps are POSIX seconds). Convert to new format. + [] -> + case reads timestampString of + [(timestamps, s)] | all isSpace s -> do + let timestamps' = map (\(i, ts) -> + (i, map (\(p, t) -> + (p, posixSecondsToModTime t)) ts)) + timestamps + writeTimestampFile timestampFile timestamps' + return timestamps' + _ -> dieCorrupted + _ -> dieCorrupted + where + dieWrongFormat = die $ wrongFormat ++ deleteAndRecreate + dieCorrupted = die $ corrupted ++ deleteAndRecreate + wrongFormat = "The timestamps file is in the wrong format." + corrupted = "The timestamps file is corrupted." + deleteAndRecreate = " Please delete and recreate the sandbox." + +-- | Write the timestamp file, atomically. +writeTimestampFile :: FilePath -> [TimestampFileRecord] -> IO () +writeTimestampFile timestampFile timestamps = do + writeFile timestampTmpFile "2\n" -- version + appendFile timestampTmpFile (show timestamps ++ "\n") + renameFile timestampTmpFile timestampFile + where + timestampTmpFile = timestampFile <.> "tmp" + +-- | Read, process and write the timestamp file in one go. +withTimestampFile :: FilePath + -> ([TimestampFileRecord] -> IO [TimestampFileRecord]) + -> IO () +withTimestampFile sandboxDir process = do + let timestampFile = sandboxDir timestampFileName + timestampRecords <- readTimestampFile timestampFile >>= process + writeTimestampFile timestampFile timestampRecords + +-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps +-- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list +-- for each path. If a timestamp for a given path already exists in the list, +-- update it. +addTimestamps :: ModTime -> [AddSourceTimestamp] -> [FilePath] + -> [AddSourceTimestamp] +addTimestamps initial timestamps newPaths = + [ (p, initial) | p <- newPaths ] ++ oldTimestamps + where + (oldTimestamps, _toBeUpdated) = + partition (\(path, _) -> path `notElem` newPaths) timestamps + +-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps +-- we've reinstalled and a new timestamp value, update the timestamp value for +-- the deps in the list. If there are new paths in the list, ignore them. +updateTimestamps :: [AddSourceTimestamp] -> [FilePath] -> ModTime + -> [AddSourceTimestamp] +updateTimestamps timestamps pathsToUpdate newTimestamp = + foldr updateTimestamp [] timestamps + where + updateTimestamp t@(path, _oldTimestamp) rest + | path `elem` pathsToUpdate = (path, newTimestamp) : rest + | otherwise = t : rest + +-- | Given a list of 'TimestampFileRecord's and a list of paths to add-source +-- deps we've removed, remove those deps from the list. 
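+--
+-- As an illustration (the paths are hypothetical), with the implementation
+-- below @removeTimestamps' [("/a", t1), ("/b", t2)] ["/b"]@ evaluates to
+-- @[("/a", t1)]@.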
+removeTimestamps' :: [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] +removeTimestamps' l pathsToRemove = foldr removeTimestamp [] l + where + removeTimestamp t@(path, _oldTimestamp) rest = + if path `elem` pathsToRemove + then rest + else t : rest + +-- | If a timestamp record for this compiler doesn't exist, add a new one. +maybeAddCompilerTimestampRecord :: Verbosity -> FilePath -> FilePath + -> CompilerId -> Platform + -> IO () +maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + compId platform = do + let key = timestampRecordKey compId platform + withTimestampFile sandboxDir $ \timestampRecords -> do + case lookup key timestampRecords of + Just _ -> return timestampRecords + Nothing -> do + buildTreeRefs <- listBuildTreeRefs verbosity ListIgnored OnlyLinks + indexFile + now <- getCurTime + let timestamps = map (\p -> (p, now)) buildTreeRefs + return $ (key, timestamps):timestampRecords + +-- | Given an IO action that returns a list of build tree refs, add those +-- build tree refs to the timestamps file (for all compilers). +withAddTimestamps :: FilePath -> IO [FilePath] -> IO () +withAddTimestamps sandboxDir act = do + let initialTimestamp = minBound + withActionOnAllTimestamps (addTimestamps initialTimestamp) sandboxDir act + +-- | Given a list of build tree refs, remove those +-- build tree refs from the timestamps file (for all compilers). +removeTimestamps :: FilePath -> [FilePath] -> IO () +removeTimestamps idxFile = + withActionOnAllTimestamps removeTimestamps' idxFile . return + +-- | Given an IO action that returns a list of build tree refs, update the +-- timestamps of the returned build tree refs to the current time (only for the +-- given compiler & platform). +withUpdateTimestamps :: FilePath -> CompilerId -> Platform + ->([AddSourceTimestamp] -> IO [FilePath]) + -> IO () +withUpdateTimestamps = + withActionOnCompilerTimestamps updateTimestamps + +-- | Helper for implementing 'withAddTimestamps' and +-- 'withRemoveTimestamps'. Runs a given action on the list of +-- 'AddSourceTimestamp's for all compilers, applies 'f' to the result and then +-- updates the timestamp file. The IO action is run only once. +withActionOnAllTimestamps :: ([AddSourceTimestamp] -> [FilePath] + -> [AddSourceTimestamp]) + -> FilePath + -> IO [FilePath] + -> IO () +withActionOnAllTimestamps f sandboxDir act = + withTimestampFile sandboxDir $ \timestampRecords -> do + paths <- act + return [(key, f timestamps paths) | (key, timestamps) <- timestampRecords] + +-- | Helper for implementing 'withUpdateTimestamps'. Runs a given action on the +-- list of 'AddSourceTimestamp's for this compiler, applies 'f' to the result +-- and then updates the timestamp file record. The IO action is run only once. +withActionOnCompilerTimestamps :: ([AddSourceTimestamp] + -> [FilePath] -> ModTime + -> [AddSourceTimestamp]) + -> FilePath + -> CompilerId + -> Platform + -> ([AddSourceTimestamp] -> IO [FilePath]) + -> IO () +withActionOnCompilerTimestamps f sandboxDir compId platform act = do + let needle = timestampRecordKey compId platform + withTimestampFile sandboxDir $ \timestampRecords -> do + timestampRecords' <- forM timestampRecords $ \r@(key, timestamps) -> + if key == needle + then do paths <- act timestamps + now <- getCurTime + return (key, f timestamps paths now) + else return r + return timestampRecords' + +-- | Has this dependency been modified since we have last looked at it? 
+isDepModified :: Verbosity -> ModTime -> AddSourceTimestamp -> IO Bool
+isDepModified verbosity now (packageDir, timestamp) = do
+  debug verbosity ("Checking whether the dependency is modified: " ++ packageDir)
+  depSources <- allPackageSourceFiles verbosity packageDir
+  go depSources
+
+  where
+    go []          = return False
+    go (dep0:rest) = do
+      -- FIXME: What if the clock jumps backwards at any point? For now we only
+      -- print a warning.
+      let dep = packageDir </> dep0
+      modTime <- getModTime dep
+      when (modTime > now) $
+        warn verbosity $ "File '" ++ dep
+                         ++ "' has a modification time that is in the future."
+      if modTime >= timestamp
+        then do
+          debug verbosity ("Dependency has a modified source file: " ++ dep)
+          return True
+        else go rest
+
+-- | List all modified dependencies.
+listModifiedDeps :: Verbosity -> FilePath -> CompilerId -> Platform
+                    -> M.Map FilePath a
+                    -- ^ The set of all installed add-source deps.
+                    -> IO [FilePath]
+listModifiedDeps verbosity sandboxDir compId platform installedDepsMap = do
+  timestampRecords <- readTimestampFile (sandboxDir </> timestampFileName)
+  let needle  = timestampRecordKey compId platform
+  timestamps <- maybe noTimestampRecord return
+                (lookup needle timestampRecords)
+  now <- getCurTime
+  fmap (map fst) . filterM (isDepModified verbosity now)
+    . filter (\ts -> fst ts `M.member` installedDepsMap)
+    $ timestamps
+
+  where
+    noTimestampRecord = die $ "Couldn't find a timestamp record for the given "
+                        ++ "compiler/platform pair. "
+                        ++ "Please report this on the Cabal bug tracker: "
+                        ++ "https://github.com/haskell/cabal/issues/new ."
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Types.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Types.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox/Types.hs 2016-12-23 10:35:30.000000000 +0000
@@ -0,0 +1,67 @@
+{-# LANGUAGE CPP #-}
+-----------------------------------------------------------------------------
+-- |
+-- Module      :  Distribution.Client.Sandbox.Types
+-- Maintainer  :  cabal-devel@haskell.org
+-- Portability :  portable
+--
+-- Helpers for writing code that works both inside and outside a sandbox.
+-----------------------------------------------------------------------------
+
+module Distribution.Client.Sandbox.Types (
+  UseSandbox(..), isUseSandbox, whenUsingSandbox,
+  SandboxPackageInfo(..)
+  ) where
+
+import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex
+import Distribution.Client.Types (SourcePackage)
+import Distribution.Compat.Semigroup (Semigroup((<>)))
+
+#if !MIN_VERSION_base(4,8,0)
+import Data.Monoid (Monoid(..))
+#endif
+import qualified Data.Set as S
+
+-- | Are we using a sandbox?
+data UseSandbox = UseSandbox FilePath | NoSandbox
+
+instance Monoid UseSandbox where
+  mempty  = NoSandbox
+  mappend = (<>)
+
+instance Semigroup UseSandbox where
+  NoSandbox         <> s                     = s
+  u0@(UseSandbox _) <> NoSandbox             = u0
+  (UseSandbox _)    <> u1@(UseSandbox _)     = u1
+
+-- | Convert a @UseSandbox@ value to a boolean. Useful in conjunction with
+-- @when@.
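+-- A typical (illustrative) call site looks like
+-- @when (isUseSandbox useSandbox) someSandboxOnlyAction@, where
+-- @someSandboxOnlyAction@ is only a placeholder name for this example.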
+isUseSandbox :: UseSandbox -> Bool +isUseSandbox (UseSandbox _) = True +isUseSandbox NoSandbox = False + +-- | Execute an action only if we're in a sandbox, feeding to it the path to the +-- sandbox directory. +whenUsingSandbox :: UseSandbox -> (FilePath -> IO ()) -> IO () +whenUsingSandbox NoSandbox _ = return () +whenUsingSandbox (UseSandbox sandboxDir) act = act sandboxDir + +-- | Data about the packages installed in the sandbox that is passed from +-- 'reinstallAddSourceDeps' to the solver. +data SandboxPackageInfo = SandboxPackageInfo { + modifiedAddSourceDependencies :: ![SourcePackage], + -- ^ Modified add-source deps that we want to reinstall. These are guaranteed + -- to be already installed in the sandbox. + + otherAddSourceDependencies :: ![SourcePackage], + -- ^ Remaining add-source deps. Some of these may be not installed in the + -- sandbox. + + otherInstalledSandboxPackages :: !InstalledPackageIndex.InstalledPackageIndex, + -- ^ All packages installed in the sandbox. Intersection with + -- 'modifiedAddSourceDependencies' and/or 'otherAddSourceDependencies' can be + -- non-empty. + + allAddSourceDependencies :: !(S.Set FilePath) + -- ^ A set of paths to all add-source dependencies, for convenience. + } diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Sandbox.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,886 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- UI for the sandboxing functionality. +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox ( + sandboxInit, + sandboxDelete, + sandboxAddSource, + sandboxAddSourceSnapshot, + sandboxDeleteSource, + sandboxListSources, + sandboxHcPkg, + dumpPackageEnvironment, + withSandboxBinDirOnSearchPath, + + getSandboxConfigFilePath, + loadConfigOrSandboxConfig, + findSavedDistPref, + initPackageDBIfNeeded, + maybeWithSandboxDirOnSearchPath, + + WereDepsReinstalled(..), + reinstallAddSourceDeps, + maybeReinstallAddSourceDeps, + + SandboxPackageInfo(..), + maybeWithSandboxPackageInfo, + + tryGetIndexFilePath, + sandboxBuildDir, + getInstalledPackagesInSandbox, + updateSandboxConfigFileFlag, + updateInstallDirs, + + configPackageDB', configCompilerAux', getPersistOrConfigCompiler + ) where + +import Distribution.Client.Setup + ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) + , GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags + , defaultSandboxLocation, withRepoContext ) +import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps + , maybeAddCompilerTimestampRecord + , withAddTimestamps + , removeTimestamps ) +import Distribution.Client.Config + ( SavedConfig(..), defaultUserInstall, loadConfig ) +import Distribution.Client.Dependency ( foldProgress ) +import Distribution.Client.IndexUtils ( BuildTreeRefType(..) 
) +import Distribution.Client.Install ( InstallArgs, + makeInstallContext, + makeInstallPlan, + processInstallPlan ) +import Distribution.Utils.NubList ( fromNubList ) + +import Distribution.Client.Sandbox.PackageEnvironment + ( PackageEnvironment(..), PackageEnvironmentType(..) + , createPackageEnvironmentFile, classifyPackageEnvironment + , tryLoadSandboxPackageEnvironmentFile, loadUserConfig + , commentPackageEnvironment, showPackageEnvironmentWithComments + , sandboxPackageEnvironmentFile, userPackageEnvironmentFile + , sandboxPackageDBPath ) +import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) + , UseSandbox(..) ) +import Distribution.Client.SetupWrapper + ( SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Types ( PackageLocation(..) + , SourcePackage(..) ) +import Distribution.Client.Utils ( inDir, tryCanonicalizePath + , tryFindAddSourcePackageDesc) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.PackageDescription.Parse ( readPackageDescription ) +import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) + , PackageDBStack ) +import Distribution.Simple.Configure ( configCompilerAuxEx + , interpretPackageDbFlags + , getPackageDBContents + , maybeGetPersistBuildConfig + , findDistPrefOrDefault + , findDistPref ) +import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo +import Distribution.Simple.PreProcess ( knownSuffixHandlers ) +import Distribution.Simple.Program ( ProgramConfiguration ) +import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) + , fromFlagOrDefault, flagToMaybe ) +import Distribution.Simple.SrcDist ( prepareTree ) +import Distribution.Simple.Utils ( die, debug, notice, info, warn + , debugNoWrap, defaultPackageDesc + , intercalate, topHandlerWith + , createDirectoryIfMissingVerbose ) +import Distribution.Package ( Package(..) ) +import Distribution.System ( Platform ) +import Distribution.Text ( display ) +import Distribution.Verbosity ( Verbosity, lessVerbose ) +import Distribution.Compat.Environment ( lookupEnv, setEnv ) +import Distribution.Client.Compat.FilePerms ( setFileHidden ) +import qualified Distribution.Client.Sandbox.Index as Index +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import qualified Distribution.Simple.Register as Register +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Either (partitionEithers) +import Control.Exception ( assert, bracket_ ) +import Control.Monad ( forM, liftM, liftM2, unless, when ) +import Data.Bits ( shiftL, shiftR, xor ) +import Data.Char ( ord ) +import Data.IORef ( newIORef, writeIORef, readIORef ) +import Data.List ( delete + , foldl' + , intersperse + , isPrefixOf + , groupBy ) +import Data.Maybe ( fromJust ) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid ( mempty, mappend ) +#endif +import Data.Word ( Word32 ) +import Numeric ( showHex ) +import System.Directory ( canonicalizePath + , createDirectory + , doesDirectoryExist + , doesFileExist + , getCurrentDirectory + , removeDirectoryRecursive + , removeFile + , renameDirectory ) +import System.FilePath ( (), equalFilePath + , getSearchPath + , searchPathSeparator + , splitSearchPath + , takeDirectory ) + +-- +-- * Constants +-- + +-- | The name of the sandbox subdirectory where we keep snapshots of add-source +-- dependencies. 
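+-- For example, an add-source snapshot of a (hypothetical) package foo-1.0
+-- would end up under "<sandbox>/snapshots/foo-1.0"; see
+-- 'sandboxAddSourceSnapshot' below for how these directories are populated.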
+snapshotDirectoryName :: FilePath
+snapshotDirectoryName = "snapshots"
+
+-- | Non-standard build dir that is used for building add-source deps instead of
+-- "dist". Fixes surprising behaviour in some cases (see issue #1281).
+sandboxBuildDir :: FilePath -> FilePath
+sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash ""
+  where
+    sandboxDirHash = jenkins sandboxDir
+
+    -- See http://en.wikipedia.org/wiki/Jenkins_hash_function
+    jenkins :: String -> Word32
+    jenkins str = loop_finish $ foldl' loop 0 str
+      where
+        loop :: Word32 -> Char -> Word32
+        loop hash key_i' = hash'''
+          where
+            key_i   = toEnum . ord $ key_i'
+            hash'   = hash + key_i
+            hash''  = hash' + (shiftL hash' 10)
+            hash''' = hash'' `xor` (shiftR hash'' 6)
+
+        loop_finish :: Word32 -> Word32
+        loop_finish hash = hash'''
+          where
+            hash'   = hash + (shiftL hash 3)
+            hash''  = hash' `xor` (shiftR hash' 11)
+            hash''' = hash'' + (shiftL hash'' 15)
+
+--
+-- * Basic sandbox functions.
+--
+
+-- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the
+-- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to
+-- 'NoFlag'.
+updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags
+updateSandboxConfigFileFlag globalFlags =
+  case globalSandboxConfigFile globalFlags of
+    Flag _ -> return globalFlags
+    NoFlag -> do
+      f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG"
+      return globalFlags { globalSandboxConfigFile = f' }
+
+-- | Return the path to the sandbox config file - either the default or the one
+-- specified with @--sandbox-config-file@.
+getSandboxConfigFilePath :: GlobalFlags -> IO FilePath
+getSandboxConfigFilePath globalFlags = do
+  let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags
+  case sandboxConfigFileFlag of
+    NoFlag -> do pkgEnvDir <- getCurrentDirectory
+                 return (pkgEnvDir </> sandboxPackageEnvironmentFile)
+    Flag path -> return path
+
+-- | Load the @cabal.sandbox.config@ file (and possibly the optional
+-- @cabal.config@). In addition to a @PackageEnvironment@, also return a
+-- canonical path to the sandbox. Exit with error if the sandbox directory or
+-- the package environment file do not exist.
+tryLoadSandboxConfig :: Verbosity -> GlobalFlags
+                        -> IO (FilePath, PackageEnvironment)
+tryLoadSandboxConfig verbosity globalFlags = do
+  path <- getSandboxConfigFilePath globalFlags
+  tryLoadSandboxPackageEnvironmentFile verbosity path
+    (globalConfigFile globalFlags)
+
+-- | Return the name of the package index file for this package environment.
+tryGetIndexFilePath :: SavedConfig -> IO FilePath
+tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config)
+
+-- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of
+-- 'SavedConfig'.
+tryGetIndexFilePath' :: GlobalFlags -> IO FilePath
+tryGetIndexFilePath' globalFlags = do
+  let paths = fromNubList $ globalLocalRepos globalFlags
+  case paths of
+    [] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++
+                "no local repos found. " ++ checkConfiguration
+    _  -> return $ (last paths) </> Index.defaultIndexFileName
+  where
+    checkConfiguration = "Please check your configuration ('"
+                         ++ userPackageEnvironmentFile ++ "')."
+
+-- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error
+-- message than just pattern-matching.
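+--
+-- In a well-formed 'cabal.sandbox.config' the field in question is a single
+-- 'SpecificPackageDB' entry; purely as an illustration (the path below is made
+-- up), it is rendered along the lines of:
+--
+-- > package-db: /home/user/project/.cabal-sandbox/x86_64-linux-ghc-8.0.1-packages.conf.d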
+getSandboxPackageDB :: ConfigFlags -> IO PackageDB +getSandboxPackageDB configFlags = do + case configPackageDBs configFlags of + [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB + -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? + + [] -> + die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt + [_] -> + die $ "Unexpected contents of the 'package-db' field. " + ++ sandboxConfigCorrupt + _ -> + die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt + + where + sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." + + +-- | Which packages are installed in the sandbox package DB? +getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags + -> Compiler -> ProgramConfiguration + -> IO InstalledPackageIndex +getInstalledPackagesInSandbox verbosity configFlags comp conf = do + sandboxDB <- getSandboxPackageDB configFlags + getPackageDBContents verbosity comp sandboxDB conf + +-- | Temporarily add $SANDBOX_DIR/bin to $PATH. +withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a +withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir + where + -- TODO: Instead of modifying the global process state, it'd be better to + -- set the environment individually for each subprocess invocation. This + -- will have to wait until the Shell monad is implemented; without it the + -- required changes are too intrusive. + addBinDir :: IO () + addBinDir = do + mbOldPath <- lookupEnv "PATH" + let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) + mbOldPath + setEnv "PATH" newPath + + rmBinDir :: IO () + rmBinDir = do + oldPath <- getSearchPath + let newPath = intercalate [searchPathSeparator] + (delete sandboxBin oldPath) + setEnv "PATH" newPath + + sandboxBin = sandboxDir "bin" + +-- | Initialise a package DB for this compiler if it doesn't exist. +initPackageDBIfNeeded :: Verbosity -> ConfigFlags + -> Compiler -> ProgramConfiguration + -> IO () +initPackageDBIfNeeded verbosity configFlags comp conf = do + SpecificPackageDB dbPath <- getSandboxPackageDB configFlags + packageDBExists <- doesDirectoryExist dbPath + unless packageDBExists $ + Register.initPackageDB verbosity comp conf dbPath + when packageDBExists $ + debug verbosity $ "The package database already exists: " ++ dbPath + +-- | Entry point for the 'cabal sandbox dump-pkgenv' command. +dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () +dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + commentPkgEnv <- commentPackageEnvironment sandboxDir + putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv + +-- | Entry point for the 'cabal sandbox init' command. +sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () +sandboxInit verbosity sandboxFlags globalFlags = do + -- Warn if there's a 'cabal-dev' sandbox. + isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") + (doesFileExist $ "cabal-dev" "cabal.config") + when isCabalDevSandbox $ + warn verbosity $ + "You are apparently using a legacy (cabal-dev) sandbox. " + ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " + ++ "You may want to delete the 'cabal-dev' directory to prevent issues." + + -- Create the sandbox directory. 
+  let sandboxDir' = fromFlagOrDefault defaultSandboxLocation
+                    (sandboxLocation sandboxFlags)
+  createDirectoryIfMissingVerbose verbosity True sandboxDir'
+  sandboxDir <- tryCanonicalizePath sandboxDir'
+  setFileHidden sandboxDir
+
+  -- Determine which compiler to use (using the value from ~/.cabal/config).
+  userConfig <- loadConfig verbosity (globalConfigFile globalFlags)
+  (comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig)
+
+  -- Create the package environment file.
+  pkgEnvFile <- getSandboxConfigFilePath globalFlags
+  createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform
+  (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
+  let config      = pkgEnvSavedConfig pkgEnv
+      configFlags = savedConfigureFlags config
+
+  -- Create the index file if it doesn't exist.
+  indexFile <- tryGetIndexFilePath config
+  indexFileExists <- doesFileExist indexFile
+  if indexFileExists
+    then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir
+    else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir
+  Index.createEmpty verbosity indexFile
+
+  -- Create the package DB for the default compiler.
+  initPackageDBIfNeeded verbosity configFlags comp conf
+  maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
+    (compilerId comp) platform
+
+-- | Entry point for the 'cabal sandbox delete' command.
+sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
+sandboxDelete verbosity _sandboxFlags globalFlags = do
+  (useSandbox, _) <- loadConfigOrSandboxConfig
+                     verbosity
+                     globalFlags { globalRequireSandbox = Flag False }
+  case useSandbox of
+    NoSandbox -> warn verbosity "Not in a sandbox."
+    UseSandbox sandboxDir -> do
+      curDir     <- getCurrentDirectory
+      pkgEnvFile <- getSandboxConfigFilePath globalFlags
+
+      -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard
+      -- location.
+      let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $
+                                       curDir </> sandboxPackageEnvironmentFile
+
+      if isNonDefaultConfigLocation
+        then warn verbosity $ "Sandbox config file is in non-default location: '"
+                    ++ pkgEnvFile ++ "'.\n Please delete manually."
+        else removeFile pkgEnvFile
+
+      -- Remove the sandbox directory, unless we're using a shared sandbox.
+      let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $
+                                        curDir </> defaultSandboxLocation
+
+      when isNonDefaultSandboxLocation $
+        die $ "Non-default sandbox location used: '" ++ sandboxDir
+           ++ "'.\nAssuming a shared sandbox. Please delete '"
+           ++ sandboxDir ++ "' manually."
+
+      absSandboxDir <- canonicalizePath sandboxDir
+      notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir
+      removeDirectoryRecursive absSandboxDir
+
+      let
+        pathInsideSandbox = isPrefixOf absSandboxDir
+
+        -- Warn the user if deleting the sandbox deleted a package database
+        -- referenced in the current environment.
+        checkPackagePaths var = do
+          let
+            checkPath path = do
+              absPath <- canonicalizePath path
+              (when (pathInsideSandbox absPath) . warn verbosity)
+                (var ++ " refers to package database " ++ path
+                 ++ " inside the deleted sandbox.")
+          liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath
+
+      checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH"
+      checkPackagePaths "GHC_PACKAGE_PATH"
+      checkPackagePaths "GHCJS_PACKAGE_PATH"
+
+-- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'.
+doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment + -> BuildTreeRefType + -> IO () +doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do + let savedConfig = pkgEnvSavedConfig pkgEnv + indexFile <- tryGetIndexFilePath savedConfig + + -- If we're running 'sandbox add-source' for the first time for this compiler, + -- we need to create an initial timestamp record. + (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + + withAddTimestamps sandboxDir $ do + -- Path canonicalisation is done in addBuildTreeRefs, but we do it + -- twice because of the timestamps file. + buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs + Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType + return buildTreeRefs' + +-- | Entry point for the 'cabal sandbox add-source' command. +sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags + -> IO () +sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + + if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) + then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv + else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef + +-- | Entry point for the 'cabal sandbox add-source --snapshot' command. +sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath + -> PackageEnvironment + -> IO () +sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do + let snapshotDir = sandboxDir snapshotDirectoryName + + -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private + -- location. + createDirectoryIfMissingVerbose verbosity True snapshotDir + + -- Collect the package descriptions first, so that if some path does not refer + -- to a cabal package, we fail immediately. + pkgs <- forM buildTreeRefs $ \buildTreeRef -> + inDir (Just buildTreeRef) $ + return . flattenPackageDescription + =<< readPackageDescription verbosity + =<< defaultPackageDesc verbosity + + -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If + -- 'prepareTree' throws an error at any point, the old snapshots will still be + -- in consistent state. + tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> + inDir (Just buildTreeRef) $ do + let targetDir = snapshotDir (display . packageId $ pkg) + targetTmpDir = targetDir ++ "-tmp" + dirExists <- doesDirectoryExist targetTmpDir + when dirExists $ + removeDirectoryRecursive targetDir + createDirectory targetTmpDir + prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers + return (targetTmpDir, targetDir) + + -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to + -- "snapshots/$PKGNAME-$VERSION". + snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do + dirExists <- doesDirectoryExist targetDir + when dirExists $ + removeDirectoryRecursive targetDir + renameDirectory targetTmpDir targetDir + return targetDir + + -- Once the packages are copied, just 'add-source' them as usual. + doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef + +-- | Entry point for the 'cabal sandbox delete-source' command. 
+sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags + -> IO () +sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) + + (results, convDict) <- + Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs + + let (failedPaths, removedPaths) = partitionEithers results + removedRefs = fmap convDict removedPaths + + unless (null removedPaths) $ do + removeTimestamps sandboxDir removedPaths + + notice verbosity $ "Success deleting sources: " ++ + showL removedRefs ++ "\n\n" + + unless (null failedPaths) $ do + let groupedFailures = groupBy errorType failedPaths + mapM_ handleErrors groupedFailures + die $ "The sources with the above errors were skipped. (" ++ + showL (fmap getPath failedPaths) ++ ")" + + notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ + "source dependency, but does not remove the package " ++ + "from the sandbox package DB.\n\n" ++ + "Use 'sandbox hc-pkg -- unregister' to do that." + where + getPath (Index.ErrNonregisteredSource p) = p + getPath (Index.ErrNonexistentSource p) = p + + showPaths f = concat . intersperse " " . fmap (show . f) + + showL = showPaths id + + showE [] = return ' ' + showE errs = showPaths getPath errs + + errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} = + True + errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True + errorType _ _ = False + + handleErrors [] = return () + handleErrors errs@(Index.ErrNonregisteredSource{}:_) = + warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n") + handleErrors errs@(Index.ErrNonexistentSource{}:_) = + warn verbosity + ("Source directory not found for paths: " ++ showE errs ++ "\n" + ++ "If you are trying to delete a reference to a removed directory, " + ++ "please provide the full absolute path " + ++ "(as given by `sandbox list-sources`).\n\n") + +-- | Entry point for the 'cabal sandbox list-sources' command. +sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags + -> IO () +sandboxListSources verbosity _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) + + refs <- Index.listBuildTreeRefs verbosity + Index.ListIgnored Index.LinksAndSnapshots indexFile + when (null refs) $ + notice verbosity $ "Index file '" ++ indexFile + ++ "' has no references to local build trees." + when (not . null $ refs) $ do + notice verbosity $ "Source dependencies registered " + ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" + mapM_ putStrLn refs + notice verbosity $ "\nTo unregister source dependencies, " + ++ "use the 'sandbox delete-source' command." + +-- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ +-- tool with provided arguments, restricted to the sandbox. +sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () +sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv + -- Invoke hc-pkg for the most recently configured compiler (if any), + -- using the right package-db for the compiler (see #1935). 
+ (comp, platform, conf) <- getPersistOrConfigCompiler configFlags + let dir = sandboxPackageDBPath sandboxDir comp platform + dbStack = [GlobalPackageDB, SpecificPackageDB dir] + Register.invokeHcPkg verbosity comp conf dbStack extraArgs + +updateInstallDirs :: Flag Bool + -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) +updateInstallDirs userInstallFlag (useSandbox, savedConfig) = + case useSandbox of + NoSandbox -> + let savedConfig' = savedConfig { + savedConfigureFlags = configureFlags { + configInstallDirs = installDirs + } + } + in (useSandbox, savedConfig') + _ -> (useSandbox, savedConfig) + where + configureFlags = savedConfigureFlags savedConfig + userInstallDirs = savedUserInstallDirs savedConfig + globalInstallDirs = savedGlobalInstallDirs savedConfig + installDirs | userInstall = userInstallDirs + | otherwise = globalInstallDirs + userInstall = fromFlagOrDefault defaultUserInstall + (configUserInstall configureFlags `mappend` userInstallFlag) + +-- | Check which type of package environment we're in and return a +-- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates +-- whether we're working in a sandbox. +loadConfigOrSandboxConfig :: Verbosity + -> GlobalFlags -- ^ For @--config-file@ and + -- @--sandbox-config-file@. + -> IO (UseSandbox, SavedConfig) +loadConfigOrSandboxConfig verbosity globalFlags = do + let configFileFlag = globalConfigFile globalFlags + sandboxConfigFileFlag = globalSandboxConfigFile globalFlags + ignoreSandboxFlag = globalIgnoreSandbox globalFlags + + pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag + pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag + ignoreSandboxFlag + case pkgEnvType of + -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. + SandboxPackageEnvironment -> do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + -- ^ Prints an error message and exits on error. + let config = pkgEnvSavedConfig pkgEnv + return (UseSandbox sandboxDir, config) + + -- Only @cabal.config@ is present. + UserPackageEnvironment -> do + config <- loadConfig verbosity configFileFlag + userConfig <- loadUserConfig verbosity pkgEnvDir Nothing + let config' = config `mappend` userConfig + dieIfSandboxRequired config' + return (NoSandbox, config') + + -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. + AmbientPackageEnvironment -> do + config <- loadConfig verbosity configFileFlag + let globalConstraintsOpt = + flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config + globalConstraintConfig <- + loadUserConfig verbosity pkgEnvDir globalConstraintsOpt + let config' = config `mappend` globalConstraintConfig + dieIfSandboxRequired config + return (NoSandbox, config') + + where + -- Return the path to the package environment directory - either the + -- current directory or the one that @--sandbox-config-file@ resides in. + getPkgEnvDir :: (Flag FilePath) -> IO FilePath + getPkgEnvDir sandboxConfigFileFlag = do + case sandboxConfigFileFlag of + NoFlag -> getCurrentDirectory + Flag path -> tryCanonicalizePath . takeDirectory $ path + + -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. + dieIfSandboxRequired :: SavedConfig -> IO () + dieIfSandboxRequired config = checkFlag flag + where + flag = (globalRequireSandbox . savedGlobalFlags $ config) + `mappend` (globalRequireSandbox globalFlags) + checkFlag (Flag True) = + die $ "'require-sandbox' is set to True, but no sandbox is present. 
" + ++ "Use '--no-require-sandbox' if you want to override " + ++ "'require-sandbox' temporarily." + checkFlag (Flag False) = return () + checkFlag (NoFlag) = return () + +-- | Return the saved \"dist/\" prefix, or the default prefix. +findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath +findSavedDistPref config flagDistPref = do + let defDistPref = useDistPref defaultSetupScriptOptions + flagDistPref' = configDistPref (savedConfigureFlags config) + `mappend` flagDistPref + findDistPref defDistPref flagDistPref' + +-- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do +-- nothing. +maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a +maybeWithSandboxDirOnSearchPath NoSandbox act = act +maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = + withSandboxBinDirOnSearchPath sandboxDir $ act + +-- | Had reinstallAddSourceDeps actually reinstalled any dependencies? +data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled + +-- | Reinstall those add-source dependencies that have been modified since +-- we've last installed them. Assumes that we're working inside a sandbox. +reinstallAddSourceDeps :: Verbosity + -> ConfigFlags -> ConfigExFlags + -> InstallFlags -> GlobalFlags + -> FilePath + -> IO WereDepsReinstalled +reinstallAddSourceDeps verbosity configFlags' configExFlags + installFlags globalFlags sandboxDir = topHandler' $ do + let sandboxDistPref = sandboxBuildDir sandboxDir + configFlags = configFlags' + { configDistPref = Flag sandboxDistPref } + haddockFlags = mempty + { haddockDistPref = Flag sandboxDistPref } + (comp, platform, conf) <- configCompilerAux' configFlags + retVal <- newIORef NoDepsReinstalled + + withSandboxPackageInfo verbosity configFlags globalFlags + comp platform conf sandboxDir $ \sandboxPkgInfo -> + unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do + + withRepoContext verbosity globalFlags $ \repoContext -> do + let args :: InstallArgs + args = ((configPackageDB' configFlags) + ,repoContext + ,comp, platform, conf + ,UseSandbox sandboxDir, Just sandboxPkgInfo + ,globalFlags, configFlags, configExFlags, installFlags + ,haddockFlags) + + -- This can actually be replaced by a call to 'install', but we use a + -- lower-level API because of layer separation reasons. Additionally, we + -- might want to use some lower-level features this in the future. + withSandboxBinDirOnSearchPath sandboxDir $ do + installContext <- makeInstallContext verbosity args Nothing + installPlan <- foldProgress logMsg die' return =<< + makeInstallPlan verbosity args installContext + + processInstallPlan verbosity args installContext installPlan + writeIORef retVal ReinstalledSomeDeps + + readIORef retVal + + where + die' message = die (message ++ installFailedInSandbox) + -- TODO: use a better error message, remove duplication. + installFailedInSandbox = + "Note: when using a sandbox, all packages are required to have " + ++ "consistent dependencies. Try reinstalling/unregistering the " + ++ "offending packages or recreating the sandbox." + logMsg message rest = debugNoWrap verbosity message >> rest + + topHandler' = topHandlerWith $ \_ -> do + warn verbosity "Couldn't reinstall some add-source dependencies." + -- Here we can't know whether any deps have been reinstalled, so we have + -- to be conservative. + return ReinstalledSomeDeps + +-- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that +-- we don't update the timestamp file here - this is done in +-- 'postInstallActions'. 
+withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags + -> Compiler -> Platform -> ProgramConfiguration + -> FilePath + -> (SandboxPackageInfo -> IO ()) + -> IO () +withSandboxPackageInfo verbosity configFlags globalFlags + comp platform conf sandboxDir cont = do + -- List all add-source deps. + indexFile <- tryGetIndexFilePath' globalFlags + buildTreeRefs <- Index.listBuildTreeRefs verbosity + Index.DontListIgnored Index.OnlyLinks indexFile + let allAddSourceDepsSet = S.fromList buildTreeRefs + + -- List all packages installed in the sandbox. + installedPkgIndex <- getInstalledPackagesInSandbox verbosity + configFlags comp conf + let err = "Error reading sandbox package information." + -- Get the package descriptions for all add-source deps. + depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs + depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles + let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) + isInstalled pkgid = not . null + . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid + installedDepsMap = M.filter (isInstalled . packageId) depsMap + + -- Get the package ids of modified (and installed) add-source deps. + modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir + (compilerId comp) platform installedDepsMap + -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to + -- be a subset of the keys of 'depsMap'. + let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) + | modDepPath <- modifiedAddSourceDeps ] + modifiedDepsMap = M.fromList modifiedDeps + + assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) + if (null modifiedDeps) + then info verbosity $ "Found no modified add-source deps." + else notice verbosity $ "Some add-source dependencies have been modified. " + ++ "They will be reinstalled..." + + -- Get the package ids of the remaining add-source deps (some are possibly not + -- installed). + let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) + + -- Finally, assemble a 'SandboxPackageInfo'. + cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) + (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet + + where + toSourcePackage (path, pkgDesc) = SourcePackage + (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing + +-- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the +-- identity otherwise. +maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags + -> Compiler -> Platform -> ProgramConfiguration + -> UseSandbox + -> (Maybe SandboxPackageInfo -> IO ()) + -> IO () +maybeWithSandboxPackageInfo verbosity configFlags globalFlags + comp platform conf useSandbox cont = + case useSandbox of + NoSandbox -> cont Nothing + UseSandbox sandboxDir -> withSandboxPackageInfo verbosity + configFlags globalFlags + comp platform conf sandboxDir + (\spi -> cont (Just spi)) + +-- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that +-- case. +maybeReinstallAddSourceDeps :: Verbosity + -> Flag (Maybe Int) -- ^ The '-j' flag + -> ConfigFlags -- ^ Saved configure flags + -- (from dist/setup-config) + -> GlobalFlags + -> (UseSandbox, SavedConfig) + -> IO WereDepsReinstalled +maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' + globalFlags' (useSandbox, config) = do + case useSandbox of + NoSandbox -> return NoDepsReinstalled + UseSandbox sandboxDir -> do + -- Reinstall the modified add-source deps. 
+ let configFlags = savedConfigureFlags config + `mappendSomeSavedFlags` + configFlags' + configExFlags = defaultConfigExFlags + `mappend` savedConfigureExFlags config + installFlags' = defaultInstallFlags + `mappend` savedInstallFlags config + installFlags = installFlags' { + installNumJobs = installNumJobs installFlags' + `mappend` numJobsFlag + } + globalFlags = savedGlobalFlags config + -- This makes it possible to override things like 'remote-repo-cache' + -- from the command line. These options are hidden, and are only + -- useful for debugging, so this should be fine. + `mappend` globalFlags' + reinstallAddSourceDeps + verbosity configFlags configExFlags + installFlags globalFlags sandboxDir + + where + + -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ + -- because we don't want to auto-enable things like 'library-profiling' for + -- all add-source dependencies even if the user has passed + -- '--enable-library-profiling' to 'cabal configure'. These options are + -- supposed to be set in 'cabal.config'. + mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags + mappendSomeSavedFlags sandboxConfigFlags savedFlags = + sandboxConfigFlags { + configHcFlavor = configHcFlavor sandboxConfigFlags + `mappend` configHcFlavor savedFlags, + configHcPath = configHcPath sandboxConfigFlags + `mappend` configHcPath savedFlags, + configHcPkg = configHcPkg sandboxConfigFlags + `mappend` configHcPkg savedFlags, + configProgramPaths = configProgramPaths sandboxConfigFlags + `mappend` configProgramPaths savedFlags, + configProgramArgs = configProgramArgs sandboxConfigFlags + `mappend` configProgramArgs savedFlags, + -- NOTE: Unconditionally choosing the value from + -- 'dist/setup-config'. Sandbox package DB location may have been + -- changed by 'configure -w'. + configPackageDBs = configPackageDBs savedFlags + -- FIXME: Is this compatible with the 'inherit' feature? + } + +-- +-- Utils (transitionary) +-- +-- FIXME: configPackageDB' and configCompilerAux' don't really belong in this +-- module +-- + +configPackageDB' :: ConfigFlags -> PackageDBStack +configPackageDB' cfg = + interpretPackageDbFlags userInstall (configPackageDBs cfg) + where + userInstall = fromFlagOrDefault True (configUserInstall cfg) + +configCompilerAux' :: ConfigFlags + -> IO (Compiler, Platform, ProgramConfiguration) +configCompilerAux' configFlags = + configCompilerAuxEx configFlags + --FIXME: make configCompilerAux use a sensible verbosity + { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } + +-- | Try to read the most recently configured compiler from the +-- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it +-- cannot be read. 
+getPersistOrConfigCompiler :: ConfigFlags + -> IO (Compiler, Platform, ProgramConfiguration) +getPersistOrConfigCompiler configFlags = do + distPref <- findDistPrefOrDefault (configDistPref configFlags) + mlbi <- maybeGetPersistBuildConfig distPref + case mlbi of + Nothing -> do configCompilerAux' configFlags + Just lbi -> return ( LocalBuildInfo.compiler lbi + , LocalBuildInfo.hostPlatform lbi + , LocalBuildInfo.withPrograms lbi + ) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Security/HTTP.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Security/HTTP.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Security/HTTP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Security/HTTP.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,174 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StandaloneDeriving #-} +-- | Implementation of 'HttpLib' using cabal-install's own 'HttpTransport' +module Distribution.Client.Security.HTTP (HttpLib, transportAdapter) where + +-- stdlibs +import Control.Exception + ( Exception(..), IOException ) +import Data.List + ( intercalate ) +import Data.Typeable + ( Typeable ) +import System.Directory + ( getTemporaryDirectory ) +import Network.URI + ( URI ) +import qualified Data.ByteString.Lazy as BS.L +import qualified Network.HTTP as HTTP + +-- Cabal/cabal-install +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Client.HttpUtils + ( HttpTransport(..), HttpCode ) +import Distribution.Client.Utils + ( withTempFileName ) + +-- hackage-security +import Hackage.Security.Client +import Hackage.Security.Client.Repository.HttpLib +import Hackage.Security.Util.Checked +import Hackage.Security.Util.Pretty +import qualified Hackage.Security.Util.Lens as Lens + +{------------------------------------------------------------------------------- + 'HttpLib' implementation +-------------------------------------------------------------------------------} + +-- | Translate from hackage-security's 'HttpLib' to cabal-install's 'HttpTransport' +-- +-- NOTE: The match between these two APIs is currently not perfect: +-- +-- * We don't get any response headers back from the 'HttpTransport', so we +-- don't know if the server supports range requests. For now we optimistically +-- assume that it does. +-- * The 'HttpTransport' wants to know where to place the resulting file, +-- whereas the 'HttpLib' expects an 'IO' action which streams the download; +-- the security library then makes sure that the file gets written to a +-- location which is suitable (in particular, to a temporary file in the +-- directory where the file needs to end up, so that it can "finalize" the +-- file simply by doing 'renameFile'). Right now we write the file to a +-- temporary file in the system temp directory here and then read it again +-- to pass it to the security library; this is a problem for two reasons: it +-- is a source of inefficiency; and it means that the security library cannot +-- insist on a minimum download rate (potential security attack). +-- Fixing it however would require changing the 'HttpTransport'. 
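The temp-file round trip described in the note above can be sketched in isolation. The following is a simplified, hypothetical illustration (the names fetchViaTempFile and download are not part of cabal-install or hackage-security); it only shows the "write the response body to a temporary file, then re-read it lazily" shape that the adapter below relies on, using plain base/directory/bytestring functions:

import qualified Data.ByteString.Lazy as BSL
import System.Directory (getTemporaryDirectory, removeFile)
import System.IO (hClose, openTempFile)

-- | Run a downloader that writes its result to the given path, then hand
-- the body back as a lazy ByteString. 'download' stands in for an
-- 'HttpTransport'-style action such as 'getHttp'.
fetchViaTempFile :: (FilePath -> IO ()) -> IO BSL.ByteString
fetchViaTempFile download = do
  tmpDir    <- getTemporaryDirectory
  (path, h) <- openTempFile tmpDir "transport-adapter"
  hClose h                      -- we only need the unique file name here
  download path                 -- e.g. an HTTP GET writing to 'path'
  body <- BSL.readFile path
  -- Force the whole body before deleting the file, because BSL.readFile
  -- reads lazily; this mirrors the inefficiency the note above mentions.
  BSL.length body `seq` removeFile path
  return body

In the real adapter the lazily read bytes are additionally wrapped with bodyReaderFromBS so that the security library can consume them incrementally.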
+transportAdapter :: Verbosity -> IO HttpTransport -> HttpLib +transportAdapter verbosity getTransport = HttpLib{ + httpGet = \headers uri callback -> do + transport <- getTransport + get verbosity transport headers uri callback + , httpGetRange = \headers uri range callback -> do + transport <- getTransport + getRange verbosity transport headers uri range callback + } + +get :: Throws SomeRemoteError + => Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI + -> ([HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +get verbosity transport reqHeaders uri callback = wrapCustomEx $ do + get' verbosity transport reqHeaders uri Nothing $ \code respHeaders br -> + case code of + 200 -> callback respHeaders br + _ -> throwChecked $ UnexpectedResponse uri code + +getRange :: Throws SomeRemoteError + => Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI -> (Int, Int) + -> (HttpStatus -> [HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +getRange verbosity transport reqHeaders uri range callback = wrapCustomEx $ do + get' verbosity transport reqHeaders uri (Just range) $ \code respHeaders br -> + case code of + 200 -> callback HttpStatus200OK respHeaders br + 206 -> callback HttpStatus206PartialContent respHeaders br + _ -> throwChecked $ UnexpectedResponse uri code + +-- | Internal generalization of 'get' and 'getRange' +get' :: Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI -> Maybe (Int, Int) + -> (HttpCode -> [HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +get' verbosity transport reqHeaders uri mRange callback = do + tempDir <- getTemporaryDirectory + withTempFileName tempDir "transportAdapterGet" $ \temp -> do + (code, _etag) <- getHttp transport verbosity uri Nothing temp reqHeaders' + br <- bodyReaderFromBS =<< BS.L.readFile temp + callback code [HttpResponseAcceptRangesBytes] br + where + reqHeaders' = mkReqHeaders reqHeaders mRange + +{------------------------------------------------------------------------------- + Request headers +-------------------------------------------------------------------------------} + +mkRangeHeader :: Int -> Int -> HTTP.Header +mkRangeHeader from to = HTTP.Header HTTP.HdrRange rangeHeader + where + -- Content-Range header uses inclusive rather than exclusive bounds + -- See + rangeHeader = "bytes=" ++ show from ++ "-" ++ show (to - 1) + +mkReqHeaders :: [HttpRequestHeader] -> Maybe (Int, Int) -> [HTTP.Header] +mkReqHeaders reqHeaders mRange = concat [ + tr [] reqHeaders + , [mkRangeHeader fr to | Just (fr, to) <- [mRange]] + ] + where + tr :: [(HTTP.HeaderName, [String])] -> [HttpRequestHeader] -> [HTTP.Header] + tr acc [] = + concatMap finalize acc + tr acc (HttpRequestMaxAge0:os) = + tr (insert HTTP.HdrCacheControl ["max-age=0"] acc) os + tr acc (HttpRequestNoTransform:os) = + tr (insert HTTP.HdrCacheControl ["no-transform"] acc) os + + -- Some headers are comma-separated, others need multiple headers for + -- multiple options. + -- + -- TODO: Right we we just comma-separate all of them. 
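+ -- For example, asking for both 'HttpRequestMaxAge0' and
+ -- 'HttpRequestNoTransform' ends up as a single Cache-Control header
+ -- carrying both directives, rather than as two separate headers.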
+ finalize :: (HTTP.HeaderName, [String]) -> [HTTP.Header] + finalize (name, strs) = [HTTP.Header name (intercalate ", " (reverse strs))] + + insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] + insert x y = Lens.modify (Lens.lookupM x) (++ y) + +{------------------------------------------------------------------------------- + Custom exceptions +-------------------------------------------------------------------------------} + +data UnexpectedResponse = UnexpectedResponse URI Int + deriving (Typeable) + +instance Pretty UnexpectedResponse where + pretty (UnexpectedResponse uri code) = "Unexpected response " ++ show code + ++ "for " ++ show uri + +#if MIN_VERSION_base(4,8,0) +deriving instance Show UnexpectedResponse +instance Exception UnexpectedResponse where displayException = pretty +#else +instance Show UnexpectedResponse where show = pretty +instance Exception UnexpectedResponse +#endif + +wrapCustomEx :: ( ( Throws UnexpectedResponse + , Throws IOException + ) => IO a) + -> (Throws SomeRemoteError => IO a) +wrapCustomEx act = handleChecked (\(ex :: UnexpectedResponse) -> go ex) + $ handleChecked (\(ex :: IOException) -> go ex) + $ act + where + go ex = throwChecked (SomeRemoteError ex) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Setup.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,2192 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Setup +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- +----------------------------------------------------------------------------- +module Distribution.Client.Setup + ( globalCommand, GlobalFlags(..), defaultGlobalFlags + , RepoContext(..), withRepoContext + , configureCommand, ConfigFlags(..), filterConfigureFlags + , configureExCommand, ConfigExFlags(..), defaultConfigExFlags + , configureExOptions + , buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) + , replCommand, testCommand, benchmarkCommand + , installCommand, InstallFlags(..), installOptions, defaultInstallFlags + , defaultSolver, defaultMaxBackjumps + , listCommand, ListFlags(..) + , updateCommand + , upgradeCommand + , uninstallCommand + , infoCommand, InfoFlags(..) + , fetchCommand, FetchFlags(..) + , freezeCommand, FreezeFlags(..) + , genBoundsCommand + , getCommand, unpackCommand, GetFlags(..) + , checkCommand + , formatCommand + , uploadCommand, UploadFlags(..) + , reportCommand, ReportFlags(..) + , runCommand + , initCommand, IT.InitFlags(..) + , sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) + , win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..) + , actAsSetupCommand, ActAsSetupFlags(..) + , sandboxCommand, defaultSandboxLocation, SandboxFlags(..) + , execCommand, ExecFlags(..) + , userConfigCommand, UserConfigFlags(..) 
+ , manpageCommand + + , parsePackageArgs + --TODO: stop exporting these: + , showRepo + , parseRepo + , readRepo + ) where + +import Distribution.Client.Types + ( Username(..), Password(..), RemoteRepo(..) ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) +import Distribution.Client.Dependency.Types + ( PreSolver(..), ConstraintSource(..) ) +import qualified Distribution.Client.Init.Types as IT + ( InitFlags(..), PackageType(..) ) +import Distribution.Client.Targets + ( UserConstraint, readUserConstraint ) +import Distribution.Utils.NubList + ( NubList, toNubList, fromNubList) + + +import Distribution.Simple.Compiler (PackageDB) +import Distribution.Simple.Program + ( defaultProgramConfiguration ) +import Distribution.Simple.Command hiding (boolOpt, boolOpt') +import qualified Distribution.Simple.Command as Command +import Distribution.Simple.Configure ( computeEffectiveProfiling ) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Setup + ( ConfigFlags(..), BuildFlags(..), ReplFlags + , TestFlags(..), BenchmarkFlags(..) + , SDistFlags(..), HaddockFlags(..) + , readPackageDbList, showPackageDbList + , Flag(..), toFlag, flagToMaybe, flagToList + , optionVerbosity, boolOpt, boolOpt', trueArg, falseArg + , readPToMaybe, optionNumJobs ) +import Distribution.Simple.InstallDirs + ( PathTemplate, InstallDirs(dynlibdir, sysconfdir) + , toPathTemplate, fromPathTemplate ) +import Distribution.Version + ( Version(Version), anyVersion, thisVersion ) +import Distribution.Package + ( PackageIdentifier, packageName, packageVersion, Dependency(..) ) +import Distribution.PackageDescription + ( BuildType(..), RepoKind(..) ) +import Distribution.Text + ( Text(..), display ) +import Distribution.ReadE + ( ReadE(..), readP_to_E, succeedReadE ) +import qualified Distribution.Compat.ReadP as Parse + ( ReadP, char, munch1, pfail, (+++) ) +import Distribution.Compat.Semigroup +import Distribution.Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Utils + ( wrapText, wrapLine ) +import Distribution.Client.GlobalFlags + ( GlobalFlags(..), defaultGlobalFlags + , RepoContext(..), withRepoContext + ) + +import Data.Char + ( isAlphaNum ) +import Data.List + ( intercalate, deleteFirstsBy ) +import Data.Maybe + ( maybeToList, fromMaybe ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary) +import Control.Monad + ( liftM ) +import System.FilePath + ( () ) +import Network.URI + ( parseAbsoluteURI, uriToString ) + +globalCommand :: [Command action] -> CommandUI GlobalFlags +globalCommand commands = CommandUI { + commandName = "", + commandSynopsis = + "Command line interface to the Haskell Cabal infrastructure.", + commandUsage = \pname -> + "See http://www.haskell.org/cabal/ for more information.\n" + ++ "\n" + ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", + commandDescription = Just $ \pname -> + let + commands' = commands ++ [commandAddAction helpCommandUI undefined] + cmdDescs = getNormalCommandDescriptions commands' + -- if new commands are added, we want them to appear even if they + -- are not included in the custom listing below. Thus, we calculate + -- the `otherCmds` list and append it under the `other` category. + -- Alternatively, a new testcase could be added that ensures that + -- the set of commands listed here is equal to the set of commands + -- that are actually available. 
+ otherCmds = deleteFirstsBy (==) (map fst cmdDescs) + [ "help" + , "update" + , "install" + , "fetch" + , "list" + , "info" + , "user-config" + , "get" + , "init" + , "configure" + , "build" + , "clean" + , "run" + , "repl" + , "test" + , "bench" + , "check" + , "sdist" + , "upload" + , "report" + , "freeze" + , "gen-bounds" + , "haddock" + , "hscolour" + , "copy" + , "register" + , "sandbox" + , "exec" + ] + maxlen = maximum $ [length name | (name, _) <- cmdDescs] + align str = str ++ replicate (maxlen - length str) ' ' + startGroup n = " ["++n++"]" + par = "" + addCmd n = case lookup n cmdDescs of + Nothing -> "" + Just d -> " " ++ align n ++ " " ++ d + addCmdCustom n d = case lookup n cmdDescs of -- make sure that the + -- command still exists. + Nothing -> "" + Just _ -> " " ++ align n ++ " " ++ d + in + "Commands:\n" + ++ unlines ( + [ startGroup "global" + , addCmd "update" + , addCmd "install" + , par + , addCmd "help" + , addCmd "info" + , addCmd "list" + , addCmd "fetch" + , addCmd "user-config" + , par + , startGroup "package" + , addCmd "get" + , addCmd "init" + , par + , addCmd "configure" + , addCmd "build" + , addCmd "clean" + , par + , addCmd "run" + , addCmd "repl" + , addCmd "test" + , addCmd "bench" + , par + , addCmd "check" + , addCmd "sdist" + , addCmd "upload" + , addCmd "report" + , par + , addCmd "freeze" + , addCmd "gen-bounds" + , addCmd "haddock" + , addCmd "hscolour" + , addCmd "copy" + , addCmd "register" + , par + , startGroup "sandbox" + , addCmd "sandbox" + , addCmd "exec" + , addCmdCustom "repl" "Open interpreter with access to sandbox packages." + ] ++ if null otherCmds then [] else par + :startGroup "other" + :[addCmd n | n <- otherCmds]) + ++ "\n" + ++ "For more information about a command use:\n" + ++ " " ++ pname ++ " COMMAND --help\n" + ++ "or " ++ pname ++ " help COMMAND\n" + ++ "\n" + ++ "To install Cabal packages from hackage use:\n" + ++ " " ++ pname ++ " install foo [--dry-run]\n" + ++ "\n" + ++ "Occasionally you need to update the list of available packages:\n" + ++ " " ++ pname ++ " update\n", + commandNotes = Nothing, + commandDefaultFlags = mempty, + commandOptions = args + } + where + args :: ShowOrParseArgs -> [OptionField GlobalFlags] + args ShowArgs = argsShown + args ParseArgs = argsShown ++ argsNotShown + + -- arguments we want to show in the help + argsShown :: [OptionField GlobalFlags] + argsShown = [ + option ['V'] ["version"] + "Print version information" + globalVersion (\v flags -> flags { globalVersion = v }) + trueArg + + ,option [] ["numeric-version"] + "Print just the version number" + globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) + trueArg + + ,option [] ["config-file"] + "Set an alternate location for the config file" + globalConfigFile (\v flags -> flags { globalConfigFile = v }) + (reqArgFlag "FILE") + + ,option [] ["sandbox-config-file"] + "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')" + globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v }) + (reqArgFlag "FILE") + + ,option [] ["default-user-config"] + "Set a location for a cabal.config file for projects without their own cabal.config freeze file." 
+ globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v}) + (reqArgFlag "FILE") + + ,option [] ["require-sandbox"] + "requiring the presence of a sandbox for sandbox-aware commands" + globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v }) + (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"])) + + ,option [] ["ignore-sandbox"] + "Ignore any existing sandbox" + globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v }) + trueArg + + ,option [] ["ignore-expiry"] + "Ignore expiry dates on signed metadata (use only in exceptional circumstances)" + globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v }) + trueArg + + ,option [] ["http-transport"] + "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')" + globalHttpTransport (\v flags -> flags { globalHttpTransport = v }) + (reqArgFlag "HttpTransport") + ] + + -- arguments we don't want shown in the help + argsNotShown :: [OptionField GlobalFlags] + argsNotShown = [ + option [] ["remote-repo"] + "The name and url for a remote repository" + globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) + (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . fromNubList)) + + ,option [] ["remote-repo-cache"] + "The location where downloads from all remote repos are cached" + globalCacheDir (\v flags -> flags { globalCacheDir = v }) + (reqArgFlag "DIR") + + ,option [] ["local-repo"] + "The location of a local repository" + globalLocalRepos (\v flags -> flags { globalLocalRepos = v }) + (reqArg' "DIR" (\x -> toNubList [x]) fromNubList) + + ,option [] ["logs-dir"] + "The location to put log files" + globalLogsDir (\v flags -> flags { globalLogsDir = v }) + (reqArgFlag "DIR") + + ,option [] ["world-file"] + "The location of the world file" + globalWorldFile (\v flags -> flags { globalWorldFile = v }) + (reqArgFlag "FILE") + ] + +-- ------------------------------------------------------------ +-- * Config flags +-- ------------------------------------------------------------ + +configureCommand :: CommandUI ConfigFlags +configureCommand = c + { commandDefaultFlags = mempty + , commandNotes = Just $ \pname -> (case commandNotes c of + Nothing -> "" + Just n -> n pname ++ "\n") + ++ "Examples:\n" + ++ " " ++ pname ++ " configure\n" + ++ " Configure with defaults;\n" + ++ " " ++ pname ++ " configure --enable-tests -fcustomflag\n" + ++ " Configure building package including tests,\n" + ++ " with some package-specific flag.\n" + } + where + c = Cabal.configureCommand defaultProgramConfiguration + +configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] +configureOptions = commandOptions configureCommand + +filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags +filterConfigureFlags flags cabalLibVersion + | cabalLibVersion >= Version [1,24,1] [] = flags_latest + -- ^ NB: we expect the latest version to be the most common case. 
+ | cabalLibVersion < Version [1,3,10] [] = flags_1_3_10 + | cabalLibVersion < Version [1,10,0] [] = flags_1_10_0 + | cabalLibVersion < Version [1,12,0] [] = flags_1_12_0 + | cabalLibVersion < Version [1,14,0] [] = flags_1_14_0 + | cabalLibVersion < Version [1,18,0] [] = flags_1_18_0 + | cabalLibVersion < Version [1,19,1] [] = flags_1_19_0 + | cabalLibVersion < Version [1,19,2] [] = flags_1_19_1 + | cabalLibVersion < Version [1,21,1] [] = flags_1_20_0 + | cabalLibVersion < Version [1,22,0] [] = flags_1_21_0 + | cabalLibVersion < Version [1,23,0] [] = flags_1_22_0 + | cabalLibVersion < Version [1,24,1] [] = flags_1_24_0 + | otherwise = flags_latest + where + (profEnabledLib, profEnabledExe) = computeEffectiveProfiling flags + flags_latest = flags { + -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. + configConstraints = [], + -- Passing '--allow-newer' to Setup.hs is unnecessary, we use + -- '--exact-configuration' instead. + configAllowNewer = Just Cabal.AllowNewerNone + } + + -- Cabal < 1.24.1 doesn't know about --dynlibdir. + flags_1_24_0 = flags_latest { configInstallDirs = configInstallDirs_1_24_0} + configInstallDirs_1_24_0 = (configInstallDirs flags) { dynlibdir = NoFlag } + + -- Cabal < 1.23 doesn't know about '--profiling-detail'. + -- Cabal < 1.23 has a hacked up version of 'enable-profiling' + -- which we shouldn't use. + flags_1_22_0 = flags_1_24_0 { configProfDetail = NoFlag + , configProfLibDetail = NoFlag + , configIPID = NoFlag + , configProf = NoFlag + , configProfExe = Flag profEnabledExe + , configProfLib = Flag profEnabledLib + } + + -- Cabal < 1.22 doesn't know about '--disable-debug-info'. + flags_1_21_0 = flags_1_22_0 { configDebugInfo = NoFlag } + + -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' + -- Cabal < 1.21.1 doesn't know about 'enable-profiling' + -- (but we already dealt with it in flags_1_22_0) + flags_1_20_0 = + flags_1_21_0 { configRelocatable = NoFlag + , configCoverage = NoFlag + , configLibCoverage = configCoverage flags + } + -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and + -- '--enable-library-stripping'. + flags_1_19_1 = flags_1_20_0 { configExactConfiguration = NoFlag + , configStripLibs = NoFlag } + -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. + flags_1_19_0 = flags_1_19_1 { configDependencies = [] + , configConstraints = configConstraints flags } + -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. + flags_1_18_0 = flags_1_19_0 { configProgramPathExtra = toNubList [] + , configInstallDirs = configInstallDirs_1_18_0} + configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_0) { sysconfdir = NoFlag } + -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. + flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } + -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic' + -- and '--enable/disable-library-coverage'. + flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag + , configDynExe = NoFlag } + -- Cabal < 1.10.0 doesn't know about '--disable-tests'. + flags_1_10_0 = flags_1_12_0 { configTests = NoFlag } + -- Cabal < 1.3.10 does not grok the '--constraints' flag. 
+ flags_1_3_10 = flags_1_10_0 { configConstraints = [] } + +-- ------------------------------------------------------------ +-- * Config extra flags +-- ------------------------------------------------------------ + +-- | cabal configure takes some extra flags beyond runghc Setup configure +-- +data ConfigExFlags = ConfigExFlags { + configCabalVersion :: Flag Version, + configExConstraints:: [(UserConstraint, ConstraintSource)], + configPreferences :: [Dependency], + configSolver :: Flag PreSolver + } + deriving (Eq, Generic) + +defaultConfigExFlags :: ConfigExFlags +defaultConfigExFlags = mempty { configSolver = Flag defaultSolver } + +configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) +configureExCommand = configureCommand { + commandDefaultFlags = (mempty, defaultConfigExFlags), + commandOptions = \showOrParseArgs -> + liftOptions fst setFst + (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) + . optionName) $ configureOptions showOrParseArgs) + ++ liftOptions snd setSnd + (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + +configureExOptions :: ShowOrParseArgs + -> ConstraintSource + -> [OptionField ConfigExFlags] +configureExOptions _showOrParseArgs src = + [ option [] ["cabal-lib-version"] + ("Select which version of the Cabal lib to use to build packages " + ++ "(useful for testing).") + configCabalVersion (\v flags -> flags { configCabalVersion = v }) + (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++) + (fmap toFlag parse)) + (map display . flagToList)) + , option [] ["constraint"] + "Specify constraints on a package (version, installed/source, flags)" + configExConstraints (\v flags -> flags { configExConstraints = v }) + (reqArg "CONSTRAINT" + ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint) + (map $ display . 
fst)) + + , option [] ["preference"] + "Specify preferences (soft constraints) on the version of a package" + configPreferences (\v flags -> flags { configPreferences = v }) + (reqArg "CONSTRAINT" + (readP_to_E (const "dependency expected") + (fmap (\x -> [x]) parse)) + (map display)) + + , optionSolver configSolver (\v flags -> flags { configSolver = v }) + + ] + +instance Monoid ConfigExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ConfigExFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Build flags +-- ------------------------------------------------------------ + +data SkipAddSourceDepsCheck = + SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck + deriving Eq + +data BuildExFlags = BuildExFlags { + buildOnly :: Flag SkipAddSourceDepsCheck +} deriving Generic + +buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags] +buildExOptions _showOrParseArgs = + option [] ["only"] + "Don't reinstall add-source dependencies (sandbox-only)" + buildOnly (\v flags -> flags { buildOnly = v }) + (noArg (Flag SkipAddSourceDepsCheck)) + + : [] + +buildCommand :: CommandUI (BuildFlags, BuildExFlags) +buildCommand = parent { + commandDefaultFlags = (commandDefaultFlags parent, mempty), + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd (buildExOptions showOrParseArgs) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.buildCommand defaultProgramConfiguration + +instance Monoid BuildExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup BuildExFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Repl command +-- ------------------------------------------------------------ + +replCommand :: CommandUI (ReplFlags, BuildExFlags) +replCommand = parent { + commandDefaultFlags = (commandDefaultFlags parent, mempty), + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd (buildExOptions showOrParseArgs) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.replCommand defaultProgramConfiguration + +-- ------------------------------------------------------------ +-- * Test command +-- ------------------------------------------------------------ + +testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags) +testCommand = parent { + commandDefaultFlags = (commandDefaultFlags parent, + Cabal.defaultBuildFlags, mempty), + commandOptions = + \showOrParseArgs -> liftOptions get1 set1 + (commandOptions parent showOrParseArgs) + ++ + liftOptions get2 set2 + (Cabal.buildOptions progConf showOrParseArgs) + ++ + liftOptions get3 set3 (buildExOptions showOrParseArgs) + } + where + get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) + get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) + get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) + + parent = Cabal.testCommand + progConf = defaultProgramConfiguration + +-- ------------------------------------------------------------ +-- * Bench command +-- ------------------------------------------------------------ + +benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags) +benchmarkCommand = parent { + commandDefaultFlags = (commandDefaultFlags parent, + Cabal.defaultBuildFlags, mempty), + commandOptions = + \showOrParseArgs -> liftOptions get1 set1 + (commandOptions parent showOrParseArgs) + ++ + liftOptions get2 set2 
+ (Cabal.buildOptions progConf showOrParseArgs) + ++ + liftOptions get3 set3 (buildExOptions showOrParseArgs) + } + where + get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) + get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) + get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) + + parent = Cabal.benchmarkCommand + progConf = defaultProgramConfiguration + +-- ------------------------------------------------------------ +-- * Fetch command +-- ------------------------------------------------------------ + +data FetchFlags = FetchFlags { +-- fetchOutput :: Flag FilePath, + fetchDeps :: Flag Bool, + fetchDryRun :: Flag Bool, + fetchSolver :: Flag PreSolver, + fetchMaxBackjumps :: Flag Int, + fetchReorderGoals :: Flag Bool, + fetchIndependentGoals :: Flag Bool, + fetchShadowPkgs :: Flag Bool, + fetchStrongFlags :: Flag Bool, + fetchVerbosity :: Flag Verbosity + } + +defaultFetchFlags :: FetchFlags +defaultFetchFlags = FetchFlags { +-- fetchOutput = mempty, + fetchDeps = toFlag True, + fetchDryRun = toFlag False, + fetchSolver = Flag defaultSolver, + fetchMaxBackjumps = Flag defaultMaxBackjumps, + fetchReorderGoals = Flag False, + fetchIndependentGoals = Flag False, + fetchShadowPkgs = Flag False, + fetchStrongFlags = Flag False, + fetchVerbosity = toFlag normal + } + +fetchCommand :: CommandUI FetchFlags +fetchCommand = CommandUI { + commandName = "fetch", + commandSynopsis = "Downloads packages for later installation.", + commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" + ], + commandDescription = Just $ \_ -> + "Note that it currently is not possible to fetch the dependencies for a\n" + ++ "package in the current directory.\n", + commandNotes = Nothing, + commandDefaultFlags = defaultFetchFlags, + commandOptions = \ showOrParseArgs -> [ + optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) + +-- , option "o" ["output"] +-- "Put the package(s) somewhere specific rather than the usual cache." +-- fetchOutput (\v flags -> flags { fetchOutput = v }) +-- (reqArgFlag "PATH") + + , option [] ["dependencies", "deps"] + "Resolve and fetch dependencies (default)" + fetchDeps (\v flags -> flags { fetchDeps = v }) + trueArg + + , option [] ["no-dependencies", "no-deps"] + "Ignore dependencies" + fetchDeps (\v flags -> flags { fetchDeps = v }) + falseArg + + , option [] ["dry-run"] + "Do not install anything, only print what would be installed." 
+ fetchDryRun (\v flags -> flags { fetchDryRun = v }) + trueArg + + ] ++ + + optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : + optionSolverFlags showOrParseArgs + fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) + fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) + fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) + fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) + fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) + + } + +-- ------------------------------------------------------------ +-- * Freeze command +-- ------------------------------------------------------------ + +data FreezeFlags = FreezeFlags { + freezeDryRun :: Flag Bool, + freezeTests :: Flag Bool, + freezeBenchmarks :: Flag Bool, + freezeSolver :: Flag PreSolver, + freezeMaxBackjumps :: Flag Int, + freezeReorderGoals :: Flag Bool, + freezeIndependentGoals :: Flag Bool, + freezeShadowPkgs :: Flag Bool, + freezeStrongFlags :: Flag Bool, + freezeVerbosity :: Flag Verbosity + } + +defaultFreezeFlags :: FreezeFlags +defaultFreezeFlags = FreezeFlags { + freezeDryRun = toFlag False, + freezeTests = toFlag False, + freezeBenchmarks = toFlag False, + freezeSolver = Flag defaultSolver, + freezeMaxBackjumps = Flag defaultMaxBackjumps, + freezeReorderGoals = Flag False, + freezeIndependentGoals = Flag False, + freezeShadowPkgs = Flag False, + freezeStrongFlags = Flag False, + freezeVerbosity = toFlag normal + } + +freezeCommand :: CommandUI FreezeFlags +freezeCommand = CommandUI { + commandName = "freeze", + commandSynopsis = "Freeze dependencies.", + commandDescription = Just $ \_ -> wrapText $ + "Calculates a valid set of dependencies and their exact versions. " + ++ "If successful, saves the result to the file `cabal.config`.\n" + ++ "\n" + ++ "The package versions specified in `cabal.config` will be used for " + ++ "any future installs.\n" + ++ "\n" + ++ "An existing `cabal.config` is ignored and overwritten.\n", + commandNotes = Nothing, + commandUsage = usageFlags "freeze", + commandDefaultFlags = defaultFreezeFlags, + commandOptions = \ showOrParseArgs -> [ + optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) + + , option [] ["dry-run"] + "Do not freeze anything, only print what would be frozen" + freezeDryRun (\v flags -> flags { freezeDryRun = v }) + trueArg + + , option [] ["tests"] + "freezing of the dependencies of any tests suites in the package description file." + freezeTests (\v flags -> flags { freezeTests = v }) + (boolOpt [] []) + + , option [] ["benchmarks"] + "freezing of the dependencies of any benchmarks suites in the package description file." + freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) + (boolOpt [] []) + + ] ++ + + optionSolver freezeSolver (\v flags -> flags { freezeSolver = v }) : + optionSolverFlags showOrParseArgs + freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) + freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) + freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v }) + freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v }) + freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v }) + + } + +genBoundsCommand :: CommandUI FreezeFlags +genBoundsCommand = CommandUI { + commandName = "gen-bounds", + commandSynopsis = "Generate dependency bounds.", + commandDescription = Just $ \_ -> wrapText $ + "Generates bounds for all dependencies that do not currently have them. 
" + ++ "Generated bounds are printed to stdout. You can then paste them into your .cabal file.\n" + ++ "\n", + commandNotes = Nothing, + commandUsage = usageFlags "gen-bounds", + commandDefaultFlags = defaultFreezeFlags, + commandOptions = \ _ -> [ + optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) + ] + } + +-- ------------------------------------------------------------ +-- * Other commands +-- ------------------------------------------------------------ + +updateCommand :: CommandUI (Flag Verbosity) +updateCommand = CommandUI { + commandName = "update", + commandSynopsis = "Updates list of known packages.", + commandDescription = Just $ \_ -> + "For all known remote repositories, download the package list.\n", + commandNotes = Just $ \_ -> + relevantConfigValuesText ["remote-repo" + ,"remote-repo-cache" + ,"local-repo"], + commandUsage = usageFlags "update", + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [optionVerbosity id const] + } + +upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +upgradeCommand = configureCommand { + commandName = "upgrade", + commandSynopsis = "(command disabled, use install instead)", + commandDescription = Nothing, + commandUsage = usageFlagsOrPackages "upgrade", + commandDefaultFlags = (mempty, mempty, mempty, mempty), + commandOptions = commandOptions installCommand + } + +{- +cleanCommand :: CommandUI () +cleanCommand = makeCommand name shortDesc longDesc emptyFlags options + where + name = "clean" + shortDesc = "Removes downloaded files" + longDesc = Nothing + emptyFlags = () + options _ = [] +-} + +checkCommand :: CommandUI (Flag Verbosity) +checkCommand = CommandUI { + commandName = "check", + commandSynopsis = "Check the package for common mistakes.", + commandDescription = Just $ \_ -> wrapText $ + "Expects a .cabal package file in the current directory.\n" + ++ "\n" + ++ "The checks correspond to the requirements to packages on Hackage. 
" + ++ "If no errors and warnings are reported, Hackage will accept this " + ++ "package.\n", + commandNotes = Nothing, + commandUsage = \pname -> "Usage: " ++ pname ++ " check\n", + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [] + } + +formatCommand :: CommandUI (Flag Verbosity) +formatCommand = CommandUI { + commandName = "format", + commandSynopsis = "Reformat the .cabal file using the standard style.", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = usageAlternatives "format" ["[FILE]"], + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [] + } + +uninstallCommand :: CommandUI (Flag Verbosity) +uninstallCommand = CommandUI { + commandName = "uninstall", + commandSynopsis = "Warn about 'uninstall' not being implemented.", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = usageAlternatives "uninstall" ["PACKAGES"], + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [] + } + +manpageCommand :: CommandUI (Flag Verbosity) +manpageCommand = CommandUI { + commandName = "manpage", + commandSynopsis = "Outputs manpage source.", + commandDescription = Just $ \_ -> + "Output manpage source to STDOUT.\n", + commandNotes = Nothing, + commandUsage = usageFlags "manpage", + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [optionVerbosity id const] + } + +runCommand :: CommandUI (BuildFlags, BuildExFlags) +runCommand = CommandUI { + commandName = "run", + commandSynopsis = "Builds and runs an executable.", + commandDescription = Just $ \pname -> wrapText $ + "Builds and then runs the specified executable. If no executable is " + ++ "specified, but the package contains just one executable, that one " + ++ "is built and executed.\n" + ++ "\n" + ++ "Use `" ++ pname ++ " test --show-details=streaming` to run a " + ++ "test-suite and get its full output.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " run\n" + ++ " Run the only executable in the current package;\n" + ++ " " ++ pname ++ " run foo -- --fooflag\n" + ++ " Works similar to `./foo --fooflag`.\n", + commandUsage = usageAlternatives "run" + ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], + commandDefaultFlags = mempty, + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd + (buildExOptions showOrParseArgs) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.buildCommand defaultProgramConfiguration + +-- ------------------------------------------------------------ +-- * Report flags +-- ------------------------------------------------------------ + +data ReportFlags = ReportFlags { + reportUsername :: Flag Username, + reportPassword :: Flag Password, + reportVerbosity :: Flag Verbosity + } deriving Generic + +defaultReportFlags :: ReportFlags +defaultReportFlags = ReportFlags { + reportUsername = mempty, + reportPassword = mempty, + reportVerbosity = toFlag normal + } + +reportCommand :: CommandUI ReportFlags +reportCommand = CommandUI { + commandName = "report", + commandSynopsis = "Upload build reports to a remote server.", + commandDescription = Nothing, + commandNotes = Just $ \_ -> + "You can store your Hackage login in the ~/.cabal/config file\n", + commandUsage = usageAlternatives "report" ["[FLAGS]"], + commandDefaultFlags = defaultReportFlags, + commandOptions = \_ -> + [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) + + ,option ['u'] ["username"] + "Hackage 
username." + reportUsername (\v flags -> flags { reportUsername = v }) + (reqArg' "USERNAME" (toFlag . Username) + (flagToList . fmap unUsername)) + + ,option ['p'] ["password"] + "Hackage password." + reportPassword (\v flags -> flags { reportPassword = v }) + (reqArg' "PASSWORD" (toFlag . Password) + (flagToList . fmap unPassword)) + ] + } + +instance Monoid ReportFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ReportFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Get flags +-- ------------------------------------------------------------ + +data GetFlags = GetFlags { + getDestDir :: Flag FilePath, + getPristine :: Flag Bool, + getSourceRepository :: Flag (Maybe RepoKind), + getVerbosity :: Flag Verbosity + } deriving Generic + +defaultGetFlags :: GetFlags +defaultGetFlags = GetFlags { + getDestDir = mempty, + getPristine = mempty, + getSourceRepository = mempty, + getVerbosity = toFlag normal + } + +getCommand :: CommandUI GetFlags +getCommand = CommandUI { + commandName = "get", + commandSynopsis = "Download/Extract a package's source code (repository).", + commandDescription = Just $ \_ -> wrapText $ + "Creates a local copy of a package's source code. By default it gets " + ++ "the source\ntarball and unpacks it in a local subdirectory. " + ++ "Alternatively, with -s it will\nget the code from the source " + ++ "repository specified by the package.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " get hlint\n" + ++ " Download the latest stable version of hlint;\n" + ++ " " ++ pname ++ " get lens --source-repository=head\n" + ++ " Download the source repository (i.e. git clone from github).\n", + commandUsage = usagePackages "get", + commandDefaultFlags = defaultGetFlags, + commandOptions = \_ -> [ + optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) + + ,option "d" ["destdir"] + "Where to place the package source, defaults to the current directory." + getDestDir (\v flags -> flags { getDestDir = v }) + (reqArgFlag "PATH") + + ,option "s" ["source-repository"] + "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." + getSourceRepository (\v flags -> flags { getSourceRepository = v }) + (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository") + (fmap (toFlag . Just) parse)) + (Flag Nothing) + (map (fmap show) . flagToList)) + + , option [] ["pristine"] + ("Unpack the original pristine tarball, rather than updating the " + ++ ".cabal file with the latest revision from the package archive.") + getPristine (\v flags -> flags { getPristine = v }) + trueArg + ] + } + +-- 'cabal unpack' is a deprecated alias for 'cabal get'. 
+unpackCommand :: CommandUI GetFlags +unpackCommand = getCommand { + commandName = "unpack", + commandUsage = usagePackages "unpack" + } + +instance Monoid GetFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup GetFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * List flags +-- ------------------------------------------------------------ + +data ListFlags = ListFlags { + listInstalled :: Flag Bool, + listSimpleOutput :: Flag Bool, + listVerbosity :: Flag Verbosity, + listPackageDBs :: [Maybe PackageDB] + } deriving Generic + +defaultListFlags :: ListFlags +defaultListFlags = ListFlags { + listInstalled = Flag False, + listSimpleOutput = Flag False, + listVerbosity = toFlag normal, + listPackageDBs = [] + } + +listCommand :: CommandUI ListFlags +listCommand = CommandUI { + commandName = "list", + commandSynopsis = "List packages matching a search string.", + commandDescription = Just $ \_ -> wrapText $ + "List all packages, or all packages matching one of the search" + ++ " strings.\n" + ++ "\n" + ++ "If there is a sandbox in the current directory and " + ++ "config:ignore-sandbox is False, use the sandbox package database. " + ++ "Otherwise, use the package database specified with --package-db. " + ++ "If not specified, use the user package database.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " list pandoc\n" + ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n", + commandUsage = usageAlternatives "list" [ "[FLAGS]" + , "[FLAGS] STRINGS"], + commandDefaultFlags = defaultListFlags, + commandOptions = \_ -> [ + optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) + + , option [] ["installed"] + "Only print installed packages" + listInstalled (\v flags -> flags { listInstalled = v }) + trueArg + + , option [] ["simple-output"] + "Print in a easy-to-parse format" + listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) + trueArg + + , option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." + ++ " See the user guide for details.") + listPackageDBs (\v flags -> flags { listPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ] + } + +instance Monoid ListFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ListFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Info flags +-- ------------------------------------------------------------ + +data InfoFlags = InfoFlags { + infoVerbosity :: Flag Verbosity, + infoPackageDBs :: [Maybe PackageDB] + } deriving Generic + +defaultInfoFlags :: InfoFlags +defaultInfoFlags = InfoFlags { + infoVerbosity = toFlag normal, + infoPackageDBs = [] + } + +infoCommand :: CommandUI InfoFlags +infoCommand = CommandUI { + commandName = "info", + commandSynopsis = "Display detailed information about a particular package.", + commandDescription = Just $ \_ -> wrapText $ + "If there is a sandbox in the current directory and " + ++ "config:ignore-sandbox is False, use the sandbox package database. " + ++ "Otherwise, use the package database specified with --package-db. 
" + ++ "If not specified, use the user package database.\n", + commandNotes = Nothing, + commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"], + commandDefaultFlags = defaultInfoFlags, + commandOptions = \_ -> [ + optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v }) + + , option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." + ++ " See the user guide for details.") + infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ] + } + +instance Monoid InfoFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InfoFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Install flags +-- ------------------------------------------------------------ + +-- | Install takes the same flags as configure along with a few extras. +-- +data InstallFlags = InstallFlags { + installDocumentation :: Flag Bool, + installHaddockIndex :: Flag PathTemplate, + installDryRun :: Flag Bool, + installMaxBackjumps :: Flag Int, + installReorderGoals :: Flag Bool, + installIndependentGoals :: Flag Bool, + installShadowPkgs :: Flag Bool, + installStrongFlags :: Flag Bool, + installReinstall :: Flag Bool, + installAvoidReinstalls :: Flag Bool, + installOverrideReinstall :: Flag Bool, + installUpgradeDeps :: Flag Bool, + installOnly :: Flag Bool, + installOnlyDeps :: Flag Bool, + installRootCmd :: Flag String, + installSummaryFile :: NubList PathTemplate, + installLogFile :: Flag PathTemplate, + installBuildReports :: Flag ReportLevel, + installReportPlanningFailure :: Flag Bool, + installSymlinkBinDir :: Flag FilePath, + installOneShot :: Flag Bool, + installNumJobs :: Flag (Maybe Int), + installRunTests :: Flag Bool, + installOfflineMode :: Flag Bool + } + deriving (Eq, Generic) + +instance Binary InstallFlags + +defaultInstallFlags :: InstallFlags +defaultInstallFlags = InstallFlags { + installDocumentation = Flag False, + installHaddockIndex = Flag docIndexFile, + installDryRun = Flag False, + installMaxBackjumps = Flag defaultMaxBackjumps, + installReorderGoals = Flag False, + installIndependentGoals= Flag False, + installShadowPkgs = Flag False, + installStrongFlags = Flag False, + installReinstall = Flag False, + installAvoidReinstalls = Flag False, + installOverrideReinstall = Flag False, + installUpgradeDeps = Flag False, + installOnly = Flag False, + installOnlyDeps = Flag False, + installRootCmd = mempty, + installSummaryFile = mempty, + installLogFile = mempty, + installBuildReports = Flag NoReports, + installReportPlanningFailure = Flag False, + installSymlinkBinDir = mempty, + installOneShot = Flag False, + installNumJobs = mempty, + installRunTests = mempty, + installOfflineMode = Flag False + } + where + docIndexFile = toPathTemplate ("$datadir" "doc" + "$arch-$os-$compiler" "index.html") + +defaultMaxBackjumps :: Int +defaultMaxBackjumps = 2000 + +defaultSolver :: PreSolver +defaultSolver = Choose + +allSolvers :: String +allSolvers = intercalate ", " (map display ([minBound .. 
maxBound] :: [PreSolver])) + +installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +installCommand = CommandUI { + commandName = "install", + commandSynopsis = "Install packages.", + commandUsage = usageAlternatives "install" [ "[FLAGS]" + , "[FLAGS] PACKAGES" + ], + commandDescription = Just $ \_ -> wrapText $ + "Installs one or more packages. By default, the installed package" + ++ " will be registered in the user's package database or, if a sandbox" + ++ " is present in the current directory, inside the sandbox.\n" + ++ "\n" + ++ "If PACKAGES are specified, downloads and installs those packages." + ++ " Otherwise, install the package in the current directory (and/or its" + ++ " dependencies) (there must be exactly one .cabal file in the current" + ++ " directory).\n" + ++ "\n" + ++ "When using a sandbox, the flags for `install` only affect the" + ++ " current command and have no effect on future commands. (To achieve" + ++ " that, `configure` must be used.)\n" + ++ " In contrast, without a sandbox, the flags to `install` are saved and" + ++ " affect future commands such as `build` and `repl`. See the help for" + ++ " `configure` for a list of commands being affected.\n" + ++ "\n" + ++ "Installed executables will by default (and without a sandbox)" + ++ " be put into `~/.cabal/bin/`." + ++ " If you want installed executable to be available globally, make" + ++ " sure that the PATH environment variable contains that directory.\n" + ++ "When using a sandbox, executables will be put into" + ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n" + ++ "\n" + ++ "When specifying --bindir, consider also specifying --datadir;" + ++ " this way the sandbox can be deleted and the executable should" + ++ " continue working as long as bindir and datadir are left untouched.", + commandNotes = Just $ \pname -> + ( case commandNotes + $ Cabal.configureCommand defaultProgramConfiguration + of Just desc -> desc pname ++ "\n" + Nothing -> "" + ) + ++ "Examples:\n" + ++ " " ++ pname ++ " install " + ++ " Package in the current directory\n" + ++ " " ++ pname ++ " install foo " + ++ " Package from the hackage server\n" + ++ " " ++ pname ++ " install foo-1.0 " + ++ " Specific version of a package\n" + ++ " " ++ pname ++ " install 'foo < 2' " + ++ " Constrained package version\n" + ++ " " ++ pname ++ " install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n" + ++ " " ++ (map (const ' ') pname) + ++ " " + ++ " Change installation destination\n", + commandDefaultFlags = (mempty, mempty, mempty, mempty), + commandOptions = \showOrParseArgs -> + liftOptions get1 set1 + (filter ((`notElem` ["constraint", "dependency" + , "exact-configuration"]) + . 
optionName) $ + configureOptions showOrParseArgs) + ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) + ++ liftOptions get3 set3 (installOptions showOrParseArgs) + ++ liftOptions get4 set4 (haddockOptions showOrParseArgs) + } + where + get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d) + get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d) + get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d) + get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d) + +haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] +haddockOptions showOrParseArgs + = [ opt { optionName = "haddock-" ++ name, + optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr + | descr <- optionDescr opt] } + | opt <- commandOptions Cabal.haddockCommand showOrParseArgs + , let name = optionName opt + , name `elem` ["hoogle", "html", "html-location" + ,"executables", "tests", "benchmarks", "all", "internal", "css" + ,"hyperlink-source", "hscolour-css" + ,"contents-location"] + ] + where + fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a + fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w + fmapOptFlags modify (OptArg d f p r i w) = OptArg d (modify f) p r i w + fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs] + fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w + +installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] +installOptions showOrParseArgs = + [ option "" ["documentation"] + "building of documentation" + installDocumentation (\v flags -> flags { installDocumentation = v }) + (boolOpt [] []) + + , option [] ["doc-index-file"] + "A central index of haddock API documentation (template cannot use $pkgid)" + installHaddockIndex (\v flags -> flags { installHaddockIndex = v }) + (reqArg' "TEMPLATE" (toFlag.toPathTemplate) + (flagToList . fmap fromPathTemplate)) + + , option [] ["dry-run"] + "Do not install anything, only print what would be installed." + installDryRun (\v flags -> flags { installDryRun = v }) + trueArg + ] ++ + + optionSolverFlags showOrParseArgs + installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) + installReorderGoals (\v flags -> flags { installReorderGoals = v }) + installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) + installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) + installStrongFlags (\v flags -> flags { installStrongFlags = v }) ++ + + [ option [] ["reinstall"] + "Install even if it means installing the same version again." + installReinstall (\v flags -> flags { installReinstall = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["avoid-reinstalls"] + "Do not select versions that would destructively overwrite installed packages." + installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["force-reinstalls"] + "Reinstall packages even if they will most likely break other installed packages." + installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["upgrade-dependencies"] + "Pick the latest version for all dependencies, rather than trying to pick an installed version." 
+ installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["only-dependencies"] + "Install only the dependencies necessary to build the given packages" + installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["dependencies-only"] + "A synonym for --only-dependencies" + installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["root-cmd"] + "Command used to gain root privileges, when installing with --global." + installRootCmd (\v flags -> flags { installRootCmd = v }) + (reqArg' "COMMAND" toFlag flagToList) + + , option [] ["symlink-bindir"] + "Add symlinks to installed executables into this directory." + installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) + (reqArgFlag "DIR") + + , option [] ["build-summary"] + "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" + installSummaryFile (\v flags -> flags { installSummaryFile = v }) + (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList)) + + , option [] ["build-log"] + "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" + installLogFile (\v flags -> flags { installLogFile = v }) + (reqArg' "TEMPLATE" (toFlag.toPathTemplate) + (flagToList . fmap fromPathTemplate)) + + , option [] ["remote-build-reporting"] + "Generate build reports to send to a remote server (none, anonymous or detailed)." + installBuildReports (\v flags -> flags { installBuildReports = v }) + (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', " + ++ "'anonymous' or 'detailed'") + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["report-planning-failure"] + "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." + installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) + trueArg + + , option [] ["one-shot"] + "Do not record the packages in the world file." + installOneShot (\v flags -> flags { installOneShot = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["run-tests"] + "Run package test suites during installation." + installRunTests (\v flags -> flags { installRunTests = v }) + trueArg + + , optionNumJobs + installNumJobs (\v flags -> flags { installNumJobs = v }) + + , option [] ["offline"] + "Don't download packages from the Internet." + installOfflineMode (\v flags -> flags { installOfflineMode = v }) + (yesNoOpt showOrParseArgs) + ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" + -- avoids + ParseArgs -> + [ option [] ["only"] + "Only installs the package in the current directory." 
+ installOnly (\v flags -> flags { installOnly = v }) + trueArg ] + _ -> [] + + +instance Monoid InstallFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InstallFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Upload flags +-- ------------------------------------------------------------ + +data UploadFlags = UploadFlags { + uploadCheck :: Flag Bool, + uploadDoc :: Flag Bool, + uploadUsername :: Flag Username, + uploadPassword :: Flag Password, + uploadPasswordCmd :: Flag [String], + uploadVerbosity :: Flag Verbosity + } deriving Generic + +defaultUploadFlags :: UploadFlags +defaultUploadFlags = UploadFlags { + uploadCheck = toFlag False, + uploadDoc = toFlag False, + uploadUsername = mempty, + uploadPassword = mempty, + uploadPasswordCmd = mempty, + uploadVerbosity = toFlag normal + } + +uploadCommand :: CommandUI UploadFlags +uploadCommand = CommandUI { + commandName = "upload", + commandSynopsis = "Uploads source packages or documentation to Hackage.", + commandDescription = Nothing, + commandNotes = Just $ \_ -> + "You can store your Hackage login in the ~/.cabal/config file\n" + ++ relevantConfigValuesText ["username", "password"], + commandUsage = \pname -> + "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", + commandDefaultFlags = defaultUploadFlags, + commandOptions = \_ -> + [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v }) + + ,option ['c'] ["check"] + "Do not upload, just do QA checks." + uploadCheck (\v flags -> flags { uploadCheck = v }) + trueArg + + ,option ['d'] ["documentation"] + "Upload documentation instead of a source package. Cannot be used together with --check." + uploadDoc (\v flags -> flags { uploadDoc = v }) + trueArg + + ,option ['u'] ["username"] + "Hackage username." + uploadUsername (\v flags -> flags { uploadUsername = v }) + (reqArg' "USERNAME" (toFlag . Username) + (flagToList . fmap unUsername)) + + ,option ['p'] ["password"] + "Hackage password." + uploadPassword (\v flags -> flags { uploadPassword = v }) + (reqArg' "PASSWORD" (toFlag . Password) + (flagToList . fmap unPassword)) + + ,option ['P'] ["password-command"] + "Command to get Hackage password." + uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v }) + (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe)) + ] + } + +instance Monoid UploadFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup UploadFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Init flags +-- ------------------------------------------------------------ + +emptyInitFlags :: IT.InitFlags +emptyInitFlags = mempty + +defaultInitFlags :: IT.InitFlags +defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal } + +initCommand :: CommandUI IT.InitFlags +initCommand = CommandUI { + commandName = "init", + commandSynopsis = "Create a new .cabal package file (interactively).", + commandDescription = Just $ \_ -> wrapText $ + "Cabalise a project by creating a .cabal, Setup.hs, and " + ++ "optionally a LICENSE file.\n" + ++ "\n" + ++ "Calling init with no arguments (recommended) uses an " + ++ "interactive mode, which will try to guess as much as " + ++ "possible and prompt you for the rest. Command-line " + ++ "arguments are provided for scripting purposes. 
" + ++ "If you don't want interactive mode, be sure to pass " + ++ "the -n flag.\n", + commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " init [FLAGS]\n", + commandDefaultFlags = defaultInitFlags, + commandOptions = \_ -> + [ option ['n'] ["non-interactive"] + "Non-interactive mode." + IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v }) + trueArg + + , option ['q'] ["quiet"] + "Do not generate log messages to stdout." + IT.quiet (\v flags -> flags { IT.quiet = v }) + trueArg + + , option [] ["no-comments"] + "Do not generate explanatory comments in the .cabal file." + IT.noComments (\v flags -> flags { IT.noComments = v }) + trueArg + + , option ['m'] ["minimal"] + "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." + IT.minimal (\v flags -> flags { IT.minimal = v }) + trueArg + + , option [] ["overwrite"] + "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." + IT.overwrite (\v flags -> flags { IT.overwrite = v }) + trueArg + + , option [] ["package-dir"] + "Root directory of the package (default = current directory)." + IT.packageDir (\v flags -> flags { IT.packageDir = v }) + (reqArgFlag "DIRECTORY") + + , option ['p'] ["package-name"] + "Name of the Cabal package to create." + IT.packageName (\v flags -> flags { IT.packageName = v }) + (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["version"] + "Initial version of the package." + IT.version (\v flags -> flags { IT.version = v }) + (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["cabal-version"] + "Required version of the Cabal library." + IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) + (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['l'] ["license"] + "Project license." + IT.license (\v flags -> flags { IT.license = v }) + (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['a'] ["author"] + "Name of the project's author." + IT.author (\v flags -> flags { IT.author = v }) + (reqArgFlag "NAME") + + , option ['e'] ["email"] + "Email address of the maintainer." + IT.email (\v flags -> flags { IT.email = v }) + (reqArgFlag "EMAIL") + + , option ['u'] ["homepage"] + "Project homepage and/or repository." + IT.homepage (\v flags -> flags { IT.homepage = v }) + (reqArgFlag "URL") + + , option ['s'] ["synopsis"] + "Short project synopsis." + IT.synopsis (\v flags -> flags { IT.synopsis = v }) + (reqArgFlag "TEXT") + + , option ['c'] ["category"] + "Project category." + IT.category (\v flags -> flags { IT.category = v }) + (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s)) + (flagToList . fmap (either id show))) + + , option ['x'] ["extra-source-file"] + "Extra source file to be distributed with tarball." + IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) + (reqArg' "FILE" (Just . (:[])) + (fromMaybe [])) + + , option [] ["is-library"] + "Build a library." + IT.packageType (\v flags -> flags { IT.packageType = v }) + (noArg (Flag IT.Library)) + + , option [] ["is-executable"] + "Build an executable." 
+ IT.packageType + (\v flags -> flags { IT.packageType = v }) + (noArg (Flag IT.Executable)) + + , option [] ["main-is"] + "Specify the main module." + IT.mainIs + (\v flags -> flags { IT.mainIs = v }) + (reqArgFlag "FILE") + + , option [] ["language"] + "Specify the default language." + IT.language + (\v flags -> flags { IT.language = v }) + (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['o'] ["expose-module"] + "Export a module from the package." + IT.exposedModules + (\v flags -> flags { IT.exposedModules = v }) + (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++) + ((Just . (:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option [] ["extension"] + "Use a LANGUAGE extension (in the other-extensions field)." + IT.otherExts + (\v flags -> flags { IT.otherExts = v }) + (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++) + ((Just . (:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option ['d'] ["dependency"] + "Package dependency." + IT.dependencies (\v flags -> flags { IT.dependencies = v }) + (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++) + ((Just . (:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option [] ["source-dir"] + "Directory containing package source." + IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) + (reqArg' "DIR" (Just . (:[])) + (fromMaybe [])) + + , option [] ["build-tool"] + "Required external build tool." + IT.buildTools (\v flags -> flags { IT.buildTools = v }) + (reqArg' "TOOL" (Just . (:[])) + (fromMaybe [])) + + , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) + ] + } + where readMaybe s = case reads s of + [(x,"")] -> Just x + _ -> Nothing + +-- ------------------------------------------------------------ +-- * SDist flags +-- ------------------------------------------------------------ + +-- | Extra flags to @sdist@ beyond runghc Setup sdist +-- +data SDistExFlags = SDistExFlags { + sDistFormat :: Flag ArchiveFormat + } + deriving (Show, Generic) + +data ArchiveFormat = TargzFormat | ZipFormat -- | ... 
+ deriving (Show, Eq) + +defaultSDistExFlags :: SDistExFlags +defaultSDistExFlags = SDistExFlags { + sDistFormat = Flag TargzFormat + } + +sdistCommand :: CommandUI (SDistFlags, SDistExFlags) +sdistCommand = Cabal.sdistCommand { + commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags), + commandOptions = \showOrParseArgs -> + liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs) + ++ liftOptions snd setSnd sdistExOptions + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + sdistExOptions = + [option [] ["archive-format"] "archive-format" + sDistFormat (\v flags -> flags { sDistFormat = v }) + (choiceOpt + [ (Flag TargzFormat, ([], ["targz"]), + "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") + , (Flag ZipFormat, ([], ["zip"]), + "Produce a '.zip' format archive") + ]) + ] + +instance Monoid SDistExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup SDistExFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Win32SelfUpgrade flags +-- ------------------------------------------------------------ + +data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags { + win32SelfUpgradeVerbosity :: Flag Verbosity +} deriving Generic + +defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags +defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags { + win32SelfUpgradeVerbosity = toFlag normal +} + +win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags +win32SelfUpgradeCommand = CommandUI { + commandName = "win32selfupgrade", + commandSynopsis = "Self-upgrade the executable on Windows", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n", + commandDefaultFlags = defaultWin32SelfUpgradeFlags, + commandOptions = \_ -> + [optionVerbosity win32SelfUpgradeVerbosity + (\v flags -> flags { win32SelfUpgradeVerbosity = v}) + ] +} + +instance Monoid Win32SelfUpgradeFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup Win32SelfUpgradeFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * ActAsSetup flags +-- ------------------------------------------------------------ + +data ActAsSetupFlags = ActAsSetupFlags { + actAsSetupBuildType :: Flag BuildType +} deriving Generic + +defaultActAsSetupFlags :: ActAsSetupFlags +defaultActAsSetupFlags = ActAsSetupFlags { + actAsSetupBuildType = toFlag Simple +} + +actAsSetupCommand :: CommandUI ActAsSetupFlags +actAsSetupCommand = CommandUI { + commandName = "act-as-setup", + commandSynopsis = "Run as-if this was a Setup.hs", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " act-as-setup\n", + commandDefaultFlags = defaultActAsSetupFlags, + commandOptions = \_ -> + [option "" ["build-type"] + "Use the given build type." + actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v }) + (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++) + (fmap toFlag parse)) + (map display . 
flagToList)) + ] +} + +instance Monoid ActAsSetupFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ActAsSetupFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Sandbox-related flags +-- ------------------------------------------------------------ + +data SandboxFlags = SandboxFlags { + sandboxVerbosity :: Flag Verbosity, + sandboxSnapshot :: Flag Bool, -- FIXME: this should be an 'add-source'-only + -- flag. + sandboxLocation :: Flag FilePath +} deriving Generic + +defaultSandboxLocation :: FilePath +defaultSandboxLocation = ".cabal-sandbox" + +defaultSandboxFlags :: SandboxFlags +defaultSandboxFlags = SandboxFlags { + sandboxVerbosity = toFlag normal, + sandboxSnapshot = toFlag False, + sandboxLocation = toFlag defaultSandboxLocation + } + +sandboxCommand :: CommandUI SandboxFlags +sandboxCommand = CommandUI { + commandName = "sandbox", + commandSynopsis = "Create/modify/delete a sandbox.", + commandDescription = Just $ \pname -> concat + [ paragraph $ "Sandboxes are isolated package databases that can be used" + ++ " to prevent dependency conflicts that arise when many different" + ++ " packages are installed in the same database (i.e. the user's" + ++ " database in the home directory)." + , paragraph $ "A sandbox in the current directory (created by" + ++ " `sandbox init`) will be used instead of the user's database for" + ++ " commands such as `install` and `build`. Note that (a directly" + ++ " invoked) GHC will not automatically be aware of sandboxes;" + ++ " only if called via appropriate " ++ pname + ++ " commands, e.g. `repl`, `build`, `exec`." + , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox" + ++ " in folders above the current one, so cabal will not see the sandbox" + ++ " if you are in a subfolder of a sandbox." + , paragraph "Subcommands:" + , headLine "init:" + , indentParagraph $ "Initialize a sandbox in the current directory." + ++ " An existing package database will not be modified, but settings" + ++ " (such as the location of the database) can be modified this way." + , headLine "delete:" + , indentParagraph $ "Remove the sandbox; deleting all the packages" + ++ " installed inside." + , headLine "add-source:" + , indentParagraph $ "Make one or more local packages available in the" + ++ " sandbox. PATHS may be relative or absolute." + ++ " Typical usecase is when you need" + ++ " to make a (temporary) modification to a dependency: You download" + ++ " the package into a different directory, make the modification," + ++ " and add that directory to the sandbox with `add-source`." + , indentParagraph $ "Unless given `--snapshot`, any add-source'd" + ++ " dependency that was modified since the last build will be" + ++ " re-installed automatically." + , headLine "delete-source:" + , indentParagraph $ "Remove an add-source dependency; however, this will" + ++ " not delete the package(s) that have been installed in the sandbox" + ++ " from this dependency. You can either unregister the package(s) via" + ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the" + ++ " sandbox (`sandbox delete; sandbox init`)." + , headLine "list-sources:" + , indentParagraph $ "List the directories of local packages made" + ++ " available via `" ++ pname ++ " add-source`." + , headLine "hc-pkg:" + , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package" + ++ " database. Can be used to list specific/all packages that are" + ++ " installed in the sandbox. 
For subcommands, see the help for" + ++ " ghc-pkg. Affected by the compiler version specified by `configure`." + ], + commandNotes = Just $ \pname -> + relevantConfigValuesText ["require-sandbox" + ,"ignore-sandbox"] + ++ "\n" + ++ "Examples:\n" + ++ " Set up a sandbox with one local dependency, located at ../foo:\n" + ++ " " ++ pname ++ " sandbox init\n" + ++ " " ++ pname ++ " sandbox add-source ../foo\n" + ++ " " ++ pname ++ " install --only-dependencies\n" + ++ " Reset the sandbox:\n" + ++ " " ++ pname ++ " sandbox delete\n" + ++ " " ++ pname ++ " sandbox init\n" + ++ " " ++ pname ++ " install --only-dependencies\n" + ++ " List the packages in the sandbox:\n" + ++ " " ++ pname ++ " sandbox hc-pkg list\n" + ++ " Unregister the `broken` package from the sandbox:\n" + ++ " " ++ pname ++ " sandbox hc-pkg -- --force unregister broken\n", + commandUsage = usageAlternatives "sandbox" + [ "init [FLAGS]" + , "delete [FLAGS]" + , "add-source [FLAGS] PATHS" + , "delete-source [FLAGS] PATHS" + , "list-sources [FLAGS]" + , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]" + ], + + commandDefaultFlags = defaultSandboxFlags, + commandOptions = \_ -> + [ optionVerbosity sandboxVerbosity + (\v flags -> flags { sandboxVerbosity = v }) + + , option [] ["snapshot"] + "Take a snapshot instead of creating a link (only applies to 'add-source')" + sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v }) + trueArg + + , option [] ["sandbox"] + "Sandbox location (default: './.cabal-sandbox')." + sandboxLocation (\v flags -> flags { sandboxLocation = v }) + (reqArgFlag "DIR") + ] + } + +instance Monoid SandboxFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup SandboxFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Exec Flags +-- ------------------------------------------------------------ + +data ExecFlags = ExecFlags { + execVerbosity :: Flag Verbosity +} deriving Generic + +defaultExecFlags :: ExecFlags +defaultExecFlags = ExecFlags { + execVerbosity = toFlag normal + } + +execCommand :: CommandUI ExecFlags +execCommand = CommandUI { + commandName = "exec", + commandSynopsis = "Give a command access to the sandbox package repository.", + commandDescription = Just $ \pname -> wrapText $ + -- TODO: this is too GHC-focused for my liking.. + "A directly invoked GHC will not automatically be aware of any" + ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what" + ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:" + ++ " COMMAND will be executed in a modified environment and thereby uses" + ++ " the sandbox package database.\n" + ++ "\n" + ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n" + ++ "\n" + ++ "Note that other " ++ pname ++ " commands change the environment" + ++ " variable appropriately already, so there is no need to wrap those" + ++ " in `" ++ pname ++ " exec`. 
But with `" ++ pname ++ " exec`, the user" + ++ " has more control and can, for example, execute custom scripts which" + ++ " indirectly execute GHC.\n" + ++ "\n" + ++ "Note that `" ++ pname ++ " repl` is different from `" ++ pname + ++ " exec -- ghci` as the latter will not forward any additional flags" + ++ " being defined in the local package to ghci.\n" + ++ "\n" + ++ "See `" ++ pname ++ " sandbox`.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " exec -- ghci -Wall\n" + ++ " Start a repl session with sandbox packages and all warnings;\n" + ++ " " ++ pname ++ " exec gitit -- -f gitit.cnf\n" + ++ " Give gitit access to the sandbox packages, and pass it a flag;\n" + ++ " " ++ pname ++ " exec runghc Foo.hs\n" + ++ " Execute runghc on Foo.hs with runghc configured to use the\n" + ++ " sandbox package database (if a sandbox is being used).\n", + commandUsage = \pname -> + "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n", + + commandDefaultFlags = defaultExecFlags, + commandOptions = \_ -> + [ optionVerbosity execVerbosity + (\v flags -> flags { execVerbosity = v }) + ] + } + +instance Monoid ExecFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ExecFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * UserConfig flags +-- ------------------------------------------------------------ + +data UserConfigFlags = UserConfigFlags { + userConfigVerbosity :: Flag Verbosity, + userConfigForce :: Flag Bool +} deriving Generic + +instance Monoid UserConfigFlags where + mempty = UserConfigFlags { + userConfigVerbosity = toFlag normal, + userConfigForce = toFlag False + } + mappend = (<>) + +instance Semigroup UserConfigFlags where + (<>) = gmappend + +userConfigCommand :: CommandUI UserConfigFlags +userConfigCommand = CommandUI { + commandName = "user-config", + commandSynopsis = "Display and update the user's global cabal configuration.", + commandDescription = Just $ \_ -> wrapText $ + "When upgrading cabal, the set of configuration keys and their default" + ++ " values may change. This command provides means to merge the existing" + ++ " config in ~/.cabal/config" + ++ " (i.e. all bindings that are actually defined and not commented out)" + ++ " and the default config of the new version.\n" + ++ "\n" + ++ "init: Creates a new config file at either ~/.cabal/config or as" + ++ " specified by --config-file, if given. An existing file won't be " + ++ " overwritten unless -f or --force is given.\n" + ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" + ++ " the default configuration that would be created by cabal if the" + ++ " config file did not exist.\n" + ++ "update: Applies the pseudo-diff to the configuration that would be" + ++ " created by default, and write the result back to ~/.cabal/config.", + + commandNotes = Nothing, + commandUsage = usageAlternatives "user-config" ["init", "diff", "update"], + commandDefaultFlags = mempty, + commandOptions = \ _ -> [ + optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) + , option ['f'] ["force"] + "Overwrite the config file if it already exists." 
+ userConfigForce (\v flags -> flags { userConfigForce = v }) + trueArg + ] + } + +-- ------------------------------------------------------------ +-- * GetOpt Utils +-- ------------------------------------------------------------ + +reqArgFlag :: ArgPlaceHolder -> + MkOptDescr (b -> Flag String) (Flag String -> b -> b) b +reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList + +liftOptions :: (b -> a) -> (a -> b -> b) + -> [OptionField a] -> [OptionField b] +liftOptions get set = map (liftOption get set) + +yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b +yesNoOpt ShowArgs sf lf = trueArg sf lf +yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf + +optionSolver :: (flags -> Flag PreSolver) + -> (Flag PreSolver -> flags -> flags) + -> OptionField flags +optionSolver get set = + option [] ["solver"] + ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ", where 'choose' chooses between 'topdown' and 'modular' based on compiler version.") + get set + (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + +optionSolverFlags :: ShowOrParseArgs + -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) + -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) + -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) + -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) + -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) + -> [OptionField flags] +optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg _getig _setig getsip setsip getstrfl setstrfl = + [ option [] ["max-backjumps"] + ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") + getmbj setmbj + (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse)) + (map show . flagToList)) + , option [] ["reorder-goals"] + "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." + getrg setrg + (yesNoOpt showOrParseArgs) + -- TODO: Disabled for now because it does not work as advertised (yet). +{- + , option [] ["independent-goals"] + "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." + getig setig + (yesNoOpt showOrParseArgs) +-} + , option [] ["shadow-installed-packages"] + "If multiple package instances of the same version are installed, treat all but one as shadowed." + getsip setsip + (yesNoOpt showOrParseArgs) + , option [] ["strong-flags"] + "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." + getstrfl setstrfl + (yesNoOpt showOrParseArgs) + ] + +usageFlagsOrPackages :: String -> String -> String +usageFlagsOrPackages name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" + ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" + +usagePackages :: String -> String -> String +usagePackages name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" + +usageFlags :: String -> String -> String +usageFlags name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" + +--TODO: do we want to allow per-package flags? 
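-- 'liftOptions' above is what lets a command such as installCommand or
-- runCommand reuse option lists written against a single flags record on a
-- tuple of several records, by pairing a projection (get1..get4) with an
-- update function (set1..set4). A standalone sketch of that lifting idiom,
-- using a stand-in Opt type rather than Cabal's OptionField:

newtype Opt s = Opt { applyOpt :: String -> s -> s }

-- Lift an option on a component 'a' to an option on the whole state 'b'.
liftOpt :: (b -> a) -> (a -> b -> b) -> Opt a -> Opt b
liftOpt get set (Opt f) = Opt (\arg whole -> set (f arg (get whole)) whole)

newtype BuildFlagsDemo   = BuildFlagsDemo   { buildTarget :: String } deriving Show
newtype HaddockFlagsDemo = HaddockFlagsDemo { haddockCss  :: String } deriving Show

-- An option that only knows about BuildFlagsDemo ...
targetOpt :: Opt BuildFlagsDemo
targetOpt = Opt (\arg flags -> flags { buildTarget = arg })

main :: IO ()
main = print (applyOpt liftedTargetOpt "exe:demo" (BuildFlagsDemo "", HaddockFlagsDemo ""))
  -- prints (BuildFlagsDemo {buildTarget = "exe:demo"},HaddockFlagsDemo {haddockCss = ""})
  where
    -- ... lifted so it updates the first component of the combined state.
    liftedTargetOpt = liftOpt fst (\a (_, b) -> (a, b)) targetOpt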
+parsePackageArgs :: [String] -> Either String [Dependency] +parsePackageArgs = parsePkgArgs [] + where + parsePkgArgs ds [] = Right (reverse ds) + parsePkgArgs ds (arg:args) = + case readPToMaybe parseDependencyOrPackageId arg of + Just dep -> parsePkgArgs (dep:ds) args + Nothing -> Left $ + show arg ++ " is not valid syntax for a package name or" + ++ " package dependency." + +parseDependencyOrPackageId :: Parse.ReadP r Dependency +parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse + where + pkgidToDependency :: PackageIdentifier -> Dependency + pkgidToDependency p = case packageVersion p of + Version [] _ -> Dependency (packageName p) anyVersion + version -> Dependency (packageName p) (thisVersion version) + +showRepo :: RemoteRepo -> String +showRepo repo = remoteRepoName repo ++ ":" + ++ uriToString id (remoteRepoURI repo) [] + +readRepo :: String -> Maybe RemoteRepo +readRepo = readPToMaybe parseRepo + +parseRepo :: Parse.ReadP r RemoteRepo +parseRepo = do + name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.") + _ <- Parse.char ':' + uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~") + uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr) + return RemoteRepo { + remoteRepoName = name, + remoteRepoURI = uri, + remoteRepoSecure = Nothing, + remoteRepoRootKeys = [], + remoteRepoKeyThreshold = 0, + remoteRepoShouldTryHttps = False + } + +-- ------------------------------------------------------------ +-- * Helpers for Documentation +-- ------------------------------------------------------------ + +headLine :: String -> String +headLine = unlines + . map unwords + . wrapLine 79 + . words + +paragraph :: String -> String +paragraph = (++"\n") + . unlines + . map unwords + . wrapLine 79 + . words + +indentParagraph :: String -> String +indentParagraph = unlines + . (flip (++)) [""] + . map ((" "++).unwords) + . wrapLine 77 + . words + +relevantConfigValuesText :: [String] -> String +relevantConfigValuesText vs = + "Relevant global configuration keys:\n" + ++ concat [" " ++ v ++ "\n" |v <- vs] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/SetupWrapper.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/SetupWrapper.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/SetupWrapper.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/SetupWrapper.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,742 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.SetupWrapper +-- Copyright : (c) The University of Glasgow 2006, +-- Duncan Coutts 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : alpha +-- Portability : portable +-- +-- An interface to building and installing Cabal packages. +-- If the @Built-Type@ field is specified as something other than +-- 'Custom', and the current version of Cabal is acceptable, this performs +-- setup actions directly. Otherwise it builds the setup script and +-- runs it with the given arguments. 
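-- The header above summarises the policy: non-Custom build types whose Cabal
-- requirements the in-process library satisfies are handled directly, while
-- everything else goes through a compiled Setup.hs (the full decision,
-- including the self-exec variant, is 'determineSetupMethod' further down in
-- this file). A standalone sketch of that simplified decision, using
-- illustrative stand-in types rather than this module's BuildType:

data BuildTypeDemo   = SimpleBT | ConfigureBT | MakeBT | CustomBT deriving Show
data SetupMethodDemo = InternalSetup | ExternalSetup deriving Show

-- The Bool says whether the in-process Cabal library is acceptable for the
-- package's declared Cabal version requirements.
chooseSetupMethod :: BuildTypeDemo -> Bool -> SetupMethodDemo
chooseSetupMethod CustomBT _     = ExternalSetup   -- Custom always needs its own Setup.hs
chooseSetupMethod _        False = ExternalSetup   -- unsuitable Cabal version: compile one
chooseSetupMethod _        True  = InternalSetup   -- run the Setup action in-process

main :: IO ()
main = mapM_ print
  [ chooseSetupMethod SimpleBT True     -- InternalSetup
  , chooseSetupMethod CustomBT True     -- ExternalSetup
  , chooseSetupMethod MakeBT   False ]  -- ExternalSetup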
+ +module Distribution.Client.SetupWrapper ( + setupWrapper, + SetupScriptOptions(..), + defaultSetupScriptOptions, + ) where + +import qualified Distribution.Make as Make +import qualified Distribution.Simple as Simple +import Distribution.Version + ( Version(..), VersionRange, anyVersion + , intersectVersionRanges, orLaterVersion + , withinRange ) +import Distribution.InstalledPackageInfo (installedUnitId) +import Distribution.Package + ( UnitId(..), PackageIdentifier(..), PackageId, + PackageName(..), Package(..), packageName + , packageVersion, Dependency(..) ) +import Distribution.PackageDescription + ( GenericPackageDescription(packageDescription) + , PackageDescription(..), specVersion + , BuildType(..), knownBuildTypes, defaultRenaming ) +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.Simple.Configure + ( configCompilerEx ) +import Distribution.Compiler + ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) +import Distribution.Simple.Compiler + ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) +import Distribution.Simple.PreProcess + ( runSimplePreProcessor, ppUnlit ) +import Distribution.Simple.Build.Macros + ( generatePackageVersionMacros ) +import Distribution.Simple.Program + ( ProgramConfiguration, emptyProgramConfiguration + , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram + , ghcjsProgram ) +import Distribution.Simple.Program.Find + ( programSearchPathAsPATHVar ) +import Distribution.Simple.Program.Run + ( getEffectiveEnvironment ) +import qualified Distribution.Simple.Program.Strip as Strip +import Distribution.Simple.BuildPaths + ( defaultDistPref, exeExtension ) + +import Distribution.Simple.Command + ( CommandUI(..), commandShowOptions ) +import Distribution.Simple.Program.GHC + ( GhcMode(..), GhcOptions(..), renderGhcOptions ) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Client.Config + ( defaultCabalDir ) +import Distribution.Client.IndexUtils + ( getInstalledPackages ) +import Distribution.Client.JobControl + ( Lock, criticalSection ) +import Distribution.Simple.Setup + ( Flag(..) 
) +import Distribution.Simple.Utils + ( die, debug, info, cabalVersion, tryFindPackageDesc, comparing + , createDirectoryIfMissingVerbose, installExecutableFile + , copyFileVerbose, rewriteFile, intercalate ) +import Distribution.Client.Utils + ( inDir, tryCanonicalizePath + , existsAndIsMoreRecentThan, moreRecentFile +#if mingw32_HOST_OS + , canonicalizePathNoThrow +#endif + ) +import Distribution.System ( Platform(..), buildPlatform ) +import Distribution.Text + ( display ) +import Distribution.Utils.NubList + ( toNubListR ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Compat.Exception + ( catchIO ) + +import System.Directory ( doesFileExist ) +import System.FilePath ( (), (<.>) ) +import System.IO ( Handle, hPutStr ) +import System.Exit ( ExitCode(..), exitWith ) +import System.Process ( runProcess, waitForProcess ) +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative ( (<$>), (<*>) ) +import Data.Monoid ( mempty ) +#endif +import Control.Monad ( when, unless ) +import Data.List ( find, foldl1' ) +import Data.Maybe ( fromMaybe, isJust ) +import Data.Char ( isSpace ) +import Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) + +#ifdef mingw32_HOST_OS +import Distribution.Simple.Utils + ( withTempDirectory ) + +import Control.Exception ( bracket ) +import System.FilePath ( equalFilePath, takeDirectory ) +import System.Directory ( doesDirectoryExist ) +import qualified System.Win32 as Win32 +#endif + +--TODO: The 'setupWrapper' and 'SetupScriptOptions' should be split into two +-- parts: one that has no policy and just does as it's told with all the +-- explicit options, and an optional initial part that applies certain +-- policies (like if we should add the Cabal lib as a dep, and if so which +-- version). This could be structured as an action that returns a fully +-- elaborated 'SetupScriptOptions' containing no remaining policy choices. +-- +-- See also the discussion at https://github.com/haskell/cabal/pull/3094 + +data SetupScriptOptions = SetupScriptOptions { + -- | The version of the Cabal library to use (if 'useDependenciesExclusive' + -- is not set). A suitable version of the Cabal library must be installed + -- (or for some build-types be the one cabal-install was built with). + -- + -- The version found also determines the version of the Cabal specification + -- that we us for talking to the Setup.hs, unless overridden by + -- 'useCabalSpecVersion'. + -- + useCabalVersion :: VersionRange, + + -- | This is the version of the Cabal specification that we believe that + -- this package uses. This affects the semantics and in particular the + -- Setup command line interface. + -- + -- This is similar to 'useCabalVersion' but instead of probing the system + -- for a version of the /Cabal library/ you just say exactly which version + -- of the /spec/ we will use. Using this also avoid adding the Cabal + -- library as an additional dependency, so add it to 'useDependencies' + -- if needed. + -- + useCabalSpecVersion :: Maybe Version, + useCompiler :: Maybe Compiler, + usePlatform :: Maybe Platform, + usePackageDB :: PackageDBStack, + usePackageIndex :: Maybe InstalledPackageIndex, + useProgramConfig :: ProgramConfiguration, + useDistPref :: FilePath, + useLoggingHandle :: Maybe Handle, + useWorkingDir :: Maybe FilePath, + forceExternalSetupMethod :: Bool, + + -- | List of dependencies to use when building Setup.hs. + useDependencies :: [(UnitId, PackageId)], + + -- | Is the list of setup dependencies exclusive? 
+ -- + -- When this is @False@, if we compile the Setup.hs script we do so with the + -- list in 'useDependencies' but all other packages in the environment are + -- also visible. A suitable version of @Cabal@ library (see + -- 'useCabalVersion') is also added to the list of dependencies, unless + -- 'useDependencies' already contains a Cabal dependency. + -- + -- When @True@, only the 'useDependencies' packages are used, with other + -- packages in the environment hidden. + -- + -- This feature is here to support the setup stanza in .cabal files that + -- specifies explicit (and exclusive) dependencies, as well as the old + -- style with no dependencies. + useDependenciesExclusive :: Bool, + + -- | Should we build the Setup.hs with CPP version macros available? + -- We turn this on when we have a setup stanza in .cabal that declares + -- explicit setup dependencies. + -- + useVersionMacros :: Bool, + + -- Used only by 'cabal clean' on Windows. + -- + -- Note: win32 clean hack + ------------------------- + -- On Windows, running './dist/setup/setup clean' doesn't work because the + -- setup script will try to delete itself (which causes it to fail horribly, + -- unlike on Linux). So we have to move the setup exe out of the way first + -- and then delete it manually. This applies only to the external setup + -- method. + useWin32CleanHack :: Bool, + + -- Used only when calling setupWrapper from parallel code to serialise + -- access to the setup cache; should be Nothing otherwise. + -- + -- Note: setup exe cache + ------------------------ + -- When we are installing in parallel, we always use the external setup + -- method. Since compiling the setup script each time adds noticeable + -- overhead, we use a shared setup script cache + -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal + -- version) combination the cache holds a compiled setup script + -- executable. This only affects the Simple build type; for the Custom, + -- Configure and Make build types we always compile the setup script anew. + setupCacheLock :: Maybe Lock + } + +defaultSetupScriptOptions :: SetupScriptOptions +defaultSetupScriptOptions = SetupScriptOptions { + useCabalVersion = anyVersion, + useCabalSpecVersion = Nothing, + useCompiler = Nothing, + usePlatform = Nothing, + usePackageDB = [GlobalPackageDB, UserPackageDB], + usePackageIndex = Nothing, + useDependencies = [], + useDependenciesExclusive = False, + useVersionMacros = False, + useProgramConfig = emptyProgramConfiguration, + useDistPref = defaultDistPref, + useLoggingHandle = Nothing, + useWorkingDir = Nothing, + useWin32CleanHack = False, + forceExternalSetupMethod = False, + setupCacheLock = Nothing + } + +setupWrapper :: Verbosity + -> SetupScriptOptions + -> Maybe PackageDescription + -> CommandUI flags + -> (Version -> flags) + -> [String] + -> IO () +setupWrapper verbosity options mpkg cmd flags extraArgs = do + pkg <- maybe getPkg return mpkg + let setupMethod = determineSetupMethod options' buildType' + options' = options { + useCabalVersion = intersectVersionRanges + (useCabalVersion options) + (orLaterVersion (specVersion pkg)) + } + buildType' = fromMaybe Custom (buildType pkg) + mkArgs cabalLibVersion = commandName cmd + : commandShowOptions cmd (flags cabalLibVersion) + ++ extraArgs + checkBuildType buildType' + setupMethod verbosity options' (packageId pkg) buildType' mkArgs + where + getPkg = tryFindPackageDesc (fromMaybe "." (useWorkingDir options)) + >>= readPackageDescription verbosity + >>= return . 
packageDescription + + checkBuildType (UnknownBuildType name) = + die $ "The build-type '" ++ name ++ "' is not known. Use one of: " + ++ intercalate ", " (map display knownBuildTypes) ++ "." + checkBuildType _ = return () + +-- | Decide if we're going to be able to do a direct internal call to the +-- entry point in the Cabal library or if we're going to have to compile +-- and execute an external Setup.hs script. +-- +determineSetupMethod :: SetupScriptOptions -> BuildType -> SetupMethod +determineSetupMethod options buildType' + -- This order is picked so that it's stable. The build type and + -- required cabal version are external info, coming from .cabal + -- files and the command line. Those do switch between the + -- external and self & internal methods, but that info itself can + -- be considered stable. The logging and force-external conditions + -- are internally generated choices but now these only switch + -- between the self and internal setup methods, which are + -- consistent with each other. + | buildType' == Custom = externalSetupMethod + | maybe False (cabalVersion /=) + (useCabalSpecVersion options) + || not (cabalVersion `withinRange` + useCabalVersion options) = externalSetupMethod + | isJust (useLoggingHandle options) + -- Forcing is done to use an external process e.g. due to parallel + -- build concerns. + || forceExternalSetupMethod options = selfExecSetupMethod + | otherwise = internalSetupMethod + +type SetupMethod = Verbosity + -> SetupScriptOptions + -> PackageIdentifier + -> BuildType + -> (Version -> [String]) -> IO () + +-- ------------------------------------------------------------ +-- * Internal SetupMethod +-- ------------------------------------------------------------ + +internalSetupMethod :: SetupMethod +internalSetupMethod verbosity options _ bt mkargs = do + let args = mkargs cabalVersion + debug verbosity $ "Using internal setup method with build-type " ++ show bt + ++ " and args:\n " ++ show args + inDir (useWorkingDir options) $ + buildTypeAction bt args + +buildTypeAction :: BuildType -> ([String] -> IO ()) +buildTypeAction Simple = Simple.defaultMainArgs +buildTypeAction Configure = Simple.defaultMainWithHooksArgs + Simple.autoconfUserHooks +buildTypeAction Make = Make.defaultMainArgs +buildTypeAction Custom = error "buildTypeAction Custom" +buildTypeAction (UnknownBuildType _) = error "buildTypeAction UnknownBuildType" + +-- ------------------------------------------------------------ +-- * Self-Exec SetupMethod +-- ------------------------------------------------------------ + +selfExecSetupMethod :: SetupMethod +selfExecSetupMethod verbosity options _pkg bt mkargs = do + let args = ["act-as-setup", + "--build-type=" ++ display bt, + "--"] ++ mkargs cabalVersion + debug verbosity $ "Using self-exec internal setup method with build-type " + ++ show bt ++ " and args:\n " ++ show args + path <- getExecutablePath + info verbosity $ unwords (path : args) + case useLoggingHandle options of + Nothing -> return () + Just logHandle -> info verbosity $ "Redirecting build log to " + ++ show logHandle + + searchpath <- programSearchPathAsPATHVar + (getProgramSearchPath (useProgramConfig options)) + env <- getEffectiveEnvironment [("PATH", Just searchpath)] + + process <- runProcess path args + (useWorkingDir options) env Nothing + (useLoggingHandle options) (useLoggingHandle options) + exitCode <- waitForProcess process + unless (exitCode == ExitSuccess) $ exitWith exitCode + +-- ------------------------------------------------------------ +-- * 
External SetupMethod +-- ------------------------------------------------------------ + +externalSetupMethod :: SetupMethod +externalSetupMethod verbosity options pkg bt mkargs = do + debug verbosity $ "Using external setup method with build-type " ++ show bt + debug verbosity $ "Using explicit dependencies: " + ++ show (useDependenciesExclusive options) + createDirectoryIfMissingVerbose verbosity True setupDir + (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse + debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion + path <- if useCachedSetupExecutable + then getCachedSetupExecutable options' + cabalLibVersion mCabalLibInstalledPkgId + else compileSetupExecutable options' + cabalLibVersion mCabalLibInstalledPkgId False + invokeSetupScript options' path (mkargs cabalLibVersion) + + where + workingDir = case fromMaybe "" (useWorkingDir options) of + [] -> "." + dir -> dir + setupDir = workingDir useDistPref options "setup" + setupVersionFile = setupDir "setup" <.> "version" + setupHs = setupDir "setup" <.> "hs" + setupProgFile = setupDir "setup" <.> exeExtension + platform = fromMaybe buildPlatform (usePlatform options) + + useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) + + maybeGetInstalledPackages :: SetupScriptOptions -> Compiler + -> ProgramConfiguration -> IO InstalledPackageIndex + maybeGetInstalledPackages options' comp conf = + case usePackageIndex options' of + Just index -> return index + Nothing -> getInstalledPackages verbosity + comp (usePackageDB options') conf + + -- Choose the version of Cabal to use if the setup script has a dependency on + -- Cabal, and possibly update the setup script options. The version also + -- determines how to filter the flags to Setup. + -- + -- We first check whether the dependency solver has specified a Cabal version. + -- If it has, we use the solver's version without looking at the installed + -- package index (See issue #3436). Otherwise, we pick the Cabal version by + -- checking 'useCabalSpecVersion', then the saved version, and finally the + -- versions available in the index. + -- + -- The version chosen here must match the one used in 'compileSetupExecutable' + -- (See issue #3433). + cabalLibVersionToUse :: IO (Version, Maybe UnitId + ,SetupScriptOptions) + cabalLibVersionToUse = + case find (hasCabal . snd) (useDependencies options) of + Just (unitId, pkgId) -> do + let version = pkgVersion pkgId + updateSetupScript version bt + writeSetupVersionFile version + return (version, Just unitId, options) + Nothing -> + case useCabalSpecVersion options of + Just version -> do + updateSetupScript version bt + writeSetupVersionFile version + return (version, Nothing, options) + Nothing -> do + savedVer <- savedVersion + case savedVer of + Just version | version `withinRange` useCabalVersion options + -> do updateSetupScript version bt + -- Does the previously compiled setup executable still exist + -- and is it up-to date? + useExisting <- canUseExistingSetup version + if useExisting + then return (version, Nothing, options) + else installedVersion + _ -> installedVersion + where + -- This check duplicates the checks in 'getCachedSetupExecutable' / + -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice + -- because the selected Cabal version may change as a result of this + -- check. 
+ canUseExistingSetup :: Version -> IO Bool + canUseExistingSetup version = + if useCachedSetupExecutable + then do + (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version + doesFileExist cachedSetupProgFile + else + (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs + <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile + + writeSetupVersionFile :: Version -> IO () + writeSetupVersionFile version = + writeFile setupVersionFile (show version ++ "\n") + + hasCabal (PackageIdentifier (PackageName "Cabal") _) = True + hasCabal _ = False + + installedVersion :: IO (Version, Maybe UnitId + ,SetupScriptOptions) + installedVersion = do + (comp, conf, options') <- configureCompiler options + (version, mipkgid, options'') <- installedCabalVersion options' comp conf + updateSetupScript version bt + writeSetupVersionFile version + return (version, mipkgid, options'') + + savedVersion :: IO (Maybe Version) + savedVersion = do + versionString <- readFile setupVersionFile `catchIO` \_ -> return "" + case reads versionString of + [(version,s)] | all isSpace s -> return (Just version) + _ -> return Nothing + + -- | Update a Setup.hs script, creating it if necessary. + updateSetupScript :: Version -> BuildType -> IO () + updateSetupScript _ Custom = do + useHs <- doesFileExist customSetupHs + useLhs <- doesFileExist customSetupLhs + unless (useHs || useLhs) $ die + "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." + let src = (if useHs then customSetupHs else customSetupLhs) + srcNewer <- src `moreRecentFile` setupHs + when srcNewer $ if useHs + then copyFileVerbose verbosity src setupHs + else runSimplePreProcessor ppUnlit src setupHs verbosity + where + customSetupHs = workingDir "Setup.hs" + customSetupLhs = workingDir "Setup.lhs" + + updateSetupScript cabalLibVersion _ = + rewriteFile setupHs (buildTypeScript cabalLibVersion) + + buildTypeScript :: Version -> String + buildTypeScript cabalLibVersion = case bt of + Simple -> "import Distribution.Simple; main = defaultMain\n" + Configure -> "import Distribution.Simple; main = defaultMainWithHooks " + ++ if cabalLibVersion >= Version [1,3,10] [] + then "autoconfUserHooks\n" + else "defaultUserHooks\n" + Make -> "import Distribution.Make; main = defaultMain\n" + Custom -> error "buildTypeScript Custom" + UnknownBuildType _ -> error "buildTypeScript UnknownBuildType" + + installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramConfiguration + -> IO (Version, Maybe UnitId + ,SetupScriptOptions) + installedCabalVersion options' compiler conf = do + index <- maybeGetInstalledPackages options' compiler conf + let cabalDep = Dependency (PackageName "Cabal") (useCabalVersion options') + options'' = options' { usePackageIndex = Just index } + case PackageIndex.lookupDependency index cabalDep of + [] -> die $ "The package '" ++ display (packageName pkg) + ++ "' requires Cabal library version " + ++ display (useCabalVersion options) + ++ " but no suitable version is installed." + pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs + in return (packageVersion ipkginfo + ,Just . installedUnitId $ ipkginfo, options'') + + bestVersion :: (a -> Version) -> [a] -> a + bestVersion f = firstMaximumBy (comparing (preference . f)) + where + -- Like maximumBy, but picks the first maximum element instead of the + -- last. In general, we expect the preferred version to go first in the + -- list. 
For the default case, this has the effect of choosing the version + -- installed in the user package DB instead of the global one. See #1463. + -- + -- Note: firstMaximumBy could be written as just + -- `maximumBy cmp . reverse`, but the problem is that the behaviour of + -- maximumBy is not fully specified in the case when there is not a single + -- greatest element. + firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a + firstMaximumBy _ [] = + error "Distribution.Client.firstMaximumBy: empty list" + firstMaximumBy cmp xs = foldl1' maxBy xs + where + maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } + + preference version = (sameVersion, sameMajorVersion + ,stableVersion, latestVersion) + where + sameVersion = version == cabalVersion + sameMajorVersion = majorVersion version == majorVersion cabalVersion + majorVersion = take 2 . versionBranch + stableVersion = case versionBranch version of + (_:x:_) -> even x + _ -> False + latestVersion = version + + configureCompiler :: SetupScriptOptions + -> IO (Compiler, ProgramConfiguration, SetupScriptOptions) + configureCompiler options' = do + (comp, conf) <- case useCompiler options' of + Just comp -> return (comp, useProgramConfig options') + Nothing -> do (comp, _, conf) <- + configCompilerEx (Just GHC) Nothing Nothing + (useProgramConfig options') verbosity + return (comp, conf) + -- Whenever we need to call configureCompiler, we also need to access the + -- package index, so let's cache it in SetupScriptOptions. + index <- maybeGetInstalledPackages options' comp conf + return (comp, conf, options' { useCompiler = Just comp, + usePackageIndex = Just index, + useProgramConfig = conf }) + + -- | Path to the setup exe cache directory and path to the cached setup + -- executable. + cachedSetupDirAndProg :: SetupScriptOptions -> Version + -> IO (FilePath, FilePath) + cachedSetupDirAndProg options' cabalLibVersion = do + cabalDir <- defaultCabalDir + let setupCacheDir = cabalDir "setup-exe-cache" + cachedSetupProgFile = setupCacheDir + ("setup-" ++ buildTypeString ++ "-" + ++ cabalVersionString ++ "-" + ++ platformString ++ "-" + ++ compilerVersionString) + <.> exeExtension + return (setupCacheDir, cachedSetupProgFile) + where + buildTypeString = show bt + cabalVersionString = "Cabal-" ++ (display cabalLibVersion) + compilerVersionString = display $ + fromMaybe buildCompilerId + (fmap compilerId . useCompiler $ options') + platformString = display platform + + -- | Look up the setup executable in the cache; update the cache if the setup + -- executable is not found. + getCachedSetupExecutable :: SetupScriptOptions + -> Version -> Maybe UnitId + -> IO FilePath + getCachedSetupExecutable options' cabalLibVersion + maybeCabalLibInstalledPkgId = do + (setupCacheDir, cachedSetupProgFile) <- + cachedSetupDirAndProg options' cabalLibVersion + cachedSetupExists <- doesFileExist cachedSetupProgFile + if cachedSetupExists + then debug verbosity $ + "Found cached setup executable: " ++ cachedSetupProgFile + else criticalSection' $ do + -- The cache may have been populated while we were waiting. + cachedSetupExists' <- doesFileExist cachedSetupProgFile + if cachedSetupExists' + then debug verbosity $ + "Found cached setup executable: " ++ cachedSetupProgFile + else do + debug verbosity $ "Setup executable not found in the cache." 
+ src <- compileSetupExecutable options' + cabalLibVersion maybeCabalLibInstalledPkgId True + createDirectoryIfMissingVerbose verbosity True setupCacheDir + installExecutableFile verbosity src cachedSetupProgFile + -- Do not strip if we're using GHCJS, since the result may be a script + when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ + Strip.stripExe verbosity platform (useProgramConfig options') + cachedSetupProgFile + return cachedSetupProgFile + where + criticalSection' = fromMaybe id + (fmap criticalSection $ setupCacheLock options') + + -- | If the Setup.hs is out of date wrt the executable then recompile it. + -- Currently this is GHC/GHCJS only. It should really be generalised. + -- + compileSetupExecutable :: SetupScriptOptions + -> Version -> Maybe UnitId -> Bool + -> IO FilePath + compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId + forceCompile = do + setupHsNewer <- setupHs `moreRecentFile` setupProgFile + cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile + let outOfDate = setupHsNewer || cabalVersionNewer + when (outOfDate || forceCompile) $ do + debug verbosity "Setup executable needs to be updated, compiling..." + (compiler, conf, options'') <- configureCompiler options' + let cabalPkgid = PackageIdentifier (PackageName "Cabal") cabalLibVersion + (program, extraOpts) + = case compilerFlavor compiler of + GHCJS -> (ghcjsProgram, ["-build-runner"]) + _ -> (ghcProgram, ["-threaded"]) + cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)]) + maybeCabalLibInstalledPkgId + + -- With 'useDependenciesExclusive' we enforce the deps specified, + -- so only the given ones can be used. Otherwise we allow the use + -- of packages in the ambient environment, and add on a dep on the + -- Cabal library (unless 'useDependencies' already contains one). + -- + -- With 'useVersionMacros' we use a version CPP macros .h file. + -- + -- Both of these options should be enabled for packages that have + -- opted-in and declared a custom-settup stanza. + -- + hasCabal (_, PackageIdentifier (PackageName "Cabal") _) = True + hasCabal _ = False + + selectedDeps | useDependenciesExclusive options' + = useDependencies options' + | otherwise = useDependencies options' ++ + if any hasCabal (useDependencies options') + then [] + else cabalDep + addRenaming (ipid, pid) = (ipid, pid, defaultRenaming) + cppMacrosFile = setupDir "setup_macros.h" + ghcOptions = mempty { + ghcOptVerbosity = Flag verbosity + , ghcOptMode = Flag GhcModeMake + , ghcOptInputFiles = toNubListR [setupHs] + , ghcOptOutputFile = Flag setupProgFile + , ghcOptObjDir = Flag setupDir + , ghcOptHiDir = Flag setupDir + , ghcOptSourcePathClear = Flag True + , ghcOptSourcePath = case bt of + Custom -> toNubListR [workingDir] + _ -> mempty + , ghcOptPackageDBs = usePackageDB options'' + , ghcOptHideAllPackages = Flag (useDependenciesExclusive options') + , ghcOptCabal = Flag (useDependenciesExclusive options') + , ghcOptPackages = toNubListR $ map addRenaming selectedDeps + , ghcOptCppIncludes = toNubListR [ cppMacrosFile + | useVersionMacros options' ] + , ghcOptExtra = toNubListR extraOpts + } + let ghcCmdLine = renderGhcOptions compiler platform ghcOptions + when (useVersionMacros options') $ + rewriteFile cppMacrosFile (generatePackageVersionMacros + [ pid | (_ipid, pid) <- selectedDeps ]) + case useLoggingHandle options of + Nothing -> runDbProgram verbosity program conf ghcCmdLine + + -- If build logging is enabled, redirect compiler output to the log file. 
+ (Just logHandle) -> do output <- getDbProgramOutput verbosity program + conf ghcCmdLine + hPutStr logHandle output + return setupProgFile + + invokeSetupScript :: SetupScriptOptions -> FilePath -> [String] -> IO () + invokeSetupScript options' path args = do + info verbosity $ unwords (path : args) + case useLoggingHandle options' of + Nothing -> return () + Just logHandle -> info verbosity $ "Redirecting build log to " + ++ show logHandle + + -- Since useWorkingDir can change the relative path, the path argument must + -- be turned into an absolute path. On some systems, runProcess will take + -- path as relative to the new working directory instead of the current + -- working directory. + path' <- tryCanonicalizePath path + + -- See 'Note: win32 clean hack' above. +#if mingw32_HOST_OS + -- setupProgFile may not exist if we're using a cached program + setupProgFile' <- canonicalizePathNoThrow setupProgFile + let win32CleanHackNeeded = (useWin32CleanHack options') + -- Skip when a cached setup script is used. + && setupProgFile' `equalFilePath` path' + if win32CleanHackNeeded then doWin32CleanHack path' else doInvoke path' +#else + doInvoke path' +#endif + + where + doInvoke path' = do + searchpath <- programSearchPathAsPATHVar + (getProgramSearchPath (useProgramConfig options')) + env <- getEffectiveEnvironment [("PATH", Just searchpath)] + + process <- runProcess path' args + (useWorkingDir options') env Nothing + (useLoggingHandle options') (useLoggingHandle options') + exitCode <- waitForProcess process + unless (exitCode == ExitSuccess) $ exitWith exitCode + +#if mingw32_HOST_OS + doWin32CleanHack path' = do + info verbosity $ "Using the Win32 clean hack." + -- Recursively removes the temp dir on exit. + withTempDirectory verbosity workingDir "cabal-tmp" $ \tmpDir -> + bracket (moveOutOfTheWay tmpDir path') + (maybeRestore path') + doInvoke + + moveOutOfTheWay tmpDir path' = do + let newPath = tmpDir "setup" <.> exeExtension + Win32.moveFile path' newPath + return newPath + + maybeRestore oldPath path' = do + let oldPathDir = takeDirectory oldPath + oldPathDirExists <- doesDirectoryExist oldPathDir + -- 'setup clean' didn't complete, 'dist/setup' still exists. + when oldPathDirExists $ + Win32.moveFile path' oldPath +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/SrcDist.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/SrcDist.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/SrcDist.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,189 @@ +{-# LANGUAGE NondecreasingIndentation #-} +-- Implements the \"@.\/cabal sdist@\" command, which creates a source +-- distribution for this package. That is, packs up the source code +-- into a tarball, making use of the corresponding Cabal module. 
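-- [Editor's note: the sketch below is an illustration added for this review, not
--  part of the patch.  It shows the artifact-naming convention the module that
--  follows implements: the source tarball is named after the package id and
--  placed under the dist prefix (see 'tarBallName' and 'createTarGzArchive'
--  further down).  The name 'sketchSdistTarballPath' and the "dist" / foo-1.2.3
--  examples are made up.]

import Distribution.Package (packageId)
import Distribution.PackageDescription (PackageDescription)
import Distribution.Text (display)
import System.FilePath ((</>), (<.>))

sketchSdistTarballPath :: FilePath -> PackageDescription -> FilePath
sketchSdistTarballPath distPref pkg =
  distPref </> display (packageId pkg) <.> "tar.gz"
  -- e.g. with distPref = "dist" and a package foo-1.2.3 this is
  -- "dist/foo-1.2.3.tar.gz", matching the path 'createTarGzArchive' reports.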
+module Distribution.Client.SrcDist ( + sdist, + allPackageSourceFiles + ) where + + +import Distribution.Client.SetupWrapper + ( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper ) +import Distribution.Client.Tar (createTarGzFile) + +import Distribution.Package + ( Package(..), packageName ) +import Distribution.PackageDescription + ( PackageDescription ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, defaultPackageDesc + , warn, die, notice, withTempDirectory ) +import Distribution.Client.Setup + ( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) ) +import Distribution.Simple.Setup + ( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault + , defaultSDistFlags ) +import Distribution.Simple.BuildPaths ( srcPref) +import Distribution.Simple.Program (requireProgram, simpleProgram, programPath) +import Distribution.Simple.Program.Db (emptyProgramDb) +import Distribution.Text ( display ) +import Distribution.Verbosity (Verbosity, normal, lessVerbose) +import Distribution.Version (Version(..), orLaterVersion) + +import Distribution.Client.Utils + (tryFindAddSourcePackageDesc) +import Distribution.Compat.Exception (catchIO) + +import System.FilePath ((), (<.>)) +import Control.Monad (when, unless, liftM) +import System.Directory (doesFileExist, removeFile, canonicalizePath, getTemporaryDirectory) +import System.Process (runProcess, waitForProcess) +import System.Exit (ExitCode(..)) +import Control.Exception (IOException, evaluate) + +-- |Create a source distribution. +sdist :: SDistFlags -> SDistExFlags -> IO () +sdist flags exflags = do + pkg <- liftM flattenPackageDescription + (readPackageDescription verbosity =<< defaultPackageDesc verbosity) + let withDir = if not needMakeArchive then (\f -> f tmpTargetDir) + else withTempDirectory verbosity tmpTargetDir "sdist." + -- 'withTempDir' fails if we don't create 'tmpTargetDir'... + when needMakeArchive $ + createDirectoryIfMissingVerbose verbosity True tmpTargetDir + withDir $ \tmpDir -> do + let outDir = if isOutDirectory then tmpDir else tmpDir tarBallName pkg + flags' = (if not needMakeArchive then flags + else flags { sDistDirectory = Flag outDir }) + unless isListSources $ + createDirectoryIfMissingVerbose verbosity True outDir + + -- Run 'setup sdist --output-directory=tmpDir' (or + -- '--list-source'/'--output-directory=someOtherDir') in case we were passed + -- those options. + setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') [] + + -- Unless we were given --list-sources or --output-directory ourselves, + -- create an archive. + when needMakeArchive $ + createArchive verbosity pkg tmpDir distPref + + when isOutDirectory $ + notice verbosity $ "Source directory created: " ++ tmpTargetDir + + when isListSources $ + notice verbosity $ "List of package sources written to file '" + ++ (fromFlag . sDistListSources $ flags) ++ "'" + + where + flagEnabled f = not . null . flagToList . 
f $ flags + + isListSources = flagEnabled sDistListSources + isOutDirectory = flagEnabled sDistDirectory + needMakeArchive = not (isListSources || isOutDirectory) + verbosity = fromFlag (sDistVerbosity flags) + distPref = fromFlag (sDistDistPref flags) + tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags) + setupOpts = defaultSetupScriptOptions { + useDistPref = distPref, + -- The '--output-directory' sdist flag was introduced in Cabal 1.12, and + -- '--list-sources' in 1.17. + useCabalVersion = if isListSources + then orLaterVersion $ Version [1,17,0] [] + else orLaterVersion $ Version [1,12,0] [] + } + format = fromFlag (sDistFormat exflags) + createArchive = case format of + TargzFormat -> createTarGzArchive + ZipFormat -> createZipArchive + +tarBallName :: PackageDescription -> String +tarBallName = display . packageId + +-- | Create a tar.gz archive from a tree of source files. +createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath + -> IO () +createTarGzArchive verbosity pkg tmpDir targetPref = do + createTarGzFile tarBallFilePath tmpDir (tarBallName pkg) + notice verbosity $ "Source tarball created: " ++ tarBallFilePath + where + tarBallFilePath = targetPref tarBallName pkg <.> "tar.gz" + +-- | Create a zip archive from a tree of source files. +createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath + -> IO () +createZipArchive verbosity pkg tmpDir targetPref = do + let dir = tarBallName pkg + zipfile = targetPref dir <.> "zip" + (zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb + + -- zip has an annoying habit of updating the target rather than creating + -- it from scratch. While that might sound like an optimisation, it doesn't + -- remove files already in the archive that are no longer present in the + -- uncompressed tree. + alreadyExists <- doesFileExist zipfile + when alreadyExists $ removeFile zipfile + + -- We call zip with a different CWD, so have to make the path + -- absolute. Can't just use 'canonicalizePath zipfile' since this function + -- requires its argument to refer to an existing file. + zipfileAbs <- fmap ( dir <.> "zip") . canonicalizePath $ targetPref + + --TODO: use runProgramInvocation, but has to be able to set CWD + hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir] + (Just tmpDir) + Nothing Nothing Nothing Nothing + exitCode <- waitForProcess hnd + unless (exitCode == ExitSuccess) $ + die $ "Generating the zip file failed " + ++ "(zip returned exit code " ++ show exitCode ++ ")" + notice verbosity $ "Source zip archive created: " ++ zipfile + where + zipProgram = simpleProgram "zip" + +-- | List all source files of a given add-source dependency. Exits with error if +-- something is wrong (e.g. there is no .cabal file in the given directory). +allPackageSourceFiles :: Verbosity -> FilePath -> IO [FilePath] +allPackageSourceFiles verbosity packageDir = do + pkg <- do + let err = "Error reading source files of package." + desc <- tryFindAddSourcePackageDesc packageDir err + flattenPackageDescription `fmap` readPackageDescription verbosity desc + globalTmp <- getTemporaryDirectory + withTempDirectory verbosity globalTmp "cabal-list-sources." 
$ \tempDir -> do + let file = tempDir "cabal-sdist-list-sources" + flags = defaultSDistFlags { + sDistVerbosity = Flag $ if verbosity == normal + then lessVerbose verbosity else verbosity, + sDistListSources = Flag file + } + setupOpts = defaultSetupScriptOptions { + -- 'sdist --list-sources' was introduced in Cabal 1.18. + useCabalVersion = orLaterVersion $ Version [1,18,0] [], + useWorkingDir = Just packageDir + } + + doListSources :: IO [FilePath] + doListSources = do + setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) [] + fmap lines . readFile $ file + + onFailedListSources :: IOException -> IO () + onFailedListSources e = do + warn verbosity $ + "Could not list sources of the package '" + ++ display (packageName pkg) ++ "'." + warn verbosity $ + "Exception was: " ++ show e + + -- Run setup sdist --list-sources=TMPFILE + r <- doListSources `catchIO` (\e -> onFailedListSources e >> return []) + -- Ensure that we've closed the 'readFile' handle before we exit the + -- temporary directory. + _ <- evaluate (length r) + return r diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Targets.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Targets.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Targets.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,818 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Targets +-- Copyright : (c) Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified targets +----------------------------------------------------------------------------- +module Distribution.Client.Targets ( + -- * User targets + UserTarget(..), + readUserTargets, + + -- * Package specifiers + PackageSpecifier(..), + pkgSpecifierTarget, + pkgSpecifierConstraints, + + -- * Resolving user targets to package specifiers + resolveUserTargets, + + -- ** Detailed interface + UserTargetProblem(..), + readUserTarget, + reportUserTargetProblems, + expandUserTarget, + + PackageTarget(..), + fetchPackageTarget, + readPackageTarget, + + PackageTargetProblem(..), + reportPackageTargetProblems, + + disambiguatePackageTargets, + disambiguatePackageName, + + -- * User constraints + UserConstraint(..), + userConstraintPackageName, + readUserConstraint, + userToPackageConstraint, + dispFlagAssignment, + parseFlagAssignment, + + ) where + +import Distribution.Package + ( Package(..), PackageName(..) + , PackageIdentifier(..), packageName, packageVersion + , Dependency(Dependency) ) +import Distribution.Client.Types + ( SourcePackage(..), PackageLocation(..), OptionalStanza(..) ) +import Distribution.Client.Dependency.Types + ( PackageConstraint(..), ConstraintSource(..) + , LabeledPackageConstraint(..) 
) + +import qualified Distribution.Client.World as World +import Distribution.Client.PackageIndex (PackageIndex) +import qualified Distribution.Client.PackageIndex as PackageIndex +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.FetchUtils +import Distribution.Client.Utils ( tryFindPackageDesc ) +import Distribution.Client.GlobalFlags + ( RepoContext(..) ) + +import Distribution.PackageDescription + ( GenericPackageDescription, FlagName(..), FlagAssignment ) +import Distribution.PackageDescription.Parse + ( readPackageDescription, parsePackageDescription, ParseResult(..) ) +import Distribution.Version + ( Version(Version), thisVersion, anyVersion, isAnyVersion + , VersionRange ) +import Distribution.Text + ( Text(..), display ) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils + ( die, warn, intercalate, fromUTF8, lowercase, ignoreBOM ) + +import Data.List + ( find, nub ) +import Data.Maybe + ( listToMaybe ) +import Data.Either + ( partitionEithers ) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid + ( Monoid(..) ) +#endif +import qualified Data.Map as Map +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +import qualified Distribution.Client.GZipUtils as GZipUtils +import Control.Monad (liftM) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( (+++), (<++) ) +import qualified Distribution.Compat.Semigroup as Semi + ( Semigroup((<>)) ) +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint + ( (<>), (<+>) ) +import Data.Char + ( isSpace, isAlphaNum ) +import System.FilePath + ( takeExtension, dropExtension, takeDirectory, splitPath ) +import System.Directory + ( doesFileExist, doesDirectoryExist ) +import Network.URI + ( URI(..), URIAuth(..), parseAbsoluteURI ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary) + +-- ------------------------------------------------------------ +-- * User targets +-- ------------------------------------------------------------ + +-- | Various ways that a user may specify a package or package collection. +-- +data UserTarget = + + -- | A partially specified package, identified by name and possibly with + -- an exact version or a version constraint. + -- + -- > cabal install foo + -- > cabal install foo-1.0 + -- > cabal install 'foo < 2' + -- + UserTargetNamed Dependency + + -- | A special virtual package that refers to the collection of packages + -- recorded in the world file that the user specifically installed. + -- + -- > cabal install world + -- + | UserTargetWorld + + -- | A specific package that is unpacked in a local directory, often the + -- current directory. + -- + -- > cabal install . + -- > cabal install ../lib/other + -- + -- * Note: in future, if multiple @.cabal@ files are allowed in a single + -- directory then this will refer to the collection of packages. + -- + | UserTargetLocalDir FilePath + + -- | A specific local unpacked package, identified by its @.cabal@ file. 
+ -- + -- > cabal install foo.cabal + -- > cabal install ../lib/other/bar.cabal + -- + | UserTargetLocalCabalFile FilePath + + -- | A specific package that is available as a local tarball file + -- + -- > cabal install dist/foo-1.0.tar.gz + -- > cabal install ../build/baz-1.0.tar.gz + -- + | UserTargetLocalTarball FilePath + + -- | A specific package that is available as a remote tarball file + -- + -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz + -- + | UserTargetRemoteTarball URI + deriving (Show,Eq) + + +-- ------------------------------------------------------------ +-- * Package specifier +-- ------------------------------------------------------------ + +-- | A fully or partially resolved reference to a package. +-- +data PackageSpecifier pkg = + + -- | A partially specified reference to a package (either source or + -- installed). It is specified by package name and optionally some + -- additional constraints. Use a dependency resolver to pick a specific + -- package satisfying these constraints. + -- + NamedPackage PackageName [PackageConstraint] + + -- | A fully specified source package. + -- + | SpecificSourcePackage pkg + deriving (Eq, Show, Generic) + +instance Binary pkg => Binary (PackageSpecifier pkg) + +pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName +pkgSpecifierTarget (NamedPackage name _) = name +pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg + +pkgSpecifierConstraints :: Package pkg + => PackageSpecifier pkg -> [LabeledPackageConstraint] +pkgSpecifierConstraints (NamedPackage _ constraints) = map toLpc constraints + where + toLpc pc = LabeledPackageConstraint pc ConstraintSourceUserTarget +pkgSpecifierConstraints (SpecificSourcePackage pkg) = + [LabeledPackageConstraint pc ConstraintSourceUserTarget] + where + pc = PackageConstraintVersion (packageName pkg) + (thisVersion (packageVersion pkg)) + +-- ------------------------------------------------------------ +-- * Parsing and checking user targets +-- ------------------------------------------------------------ + +readUserTargets :: Verbosity -> [String] -> IO [UserTarget] +readUserTargets _verbosity targetStrs = do + (problems, targets) <- liftM partitionEithers + (mapM readUserTarget targetStrs) + reportUserTargetProblems problems + return targets + + +data UserTargetProblem + = UserTargetUnexpectedFile String + | UserTargetNonexistantFile String + | UserTargetUnexpectedUriScheme String + | UserTargetUnrecognisedUri String + | UserTargetUnrecognised String + | UserTargetBadWorldPkg + deriving Show + +readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) +readUserTarget targetstr = + case testNamedTargets targetstr of + Just (Dependency (PackageName "world") verrange) + | verrange == anyVersion -> return (Right UserTargetWorld) + | otherwise -> return (Left UserTargetBadWorldPkg) + Just dep -> return (Right (UserTargetNamed dep)) + Nothing -> do + fileTarget <- testFileTargets targetstr + case fileTarget of + Just target -> return target + Nothing -> + case testUriTargets targetstr of + Just target -> return target + Nothing -> return (Left (UserTargetUnrecognised targetstr)) + where + testNamedTargets = readPToMaybe parseDependencyOrPackageId + + testFileTargets filename = do + isDir <- doesDirectoryExist filename + isFile <- doesFileExist filename + parentDirExists <- case takeDirectory filename of + [] -> return False + dir -> doesDirectoryExist dir + let result + | isDir + = Just (Right (UserTargetLocalDir filename)) + + | isFile && 
extensionIsTarGz filename + = Just (Right (UserTargetLocalTarball filename)) + + | isFile && takeExtension filename == ".cabal" + = Just (Right (UserTargetLocalCabalFile filename)) + + | isFile + = Just (Left (UserTargetUnexpectedFile filename)) + + | parentDirExists + = Just (Left (UserTargetNonexistantFile filename)) + + | otherwise + = Nothing + return result + + testUriTargets str = + case parseAbsoluteURI str of + Just uri@URI { + uriScheme = scheme, + uriAuthority = Just URIAuth { uriRegName = host } + } + | scheme /= "http:" && scheme /= "https:" -> + Just (Left (UserTargetUnexpectedUriScheme targetstr)) + + | null host -> + Just (Left (UserTargetUnrecognisedUri targetstr)) + + | otherwise -> + Just (Right (UserTargetRemoteTarball uri)) + _ -> Nothing + + extensionIsTarGz f = takeExtension f == ".gz" + && takeExtension (dropExtension f) == ".tar" + + parseDependencyOrPackageId :: Parse.ReadP r Dependency + parseDependencyOrPackageId = parse + +++ liftM pkgidToDependency parse + where + pkgidToDependency :: PackageIdentifier -> Dependency + pkgidToDependency p = case packageVersion p of + Version [] _ -> Dependency (packageName p) anyVersion + version -> Dependency (packageName p) (thisVersion version) + +readPToMaybe :: Parse.ReadP a a -> String -> Maybe a +readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str + , all isSpace s ] + + +reportUserTargetProblems :: [UserTargetProblem] -> IO () +reportUserTargetProblems problems = do + case [ target | UserTargetUnrecognised target <- problems ] of + [] -> return () + target -> die + $ unlines + [ "Unrecognised target '" ++ name ++ "'." + | name <- target ] + ++ "Targets can be:\n" + ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" + ++ " - the special 'world' target\n" + ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" + ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" + + case [ () | UserTargetBadWorldPkg <- problems ] of + [] -> return () + _ -> die "The special 'world' target does not take any version." + + case [ target | UserTargetNonexistantFile target <- problems ] of + [] -> return () + target -> die + $ unlines + [ "The file does not exist '" ++ name ++ "'." + | name <- target ] + + case [ target | UserTargetUnexpectedFile target <- problems ] of + [] -> return () + target -> die + $ unlines + [ "Unrecognised file target '" ++ name ++ "'." + | name <- target ] + ++ "File targets can be either package tarballs 'pkgname.tar.gz' " + ++ "or cabal files 'pkgname.cabal'." + + case [ target | UserTargetUnexpectedUriScheme target <- problems ] of + [] -> return () + target -> die + $ unlines + [ "URL target not supported '" ++ name ++ "'." + | name <- target ] + ++ "Only 'http://' and 'https://' URLs are supported." + + case [ target | UserTargetUnrecognisedUri target <- problems ] of + [] -> return () + target -> die + $ unlines + [ "Unrecognise URL target '" ++ name ++ "'." + | name <- target ] + + +-- ------------------------------------------------------------ +-- * Resolving user targets to package specifiers +-- ------------------------------------------------------------ + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. They can either be specific packages (local dirs, tarballs etc) +-- or they can be named packages (with or without version info). 
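-- [Editor's note: an illustration added for this review, not part of the patch.
--  It exercises 'readUserTarget' from above on a few typical command-line
--  strings; 'demoReadUserTarget' and the package/file names are made up, and
--  the directory and file cases depend on what actually exists on disk.]

import Distribution.Client.Targets (readUserTarget)

demoReadUserTarget :: IO ()
demoReadUserTarget = mapM_ (\str -> readUserTarget str >>= print)
  [ "foo"                                 -- UserTargetNamed, any version
  , "foo < 2"                             -- UserTargetNamed with a version range
  , "world"                               -- UserTargetWorld
  , "."                                   -- UserTargetLocalDir (the directory exists)
  , "foo.cabal"                           -- UserTargetLocalCabalFile, if that file exists
  , "dist/foo-1.0.tar.gz"                 -- UserTargetLocalTarball, if that file exists
  , "https://example.com/foo-1.0.tar.gz"  -- UserTargetRemoteTarball
  ]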
+-- +resolveUserTargets :: Package pkg + => Verbosity + -> RepoContext + -> FilePath + -> PackageIndex pkg + -> [UserTarget] + -> IO [PackageSpecifier SourcePackage] +resolveUserTargets verbosity repoCtxt worldFile available userTargets = do + + -- given the user targets, get a list of fully or partially resolved + -- package references + packageTargets <- mapM (readPackageTarget verbosity) + =<< mapM (fetchPackageTarget verbosity repoCtxt) . concat + =<< mapM (expandUserTarget worldFile) userTargets + + -- users are allowed to give package names case-insensitively, so we must + -- disambiguate named package references + let (problems, packageSpecifiers) = + disambiguatePackageTargets available availableExtra packageTargets + + -- use any extra specific available packages to help us disambiguate + availableExtra = [ packageName pkg + | PackageTargetLocation pkg <- packageTargets ] + + reportPackageTargetProblems verbosity problems + + return packageSpecifiers + + +-- ------------------------------------------------------------ +-- * Package targets +-- ------------------------------------------------------------ + +-- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. +-- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. +-- +data PackageTarget pkg = + PackageTargetNamed PackageName [PackageConstraint] UserTarget + + -- | A package identified by name, but case insensitively, so it needs + -- to be resolved to the right case-sensitive name. + | PackageTargetNamedFuzzy PackageName [PackageConstraint] UserTarget + | PackageTargetLocation pkg + deriving Show + + +-- ------------------------------------------------------------ +-- * Converting user targets to package targets +-- ------------------------------------------------------------ + +-- | Given a user-specified target, expand it to a bunch of package targets +-- (each of which refers to only one package). +-- +expandUserTarget :: FilePath + -> UserTarget + -> IO [PackageTarget (PackageLocation ())] +expandUserTarget worldFile userTarget = case userTarget of + + UserTargetNamed (Dependency name vrange) -> + let constraints = [ PackageConstraintVersion name vrange + | not (isAnyVersion vrange) ] + in return [PackageTargetNamedFuzzy name constraints userTarget] + + UserTargetWorld -> do + worldPkgs <- World.getContents worldFile + --TODO: should we warn if there are no world targets? 
+ return [ PackageTargetNamed name constraints userTarget + | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs + , let constraints = [ PackageConstraintVersion name vrange + | not (isAnyVersion vrange) ] + ++ [ PackageConstraintFlags name flags + | not (null flags) ] ] + + UserTargetLocalDir dir -> + return [ PackageTargetLocation (LocalUnpackedPackage dir) ] + + UserTargetLocalCabalFile file -> do + let dir = takeDirectory file + _ <- tryFindPackageDesc dir (localPackageError dir) -- just as a check + return [ PackageTargetLocation (LocalUnpackedPackage dir) ] + + UserTargetLocalTarball tarballFile -> + return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] + + UserTargetRemoteTarball tarballURL -> + return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] + +localPackageError :: FilePath -> String +localPackageError dir = + "Error reading local package.\nCouldn't find .cabal file in: " ++ dir + +-- ------------------------------------------------------------ +-- * Fetching and reading package targets +-- ------------------------------------------------------------ + + +-- | Fetch any remote targets so that they can be read. +-- +fetchPackageTarget :: Verbosity + -> RepoContext + -> PackageTarget (PackageLocation ()) + -> IO (PackageTarget (PackageLocation FilePath)) +fetchPackageTarget verbosity repoCtxt target = case target of + PackageTargetNamed n cs ut -> return (PackageTargetNamed n cs ut) + PackageTargetNamedFuzzy n cs ut -> return (PackageTargetNamedFuzzy n cs ut) + PackageTargetLocation location -> do + location' <- fetchPackage verbosity repoCtxt (fmap (const Nothing) location) + return (PackageTargetLocation location') + + +-- | Given a package target that has been fetched, read the .cabal file. +-- +-- This only affects targets given by location, named targets are unaffected. +-- +readPackageTarget :: Verbosity + -> PackageTarget (PackageLocation FilePath) + -> IO (PackageTarget SourcePackage) +readPackageTarget verbosity target = case target of + + PackageTargetNamed pkgname constraints userTarget -> + return (PackageTargetNamed pkgname constraints userTarget) + + PackageTargetNamedFuzzy pkgname constraints userTarget -> + return (PackageTargetNamedFuzzy pkgname constraints userTarget) + + PackageTargetLocation location -> case location of + + LocalUnpackedPackage dir -> do + pkg <- tryFindPackageDesc dir (localPackageError dir) >>= + readPackageDescription verbosity + return $ PackageTargetLocation $ + SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + packageSource = fmap Just location, + packageDescrOverride = Nothing + } + + LocalTarballPackage tarballFile -> + readTarballPackageTarget location tarballFile tarballFile + + RemoteTarballPackage tarballURL tarballFile -> + readTarballPackageTarget location tarballFile (show tarballURL) + + RepoTarballPackage _repo _pkgid _ -> + error "TODO: readPackageTarget RepoTarballPackage" + -- For repo tarballs this info should be obtained from the index. 
+ + where + readTarballPackageTarget location tarballFile tarballOriginalLoc = do + (filename, content) <- extractTarballPackageCabalFile + tarballFile tarballOriginalLoc + case parsePackageDescription' content of + Nothing -> die $ "Could not parse the cabal file " + ++ filename ++ " in " ++ tarballFile + Just pkg -> + return $ PackageTargetLocation $ + SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + packageSource = fmap Just location, + packageDescrOverride = Nothing + } + + extractTarballPackageCabalFile :: FilePath -> String + -> IO (FilePath, BS.ByteString) + extractTarballPackageCabalFile tarballFile tarballOriginalLoc = + either (die . formatErr) return + . check + . accumEntryMap + . Tar.filterEntries isCabalFile + . Tar.read + . GZipUtils.maybeDecompress + =<< BS.readFile tarballFile + where + formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg + + accumEntryMap = Tar.foldlEntries + (\m e -> Map.insert (Tar.entryTarPath e) e m) + Map.empty + + check (Left e) = Left (show e) + check (Right m) = case Map.elems m of + [] -> Left noCabalFile + [file] -> case Tar.entryContent file of + Tar.NormalFile content _ -> Right (Tar.entryPath file, content) + _ -> Left noCabalFile + _files -> Left multipleCabalFiles + where + noCabalFile = "No cabal file found" + multipleCabalFiles = "Multiple cabal files found" + + isCabalFile e = case splitPath (Tar.entryPath e) of + [ _dir, file] -> takeExtension file == ".cabal" + [".", _dir, file] -> takeExtension file == ".cabal" + _ -> False + + parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription + parsePackageDescription' content = + case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of + ParseOk _ pkg -> Just pkg + _ -> Nothing + + +-- ------------------------------------------------------------ +-- * Checking package targets +-- ------------------------------------------------------------ + +data PackageTargetProblem + = PackageNameUnknown PackageName UserTarget + | PackageNameAmbiguous PackageName [PackageName] UserTarget + deriving Show + + +-- | Users are allowed to give package names case-insensitively, so we must +-- disambiguate named package references. 
+-- +disambiguatePackageTargets :: Package pkg' + => PackageIndex pkg' + -> [PackageName] + -> [PackageTarget pkg] + -> ( [PackageTargetProblem] + , [PackageSpecifier pkg] ) +disambiguatePackageTargets availablePkgIndex availableExtra targets = + partitionEithers (map disambiguatePackageTarget targets) + where + disambiguatePackageTarget packageTarget = case packageTarget of + PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) + + PackageTargetNamed pkgname constraints userTarget + | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) + -> Left (PackageNameUnknown pkgname userTarget) + | otherwise -> Right (NamedPackage pkgname constraints) + + PackageTargetNamedFuzzy pkgname constraints userTarget -> + case disambiguatePackageName packageNameEnv pkgname of + None -> Left (PackageNameUnknown + pkgname userTarget) + Ambiguous pkgnames -> Left (PackageNameAmbiguous + pkgname pkgnames userTarget) + Unambiguous pkgname' -> Right (NamedPackage pkgname' constraints') + where + constraints' = map (renamePackageConstraint pkgname') constraints + + -- use any extra specific available packages to help us disambiguate + packageNameEnv :: PackageNameEnv + packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) + (extraPackageNameEnv availableExtra) + + +-- | Report problems to the user. That is, if there are any problems +-- then raise an exception. +reportPackageTargetProblems :: Verbosity + -> [PackageTargetProblem] -> IO () +reportPackageTargetProblems verbosity problems = do + case [ pkg | PackageNameUnknown pkg originalTarget <- problems + , not (isUserTagetWorld originalTarget) ] of + [] -> return () + pkgs -> die $ unlines + [ "There is no package named '" ++ display name ++ "'. " + | name <- pkgs ] + ++ "You may need to run 'cabal update' to get the latest " + ++ "list of available packages." + + case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of + [] -> return () + ambiguities -> die $ unlines + [ "The package name '" ++ display name + ++ "' is ambiguous. It could be: " + ++ intercalate ", " (map display matches) + | (name, matches) <- ambiguities ] + + case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of + [] -> return () + pkgs -> warn verbosity $ + "The following 'world' packages will be ignored because " + ++ "they refer to packages that cannot be found: " + ++ intercalate ", " (map display pkgs) ++ "\n" + ++ "You can suppress this warning by correcting the world file." + where + isUserTagetWorld UserTargetWorld = True; isUserTagetWorld _ = False + + +-- ------------------------------------------------------------ +-- * Disambiguating package names +-- ------------------------------------------------------------ + +data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a] + +-- | Given a package name and a list of matching names, figure out which one it +-- might be referring to. If there is an exact case-sensitive match then that's +-- ok. If it matches just one package case-insensitively then that's also ok. +-- The only problem is if it matches multiple packages case-insensitively, in +-- that case it is ambiguous. 
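-- [Editor's note: a sketch added for this review, not part of the patch.  Since
--  'PackageNameEnv' and 'extraPackageNameEnv' are not exported, it would have
--  to live inside this module; 'demoDisambiguation' and the package names are
--  made up.]
demoDisambiguation :: [MaybeAmbiguous PackageName]
demoDisambiguation =
  [ disambiguatePackageName env (PackageName "HTTP")     -- Unambiguous "HTTP": exact match
  , disambiguatePackageName env (PackageName "http")     -- Unambiguous "HTTP": unique case-insensitive match
  , disambiguatePackageName env (PackageName "foo")      -- Ambiguous ["Foo","FOO"]
  , disambiguatePackageName env (PackageName "missing")  -- None
  ]
  where
    env = extraPackageNameEnv (map PackageName ["HTTP", "Foo", "FOO"])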
+-- +disambiguatePackageName :: PackageNameEnv + -> PackageName + -> MaybeAmbiguous PackageName +disambiguatePackageName (PackageNameEnv pkgNameLookup) name = + case nub (pkgNameLookup name) of + [] -> None + [name'] -> Unambiguous name' + names -> case find (name==) names of + Just name' -> Unambiguous name' + Nothing -> Ambiguous names + + +newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) + +instance Monoid PackageNameEnv where + mempty = PackageNameEnv (const []) + mappend = (Semi.<>) + +instance Semi.Semigroup PackageNameEnv where + PackageNameEnv lookupA <> PackageNameEnv lookupB = + PackageNameEnv (\name -> lookupA name ++ lookupB name) + +indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv +indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup + where + pkgNameLookup (PackageName name) = + map fst (PackageIndex.searchByName pkgIndex name) + +extraPackageNameEnv :: [PackageName] -> PackageNameEnv +extraPackageNameEnv names = PackageNameEnv pkgNameLookup + where + pkgNameLookup (PackageName name) = + [ PackageName name' + | let lname = lowercase name + , PackageName name' <- names + , lowercase name' == lname ] + + +-- ------------------------------------------------------------ +-- * Package constraints +-- ------------------------------------------------------------ + +data UserConstraint = + UserConstraintVersion PackageName VersionRange + | UserConstraintInstalled PackageName + | UserConstraintSource PackageName + | UserConstraintFlags PackageName FlagAssignment + | UserConstraintStanzas PackageName [OptionalStanza] + deriving (Eq, Show, Generic) + +instance Binary UserConstraint + +userConstraintPackageName :: UserConstraint -> PackageName +userConstraintPackageName uc = case uc of + UserConstraintVersion name _ -> name + UserConstraintInstalled name -> name + UserConstraintSource name -> name + UserConstraintFlags name _ -> name + UserConstraintStanzas name _ -> name + +userToPackageConstraint :: UserConstraint -> PackageConstraint +-- At the moment, the types happen to be directly equivalent +userToPackageConstraint uc = case uc of + UserConstraintVersion name ver -> PackageConstraintVersion name ver + UserConstraintInstalled name -> PackageConstraintInstalled name + UserConstraintSource name -> PackageConstraintSource name + UserConstraintFlags name flags -> PackageConstraintFlags name flags + UserConstraintStanzas name stanzas -> PackageConstraintStanzas name stanzas + +renamePackageConstraint :: PackageName -> PackageConstraint -> PackageConstraint +renamePackageConstraint name pc = case pc of + PackageConstraintVersion _ ver -> PackageConstraintVersion name ver + PackageConstraintInstalled _ -> PackageConstraintInstalled name + PackageConstraintSource _ -> PackageConstraintSource name + PackageConstraintFlags _ flags -> PackageConstraintFlags name flags + PackageConstraintStanzas _ stanzas -> PackageConstraintStanzas name stanzas + +readUserConstraint :: String -> Either String UserConstraint +readUserConstraint str = + case readPToMaybe parse str of + Nothing -> Left msgCannotParse + Just c -> Right c + where + msgCannotParse = + "expected a package name followed by a constraint, which is " + ++ "either a version range, 'installed', 'source' or flags" + +instance Text UserConstraint where + disp (UserConstraintVersion pkgname verrange) = disp pkgname + <+> disp verrange + disp (UserConstraintInstalled pkgname) = disp pkgname + <+> Disp.text "installed" + disp (UserConstraintSource pkgname) = disp pkgname + <+> Disp.text "source" + disp 
(UserConstraintFlags pkgname flags) = disp pkgname + <+> dispFlagAssignment flags + disp (UserConstraintStanzas pkgname stanzas) = disp pkgname + <+> dispStanzas stanzas + where + dispStanzas = Disp.hsep . map dispStanza + dispStanza TestStanzas = Disp.text "test" + dispStanza BenchStanzas = Disp.text "bench" + + parse = parse >>= parseConstraint + where + parseConstraint pkgname = + ((parse >>= return . UserConstraintVersion pkgname) + +++ (do skipSpaces1 + _ <- Parse.string "installed" + return (UserConstraintInstalled pkgname)) + +++ (do skipSpaces1 + _ <- Parse.string "source" + return (UserConstraintSource pkgname)) + +++ (do skipSpaces1 + _ <- Parse.string "test" + return (UserConstraintStanzas pkgname [TestStanzas])) + +++ (do skipSpaces1 + _ <- Parse.string "bench" + return (UserConstraintStanzas pkgname [BenchStanzas]))) + <++ (do skipSpaces1 + flags <- parseFlagAssignment + return (UserConstraintFlags pkgname flags)) + +--TODO: [code cleanup] move these somewhere else +dispFlagAssignment :: FlagAssignment -> Disp.Doc +dispFlagAssignment = Disp.hsep . map dispFlagValue + where + dispFlagValue (f, True) = Disp.char '+' <> dispFlagName f + dispFlagValue (f, False) = Disp.char '-' <> dispFlagName f + dispFlagName (FlagName f) = Disp.text f + +parseFlagAssignment :: Parse.ReadP r FlagAssignment +parseFlagAssignment = Parse.sepBy1 parseFlagValue skipSpaces1 + where + parseFlagValue = + (do Parse.optional (Parse.char '+') + f <- parseFlagName + return (f, True)) + +++ (do _ <- Parse.char '-' + f <- parseFlagName + return (f, False)) + parseFlagName = liftM (FlagName . lowercase) ident + + ident :: Parse.ReadP r String + ident = Parse.munch1 identChar >>= \s -> check s >> return s + where + identChar c = isAlphaNum c || c == '_' || c == '-' + check ('-':_) = Parse.pfail + check _ = return () + +skipSpaces1 :: Parse.ReadP r () +skipSpaces1 = Parse.satisfy isSpace >> Parse.skipSpaces + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Tar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Tar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Tar.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,110 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Tar +-- Copyright : (c) 2007 Bjorn Bringert, +-- 2008 Andrea Vezzosi, +-- 2008-2009 Duncan Coutts +-- License : BSD3 +-- +-- Maintainer : duncan@community.haskell.org +-- Portability : portable +-- +-- Reading, writing and manipulating \"@.tar@\" archive files. 
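-- [Editor's note: a usage sketch added for this review, not part of the patch.
--  It drives the two high-level operations exported by the module declared just
--  below; 'repackExample', the "foo-1.0" directory and the file names are made
--  up.]

import Distribution.Client.Tar (createTarGzFile, extractTarGzFile)

repackExample :: IO ()
repackExample = do
  -- pack ./foo-1.0 (relative to the base directory ".") into foo-1.0.tar.gz
  createTarGzFile "foo-1.0.tar.gz" "." "foo-1.0"
  -- unpack it under ./unpacked, insisting that every entry stays inside the
  -- expected "foo-1.0" subdirectory (the tarbomb check)
  extractTarGzFile "unpacked" "foo-1.0" "foo-1.0.tar.gz"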
+-- +----------------------------------------------------------------------------- +module Distribution.Client.Tar ( + -- * @tar.gz@ operations + createTarGzFile, + extractTarGzFile, + + -- * Other local utils + buildTreeRefTypeCode, + buildTreeSnapshotTypeCode, + isBuildTreeRefTypeCode, + filterEntries, + filterEntriesM, + entriesToList, + ) where + +import qualified Data.ByteString.Lazy as BS +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Tar.Check as Tar +import qualified Codec.Compression.GZip as GZip +import qualified Distribution.Client.GZipUtils as GZipUtils + +import Control.Exception (Exception(..), throw) + +-- +-- * High level operations +-- + +createTarGzFile :: FilePath -- ^ Full Tarball path + -> FilePath -- ^ Base directory + -> FilePath -- ^ Directory to archive, relative to base dir + -> IO () +createTarGzFile tar base dir = + BS.writeFile tar . GZip.compress . Tar.write =<< Tar.pack base [dir] + +extractTarGzFile :: FilePath -- ^ Destination directory + -> FilePath -- ^ Expected subdir (to check for tarbombs) + -> FilePath -- ^ Tarball + -> IO () +extractTarGzFile dir expected tar = + Tar.unpack dir . Tar.checkTarbomb expected . Tar.read + . GZipUtils.maybeDecompress =<< BS.readFile tar + +instance (Exception a, Exception b) => Exception (Either a b) where + toException (Left e) = toException e + toException (Right e) = toException e + + fromException e = + case fromException e of + Just e' -> Just (Left e') + Nothing -> case fromException e of + Just e' -> Just (Right e') + Nothing -> Nothing + + +-- | Type code for the local build tree reference entry type. We don't use the +-- symbolic link entry type because it allows only 100 ASCII characters for the +-- path. +buildTreeRefTypeCode :: Tar.TypeCode +buildTreeRefTypeCode = 'C' + +-- | Type code for the local build tree snapshot entry type. +buildTreeSnapshotTypeCode :: Tar.TypeCode +buildTreeSnapshotTypeCode = 'S' + +-- | Is this a type code for a build tree reference? +isBuildTreeRefTypeCode :: Tar.TypeCode -> Bool +isBuildTreeRefTypeCode typeCode + | (typeCode == buildTreeRefTypeCode + || typeCode == buildTreeSnapshotTypeCode) = True + | otherwise = False + +filterEntries :: (Tar.Entry -> Bool) -> Tar.Entries e -> Tar.Entries e +filterEntries p = + Tar.foldEntries + (\e es -> if p e then Tar.Next e es else es) + Tar.Done + Tar.Fail + +filterEntriesM :: Monad m => (Tar.Entry -> m Bool) + -> Tar.Entries e -> m (Tar.Entries e) +filterEntriesM p = + Tar.foldEntries + (\entry rest -> do + keep <- p entry + xs <- rest + if keep + then return (Tar.Next entry xs) + else return xs) + (return Tar.Done) + (return . 
Tar.Fail) + +entriesToList :: Exception e => Tar.Entries e -> [Tar.Entry] +entriesToList = Tar.foldEntries (:) [] throw + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Types.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Types.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Types.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,371 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE BangPatterns #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Types +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Various common data types for the entire cabal-install system +----------------------------------------------------------------------------- +module Distribution.Client.Types where + +import Distribution.Package + ( PackageName, PackageId, Package(..) + , UnitId(..), mkUnitId + , HasUnitId(..), PackageInstalled(..) ) +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo ) +import Distribution.PackageDescription + ( Benchmark(..), GenericPackageDescription(..), FlagAssignment + , TestSuite(..) ) +import Distribution.PackageDescription.Configuration + ( mapTreeData ) +import Distribution.Client.PackageIndex + ( PackageIndex ) +import Distribution.Client.ComponentDeps + ( ComponentDeps ) +import qualified Distribution.Client.ComponentDeps as CD +import Distribution.Version + ( VersionRange ) +import Distribution.Text (display) + +import Data.Map (Map) +import Network.URI (URI(..), URIAuth(..), nullURI) +import Data.ByteString.Lazy (ByteString) +import Control.Exception + ( SomeException ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary(..)) + + +newtype Username = Username { unUsername :: String } +newtype Password = Password { unPassword :: String } + +-- | This is the information we get from a @00-index.tar.gz@ hackage index. +-- +data SourcePackageDb = SourcePackageDb { + packageIndex :: PackageIndex SourcePackage, + packagePreferences :: Map PackageName VersionRange +} + deriving (Eq, Generic) + +instance Binary SourcePackageDb + +-- ------------------------------------------------------------ +-- * Various kinds of information about packages +-- ------------------------------------------------------------ + +-- | Within Cabal the library we no longer have a @InstalledPackageId@ type. +-- That's because it deals with the compilers' notion of a registered library, +-- and those really are libraries not packages. Those are now named units. +-- +-- The package management layer does however deal with installed packages, as +-- whole packages not just as libraries. So we do still need a type for +-- installed package ids. At the moment however we track instaled packages via +-- their primary library, which is a unit id. In future this may change +-- slightly and we may distinguish these two types and have an explicit +-- conversion when we register units with the compiler. 
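-- [Editor's note: an illustration added for this review, not part of the patch.
--  It shows the shape of the temporary ids produced by 'fakeUnitId', defined a
--  little further below; 'fakeFooUnitId' and the package foo-1.0 are made up.]

import Distribution.Client.Types (fakeUnitId)
import Distribution.Package (PackageIdentifier(..), PackageName(..), UnitId)
import Distribution.Version (Version(..))

fakeFooUnitId :: UnitId
fakeFooUnitId = fakeUnitId (PackageIdentifier (PackageName "foo") (Version [1,0] []))
-- ".fake." is prepended to the displayed package id, so this renders as
-- ".fake.foo-1.0"; such ids are only install-plan keys and are never written out.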
+-- +type InstalledPackageId = UnitId + +installedPackageId :: HasUnitId pkg => pkg -> InstalledPackageId +installedPackageId = installedUnitId + +-- | Subclass of packages that have specific versioned dependencies. +-- +-- So for example a not-yet-configured package has dependencies on version +-- ranges, not specific versions. A configured or an already installed package +-- depends on exact versions. Some operations or data structures (like +-- dependency graphs) only make sense on this subclass of package types. +-- +class Package pkg => PackageFixedDeps pkg where + depends :: pkg -> ComponentDeps [UnitId] + +instance PackageFixedDeps InstalledPackageInfo where + depends = CD.fromInstalled . installedDepends + + +-- | In order to reuse the implementation of PackageIndex which relies on +-- 'UnitId', we need to be able to synthesize these IDs prior +-- to installation. Eventually, we'll move to a representation of +-- 'UnitId' which can be properly computed before compilation +-- (of course, it's a bit of a misnomer since the packages are not actually +-- installed yet.) In any case, we'll synthesize temporary installed package +-- IDs to use as keys during install planning. These should never be written +-- out! Additionally, they need to be guaranteed unique within the install +-- plan. +fakeUnitId :: PackageId -> UnitId +fakeUnitId = mkUnitId . (".fake."++) . display + +-- | A 'ConfiguredPackage' is a not-yet-installed package along with the +-- total configuration information. The configuration information is total in +-- the sense that it provides all the configuration information and so the +-- final configure process will be independent of the environment. +-- +data ConfiguredPackage = ConfiguredPackage + SourcePackage -- package info, including repo + FlagAssignment -- complete flag assignment for the package + [OptionalStanza] -- list of enabled optional stanzas for the package + (ComponentDeps [ConfiguredId]) + -- set of exact dependencies (installed or source). + -- These must be consistent with the 'buildDepends' + -- in the 'PackageDescription' that you'd get by + -- applying the flag assignment and optional stanzas. + deriving (Eq, Show, Generic) + +instance Binary ConfiguredPackage + +-- | A ConfiguredId is a package ID for a configured package. +-- +-- Once we configure a source package we know it's UnitId +-- (at least, in principle, even if we have to fake it currently). It is still +-- however useful in lots of places to also know the source ID for the package. +-- We therefore bundle the two. +-- +-- An already installed package of course is also "configured" (all it's +-- configuration parameters and dependencies have been specified). +-- +-- TODO: I wonder if it would make sense to promote this datatype to Cabal +-- and use it consistently instead of UnitIds? +data ConfiguredId = ConfiguredId { + confSrcId :: PackageId + , confInstId :: UnitId + } + deriving (Eq, Generic) + +instance Binary ConfiguredId + +instance Show ConfiguredId where + show = show . confSrcId + +instance Package ConfiguredId where + packageId = confSrcId + +instance HasUnitId ConfiguredId where + installedUnitId = confInstId + +instance Package ConfiguredPackage where + packageId (ConfiguredPackage pkg _ _ _) = packageId pkg + +instance PackageFixedDeps ConfiguredPackage where + depends (ConfiguredPackage _ _ _ deps) = fmap (map confInstId) deps + +instance HasUnitId ConfiguredPackage where + installedUnitId = fakeUnitId . 
packageId + +-- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be +-- installed already, hence itself ready to be installed. +data GenericReadyPackage srcpkg ipkg + = ReadyPackage + srcpkg -- see 'ConfiguredPackage'. + (ComponentDeps [ipkg]) -- Installed dependencies. + deriving (Eq, Show, Generic) + +type ReadyPackage = GenericReadyPackage ConfiguredPackage InstalledPackageInfo + +instance Package srcpkg => Package (GenericReadyPackage srcpkg ipkg) where + packageId (ReadyPackage srcpkg _deps) = packageId srcpkg + +instance (Package srcpkg, HasUnitId ipkg) => + PackageFixedDeps (GenericReadyPackage srcpkg ipkg) where + depends (ReadyPackage _ deps) = fmap (map installedUnitId) deps + +instance HasUnitId srcpkg => + HasUnitId (GenericReadyPackage srcpkg ipkg) where + installedUnitId (ReadyPackage pkg _) = installedUnitId pkg + +instance (Binary srcpkg, Binary ipkg) => Binary (GenericReadyPackage srcpkg ipkg) + + +-- | A package description along with the location of the package sources. +-- +data SourcePackage = SourcePackage { + packageInfoId :: PackageId, + packageDescription :: GenericPackageDescription, + packageSource :: PackageLocation (Maybe FilePath), + packageDescrOverride :: PackageDescriptionOverride + } + deriving (Eq, Show, Generic) + +instance Binary SourcePackage + +-- | We sometimes need to override the .cabal file in the tarball with +-- the newer one from the package index. +type PackageDescriptionOverride = Maybe ByteString + +instance Package SourcePackage where packageId = packageInfoId + +data OptionalStanza + = TestStanzas + | BenchStanzas + deriving (Eq, Ord, Enum, Bounded, Show, Generic) + +instance Binary OptionalStanza + +enableStanzas + :: [OptionalStanza] + -> GenericPackageDescription + -> GenericPackageDescription +enableStanzas stanzas gpkg = gpkg + { condBenchmarks = flagBenchmarks $ condBenchmarks gpkg + , condTestSuites = flagTests $ condTestSuites gpkg + } + where + enableTest t = t { testEnabled = TestStanzas `elem` stanzas } + enableBenchmark bm = bm { benchmarkEnabled = BenchStanzas `elem` stanzas } + flagBenchmarks = map (\(n, bm) -> (n, mapTreeData enableBenchmark bm)) + flagTests = map (\(n, t) -> (n, mapTreeData enableTest t)) + +-- ------------------------------------------------------------ +-- * Package locations and repositories +-- ------------------------------------------------------------ + +data PackageLocation local = + + -- | An unpacked package in the given dir, or current dir + LocalUnpackedPackage FilePath + + -- | A package as a tarball that's available as a local tarball + | LocalTarballPackage FilePath + + -- | A package as a tarball from a remote URI + | RemoteTarballPackage URI local + + -- | A package available as a tarball from a repository. + -- + -- It may be from a local repository or from a remote repository, with a + -- locally cached copy. 
ie a package available from hackage + | RepoTarballPackage Repo PackageId local + +--TODO: +-- * add support for darcs and other SCM style remote repos with a local cache +-- | ScmPackage + deriving (Show, Functor, Eq, Ord, Generic) + +instance Binary local => Binary (PackageLocation local) + +-- note, network-uri-2.6.0.3+ provide a Generic instance but earlier +-- versions do not, so we use manual Binary instances here +instance Binary URI where + put (URI a b c d e) = do put a; put b; put c; put d; put e + get = do !a <- get; !b <- get; !c <- get; !d <- get; !e <- get + return (URI a b c d e) + +instance Binary URIAuth where + put (URIAuth a b c) = do put a; put b; put c + get = do !a <- get; !b <- get; !c <- get; return (URIAuth a b c) + +data RemoteRepo = + RemoteRepo { + remoteRepoName :: String, + remoteRepoURI :: URI, + + -- | Enable secure access? + -- + -- 'Nothing' here represents "whatever the default is"; this is important + -- to allow for a smooth transition from opt-in to opt-out security + -- (once we switch to opt-out, all access to the central Hackage + -- repository should be secure by default) + remoteRepoSecure :: Maybe Bool, + + -- | Root key IDs (for bootstrapping) + remoteRepoRootKeys :: [String], + + -- | Threshold for verification during bootstrapping + remoteRepoKeyThreshold :: Int, + + -- | Normally a repo just specifies an HTTP or HTTPS URI, but as a + -- special case we may know a repo supports both and want to try HTTPS + -- if we can, but still allow falling back to HTTP. + -- + -- This field is not currently stored in the config file, but is filled + -- in automagically for known repos. + remoteRepoShouldTryHttps :: Bool + } + + deriving (Show, Eq, Ord, Generic) + +instance Binary RemoteRepo + +-- | Construct a partial 'RemoteRepo' value to fold the field parser list over. +emptyRemoteRepo :: String -> RemoteRepo +emptyRemoteRepo name = RemoteRepo name nullURI Nothing [] 0 False + +-- | Different kinds of repositories +-- +-- NOTE: It is important that this type remains serializable. +data Repo = + -- | Local repositories + RepoLocal { + repoLocalDir :: FilePath + } + + -- | Standard (unsecured) remote repositores + | RepoRemote { + repoRemote :: RemoteRepo + , repoLocalDir :: FilePath + } + + -- | Secure repositories + -- + -- Although this contains the same fields as 'RepoRemote', we use a separate + -- constructor to avoid confusing the two. + -- + -- Not all access to a secure repo goes through the hackage-security + -- library currently; code paths that do not still make use of the + -- 'repoRemote' and 'repoLocalDir' fields directly. 
+ | RepoSecure { + repoRemote :: RemoteRepo + , repoLocalDir :: FilePath + } + deriving (Show, Eq, Ord, Generic) + +instance Binary Repo + +-- | Check if this is a remote repo +maybeRepoRemote :: Repo -> Maybe RemoteRepo +maybeRepoRemote (RepoLocal _localDir) = Nothing +maybeRepoRemote (RepoRemote r _localDir) = Just r +maybeRepoRemote (RepoSecure r _localDir) = Just r + +-- ------------------------------------------------------------ +-- * Build results +-- ------------------------------------------------------------ + +type BuildResult = Either BuildFailure BuildSuccess +data BuildFailure = PlanningFailed + | DependentFailed PackageId + | DownloadFailed SomeException + | UnpackFailed SomeException + | ConfigureFailed SomeException + | BuildFailed SomeException + | TestsFailed SomeException + | InstallFailed SomeException + deriving (Show, Generic) +data BuildSuccess = BuildOk DocsResult TestsResult + (Maybe InstalledPackageInfo) + deriving (Show, Generic) + +data DocsResult = DocsNotTried | DocsFailed | DocsOk + deriving (Show, Generic) +data TestsResult = TestsNotTried | TestsOk + deriving (Show, Generic) + +instance Binary BuildFailure +instance Binary BuildSuccess +instance Binary DocsResult +instance Binary TestsResult + +--FIXME: this is a total cheat +instance Binary SomeException where + put _ = return () + get = fail "cannot serialise exceptions" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Update.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Update.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Update.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Update.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,88 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Update +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- +----------------------------------------------------------------------------- +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.Update + ( update + ) where + +import Distribution.Client.Types + ( Repo(..), RemoteRepo(..), maybeRepoRemote ) +import Distribution.Client.HttpUtils + ( DownloadResult(..) ) +import Distribution.Client.FetchUtils + ( downloadIndex ) +import Distribution.Client.IndexUtils + ( updateRepoIndexCache, Index(..) ) +import Distribution.Client.JobControl + ( newParallelJobControl, spawnJob, collectJob ) +import Distribution.Client.Setup + ( RepoContext(..) ) +import Distribution.Verbosity + ( Verbosity ) + +import Distribution.Simple.Utils + ( writeFileAtomic, warn, notice ) + +import qualified Data.ByteString.Lazy as BS +import Distribution.Client.GZipUtils (maybeDecompress) +import System.FilePath (dropExtension) +import Data.Maybe (catMaybes) +import Data.Time (getCurrentTime) + +import qualified Hackage.Security.Client as Sec + +-- | 'update' downloads the package list from all known servers +update :: Verbosity -> RepoContext -> IO () +update verbosity repoCtxt | null (repoContextRepos repoCtxt) = do + warn verbosity $ "No remote package servers have been specified. Usually " + ++ "you would have one specified in the config file." 
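For orientation, a hedged sketch of how a caller typically drives 'update' (not taken from this patch; the real wiring in Main.hs goes through the command-line handling and differs in detail). It assumes withRepoContext from Distribution.Client.Setup has its cabal-install-1.24 shape, Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a.

-- Illustrative only: build a RepoContext from the saved configuration and
-- refresh the package lists of all configured repositories.
updateSketch :: Verbosity -> GlobalFlags -> IO ()
updateSketch verbosity globalFlags =
  withRepoContext verbosity globalFlags $ \repoCtxt ->
    update verbosity repoCtxt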
+update verbosity repoCtxt = do + jobCtrl <- newParallelJobControl + let repos = repoContextRepos repoCtxt + remoteRepos = catMaybes (map maybeRepoRemote repos) + case remoteRepos of + [] -> return () + [remoteRepo] -> + notice verbosity $ "Downloading the latest package list from " + ++ remoteRepoName remoteRepo + _ -> notice verbosity . unlines + $ "Downloading the latest package lists from: " + : map (("- " ++) . remoteRepoName) remoteRepos + mapM_ (spawnJob jobCtrl . updateRepo verbosity repoCtxt) repos + mapM_ (\_ -> collectJob jobCtrl) repos + +updateRepo :: Verbosity -> RepoContext -> Repo -> IO () +updateRepo verbosity repoCtxt repo = do + transport <- repoContextGetTransport repoCtxt + case repo of + RepoLocal{..} -> return () + RepoRemote{..} -> do + downloadResult <- downloadIndex transport verbosity repoRemote repoLocalDir + case downloadResult of + FileAlreadyInCache -> return () + FileDownloaded indexPath -> do + writeFileAtomic (dropExtension indexPath) . maybeDecompress + =<< BS.readFile indexPath + updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) + RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do + ce <- if repoContextIgnoreExpiry repoCtxt + then Just `fmap` getCurrentTime + else return Nothing + updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce + -- Update cabal's internal index as well so that it's not out of sync + -- (If all access to the cache goes through hackage-security this can go) + case updated of + Sec.NoUpdates -> + return () + Sec.HasUpdates -> + updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Upload.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Upload.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Upload.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Upload.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,163 @@ +module Distribution.Client.Upload (check, upload, uploadDoc, report) where + +import Distribution.Client.Types ( Username(..), Password(..) + , RemoteRepo(..), maybeRepoRemote ) +import Distribution.Client.HttpUtils + ( HttpTransport(..), remoteRepoTryUpgradeToHttps ) +import Distribution.Client.Setup + ( RepoContext(..) 
)
+
+import Distribution.Simple.Utils (notice, warn, info, die)
+import Distribution.Verbosity (Verbosity)
+import Distribution.Text (display)
+import Distribution.Client.Config
+
+import qualified Distribution.Client.BuildReports.Anonymous as BuildReport
+import qualified Distribution.Client.BuildReports.Upload as BuildReport
+
+import Network.URI (URI(uriPath), parseURI)
+import Network.HTTP (Header(..), HeaderName(..))
+
+import System.IO (hFlush, stdin, stdout, hGetEcho, hSetEcho)
+import System.Exit (exitFailure)
+import Control.Exception (bracket)
+import System.FilePath ((</>), takeExtension, takeFileName)
+import qualified System.FilePath.Posix as FilePath.Posix ((</>))
+import System.Directory
+import Control.Monad (forM_, when)
+import Data.Maybe (catMaybes)
+
+type Auth = Maybe (String, String)
+
+checkURI :: URI
+Just checkURI = parseURI $ "http://hackage.haskell.org/cgi-bin/"
+                        ++ "hackage-scripts/check-pkg"
+
+upload :: Verbosity -> RepoContext
+       -> Maybe Username -> Maybe Password -> [FilePath]
+       -> IO ()
+upload verbosity repoCtxt mUsername mPassword paths = do
+    let repos = repoContextRepos repoCtxt
+    transport <- repoContextGetTransport repoCtxt
+    targetRepo <-
+      case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of
+        [] -> die "Cannot upload. No remote repositories are configured."
+        rs -> remoteRepoTryUpgradeToHttps transport (last rs)
+    let targetRepoURI = remoteRepoURI targetRepo
+        rootIfEmpty x = if null x then "/" else x
+        uploadURI = targetRepoURI {
+            uriPath = rootIfEmpty (uriPath targetRepoURI)
+                      FilePath.Posix.</> "upload"
+          }
+    Username username <- maybe promptUsername return mUsername
+    Password password <- maybe promptPassword return mPassword
+    let auth = Just (username,password)
+    forM_ paths $ \path -> do
+      notice verbosity $ "Uploading " ++ path ++ "... "
+      handlePackage transport verbosity uploadURI auth path
+
+uploadDoc :: Verbosity -> RepoContext
+          -> Maybe Username -> Maybe Password -> FilePath
+          -> IO ()
+uploadDoc verbosity repoCtxt mUsername mPassword path = do
+    let repos = repoContextRepos repoCtxt
+    transport <- repoContextGetTransport repoCtxt
+    targetRepo <-
+      case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of
+        [] -> die $ "Cannot upload. No remote repositories are configured."
+        rs -> remoteRepoTryUpgradeToHttps transport (last rs)
+    let targetRepoURI = remoteRepoURI targetRepo
+        rootIfEmpty x = if null x then "/" else x
+        uploadURI = targetRepoURI {
+            uriPath = rootIfEmpty (uriPath targetRepoURI)
+                      FilePath.Posix.</> "package/" ++ pkgid ++ "/docs"
+          }
+        (reverseSuffix, reversePkgid) = break (== '-')
+                                        (reverse (takeFileName path))
+        pkgid = reverse $ tail reversePkgid
+    when (reverse reverseSuffix /= "docs.tar.gz"
+          || null reversePkgid || head reversePkgid /= '-') $
+      die "Expected a file name matching the pattern <pkgid>-docs.tar.gz"
+    Username username <- maybe promptUsername return mUsername
+    Password password <- maybe promptPassword return mPassword
+
+    let auth = Just (username,password)
+        headers =
+          [ Header HdrContentType "application/x-tar"
+          , Header HdrContentEncoding "gzip"
+          ]
+    notice verbosity $ "Uploading documentation " ++ path ++ "... "
+    resp <- putHttpFile transport verbosity uploadURI path auth headers
+    case resp of
+      (200,_) ->
+        notice verbosity "Ok"
+      (code,err) -> do
+        notice verbosity $ "Error uploading documentation "
+                        ++ path ++ ": "
+                        ++ "http code " ++ show code ++ "\n"
+                        ++ err
+        exitFailure
+
+promptUsername :: IO Username
+promptUsername = do
+  putStr "Hackage username: "
+  hFlush stdout
+  fmap Username getLine
+
+promptPassword :: IO Password
+promptPassword = do
+  putStr "Hackage password: "
+  hFlush stdout
+  -- save/restore the terminal echoing status
+  passwd <- bracket (hGetEcho stdin) (hSetEcho stdin) $ \_ -> do
+    hSetEcho stdin False  -- no echoing for entering the password
+    fmap Password getLine
+  putStrLn ""
+  return passwd
+
+report :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IO ()
+report verbosity repoCtxt mUsername mPassword = do
+  Username username <- maybe promptUsername return mUsername
+  Password password <- maybe promptPassword return mPassword
+  let auth        = (username, password)
+      repos       = repoContextRepos repoCtxt
+      remoteRepos = catMaybes (map maybeRepoRemote repos)
+  forM_ remoteRepos $ \remoteRepo ->
+      do dotCabal <- defaultCabalDir
+         let srcDir = dotCabal </> "reports" </> remoteRepoName remoteRepo
+         -- We don't want to bomb out just because we haven't built any packages
+         -- from this repo yet.
+         srcExists <- doesDirectoryExist srcDir
+         when srcExists $ do
+           contents <- getDirectoryContents srcDir
+           forM_ (filter (\c -> takeExtension c ==".log") contents) $ \logFile ->
+             do inp <- readFile (srcDir </> logFile)
+                let (reportStr, buildLog) = read inp :: (String,String)
+                case BuildReport.parse reportStr of
+                  Left errs -> warn verbosity $ "Errors: " ++ errs -- FIXME
+                  Right report' ->
+                    do info verbosity $ "Uploading report for "
+                         ++ display (BuildReport.package report')
+                       BuildReport.uploadReports verbosity repoCtxt auth
+                         (remoteRepoURI remoteRepo) [(report', Just buildLog)]
+                       return ()
+
+check :: Verbosity -> RepoContext -> [FilePath] -> IO ()
+check verbosity repoCtxt paths = do
+  transport <- repoContextGetTransport repoCtxt
+  forM_ paths $ \path -> do
+    notice verbosity $ "Checking " ++ path ++ "... "
+    handlePackage transport verbosity checkURI Nothing path
+
+handlePackage :: HttpTransport -> Verbosity -> URI -> Auth
+              -> FilePath -> IO ()
+handlePackage transport verbosity uri auth path =
+  do resp <- postHttpFile transport verbosity uri path auth
+     case resp of
+       (200,_) ->
+         notice verbosity "Ok"
+       (code,err) -> do
+         notice verbosity $ "Error uploading " ++ path ++ ": "
+                         ++ "http code " ++ show code ++ "\n"
+                         ++ err
+         exitFailure
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils/Json.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils/Json.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils/Json.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils/Json.hs 2016-12-23 10:35:30.000000000 +0000
@@ -0,0 +1,225 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+-- | Minimal JSON / RFC 7159 support
+--
+-- The API is heavily inspired by @aeson@'s API but puts emphasis on
+-- simplicity rather than performance. The 'ToJSON' instances are
+-- intended to have an encoding compatible with @aeson@'s encoding.
+--
+module Distribution.Client.Utils.Json
+    ( Value(..)
+ , Object, object, Pair, (.=) + , encodeToString + , encodeToBuilder + , ToJSON(toJSON) + ) + where + +import Data.Char +import Data.Int +import Data.String +import Data.Word +import Data.List +import Data.Monoid + +import Data.ByteString.Builder (Builder) +import qualified Data.ByteString.Builder as BB + +-- TODO: We may want to replace 'String' with 'Text' or 'ByteString' + +-- | A JSON value represented as a Haskell value. +data Value = Object !Object + | Array [Value] + | String String + | Number !Double + | Bool !Bool + | Null + deriving (Eq, Read, Show) + +-- | A key\/value pair for an 'Object' +type Pair = (String, Value) + +-- | A JSON \"object\" (key/value map). +type Object = [Pair] + +infixr 8 .= + +-- | A key-value pair for encoding a JSON object. +(.=) :: ToJSON v => String -> v -> Pair +k .= v = (k, toJSON v) + +-- | Create a 'Value' from a list of name\/value 'Pair's. +object :: [Pair] -> Value +object = Object + +instance IsString Value where + fromString = String + + +-- | A type that can be converted to JSON. +class ToJSON a where + -- | Convert a Haskell value to a JSON-friendly intermediate type. + toJSON :: a -> Value + +instance ToJSON () where + toJSON () = Array [] + +instance ToJSON Value where + toJSON = id + +instance ToJSON Bool where + toJSON = Bool + +instance ToJSON a => ToJSON [a] where + toJSON = Array . map toJSON + +instance ToJSON a => ToJSON (Maybe a) where + toJSON Nothing = Null + toJSON (Just a) = toJSON a + +instance (ToJSON a,ToJSON b) => ToJSON (a,b) where + toJSON (a,b) = Array [toJSON a, toJSON b] + +instance (ToJSON a,ToJSON b,ToJSON c) => ToJSON (a,b,c) where + toJSON (a,b,c) = Array [toJSON a, toJSON b, toJSON c] + +instance (ToJSON a,ToJSON b,ToJSON c, ToJSON d) => ToJSON (a,b,c,d) where + toJSON (a,b,c,d) = Array [toJSON a, toJSON b, toJSON c, toJSON d] + +instance ToJSON Float where + toJSON = Number . realToFrac + +instance ToJSON Double where + toJSON = Number + +instance ToJSON Int where toJSON = Number . realToFrac +instance ToJSON Int8 where toJSON = Number . realToFrac +instance ToJSON Int16 where toJSON = Number . realToFrac +instance ToJSON Int32 where toJSON = Number . realToFrac + +instance ToJSON Word where toJSON = Number . realToFrac +instance ToJSON Word8 where toJSON = Number . realToFrac +instance ToJSON Word16 where toJSON = Number . realToFrac +instance ToJSON Word32 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Int64 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Word64 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Integer where toJSON = Number . fromInteger + +------------------------------------------------------------------------------ +-- 'BB.Builder'-based encoding + +-- | Serialise value as JSON/UTF8-encoded 'Builder' +encodeToBuilder :: ToJSON a => a -> Builder +encodeToBuilder = encodeValueBB . toJSON + +encodeValueBB :: Value -> Builder +encodeValueBB jv = case jv of + Bool True -> "true" + Bool False -> "false" + Null -> "null" + Number n + | isNaN n || isInfinite n -> encodeValueBB Null + | Just i <- doubleToInt64 n -> BB.int64Dec i + | otherwise -> BB.doubleDec n + Array a -> encodeArrayBB a + String s -> encodeStringBB s + Object o -> encodeObjectBB o + +encodeArrayBB :: [Value] -> Builder +encodeArrayBB [] = "[]" +encodeArrayBB jvs = BB.char8 '[' <> go jvs <> BB.char8 ']' + where + go = Data.Monoid.mconcat . 
intersperse (BB.char8 ',') . map encodeValueBB + +encodeObjectBB :: Object -> Builder +encodeObjectBB [] = "{}" +encodeObjectBB jvs = BB.char8 '{' <> go jvs <> BB.char8 '}' + where + go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encPair + encPair (l,x) = encodeStringBB l <> BB.char8 ':' <> encodeValueBB x + +encodeStringBB :: String -> Builder +encodeStringBB str = BB.char8 '"' <> go str <> BB.char8 '"' + where + go = BB.stringUtf8 . escapeString + +------------------------------------------------------------------------------ +-- 'String'-based encoding + +-- | Serialise value as JSON-encoded Unicode 'String' +encodeToString :: ToJSON a => a -> String +encodeToString jv = encodeValue (toJSON jv) [] + +encodeValue :: Value -> ShowS +encodeValue jv = case jv of + Bool b -> showString (if b then "true" else "false") + Null -> showString "null" + Number n + | isNaN n || isInfinite n -> encodeValue Null + | Just i <- doubleToInt64 n -> shows i + | otherwise -> shows n + Array a -> encodeArray a + String s -> encodeString s + Object o -> encodeObject o + +encodeArray :: [Value] -> ShowS +encodeArray [] = showString "[]" +encodeArray jvs = ('[':) . go jvs . (']':) + where + go [] = id + go [x] = encodeValue x + go (x:xs) = encodeValue x . (',':) . go xs + +encodeObject :: Object -> ShowS +encodeObject [] = showString "{}" +encodeObject jvs = ('{':) . go jvs . ('}':) + where + go [] = id + go [(l,x)] = encodeString l . (':':) . encodeValue x + go ((l,x):lxs) = encodeString l . (':':) . encodeValue x . (',':) . go lxs + +encodeString :: String -> ShowS +encodeString str = ('"':) . showString (escapeString str) . ('"':) + +------------------------------------------------------------------------------ +-- helpers + +-- | Try to convert 'Double' into 'Int64', return 'Nothing' if not +-- representable loss-free as integral 'Int64' value. +doubleToInt64 :: Double -> Maybe Int64 +doubleToInt64 x + | fromInteger x' == x + , x' <= toInteger (maxBound :: Int64) + , x' >= toInteger (minBound :: Int64) + = Just (fromIntegral x') + | otherwise = Nothing + where + x' = round x + +-- | Minimally escape a 'String' in accordance with RFC 7159, "7. 
Strings" +escapeString :: String -> String +escapeString s + | not (any needsEscape s) = s + | otherwise = escape s + where + escape [] = [] + escape (x:xs) = case x of + '\\' -> '\\':'\\':escape xs + '"' -> '\\':'"':escape xs + '\b' -> '\\':'b':escape xs + '\f' -> '\\':'f':escape xs + '\n' -> '\\':'n':escape xs + '\r' -> '\\':'r':escape xs + '\t' -> '\\':'t':escape xs + c | ord c < 0x10 -> '\\':'u':'0':'0':'0':intToDigit (ord c):escape xs + | ord c < 0x20 -> '\\':'u':'0':'0':'1':intToDigit (ord c - 0x10):escape xs + | otherwise -> c : escape xs + + -- unescaped = %x20-21 / %x23-5B / %x5D-10FFFF + needsEscape c = ord c < 0x20 || c `elem` ['\\','"'] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils/LabeledGraph.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils/LabeledGraph.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils/LabeledGraph.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils/LabeledGraph.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,116 @@ +-- | Wrapper around Data.Graph with support for edge labels +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Client.Utils.LabeledGraph ( + -- * Graphs + Graph + , Vertex + -- ** Building graphs + , graphFromEdges + , graphFromEdges' + , buildG + , transposeG + -- ** Graph properties + , vertices + , edges + -- ** Operations on the underlying unlabeled graph + , forgetLabels + , topSort + ) where + +import Data.Array +import Data.Graph (Vertex, Bounds) +import Data.List (sortBy) +import Data.Maybe (mapMaybe) +import qualified Data.Graph as G + +{------------------------------------------------------------------------------- + Types +-------------------------------------------------------------------------------} + +type Graph e = Array Vertex [(e, Vertex)] +type Edge e = (Vertex, e, Vertex) + +{------------------------------------------------------------------------------- + Building graphs +-------------------------------------------------------------------------------} + +-- | Construct an edge-labeled graph +-- +-- This is a simple adaptation of the definition in Data.Graph +graphFromEdges :: forall key node edge. Ord key + => [ (node, key, [(edge, key)]) ] + -> ( Graph edge + , Vertex -> (node, key, [(edge, key)]) + , key -> Maybe Vertex + ) +graphFromEdges edges0 = + (graph, \v -> vertex_map ! v, key_vertex) + where + max_v = length edges0 - 1 + bounds0 = (0, max_v) :: (Vertex, Vertex) + sorted_edges = sortBy lt edges0 + edges1 = zipWith (,) [0..] sorted_edges + + graph = array bounds0 [(v, (mapMaybe mk_edge ks)) + | (v, (_, _, ks)) <- edges1] + key_map = array bounds0 [(v, k ) + | (v, (_, k, _ )) <- edges1] + vertex_map = array bounds0 edges1 + + (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2 + + mk_edge :: (edge, key) -> Maybe (edge, Vertex) + mk_edge (edge, key) = do v <- key_vertex key ; return (edge, v) + + -- returns Nothing for non-interesting vertices + key_vertex :: key -> Maybe Vertex + key_vertex k = findVertex 0 max_v + where + findVertex a b + | a > b = Nothing + | otherwise = case compare k (key_map ! 
mid) of + LT -> findVertex a (mid-1) + EQ -> Just mid + GT -> findVertex (mid+1) b + where + mid = a + (b - a) `div` 2 + +graphFromEdges' :: Ord key + => [ (node, key, [(edge, key)]) ] + -> ( Graph edge + , Vertex -> (node, key, [(edge, key)]) + ) +graphFromEdges' x = (a,b) + where + (a,b,_) = graphFromEdges x + +transposeG :: Graph e -> Graph e +transposeG g = buildG (bounds g) (reverseE g) + +buildG :: Bounds -> [Edge e] -> Graph e +buildG bounds0 edges0 = accumArray (flip (:)) [] bounds0 (map reassoc edges0) + where + reassoc (v, e, w) = (v, (e, w)) + +reverseE :: Graph e -> [Edge e] +reverseE g = [ (w, e, v) | (v, e, w) <- edges g ] + +{------------------------------------------------------------------------------- + Graph properties +-------------------------------------------------------------------------------} + +vertices :: Graph e -> [Vertex] +vertices = indices + +edges :: Graph e -> [Edge e] +edges g = [ (v, e, w) | v <- vertices g, (e, w) <- g!v ] + +{------------------------------------------------------------------------------- + Operations on the underlying unlabelled graph +-------------------------------------------------------------------------------} + +forgetLabels :: Graph e -> G.Graph +forgetLabels = fmap (map snd) + +topSort :: Graph e -> [Vertex] +topSort = G.topSort . forgetLabels diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Utils.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,301 @@ +{-# LANGUAGE ForeignFunctionInterface, CPP #-} + +module Distribution.Client.Utils ( MergeResult(..) + , mergeBy, duplicates, duplicatesBy + , readMaybe + , inDir, logDirChange + , determineNumJobs, numberOfProcessors + , removeExistingFile + , withTempFileName + , makeAbsoluteToCwd + , makeRelativeToCwd, makeRelativeToDir + , filePathToByteString + , byteStringToFilePath, tryCanonicalizePath + , canonicalizePathNoThrow + , moreRecentFile, existsAndIsMoreRecentThan + , tryFindAddSourcePackageDesc + , tryFindPackageDesc + , relaxEncodingErrors) + where + +import Distribution.Compat.Exception ( catchIO ) +import Distribution.Client.Compat.Time ( getModTime ) +import Distribution.Simple.Setup ( Flag(..) ) +import Distribution.Simple.Utils ( die, findPackageDesc ) +import qualified Data.ByteString.Lazy as BS +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative +#endif +import Control.Monad + ( when ) +import Data.Bits + ( (.|.), shiftL, shiftR ) +import Data.Char + ( ord, chr ) +#if MIN_VERSION_base(4,6,0) +import Text.Read + ( readMaybe ) +#endif +import Data.List + ( isPrefixOf, sortBy, groupBy ) +import Data.Word + ( Word8, Word32) +import Foreign.C.Types ( CInt(..) 
)
+import qualified Control.Exception as Exception
+         ( finally, bracket )
+import System.Directory
+         ( canonicalizePath, doesFileExist, getCurrentDirectory
+         , removeFile, setCurrentDirectory )
+import System.FilePath
+         ( (</>), isAbsolute, takeDrive, splitPath, joinPath )
+import System.IO
+         ( Handle, hClose, openTempFile
+#if MIN_VERSION_base(4,4,0)
+         , hGetEncoding, hSetEncoding
+#endif
+         )
+import System.IO.Unsafe ( unsafePerformIO )
+
+#if MIN_VERSION_base(4,4,0)
+import GHC.IO.Encoding
+         ( recover, TextEncoding(TextEncoding) )
+import GHC.IO.Encoding.Failure
+         ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) )
+#endif
+
+#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3)
+import Prelude hiding (ioError)
+import Control.Monad (liftM2, unless)
+import System.Directory (doesDirectoryExist)
+import System.IO.Error (ioError, mkIOError, doesNotExistErrorType)
+#endif
+
+-- | Generic merging utility. For sorted input lists this is a full outer join.
+--
+-- * The result list never contains @(Nothing, Nothing)@.
+--
+mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b]
+mergeBy cmp = merge
+  where
+    merge []     ys     = [ OnlyInRight y | y <- ys]
+    merge xs     []     = [ OnlyInLeft  x | x <- xs]
+    merge (x:xs) (y:ys) =
+      case x `cmp` y of
+        GT -> OnlyInRight y : merge (x:xs) ys
+        EQ -> InBoth      x y : merge xs ys
+        LT -> OnlyInLeft  x : merge xs (y:ys)
+
+data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b
+
+duplicates :: Ord a => [a] -> [[a]]
+duplicates = duplicatesBy compare
+
+duplicatesBy :: (a -> a -> Ordering) -> [a] -> [[a]]
+duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp
+  where
+    eq a b = case cmp a b of
+               EQ -> True
+               _  -> False
+    moreThanOne (_:_:_) = True
+    moreThanOne _       = False
+
+#if !MIN_VERSION_base(4,6,0)
+-- | An implementation of readMaybe, for compatibility with older base versions.
+readMaybe :: Read a => String -> Maybe a
+readMaybe s = case reads s of
+                [(x,"")] -> Just x
+                _        -> Nothing
+#endif
+
+-- | Like 'removeFile', but does not throw an exception when the file does not
+-- exist.
+removeExistingFile :: FilePath -> IO ()
+removeExistingFile path = do
+  exists <- doesFileExist path
+  when exists $
+    removeFile path
+
+-- | A variant of 'withTempFile' that only gives us the file name, and while
+-- it will clean up the file afterwards, it's lenient if the file is
+-- moved\/deleted.
+--
+withTempFileName :: FilePath
+                 -> String
+                 -> (FilePath -> IO a) -> IO a
+withTempFileName tmpDir template action =
+  Exception.bracket
+    (openTempFile tmpDir template)
+    (\(name, _) -> removeExistingFile name)
+    (\(name, h) -> hClose h >> action name)
+
+-- | Executes the action in the specified directory.
+inDir :: Maybe FilePath -> IO a -> IO a
+inDir Nothing m = m
+inDir (Just d) m = do
+  old <- getCurrentDirectory
+  setCurrentDirectory d
+  m `Exception.finally` setCurrentDirectory old
+
+-- | Log directory change in 'make' compatible syntax
+logDirChange :: (String -> IO ()) -> Maybe FilePath -> IO a -> IO a
+logDirChange _ Nothing m = m
+logDirChange l (Just d) m = do
+  l $ "cabal: Entering directory '" ++ d ++ "'\n"
+  m `Exception.finally`
+    (l $ "cabal: Leaving directory '" ++ d ++ "'\n")
+
+foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt
+
+-- The number of processors is not going to change during the duration of the
+-- program, so unsafePerformIO is safe here.
+numberOfProcessors :: Int
+numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors
+
+-- | Determine the number of jobs to use given the value of the '-j' flag.
+determineNumJobs :: Flag (Maybe Int) -> Int
+determineNumJobs numJobsFlag =
+  case numJobsFlag of
+    NoFlag        -> 1
+    Flag Nothing  -> numberOfProcessors
+    Flag (Just n) -> n
+
+-- | Given a relative path, make it absolute relative to the current
+-- directory. Absolute paths are returned unmodified.
+makeAbsoluteToCwd :: FilePath -> IO FilePath
+makeAbsoluteToCwd path | isAbsolute path = return path
+                       | otherwise       = do cwd <- getCurrentDirectory
+                                               return $! cwd </> path
+
+-- | Given a path (relative or absolute), make it relative to the current
+-- directory, including using @../..@ if necessary.
+makeRelativeToCwd :: FilePath -> IO FilePath
+makeRelativeToCwd path =
+  makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory
+
+-- | Given a path (relative or absolute), make it relative to the given
+-- directory, including using @../..@ if necessary.
+makeRelativeToDir :: FilePath -> FilePath -> IO FilePath
+makeRelativeToDir path dir =
+  makeRelativeCanonical <$> canonicalizePath path <*> canonicalizePath dir
+
+-- | Given a canonical absolute path and canonical absolute dir, make the path
+-- relative to the directory, including using @../..@ if necessary. Returns
+-- the original absolute path if it is not on the same drive as the given dir.
+makeRelativeCanonical :: FilePath -> FilePath -> FilePath
+makeRelativeCanonical path dir
+  | takeDrive path /= takeDrive dir = path
+  | otherwise                       = go (splitPath path) (splitPath dir)
+  where
+    go (p:ps) (d:ds) | p == d = go ps ds
+    go    []     []           = "./"
+    go    ps     ds           = joinPath (replicate (length ds) ".." ++ ps)
+
+-- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is
+-- encoded as a little-endian 'Word32'.
+filePathToByteString :: FilePath -> BS.ByteString
+filePathToByteString p =
+  BS.pack $ foldr conv [] codepts
+  where
+    codepts :: [Word32]
+    codepts = map (fromIntegral . ord) p
+
+    conv :: Word32 -> [Word8] -> [Word8]
+    conv w32 rest = b0:b1:b2:b3:rest
+      where
+        b0 = fromIntegral $ w32
+        b1 = fromIntegral $ w32 `shiftR` 8
+        b2 = fromIntegral $ w32 `shiftR` 16
+        b3 = fromIntegral $ w32 `shiftR` 24
+
+-- | Reverse operation to 'filePathToByteString'.
+byteStringToFilePath :: BS.ByteString -> FilePath
+byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected
+                        | otherwise          = go 0
+  where
+    unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected"
+    bslen = BS.length bs
+
+    go i | i == bslen = []
+         | otherwise  = (chr . fromIntegral $ w32) : go (i+4)
+      where
+        w32 :: Word32
+        w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. (b3 `shiftL` 24)
+        b0 = fromIntegral $ BS.index bs i
+        b1 = fromIntegral $ BS.index bs (i + 1)
+        b2 = fromIntegral $ BS.index bs (i + 2)
+        b3 = fromIntegral $ BS.index bs (i + 3)
+
+-- | Workaround for the inconsistent behaviour of 'canonicalizePath'. Always
+-- throws an error if the path refers to a non-existent file.
+tryCanonicalizePath :: FilePath -> IO FilePath
+tryCanonicalizePath path = do
+  ret <- canonicalizePath path
+#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3)
+  exists <- liftM2 (||) (doesFileExist ret) (doesDirectoryExist ret)
+  unless exists $
+    ioError $ mkIOError doesNotExistErrorType "canonicalizePath"
+                        Nothing (Just ret)
+#endif
+  return ret
+
+-- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws
+-- an exception, returns the path argument unmodified.
+canonicalizePathNoThrow :: FilePath -> IO FilePath +canonicalizePathNoThrow path = do + canonicalizePath path `catchIO` (\_ -> return path) + +-------------------- +-- Modification time + +-- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead +-- of getModificationTime for higher precision. We can't merge the two because +-- Distribution.Client.Time uses MIN_VERSION macros. +moreRecentFile :: FilePath -> FilePath -> IO Bool +moreRecentFile a b = do + exists <- doesFileExist b + if not exists + then return True + else do tb <- getModTime b + ta <- getModTime a + return (ta > tb) + +-- | Like 'moreRecentFile', but also checks that the first file exists. +existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool +existsAndIsMoreRecentThan a b = do + exists <- doesFileExist a + if not exists + then return False + else a `moreRecentFile` b + +-- | Sets the handler for encoding errors to one that transliterates invalid +-- characters into one present in the encoding (i.e., \'?\'). +-- This is opposed to the default behavior, which is to throw an exception on +-- error. This function will ignore file handles that have a Unicode encoding +-- set. It's a no-op for versions of `base` less than 4.4. +relaxEncodingErrors :: Handle -> IO () +relaxEncodingErrors handle = do +#if MIN_VERSION_base(4,4,0) + maybeEncoding <- hGetEncoding handle + case maybeEncoding of + Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> + let relax x = x { recover = recoverEncode TransliterateCodingFailure } + in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) + _ -> +#endif + return () + +-- |Like 'tryFindPackageDesc', but with error specific to add-source deps. +tryFindAddSourcePackageDesc :: FilePath -> String -> IO FilePath +tryFindAddSourcePackageDesc depPath err = tryFindPackageDesc depPath $ + err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " + ++ depPath + +-- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be +-- found, with @err@ prefixing the error message. This function simply allows +-- us to give a more descriptive error than that provided by @findPackageDesc@. +tryFindPackageDesc :: FilePath -> String -> IO FilePath +tryFindPackageDesc depPath err = do + errOrCabalFile <- findPackageDesc depPath + case errOrCabalFile of + Right file -> return file + Left _ -> die err diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Win32SelfUpgrade.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Win32SelfUpgrade.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/Win32SelfUpgrade.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/Win32SelfUpgrade.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,225 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Win32SelfUpgrade +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Support for self-upgrading executables on Windows platforms. 
+----------------------------------------------------------------------------- +module Distribution.Client.Win32SelfUpgrade ( +-- * Explanation +-- +-- | Windows inherited a design choice from DOS that while initially innocuous +-- has rather unfortunate consequences. It maintains the invariant that every +-- open file has a corresponding name on disk. One positive consequence of this +-- is that an executable can always find it's own executable file. The downside +-- is that a program cannot be deleted or upgraded while it is running without +-- hideous workarounds. This module implements one such hideous workaround. +-- +-- The basic idea is: +-- +-- * Move our own exe file to a new name +-- * Copy a new exe file to the previous name +-- * Run the new exe file, passing our own PID and new path +-- * Wait for the new process to start +-- * Close the new exe file +-- * Exit old process +-- +-- Then in the new process: +-- +-- * Inform the old process that we've started +-- * Wait for the old process to die +-- * Delete the old exe file +-- * Exit new process +-- + + possibleSelfUpgrade, + deleteOldExeFile, + ) where + +#if mingw32_HOST_OS + +import qualified System.Win32 as Win32 +import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) +import Foreign.Ptr (Ptr, nullPtr) +import System.Process (runProcess) +import System.Directory (canonicalizePath) +import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) + +import Distribution.Verbosity as Verbosity (Verbosity, showForCabal) +import Distribution.Simple.Utils (debug, info) + +import Prelude hiding (log) + +-- | If one of the given files is our own exe file then we arrange things such +-- that the nested action can replace our own exe file. +-- +-- We require that the new process accepts a command line invocation that +-- calls 'deleteOldExeFile', passing in the PID and exe file. +-- +possibleSelfUpgrade :: Verbosity + -> [FilePath] + -> IO a -> IO a +possibleSelfUpgrade verbosity newPaths action = do + dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE + + newPaths' <- mapM canonicalizePath newPaths + let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' + + if not doingSelfUpgrade + then action + else do + info verbosity $ "cabal-install does the replace-own-exe-file dance..." + tmpPath <- moveOurExeOutOfTheWay verbosity + result <- action + scheduleOurDemise verbosity dstPath tmpPath + (\pid path -> ["win32selfupgrade", pid, path + ,"--verbose=" ++ Verbosity.showForCabal verbosity]) + return result + +-- | The name of a Win32 Event object that we use to synchronise between the +-- old and new processes. We need to synchronise to make sure that the old +-- process has not yet terminated by the time the new one starts up and looks +-- for the old process. Otherwise the old one might have already terminated +-- and we could not wait on it terminating reliably (eg the PID might get +-- re-used). +-- +syncEventName :: String +syncEventName = "Local\\cabal-install-upgrade" + +-- | The first part of allowing our exe file to be replaced is to move the +-- existing exe file out of the way. Although we cannot delete our exe file +-- while we're still running, fortunately we can rename it, at least within +-- the same directory. 
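To make the renaming step concrete, here is a small sketch (hypothetical paths and PID, not part of the patch) of the naming scheme used by moveOurExeOutOfTheWay below: the temporary name is the original base name with the current process id appended, so the file stays in the same directory.

import System.FilePath (replaceBaseName, takeBaseName)

-- Sketch: "C:\\bin\\cabal.exe" with PID 1234 becomes "C:\\bin\\cabal1234.exe".
tempExeName :: FilePath -> Int -> FilePath
tempExeName exePath pid =
  replaceBaseName exePath (takeBaseName exePath ++ show pid)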
+-- +moveOurExeOutOfTheWay :: Verbosity -> IO FilePath +moveOurExeOutOfTheWay verbosity = do + ourPID <- getCurrentProcessId + dstPath <- Win32.getModuleFileName Win32.nullHANDLE + + let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) + + debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath + Win32.moveFile dstPath tmpPath + return tmpPath + +-- | Assuming we've now installed the new exe file in the right place, we +-- launch it and ask it to delete our exe file when we eventually terminate. +-- +scheduleOurDemise :: Verbosity -> FilePath -> FilePath + -> (String -> FilePath -> [String]) -> IO () +scheduleOurDemise verbosity dstPath tmpPath mkArgs = do + ourPID <- getCurrentProcessId + event <- createEvent syncEventName + + let args = mkArgs (show ourPID) tmpPath + log $ "launching child " ++ unwords (dstPath : map show args) + _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing + + log $ "waiting for the child to start up" + waitForSingleObject event (10*1000) -- wait at most 10 sec + log $ "child started ok" + + where + log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) + +-- | Assuming we're now in the new child process, we've been asked by the old +-- process to wait for it to terminate and then we can remove the old exe file +-- that it renamed itself to. +-- +deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () +deleteOldExeFile verbosity oldPID tmpPath = do + log $ "process started. Will delete exe file of process " + ++ show oldPID ++ " at path " ++ tmpPath + + log $ "getting handle of parent process " ++ show oldPID + oldPHANDLE <- Win32.openProcess Win32.sYNCHORNIZE False (fromIntegral oldPID) + + log $ "synchronising with parent" + event <- openEvent syncEventName + setEvent event + + log $ "waiting for parent process to terminate" + waitForSingleObject oldPHANDLE Win32.iNFINITE + log $ "parent process terminated" + + log $ "deleting parent's old .exe file" + Win32.deleteFile tmpPath + + where + log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) + +------------------------ +-- Win32 foreign imports +-- + +-- A bunch of functions sadly not provided by the Win32 package. 
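As a rough sketch of the child-side half of this protocol (the argument handling below is hypothetical; the real dispatch goes through cabal-install's hidden win32selfupgrade command): the freshly installed executable receives the parent's PID and the renamed path that scheduleOurDemise passed on the command line, and hands them to deleteOldExeFile.

-- Hypothetical sketch only: decode the ["win32selfupgrade", pid, path, ...]
-- invocation produced by scheduleOurDemise and clean up after the old binary.
handleWin32SelfUpgradeArgs :: Verbosity -> [String] -> IO ()
handleWin32SelfUpgradeArgs verbosity ("win32selfupgrade" : pidStr : tmpPath : _) =
  case reads pidStr of
    [(oldPID, "")] -> deleteOldExeFile verbosity oldPID tmpPath
    _              -> return ()   -- malformed PID argument; nothing to do
handleWin32SelfUpgradeArgs _ _ = return ()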
+ +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" + getCurrentProcessId :: IO DWORD + +foreign import CALLCONV unsafe "windows.h WaitForSingleObject" + waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD + +waitForSingleObject :: HANDLE -> DWORD -> IO () +waitForSingleObject handle timeout = + Win32.failIf_ bad "WaitForSingleObject" $ + waitForSingleObject_ handle timeout + where + bad result = not (result == 0 || result == wAIT_TIMEOUT) + wAIT_TIMEOUT = 0x00000102 + +foreign import CALLCONV unsafe "windows.h CreateEventW" + createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE + +createEvent :: String -> IO HANDLE +createEvent name = do + Win32.failIfNull "CreateEvent" $ + Win32.withTString name $ + createEvent_ nullPtr False False + +foreign import CALLCONV unsafe "windows.h OpenEventW" + openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE + +openEvent :: String -> IO HANDLE +openEvent name = do + Win32.failIfNull "OpenEvent" $ + Win32.withTString name $ + openEvent_ eVENT_MODIFY_STATE False + where + eVENT_MODIFY_STATE :: DWORD + eVENT_MODIFY_STATE = 0x0002 + +foreign import CALLCONV unsafe "windows.h SetEvent" + setEvent_ :: HANDLE -> IO BOOL + +setEvent :: HANDLE -> IO () +setEvent handle = + Win32.failIfFalse_ "SetEvent" $ + setEvent_ handle + +#else + +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils (die) + +possibleSelfUpgrade :: Verbosity + -> [FilePath] + -> IO a -> IO a +possibleSelfUpgrade _ _ action = action + +deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () +deleteOldExeFile _ _ _ = die "win32selfupgrade not needed except on win32" + +#endif diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/World.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/World.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Distribution/Client/World.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Distribution/Client/World.hs 2016-12-23 10:35:30.000000000 +0000 @@ -0,0 +1,172 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.World +-- Copyright : (c) Peter Robinson 2009 +-- License : BSD-like +-- +-- Maintainer : thaldyron@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Interface to the world-file that contains a list of explicitly +-- requested packages. Meant to be imported qualified. +-- +-- A world file entry stores the package-name, package-version, and +-- user flags. +-- For example, the entry generated by +-- # cabal install stm-io-hooks --flags="-debug" +-- looks like this: +-- # stm-io-hooks -any --flags="-debug" +-- To rebuild/upgrade the packages in world (e.g. when updating the compiler) +-- use +-- # cabal install world +-- +----------------------------------------------------------------------------- +module Distribution.Client.World ( + WorldPkgInfo(..), + insert, + delete, + getContents, + ) where + +import Distribution.Package + ( Dependency(..) 
) +import Distribution.PackageDescription + ( FlagAssignment, FlagName(FlagName) ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Simple.Utils + ( die, info, chattyTry, writeFileAtomic ) +import Distribution.Text + ( Text(..), display, simpleParse ) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.Exception ( catchIO ) +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint ( (<>), (<+>) ) + + +import Data.Char as Char + +import Data.List + ( unionBy, deleteFirstsBy, nubBy ) +import System.IO.Error + ( isDoesNotExistError ) +import qualified Data.ByteString.Lazy.Char8 as B +import Prelude hiding (getContents) + + +data WorldPkgInfo = WorldPkgInfo Dependency FlagAssignment + deriving (Show,Eq) + +-- | Adds packages to the world file; creates the file if it doesn't +-- exist yet. Version constraints and flag assignments for a package are +-- updated if already present. IO errors are non-fatal. +insert :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () +insert = modifyWorld $ unionBy equalUDep + +-- | Removes packages from the world file. +-- Note: Currently unused as there is no mechanism in Cabal (yet) to +-- handle uninstalls. IO errors are non-fatal. +delete :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () +delete = modifyWorld $ flip (deleteFirstsBy equalUDep) + +-- | WorldPkgInfo values are considered equal if they refer to +-- the same package, i.e., we don't care about differing versions or flags. +equalUDep :: WorldPkgInfo -> WorldPkgInfo -> Bool +equalUDep (WorldPkgInfo (Dependency pkg1 _) _) + (WorldPkgInfo (Dependency pkg2 _) _) = pkg1 == pkg2 + +-- | Modifies the world file by applying an update-function ('unionBy' +-- for 'insert', 'deleteFirstsBy' for 'delete') to the given list of +-- packages. IO errors are considered non-fatal. +modifyWorld :: ([WorldPkgInfo] -> [WorldPkgInfo] + -> [WorldPkgInfo]) + -- ^ Function that defines how + -- the list of user packages are merged with + -- existing world packages. + -> Verbosity + -> FilePath -- ^ Location of the world file + -> [WorldPkgInfo] -- ^ list of user supplied packages + -> IO () +modifyWorld _ _ _ [] = return () +modifyWorld f verbosity world pkgs = + chattyTry "Error while updating world-file. " $ do + pkgsOldWorld <- getContents world + -- Filter out packages that are not in the world file: + let pkgsNewWorld = nubBy equalUDep $ f pkgs pkgsOldWorld + -- 'Dependency' is not an Ord instance, so we need to check for + -- equivalence the awkward way: + if not (all (`elem` pkgsOldWorld) pkgsNewWorld && + all (`elem` pkgsNewWorld) pkgsOldWorld) + then do + info verbosity "Updating world file..." + writeFileAtomic world . B.pack $ unlines + [ (display pkg) | pkg <- pkgsNewWorld] + else + info verbosity "World file is already up to date." + + +-- | Returns the content of the world file as a list +getContents :: FilePath -> IO [WorldPkgInfo] +getContents world = do + content <- safelyReadFile world + let result = map simpleParse (lines $ B.unpack content) + case sequence result of + Nothing -> die "Could not parse world file." 
+ Just xs -> return xs + where + safelyReadFile :: FilePath -> IO B.ByteString + safelyReadFile file = B.readFile file `catchIO` handler + where + handler e | isDoesNotExistError e = return B.empty + | otherwise = ioError e + + +instance Text WorldPkgInfo where + disp (WorldPkgInfo dep flags) = disp dep <+> dispFlags flags + where + dispFlags [] = Disp.empty + dispFlags fs = Disp.text "--flags=" + <> Disp.doubleQuotes (flagAssToDoc fs) + flagAssToDoc = foldr (\(FlagName fname,val) flagAssDoc -> + (if not val then Disp.char '-' + else Disp.empty) + Disp.<> Disp.text fname + Disp.<+> flagAssDoc) + Disp.empty + parse = do + dep <- parse + Parse.skipSpaces + flagAss <- Parse.option [] parseFlagAssignment + return $ WorldPkgInfo dep flagAss + where + parseFlagAssignment :: Parse.ReadP r FlagAssignment + parseFlagAssignment = do + _ <- Parse.string "--flags" + Parse.skipSpaces + _ <- Parse.char '=' + Parse.skipSpaces + inDoubleQuotes $ Parse.many1 flag + where + inDoubleQuotes :: Parse.ReadP r a -> Parse.ReadP r a + inDoubleQuotes = Parse.between (Parse.char '"') (Parse.char '"') + + flag = do + Parse.skipSpaces + val <- negative Parse.+++ positive + name <- ident + Parse.skipSpaces + return (FlagName name,val) + negative = do + _ <- Parse.char '-' + return False + positive = return True + + ident :: Parse.ReadP r String + ident = do + -- First character must be a letter/digit to avoid flags + -- like "+-debug": + c <- Parse.satisfy Char.isAlphaNum + cs <- Parse.munch (\ch -> Char.isAlphaNum ch || ch == '_' + || ch == '-') + return (c:cs) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/LICENSE cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/LICENSE --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/LICENSE 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,34 @@ +Copyright (c) 2003-2008, Isaac Jones, Simon Marlow, Martin Sjögren, + Bjorn Bringert, Krasimir Angelov, + Malcolm Wallace, Ross Patterson, + Lemmih, Paolo Martini, Don Stewart, + Duncan Coutts +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Main.hs 2016-12-23 10:35:29.000000000 +0000 @@ -0,0 +1,1325 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Main +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Entry point to the default cabal-install front-end. +----------------------------------------------------------------------------- + +module Main (main) where + +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand, withRepoContext + , ConfigFlags(..) + , ConfigExFlags(..), defaultConfigExFlags, configureExCommand + , BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) + , buildCommand, replCommand, testCommand, benchmarkCommand + , InstallFlags(..), defaultInstallFlags + , installCommand, upgradeCommand, uninstallCommand + , FetchFlags(..), fetchCommand + , FreezeFlags(..), freezeCommand + , genBoundsCommand + , GetFlags(..), getCommand, unpackCommand + , checkCommand + , formatCommand + , updateCommand + , ListFlags(..), listCommand + , InfoFlags(..), infoCommand + , UploadFlags(..), uploadCommand + , ReportFlags(..), reportCommand + , runCommand + , InitFlags(initVerbosity), initCommand + , SDistFlags(..), SDistExFlags(..), sdistCommand + , Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand + , ActAsSetupFlags(..), actAsSetupCommand + , SandboxFlags(..), sandboxCommand + , ExecFlags(..), execCommand + , UserConfigFlags(..), userConfigCommand + , reportCommand + , manpageCommand + ) +import Distribution.Simple.Setup + ( HaddockFlags(..), haddockCommand, defaultHaddockFlags + , HscolourFlags(..), hscolourCommand + , ReplFlags(..) + , CopyFlags(..), copyCommand + , RegisterFlags(..), registerCommand + , CleanFlags(..), cleanCommand + , TestFlags(..), BenchmarkFlags(..) 
+ , Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag + , configAbsolutePaths + ) + +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Config + ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff + , userConfigUpdate, createDefaultConfigFile, getConfigFilePath ) +import Distribution.Client.Targets + ( readUserTargets ) +import qualified Distribution.Client.List as List + ( list, info ) + +import qualified Distribution.Client.CmdConfigure as CmdConfigure +import qualified Distribution.Client.CmdBuild as CmdBuild +import qualified Distribution.Client.CmdRepl as CmdRepl + +import Distribution.Client.Install (install) +import Distribution.Client.Configure (configure) +import Distribution.Client.Update (update) +import Distribution.Client.Exec (exec) +import Distribution.Client.Fetch (fetch) +import Distribution.Client.Freeze (freeze) +import Distribution.Client.GenBounds (genBounds) +import Distribution.Client.Check as Check (check) +--import Distribution.Client.Clean (clean) +import qualified Distribution.Client.Upload as Upload +import Distribution.Client.Run (run, splitRunArgs) +import Distribution.Client.SrcDist (sdist) +import Distribution.Client.Get (get) +import Distribution.Client.Sandbox (sandboxInit + ,sandboxAddSource + ,sandboxDelete + ,sandboxDeleteSource + ,sandboxListSources + ,sandboxHcPkg + ,dumpPackageEnvironment + + ,getSandboxConfigFilePath + ,loadConfigOrSandboxConfig + ,findSavedDistPref + ,initPackageDBIfNeeded + ,maybeWithSandboxDirOnSearchPath + ,maybeWithSandboxPackageInfo + ,WereDepsReinstalled(..) + ,maybeReinstallAddSourceDeps + ,tryGetIndexFilePath + ,sandboxBuildDir + ,updateSandboxConfigFileFlag + ,updateInstallDirs + + ,configCompilerAux' + ,getPersistOrConfigCompiler + ,configPackageDB') +import Distribution.Client.Sandbox.PackageEnvironment + (setPackageDB + ,userPackageEnvironmentFile) +import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord) +import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox) +import Distribution.Client.Tar (createTarGzFile) +import Distribution.Client.Types (Password (..)) +import Distribution.Client.Init (initCabal) +import Distribution.Client.Manpage (manpage) +import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade +import Distribution.Client.Utils (determineNumJobs +#if defined(mingw32_HOST_OS) + ,relaxEncodingErrors +#endif + ,existsAndIsMoreRecentThan) + +import Distribution.Package (packageId) +import Distribution.PackageDescription + ( BuildType(..), Executable(..), buildable ) +import Distribution.PackageDescription.Parse + ( readPackageDescription ) +import Distribution.PackageDescription.PrettyPrint + ( writeGenericPackageDescription ) +import qualified Distribution.Simple as Simple +import qualified Distribution.Make as Make +import Distribution.Simple.Build + ( startInterpreter ) +import Distribution.Simple.Command + ( CommandParse(..), CommandUI(..), Command, CommandSpec(..) + , CommandType(..), commandsRun, commandAddAction, hiddenCommand + , commandFromSpec) +import Distribution.Simple.Compiler + ( Compiler(..) 
) +import Distribution.Simple.Configure + ( checkPersistBuildConfigOutdated, configCompilerAuxEx + , ConfigStateFileError(..), localBuildInfoFile + , getPersistBuildConfig, tryGetPersistBuildConfig ) +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Program (defaultProgramConfiguration + ,configureAllKnownPrograms + ,simpleProgramInvocation + ,getProgramInvocationOutput) +import Distribution.Simple.Program.Db (reconfigurePrograms) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Utils + ( cabalVersion, die, notice, info, topHandler + , findPackageDesc, tryFindPackageDesc ) +import Distribution.Text + ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity, normal ) +import Distribution.Version + ( Version(..), orLaterVersion ) +import qualified Paths_cabal_install (version) + +import System.Environment (getArgs, getProgName) +import System.Exit (exitFailure, exitSuccess) +import System.FilePath ( dropExtension, splitExtension + , takeExtension, (), (<.>)) +import System.IO ( BufferMode(LineBuffering), hSetBuffering +#ifdef mingw32_HOST_OS + , stderr +#endif + , stdout ) +import System.Directory (doesFileExist, getCurrentDirectory) +import Data.List (intercalate) +import Data.Maybe (listToMaybe) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid (Monoid(..)) +import Control.Applicative (pure, (<$>)) +#endif +import Control.Exception (SomeException(..), try) +import Control.Monad (when, unless, void) + +-- | Entry point +-- +main :: IO () +main = do + -- Enable line buffering so that we can get fast feedback even when piped. + -- This is especially important for CI and build systems. + hSetBuffering stdout LineBuffering + -- The default locale encoding for Windows CLI is not UTF-8 and printing + -- Unicode characters to it will fail unless we relax the handling of encoding + -- errors when writing to stderr and stdout. +#ifdef mingw32_HOST_OS + relaxEncodingErrors stdout + relaxEncodingErrors stderr +#endif + getArgs >>= mainWorker + +mainWorker :: [String] -> IO () +mainWorker args = topHandler $ + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printGlobalHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (globalFlags, commandParse) -> + case commandParse of + _ | fromFlagOrDefault False (globalVersion globalFlags) + -> printVersion + | fromFlagOrDefault False (globalNumericVersion globalFlags) + -> printNumericVersion + CommandHelp help -> printCommandHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo action -> do + globalFlags' <- updateSandboxConfigFileFlag globalFlags + action globalFlags' + + where + printCommandHelp help = do + pname <- getProgName + putStr (help pname) + printGlobalHelp help = do + pname <- getProgName + configFile <- defaultConfigFile + putStr (help pname) + putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" + ++ " " ++ configFile ++ "\n" + exists <- doesFileExist configFile + when (not exists) $ + putStrLn $ "This file will be generated with sensible " + ++ "defaults if you run 'cabal update'." + printOptionsList = putStr . 
unlines + printErrors errs = die $ intercalate "\n" errs + printNumericVersion = putStrLn $ display Paths_cabal_install.version + printVersion = putStrLn $ "cabal-install version " + ++ display Paths_cabal_install.version + ++ "\ncompiled using version " + ++ display cabalVersion + ++ " of the Cabal library " + + commands = map commandFromSpec commandSpecs + commandSpecs = + [ regularCmd installCommand installAction + , regularCmd updateCommand updateAction + , regularCmd listCommand listAction + , regularCmd infoCommand infoAction + , regularCmd fetchCommand fetchAction + , regularCmd freezeCommand freezeAction + , regularCmd getCommand getAction + , hiddenCmd unpackCommand unpackAction + , regularCmd checkCommand checkAction + , regularCmd sdistCommand sdistAction + , regularCmd uploadCommand uploadAction + , regularCmd reportCommand reportAction + , regularCmd runCommand runAction + , regularCmd initCommand initAction + , regularCmd configureExCommand configureAction + , regularCmd buildCommand buildAction + , regularCmd replCommand replAction + , regularCmd sandboxCommand sandboxAction + , regularCmd haddockCommand haddockAction + , regularCmd execCommand execAction + , regularCmd userConfigCommand userConfigAction + , regularCmd cleanCommand cleanAction + , regularCmd genBoundsCommand genBoundsAction + , wrapperCmd copyCommand copyVerbosity copyDistPref + , wrapperCmd hscolourCommand hscolourVerbosity hscolourDistPref + , wrapperCmd registerCommand regVerbosity regDistPref + , regularCmd testCommand testAction + , regularCmd benchmarkCommand benchmarkAction + , hiddenCmd uninstallCommand uninstallAction + , hiddenCmd formatCommand formatAction + , hiddenCmd upgradeCommand upgradeAction + , hiddenCmd win32SelfUpgradeCommand win32SelfUpgradeAction + , hiddenCmd actAsSetupCommand actAsSetupAction + , hiddenCmd manpageCommand (manpageAction commandSpecs) + + , hiddenCmd installCommand { commandName = "new-configure" } + CmdConfigure.configureAction + , hiddenCmd installCommand { commandName = "new-build" } + CmdBuild.buildAction + , hiddenCmd installCommand { commandName = "new-repl" } + CmdRepl.replAction + ] + +type Action = GlobalFlags -> IO () + +regularCmd :: CommandUI flags -> (flags -> [String] -> action) + -> CommandSpec action +regularCmd ui action = + CommandSpec ui ((flip commandAddAction) action) NormalCommand + +hiddenCmd :: CommandUI flags -> (flags -> [String] -> action) + -> CommandSpec action +hiddenCmd ui action = + CommandSpec ui (\ui' -> hiddenCommand (commandAddAction ui' action)) + HiddenCommand + +wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) + -> (flags -> Flag String) -> CommandSpec Action +wrapperCmd ui verbosity distPref = + CommandSpec ui (\ui' -> wrapperAction ui' verbosity distPref) NormalCommand + +wrapperAction :: Monoid flags + => CommandUI flags + -> (flags -> Flag Verbosity) + -> (flags -> Flag String) + -> Command Action +wrapperAction command verbosityFlag distPrefFlag = + commandAddAction command + { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do + let verbosity = fromFlagOrDefault normal (verbosityFlag flags) + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (distPrefFlag flags) + let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } + setupWrapper verbosity setupScriptOptions Nothing + command (const flags) extraArgs + +configureAction :: (ConfigFlags, 
ConfigExFlags) + -> [String] -> Action +configureAction (configFlags, configExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + + (useSandbox, config) <- fmap + (updateInstallDirs (configUserInstall configFlags)) + (loadConfigOrSandboxConfig verbosity globalFlags) + let configFlags' = savedConfigureFlags config `mappend` configFlags + configExFlags' = savedConfigureExFlags config `mappend` configExFlags + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, conf) <- configCompilerAuxEx configFlags' + + -- If we're working inside a sandbox and the user has set the -w option, we + -- may need to create a sandbox-local package DB for this compiler and add a + -- timestamp record for this compiler to the timestamp file. + let configFlags'' = case useSandbox of + NoSandbox -> configFlags' + (UseSandbox sandboxDir) -> setPackageDB sandboxDir + comp platform configFlags' + + whenUsingSandbox useSandbox $ \sandboxDir -> do + initPackageDBIfNeeded verbosity configFlags'' comp conf + -- NOTE: We do not write the new sandbox package DB location to + -- 'cabal.sandbox.config' here because 'configure -w' must not affect + -- subsequent 'install' (for UI compatibility with non-sandboxed mode). + + indexFile <- tryGetIndexFilePath config + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + configure verbosity + (configPackageDB' configFlags'') + repoContext + comp platform conf configFlags'' configExFlags' extraArgs + +buildAction :: (BuildFlags, BuildExFlags) -> [String] -> Action +buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) + noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + + -- Calls 'configureAction' to do the real work, so nothing special has to be + -- done to support sandboxes. + (useSandbox, config, distPref) <- reconfigure verbosity + (buildDistPref buildFlags) + mempty [] globalFlags noAddSource + (buildNumJobs buildFlags) (const Nothing) + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config distPref buildFlags extraArgs + + +-- | Actually do the work of building the package. This is separate from +-- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke +-- 'reconfigure' twice. +build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () +build verbosity config distPref buildFlags extraArgs = + setupWrapper verbosity setupOptions Nothing + (Cabal.buildCommand progConf) mkBuildFlags extraArgs + where + progConf = defaultProgramConfiguration + setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + + mkBuildFlags version = filterBuildFlags version config buildFlags' + buildFlags' = buildFlags + { buildVerbosity = toFlag verbosity + , buildDistPref = toFlag distPref + } + +-- | Make sure that we don't pass new flags to setup scripts compiled against +-- old versions of Cabal. +filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags +filterBuildFlags version config buildFlags + | version >= Version [1,19,1] [] = buildFlags_latest + -- Cabal < 1.19.1 doesn't support 'build -j'. 
+ | otherwise = buildFlags_pre_1_19_1 + where + buildFlags_pre_1_19_1 = buildFlags { + buildNumJobs = NoFlag + } + buildFlags_latest = buildFlags { + -- Take the 'jobs' setting '~/.cabal/config' into account. + buildNumJobs = Flag . Just . determineNumJobs $ + (numJobsConfigFlag `mappend` numJobsCmdLineFlag) + } + numJobsConfigFlag = installNumJobs . savedInstallFlags $ config + numJobsCmdLineFlag = buildNumJobs buildFlags + + +replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action +replAction (replFlags, buildExFlags) extraArgs globalFlags = do + cwd <- getCurrentDirectory + pkgDesc <- findPackageDesc cwd + either (const onNoPkgDesc) (const onPkgDesc) pkgDesc + where + verbosity = fromFlagOrDefault normal (replVerbosity replFlags) + + -- There is a .cabal file in the current directory: start a REPL and load + -- the project's modules. + onPkgDesc = do + let noAddSource = case replReload replFlags of + Flag True -> SkipAddSourceDepsCheck + _ -> fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + -- Calls 'configureAction' to do the real work, so nothing special has to + -- be done to support sandboxes. + (useSandbox, _config, distPref) <- + reconfigure verbosity (replDistPref replFlags) + mempty [] globalFlags noAddSource NoFlag + (const Nothing) + let progConf = defaultProgramConfiguration + setupOptions = defaultSetupScriptOptions + { useCabalVersion = orLaterVersion $ Version [1,18,0] [] + , useDistPref = distPref + } + replFlags' = replFlags + { replVerbosity = toFlag verbosity + , replDistPref = toFlag distPref + } + + maybeWithSandboxDirOnSearchPath useSandbox $ + setupWrapper verbosity setupOptions Nothing + (Cabal.replCommand progConf) (const replFlags') extraArgs + + -- No .cabal file in the current directory: just start the REPL (possibly + -- using the sandbox package DB). + onNoPkgDesc = do + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags config + (comp, platform, programDb) <- configCompilerAux' configFlags + programDb' <- reconfigurePrograms verbosity + (replProgramPaths replFlags) + (replProgramArgs replFlags) + programDb + startInterpreter verbosity programDb' comp platform + (configPackageDB' configFlags) + +-- | Re-configure the package in the current directory if needed. Deciding +-- when to reconfigure and with which options is convoluted: +-- +-- If we are reconfiguring, we must always run @configure@ with the +-- verbosity option we are given; however, that a previous configuration +-- uses a different verbosity setting is not reason enough to reconfigure. +-- +-- The package should be configured to use the same \"dist\" prefix as +-- given to the @build@ command, otherwise the build will probably +-- fail. Not only does this determine the \"dist\" prefix setting if we +-- need to reconfigure anyway, but an existing configuration should be +-- invalidated if its \"dist\" prefix differs. +-- +-- If the package has never been configured (i.e., there is no +-- LocalBuildInfo), we must configure first, using the default options. +-- +-- If the package has been configured, there will be a 'LocalBuildInfo'. +-- If there no package description file, we assume that the +-- 'PackageDescription' is up to date, though the configuration may need +-- to be updated for other reasons (see above). If there is a package +-- description file, and it has been modified since the 'LocalBuildInfo' +-- was generated, then we need to reconfigure. 
+-- +-- The caller of this function may also have specific requirements +-- regarding the flags the last configuration used. For example, +-- 'testAction' requires that the package be configured with test suites +-- enabled. The caller may pass the required settings to this function +-- along with a function to check the validity of the saved 'ConfigFlags'; +-- these required settings will be checked first upon determining that +-- a previous configuration exists. +reconfigure :: Verbosity -- ^ Verbosity setting + -> Flag FilePath -- ^ \"dist\" prefix + -> ConfigFlags -- ^ Additional config flags to set. These flags + -- will be 'mappend'ed to the last used or + -- default 'ConfigFlags' as appropriate, so + -- this value should be 'mempty' with only the + -- required flags set. The required verbosity + -- and \"dist\" prefix flags will be set + -- automatically because they are always + -- required; therefore, it is not necessary to + -- set them here. + -> [String] -- ^ Extra arguments + -> GlobalFlags -- ^ Global flags + -> SkipAddSourceDepsCheck + -- ^ Should we skip the timestamp check for modified + -- add-source dependencies? + -> Flag (Maybe Int) + -- ^ -j flag for reinstalling add-source deps. + -> (ConfigFlags -> Maybe String) + -- ^ Check that the required flags are set in + -- the last used 'ConfigFlags'. If the required + -- flags are not set, provide a message to the + -- user explaining the reason for + -- reconfiguration. Because the correct \"dist\" + -- prefix setting is always required, it is checked + -- automatically; this function need not check + -- for it. + -> IO (UseSandbox, SavedConfig, FilePath) +reconfigure verbosity flagDistPref addConfigFlags extraArgs globalFlags + skipAddSourceDepsCheck numJobsFlag checkFlags = do + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config flagDistPref + eLbi <- tryGetPersistBuildConfig distPref + config' <- case eLbi of + Left err -> onNoBuildConfig (useSandbox, config) distPref err + Right lbi -> onBuildConfig (useSandbox, config) distPref lbi + return (useSandbox, config', distPref) + + where + + -- We couldn't load the saved package config file. + -- + -- If we're in a sandbox: add-source deps don't have to be reinstalled + -- (since we don't know the compiler & platform). + onNoBuildConfig :: (UseSandbox, SavedConfig) -> FilePath + -> ConfigStateFileError -> IO SavedConfig + onNoBuildConfig (_, config) distPref err = do + let msg = case err of + ConfigStateFileMissing -> "Package has never been configured." + ConfigStateFileNoParse -> "Saved package config file seems " + ++ "to be corrupt." + _ -> show err + case err of + -- Note: the build config could have been generated by a custom setup + -- script built against a different Cabal version, so it's crucial that + -- we ignore the bad version error here. + ConfigStateFileBadVersion _ _ _ -> info verbosity msg + _ -> do + let distVerbFlags = mempty + { configVerbosity = toFlag verbosity + , configDistPref = toFlag distPref + } + defaultFlags = mappend addConfigFlags distVerbFlags + notice verbosity + $ msg ++ " Configuring with default flags." ++ configureManually + configureAction (defaultFlags, defaultConfigExFlags) + extraArgs globalFlags + return config + + -- Package has been configured, but the configuration may be out of + -- date or required flags may not be set. + -- + -- If we're in a sandbox: reinstall the modified add-source deps and + -- force reconfigure if we did. 
+ onBuildConfig :: (UseSandbox, SavedConfig) -> FilePath + -> LBI.LocalBuildInfo -> IO SavedConfig + onBuildConfig (useSandbox, config) distPref lbi = do + let configFlags = LBI.configFlags lbi + distVerbFlags = mempty + { configVerbosity = toFlag verbosity + , configDistPref = toFlag distPref + } + flags = mconcat [configFlags, addConfigFlags, distVerbFlags] + + -- Was the sandbox created after the package was already configured? We + -- may need to skip reinstallation of add-source deps and force + -- reconfigure. + let buildConfig = localBuildInfoFile distPref + sandboxConfig <- getSandboxConfigFilePath globalFlags + isSandboxConfigNewer <- + sandboxConfig `existsAndIsMoreRecentThan` buildConfig + + let skipAddSourceDepsCheck' + | isSandboxConfigNewer = SkipAddSourceDepsCheck + | otherwise = skipAddSourceDepsCheck + + when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $ + info verbosity "Skipping add-source deps check..." + + let (_, config') = updateInstallDirs + (configUserInstall flags) + (useSandbox, config) + + depsReinstalled <- + case skipAddSourceDepsCheck' of + DontSkipAddSourceDepsCheck -> + maybeReinstallAddSourceDeps + verbosity numJobsFlag flags globalFlags + (useSandbox, config') + SkipAddSourceDepsCheck -> do + return NoDepsReinstalled + + -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need + -- to force reconfigure. Note that it's possible to use @cabal.config@ + -- even without sandboxes. + isUserPackageEnvironmentFileNewer <- + userPackageEnvironmentFile `existsAndIsMoreRecentThan` buildConfig + + -- Determine whether we need to reconfigure and which message to show to + -- the user if that is the case. + mMsg <- determineMessageToShow distPref lbi configFlags + depsReinstalled isSandboxConfigNewer + isUserPackageEnvironmentFileNewer + case mMsg of + + -- No message for the user indicates that reconfiguration + -- is not required. + Nothing -> return config' + + -- Show the message and reconfigure. + Just msg -> do + notice verbosity msg + configureAction (flags, defaultConfigExFlags) + extraArgs globalFlags + return config' + + -- Determine what message, if any, to display to the user if reconfiguration + -- is required. + determineMessageToShow :: FilePath -> LBI.LocalBuildInfo -> ConfigFlags + -> WereDepsReinstalled -> Bool -> Bool + -> IO (Maybe String) + determineMessageToShow _ _ _ _ True _ = + -- The sandbox was created after the package was already configured. + return $! Just $! sandboxConfigNewerMessage + + determineMessageToShow _ _ _ _ False True = + -- The user package environment file was modified. + return $! Just $! userPackageEnvironmentFileModifiedMessage + + determineMessageToShow distPref lbi configFlags depsReinstalled + False False = do + let savedDistPref = fromFlagOrDefault + (useDistPref defaultSetupScriptOptions) + (configDistPref configFlags) + case depsReinstalled of + ReinstalledSomeDeps -> + -- Some add-source deps were reinstalled. + return $! Just $! reinstalledDepsMessage + NoDepsReinstalled -> + case checkFlags configFlags of + -- Flag required by the caller is not set. + Just msg -> return $! Just $! msg ++ configureManually + + Nothing + -- Required "dist" prefix is not set. + | savedDistPref /= distPref -> + return $! Just distPrefMessage + + -- All required flags are set, but the configuration + -- may be outdated. + | otherwise -> case LBI.pkgDescrFile lbi of + Nothing -> return Nothing + Just pdFile -> do + outdated <- checkPersistBuildConfigOutdated + distPref pdFile + return $! 
if outdated + then Just $! outdatedMessage pdFile + else Nothing + + reconfiguringMostRecent = " Re-configuring with most recently used options." + configureManually = " If this fails, please run configure manually." + sandboxConfigNewerMessage = + "The sandbox was created after the package was already configured." + ++ reconfiguringMostRecent + ++ configureManually + userPackageEnvironmentFileModifiedMessage = + "The user package environment file ('" + ++ userPackageEnvironmentFile ++ "') was modified." + ++ reconfiguringMostRecent + ++ configureManually + distPrefMessage = + "Package previously configured with different \"dist\" prefix." + ++ reconfiguringMostRecent + ++ configureManually + outdatedMessage pdFile = + pdFile ++ " has been changed." + ++ reconfiguringMostRecent + ++ configureManually + reinstalledDepsMessage = + "Some add-source dependencies have been reinstalled." + ++ reconfiguringMostRecent + ++ configureManually + +installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> Action +installAction (configFlags, _, installFlags, _) _ globalFlags + | fromFlagOrDefault False (installOnly installFlags) = do + let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (configDistPref configFlags) + let setupOpts = defaultSetupScriptOptions { useDistPref = distPref } + setupWrapper verbosity setupOpts Nothing installCommand (const mempty) [] + +installAction (configFlags, configExFlags, installFlags, haddockFlags) + extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + (useSandbox, config) <- fmap + (updateInstallDirs (configUserInstall configFlags)) + (loadConfigOrSandboxConfig verbosity globalFlags) + targets <- readUserTargets verbosity extraArgs + + -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to + -- 'configure' when run inside a sandbox. Right now, running + -- + -- $ cabal sandbox init && cabal configure -w /path/to/ghc + -- && cabal build && cabal install + -- + -- performs the compilation twice unless you also pass -w to 'install'. + -- However, this is the same behaviour that 'cabal install' has in the normal + -- mode of operation, so we stick to it for consistency. + + let sandboxDistPref = case useSandbox of + NoSandbox -> NoFlag + UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir + distPref <- findSavedDistPref config + (configDistPref configFlags `mappend` sandboxDistPref) + + let configFlags' = maybeForceTests installFlags' $ + savedConfigureFlags config `mappend` + configFlags { configDistPref = toFlag distPref } + configExFlags' = defaultConfigExFlags `mappend` + savedConfigureExFlags config `mappend` configExFlags + installFlags' = defaultInstallFlags `mappend` + savedInstallFlags config `mappend` installFlags + haddockFlags' = defaultHaddockFlags `mappend` + savedHaddockFlags config `mappend` + haddockFlags { haddockDistPref = toFlag distPref } + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, conf) <- configCompilerAux' configFlags' + -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the + -- future. 
+ conf' <- configureAllKnownPrograms verbosity conf + + -- If we're working inside a sandbox and the user has set the -w option, we + -- may need to create a sandbox-local package DB for this compiler and add a + -- timestamp record for this compiler to the timestamp file. + configFlags'' <- case useSandbox of + NoSandbox -> configAbsolutePaths $ configFlags' + (UseSandbox sandboxDir) -> return $ setPackageDB sandboxDir comp platform + configFlags' + + whenUsingSandbox useSandbox $ \sandboxDir -> do + initPackageDBIfNeeded verbosity configFlags'' comp conf' + + indexFile <- tryGetIndexFilePath config + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + + -- TODO: Passing 'SandboxPackageInfo' to install unconditionally here means + -- that 'cabal install some-package' inside a sandbox will sometimes reinstall + -- modified add-source deps, even if they are not among the dependencies of + -- 'some-package'. This can also prevent packages that depend on older + -- versions of add-source'd packages from building (see #1362). + maybeWithSandboxPackageInfo verbosity configFlags'' globalFlags' + comp platform conf useSandbox $ \mSandboxPkgInfo -> + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + install verbosity + (configPackageDB' configFlags'') + repoContext + comp platform conf' + useSandbox mSandboxPkgInfo + globalFlags' configFlags'' configExFlags' + installFlags' haddockFlags' + targets + + where + -- '--run-tests' implies '--enable-tests'. + maybeForceTests installFlags' configFlags' = + if fromFlagOrDefault False (installRunTests installFlags') + then configFlags' { configTests = toFlag True } + else configFlags' + +testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags + -> IO () +testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (testVerbosity testFlags) + addConfigFlags = mempty { configTests = toFlag True } + noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + buildFlags' = buildFlags + { buildVerbosity = testVerbosity testFlags } + checkFlags flags + | fromFlagOrDefault False (configTests flags) = Nothing + | otherwise = Just "Re-configuring with test suites enabled." + + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. + (useSandbox, config, distPref) <- + reconfigure verbosity (testDistPref testFlags) + addConfigFlags [] globalFlags noAddSource + (buildNumJobs buildFlags') checkFlags + let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + testFlags' = testFlags { testDistPref = toFlag distPref } + + -- The package was just configured, so the LBI must be available. + names <- componentNamesFromLBI verbosity distPref "test suites" + (\c -> case c of { LBI.CTest{} -> True; _ -> False }) + let extraArgs' + | null extraArgs = case names of + ComponentNamesUnknown -> [] + ComponentNames names' -> [ name | LBI.CTestName name <- names' ] + | otherwise = extraArgs + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config distPref buildFlags' extraArgs' + + maybeWithSandboxDirOnSearchPath useSandbox $ + setupWrapper verbosity setupOptions Nothing + Cabal.testCommand (const testFlags') extraArgs' + +data ComponentNames = ComponentNamesUnknown + | ComponentNames [LBI.ComponentName] + +-- | Return the names of all buildable components matching a given predicate. 
+componentNamesFromLBI :: Verbosity -> FilePath -> String + -> (LBI.Component -> Bool) + -> IO ComponentNames +componentNamesFromLBI verbosity distPref targetsDescr compPred = do + eLBI <- tryGetPersistBuildConfig distPref + case eLBI of + Left err -> case err of + -- Note: the build config could have been generated by a custom setup + -- script built against a different Cabal version, so it's crucial that + -- we ignore the bad version error here. + ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown + _ -> die (show err) + Right lbi -> do + let pkgDescr = LBI.localPkgDescr lbi + names = map LBI.componentName + . filter (buildable . LBI.componentBuildInfo) + . filter compPred $ + LBI.pkgComponents pkgDescr + if null names + then do notice verbosity $ "Package has no buildable " + ++ targetsDescr ++ "." + exitSuccess -- See #3215. + + else return $! (ComponentNames names) + +benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags) + -> [String] -> GlobalFlags + -> IO () +benchmarkAction (benchmarkFlags, buildFlags, buildExFlags) + extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal + (benchmarkVerbosity benchmarkFlags) + addConfigFlags = mempty { configBenchmarks = toFlag True } + buildFlags' = buildFlags + { buildVerbosity = benchmarkVerbosity benchmarkFlags } + checkFlags flags + | fromFlagOrDefault False (configBenchmarks flags) = Nothing + | otherwise = Just "Re-configuring with benchmarks enabled." + noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. + (useSandbox, config, distPref) <- + reconfigure verbosity (benchmarkDistPref benchmarkFlags) + addConfigFlags [] globalFlags noAddSource + (buildNumJobs buildFlags') checkFlags + let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref } + + -- The package was just configured, so the LBI must be available. 
+  names <- componentNamesFromLBI verbosity distPref "benchmarks"
+           (\c -> case c of { LBI.CBench{} -> True; _ -> False; })
+  let extraArgs'
+        | null extraArgs = case names of
+            ComponentNamesUnknown -> []
+            ComponentNames names' -> [name | LBI.CBenchName name <- names']
+        | otherwise = extraArgs
+
+  maybeWithSandboxDirOnSearchPath useSandbox $
+    build verbosity config distPref buildFlags' extraArgs'
+
+  maybeWithSandboxDirOnSearchPath useSandbox $
+    setupWrapper verbosity setupOptions Nothing
+      Cabal.benchmarkCommand (const benchmarkFlags') extraArgs'
+
+haddockAction :: HaddockFlags -> [String] -> Action
+haddockAction haddockFlags extraArgs globalFlags = do
+  let verbosity = fromFlag (haddockVerbosity haddockFlags)
+  (_useSandbox, config, distPref) <-
+    reconfigure verbosity (haddockDistPref haddockFlags)
+                mempty [] globalFlags DontSkipAddSourceDepsCheck
+                NoFlag (const Nothing)
+  let haddockFlags' = defaultHaddockFlags `mappend`
+                      savedHaddockFlags config `mappend`
+                      haddockFlags { haddockDistPref = toFlag distPref }
+      setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref }
+  setupWrapper verbosity setupScriptOptions Nothing
+    haddockCommand (const haddockFlags') extraArgs
+  when (haddockForHackage haddockFlags == Flag True) $ do
+    pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref)
+    let dest = distPref </> name <.> "tar.gz"
+        name = display (packageId pkg) ++ "-docs"
+        docDir = distPref </> "doc" </> "html"
+    createTarGzFile dest docDir name
+    notice verbosity $ "Documentation tarball created: " ++ dest
+
+cleanAction :: CleanFlags -> [String] -> Action
+cleanAction cleanFlags extraArgs globalFlags = do
+  load <- try (loadConfigOrSandboxConfig verbosity globalFlags)
+  let config = either (\(SomeException _) -> mempty) snd load
+  distPref <- findSavedDistPref config (cleanDistPref cleanFlags)
+  let setupScriptOptions = defaultSetupScriptOptions
+                           { useDistPref = distPref
+                           , useWin32CleanHack = True
+                           }
+      cleanFlags' = cleanFlags { cleanDistPref = toFlag distPref }
+  setupWrapper verbosity setupScriptOptions Nothing
+    cleanCommand (const cleanFlags') extraArgs
+  where
+    verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags)
+
+listAction :: ListFlags -> [String] -> Action
+listAction listFlags extraArgs globalFlags = do
+  let verbosity = fromFlag (listVerbosity listFlags)
+  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
+                           (globalFlags { globalRequireSandbox = Flag False })
+  let configFlags' = savedConfigureFlags config
+      configFlags = configFlags' {
+        configPackageDBs = configPackageDBs configFlags'
+                           `mappend` listPackageDBs listFlags
+        }
+      globalFlags' = savedGlobalFlags config `mappend` globalFlags
+  (comp, _, conf) <- configCompilerAux' configFlags
+  withRepoContext verbosity globalFlags' $ \repoContext ->
+    List.list verbosity
+              (configPackageDB' configFlags)
+              repoContext
+              comp
+              conf
+              listFlags
+              extraArgs
+
+infoAction :: InfoFlags -> [String] -> Action
+infoAction infoFlags extraArgs globalFlags = do
+  let verbosity = fromFlag (infoVerbosity infoFlags)
+  targets <- readUserTargets verbosity extraArgs
+  (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity
+                           (globalFlags { globalRequireSandbox = Flag False })
+  let configFlags' = savedConfigureFlags config
+      configFlags = configFlags' {
+        configPackageDBs = configPackageDBs configFlags'
+                           `mappend` infoPackageDBs infoFlags
+        }
+      globalFlags' = savedGlobalFlags config `mappend` globalFlags
+  (comp, _, conf) <- configCompilerAuxEx configFlags
+  withRepoContext verbosity
globalFlags' $ \repoContext -> + List.info verbosity + (configPackageDB' configFlags) + repoContext + comp + conf + globalFlags' + infoFlags + targets + +updateAction :: Flag Verbosity -> [String] -> Action +updateAction verbosityFlag extraArgs globalFlags = do + unless (null extraArgs) $ + die $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs + let verbosity = fromFlag verbosityFlag + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + update verbosity repoContext + +upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> Action +upgradeAction _ _ _ = die $ + "Use the 'cabal install' command instead of 'cabal upgrade'.\n" + ++ "You can install the latest version of a package using 'cabal install'. " + ++ "The 'cabal upgrade' command has been removed because people found it " + ++ "confusing and it often led to broken packages.\n" + ++ "If you want the old upgrade behaviour then use the install command " + ++ "with the --upgrade-dependencies flag (but check first with --dry-run " + ++ "to see what would happen). This will try to pick the latest versions " + ++ "of all dependencies, rather than the usual behaviour of trying to pick " + ++ "installed versions of all dependencies. If you do use " + ++ "--upgrade-dependencies, it is recommended that you do not upgrade core " + ++ "packages (e.g. by using appropriate --constraint= flags)." + +fetchAction :: FetchFlags -> [String] -> Action +fetchAction fetchFlags extraArgs globalFlags = do + let verbosity = fromFlag (fetchVerbosity fetchFlags) + targets <- readUserTargets verbosity extraArgs + config <- loadConfig verbosity (globalConfigFile globalFlags) + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, conf) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + fetch verbosity + (configPackageDB' configFlags) + repoContext + comp platform conf globalFlags' fetchFlags + targets + +freezeAction :: FreezeFlags -> [String] -> Action +freezeAction freezeFlags _extraArgs globalFlags = do + let verbosity = fromFlag (freezeVerbosity freezeFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, conf) <- configCompilerAux' configFlags + + maybeWithSandboxPackageInfo verbosity configFlags globalFlags' + comp platform conf useSandbox $ \mSandboxPkgInfo -> + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + freeze verbosity + (configPackageDB' configFlags) + repoContext + comp platform conf + mSandboxPkgInfo + globalFlags' freezeFlags + +genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () +genBoundsAction freezeFlags _extraArgs globalFlags = do + let verbosity = fromFlag (freezeVerbosity freezeFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, conf) <- configCompilerAux' configFlags + + maybeWithSandboxPackageInfo verbosity configFlags globalFlags' + comp platform conf useSandbox $ \mSandboxPkgInfo -> + 
maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + genBounds verbosity + (configPackageDB' configFlags) + repoContext + comp platform conf + mSandboxPkgInfo + globalFlags' freezeFlags + +uploadAction :: UploadFlags -> [String] -> Action +uploadAction uploadFlags extraArgs globalFlags = do + config <- loadConfig verbosity (globalConfigFile globalFlags) + let uploadFlags' = savedUploadFlags config `mappend` uploadFlags + globalFlags' = savedGlobalFlags config `mappend` globalFlags + tarfiles = extraArgs + when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $ + die "the 'upload' command expects at least one .tar.gz archive." + when (fromFlag (uploadCheck uploadFlags') + && fromFlag (uploadDoc uploadFlags')) $ + die "--check and --doc cannot be used together." + checkTarFiles extraArgs + maybe_password <- + case uploadPasswordCmd uploadFlags' + of Flag (xs:xss) -> Just . Password <$> + getProgramInvocationOutput verbosity + (simpleProgramInvocation xs xss) + _ -> pure $ flagToMaybe $ uploadPassword uploadFlags' + withRepoContext verbosity globalFlags' $ \repoContext -> do + if fromFlag (uploadCheck uploadFlags') + then do + Upload.check verbosity repoContext tarfiles + else if fromFlag (uploadDoc uploadFlags') + then do + when (length tarfiles > 1) $ + die $ "the 'upload' command can only upload documentation " + ++ "for one package at a time." + tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles + Upload.uploadDoc verbosity + repoContext + (flagToMaybe $ uploadUsername uploadFlags') + maybe_password + tarfile + else do + Upload.upload verbosity + repoContext + (flagToMaybe $ uploadUsername uploadFlags') + maybe_password + tarfiles + where + verbosity = fromFlag (uploadVerbosity uploadFlags) + checkTarFiles tarfiles + | not (null otherFiles) + = die $ "the 'upload' command expects only .tar.gz archives: " + ++ intercalate ", " otherFiles + | otherwise = sequence_ + [ do exists <- doesFileExist tarfile + unless exists $ die $ "file not found: " ++ tarfile + | tarfile <- tarfiles ] + + where otherFiles = filter (not . isTarGzFile) tarfiles + isTarGzFile file = case splitExtension file of + (file', ".gz") -> takeExtension file' == ".tar" + _ -> False + generateDocTarball config = do + notice verbosity $ + "No documentation tarball specified. " + ++ "Building a documentation tarball with default settings...\n" + ++ "If you need to customise Haddock options, " + ++ "run 'haddock --for-hackage' first " + ++ "to generate a documentation tarball." + haddockAction (defaultHaddockFlags { haddockForHackage = Flag True }) + [] globalFlags + distPref <- findSavedDistPref config NoFlag + pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) + return $ distPref display (packageId pkg) ++ "-docs" <.> "tar.gz" + +checkAction :: Flag Verbosity -> [String] -> Action +checkAction verbosityFlag extraArgs _globalFlags = do + unless (null extraArgs) $ + die $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs + allOk <- Check.check (fromFlag verbosityFlag) + unless allOk exitFailure + +formatAction :: Flag Verbosity -> [String] -> Action +formatAction verbosityFlag extraArgs _globalFlags = do + let verbosity = fromFlag verbosityFlag + path <- case extraArgs of + [] -> do cwd <- getCurrentDirectory + tryFindPackageDesc cwd + (p:_) -> return p + pkgDesc <- readPackageDescription verbosity path + -- Uses 'writeFileAtomic' under the hood. 
+ writeGenericPackageDescription path pkgDesc + +uninstallAction :: Flag Verbosity -> [String] -> Action +uninstallAction _verbosityFlag extraArgs _globalFlags = do + let package = case extraArgs of + p:_ -> p + _ -> "PACKAGE_NAME" + die $ "This version of 'cabal-install' does not support the 'uninstall' " + ++ "operation. " + ++ "It will likely be implemented at some point in the future; " + ++ "in the meantime you're advised to use either 'ghc-pkg unregister " + ++ package ++ "' or 'cabal sandbox hc-pkg -- unregister " ++ package ++ "'." + + +sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> Action +sdistAction (sdistFlags, sdistExFlags) extraArgs globalFlags = do + unless (null extraArgs) $ + die $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs + let verbosity = fromFlag (sDistVerbosity sdistFlags) + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (sDistDistPref sdistFlags) + let sdistFlags' = sdistFlags { sDistDistPref = toFlag distPref } + sdist sdistFlags' sdistExFlags + +reportAction :: ReportFlags -> [String] -> Action +reportAction reportFlags extraArgs globalFlags = do + unless (null extraArgs) $ + die $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs + + let verbosity = fromFlag (reportVerbosity reportFlags) + config <- loadConfig verbosity (globalConfigFile globalFlags) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + reportFlags' = savedReportFlags config `mappend` reportFlags + + withRepoContext verbosity globalFlags' $ \repoContext -> + Upload.report verbosity repoContext + (flagToMaybe $ reportUsername reportFlags') + (flagToMaybe $ reportPassword reportFlags') + +runAction :: (BuildFlags, BuildExFlags) -> [String] -> Action +runAction (buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) + let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. 
+ (useSandbox, config, distPref) <- + reconfigure verbosity (buildDistPref buildFlags) mempty [] + globalFlags noAddSource (buildNumJobs buildFlags) + (const Nothing) + + lbi <- getPersistBuildConfig distPref + (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config distPref buildFlags ["exe:" ++ exeName exe] + + maybeWithSandboxDirOnSearchPath useSandbox $ + run verbosity lbi exe exeArgs + +getAction :: GetFlags -> [String] -> Action +getAction getFlags extraArgs globalFlags = do + let verbosity = fromFlag (getVerbosity getFlags) + targets <- readUserTargets verbosity extraArgs + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + withRepoContext verbosity (savedGlobalFlags config) $ \repoContext -> + get verbosity + repoContext + globalFlags' + getFlags + targets + +unpackAction :: GetFlags -> [String] -> Action +unpackAction getFlags extraArgs globalFlags = do + getAction getFlags extraArgs globalFlags + +initAction :: InitFlags -> [String] -> Action +initAction initFlags extraArgs globalFlags = do + when (extraArgs /= []) $ + die $ "'init' doesn't take any extra arguments: " ++ unwords extraArgs + let verbosity = fromFlag (initVerbosity initFlags) + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let configFlags = savedConfigureFlags config + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, _, conf) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + initCabal verbosity + (configPackageDB' configFlags) + repoContext + comp + conf + initFlags + +sandboxAction :: SandboxFlags -> [String] -> Action +sandboxAction sandboxFlags extraArgs globalFlags = do + let verbosity = fromFlag (sandboxVerbosity sandboxFlags) + case extraArgs of + -- Basic sandbox commands. + ["init"] -> sandboxInit verbosity sandboxFlags globalFlags + ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags + ("add-source":extra) -> do + when (noExtraArgs extra) $ + die "The 'sandbox add-source' command expects at least one argument" + sandboxAddSource verbosity extra sandboxFlags globalFlags + ("delete-source":extra) -> do + when (noExtraArgs extra) $ + die ("The 'sandbox delete-source' command expects " ++ + "at least one argument") + sandboxDeleteSource verbosity extra sandboxFlags globalFlags + ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags + + -- More advanced commands. + ("hc-pkg":extra) -> do + when (noExtraArgs extra) $ + die $ "The 'sandbox hc-pkg' command expects at least one argument" + sandboxHcPkg verbosity sandboxFlags globalFlags extra + ["buildopts"] -> die "Not implemented!" + + -- Hidden commands. + ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags + + -- Error handling. + [] -> die $ "Please specify a subcommand (see 'help sandbox')" + _ -> die $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs + + where + noExtraArgs = (<1) . 
length + +execAction :: ExecFlags -> [String] -> Action +execAction execFlags extraArgs globalFlags = do + let verbosity = fromFlag (execVerbosity execFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags config + (comp, platform, conf) <- getPersistOrConfigCompiler configFlags + exec verbosity useSandbox comp platform conf extraArgs + +userConfigAction :: UserConfigFlags -> [String] -> Action +userConfigAction ucflags extraArgs globalFlags = do + let verbosity = fromFlag (userConfigVerbosity ucflags) + force = fromFlag (userConfigForce ucflags) + case extraArgs of + ("init":_) -> do + path <- configFile + fileExists <- doesFileExist path + if (not fileExists || (fileExists && force)) + then void $ createDefaultConfigFile verbosity path + else die $ path ++ " already exists." + ("diff":_) -> mapM_ putStrLn =<< userConfigDiff globalFlags + ("update":_) -> userConfigUpdate verbosity globalFlags + -- Error handling. + [] -> die $ "Please specify a subcommand (see 'help user-config')" + _ -> die $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs + where configFile = getConfigFilePath (globalConfigFile globalFlags) + +-- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details. +-- +win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> Action +win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do + let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags) + Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path +win32SelfUpgradeAction _ _ _ = return () + +-- | Used as an entry point when cabal-install needs to invoke itself +-- as a setup script. This can happen e.g. when doing parallel builds. +-- +actAsSetupAction :: ActAsSetupFlags -> [String] -> Action +actAsSetupAction actAsSetupFlags args _globalFlags = + let bt = fromFlag (actAsSetupBuildType actAsSetupFlags) + in case bt of + Simple -> Simple.defaultMainArgs args + Configure -> Simple.defaultMainWithHooksArgs + Simple.autoconfUserHooks args + Make -> Make.defaultMainArgs args + Custom -> error "actAsSetupAction Custom" + (UnknownBuildType _) -> error "actAsSetupAction UnknownBuildType" + +manpageAction :: [CommandSpec action] -> Flag Verbosity -> [String] -> Action +manpageAction commands _ extraArgs _ = do + unless (null extraArgs) $ + die $ "'manpage' doesn't take any extra arguments: " ++ unwords extraArgs + pname <- getProgName + let cabalCmd = if takeExtension pname == ".exe" + then dropExtension pname + else pname + putStrLn $ manpage cabalCmd commands diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/README.md 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,155 @@ +The cabal-install package +========================= + +See the [Cabal web site] for more information. + +The `cabal-install` package provides a command line tool named `cabal`. +It uses the [Cabal] library and provides a user interface to the +Cabal/[Hackage] build automation and package management system. It can +build and install both local and remote packages, including +dependencies. 
+ +[Cabal web site]: http://www.haskell.org/cabal/ +[Cabal]: ../Cabal/README.md + +Installing the `cabal` command-line tool +======================================== + +The `cabal-install` package requires a number of other packages, most of +which come with a standard GHC installation. It requires the [network] +package, which is sometimes packaged separately by Linux distributions; +for example, on Debian or Ubuntu, it is located in the +"libghc6-network-dev" package. + +`cabal` requires a few other Haskell packages that are not always +installed. The exact list is specified in the [.cabal] file or in the +[bootstrap.sh] file. All these packages are available from [Hackage]. + +Note that on some Unix systems you may need to install an additional +zlib development package using your system package manager; for example, +on Debian or Ubuntu, it is located in the "zlib1g-dev" package; on +Fedora, it is located in the "zlib-devel" package. It is required +because the Haskell zlib package uses the system zlib C library and +header files. + +The `cabal-install` package is now part of the [Haskell Platform], so you +do not usually need to install it separately. However, if you are +starting from a minimal GHC installation, you need to install +`cabal-install` manually. Since it is an ordinary Cabal package, +`cabal-install` can be built the standard way; to facilitate this, the +process has been partially automated. It is described below. + +[.cabal]: cabal-install.cabal +[network]: http://hackage.haskell.org/package/network +[Haskell Platform]: http://www.haskell.org/platform/ + +Quick start on Unix-like systems +-------------------------------- + +As a convenience for users on Unix-like systems, there is a +[bootstrap.sh] script that will download and install each of +`cabal-install`'s dependencies in turn. + + $ ./bootstrap.sh + +It will download and install the dependencies. The script will install the +library packages (vanilla, profiling and shared) into `$HOME/.cabal/` and the +`cabal` program into `$HOME/.cabal/bin/`. If you don't want to install profiling +and shared versions of the libraries, use + + $ EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh + +You then have the choice either to place `$HOME/.cabal/bin` on your +`$PATH` or move the `cabal` program to somewhere on your `$PATH`. Next, +you can get the latest list of packages by running: + + $ cabal update + +This will also create a default configuration file, if it does not +already exist, at `$HOME/.cabal/config`. + +By default, `cabal` will install programs to `$HOME/.cabal/bin`. If you +do not want to add this directory to your `$PATH`, you can change +the setting in the config file; for example, you could use the +following: + + symlink-bindir: $HOME/bin + + +Quick start on Windows systems +------------------------------ + +For Windows users, a precompiled program ([cabal.exe]) is provided. +Download and put it somewhere on your `%PATH%` (for example, +`C:\Program Files\Haskell\bin`.) + +Next, you can get the latest list of packages by running: + + $ cabal update + +This will also create a default configuration file (if it does not +already exist) at +`C:\Documents and Settings\%USERNAME%\Application Data\cabal\config`. + +[cabal.exe]: http://www.haskell.org/cabal/release/cabal-install-latest/ + +Using `cabal` +============= + +There are two sets of commands: commands for working with a local +project build tree and those for working with packages distributed +from [Hackage]. 
+ +For the list of the full set of commands and flags for each command, +run: + + $ cabal help + + +Commands for developers for local build trees +--------------------------------------------- + +The commands for local project build trees are almost the same as the +`runghc Setup` command-line interface you may already be familiar with. +In particular, it has the following commands: + + * `cabal configure` + * `cabal build` + * `cabal haddock` + * `cabal clean` + * `cabal sdist` + +The `install` command is somewhat different; it is an all-in-one +operation. If you run `cabal install` in your build tree, it will +configure, build, and install. It takes all the flags that `configure` +takes such as `--global` and `--prefix`. + +In addition, `cabal` will download and install any dependencies that are +not already installed. It can also rebuild packages to ensure a +consistent set of dependencies. + + +Commands for released Hackage packages +-------------------------------------- + + $ cabal update + +This command gets the latest list of packages from the [Hackage] server. +On occasion, this command must be run manually--for instance, if you +want to install a newly released package. + + $ cabal install xmonad + +This command installs one or more named packages, and all their +dependencies, from Hackage. By default, it installs the latest available +version; however, you may specify exact versions or version ranges. For +example, `cabal install alex-2.2` or `cabal install parsec < 3`. + + $ cabal list xml + +This does a search of the installed and available packages. It does a +case-insensitive substring match on the package name. + + +[Hackage]: http://hackage.haskell.org +[bootstrap.sh]: bootstrap.sh diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,53 @@ +import Distribution.PackageDescription ( PackageDescription ) +import Distribution.Simple ( defaultMainWithHooks + , simpleUserHooks + , postBuild + , postCopy + , postInst + ) +import Distribution.Simple.InstallDirs ( mandir + , CopyDest (NoCopyDest) + ) +import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
+ , absoluteInstallDirs + ) +import Distribution.Simple.Utils ( copyFiles + , notice ) +import Distribution.Simple.Setup ( buildVerbosity + , copyDest + , copyVerbosity + , fromFlag + , installVerbosity + ) +import Distribution.Verbosity ( Verbosity ) + +import System.IO ( openFile + , IOMode (WriteMode) + ) +import System.Process ( runProcess ) +import System.FilePath ( (</>) ) + + +main :: IO () +main = defaultMainWithHooks $ simpleUserHooks + { postBuild = \ _ flags _ lbi -> + buildManpage lbi (fromFlag $ buildVerbosity flags) + , postCopy = \ _ flags pkg lbi -> + installManpage pkg lbi (fromFlag $ copyVerbosity flags) (fromFlag $ copyDest flags) + , postInst = \ _ flags pkg lbi -> + installManpage pkg lbi (fromFlag $ installVerbosity flags) NoCopyDest + } + +buildManpage :: LocalBuildInfo -> Verbosity -> IO () +buildManpage lbi verbosity = do + let cabal = buildDir lbi </> "cabal/cabal" + manpage = buildDir lbi </> "cabal/cabal.1" + manpageHandle <- openFile manpage WriteMode + notice verbosity ("Generating manual page " ++ manpage ++ " ...") + _ <- runProcess cabal ["manpage"] Nothing Nothing Nothing (Just manpageHandle) Nothing + return () + +installManpage :: PackageDescription -> LocalBuildInfo -> Verbosity -> CopyDest -> IO () +installManpage pkg lbi verbosity copy = do + let destDir = mandir (absoluteInstallDirs pkg lbi copy) </> "man1" + copyFiles verbosity destDir [(buildDir lbi </> "cabal", "cabal.1")] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/A.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/A.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/A.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/A.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +module A where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/plain.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/plain.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/plain.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/plain.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +name: plain +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Custom +cabal-version: >=1.10 + +library + exposed-modules: A + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +import Distribution.Simple +import System.IO +main = hPutStrLn stderr "Custom" >> defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.err 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.err 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +Custom +Custom diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom/should_run/plain.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +. 
../common.sh +cd plain +cabal configure +cabal build diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/Cabal.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,8 @@ +name: Cabal +version: 99998 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: CabalMessage diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99998/CabalMessage.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +module CabalMessage where + +message = "This is Cabal-99998" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/Cabal.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,8 @@ +name: Cabal +version: 99999 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: CabalMessage diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/Cabal-99999/CabalMessage.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +module CabalMessage where + +message = "This is Cabal-99999" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/custom-setup.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +name: custom-setup +version: 1.0 +build-type: Custom +cabal-version: >= 99999 + +custom-setup + setup-depends: base, Cabal >= 99999 + +library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,5 @@ +import CabalMessage (message) +import System.Exit +import System.IO + +main = hPutStrLn stderr message >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/custom-setup-without-cabal.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +name: custom-setup-without-cabal +version: 1.0 +build-type: Custom +cabal-version: >= 99999 + +custom-setup + setup-depends: base + +library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs --- 
cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +import System.Exit +import System.IO + +main = hPutStrLn stderr "My custom Setup" >> exitFailure diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/custom-setup-without-cabal-defaultMain.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +name: custom-setup-without-cabal-defaultMain +version: 1.0 +build-type: Custom +cabal-version: >= 1.2 + +custom-setup + setup-depends: base + +library diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom-setup-without-cabal-defaultMain/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +import Distribution.Simple + +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_allow_Cabal_import.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +. ../common.sh +cd custom-setup-without-cabal-defaultMain + +# This package has explicit setup dependencies that do not include Cabal. +# Compilation should fail because Setup.hs imports Distribution.Simple. +! 
cabal new-build custom-setup-without-cabal-defaultMain > output 2>&1 +cat output +grep -q "\(Could not find module\|Failed to load interface for\).*Distribution\\.Simple" output \ + || die "Should not have been able to import Cabal" + +grep -q "It is a member of the hidden package .*Cabal-" output \ + || die "Cabal should be available" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/custom_setup_without_Cabal_doesnt_require_Cabal.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +. ../common.sh +cd custom-setup-without-cabal + +# This package has explicit setup dependencies that do not include Cabal. +# new-build should try to build it, even though the cabal-version cannot be +# satisfied by an installed version of Cabal (cabal-version: >= 99999). However, +# configure should fail because Setup.hs just prints an error message and exits. +! cabal new-build custom-setup-without-cabal > output 2>&1 +cat output +grep -q "My custom Setup" output \ + || die "Expected output from custom Setup" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/custom-setup/should_run/installs_Cabal_as_setup_dep.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,15 @@ +# Regression test for issue #3436 + +. ../common.sh +cabal sandbox init +cabal install ./Cabal-99998 +cabal sandbox add-source Cabal-99999 + +# Install custom-setup, which has a setup dependency on Cabal-99999. +# cabal should build the setup script with Cabal-99999, but then +# configure should fail because Setup just prints an error message +# imported from Cabal and exits. +! 
cabal install custom-setup/ > output 2>&1 + +cat output +grep -q "This is Cabal-99999" output || die "Expected output from Cabal-99999" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.err 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +RE:^cabal(\.exe)?: Please specify an executable to run$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. 
../common.sh + +cabal exec diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +This is my-executable diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/adds_sandbox_bin_directory_to_path.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,10 @@ +. ../common.sh + +cabal sandbox delete > /dev/null +cabal exec my-executable && die "Unexpectedly found executable" + +cabal sandbox init > /dev/null +cabal install > /dev/null + +# Execute indirectly via bash to ensure that we go through $PATH +cabal exec sh -- -c my-executable || die "Did not find executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +Config file path source is commandline option. +Config file config-file not found. +Writing default configuration to config-file +find_me_in_output diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/auto_configures_on_exec.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +. 
../common.sh +cabal exec echo find_me_in_output diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +This is my-executable diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/can_run_executables_installed_in_sandbox.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +. ../common.sh + +cabal sandbox delete > /dev/null +cabal exec my-executable && die "Unexpectedly found executable" + +cabal sandbox init > /dev/null +cabal install > /dev/null + +cabal exec my-executable || die "Did not find executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,14 @@ +. ../common.sh + +cabal sandbox delete > /dev/null +cabal exec my-executable && die "Unexpectedly found executable" + +cabal sandbox init > /dev/null +cabal install > /dev/null + +# The library should not be available outside the sandbox +"$GHC_PKG" list | grep -v "my-0.1" + +# When run inside 'cabal-exec' the 'sandbox hc-pkg list' sub-command +# should find the library. 
+cabal exec sh -- -c 'cd subdir && "$CABAL" sandbox hc-pkg list' | grep "my-0.1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/configures_ghc_to_use_sandbox.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,13 @@ +. ../common.sh + +cabal sandbox delete > /dev/null +cabal exec my-executable && die "Unexpectedly found executable" + +cabal sandbox init > /dev/null +cabal install > /dev/null + +# The library should not be available outside the sandbox +"$GHC_PKG" list | grep -v "my-0.1" + +# Execute ghc-pkg inside the sandbox; it should find my-0.1 +cabal exec ghc-pkg list | grep "my-0.1" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/Foo.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/Foo.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/Foo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/Foo.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +module Foo where + +foo :: String +foo = "foo" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/my.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,14 @@ +name: my +version: 0.1 +license: BSD3 +cabal-version: >= 1.2 +build-type: Simple + +library + exposed-modules: Foo + build-depends: base + + +executable my-executable + main-is: My.hs + build-depends: base diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/My.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/My.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/My.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/My.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,5 @@ +module Main where + +main :: IO () +main = do + putStrLn "This is my-executable" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.out 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +this string diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/exec/should_run/runs_given_command.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal configure > /dev/null +cabal exec echo this string diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_benchmarks_freezes_bench_deps.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal freeze --disable-benchmarks +grep -v " criterion ==" cabal.config || die "should NOT have frozen criterion" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/disable_tests_freezes_test_deps.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. 
../common.sh +cabal freeze --disable-tests +grep -v " test-framework ==" cabal.config || die "should NOT have frozen test-framework" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_nondeps.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,5 @@ +. ../common.sh +# TODO: Test this against a package installed in the sandbox but not +# depended upon. +cabal freeze +grep -v "exceptions ==" cabal.config || die "should not have frozen exceptions" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/does_not_freeze_self.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal freeze +grep -v " my ==" cabal.config || die "should not have frozen self" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/dry_run_does_not_create_config.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal freeze --dry-run +[ ! -e cabal.config ] || die "cabal.config file should not have been created" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_benchmarks_freezes_bench_deps.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +. 
../common.sh +# TODO: solver should find solution without extra flags too +cabal freeze --enable-benchmarks --reorder-goals --max-backjumps=-1 +grep " criterion ==" cabal.config || die "should have frozen criterion" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/enable_tests_freezes_test_deps.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal freeze --enable-tests +grep " test-framework ==" cabal.config || die "should have frozen test-framework" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_direct_dependencies.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. ../common.sh +cabal freeze +grep " base ==" cabal.config || die "'base' should have been frozen" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/freezes_transitive_dependencies.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +. 
../common.sh +cabal freeze +grep " ghc-prim ==" cabal.config || die "'ghc-prim' should have been frozen" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/my.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/my.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/my.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/my.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,21 @@ +name: my +version: 0.1 +license: BSD3 +cabal-version: >= 1.20.0 +build-type: Simple + +library + exposed-modules: Foo + build-depends: base + +test-suite test-Foo + type: exitcode-stdio-1.0 + hs-source-dirs: tests + main-is: test-Foo.hs + build-depends: base, my, test-framework + +benchmark bench-Foo + type: exitcode-stdio-1.0 + hs-source-dirs: benchmarks + main-is: benchmark-Foo.hs + build-depends: base, my, criterion diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/runs_without_error.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/runs_without_error.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/runs_without_error.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/freeze/should_run/runs_without_error.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +. ../common.sh +cabal freeze diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/manpage/should_run/outputs_manpage.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +. 
../common.sh + +OUTPUT=`cabal manpage` + +# contains visible command descriptions +echo $OUTPUT | grep -q '\.B cabal install' || die "visible command description line not found in:\n----$OUTPUT\n----" + +# does not contain hidden command descriptions +echo $OUTPUT | grep -q '\.B cabal manpage' && die "hidden command description line found in:\n----$OUTPUT\n----" + +exit 0 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,8 @@ +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/finds_second_source_of_multiple_source.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +. ../common.sh + +# Create the sandbox +cabal sandbox init + +# Add the sources +cabal sandbox add-source p +cabal sandbox add-source q + +# Install the second package +cabal install q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/p.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: p +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/p/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/q.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: q +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/multiple-source/should_run/q/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/p.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +name: p +version: 1.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: P + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/P.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +module P where diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/p/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Main.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +module Main where +import P +main :: IO () +main = return () diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-broken.cabal.in 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +name: q +version: 0.1.0.0 +license: BSD3 +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +executable q + main-is: Main.hs + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/q-fixed.cabal.in 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +name: q +version: 0.1.0.0 +license: BSD3 +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +executable q + main-is: Main.hs + build-depends: base, p + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files/q/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/new-build/monitor_cabal_files.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,8 @@ +. ./common.sh +cd monitor_cabal_files +cp q/q-broken.cabal.in q/q.cabal +echo "Run 1" | awk '{print;print > "/dev/stderr"}' +! 
cabal new-build q +cp q/q-fixed.cabal.in q/q.cabal +echo "Run 2" | awk '{print;print > "/dev/stderr"}' +cabal new-build q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Main.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Main.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Main.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = putStrLn "Hello, Haskell!" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/test-3199.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/test-3199.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/test-3199.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199/test-3199.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,27 @@ +name: test-t3199 +version: 0.1.0.0 +license: BSD3 +author: Mikhail Glushenkov +maintainer: mikhail.glushenkov@gmail.com +category: Test +build-type: Custom +cabal-version: >=1.10 + +flag exe_2 + description: Build second exe + default: False + +executable test-3199-1 + main-is: Main.hs + build-depends: base + default-language: Haskell2010 + +executable test-3199-2 + main-is: Main.hs + build-depends: base, ansi-terminal + default-language: Haskell2010 + + if flag(exe_2) + buildable: True + else + buildable: False diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199.sh 
cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/regression/t3199.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,12 @@ +. ./common.sh + +if [[ `ghc --numeric-version` =~ "7\\." ]]; then + cd t3199 + tmpfile=$(mktemp /tmp/cabal-t3199.XXXXXX) + cabal sandbox init + cabal sandbox add-source ../../../../../Cabal + cabal install --package-db=clear --package-db=global --only-dep --dry-run > $tmpfile + grep -q "the following would be installed" $tmpfile || die "Should've installed Cabal" + grep -q Cabal $tmpfile || die "Should've installed Cabal" + rm $tmpfile +fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,8 @@ +cabal() { + "$CABAL" $CABAL_ARGS "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.err 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,3 @@ +Warning: Sources not registered: "q" + +RE:^cabal(\.exe)?: The sources with the above errors were skipped\. \("q"\)$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/fail_removing_source_thats_not_registered.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,10 @@ +. 
../common.sh + +# Create the sandbox +cabal sandbox init > /dev/null + +# Add one source +cabal sandbox add-source p > /dev/null + +# Remove a source that exists on disk, but is not registered +cabal sandbox delete-source q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/p.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: p +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/p/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/q.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: q +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_fail/q/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/p.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: p +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/p/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/q.cabal 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +name: q +version: 0.1.0.0 +license-file: LICENSE +author: Edward Z. 
Yang +maintainer: ezyang@cs.stanford.edu +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/q/Setup.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/remove_nonexistent_source.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,22 @@ +. ../common.sh + +# Create the sandbox +cabal sandbox init + +# Add the sources +cabal sandbox add-source p +cabal sandbox add-source q + +# delete the directory on disk +rm -R p + +# Remove the registered source which is no longer on disk. cabal's handling of +# non-existent sources depends on the behavior of the directory package. +if OUTPUT=`cabal sandbox delete-source p 2>&1`; then + # 'canonicalizePath' should always succeed with directory >= 1.2.3.0 + echo $OUTPUT | grep 'Success deleting sources: "p"' \ + || die "Incorrect success message: $OUTPUT" +else + echo $OUTPUT | grep 'Warning: Source directory not found for paths: "p"' \ + || die "Incorrect failure message: $OUTPUT" +fi diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,6 @@ +Success deleting sources: "p" "q" + +Note: 'sandbox delete-source' only unregisters the source dependency, but does +not remove the package from the sandbox package DB. + +Use 'sandbox hc-pkg -- unregister' to do that. 
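The note in this golden output hints at a second step that the tests themselves never perform. As an illustrative sketch (not part of this patch), fully removing an add-source dependency named `p` from a sandbox would combine both commands; the package name is just the placeholder used throughout these tests, and `cabal` stands for whatever cabal-install binary is on the PATH or provided by the test wrapper:

    # Unregister the add-source link; the built package stays in the sandbox package DB
    # (this is the step the tests above exercise).
    cabal sandbox delete-source p

    # Then drop the installed package from the sandbox package DB as well,
    # as suggested by the note in the golden output.
    cabal sandbox hc-pkg -- unregister p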
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/sandbox-sources/should_run/report_success_removing_source.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,11 @@ +. ../common.sh + +# Create the sandbox +cabal sandbox init > /dev/null + +# Add the sources +cabal sandbox add-source p > /dev/null +cabal sandbox add-source q > /dev/null + +# Remove one of the sources +cabal sandbox delete-source p q diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/common.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/common.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/common.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/common.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +# Helper to run Cabal +cabal() { + "$CABAL" $CABAL_ARGS_NO_CONFIG_FILE "$@" +} + +die() { + echo "die: $@" + exit 1 +} diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.err 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +RE:^cabal(\.exe)?: \./cabal-config already exists\.$ diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_fail/doesnt_overwrite_without_f.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,6 @@ +. 
../common.sh + +rm -f ./cabal-config +cabal --config-file=./cabal-config user-config init > /dev/null +cabal --config-file=./cabal-config user-config init +rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,2 @@ +Writing default configuration to ./cabal-config +Writing default configuration to ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/overwrites_with_f.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,9 @@ +. ../common.sh + +rm -f ./cabal-config +cabal --config-file=./cabal-config user-config init \ + || die "Couldn't create config file" +cabal --config-file=./cabal-config user-config -f init \ + || die "Couldn't create config file" +test -e ./cabal-config || die "Config file doesn't exist" +rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +Writing default configuration to ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/runs_without_error.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,7 @@ +. 
../common.sh + +rm -f ./cabal-config +cabal --config-file=./cabal-config user-config init \ + || die "Couldn't create config file" +test -e ./cabal-config || die "Config file doesn't exist" +rm -f ./cabal-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.out 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1 @@ +Writing default configuration to ./my-config diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests/user-config/should_run/uses_CABAL_CONFIG.sh 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,5 @@ +. ../common.sh + +export CABAL_CONFIG=./my-config +cabal user-config init || die "Couldn't create config file" +test -e ./my-config || die "Config file doesn't exist" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/IntegrationTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/IntegrationTests.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,310 @@ +{-# LANGUAGE CPP #-} +-- | Groups black-box tests of cabal-install and configures them to test +-- the correct binary. +-- +-- This file should do nothing but import tests from other modules and run +-- them with the path to the correct cabal-install binary. +module Main + where + +-- Modules from Cabal. +import Distribution.Compat.CreatePipe (createPipe) +import Distribution.Compat.Environment (setEnv) +import Distribution.Compat.Internal.TempFile (createTempDirectory) +import Distribution.Simple.Configure (findDistPrefOrDefault) +import Distribution.Simple.Program.Builtin (ghcPkgProgram) +import Distribution.Simple.Program.Db + (defaultProgramDb, requireProgram, setProgramSearchPath) +import Distribution.Simple.Program.Find + (ProgramSearchPathEntry(ProgramSearchPathDir), defaultProgramSearchPath) +import Distribution.Simple.Program.Types + ( Program(..), simpleProgram, programPath) +import Distribution.Simple.Setup ( Flag(..) ) +import Distribution.Simple.Utils ( findProgramVersion, copyDirectoryRecursive ) +import Distribution.Verbosity (normal) + +-- Third party modules. 
+import Control.Concurrent.Async (withAsync, wait) +import Control.Exception (bracket) +import Data.Maybe (fromMaybe) +import System.Directory + ( canonicalizePath + , findExecutable + , getDirectoryContents + , getTemporaryDirectory + , doesDirectoryExist + , removeDirectoryRecursive + , doesFileExist ) +import System.FilePath +import Test.Tasty (TestTree, defaultMain, testGroup) +import Test.Tasty.HUnit (testCase, Assertion, assertFailure) +import Control.Monad ( filterM, forM, unless, when ) +import Data.List (isPrefixOf, isSuffixOf, sort) +import Data.IORef (newIORef, writeIORef, readIORef) +import System.Exit (ExitCode(..)) +import System.IO (withBinaryFile, IOMode(ReadMode)) +import System.Process (runProcess, waitForProcess) +import Text.Regex.Posix ((=~)) +import qualified Data.ByteString as B +import qualified Data.ByteString.Char8 as C8 +import Data.ByteString (ByteString) + +#if MIN_VERSION_base(4,6,0) +import System.Environment ( getExecutablePath ) +#endif + +-- | Test case. +data TestCase = TestCase + { tcName :: String -- ^ Name of the shell script + , tcBaseDirectory :: FilePath + , tcCategory :: String + , tcShouldX :: String + , tcStdOutPath :: Maybe FilePath -- ^ File path of "golden standard output" + , tcStdErrPath :: Maybe FilePath -- ^ File path of "golden standard error" + } + +-- | Test result. +data TestResult = TestResult + { trExitCode :: ExitCode + , trStdOut :: ByteString + , trStdErr :: ByteString + , trWorkingDirectory :: FilePath + } + +-- | Cabal executable +cabalProgram :: Program +cabalProgram = (simpleProgram "cabal") { + programFindVersion = findProgramVersion "--numeric-version" id + } + +-- | Convert test result to string. +testResultToString :: TestResult -> String +testResultToString testResult = + exitStatus ++ "\n" ++ workingDirectory ++ "\n\n" ++ stdOut ++ "\n\n" ++ stdErr + where + exitStatus = "Exit status: " ++ show (trExitCode testResult) + workingDirectory = "Working directory: " ++ (trWorkingDirectory testResult) + stdOut = " was:\n" ++ C8.unpack (trStdOut testResult) + stdErr = " was:\n" ++ C8.unpack (trStdErr testResult) + +-- | Returns the command that was issued, the return code, and the output text +run :: FilePath -> String -> [String] -> IO TestResult +run cwd path args = do + -- path is relative to the current directory; canonicalizePath makes it + -- absolute, so that runProcess will find it even when changing directory. + path' <- canonicalizePath path + + (pid, hReadStdOut, hReadStdErr) <- do + -- Create pipes for StdOut and StdErr + (hReadStdOut, hWriteStdOut) <- createPipe + (hReadStdErr, hWriteStdErr) <- createPipe + -- Run the process + pid <- runProcess path' args (Just cwd) Nothing Nothing (Just hWriteStdOut) (Just hWriteStdErr) + -- Return the pid and read ends of the pipes + return (pid, hReadStdOut, hReadStdErr) + -- Read subprocess output using asynchronous threads; we need to + -- do this aynchronously to avoid deadlocks due to buffers filling + -- up. + withAsync (B.hGetContents hReadStdOut) $ \stdOutAsync -> do + withAsync (B.hGetContents hReadStdErr) $ \stdErrAsync -> do + -- Wait for the subprocess to terminate + exitcode <- waitForProcess pid + -- We can now be sure that no further output is going to arrive, + -- so we wait for the results of the asynchronous reads. + stdOut <- wait stdOutAsync + stdErr <- wait stdErrAsync + -- Done + return $ TestResult exitcode stdOut stdErr cwd + +-- | Get a list of all names in a directory, excluding all hidden or +-- system files/directories such as '.', '..' 
or any files/directories +-- starting with a '.'. +listDirectory :: FilePath -> IO [String] +listDirectory directory = do + fmap (filter notHidden) $ getDirectoryContents directory + where + notHidden = not . isHidden + isHidden name = "." `isPrefixOf` name + +-- | List a directory as per 'listDirectory', but return an empty list +-- in case the directory does not exist. +listDirectoryLax :: FilePath -> IO [String] +listDirectoryLax directory = do + d <- doesDirectoryExist directory + if d then + listDirectory directory + else + return [ ] + +pathIfExists :: FilePath -> IO (Maybe FilePath) +pathIfExists p = do + e <- doesFileExist p + if e then + return $ Just p + else + return Nothing + +fileMatchesString :: FilePath -> ByteString -> IO Bool +fileMatchesString p s = do + withBinaryFile p ReadMode $ \h -> do + expected <- (C8.lines . normalizeLinebreaks) `fmap` B.hGetContents h -- Strict + let actual = C8.lines $ normalizeLinebreaks s + return $ length expected == length actual && + and (zipWith matches expected actual) + where + matches :: ByteString -> ByteString -> Bool + matches pattern line + | C8.pack "RE:" `B.isPrefixOf` pattern = line =~ C8.drop 3 pattern + | otherwise = line == pattern + + -- This is a bit of a hack, but since we're comparing + -- *text* output, we should be OK. + normalizeLinebreaks = B.filter (not . ((==) 13)) + +mustMatch :: TestResult -> String -> ByteString -> Maybe FilePath -> Assertion +mustMatch _ _ _ Nothing = return () +mustMatch testResult handleName actual (Just expected) = do + m <- fileMatchesString expected actual + unless m $ assertFailure $ + "<" ++ handleName ++ "> did not match file '" + ++ expected ++ "'.\n" ++ testResultToString testResult + +discoverTestCategories :: FilePath -> IO [String] +discoverTestCategories directory = do + names <- listDirectory directory + fmap sort $ filterM (\name -> doesDirectoryExist $ directory name) names + +discoverTestCases :: FilePath -> String -> String -> IO [TestCase] +discoverTestCases baseDirectory category shouldX = do + -- Find the names of the shell scripts + names <- fmap (filter isTestCase) $ listDirectoryLax directory + -- Fill in TestCase for each script + forM (sort names) $ \name -> do + stdOutPath <- pathIfExists $ directory name `replaceExtension` ".out" + stdErrPath <- pathIfExists $ directory name `replaceExtension` ".err" + return $ TestCase { tcName = name + , tcBaseDirectory = baseDirectory + , tcCategory = category + , tcShouldX = shouldX + , tcStdOutPath = stdOutPath + , tcStdErrPath = stdErrPath + } + where + directory = baseDirectory category shouldX + isTestCase name = ".sh" `isSuffixOf` name + +createTestCases :: [TestCase] -> (TestCase -> Assertion) -> IO [TestTree] +createTestCases testCases mk = + return $ (flip map) testCases $ \tc -> testCase (tcName tc ++ suffix tc) $ mk tc + where + suffix tc = case (tcStdOutPath tc, tcStdErrPath tc) of + (Nothing, Nothing) -> " (ignoring stdout+stderr)" + (Just _ , Nothing) -> " (ignoring stderr)" + (Nothing, Just _ ) -> " (ignoring stdout)" + (Just _ , Just _ ) -> "" + +runTestCase :: (TestResult -> Assertion) -> TestCase -> IO () +runTestCase assertResult tc = do + doRemove <- newIORef False + bracket createWorkDirectory (removeWorkDirectory doRemove) $ \workDirectory -> do + -- Run + let scriptDirectory = workDirectory tcShouldX tc + sh <- fmap (fromMaybe $ error "Cannot find 'sh' executable") $ findExecutable "sh" + testResult <- run scriptDirectory sh [ "-e", tcName tc] + -- Assert that we got what we expected + assertResult testResult 
+ mustMatch testResult "stdout" (trStdOut testResult) (tcStdOutPath tc) + mustMatch testResult "stderr" (trStdErr testResult) (tcStdErrPath tc) + -- Only remove working directory if test succeeded + writeIORef doRemove True + where + createWorkDirectory = do + -- Create the temporary directory + tempDirectory <- getTemporaryDirectory + workDirectory <- createTempDirectory tempDirectory "cabal-install-test" + -- Copy all the files from the category into the working directory. + copyDirectoryRecursive normal + (tcBaseDirectory tc tcCategory tc) + workDirectory + -- Done + return workDirectory + removeWorkDirectory doRemove workDirectory = do + remove <- readIORef doRemove + when remove $ removeDirectoryRecursive workDirectory + +makeShouldXTests :: FilePath -> String -> String -> (TestResult -> Assertion) -> IO [TestTree] +makeShouldXTests baseDirectory category shouldX assertResult = do + testCases <- discoverTestCases baseDirectory category shouldX + createTestCases testCases $ \tc -> + runTestCase assertResult tc + +makeShouldRunTests :: FilePath -> String -> IO [TestTree] +makeShouldRunTests baseDirectory category = do + makeShouldXTests baseDirectory category "should_run" $ \testResult -> do + case trExitCode testResult of + ExitSuccess -> + return () -- We're good + ExitFailure _ -> + assertFailure $ "Unexpected exit status.\n\n" ++ testResultToString testResult + +makeShouldFailTests :: FilePath -> String -> IO [TestTree] +makeShouldFailTests baseDirectory category = do + makeShouldXTests baseDirectory category "should_fail" $ \testResult -> do + case trExitCode testResult of + ExitSuccess -> + assertFailure $ "Unexpected exit status.\n\n" ++ testResultToString testResult + ExitFailure _ -> + return () -- We're good + +discoverCategoryTests :: FilePath -> String -> IO [TestTree] +discoverCategoryTests baseDirectory category = do + srTests <- makeShouldRunTests baseDirectory category + sfTests <- makeShouldFailTests baseDirectory category + return [ testGroup "should_run" srTests + , testGroup "should_fail" sfTests + ] + +main :: IO () +main = do + -- Find executables and build directories, etc. + distPref <- guessDistDir + buildDir <- canonicalizePath (distPref "build/cabal") + let programSearchPath = ProgramSearchPathDir buildDir : defaultProgramSearchPath + (cabal, _) <- requireProgram normal cabalProgram (setProgramSearchPath programSearchPath defaultProgramDb) + (ghcPkg, _) <- requireProgram normal ghcPkgProgram defaultProgramDb + baseDirectory <- canonicalizePath $ "tests" "IntegrationTests" + -- Set up environment variables for test scripts + setEnv "GHC_PKG" $ programPath ghcPkg + setEnv "CABAL" $ programPath cabal + -- Define default arguments + setEnv "CABAL_ARGS" $ "--config-file=config-file" + setEnv "CABAL_ARGS_NO_CONFIG_FILE" " " + -- Discover all the test caregories + categories <- discoverTestCategories baseDirectory + -- Discover tests in each category + tests <- forM categories $ \category -> do + categoryTests <- discoverCategoryTests baseDirectory category + return (category, categoryTests) + -- Map into a test tree + let testTree = map (\(category, categoryTests) -> testGroup category categoryTests) tests + -- Run the tests + defaultMain $ testGroup "Integration Tests" $ testTree + +-- See this function in Cabal's PackageTests. If you update this, +-- update its copy in cabal-install. (Why a copy here? 
I wanted +-- to try moving this into the Cabal library, but to do this properly +-- I'd have to BC'ify getExecutablePath, and then it got hairy, so +-- I aborted and did something simple.) +guessDistDir :: IO FilePath +guessDistDir = do +#if MIN_VERSION_base(4,6,0) + exe_path <- canonicalizePath =<< getExecutablePath + let dist0 = dropFileName exe_path ".." ".." + b <- doesFileExist (dist0 "setup-config") +#else + let dist0 = error "no path" + b = False +#endif + -- Method (2) + if b then canonicalizePath dist0 + else findDistPrefOrDefault NoFlag >>= canonicalizePath diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/README.md cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/README.md --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/README.md 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,27 @@ +Integration Tests +================= + +Each test is a shell script. Tests that share files (e.g., `.cabal` files) are +grouped under a common sub-directory of [IntegrationTests]. The framework +copies the whole group's directory before running each test, which allows tests +to reuse files, yet run independently. A group's tests are further divided into +`should_run` and `should_fail` directories, based on the expected exit status. +For example, the test +`IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh` has access +to all files under `exec` and is expected to fail. + +Tests can specify their expected output. For a test named `x.sh`, `x.out` +specifies `stdout` and `x.err` specifies `stderr`. Both files are optional. +The framework expects an exact match between lines in the file and output, +except for lines beginning with "RE:", which are interpreted as regular +expressions. + +[IntegrationTests.hs] defines several environment variables: + +* `CABAL` - The path to the executable being tested. +* `GHC_PKG` - The path to ghc-pkg. +* `CABAL_ARGS` - A common set of arguments for running cabal. +* `CABAL_ARGS_NO_CONFIG_FILE` - `CABAL_ARGS` without `--config-file`. 
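To make the conventions above concrete, here is a sketch of a minimal test and its golden stdout. Everything in it is hypothetical (no such test is added by this patch): the group name, the script name `prints_version.sh`, and the golden file are illustrative, and the sketch assumes the group provides the usual `common.sh` wrapper added elsewhere in this patch, so that `cabal` expands to `"$CABAL" $CABAL_ARGS`.

    # IntegrationTests/<group>/should_run/prints_version.sh   (hypothetical)
    . ../common.sh
    # The wrapper runs the cabal-install binary under test; die() aborts with a message.
    cabal --numeric-version || die "cabal did not print a version"

    # IntegrationTests/<group>/should_run/prints_version.out  (hypothetical golden stdout)
    # A plain line must match the output exactly; a line starting with "RE:" is a POSIX
    # extended regular expression, as in the .err files added by this patch. The golden
    # file itself would contain only the following line, without the leading "# ":
    # RE:^[0-9]+(\.[0-9]+)*$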
+ +[IntegrationTests]: IntegrationTests +[IntegrationTests.hs]: IntegrationTests.hs diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,174 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module UnitTests.Distribution.Client.ArbitraryInstances ( + adjustSize, + shortListOf, + shortListOf1, + arbitraryFlag, + ShortToken(..), + arbitraryShortToken, + NonMEmpty(..), + NoShrink(..), + ) where + +import Data.Char +import Data.List +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid +import Control.Applicative +#endif +import Control.Monad + +import Distribution.Version +import Distribution.Package +import Distribution.System +import Distribution.Verbosity + +import Distribution.Simple.Setup +import Distribution.Simple.InstallDirs + +import Distribution.Utils.NubList + +import Test.QuickCheck + + +adjustSize :: (Int -> Int) -> Gen a -> Gen a +adjustSize adjust gen = sized (\n -> resize (adjust n) gen) + +shortListOf :: Int -> Gen a -> Gen [a] +shortListOf bound gen = + sized $ \n -> do + k <- choose (0, (n `div` 2) `min` bound) + vectorOf k gen + +shortListOf1 :: Int -> Gen a -> Gen [a] +shortListOf1 bound gen = + sized $ \n -> do + k <- choose (1, 1 `max` ((n `div` 2) `min` bound)) + vectorOf k gen + +newtype ShortToken = ShortToken { getShortToken :: String } + deriving Show + +instance Arbitrary ShortToken where + arbitrary = + ShortToken <$> + (shortListOf1 5 (choose ('#', '~')) + `suchThat` (not . ("[]" `isPrefixOf`))) + --TODO: [code cleanup] need to replace parseHaskellString impl to stop + -- accepting Haskell list syntax [], ['a'] etc, just allow String syntax. + -- Workaround, don't generate [] as this does not round trip. 
+ + + shrink (ShortToken cs) = + [ ShortToken cs' | cs' <- shrink cs, not (null cs') ] + +arbitraryShortToken :: Gen String +arbitraryShortToken = getShortToken <$> arbitrary + +#if !MIN_VERSION_QuickCheck(2,9,0) +instance Arbitrary Version where + arbitrary = do + branch <- shortListOf1 4 $ + frequency [(3, return 0) + ,(3, return 1) + ,(2, return 2) + ,(1, return 3)] + return (Version branch []) -- deliberate [] + where + + shrink (Version branch []) = + [ Version branch' [] | branch' <- shrink branch, not (null branch') ] + shrink (Version branch _tags) = + [ Version branch [] ] +#endif + +instance Arbitrary VersionRange where + arbitrary = canonicaliseVersionRange <$> sized verRangeExp + where + verRangeExp n = frequency $ + [ (2, return anyVersion) + , (1, liftM thisVersion arbitrary) + , (1, liftM laterVersion arbitrary) + , (1, liftM orLaterVersion arbitrary) + , (1, liftM orLaterVersion' arbitrary) + , (1, liftM earlierVersion arbitrary) + , (1, liftM orEarlierVersion arbitrary) + , (1, liftM orEarlierVersion' arbitrary) + , (1, liftM withinVersion arbitrary) + , (2, liftM VersionRangeParens arbitrary) + ] ++ if n == 0 then [] else + [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) + , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) + ] + where + verRangeExp2 = verRangeExp (n `div` 2) + + orLaterVersion' v = + unionVersionRanges (laterVersion v) (thisVersion v) + orEarlierVersion' v = + unionVersionRanges (earlierVersion v) (thisVersion v) + + canonicaliseVersionRange = fromVersionIntervals . toVersionIntervals + +instance Arbitrary PackageName where + arbitrary = PackageName . intercalate "-" <$> shortListOf1 2 nameComponent + where + nameComponent = shortListOf1 5 (elements packageChars) + `suchThat` (not . all isDigit) + packageChars = filter isAlphaNum ['\0'..'\127'] + +instance Arbitrary Dependency where + arbitrary = Dependency <$> arbitrary <*> arbitrary + +instance Arbitrary OS where + arbitrary = elements knownOSs + +instance Arbitrary Arch where + arbitrary = elements knownArches + +instance Arbitrary Platform where + arbitrary = Platform <$> arbitrary <*> arbitrary + +instance Arbitrary a => Arbitrary (Flag a) where + arbitrary = arbitraryFlag arbitrary + shrink NoFlag = [] + shrink (Flag x) = NoFlag : [ Flag x' | x' <- shrink x ] + +arbitraryFlag :: Gen a -> Gen (Flag a) +arbitraryFlag genA = + sized $ \sz -> + case sz of + 0 -> pure NoFlag + _ -> frequency [ (1, pure NoFlag) + , (3, Flag <$> genA) ] + + +instance (Arbitrary a, Ord a) => Arbitrary (NubList a) where + arbitrary = toNubList <$> arbitrary + shrink xs = [ toNubList [] | (not . 
null) (fromNubList xs) ] + -- try empty, otherwise don't shrink as it can loop + +instance Arbitrary Verbosity where + arbitrary = elements [minBound..maxBound] + +instance Arbitrary PathTemplate where + arbitrary = toPathTemplate <$> arbitraryShortToken + shrink t = [ toPathTemplate s | s <- shrink (show t), not (null s) ] + + +newtype NonMEmpty a = NonMEmpty { getNonMEmpty :: a } + deriving (Eq, Ord, Show) + +instance (Arbitrary a, Monoid a, Eq a) => Arbitrary (NonMEmpty a) where + arbitrary = NonMEmpty <$> (arbitrary `suchThat` (/= mempty)) + shrink (NonMEmpty x) = [ NonMEmpty x' | x' <- shrink x, x' /= mempty ] + +newtype NoShrink a = NoShrink { getNoShrink :: a } + deriving (Eq, Ord, Show) + +instance Arbitrary a => Arbitrary (NoShrink a) where + arbitrary = NoShrink <$> arbitrary + shrink _ = [] + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Compat/Time.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Compat/Time.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Compat/Time.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,49 @@ +module UnitTests.Distribution.Client.Compat.Time (tests) where + +import Control.Concurrent (threadDelay) +import System.FilePath + +import Distribution.Simple.Utils (withTempDirectory) +import Distribution.Verbosity + +import Distribution.Client.Compat.Time + +import Test.Tasty +import Test.Tasty.HUnit + +tests :: Int -> [TestTree] +tests mtimeChange = + [ testCase "getModTime has sub-second resolution" $ getModTimeTest mtimeChange + , testCase "getCurTime works as expected" $ getCurTimeTest mtimeChange + ] + +getModTimeTest :: Int -> Assertion +getModTimeTest mtimeChange = + withTempDirectory silent "." "getmodtime-" $ \dir -> do + let fileName = dir "foo" + writeFile fileName "bar" + t0 <- getModTime fileName + threadDelay mtimeChange + writeFile fileName "baz" + t1 <- getModTime fileName + assertBool "expected different file mtimes" (t1 > t0) + + +getCurTimeTest :: Int -> Assertion +getCurTimeTest mtimeChange = + withTempDirectory silent "." 
"getmodtime-" $ \dir -> do + let fileName = dir "foo" + writeFile fileName "bar" + t0 <- getModTime fileName + threadDelay mtimeChange + t1 <- getCurTime + assertBool("expected file mtime (" ++ show t0 + ++ ") to be earlier than current time (" ++ show t1 ++ ")") + (t0 < t1) + + threadDelay mtimeChange + writeFile fileName "baz" + t2 <- getModTime fileName + assertBool ("expected current time (" ++ show t1 + ++ ") to be earlier than file mtime (" ++ show t2 ++ ")") + (t1 < t2) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/DSL.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,418 @@ +{-# LANGUAGE RecordWildCards #-} +-- | DSL for testing the modular solver +module UnitTests.Distribution.Client.Dependency.Modular.DSL ( + ExampleDependency(..) + , Dependencies(..) + , ExTest(..) + , ExPreference(..) + , ExampleDb + , ExampleVersionRange + , ExamplePkgVersion + , exAv + , exInst + , exFlag + , exResolve + , extractInstallPlan + , withSetupDeps + , withTest + , withTests + ) where + +-- base +import Data.Either (partitionEithers) +import Data.Maybe (catMaybes) +import Data.List (nub) +import Data.Monoid +import Data.Version +import qualified Data.Map as Map + +-- Cabal +import qualified Distribution.Compiler as C +import qualified Distribution.InstalledPackageInfo as C +import qualified Distribution.Package as C + hiding (HasUnitId(..)) +import qualified Distribution.PackageDescription as C +import qualified Distribution.Simple.PackageIndex as C.PackageIndex +import qualified Distribution.System as C +import qualified Distribution.Version as C +import Language.Haskell.Extension (Extension(..), Language) + +-- cabal-install +import Distribution.Client.ComponentDeps (ComponentDeps) +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types +import Distribution.Client.Types +import qualified Distribution.Client.InstallPlan as CI.InstallPlan +import qualified Distribution.Client.PackageIndex as CI.PackageIndex +import qualified Distribution.Client.PkgConfigDb as PC +import qualified Distribution.Client.ComponentDeps as CD + +{------------------------------------------------------------------------------- + Example package database DSL + + In order to be able to set simple examples up quickly, we define a very + simple version of the package database here explicitly designed for use in + tests. + + The design of `ExampleDb` takes the perspective of the solver, not the + perspective of the package DB. This makes it easier to set up tests for + various parts of the solver, but makes the mapping somewhat awkward, because + it means we first map from "solver perspective" `ExampleDb` to the package + database format, and then the modular solver internally in `IndexConversion` + maps this back to the solver specific data structures. + + IMPLEMENTATION NOTES + -------------------- + + TODO: Perhaps these should be made comments of the corresponding data type + definitions. For now these are just my own conclusions and may be wrong. 
+ + * The difference between `GenericPackageDescription` and `PackageDescription` + is that `PackageDescription` describes a particular _configuration_ of a + package (for instance, see documentation for `checkPackage`). A + `GenericPackageDescription` can be turned into a `PackageDescription` in + two ways: + + a. `finalizePackageDescription` does the proper translation, by taking + into account the platform, available dependencies, etc. and picks a + flag assignment (or gives an error if no flag assignment can be found) + b. `flattenPackageDescription` ignores flag assignment and just joins all + components together. + + The slightly odd thing is that a `GenericPackageDescription` contains a + `PackageDescription` as a field; both of the above functions do the same + thing: they take the embedded `PackageDescription` as a basis for the result + value, but override `library`, `executables`, `testSuites`, `benchmarks` + and `buildDepends`. + * The `condTreeComponents` fields of a `CondTree` is a list of triples + `(condition, then-branch, else-branch)`, where the `else-branch` is + optional. +-------------------------------------------------------------------------------} + +type ExamplePkgName = String +type ExamplePkgVersion = Int +type ExamplePkgHash = String -- for example "installed" packages +type ExampleFlagName = String +type ExampleTestName = String +type ExampleVersionRange = C.VersionRange +data Dependencies = NotBuildable | Buildable [ExampleDependency] + +data ExampleDependency = + -- | Simple dependency on any version + ExAny ExamplePkgName + + -- | Simple dependency on a fixed version + | ExFix ExamplePkgName ExamplePkgVersion + + -- | Dependencies indexed by a flag + | ExFlag ExampleFlagName Dependencies Dependencies + + -- | Dependency on a language extension + | ExExt Extension + + -- | Dependency on a language version + | ExLang Language + + -- | Dependency on a pkg-config package + | ExPkg (ExamplePkgName, ExamplePkgVersion) + +data ExTest = ExTest ExampleTestName [ExampleDependency] + +exFlag :: ExampleFlagName -> [ExampleDependency] -> [ExampleDependency] + -> ExampleDependency +exFlag n t e = ExFlag n (Buildable t) (Buildable e) + +data ExPreference = ExPref String ExampleVersionRange + +data ExampleAvailable = ExAv { + exAvName :: ExamplePkgName + , exAvVersion :: ExamplePkgVersion + , exAvDeps :: ComponentDeps [ExampleDependency] + } + +exAv :: ExamplePkgName -> ExamplePkgVersion -> [ExampleDependency] + -> ExampleAvailable +exAv n v ds = ExAv { exAvName = n, exAvVersion = v + , exAvDeps = CD.fromLibraryDeps ds } + +withSetupDeps :: ExampleAvailable -> [ExampleDependency] -> ExampleAvailable +withSetupDeps ex setupDeps = ex { + exAvDeps = exAvDeps ex <> CD.fromSetupDeps setupDeps + } + +withTest :: ExampleAvailable -> ExTest -> ExampleAvailable +withTest ex test = withTests ex [test] + +withTests :: ExampleAvailable -> [ExTest] -> ExampleAvailable +withTests ex tests = + let testCDs = CD.fromList [(CD.ComponentTest name, deps) + | ExTest name deps <- tests] + in ex { exAvDeps = exAvDeps ex <> testCDs } + +data ExampleInstalled = ExInst { + exInstName :: ExamplePkgName + , exInstVersion :: ExamplePkgVersion + , exInstHash :: ExamplePkgHash + , exInstBuildAgainst :: [ExampleInstalled] + } + +exInst :: ExamplePkgName -> ExamplePkgVersion -> ExamplePkgHash + -> [ExampleInstalled] -> ExampleInstalled +exInst = ExInst + +type ExampleDb = [Either ExampleInstalled ExampleAvailable] + +type DependencyTree a = C.CondTree C.ConfVar [C.Dependency] a + +exDbPkgs :: ExampleDb -> 
[ExamplePkgName] +exDbPkgs = map (either exInstName exAvName) + +exAvSrcPkg :: ExampleAvailable -> SourcePackage +exAvSrcPkg ex = + let (libraryDeps, exts, mlang, pcpkgs) = splitTopLevel (CD.libraryDeps (exAvDeps ex)) + testSuites = [(name, deps) | (CD.ComponentTest name, deps) <- CD.toList (exAvDeps ex)] + in SourcePackage { + packageInfoId = exAvPkgId ex + , packageSource = LocalTarballPackage "<>" + , packageDescrOverride = Nothing + , packageDescription = C.GenericPackageDescription { + C.packageDescription = C.emptyPackageDescription { + C.package = exAvPkgId ex + , C.library = error "not yet configured: library" + , C.executables = error "not yet configured: executables" + , C.testSuites = error "not yet configured: testSuites" + , C.benchmarks = error "not yet configured: benchmarks" + , C.buildDepends = error "not yet configured: buildDepends" + , C.setupBuildInfo = Just C.SetupBuildInfo { + C.setupDepends = mkSetupDeps (CD.setupDeps (exAvDeps ex)), + C.defaultSetupDepends = False + } + } + , C.genPackageFlags = nub $ concatMap extractFlags $ + CD.libraryDeps (exAvDeps ex) ++ concatMap snd testSuites + , C.condLibrary = Just $ mkCondTree (extsLib exts <> langLib mlang <> pcpkgLib pcpkgs) + disableLib + (Buildable libraryDeps) + , C.condExecutables = [] + , C.condTestSuites = + let mkTree = mkCondTree mempty disableTest . Buildable + in map (\(t, deps) -> (t, mkTree deps)) testSuites + , C.condBenchmarks = [] + } + } + where + -- Split the set of dependencies into the set of dependencies of the library, + -- the dependencies of the test suites and extensions. + splitTopLevel :: [ExampleDependency] + -> ( [ExampleDependency] + , [Extension] + , Maybe Language + , [(ExamplePkgName, ExamplePkgVersion)] -- pkg-config + ) + splitTopLevel [] = + ([], [], Nothing, []) + splitTopLevel (ExExt ext:deps) = + let (other, exts, lang, pcpkgs) = splitTopLevel deps + in (other, ext:exts, lang, pcpkgs) + splitTopLevel (ExLang lang:deps) = + case splitTopLevel deps of + (other, exts, Nothing, pcpkgs) -> (other, exts, Just lang, pcpkgs) + _ -> error "Only 1 Language dependency is supported" + splitTopLevel (ExPkg pkg:deps) = + let (other, exts, lang, pcpkgs) = splitTopLevel deps + in (other, exts, lang, pkg:pcpkgs) + splitTopLevel (dep:deps) = + let (other, exts, lang, pcpkgs) = splitTopLevel deps + in (dep:other, exts, lang, pcpkgs) + + -- Extract the total set of flags used + extractFlags :: ExampleDependency -> [C.Flag] + extractFlags (ExAny _) = [] + extractFlags (ExFix _ _) = [] + extractFlags (ExFlag f a b) = C.MkFlag { + C.flagName = C.FlagName f + , C.flagDescription = "" + , C.flagDefault = True + , C.flagManual = False + } + : concatMap extractFlags (deps a ++ deps b) + where + deps :: Dependencies -> [ExampleDependency] + deps NotBuildable = [] + deps (Buildable ds) = ds + extractFlags (ExExt _) = [] + extractFlags (ExLang _) = [] + extractFlags (ExPkg _) = [] + + mkCondTree :: Monoid a => a -> (a -> a) -> Dependencies -> DependencyTree a + mkCondTree x dontBuild NotBuildable = + C.CondNode { + C.condTreeData = dontBuild x + , C.condTreeConstraints = [] + , C.condTreeComponents = [] + } + mkCondTree x dontBuild (Buildable deps) = + let (directDeps, flaggedDeps) = splitDeps deps + in C.CondNode { + C.condTreeData = x -- Necessary for language extensions + , C.condTreeConstraints = map mkDirect directDeps + , C.condTreeComponents = map (mkFlagged dontBuild) flaggedDeps + } + + mkDirect :: (ExamplePkgName, Maybe ExamplePkgVersion) -> C.Dependency + mkDirect (dep, Nothing) = C.Dependency 
(C.PackageName dep) C.anyVersion + mkDirect (dep, Just n) = C.Dependency (C.PackageName dep) (C.thisVersion v) + where + v = Version [n, 0, 0] [] + + mkFlagged :: Monoid a + => (a -> a) + -> (ExampleFlagName, Dependencies, Dependencies) + -> (C.Condition C.ConfVar + , DependencyTree a, Maybe (DependencyTree a)) + mkFlagged dontBuild (f, a, b) = ( C.Var (C.Flag (C.FlagName f)) + , mkCondTree mempty dontBuild a + , Just (mkCondTree mempty dontBuild b) + ) + + -- Split a set of dependencies into direct dependencies and flagged + -- dependencies. A direct dependency is a tuple of the name of package and + -- maybe its version (no version means any version) meant to be converted + -- to a 'C.Dependency' with 'mkDirect' for example. A flagged dependency is + -- the set of dependencies guarded by a flag. + -- + -- TODO: Take care of flagged language extensions and language flavours. + splitDeps :: [ExampleDependency] + -> ( [(ExamplePkgName, Maybe Int)] + , [(ExampleFlagName, Dependencies, Dependencies)] + ) + splitDeps [] = + ([], []) + splitDeps (ExAny p:deps) = + let (directDeps, flaggedDeps) = splitDeps deps + in ((p, Nothing):directDeps, flaggedDeps) + splitDeps (ExFix p v:deps) = + let (directDeps, flaggedDeps) = splitDeps deps + in ((p, Just v):directDeps, flaggedDeps) + splitDeps (ExFlag f a b:deps) = + let (directDeps, flaggedDeps) = splitDeps deps + in (directDeps, (f, a, b):flaggedDeps) + splitDeps (_:deps) = splitDeps deps + + -- Currently we only support simple setup dependencies + mkSetupDeps :: [ExampleDependency] -> [C.Dependency] + mkSetupDeps deps = + let (directDeps, []) = splitDeps deps in map mkDirect directDeps + + -- A 'C.Library' with just the given extensions in its 'BuildInfo' + extsLib :: [Extension] -> C.Library + extsLib es = mempty { C.libBuildInfo = mempty { C.otherExtensions = es } } + + -- A 'C.Library' with just the given extensions in its 'BuildInfo' + langLib :: Maybe Language -> C.Library + langLib (Just lang) = mempty { C.libBuildInfo = mempty { C.defaultLanguage = Just lang } } + langLib _ = mempty + + disableLib :: C.Library -> C.Library + disableLib lib = + lib { C.libBuildInfo = (C.libBuildInfo lib) { C.buildable = False }} + + disableTest :: C.TestSuite -> C.TestSuite + disableTest test = + test { C.testBuildInfo = (C.testBuildInfo test) { C.buildable = False }} + + -- A 'C.Library' with just the given pkgconfig-depends in its 'BuildInfo' + pcpkgLib :: [(ExamplePkgName, ExamplePkgVersion)] -> C.Library + pcpkgLib ds = mempty { C.libBuildInfo = mempty { C.pkgconfigDepends = [mkDirect (n, (Just v)) | (n,v) <- ds] } } + +exAvPkgId :: ExampleAvailable -> C.PackageIdentifier +exAvPkgId ex = C.PackageIdentifier { + pkgName = C.PackageName (exAvName ex) + , pkgVersion = Version [exAvVersion ex, 0, 0] [] + } + +exInstInfo :: ExampleInstalled -> C.InstalledPackageInfo +exInstInfo ex = C.emptyInstalledPackageInfo { + C.installedUnitId = C.mkUnitId (exInstHash ex) + , C.sourcePackageId = exInstPkgId ex + , C.depends = map (C.mkUnitId . exInstHash) + (exInstBuildAgainst ex) + } + +exInstPkgId :: ExampleInstalled -> C.PackageIdentifier +exInstPkgId ex = C.PackageIdentifier { + pkgName = C.PackageName (exInstName ex) + , pkgVersion = Version [exInstVersion ex, 0, 0] [] + } + +exAvIdx :: [ExampleAvailable] -> CI.PackageIndex.PackageIndex SourcePackage +exAvIdx = CI.PackageIndex.fromList . map exAvSrcPkg + +exInstIdx :: [ExampleInstalled] -> C.PackageIndex.InstalledPackageIndex +exInstIdx = C.PackageIndex.fromList . 
map exInstInfo + +exResolve :: ExampleDb + -- List of extensions supported by the compiler, or Nothing if unknown. + -> Maybe [Extension] + -- List of languages supported by the compiler, or Nothing if unknown. + -> Maybe [Language] + -> PC.PkgConfigDb + -> [ExamplePkgName] + -> Bool + -> [ExPreference] + -> ([String], Either String CI.InstallPlan.InstallPlan) +exResolve db exts langs pkgConfigDb targets indepGoals prefs = runProgress $ + resolveDependencies C.buildPlatform + compiler pkgConfigDb + Modular + params + where + defaultCompiler = C.unknownCompilerInfo C.buildCompilerId C.NoAbiTag + compiler = defaultCompiler { C.compilerInfoExtensions = exts + , C.compilerInfoLanguages = langs + } + (inst, avai) = partitionEithers db + instIdx = exInstIdx inst + avaiIdx = SourcePackageDb { + packageIndex = exAvIdx avai + , packagePreferences = Map.empty + } + enableTests = fmap (\p -> PackageConstraintStanzas + (C.PackageName p) [TestStanzas]) + (exDbPkgs db) + targets' = fmap (\p -> NamedPackage (C.PackageName p) []) targets + params = addPreferences (fmap toPref prefs) + $ addConstraints (fmap toLpc enableTests) + $ (standardInstallPolicy instIdx avaiIdx targets') { + depResolverIndependentGoals = indepGoals + } + toLpc pc = LabeledPackageConstraint pc ConstraintSourceUnknown + toPref (ExPref n v) = PackageVersionPreference (C.PackageName n) v + +extractInstallPlan :: CI.InstallPlan.InstallPlan + -> [(ExamplePkgName, ExamplePkgVersion)] +extractInstallPlan = catMaybes . map confPkg . CI.InstallPlan.toList + where + confPkg :: CI.InstallPlan.PlanPackage -> Maybe (String, Int) + confPkg (CI.InstallPlan.Configured pkg) = Just $ srcPkg pkg + confPkg _ = Nothing + + srcPkg :: ConfiguredPackage -> (String, Int) + srcPkg (ConfiguredPackage pkg _flags _stanzas _deps) = + let C.PackageIdentifier (C.PackageName p) (Version (n:_) _) = + packageInfoId pkg + in (p, n) + +{------------------------------------------------------------------------------- + Auxiliary +-------------------------------------------------------------------------------} + +-- | Run Progress computation +-- +-- Like `runLog`, but for the more general `Progress` type. 
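-- A small worked example of the intended behaviour (an illustrative sketch,
-- using only the 'Progress' constructors that appear in the definition below):
--
-- > runProgress (Step "s1" (Step "s2" (Done True))) == (["s1", "s2"], Right True)
-- > runProgress (Step "s1" (Fail "err"))            == (["s1"], Left "err")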
+runProgress :: Progress step e a -> ([step], Either e a) +runProgress = go + where + go (Step s p) = let (ss, result) = go p in (s:ss, result) + go (Fail e) = ([], Left e) + go (Done a) = ([], Right a) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,22 @@ +module UnitTests.Distribution.Client.Dependency.Modular.PSQ ( + tests + ) where + +import Distribution.Client.Dependency.Modular.PSQ + +import Test.Tasty +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = [ testProperty "splitsAltImplementation" splitsTest + ] + +-- | Original splits implementation +splits' :: PSQ k a -> PSQ k (a, PSQ k a) +splits' xs = + casePSQ xs + (PSQ []) + (\ k v ys -> cons k (v, ys) (fmap (\ (w, zs) -> (w, cons k v zs)) (splits' ys))) + +splitsTest :: [(Int, Int)] -> Bool +splitsTest psq = splits' (PSQ psq) == splits (PSQ psq) diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Dependency/Modular/Solver.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,805 @@ +{-# LANGUAGE RecordWildCards #-} +module UnitTests.Distribution.Client.Dependency.Modular.Solver (tests) + where + +-- base +import Control.Monad +import Data.List (isInfixOf) + +import qualified Data.Version as V +import qualified Distribution.Version as V + +-- test-framework +import Test.Tasty as TF +import Test.Tasty.HUnit (testCase, assertEqual, assertBool) + +-- Cabal +import Language.Haskell.Extension ( Extension(..) 
+ , KnownExtension(..), Language(..)) + +-- cabal-install +import Distribution.Client.PkgConfigDb (PkgConfigDb, pkgConfigDbFromList) +import UnitTests.Distribution.Client.Dependency.Modular.DSL +import UnitTests.Options + +tests :: [TF.TestTree] +tests = [ + testGroup "Simple dependencies" [ + runTest $ mkTest db1 "alreadyInstalled" ["A"] (SolverSuccess []) + , runTest $ mkTest db1 "installLatest" ["B"] (SolverSuccess [("B", 2)]) + , runTest $ mkTest db1 "simpleDep1" ["C"] (SolverSuccess [("B", 1), ("C", 1)]) + , runTest $ mkTest db1 "simpleDep2" ["D"] (SolverSuccess [("B", 2), ("D", 1)]) + , runTest $ mkTest db1 "failTwoVersions" ["C", "D"] anySolverFailure + , runTest $ indep $ mkTest db1 "indepTwoVersions" ["C", "D"] (SolverSuccess [("B", 1), ("B", 2), ("C", 1), ("D", 1)]) + , runTest $ indep $ mkTest db1 "aliasWhenPossible1" ["C", "E"] (SolverSuccess [("B", 1), ("C", 1), ("E", 1)]) + , runTest $ indep $ mkTest db1 "aliasWhenPossible2" ["D", "E"] (SolverSuccess [("B", 2), ("D", 1), ("E", 1)]) + , runTest $ indep $ mkTest db2 "aliasWhenPossible3" ["C", "D"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) + , runTest $ mkTest db1 "buildDepAgainstOld" ["F"] (SolverSuccess [("B", 1), ("E", 1), ("F", 1)]) + , runTest $ mkTest db1 "buildDepAgainstNew" ["G"] (SolverSuccess [("B", 2), ("E", 1), ("G", 1)]) + , runTest $ indep $ mkTest db1 "multipleInstances" ["F", "G"] anySolverFailure + , runTest $ mkTest db21 "unknownPackage1" ["A"] (SolverSuccess [("A", 1), ("B", 1)]) + , runTest $ mkTest db22 "unknownPackage2" ["A"] (SolverFailure (isInfixOf "unknown package: C")) + ] + , testGroup "Flagged dependencies" [ + runTest $ mkTest db3 "forceFlagOn" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) + , runTest $ mkTest db3 "forceFlagOff" ["D"] (SolverSuccess [("A", 2), ("B", 1), ("D", 1)]) + , runTest $ indep $ mkTest db3 "linkFlags1" ["C", "D"] anySolverFailure + , runTest $ indep $ mkTest db4 "linkFlags2" ["C", "D"] anySolverFailure + , runTest $ indep $ mkTest db18 "linkFlags3" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("F", 1)]) + ] + , testGroup "Stanzas" [ + runTest $ mkTest db5 "simpleTest1" ["C"] (SolverSuccess [("A", 2), ("C", 1)]) + , runTest $ mkTest db5 "simpleTest2" ["D"] anySolverFailure + , runTest $ mkTest db5 "simpleTest3" ["E"] (SolverSuccess [("A", 1), ("E", 1)]) + , runTest $ mkTest db5 "simpleTest4" ["F"] anySolverFailure -- TODO + , runTest $ mkTest db5 "simpleTest5" ["G"] (SolverSuccess [("A", 2), ("G", 1)]) + , runTest $ mkTest db5 "simpleTest6" ["E", "G"] anySolverFailure + , runTest $ indep $ mkTest db5 "simpleTest7" ["E", "G"] (SolverSuccess [("A", 1), ("A", 2), ("E", 1), ("G", 1)]) + , runTest $ mkTest db6 "depsWithTests1" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) + , runTest $ indep $ mkTest db6 "depsWithTests2" ["C", "D"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]) + ] + , testGroup "Setup dependencies" [ + runTest $ mkTest db7 "setupDeps1" ["B"] (SolverSuccess [("A", 2), ("B", 1)]) + , runTest $ mkTest db7 "setupDeps2" ["C"] (SolverSuccess [("A", 2), ("C", 1)]) + , runTest $ mkTest db7 "setupDeps3" ["D"] (SolverSuccess [("A", 1), ("D", 1)]) + , runTest $ mkTest db7 "setupDeps4" ["E"] (SolverSuccess [("A", 1), ("A", 2), ("E", 1)]) + , runTest $ mkTest db7 "setupDeps5" ["F"] (SolverSuccess [("A", 1), ("A", 2), ("F", 1)]) + , runTest $ mkTest db8 "setupDeps6" ["C", "D"] (SolverSuccess [("A", 1), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) + , runTest $ mkTest db9 "setupDeps7" ["F", 
"G"] (SolverSuccess [("A", 1), ("B", 1), ("B",2 ), ("C", 1), ("D", 1), ("E", 1), ("E", 2), ("F", 1), ("G", 1)]) + , runTest $ mkTest db10 "setupDeps8" ["C"] (SolverSuccess [("C", 1)]) + , runTest $ indep $ mkTest dbSetupDeps "setupDeps9" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2)]) + ] + , testGroup "Base shim" [ + runTest $ mkTest db11 "baseShim1" ["A"] (SolverSuccess [("A", 1)]) + , runTest $ mkTest db12 "baseShim2" ["A"] (SolverSuccess [("A", 1)]) + , runTest $ mkTest db12 "baseShim3" ["B"] (SolverSuccess [("B", 1)]) + , runTest $ mkTest db12 "baseShim4" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) + , runTest $ mkTest db12 "baseShim5" ["D"] anySolverFailure + , runTest $ mkTest db12 "baseShim6" ["E"] (SolverSuccess [("E", 1), ("syb", 2)]) + ] + , testGroup "Cycles" [ + runTest $ mkTest db14 "simpleCycle1" ["A"] anySolverFailure + , runTest $ mkTest db14 "simpleCycle2" ["A", "B"] anySolverFailure + , runTest $ mkTest db14 "cycleWithFlagChoice1" ["C"] (SolverSuccess [("C", 1), ("E", 1)]) + , runTest $ mkTest db15 "cycleThroughSetupDep1" ["A"] anySolverFailure + , runTest $ mkTest db15 "cycleThroughSetupDep2" ["B"] anySolverFailure + , runTest $ mkTest db15 "cycleThroughSetupDep3" ["C"] (SolverSuccess [("C", 2), ("D", 1)]) + , runTest $ mkTest db15 "cycleThroughSetupDep4" ["D"] (SolverSuccess [("D", 1)]) + , runTest $ mkTest db15 "cycleThroughSetupDep5" ["E"] (SolverSuccess [("C", 2), ("D", 1), ("E", 1)]) + ] + , testGroup "Extensions" [ + runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupported" ["A"] anySolverFailure + , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupportedIndirect" ["B"] anySolverFailure + , runTest $ mkTestExts [EnableExtension RankNTypes] dbExts1 "supported" ["A"] (SolverSuccess [("A",1)]) + , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedIndirect" ["C"] (SolverSuccess [("A",1),("B",1), ("C",1)]) + , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "disabledExtension" ["D"] anySolverFailure + , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "disabledExtension" ["D"] anySolverFailure + , runTest $ mkTestExts (UnknownExtension "custom" : map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedUnknown" ["E"] (SolverSuccess [("A",1),("B",1),("C",1),("E",1)]) + ] + , testGroup "Languages" [ + runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupported" ["A"] anySolverFailure + , runTest $ mkTestLangs [Haskell98,Haskell2010] dbLangs1 "supported" ["A"] (SolverSuccess [("A",1)]) + , runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupportedIndirect" ["B"] anySolverFailure + , runTest $ mkTestLangs [Haskell98, Haskell2010, UnknownLanguage "Haskell3000"] dbLangs1 "supportedUnknown" ["C"] (SolverSuccess [("A",1),("B",1),("C",1)]) + ] + + , testGroup "Soft Constraints" [ + runTest $ soft [ ExPref "A" $ mkvrThis 1] $ mkTest db13 "selectPreferredVersionSimple" ["A"] (SolverSuccess [("A", 1)]) + , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionSimple2" ["A"] (SolverSuccess [("A", 2)]) + , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 2 + , ExPref "A" $ mkvrOrEarlier 1] $ mkTest db13 "selectPreferredVersionMultiple" ["A"] (SolverSuccess [("A", 1)]) + , runTest $ soft [ ExPref "A" $ mkvrOrEarlier 1 + , ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple2" ["A"] (SolverSuccess [("A", 1)]) + , runTest $ soft [ ExPref "A" $ mkvrThis 1 + , ExPref "A" $ mkvrThis 2] $ mkTest db13 "selectPreferredVersionMultiple3" ["A"] 
(SolverSuccess [("A", 2)]) + , runTest $ soft [ ExPref "A" $ mkvrThis 1 + , ExPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple4" ["A"] (SolverSuccess [("A", 1)]) + ] + , testGroup "Buildable Field" [ + testBuildable "avoid building component with unknown dependency" (ExAny "unknown") + , testBuildable "avoid building component with unknown extension" (ExExt (UnknownExtension "unknown")) + , testBuildable "avoid building component with unknown language" (ExLang (UnknownLanguage "unknown")) + , runTest $ mkTest dbBuildable1 "choose flags that set buildable to false" ["pkg"] (SolverSuccess [("flag1-false", 1), ("flag2-true", 1), ("pkg", 1)]) + , runTest $ mkTest dbBuildable2 "choose version that sets buildable to false" ["A"] (SolverSuccess [("A", 1), ("B", 2)]) + ] + , testGroup "Pkg-config dependencies" [ + runTest $ mkTestPCDepends [] dbPC1 "noPkgs" ["A"] anySolverFailure + , runTest $ mkTestPCDepends [("pkgA", "0")] dbPC1 "tooOld" ["A"] anySolverFailure + , runTest $ mkTestPCDepends [("pkgA", "1.0.0"), ("pkgB", "1.0.0")] dbPC1 "pruneNotFound" ["C"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1)]) + , runTest $ mkTestPCDepends [("pkgA", "1.0.0"), ("pkgB", "2.0.0")] dbPC1 "chooseNewest" ["C"] (SolverSuccess [("A", 1), ("B", 2), ("C", 1)]) + ] + , testGroup "Independent goals" [ + runTest $ indep $ mkTest db16 "indepGoals1" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("E", 1)]) + , runTest $ indep $ mkTest db17 "indepGoals2" ["A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]) + , runTest $ indep $ mkTest db19 "indepGoals3" ["D", "E", "F"] anySolverFailure -- The target order is important. + , runTest $ indep $ mkTest db20 "indepGoals4" ["C", "A", "B"] (SolverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2)]) + , runTest $ indep $ mkTest db23 "indepGoals5" ["X", "Y"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("C", 1), ("C", 2), ("X", 1), ("Y", 1)]) + , runTest $ indep $ mkTest db24 "indepGoals6" ["X", "Y"] (SolverSuccess [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("X", 1), ("Y", 1)]) + ] + ] + where + indep test = test { testIndepGoals = True } + soft prefs test = test { testSoftConstraints = prefs } + mkvrThis = V.thisVersion . makeV + mkvrOrEarlier = V.orEarlierVersion . makeV + makeV v = V.Version [v,0,0] [] + +{------------------------------------------------------------------------------- + Solver tests +-------------------------------------------------------------------------------} + +data SolverTest = SolverTest { + testLabel :: String + , testTargets :: [String] + , testResult :: SolverResult + , testIndepGoals :: Bool + , testSoftConstraints :: [ExPreference] + , testDb :: ExampleDb + , testSupportedExts :: Maybe [Extension] + , testSupportedLangs :: Maybe [Language] + , testPkgConfigDb :: PkgConfigDb + } + +-- | Result of a solver test. +data SolverResult = + SolverSuccess [(String, Int)] -- ^ succeeds with given plan + | SolverFailure (String -> Bool) -- ^ fails, and the error message satisfies the predicate + +-- | Can be used for test cases where we just want to verify that +-- they fail, but do not care about the error message. 
+anySolverFailure :: SolverResult +anySolverFailure = SolverFailure (const True) + +mkTest :: ExampleDb + -> String + -> [String] + -> SolverResult + -> SolverTest +mkTest = mkTestExtLangPC Nothing Nothing [] + +mkTestExts :: [Extension] + -> ExampleDb + -> String + -> [String] + -> SolverResult + -> SolverTest +mkTestExts exts = mkTestExtLangPC (Just exts) Nothing [] + +mkTestLangs :: [Language] + -> ExampleDb + -> String + -> [String] + -> SolverResult + -> SolverTest +mkTestLangs langs = mkTestExtLangPC Nothing (Just langs) [] + +mkTestPCDepends :: [(String, String)] + -> ExampleDb + -> String + -> [String] + -> SolverResult + -> SolverTest +mkTestPCDepends pkgConfigDb = mkTestExtLangPC Nothing Nothing pkgConfigDb + +mkTestExtLangPC :: Maybe [Extension] + -> Maybe [Language] + -> [(String, String)] + -> ExampleDb + -> String + -> [String] + -> SolverResult + -> SolverTest +mkTestExtLangPC exts langs pkgConfigDb db label targets result = SolverTest { + testLabel = label + , testTargets = targets + , testResult = result + , testIndepGoals = False + , testSoftConstraints = [] + , testDb = db + , testSupportedExts = exts + , testSupportedLangs = langs + , testPkgConfigDb = pkgConfigDbFromList pkgConfigDb + } + +runTest :: SolverTest -> TF.TestTree +runTest SolverTest{..} = askOption $ \(OptionShowSolverLog showSolverLog) -> + testCase testLabel $ do + let (_msgs, result) = exResolve testDb testSupportedExts testSupportedLangs + testPkgConfigDb testTargets testIndepGoals testSoftConstraints + when showSolverLog $ mapM_ putStrLn _msgs + case result of + Left err -> assertBool ("Unexpected error:\n" ++ err) (check testResult err) + Right plan -> assertEqual "" (toMaybe testResult) (Just (extractInstallPlan plan)) + where + toMaybe :: SolverResult -> Maybe ([(String, Int)]) + toMaybe (SolverSuccess plan) = Just plan + toMaybe (SolverFailure _ ) = Nothing + + check :: SolverResult -> (String -> Bool) + check (SolverFailure f) = f + check _ = const False + +{------------------------------------------------------------------------------- + Specific example database for the tests +-------------------------------------------------------------------------------} + +db1 :: ExampleDb +db1 = + let a = exInst "A" 1 "A-1" [] + in [ Left a + , Right $ exAv "B" 1 [ExAny "A"] + , Right $ exAv "B" 2 [ExAny "A"] + , Right $ exAv "C" 1 [ExFix "B" 1] + , Right $ exAv "D" 1 [ExFix "B" 2] + , Right $ exAv "E" 1 [ExAny "B"] + , Right $ exAv "F" 1 [ExFix "B" 1, ExAny "E"] + , Right $ exAv "G" 1 [ExFix "B" 2, ExAny "E"] + , Right $ exAv "Z" 1 [] + ] + +-- In this example, we _can_ install C and D as independent goals, but we have +-- to pick two diferent versions for B (arbitrarily) +db2 :: ExampleDb +db2 = [ + Right $ exAv "A" 1 [] + , Right $ exAv "A" 2 [] + , Right $ exAv "B" 1 [ExAny "A"] + , Right $ exAv "B" 2 [ExAny "A"] + , Right $ exAv "C" 1 [ExAny "B", ExFix "A" 1] + , Right $ exAv "D" 1 [ExAny "B", ExFix "A" 2] + ] + +db3 :: ExampleDb +db3 = [ + Right $ exAv "A" 1 [] + , Right $ exAv "A" 2 [] + , Right $ exAv "B" 1 [exFlag "flagB" [ExFix "A" 1] [ExFix "A" 2]] + , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"] + , Right $ exAv "D" 1 [ExFix "A" 2, ExAny "B"] + ] + +-- | Like db3, but the flag picks a different package rather than a +-- different package version +-- +-- In db3 we cannot install C and D as independent goals because: +-- +-- * The multiple instance restriction says C and D _must_ share B +-- * Since C relies on A-1, C needs B to be compiled with flagB on +-- * Since D relies on A-2, D needs B to be 
compiled with flagB off
+-- * Hence C and D have incompatible requirements on B's flags.
+--
+-- However, _even_ if we don't check explicitly that we pick the same flag
+-- assignment for 0.B and 1.B, we will still detect the problem because
+-- 0.B depends on 0.A-1, 1.B depends on 1.A-2, hence we cannot link 0.A to
+-- 1.A and therefore we cannot link 0.B to 1.B.
+--
+-- In db4 the situation however is trickier. We again cannot install
+-- packages C and D as independent goals because:
+--
+-- * As above, the multiple instance restriction says that C and D _must_ share B
+-- * Since C relies on Ax-2, it requires B to be compiled with flagB off
+-- * Since D relies on Ay-2, it requires B to be compiled with flagB on
+-- * Hence C and D have incompatible requirements on B's flags.
+--
+-- But now this requirement is more indirect. If we only check dependencies
+-- we don't see the problem:
+--
+-- * We link 0.B to 1.B
+-- * 0.B relies on Ay-1
+-- * 1.B relies on Ax-1
+--
+-- We will insist that 0.Ay will be linked to 1.Ay, and 0.Ax to 1.Ax, but since
+-- we only ever assign to one of these, these constraints are never broken.
+db4 :: ExampleDb
+db4 = [
+    Right $ exAv "Ax" 1 []
+  , Right $ exAv "Ax" 2 []
+  , Right $ exAv "Ay" 1 []
+  , Right $ exAv "Ay" 2 []
+  , Right $ exAv "B" 1 [exFlag "flagB" [ExFix "Ax" 1] [ExFix "Ay" 1]]
+  , Right $ exAv "C" 1 [ExFix "Ax" 2, ExAny "B"]
+  , Right $ exAv "D" 1 [ExFix "Ay" 2, ExAny "B"]
+  ]
+
+-- | Some tests involving testsuites
+--
+-- Note that in this test framework test suites are always enabled; if you
+-- want to test without test suites just set up a test database without
+-- test suites.
+--
+-- * C depends on A (through its test suite)
+-- * D depends on B-2 (through its test suite), but B-2 is unavailable
+-- * E depends on A-1 directly and on A through its test suite. We prefer
+--   to use A-1 for the test suite in this case.
+-- * F depends on A-1 directly and on A-2 through its test suite. In this
+--   case we currently fail to install F, although strictly speaking
+--   test suites should be considered independent goals.
+-- * G is like E, but for version A-2. This means that we cannot install
+--   E and G together, unless we regard them as independent goals.
+db5 :: ExampleDb
+db5 = [
+    Right $ exAv "A" 1 []
+  , Right $ exAv "A" 2 []
+  , Right $ exAv "B" 1 []
+  , Right $ exAv "C" 1 [] `withTest` ExTest "testC" [ExAny "A"]
+  , Right $ exAv "D" 1 [] `withTest` ExTest "testD" [ExFix "B" 2]
+  , Right $ exAv "E" 1 [ExFix "A" 1] `withTest` ExTest "testE" [ExAny "A"]
+  , Right $ exAv "F" 1 [ExFix "A" 1] `withTest` ExTest "testF" [ExFix "A" 2]
+  , Right $ exAv "G" 1 [ExFix "A" 2] `withTest` ExTest "testG" [ExAny "A"]
+  ]
+
+-- Now the _dependencies_ have test suites
+--
+-- * Installing C is a simple example. C wants version 1 of A, but depends on
+--   B, and B's testsuite depends on any version of A. In this case we prefer
+--   to link (if we don't regard test suites as independent goals then of course
+--   linking here doesn't even come into it).
+-- * Installing [C, D] means that we prefer to link B -- depending on how we
+--   set things up, this means that we should also link their test suites.
+db6 :: ExampleDb
+db6 = [
+    Right $ exAv "A" 1 []
+  , Right $ exAv "A" 2 []
+  , Right $ exAv "B" 1 [] `withTest` ExTest "testA" [ExAny "A"]
+  , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"]
+  , Right $ exAv "D" 1 [ExAny "B"]
+  ]
+
+-- Packages with setup dependencies
+--
+-- Install..
+-- * B: Simple example, just make sure setup deps are taken into account at all +-- * C: Both the package and the setup script depend on any version of A. +-- In this case we prefer to link +-- * D: Variation on C.1 where the package requires a specific (not latest) +-- version but the setup dependency is not fixed. Again, we prefer to +-- link (picking the older version) +-- * E: Variation on C.2 with the setup dependency the more inflexible. +-- Currently, in this case we do not see the opportunity to link because +-- we consider setup dependencies after normal dependencies; we will +-- pick A.2 for E, then realize we cannot link E.setup.A to A.2, and pick +-- A.1 instead. This isn't so easy to fix (if we want to fix it at all); +-- in particular, considering setup dependencies _before_ other deps is +-- not an improvement, because in general we would prefer to link setup +-- setups to package deps, rather than the other way around. (For example, +-- if we change this ordering then the test for D would start to install +-- two versions of A). +-- * F: The package and the setup script depend on different versions of A. +-- This will only work if setup dependencies are considered independent. +db7 :: ExampleDb +db7 = [ + Right $ exAv "A" 1 [] + , Right $ exAv "A" 2 [] + , Right $ exAv "B" 1 [] `withSetupDeps` [ExAny "A"] + , Right $ exAv "C" 1 [ExAny "A" ] `withSetupDeps` [ExAny "A" ] + , Right $ exAv "D" 1 [ExFix "A" 1] `withSetupDeps` [ExAny "A" ] + , Right $ exAv "E" 1 [ExAny "A" ] `withSetupDeps` [ExFix "A" 1] + , Right $ exAv "F" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1] + ] + +-- If we install C and D together (not as independent goals), we need to build +-- both B.1 and B.2, both of which depend on A. +db8 :: ExampleDb +db8 = [ + Right $ exAv "A" 1 [] + , Right $ exAv "B" 1 [ExAny "A"] + , Right $ exAv "B" 2 [ExAny "A"] + , Right $ exAv "C" 1 [] `withSetupDeps` [ExFix "B" 1] + , Right $ exAv "D" 1 [] `withSetupDeps` [ExFix "B" 2] + ] + +-- Extended version of `db8` so that we have nested setup dependencies +db9 :: ExampleDb +db9 = db8 ++ [ + Right $ exAv "E" 1 [ExAny "C"] + , Right $ exAv "E" 2 [ExAny "D"] + , Right $ exAv "F" 1 [] `withSetupDeps` [ExFix "E" 1] + , Right $ exAv "G" 1 [] `withSetupDeps` [ExFix "E" 2] + ] + +-- Multiple already-installed packages with inter-dependencies, and one package +-- (C) that depends on package A-1 for its setup script and package A-2 as a +-- library dependency. +db10 :: ExampleDb +db10 = + let rts = exInst "rts" 1 "rts-inst" [] + ghc_prim = exInst "ghc-prim" 1 "ghc-prim-inst" [rts] + base = exInst "base" 1 "base-inst" [rts, ghc_prim] + a1 = exInst "A" 1 "A1-inst" [base] + a2 = exInst "A" 2 "A2-inst" [base] + in [ + Left rts + , Left ghc_prim + , Left base + , Left a1 + , Left a2 + , Right $ exAv "C" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1] + ] + +-- | This database tests that a package's setup dependencies are correctly +-- linked when the package is linked. See pull request #3268. +-- +-- When A and B are installed as independent goals, their dependencies on C must +-- be linked, due to the single instance restriction. Since C depends on D, 0.D +-- and 1.D must be linked. C also has a setup dependency on D, so 0.C-setup.D +-- and 1.C-setup.D must be linked. However, D's two link groups must remain +-- independent. The solver should be able to choose D-1 for C's library and D-2 +-- for C's setup script. 
+dbSetupDeps :: ExampleDb
+dbSetupDeps = [
+    Right $ exAv "A" 1 [ExAny "C"]
+  , Right $ exAv "B" 1 [ExAny "C"]
+  , Right $ exAv "C" 1 [ExFix "D" 1] `withSetupDeps` [ExFix "D" 2]
+  , Right $ exAv "D" 1 []
+  , Right $ exAv "D" 2 []
+  ]
+
+-- | Tests for dealing with base shims
+db11 :: ExampleDb
+db11 =
+  let base3 = exInst "base" 3 "base-3-inst" [base4]
+      base4 = exInst "base" 4 "base-4-inst" []
+  in [
+      Left base3
+    , Left base4
+    , Right $ exAv "A" 1 [ExFix "base" 3]
+    ]
+
+-- | Slightly more realistic version of db11 where base-3 depends on syb
+-- This means that if a package depends on base-3 and on syb, then they MUST
+-- share the version of syb
+--
+-- * Package A relies on base-3 (which relies on base-4)
+-- * Package B relies on base-4
+-- * Package C relies on both A and B
+-- * Package D relies on base-3 and on syb-2, which is not possible because
+--   base-3 has a dependency on syb-1 (non-inheritance of the Base qualifier)
+-- * Package E relies on base-4 and on syb-2, which is fine.
+db12 :: ExampleDb
+db12 =
+  let base3 = exInst "base" 3 "base-3-inst" [base4, syb1]
+      base4 = exInst "base" 4 "base-4-inst" []
+      syb1 = exInst "syb" 1 "syb-1-inst" [base4]
+  in [
+      Left base3
+    , Left base4
+    , Left syb1
+    , Right $ exAv "syb" 2 [ExFix "base" 4]
+    , Right $ exAv "A" 1 [ExFix "base" 3, ExAny "syb"]
+    , Right $ exAv "B" 1 [ExFix "base" 4, ExAny "syb"]
+    , Right $ exAv "C" 1 [ExAny "A", ExAny "B"]
+    , Right $ exAv "D" 1 [ExFix "base" 3, ExFix "syb" 2]
+    , Right $ exAv "E" 1 [ExFix "base" 4, ExFix "syb" 2]
+    ]
+
+db13 :: ExampleDb
+db13 = [
+    Right $ exAv "A" 1 []
+  , Right $ exAv "A" 2 []
+  , Right $ exAv "A" 3 []
+  ]
+
+-- | Database with some cycles
+--
+-- * Simplest non-trivial cycle: A -> B and B -> A
+-- * There is a cycle C -> D -> C, but it can be broken by picking the
+--   right flag assignment.
+db14 :: ExampleDb
+db14 = [
+    Right $ exAv "A" 1 [ExAny "B"]
+  , Right $ exAv "B" 1 [ExAny "A"]
+  , Right $ exAv "C" 1 [exFlag "flagC" [ExAny "D"] [ExAny "E"]]
+  , Right $ exAv "D" 1 [ExAny "C"]
+  , Right $ exAv "E" 1 []
+  ]
+
+-- | Cycles through setup dependencies
+--
+-- The first cycle is unsolvable: package A has a setup dependency on B,
+-- B has a regular dependency on A, and we only have a single version available
+-- for both.
+--
+-- The second cycle can be broken by picking different versions: package C-2.0
+-- has a setup dependency on D, and D has a regular dependency on C-*. However,
+-- version C-1.0 is already available (perhaps it didn't have this setup dep).
+-- Thus, we should be able to break this cycle even if we are installing package
+-- E, which explicitly depends on C-2.0.
+db15 :: ExampleDb
+db15 = [
+    -- First example (real cycle, no solution)
+    Right $ exAv "A" 1 [] `withSetupDeps` [ExAny "B"]
+  , Right $ exAv "B" 1 [ExAny "A"]
+    -- Second example (cycle can be broken by picking versions carefully)
+  , Left $ exInst "C" 1 "C-1-inst" []
+  , Right $ exAv "C" 2 [] `withSetupDeps` [ExAny "D"]
+  , Right $ exAv "D" 1 [ExAny "C" ]
+  , Right $ exAv "E" 1 [ExFix "C" 2]
+  ]
+
+-- | Check that the solver can backtrack after encountering the SIR (issue #2843)
+--
+-- When A and B are installed as independent goals, the single instance
+-- restriction prevents B from depending on C. 
This database tests that the +-- solver can backtrack after encountering the single instance restriction and +-- choose the only valid flag assignment (-flagA +flagB): +-- +-- > flagA flagB B depends on +-- > On _ C-* +-- > Off On E-* <-- only valid flag assignment +-- > Off Off D-2.0, C-* +-- +-- Since A depends on C-* and D-1.0, and C-1.0 depends on any version of D, +-- we must build C-1.0 against D-1.0. Since B depends on D-2.0, we cannot have +-- C in the transitive closure of B's dependencies, because that would mean we +-- would need two instances of C: one built against D-1.0 and one built against +-- D-2.0. +db16 :: ExampleDb +db16 = [ + Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1] + , Right $ exAv "B" 1 [ ExFix "D" 2 + , exFlag "flagA" + [ExAny "C"] + [exFlag "flagB" + [ExAny "E"] + [ExAny "C"]]] + , Right $ exAv "C" 1 [ExAny "D"] + , Right $ exAv "D" 1 [] + , Right $ exAv "D" 2 [] + , Right $ exAv "E" 1 [] + ] + +-- | This database checks that when the solver discovers a constraint on a +-- package's version after choosing to link that package, it can backtrack to +-- try alternative versions for the linked-to package. See pull request #3327. +-- +-- When A and B are installed as independent goals, their dependencies on C +-- must be linked. Since C depends on D, A and B's dependencies on D must also +-- be linked. This test relies on the fact that the solver chooses D-2 for both +-- 0.D and 1.D before it encounters the test suites' constraints. The solver +-- must backtrack to try D-1 for both 0.D and 1.D. +db17 :: ExampleDb +db17 = [ + Right $ exAv "A" 1 [ExAny "C"] `withTest` ExTest "test" [ExFix "D" 1] + , Right $ exAv "B" 1 [ExAny "C"] `withTest` ExTest "test" [ExFix "D" 1] + , Right $ exAv "C" 1 [ExAny "D"] + , Right $ exAv "D" 1 [] + , Right $ exAv "D" 2 [] + ] + +-- | Issue #2834 +-- When both A and B are installed as independent goals, their dependencies on +-- C must be linked. The only combination of C's flags that is consistent with +-- A and B's dependencies on D is -flagA +flagB. This database tests that the +-- solver can backtrack to find the right combination of flags (requiring F, but +-- not E or G) and apply it to both 0.C and 1.C. +-- +-- > flagA flagB C depends on +-- > On _ D-1, E-* +-- > Off On F-* <-- Only valid choice +-- > Off Off D-2, G-* +-- +-- The single instance restriction means we cannot have one instance of C +-- built against D-1 and one instance built against D-2; since A depends on +-- D-1, and B depends on C-2, it is therefore important that C cannot depend +-- on any version of D. +db18 :: ExampleDb +db18 = [ + Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1] + , Right $ exAv "B" 1 [ExAny "C", ExFix "D" 2] + , Right $ exAv "C" 1 [exFlag "flagA" + [ExFix "D" 1, ExAny "E"] + [exFlag "flagB" + [ExAny "F"] + [ExFix "D" 2, ExAny "G"]]] + , Right $ exAv "D" 1 [] + , Right $ exAv "D" 2 [] + , Right $ exAv "E" 1 [] + , Right $ exAv "F" 1 [] + , Right $ exAv "G" 1 [] + ] + +-- | Tricky test case with independent goals (issue #2842) +-- +-- Suppose we are installing D, E, and F as independent goals: +-- +-- * D depends on A-* and C-1, requiring A-1 to be built against C-1 +-- * E depends on B-* and C-2, requiring B-1 to be built against C-2 +-- * F depends on A-* and B-*; this means we need A-1 and B-1 both to be built +-- against the same version of C, violating the single instance restriction. 
+-- +-- We can visualize this DB as: +-- +-- > C-1 C-2 +-- > /|\ /|\ +-- > / | \ / | \ +-- > / | X | \ +-- > | | / \ | | +-- > | |/ \| | +-- > | + + | +-- > | | | | +-- > | A B | +-- > \ |\ /| / +-- > \ | \ / | / +-- > \| V |/ +-- > D F E +db19 :: ExampleDb +db19 = [ + Right $ exAv "A" 1 [ExAny "C"] + , Right $ exAv "B" 1 [ExAny "C"] + , Right $ exAv "C" 1 [] + , Right $ exAv "C" 2 [] + , Right $ exAv "D" 1 [ExAny "A", ExFix "C" 1] + , Right $ exAv "E" 1 [ExAny "B", ExFix "C" 2] + , Right $ exAv "F" 1 [ExAny "A", ExAny "B"] + ] + +-- | This database tests that the solver correctly backjumps when dependencies +-- of linked packages are not linked. It is an example where the conflict set +-- from enforcing the single instance restriction is not sufficient. See pull +-- request #3327. +-- +-- When C, A, and B are installed as independent goals, the solver first +-- chooses 0.C-1 and 0.D-2. When choosing dependencies for A and B, it links +-- 1.D and 2.D to 0.D. Finally, the solver discovers the test's constraint on +-- D. It must backjump to try 1.D-1 and then link 2.D to 1.D. +db20 :: ExampleDb +db20 = [ + Right $ exAv "A" 1 [ExAny "B"] + , Right $ exAv "B" 1 [ExAny "D"] `withTest` ExTest "test" [ExFix "D" 1] + , Right $ exAv "C" 1 [ExFix "D" 2] + , Right $ exAv "D" 1 [] + , Right $ exAv "D" 2 [] + ] + +-- | Test the trace messages that we get when a package refers to an unknown pkg +-- +-- TODO: Currently we don't actually test the trace messages, and this particular +-- test still suceeds. The trace can only be verified by hand. +db21 :: ExampleDb +db21 = [ + Right $ exAv "A" 1 [ExAny "B"] + , Right $ exAv "A" 2 [ExAny "C"] -- A-2.0 will be tried first, but C unknown + , Right $ exAv "B" 1 [] + ] + +-- | A variant of 'db21', which actually fails. +db22 :: ExampleDb +db22 = [ + Right $ exAv "A" 1 [ExAny "B"] + , Right $ exAv "A" 2 [ExAny "C"] + ] + +-- | Database for (unsuccessfully) trying to expose a bug in the handling +-- of implied linking constraints. The question is whether an implied linking +-- constraint should only have the introducing package in its conflict set, +-- or also its link target. +-- +-- It turns out that as long as the Single Instance Restriction is in place, +-- it does not matter, because there will aways be an option that is failing +-- due to the SIR, which contains the link target in its conflict set. +-- +-- Even if the SIR is not in place, if there is a solution, one will always +-- be found, because without the SIR, linking is always optional, but never +-- necessary. +-- +db23 :: ExampleDb +db23 = [ + Right $ exAv "X" 1 [ExFix "C" 2, ExAny "A"] + , Right $ exAv "Y" 1 [ExFix "C" 1, ExFix "A" 2] + , Right $ exAv "A" 1 [] + , Right $ exAv "A" 2 [ExAny "B"] + , Right $ exAv "B" 1 [ExAny "C"] + , Right $ exAv "C" 1 [] + , Right $ exAv "C" 2 [] + ] + +-- | A simplified version of 'db23'. 
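-- (Gloss on the simplification, as far as the two databases show: in db23 the
-- package that X and Y pin to conflicting versions is C, reached indirectly
-- via A-2 -> B -> C; here A-2 depends on B directly, and B itself is the
-- package pinned to versions 2 and 1 by X and Y respectively.)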
+db24 :: ExampleDb +db24 = [ + Right $ exAv "X" 1 [ExFix "B" 2, ExAny "A"] + , Right $ exAv "Y" 1 [ExFix "B" 1, ExFix "A" 2] + , Right $ exAv "A" 1 [] + , Right $ exAv "A" 2 [ExAny "B"] + , Right $ exAv "B" 1 [] + , Right $ exAv "B" 2 [] + ] + +dbExts1 :: ExampleDb +dbExts1 = [ + Right $ exAv "A" 1 [ExExt (EnableExtension RankNTypes)] + , Right $ exAv "B" 1 [ExExt (EnableExtension CPP), ExAny "A"] + , Right $ exAv "C" 1 [ExAny "B"] + , Right $ exAv "D" 1 [ExExt (DisableExtension CPP), ExAny "B"] + , Right $ exAv "E" 1 [ExExt (UnknownExtension "custom"), ExAny "C"] + ] + +dbLangs1 :: ExampleDb +dbLangs1 = [ + Right $ exAv "A" 1 [ExLang Haskell2010] + , Right $ exAv "B" 1 [ExLang Haskell98, ExAny "A"] + , Right $ exAv "C" 1 [ExLang (UnknownLanguage "Haskell3000"), ExAny "B"] + ] + +-- | cabal must set enable-lib to false in order to avoid the unavailable +-- dependency. Flags are true by default. The flag choice causes "pkg" to +-- depend on "false-dep". +testBuildable :: String -> ExampleDependency -> TestTree +testBuildable testName unavailableDep = + runTest $ mkTestExtLangPC (Just []) (Just []) [] db testName ["pkg"] expected + where + expected = SolverSuccess [("false-dep", 1), ("pkg", 1)] + db = [ + Right $ exAv "pkg" 1 + [ unavailableDep + , ExFlag "enable-lib" (Buildable []) NotBuildable ] + `withTest` + ExTest "test" [exFlag "enable-lib" + [ExAny "true-dep"] + [ExAny "false-dep"]] + , Right $ exAv "true-dep" 1 [] + , Right $ exAv "false-dep" 1 [] + ] + +-- | cabal must choose -flag1 +flag2 for "pkg", which requires packages +-- "flag1-false" and "flag2-true". +dbBuildable1 :: ExampleDb +dbBuildable1 = [ + Right $ exAv "pkg" 1 + [ ExAny "unknown" + , ExFlag "flag1" (Buildable []) NotBuildable + , ExFlag "flag2" (Buildable []) NotBuildable] + `withTests` + [ ExTest "optional-test" + [ ExAny "unknown" + , ExFlag "flag1" + (Buildable []) + (Buildable [ExFlag "flag2" NotBuildable (Buildable [])])] + , ExTest "test" [ exFlag "flag1" [ExAny "flag1-true"] [ExAny "flag1-false"] + , exFlag "flag2" [ExAny "flag2-true"] [ExAny "flag2-false"]] + ] + , Right $ exAv "flag1-true" 1 [] + , Right $ exAv "flag1-false" 1 [] + , Right $ exAv "flag2-true" 1 [] + , Right $ exAv "flag2-false" 1 [] + ] + +-- | Package databases for testing @pkg-config@ dependencies. +dbPC1 :: ExampleDb +dbPC1 = [ + Right $ exAv "A" 1 [ExPkg ("pkgA", 1)] + , Right $ exAv "B" 1 [ExPkg ("pkgB", 1), ExAny "A"] + , Right $ exAv "B" 2 [ExPkg ("pkgB", 2), ExAny "A"] + , Right $ exAv "C" 1 [ExAny "B"] + ] + +-- | cabal must pick B-2 to avoid the unknown dependency. 
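-- (Reading of the database below: B-1 and B-3 depend unconditionally on the
-- unknown package, while B-2 has a "disable-lib" flag that can mark its
-- library non-buildable and so drop that dependency; the corresponding test
-- above therefore expects the plan A-1, B-2.)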
+dbBuildable2 :: ExampleDb +dbBuildable2 = [ + Right $ exAv "A" 1 [ExAny "B"] + , Right $ exAv "B" 1 [ExAny "unknown"] + , Right $ exAv "B" 2 + [ ExAny "unknown" + , ExFlag "disable-lib" NotBuildable (Buildable []) + ] + , Right $ exAv "B" 3 [ExAny "unknown"] + ] diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/FileMonitor.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/FileMonitor.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/FileMonitor.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,769 @@ +module UnitTests.Distribution.Client.FileMonitor (tests) where + +import Control.Monad +import Control.Exception +import Control.Concurrent (threadDelay) +import qualified Data.Set as Set +import System.FilePath +import qualified System.Directory as IO +import Prelude hiding (writeFile) +import qualified Prelude as IO (writeFile) + +import Distribution.Text (simpleParse) +import Distribution.Compat.Binary +import Distribution.Simple.Utils (withTempDirectory) +import Distribution.Verbosity (silent) + +import Distribution.Client.FileMonitor +import Distribution.Client.Compat.Time + +import Test.Tasty +import Test.Tasty.HUnit + + +tests :: Int -> [TestTree] +tests mtimeChange = + [ testCase "sanity check mtimes" $ testFileMTimeSanity mtimeChange + , testCase "no monitor cache" testNoMonitorCache + , testCase "corrupt monitor cache" testCorruptMonitorCache + , testCase "empty monitor" testEmptyMonitor + , testCase "missing file" testMissingFile + , testCase "change file" $ testChangedFile mtimeChange + , testCase "file mtime vs content" $ testChangedFileMtimeVsContent mtimeChange + , testCase "update during action" $ testUpdateDuringAction mtimeChange + , testCase "remove file" testRemoveFile + , testCase "non-existent file" testNonExistentFile + , testCase "changed file type" $ testChangedFileType mtimeChange + + , testGroup "glob matches" + [ testCase "no change" testGlobNoChange + , testCase "add match" $ testGlobAddMatch mtimeChange + , testCase "remove match" $ testGlobRemoveMatch mtimeChange + , testCase "change match" $ testGlobChangeMatch mtimeChange + + , testCase "add match subdir" $ testGlobAddMatchSubdir mtimeChange + , testCase "remove match subdir" $ testGlobRemoveMatchSubdir mtimeChange + , testCase "change match subdir" $ testGlobChangeMatchSubdir mtimeChange + + , testCase "match toplevel dir" $ testGlobMatchTopDir mtimeChange + , testCase "add non-match" $ testGlobAddNonMatch mtimeChange + , testCase "remove non-match" $ testGlobRemoveNonMatch mtimeChange + + , testCase "add non-match" $ testGlobAddNonMatchSubdir mtimeChange + , testCase "remove non-match" $ testGlobRemoveNonMatchSubdir mtimeChange + + , testCase "invariant sorted 1" $ testInvariantMonitorStateGlobFiles + mtimeChange + , testCase "invariant sorted 2" $ testInvariantMonitorStateGlobDirs + mtimeChange + + , testCase "match dirs" $ testGlobMatchDir mtimeChange + , testCase "match dirs only" $ testGlobMatchDirOnly mtimeChange + , testCase "change file type" $ testGlobChangeFileType mtimeChange + , testCase "absolute paths" $ testGlobAbsolutePath mtimeChange + ] + + , testCase "value unchanged" testValueUnchanged + , testCase "value changed" 
testValueChanged + , testCase "value & file changed" $ testValueAndFileChanged mtimeChange + , testCase "value updated" testValueUpdated + ] + +-- we rely on file mtimes having a reasonable resolution +testFileMTimeSanity :: Int -> Assertion +testFileMTimeSanity mtimeChange = + withTempDirectory silent "." "file-status-" $ \dir -> do + replicateM_ 10 $ do + IO.writeFile (dir "a") "content" + t1 <- getModTime (dir "a") + threadDelay mtimeChange + IO.writeFile (dir "a") "content" + t2 <- getModTime (dir "a") + assertBool "expected different file mtimes" (t2 > t1) + +-- first run, where we don't even call updateMonitor +testNoMonitorCache :: Assertion +testNoMonitorCache = + withFileMonitor $ \root monitor -> do + reason <- expectMonitorChanged root (monitor :: FileMonitor () ()) () + reason @?= MonitorFirstRun + +-- write garbage into the binary cache file +testCorruptMonitorCache :: Assertion +testCorruptMonitorCache = + withFileMonitor $ \root monitor -> do + IO.writeFile (fileMonitorCacheFile monitor) "broken" + reason <- expectMonitorChanged root monitor () + reason @?= MonitorCorruptCache + + updateMonitor root monitor [] () () + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [] + + IO.writeFile (fileMonitorCacheFile monitor) "broken" + reason2 <- expectMonitorChanged root monitor () + reason2 @?= MonitorCorruptCache + +-- no files to monitor +testEmptyMonitor :: Assertion +testEmptyMonitor = + withFileMonitor $ \root monitor -> do + touchFile root "a" + updateMonitor root monitor [] () () + touchFile root "b" + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [] + +-- monitor a file that is expected to exist +testMissingFile :: Assertion +testMissingFile = do + test monitorFile touchFile "a" + test monitorFileHashed touchFile "a" + test monitorFile touchFile ("dir" "a") + test monitorFileHashed touchFile ("dir" "a") + test monitorDirectory touchDir "a" + test monitorDirectory touchDir ("dir" "a") + where + test :: (FilePath -> MonitorFilePath) + -> (RootPath -> FilePath -> IO ()) + -> FilePath + -> IO () + test monitorKind touch file = + withFileMonitor $ \root monitor -> do + -- a file that doesn't exist at snapshot time is considered to have + -- changed + updateMonitor root monitor [monitorKind file] () () + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged file + + -- a file doesn't exist at snapshot time, but gets added afterwards is + -- also considered to have changed + updateMonitor root monitor [monitorKind file] () () + touch root file + reason2 <- expectMonitorChanged root monitor () + reason2 @?= MonitoredFileChanged file + + +testChangedFile :: Int -> Assertion +testChangedFile mtimeChange = do + test monitorFile touchFile touchFile "a" + test monitorFileHashed touchFile touchFileContent "a" + test monitorFile touchFile touchFile ("dir" "a") + test monitorFileHashed touchFile touchFileContent ("dir" "a") + test monitorDirectory touchDir touchDir "a" + test monitorDirectory touchDir touchDir ("dir" "a") + where + test :: (FilePath -> MonitorFilePath) + -> (RootPath -> FilePath -> IO ()) + -> (RootPath -> FilePath -> IO ()) + -> FilePath + -> IO () + test monitorKind touch touch' file = + withFileMonitor $ \root monitor -> do + touch root file + updateMonitor root monitor [monitorKind file] () () + threadDelay mtimeChange + touch' root file + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged file + + +testChangedFileMtimeVsContent :: Int -> Assertion 
+testChangedFileMtimeVsContent mtimeChange = + withFileMonitor $ \root monitor -> do + -- if we don't touch the file, it's unchanged + touchFile root "a" + updateMonitor root monitor [monitorFile "a"] () () + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorFile "a"] + + -- if we do touch the file, it's changed if we only consider mtime + updateMonitor root monitor [monitorFile "a"] () () + threadDelay mtimeChange + touchFile root "a" + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged "a" + + -- but if we touch the file, it's unchanged if we consider content hash + updateMonitor root monitor [monitorFileHashed "a"] () () + threadDelay mtimeChange + touchFile root "a" + (res2, files2) <- expectMonitorUnchanged root monitor () + res2 @?= () + files2 @?= [monitorFileHashed "a"] + + -- finally if we change the content it's changed + updateMonitor root monitor [monitorFileHashed "a"] () () + threadDelay mtimeChange + touchFileContent root "a" + reason2 <- expectMonitorChanged root monitor () + reason2 @?= MonitoredFileChanged "a" + + +testUpdateDuringAction :: Int -> Assertion +testUpdateDuringAction mtimeChange = do + test (monitorFile "a") touchFile "a" + test (monitorFileHashed "a") touchFile "a" + test (monitorDirectory "a") touchDir "a" + test (monitorFileGlobStr "*") touchFile "a" + test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } + touchDir "a" + where + test :: MonitorFilePath + -> (RootPath -> FilePath -> IO ()) + -> FilePath + -> IO () + test monitorSpec touch file = + withFileMonitor $ \root monitor -> do + touch root file + updateMonitor root monitor [monitorSpec] () () + + -- start doing an update action... + threadDelay mtimeChange -- some time passes + touch root file -- a file gets updates during the action + threadDelay mtimeChange -- some time passes then we finish + updateMonitor root monitor [monitorSpec] () () + -- we don't notice this change since we took the timestamp after the + -- action finished + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorSpec] + + -- Let's try again, this time taking the timestamp before the action + timestamp' <- beginUpdateFileMonitor + threadDelay mtimeChange -- some time passes + touch root file -- a file gets updates during the action + threadDelay mtimeChange -- some time passes then we finish + updateMonitorWithTimestamp root monitor timestamp' [monitorSpec] () () + -- now we do notice the change since we took the snapshot before the + -- action finished + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged file + + +testRemoveFile :: Assertion +testRemoveFile = do + test monitorFile touchFile removeFile "a" + test monitorFileHashed touchFile removeFile "a" + test monitorFile touchFile removeFile ("dir" "a") + test monitorFileHashed touchFile removeFile ("dir" "a") + test monitorDirectory touchDir removeDir "a" + test monitorDirectory touchDir removeDir ("dir" "a") + where + test :: (FilePath -> MonitorFilePath) + -> (RootPath -> FilePath -> IO ()) + -> (RootPath -> FilePath -> IO ()) + -> FilePath + -> IO () + test monitorKind touch remove file = + withFileMonitor $ \root monitor -> do + touch root file + updateMonitor root monitor [monitorKind file] () () + remove root file + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged file + + +-- monitor a file that we expect not to exist +testNonExistentFile :: Assertion +testNonExistentFile = + withFileMonitor $ 
\root monitor -> do
+    -- a file that doesn't exist at snapshot time or check time is unchanged
+    updateMonitor root monitor [monitorNonExistentFile "a"] () ()
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorNonExistentFile "a"]
+
+    -- if the file then exists it has changed
+    touchFile root "a"
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged "a"
+
+    -- if the file then exists at snapshot and check time it has changed
+    updateMonitor root monitor [monitorNonExistentFile "a"] () ()
+    reason2 <- expectMonitorChanged root monitor ()
+    reason2 @?= MonitoredFileChanged "a"
+
+    -- but if the file existed at snapshot time and doesn't exist at check time
+    -- it is considered unchanged. This is unlike files we expect to exist, but
+    -- that's because files that exist can have different content and actions
+    -- can depend on that content, whereas if the action expected a file not to
+    -- exist and it now does not, it'll give the same result, irrespective of
+    -- the fact that the file might have existed in the meantime.
+    updateMonitor root monitor [monitorNonExistentFile "a"] () ()
+    removeFile root "a"
+    (res2, files2) <- expectMonitorUnchanged root monitor ()
+    res2 @?= ()
+    files2 @?= [monitorNonExistentFile "a"]
+
+
+testChangedFileType :: Int -> Assertion
+testChangedFileType mtimeChange = do
+  test (monitorFile "a") touchFile removeFile createDir
+  test (monitorFileHashed "a") touchFile removeFile createDir
+
+  test (monitorDirectory "a") createDir removeDir touchFile
+  test (monitorFileOrDirectory "a") createDir removeDir touchFile
+
+  test (monitorFileGlobStr "*") { monitorKindDir = DirModTime }
+       touchFile removeFile createDir
+  test (monitorFileGlobStr "*") { monitorKindDir = DirModTime }
+       createDir removeDir touchFile
+  where
+    test :: MonitorFilePath
+         -> (RootPath -> String -> IO ())
+         -> (RootPath -> String -> IO ())
+         -> (RootPath -> String -> IO ())
+         -> IO ()
+    test monitorKind touch remove touch' =
+      withFileMonitor $ \root monitor -> do
+        touch root "a"
+        updateMonitor root monitor [monitorKind] () ()
+        threadDelay mtimeChange
+        remove root "a"
+        touch' root "a"
+        reason <- expectMonitorChanged root monitor ()
+        reason @?= MonitoredFileChanged "a"
+
+
+------------------
+-- globs
+--
+
+testGlobNoChange :: Assertion
+testGlobNoChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "good-a")
+    touchFile root ("dir" </> "good-b")
+    updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () ()
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/good-*"]
+
+testGlobAddMatch :: Int -> Assertion
+testGlobAddMatch mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "good-a")
+    updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () ()
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/good-*"]
+    threadDelay mtimeChange
+    touchFile root ("dir" </> "good-b")
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged ("dir" </> "good-b")
+
+testGlobRemoveMatch :: Int -> Assertion
+testGlobRemoveMatch mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "good-a")
+    touchFile root ("dir" </> "good-b")
+    updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () ()
+    threadDelay mtimeChange
+    removeFile root "dir/good-a"
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged ("dir" </> 
"good-a") + +testGlobChangeMatch :: Int -> Assertion +testGlobChangeMatch mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "good-a") + touchFile root ("dir" "good-b") + updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () + threadDelay mtimeChange + touchFile root ("dir" "good-b") + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorFileGlobStr "dir/good-*"] + + touchFileContent root ("dir" "good-b") + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged ("dir" "good-b") + +testGlobAddMatchSubdir :: Int -> Assertion +testGlobAddMatchSubdir mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "a" "good-a") + updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () + threadDelay mtimeChange + touchFile root ("dir" "b" "good-b") + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged ("dir" "b" "good-b") + +testGlobRemoveMatchSubdir :: Int -> Assertion +testGlobRemoveMatchSubdir mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "a" "good-a") + touchFile root ("dir" "b" "good-b") + updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () + threadDelay mtimeChange + removeDir root ("dir" "a") + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged ("dir" "a" "good-a") + +testGlobChangeMatchSubdir :: Int -> Assertion +testGlobChangeMatchSubdir mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "a" "good-a") + touchFile root ("dir" "b" "good-b") + updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () + threadDelay mtimeChange + touchFile root ("dir" "b" "good-b") + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorFileGlobStr "dir/*/good-*"] + + touchFileContent root "dir/b/good-b" + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged ("dir" "b" "good-b") + +-- check nothing goes squiffy with matching in the top dir +testGlobMatchTopDir :: Int -> Assertion +testGlobMatchTopDir mtimeChange = + withFileMonitor $ \root monitor -> do + updateMonitor root monitor [monitorFileGlobStr "*"] () () + threadDelay mtimeChange + touchFile root "a" + reason <- expectMonitorChanged root monitor () + reason @?= MonitoredFileChanged "a" + +testGlobAddNonMatch :: Int -> Assertion +testGlobAddNonMatch mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "good-a") + updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () + threadDelay mtimeChange + touchFile root ("dir" "bad") + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorFileGlobStr "dir/good-*"] + +testGlobRemoveNonMatch :: Int -> Assertion +testGlobRemoveNonMatch mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "good-a") + touchFile root ("dir" "bad") + updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () + threadDelay mtimeChange + removeFile root "dir/bad" + (res, files) <- expectMonitorUnchanged root monitor () + res @?= () + files @?= [monitorFileGlobStr "dir/good-*"] + +testGlobAddNonMatchSubdir :: Int -> Assertion +testGlobAddNonMatchSubdir mtimeChange = + withFileMonitor $ \root monitor -> do + touchFile root ("dir" "a" "good-a") + updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () + threadDelay mtimeChange + touchFile root ("dir" "b" "bad") + (res, files) <- 
expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*/good-*"]
+
+testGlobRemoveNonMatchSubdir :: Int -> Assertion
+testGlobRemoveNonMatchSubdir mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "a" </> "good-a")
+    touchFile root ("dir" </> "b" </> "bad")
+    updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () ()
+    threadDelay mtimeChange
+    removeDir root ("dir" </> "b")
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*/good-*"]
+
+
+-- try and tickle a bug that happens if we don't maintain the invariant that
+-- MonitorStateGlobFiles entries are sorted
+testInvariantMonitorStateGlobFiles :: Int -> Assertion
+testInvariantMonitorStateGlobFiles mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "a")
+    touchFile root ("dir" </> "b")
+    touchFile root ("dir" </> "c")
+    touchFile root ("dir" </> "d")
+    updateMonitor root monitor [monitorFileGlobStr "dir/*"] () ()
+    threadDelay mtimeChange
+    -- so there should be no change (since we're doing content checks)
+    -- but if we can get the dir entries to appear in the wrong order
+    -- then if the sorted invariant is not maintained then we can fool
+    -- the 'probeGlobStatus' into thinking there's changes
+    removeFile root ("dir" </> "a")
+    removeFile root ("dir" </> "b")
+    removeFile root ("dir" </> "c")
+    removeFile root ("dir" </> "d")
+    touchFile root ("dir" </> "d")
+    touchFile root ("dir" </> "c")
+    touchFile root ("dir" </> "b")
+    touchFile root ("dir" </> "a")
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*"]
+
+-- same thing for the subdirs case
+testInvariantMonitorStateGlobDirs :: Int -> Assertion
+testInvariantMonitorStateGlobDirs mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root ("dir" </> "a" </> "file")
+    touchFile root ("dir" </> "b" </> "file")
+    touchFile root ("dir" </> "c" </> "file")
+    touchFile root ("dir" </> "d" </> "file")
+    updateMonitor root monitor [monitorFileGlobStr "dir/*/file"] () ()
+    threadDelay mtimeChange
+    removeDir root ("dir" </> "a")
+    removeDir root ("dir" </> "b")
+    removeDir root ("dir" </> "c")
+    removeDir root ("dir" </> "d")
+    touchFile root ("dir" </> "d" </> "file")
+    touchFile root ("dir" </> "c" </> "file")
+    touchFile root ("dir" </> "b" </> "file")
+    touchFile root ("dir" </> "a" </> "file")
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*/file"]
+
+-- ensure that a glob can match a directory as well as a file
+testGlobMatchDir :: Int -> Assertion
+testGlobMatchDir mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    createDir root ("dir" </> "a")
+    updateMonitor root monitor [monitorFileGlobStr "dir/*"] () ()
+    threadDelay mtimeChange
+    -- nothing changed yet
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*"]
+    -- expect dir/b to match and be detected as changed
+    createDir root ("dir" </> "b")
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged ("dir" </> "b")
+    -- now remove dir/a and expect it to be detected as changed
+    updateMonitor root monitor [monitorFileGlobStr "dir/*"] () ()
+    threadDelay mtimeChange
+    removeDir root ("dir" </> "a")
+    reason2 <- expectMonitorChanged root monitor ()
+    reason2 @?= MonitoredFileChanged ("dir" </> "a")
+
+testGlobMatchDirOnly :: Int -> Assertion
+testGlobMatchDirOnly mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    updateMonitor root monitor [monitorFileGlobStr "dir/*/"] () ()
+    threadDelay mtimeChange
+    -- expect file dir/a to not match, so not detected as changed
+    touchFile root ("dir" </> "a")
+    (res, files) <- expectMonitorUnchanged root monitor ()
+    res @?= ()
+    files @?= [monitorFileGlobStr "dir/*/"]
+    -- note that checking the file monitor for changes can update the
+    -- cached dir mtimes (when it has to record that there's new matches)
+    -- so we need an extra mtime delay
+    threadDelay mtimeChange
+    -- but expect dir/b to match
+    createDir root ("dir" </> "b")
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged ("dir" </> "b")
+
+testGlobChangeFileType :: Int -> Assertion
+testGlobChangeFileType mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    -- change file to dir
+    touchFile root ("dir" </> "a")
+    updateMonitor root monitor [monitorFileGlobStr "dir/*"] () ()
+    threadDelay mtimeChange
+    removeFile root ("dir" </> "a")
+    createDir root ("dir" </> "a")
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged ("dir" </> "a")
+    -- change dir to file
+    updateMonitor root monitor [monitorFileGlobStr "dir/*"] () ()
+    threadDelay mtimeChange
+    removeDir root ("dir" </> "a")
+    touchFile root ("dir" </> "a")
+    reason2 <- expectMonitorChanged root monitor ()
+    reason2 @?= MonitoredFileChanged ("dir" </> "a")
+
+testGlobAbsolutePath :: Int -> Assertion
+testGlobAbsolutePath mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    root' <- absoluteRoot root
+    -- absolute glob, removing a file
+    touchFile root ("dir/good-a")
+    touchFile root ("dir/good-b")
+    updateMonitor root monitor [monitorFileGlobStr (root' </> "dir/good-*")] () ()
+    threadDelay mtimeChange
+    removeFile root "dir/good-a"
+    reason <- expectMonitorChanged root monitor ()
+    reason @?= MonitoredFileChanged (root' </> "dir/good-a")
+    -- absolute glob, adding a file
+    updateMonitor root monitor [monitorFileGlobStr (root' </> "dir/good-*")] () ()
+    threadDelay mtimeChange
+    touchFile root ("dir/good-a")
+    reason2 <- expectMonitorChanged root monitor ()
+    reason2 @?= MonitoredFileChanged (root' </> "dir/good-a")
+    -- absolute glob, changing a file
+    updateMonitor root monitor [monitorFileGlobStr (root' </> "dir/good-*")] () ()
+    threadDelay mtimeChange
+    touchFileContent root "dir/good-b"
+    reason3 <- expectMonitorChanged root monitor ()
+    reason3 @?= MonitoredFileChanged (root' </> "dir/good-b")
+
+
+------------------
+-- value changes
+--
+
+testValueUnchanged :: Assertion
+testValueUnchanged =
+  withFileMonitor $ \root monitor -> do
+    touchFile root "a"
+    updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok"
+    (res, files) <- expectMonitorUnchanged root monitor 42
+    res @?= "ok"
+    files @?= [monitorFile "a"]
+
+testValueChanged :: Assertion
+testValueChanged =
+  withFileMonitor $ \root monitor -> do
+    touchFile root "a"
+    updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok"
+    reason <- expectMonitorChanged root monitor 43
+    reason @?= MonitoredValueChanged 42
+
+testValueAndFileChanged :: Int -> Assertion
+testValueAndFileChanged mtimeChange =
+  withFileMonitor $ \root monitor -> do
+    touchFile root "a"
+
+    -- we change the value and the file, and the value change is reported
+    updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok"
+    threadDelay mtimeChange
+    touchFile root "a"
+    reason <- expectMonitorChanged root monitor 43
+    reason @?= MonitoredValueChanged 42
+
+    -- if fileMonitorCheckIfOnlyValueChanged then if only the value changed
+    -- then it's reported as MonitoredValueChanged
+    let monitor' :: FileMonitor Int String
+        monitor' = monitor { fileMonitorCheckIfOnlyValueChanged = True }
+    updateMonitor root monitor' [monitorFile "a"] 42 "ok"
+    reason2 <- expectMonitorChanged root monitor' 43
+    reason2 @?= MonitoredValueChanged 42
+
+    -- but if a file changed too then we don't report MonitoredValueChanged
+    updateMonitor root monitor' [monitorFile "a"] 42 "ok"
+    threadDelay mtimeChange
+    touchFile root "a"
+    reason3 <- expectMonitorChanged root monitor' 43
+    reason3 @?= MonitoredFileChanged "a"
+
+testValueUpdated :: Assertion
+testValueUpdated =
+  withFileMonitor $ \root monitor -> do
+    touchFile root "a"
+
+    let monitor' :: FileMonitor (Set.Set Int) String
+        monitor' = (monitor :: FileMonitor (Set.Set Int) String) {
+                     fileMonitorCheckIfOnlyValueChanged = True,
+                     fileMonitorKeyValid = Set.isSubsetOf
+                   }
+
+    updateMonitor root monitor' [monitorFile "a"] (Set.fromList [42,43]) "ok"
+    (res,_files) <- expectMonitorUnchanged root monitor' (Set.fromList [42])
+    res @?= "ok"
+
+    reason <- expectMonitorChanged root monitor' (Set.fromList [42,44])
+    reason @?= MonitoredValueChanged (Set.fromList [42,43])
+
+
+-------------
+-- Utils
+
+newtype RootPath = RootPath FilePath
+
+touchFile :: RootPath -> FilePath -> IO ()
+touchFile (RootPath root) fname = do
+    let path = root </> fname
+    IO.createDirectoryIfMissing True (takeDirectory path)
+    IO.writeFile path "touched"
+
+touchFileContent :: RootPath -> FilePath -> IO ()
+touchFileContent (RootPath root) fname = do
+    let path = root </> fname
+    IO.createDirectoryIfMissing True (takeDirectory path)
+    IO.writeFile path "different"
+
+removeFile :: RootPath -> FilePath -> IO ()
+removeFile (RootPath root) fname = IO.removeFile (root </> fname)
+
+touchDir :: RootPath -> FilePath -> IO ()
+touchDir root@(RootPath rootdir) dname = do
+    IO.createDirectoryIfMissing True (rootdir </> dname)
+    touchFile root (dname </> "touch")
+    removeFile root (dname </> "touch")
+
+createDir :: RootPath -> FilePath -> IO ()
+createDir (RootPath root) dname = do
+    let path = root </> dname
+    IO.createDirectoryIfMissing True (takeDirectory path)
+    IO.createDirectory path
+
+removeDir :: RootPath -> FilePath -> IO ()
+removeDir (RootPath root) dname = IO.removeDirectoryRecursive (root </> dname)
+
+absoluteRoot :: RootPath -> IO FilePath
+absoluteRoot (RootPath root) = IO.canonicalizePath root
+
+monitorFileGlobStr :: String -> MonitorFilePath
+monitorFileGlobStr globstr
+  | Just glob <- simpleParse globstr = monitorFileGlob glob
+  | otherwise = error $ "Failed to parse " ++ globstr
+
+
+expectMonitorChanged :: (Binary a, Binary b)
+                     => RootPath -> FileMonitor a b -> a
+                     -> IO (MonitorChangedReason a)
+expectMonitorChanged root monitor key = do
+    res <- checkChanged root monitor key
+    case res of
+      MonitorChanged reason -> return reason
+      MonitorUnchanged _ _  -> throwIO $ HUnitFailure "expected change"
+
+expectMonitorUnchanged :: (Binary a, Binary b)
+                       => RootPath -> FileMonitor a b -> a
+                       -> IO (b, [MonitorFilePath])
+expectMonitorUnchanged root monitor key = do
+    res <- checkChanged root monitor key
+    case res of
+      MonitorChanged _reason   -> throwIO $ HUnitFailure "expected no change"
+      MonitorUnchanged b files -> return (b, files)
+
+checkChanged :: (Binary a, Binary b)
+             => RootPath -> FileMonitor a b
+             -> a -> IO (MonitorChanged a b)
+checkChanged (RootPath root) monitor key =
+    checkFileMonitorChanged monitor root key
+
+updateMonitor :: (Binary a, Binary b)
+              => RootPath -> FileMonitor a b
+              -> [MonitorFilePath] -> a -> b -> IO ()
+updateMonitor (RootPath root) monitor files key result =
+    updateFileMonitor monitor root Nothing files key result
+
+updateMonitorWithTimestamp :: (Binary a, Binary b)
+                           => RootPath -> FileMonitor a b -> MonitorTimestamp
+                           -> [MonitorFilePath] -> a -> b -> IO ()
+updateMonitorWithTimestamp (RootPath root) monitor timestamp files key result =
+    updateFileMonitor monitor root (Just timestamp) files key result
+
+withFileMonitor :: Eq a => (RootPath -> FileMonitor a b -> IO c) -> IO c
+withFileMonitor action = do
+    withTempDirectory silent "." "file-status-" $ \root -> do
+      let file    = root <.> "monitor"
+          monitor = newFileMonitor file
+      finally (action (RootPath root) monitor) $ do
+        exists <- IO.doesFileExist file
+        when exists $ IO.removeFile file
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Glob.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Glob.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Glob.hs 2016-12-23 10:35:31.000000000 +0000
@@ -0,0 +1,203 @@
+{-# LANGUAGE CPP #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+module UnitTests.Distribution.Client.Glob (tests) where
+
+#if !MIN_VERSION_base(4,8,0)
+import Control.Applicative
+#endif
+import Data.Char
+import Data.List
+import Distribution.Text (display, parse, simpleParse)
+import Distribution.Compat.ReadP
+
+import Distribution.Client.Glob
+import UnitTests.Distribution.Client.ArbitraryInstances
+
+import Test.Tasty
+import Test.Tasty.QuickCheck
+import Test.Tasty.HUnit
+import Control.Exception
+
+
+tests :: [TestTree]
+tests =
+  [ testProperty "print/parse roundtrip" prop_roundtrip_printparse
+  , testCase "parse examples" testParseCases
+  ]
+
+--TODO: [nice to have] tests for trivial globs, tests for matching,
+-- tests for windows style file paths
+
+prop_roundtrip_printparse :: FilePathGlob -> Bool
+prop_roundtrip_printparse pathglob =
+  -- can't use simpleParse because it mis-handles trailing spaces
+  case [ x | (x, []) <- readP_to_S parse (display pathglob) ] of
+    xs@(_:_) -> last xs == pathglob
+    _ -> False
+
+-- first run, where we don't even call updateMonitor
+testParseCases :: Assertion
+testParseCases = do
+
+  FilePathGlob (FilePathRoot "/") GlobDirTrailing <- testparse "/"
+  FilePathGlob FilePathHomeDir GlobDirTrailing <- testparse "~/"
+
+  FilePathGlob (FilePathRoot "A:\\") GlobDirTrailing <- testparse "A:/"
+  FilePathGlob (FilePathRoot "Z:\\") GlobDirTrailing <- testparse "z:/"
+  FilePathGlob (FilePathRoot "C:\\") GlobDirTrailing <- testparse "C:\\"
+  FilePathGlob FilePathRelative (GlobFile [Literal "_:"]) <- testparse "_:"
+
+  FilePathGlob FilePathRelative
+    (GlobFile [Literal "."]) <- testparse "."
+ + FilePathGlob FilePathRelative + (GlobFile [Literal "~"]) <- testparse "~" + + FilePathGlob FilePathRelative + (GlobDir [Literal "."] GlobDirTrailing) <- testparse "./" + + FilePathGlob FilePathRelative + (GlobFile [Literal "foo"]) <- testparse "foo" + + FilePathGlob FilePathRelative + (GlobDir [Literal "foo"] + (GlobFile [Literal "bar"])) <- testparse "foo/bar" + + FilePathGlob FilePathRelative + (GlobDir [Literal "foo"] + (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "foo/bar/" + + FilePathGlob (FilePathRoot "/") + (GlobDir [Literal "foo"] + (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "/foo/bar/" + + FilePathGlob FilePathRelative + (GlobFile [WildCard]) <- testparse "*" + + FilePathGlob FilePathRelative + (GlobFile [WildCard,WildCard]) <- testparse "**" -- not helpful but valid + + FilePathGlob FilePathRelative + (GlobFile [WildCard, Literal "foo", WildCard]) <- testparse "*foo*" + + FilePathGlob FilePathRelative + (GlobFile [Literal "foo", WildCard, Literal "bar"]) <- testparse "foo*bar" + + FilePathGlob FilePathRelative + (GlobFile [Union [[WildCard], [Literal "foo"]]]) <- testparse "{*,foo}" + + parseFail "{" + parseFail "}" + parseFail "," + parseFail "{" + parseFail "{{}" + parseFail "{}" + parseFail "{,}" + parseFail "{foo,}" + parseFail "{,foo}" + + return () + +testparse :: String -> IO FilePathGlob +testparse s = + case simpleParse s of + Just p -> return p + Nothing -> throwIO $ HUnitFailure ("expected parse of: " ++ s) + +parseFail :: String -> Assertion +parseFail s = + case simpleParse s :: Maybe FilePathGlob of + Just _ -> throwIO $ HUnitFailure ("expected no parse of: " ++ s) + Nothing -> return () + +instance Arbitrary FilePathGlob where + arbitrary = (FilePathGlob <$> arbitrary <*> arbitrary) + `suchThat` validFilePathGlob + + shrink (FilePathGlob root pathglob) = + [ FilePathGlob root' pathglob' + | (root', pathglob') <- shrink (root, pathglob) + , validFilePathGlob (FilePathGlob root' pathglob') ] + +validFilePathGlob :: FilePathGlob -> Bool +validFilePathGlob (FilePathGlob FilePathRelative pathglob) = + case pathglob of + GlobDirTrailing -> False + GlobDir [Literal "~"] _ -> False + GlobDir [Literal (d:":")] _ + | isLetter d -> False + _ -> True +validFilePathGlob _ = True + +instance Arbitrary FilePathRoot where + arbitrary = + frequency + [ (3, pure FilePathRelative) + , (1, pure (FilePathRoot unixroot)) + , (1, FilePathRoot <$> windrive) + , (1, pure FilePathHomeDir) + ] + where + unixroot = "/" + windrive = do d <- choose ('A', 'Z'); return (d : ":\\") + + shrink FilePathRelative = [] + shrink (FilePathRoot _) = [FilePathRelative] + shrink FilePathHomeDir = [FilePathRelative] + + +instance Arbitrary FilePathGlobRel where + arbitrary = sized $ \sz -> + oneof $ take (max 1 sz) + [ pure GlobDirTrailing + , GlobFile <$> (getGlobPieces <$> arbitrary) + , GlobDir <$> (getGlobPieces <$> arbitrary) + <*> resize (sz `div` 2) arbitrary + ] + + shrink GlobDirTrailing = [] + shrink (GlobFile glob) = + GlobDirTrailing + : [ GlobFile (getGlobPieces glob') | glob' <- shrink (GlobPieces glob) ] + shrink (GlobDir glob pathglob) = + pathglob + : GlobFile glob + : [ GlobDir (getGlobPieces glob') pathglob' + | (glob', pathglob') <- shrink (GlobPieces glob, pathglob) ] + +newtype GlobPieces = GlobPieces { getGlobPieces :: [GlobPiece] } + deriving Eq + +instance Arbitrary GlobPieces where + arbitrary = GlobPieces . 
mergeLiterals <$> shortListOf1 5 arbitrary + + shrink (GlobPieces glob) = + [ GlobPieces (mergeLiterals (getNonEmpty glob')) + | glob' <- shrink (NonEmpty glob) ] + +mergeLiterals :: [GlobPiece] -> [GlobPiece] +mergeLiterals (Literal a : Literal b : ps) = mergeLiterals (Literal (a++b) : ps) +mergeLiterals (Union as : ps) = Union (map mergeLiterals as) : mergeLiterals ps +mergeLiterals (p:ps) = p : mergeLiterals ps +mergeLiterals [] = [] + +instance Arbitrary GlobPiece where + arbitrary = sized $ \sz -> + frequency + [ (3, Literal <$> shortListOf1 10 (elements globLiteralChars)) + , (1, pure WildCard) + , (1, Union <$> resize (sz `div` 2) (shortListOf1 5 (shortListOf1 5 arbitrary))) + ] + + shrink (Literal str) = [ Literal str' + | str' <- shrink str + , not (null str') + , all (`elem` globLiteralChars) str' ] + shrink WildCard = [] + shrink (Union as) = [ Union (map getGlobPieces (getNonEmpty as')) + | as' <- shrink (NonEmpty (map GlobPieces as)) ] + +globLiteralChars :: [Char] +globLiteralChars = ['\0'..'\128'] \\ "*{},/\\" + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/GZipUtils.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/GZipUtils.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/GZipUtils.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,60 @@ +module UnitTests.Distribution.Client.GZipUtils ( + tests + ) where + +import Codec.Compression.GZip as GZip +import Codec.Compression.Zlib as Zlib +import Control.Exception.Base (evaluate) +import Control.Exception (try, SomeException) +import Control.Monad (void) +import Data.ByteString as BS (null) +import Data.ByteString.Lazy as BSL (pack, toChunks) +import Data.ByteString.Lazy.Char8 as BSLL (pack, init, length) +import Data.Monoid ((<>)) +import Distribution.Client.GZipUtils (maybeDecompress) +import Data.Word (Word8) + +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = [ testCase "maybeDecompress" maybeDecompressUnitTest + -- "decompress plain" property is non-trivial to state, + -- maybeDecompress returns input bytestring only if error occurs right at the beginning of the decompression process + -- generating such input would essentially duplicate maybeDecompress implementation + , testProperty "decompress zlib" prop_maybeDecompress_zlib + , testProperty "decompress gzip" prop_maybeDecompress_gzip + ] + +maybeDecompressUnitTest :: Assertion +maybeDecompressUnitTest = + assertBool "decompress plain" (maybeDecompress original == original) + >> assertBool "decompress zlib (with show)" (show (maybeDecompress compressedZlib) == show original) + >> assertBool "decompress gzip (with show)" (show (maybeDecompress compressedGZip) == show original) + >> assertBool "decompress zlib" (maybeDecompress compressedZlib == original) + >> assertBool "decompress gzip" (maybeDecompress compressedGZip == original) + >> assertBool "have no empty chunks" (Prelude.all (not . BS.null) . BSL.toChunks . maybeDecompress $ compressedZlib) + >> (runBrokenStream >>= assertBool "decompress broken stream" . 
isLeft) + where + original = BSLL.pack "original uncompressed input" + compressedZlib = Zlib.compress original + compressedGZip = GZip.compress original + + runBrokenStream :: IO (Either SomeException ()) + runBrokenStream = try . void . evaluate . BSLL.length $ maybeDecompress (BSLL.init compressedZlib <> BSLL.pack "*") + +prop_maybeDecompress_zlib :: [Word8] -> Property +prop_maybeDecompress_zlib ws = property $ maybeDecompress compressedZlib === original + where original = BSL.pack ws + compressedZlib = Zlib.compress original + +prop_maybeDecompress_gzip :: [Word8] -> Property +prop_maybeDecompress_gzip ws = property $ maybeDecompress compressedGZip === original + where original = BSL.pack ws + compressedGZip = GZip.compress original + +-- (Only available from "Data.Either" since 7.8.) +isLeft :: Either a b -> Bool +isLeft (Right _) = False +isLeft (Left _) = True diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ProjectConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ProjectConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/ProjectConfig.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,616 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module UnitTests.Distribution.Client.ProjectConfig (tests) where + +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid +import Control.Applicative +#endif +import Data.Map (Map) +import qualified Data.Map as Map +import Data.List + +import Distribution.Package +import Distribution.PackageDescription hiding (Flag) +import Distribution.Compiler +import Distribution.Version +import Distribution.ParseUtils +import Distribution.Simple.Compiler +import Distribution.Simple.Setup +import Distribution.Simple.InstallDirs +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Simple.Utils +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Db + +import Distribution.Client.Types +import Distribution.Client.Dependency.Types +import Distribution.Client.BuildReports.Types +import Distribution.Client.Targets +import Distribution.Utils.NubList +import Network.URI + +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectConfig.Legacy + +import UnitTests.Distribution.Client.ArbitraryInstances + +import Test.Tasty +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ testGroup "ProjectConfig <-> LegacyProjectConfig round trip" $ + [ testProperty "packages" prop_roundtrip_legacytypes_packages + , testProperty "buildonly" prop_roundtrip_legacytypes_buildonly + , testProperty "specific" prop_roundtrip_legacytypes_specific + ] ++ + -- a couple tests seem to trigger a RTS fault in ghc-7.6 and older + -- unclear why as of yet + concat + [ [ testProperty "shared" prop_roundtrip_legacytypes_shared + , testProperty "local" prop_roundtrip_legacytypes_local + , testProperty "all" prop_roundtrip_legacytypes_all + ] + | not usingGhc76orOlder + ] + + , testGroup "individual parser tests" + [ testProperty "package location" prop_parsePackageLocationTokenQ + ] + + , testGroup "ProjectConfig printing/parsing round trip" + [ testProperty "packages" prop_roundtrip_printparse_packages + , testProperty 
"buildonly" prop_roundtrip_printparse_buildonly + , testProperty "shared" prop_roundtrip_printparse_shared + , testProperty "local" prop_roundtrip_printparse_local + , testProperty "specific" prop_roundtrip_printparse_specific + , testProperty "all" prop_roundtrip_printparse_all + ] + ] + where + usingGhc76orOlder = + case buildCompilerId of + CompilerId GHC v -> v < Version [7,7] [] + _ -> False + + +------------------------------------------------ +-- Round trip: conversion to/from legacy types +-- + +roundtrip :: Eq a => (a -> b) -> (b -> a) -> a -> Bool +roundtrip f f_inv x = + (f_inv . f) x == x + +roundtrip_legacytypes :: ProjectConfig -> Bool +roundtrip_legacytypes = + roundtrip convertToLegacyProjectConfig + convertLegacyProjectConfig + + +prop_roundtrip_legacytypes_all :: ProjectConfig -> Bool +prop_roundtrip_legacytypes_all = + roundtrip_legacytypes + +prop_roundtrip_legacytypes_packages :: ProjectConfig -> Bool +prop_roundtrip_legacytypes_packages config = + roundtrip_legacytypes + config { + projectConfigBuildOnly = mempty, + projectConfigShared = mempty, + projectConfigLocalPackages = mempty, + projectConfigSpecificPackage = mempty + } + +prop_roundtrip_legacytypes_buildonly :: ProjectConfigBuildOnly -> Bool +prop_roundtrip_legacytypes_buildonly config = + roundtrip_legacytypes + mempty { projectConfigBuildOnly = config } + +prop_roundtrip_legacytypes_shared :: ProjectConfigShared -> Bool +prop_roundtrip_legacytypes_shared config = + roundtrip_legacytypes + mempty { projectConfigShared = config } + +prop_roundtrip_legacytypes_local :: PackageConfig -> Bool +prop_roundtrip_legacytypes_local config = + roundtrip_legacytypes + mempty { projectConfigLocalPackages = config } + +prop_roundtrip_legacytypes_specific :: Map PackageName PackageConfig -> Bool +prop_roundtrip_legacytypes_specific config = + roundtrip_legacytypes + mempty { projectConfigSpecificPackage = MapMappend config } + + +-------------------------------------------- +-- Round trip: printing and parsing config +-- + +roundtrip_printparse :: ProjectConfig -> Bool +roundtrip_printparse config = + case (fmap convertLegacyProjectConfig + . parseLegacyProjectConfig + . showLegacyProjectConfig + . 
convertToLegacyProjectConfig) + config of + ParseOk _ x -> x == config + _ -> False + + +prop_roundtrip_printparse_all :: ProjectConfig -> Bool +prop_roundtrip_printparse_all config = + roundtrip_printparse config { + projectConfigBuildOnly = + hackProjectConfigBuildOnly (projectConfigBuildOnly config), + + projectConfigShared = + hackProjectConfigShared (projectConfigShared config) + } + +prop_roundtrip_printparse_packages :: [PackageLocationString] + -> [PackageLocationString] + -> [SourceRepo] + -> [Dependency] + -> Bool +prop_roundtrip_printparse_packages pkglocstrs1 pkglocstrs2 repos named = + roundtrip_printparse + mempty { + projectPackages = map getPackageLocationString pkglocstrs1, + projectPackagesOptional = map getPackageLocationString pkglocstrs2, + projectPackagesRepo = repos, + projectPackagesNamed = named + } + +prop_roundtrip_printparse_buildonly :: ProjectConfigBuildOnly -> Bool +prop_roundtrip_printparse_buildonly config = + roundtrip_printparse + mempty { + projectConfigBuildOnly = hackProjectConfigBuildOnly config + } + +hackProjectConfigBuildOnly :: ProjectConfigBuildOnly -> ProjectConfigBuildOnly +hackProjectConfigBuildOnly config = + config { + -- These two fields are only command line transitory things, not + -- something to be recorded persistently in a config file + projectConfigOnlyDeps = mempty, + projectConfigDryRun = mempty + } + +prop_roundtrip_printparse_shared :: ProjectConfigShared -> Bool +prop_roundtrip_printparse_shared config = + roundtrip_printparse + mempty { + projectConfigShared = hackProjectConfigShared config + } + +hackProjectConfigShared :: ProjectConfigShared -> ProjectConfigShared +hackProjectConfigShared config = + config { + projectConfigConstraints = + --TODO: [required eventually] parse ambiguity in constraint + -- "pkgname -any" as either any version or disabled flag "any". + let ambiguous ((UserConstraintFlags _pkg flags), _) = + (not . null) [ () | (FlagName name, False) <- flags + , "any" `isPrefixOf` name ] + ambiguous _ = False + in filter (not . ambiguous) (projectConfigConstraints config) + } + + +prop_roundtrip_printparse_local :: PackageConfig -> Bool +prop_roundtrip_printparse_local config = + roundtrip_printparse + mempty { + projectConfigLocalPackages = config + } + +prop_roundtrip_printparse_specific :: Map PackageName (NonMEmpty PackageConfig) + -> Bool +prop_roundtrip_printparse_specific config = + roundtrip_printparse + mempty { + projectConfigSpecificPackage = MapMappend (fmap getNonMEmpty config) + } + + +---------------------------- +-- Individual Parser tests +-- + +prop_parsePackageLocationTokenQ :: PackageLocationString -> Bool +prop_parsePackageLocationTokenQ (PackageLocationString str) = + case [ x | (x,"") <- Parse.readP_to_S parsePackageLocationTokenQ + (renderPackageLocationToken str) ] of + [str'] -> str' == str + _ -> False + + +------------------------ +-- Arbitrary instances +-- + +instance Arbitrary ProjectConfig where + arbitrary = + ProjectConfig + <$> (map getPackageLocationString <$> arbitrary) + <*> (map getPackageLocationString <$> arbitrary) + <*> shortListOf 3 arbitrary + <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary + <*> (MapMappend . fmap getNonMEmpty . 
Map.fromList + <$> shortListOf 3 arbitrary) + -- package entries with no content are equivalent to + -- the entry not existing at all, so exclude empty + + shrink (ProjectConfig x0 x1 x2 x3 x4 x5 x6 x7) = + [ ProjectConfig x0' x1' x2' x3' + x4' x5' x6' (MapMappend (fmap getNonMEmpty x7')) + | ((x0', x1', x2', x3'), (x4', x5', x6', x7')) + <- shrink ((x0, x1, x2, x3), + (x4, x5, x6, fmap NonMEmpty (getMapMappend x7))) + ] + +newtype PackageLocationString + = PackageLocationString { getPackageLocationString :: String } + deriving Show + +instance Arbitrary PackageLocationString where + arbitrary = + PackageLocationString <$> + oneof + [ show . getNonEmpty <$> (arbitrary :: Gen (NonEmptyList String)) + , arbitraryGlobLikeStr + , show <$> (arbitrary :: Gen URI) + ] + +arbitraryGlobLikeStr :: Gen String +arbitraryGlobLikeStr = outerTerm + where + outerTerm = concat <$> shortListOf1 4 + (frequency [ (2, token) + , (1, braces <$> innerTerm) ]) + innerTerm = intercalate "," <$> shortListOf1 3 + (frequency [ (3, token) + , (1, braces <$> innerTerm) ]) + token = shortListOf1 4 (elements (['#'..'~'] \\ "{,}")) + braces s = "{" ++ s ++ "}" + + +instance Arbitrary ProjectConfigBuildOnly where + arbitrary = + ProjectConfigBuildOnly + <$> arbitrary + <*> arbitrary + <*> arbitrary + <*> (toNubList <$> shortListOf 2 arbitrary) -- 4 + <*> arbitrary + <*> arbitrary + <*> arbitrary + <*> (fmap getShortToken <$> arbitrary) -- 8 + <*> arbitrary + <*> arbitraryNumJobs + <*> arbitrary + <*> arbitrary -- 12 + <*> (fmap getShortToken <$> arbitrary) + <*> arbitrary + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary) -- 16 + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary) + where + arbitraryNumJobs = fmap (fmap getPositive) <$> arbitrary + + shrink (ProjectConfigBuildOnly + x00 x01 x02 x03 x04 x05 x06 x07 + x08 x09 x10 x11 x12 x13 x14 x15 + x16 x17) = + [ ProjectConfigBuildOnly + x00' x01' x02' x03' x04' + x05' x06' x07' x08' (postShrink_NumJobs x09') + x10' x11' x12 x13' x14 + x15 x16 x17 + | ((x00', x01', x02', x03', x04'), + (x05', x06', x07', x08', x09'), + (x10', x11', x13')) + <- shrink + ((x00, x01, x02, x03, x04), + (x05, x06, x07, x08, preShrink_NumJobs x09), + (x10, x11, x13)) + ] + where + preShrink_NumJobs = fmap (fmap Positive) + postShrink_NumJobs = fmap (fmap getPositive) + +instance Arbitrary ProjectConfigShared where + arbitrary = + ProjectConfigShared + <$> arbitrary -- 4 + <*> arbitraryFlag arbitraryShortToken + <*> arbitraryFlag arbitraryShortToken + <*> arbitrary + <*> arbitrary + <*> (toNubList <$> listOf arbitraryShortToken) + <*> arbitraryConstraints + <*> shortListOf 2 arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + where + arbitraryConstraints :: Gen [(UserConstraint, ConstraintSource)] + arbitraryConstraints = + map (\uc -> (uc, projectConfigConstraintSource)) <$> arbitrary + + shrink (ProjectConfigShared + x00 x01 x02 x03 x04 + x05 x06 x07 x08 x09 + x10 x11 x12 x13) = + [ ProjectConfigShared + x00' (fmap getNonEmpty x01') (fmap getNonEmpty x02') x03' x04' + x05' (postShrink_Constraints x06') x07' x08' x09' + x10' x11' x12' x13' + | ((x00', x01', x02', x03', x04'), + (x05', x06', x07', x08', x09'), + (x10', x11', x12', x13')) + <- shrink + ((x00, fmap NonEmpty x01, fmap NonEmpty x02, x03, x04), + (x05, preShrink_Constraints x06, x07, x08, x09), + (x10, x11, x12, x13)) + ] + where + preShrink_Constraints = map fst + postShrink_Constraints = map (\uc -> (uc, 
projectConfigConstraintSource)) + +projectConfigConstraintSource :: ConstraintSource +projectConfigConstraintSource = + ConstraintSourceProjectConfig "TODO" + +instance Arbitrary PackageConfig where + arbitrary = + PackageConfig + <$> (MapLast . Map.fromList <$> shortListOf 10 + ((,) <$> arbitraryProgramName + <*> arbitraryShortToken)) + <*> (MapMappend . Map.fromList <$> shortListOf 10 + ((,) <$> arbitraryProgramName + <*> listOf arbitraryShortToken)) + <*> (toNubList <$> listOf arbitraryShortToken) + <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> shortListOf 5 arbitraryShortToken + <*> arbitrary + <*> arbitrary <*> arbitrary + <*> shortListOf 5 arbitraryShortToken + <*> shortListOf 5 arbitraryShortToken + <*> shortListOf 5 arbitraryShortToken + <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitraryFlag arbitraryShortToken + <*> arbitrary + <*> arbitrary <*> arbitrary + <*> arbitrary + <*> arbitraryFlag arbitraryShortToken + <*> arbitrary + <*> arbitraryFlag arbitraryShortToken + <*> arbitrary + where + arbitraryProgramName :: Gen String + arbitraryProgramName = + elements [ programName prog + | (prog, _) <- knownPrograms (defaultProgramDb) ] + + shrink (PackageConfig + x00 x01 x02 x03 x04 + x05 x06 x07 x08 x09 + x10 x11 x12 x13 x14 + x15 x16 x17 x18 x19 + x20 x21 x22 x23 x24 + x25 x26 x27 x28 x29 + x30 x31 x32 x33 x34 + x35 x36 x37 x38 x39 + x40) = + [ PackageConfig + (postShrink_Paths x00') + (postShrink_Args x01') x02' x03' x04' + x05' x06' x07' x08' x09' + x10' x11' (map getNonEmpty x12') x13' x14' + x15' (map getNonEmpty x16') + (map getNonEmpty x17') + (map getNonEmpty x18') + x19' + x20' x21' x22' x23' x24' + x25' x26' x27' x28' x29' + x30' x31' x32' x33' x34' + x35' x36' (fmap getNonEmpty x37') x38' + (fmap getNonEmpty x39') + x40' + | (((x00', x01', x02', x03', x04'), + (x05', x06', x07', x08', x09'), + (x10', x11', x12', x13', x14'), + (x15', x16', x17', x18', x19')), + ((x20', x21', x22', x23', x24'), + (x25', x26', x27', x28', x29'), + (x30', x31', x32', x33', x34'), + (x35', x36', x37', x38', x39'), + (x40'))) + <- shrink + (((preShrink_Paths x00, preShrink_Args x01, x02, x03, x04), + (x05, x06, x07, x08, x09), + (x10, x11, map NonEmpty x12, x13, x14), + (x15, map NonEmpty x16, + map NonEmpty x17, + map NonEmpty x18, + x19)), + ((x20, x21, x22, x23, x24), + (x25, x26, x27, x28, x29), + (x30, x31, x32, x33, x34), + (x35, x36, fmap NonEmpty x37, x38, fmap NonEmpty x39), + (x40))) + ] + where + preShrink_Paths = Map.map NonEmpty + . Map.mapKeys NoShrink + . getMapLast + postShrink_Paths = MapLast + . Map.map getNonEmpty + . Map.mapKeys getNoShrink + preShrink_Args = Map.map (NonEmpty . map NonEmpty) + . Map.mapKeys NoShrink + . getMapMappend + postShrink_Args = MapMappend + . Map.map (map getNonEmpty . getNonEmpty) + . 
Map.mapKeys getNoShrink + + +instance Arbitrary SourceRepo where + arbitrary = (SourceRepo RepoThis + <$> arbitrary + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary) + <*> (fmap getShortToken <$> arbitrary)) + `suchThat` (/= emptySourceRepo) + + shrink (SourceRepo _ x1 x2 x3 x4 x5 x6) = + [ repo + | ((x1', x2', x3'), (x4', x5', x6')) + <- shrink ((x1, + fmap ShortToken x2, + fmap ShortToken x3), + (fmap ShortToken x4, + fmap ShortToken x5, + fmap ShortToken x6)) + , let repo = SourceRepo RepoThis x1' + (fmap getShortToken x2') + (fmap getShortToken x3') + (fmap getShortToken x4') + (fmap getShortToken x5') + (fmap getShortToken x6') + , repo /= emptySourceRepo + ] + +emptySourceRepo :: SourceRepo +emptySourceRepo = SourceRepo RepoThis Nothing Nothing Nothing + Nothing Nothing Nothing + + +instance Arbitrary RepoType where + arbitrary = elements knownRepoTypes + +instance Arbitrary ReportLevel where + arbitrary = elements [NoReports .. DetailedReports] + +instance Arbitrary CompilerFlavor where + arbitrary = elements knownCompilerFlavors + where + --TODO: [code cleanup] export knownCompilerFlavors from D.Compiler + -- it's already defined there, just need it exported. + knownCompilerFlavors = + [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC] + +instance Arbitrary a => Arbitrary (InstallDirs a) where + arbitrary = + InstallDirs + <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 4 + <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 8 + <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- 12 + <*> arbitrary <*> arbitrary -- 14 + +instance Arbitrary PackageDB where + arbitrary = oneof [ pure GlobalPackageDB + , pure UserPackageDB + , SpecificPackageDB . getShortToken <$> arbitrary + ] + +instance Arbitrary RemoteRepo where + arbitrary = + RemoteRepo + <$> arbitraryShortToken `suchThat` (not . (":" `isPrefixOf`)) + <*> arbitrary -- URI + <*> arbitrary + <*> listOf arbitraryRootKey + <*> (fmap getNonNegative arbitrary) + <*> pure False + where + arbitraryRootKey = + shortListOf1 5 (oneof [ choose ('0', '9') + , choose ('a', 'f') ]) + +instance Arbitrary UserConstraint where + arbitrary = + oneof + [ UserConstraintVersion <$> arbitrary <*> arbitrary + , UserConstraintInstalled <$> arbitrary + , UserConstraintSource <$> arbitrary + , UserConstraintFlags <$> arbitrary <*> shortListOf1 3 arbitrary + , UserConstraintStanzas <$> arbitrary <*> ((\x->[x]) <$> arbitrary) + ] + +instance Arbitrary OptionalStanza where + arbitrary = elements [minBound..maxBound] + +instance Arbitrary FlagName where + arbitrary = FlagName <$> flagident + where + flagident = lowercase <$> shortListOf1 5 (elements flagChars) + `suchThat` (("-" /=) . 
take 1) + flagChars = "-_" ++ ['a'..'z'] + +instance Arbitrary PreSolver where + arbitrary = elements [minBound..maxBound] + +instance Arbitrary AllowNewer where + arbitrary = oneof [ pure AllowNewerNone + , AllowNewerSome <$> shortListOf1 3 arbitrary + , pure AllowNewerAll + ] + +instance Arbitrary AllowNewerDep where + arbitrary = oneof [ AllowNewerDep <$> arbitrary + , AllowNewerDepScoped <$> arbitrary <*> arbitrary + ] + +instance Arbitrary ProfDetailLevel where + arbitrary = elements [ d | (_,_,d) <- knownProfDetailLevels ] + +instance Arbitrary OptimisationLevel where + arbitrary = elements [minBound..maxBound] + +instance Arbitrary DebugInfoLevel where + arbitrary = elements [minBound..maxBound] + +instance Arbitrary URI where + arbitrary = + URI <$> elements ["file:", "http:", "https:"] + <*> (Just <$> arbitrary) + <*> (('/':) <$> arbitraryURIToken) + <*> (('?':) <$> arbitraryURIToken) + <*> pure "" + +instance Arbitrary URIAuth where + arbitrary = + URIAuth <$> pure "" -- no password as this does not roundtrip + <*> arbitraryURIToken + <*> arbitraryURIPort + +arbitraryURIToken :: Gen String +arbitraryURIToken = + shortListOf1 6 (elements (filter isUnreserved ['\0'..'\255'])) + +arbitraryURIPort :: Gen String +arbitraryURIPort = + oneof [ pure "", (':':) <$> shortListOf1 4 (choose ('0','9')) ] + diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox/Timestamp.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,63 @@ +module UnitTests.Distribution.Client.Sandbox.Timestamp (tests) where + +import System.FilePath + +import Distribution.Simple.Utils (withTempDirectory) +import Distribution.Verbosity + +import Distribution.Client.Compat.Time +import Distribution.Client.Sandbox.Timestamp + +import Test.Tasty +import Test.Tasty.HUnit + +tests :: [TestTree] +tests = + [ testCase "timestamp record version 1 can be read" timestampReadTest_v1 + , testCase "timestamp record version 2 can be read" timestampReadTest_v2 + , testCase "written timestamp record can be read" timestampReadWriteTest ] + +timestampRecord_v1 :: String +timestampRecord_v1 = + "[(\"i386-linux-ghc-8.0.0.20160204\",[(\"/foo/bar/Baz\",1455350946)])" ++ + ",(\"i386-linux-ghc-7.10.3\",[(\"/foo/bar/Baz\",1455484719)])]\n" + +timestampRecord_v2 :: String +timestampRecord_v2 = + "2\n" ++ + "[(\"i386-linux-ghc-8.0.0.20160204\",[(\"/foo/bar/Baz\",1455350946)])" ++ + ",(\"i386-linux-ghc-7.10.3\",[(\"/foo/bar/Baz\",1455484719)])]" + +timestampReadTest_v1 :: Assertion +timestampReadTest_v1 = + timestampReadTest timestampRecord_v1 $ + map (\(i, ts) -> + (i, map (\(p, ModTime t) -> + (p, posixSecondsToModTime . fromIntegral $ t)) ts)) + timestampRecord + +timestampReadTest_v2 :: Assertion +timestampReadTest_v2 = timestampReadTest timestampRecord_v2 timestampRecord + +timestampReadTest :: FilePath -> [TimestampFileRecord] -> Assertion +timestampReadTest fileContent expected = + withTempDirectory silent "." 
"cabal-timestamp-" $ \dir -> do + let fileName = dir "timestamp-record" + writeFile fileName fileContent + tRec <- readTimestampFile fileName + assertEqual "expected timestamp records to be equal" + expected tRec + +timestampRecord :: [TimestampFileRecord] +timestampRecord = + [("i386-linux-ghc-8.0.0.20160204",[("/foo/bar/Baz",ModTime 1455350946)]) + ,("i386-linux-ghc-7.10.3",[("/foo/bar/Baz",ModTime 1455484719)])] + +timestampReadWriteTest :: Assertion +timestampReadWriteTest = + withTempDirectory silent "." "cabal-timestamp-" $ \dir -> do + let fileName = dir "timestamp-record" + writeTimestampFile fileName timestampRecord + tRec <- readTimestampFile fileName + assertEqual "expected timestamp records to be equal" + timestampRecord tRec diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Sandbox.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,28 @@ +module UnitTests.Distribution.Client.Sandbox ( + tests + ) where + +import Distribution.Client.Sandbox (withSandboxBinDirOnSearchPath) + +import Test.Tasty +import Test.Tasty.HUnit + +import System.FilePath (getSearchPath, ()) + +tests :: [TestTree] +tests = [ testCase "sandboxBinDirOnSearchPath" sandboxBinDirOnSearchPathTest + , testCase "oldSearchPathRestored" oldSearchPathRestoreTest + ] + +sandboxBinDirOnSearchPathTest :: Assertion +sandboxBinDirOnSearchPathTest = + withSandboxBinDirOnSearchPath "foo" $ do + r <- getSearchPath + assertBool "'foo/bin' not on search path" $ ("foo" "bin") `elem` r + +oldSearchPathRestoreTest :: Assertion +oldSearchPathRestoreTest = do + r <- getSearchPath + withSandboxBinDirOnSearchPath "foo" $ return () + r' <- getSearchPath + assertEqual "Old search path wasn't restored" r r' diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Targets.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Targets.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Targets.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,58 @@ +module UnitTests.Distribution.Client.Targets ( + tests + ) where + +import Distribution.Client.Targets (UserConstraint (..), readUserConstraint) +import Distribution.Compat.ReadP (ReadP, readP_to_S) +import Distribution.Package (PackageName (..)) +import Distribution.ParseUtils (parseCommaList) +import Distribution.Text (parse) + +import Test.Tasty +import Test.Tasty.HUnit + +import Data.Char (isSpace) + +tests :: [TestTree] +tests = [ testCase "readUserConstraint" readUserConstraintTest + , testCase "parseUserConstraint" parseUserConstraintTest + , testCase "readUserConstraints" readUserConstraintsTest + ] + +readUserConstraintTest :: Assertion +readUserConstraintTest = + assertEqual ("Couldn't read constraint: '" ++ constr ++ "'") expected actual + where + 
pkgName = "template-haskell" + constr = pkgName ++ " installed" + + expected = UserConstraintInstalled (PackageName pkgName) + actual = let (Right r) = readUserConstraint constr in r + +parseUserConstraintTest :: Assertion +parseUserConstraintTest = + assertEqual ("Couldn't parse constraint: '" ++ constr ++ "'") expected actual + where + pkgName = "template-haskell" + constr = pkgName ++ " installed" + + expected = [UserConstraintInstalled (PackageName pkgName)] + actual = [ x | (x, ys) <- readP_to_S parseUserConstraint constr + , all isSpace ys] + + parseUserConstraint :: ReadP r UserConstraint + parseUserConstraint = parse + +readUserConstraintsTest :: Assertion +readUserConstraintsTest = + assertEqual ("Couldn't read constraints: '" ++ constr ++ "'") expected actual + where + pkgName = "template-haskell" + constr = pkgName ++ " installed" + + expected = [[UserConstraintInstalled (PackageName pkgName)]] + actual = [ x | (x, ys) <- readP_to_S parseUserConstraints constr + , all isSpace ys] + + parseUserConstraints :: ReadP r [UserConstraint] + parseUserConstraints = parseCommaList parse diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Tar.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Tar.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/Tar.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,75 @@ +module UnitTests.Distribution.Client.Tar ( + tests + ) where + +import Distribution.Client.Tar ( filterEntries + , filterEntriesM + ) +import Codec.Archive.Tar ( Entries(..) + , foldEntries + ) +import Codec.Archive.Tar.Entry ( EntryContent(..) + , simpleEntry + , Entry(..) + , toTarPath + ) + +import Test.Tasty +import Test.Tasty.HUnit + +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +import Control.Monad.Writer.Lazy (runWriterT, tell) + +tests :: [TestTree] +tests = [ testCase "filterEntries" filterTest + , testCase "filterEntriesM" filterMTest + ] + +filterTest :: Assertion +filterTest = do + let e1 = getFileEntry "file1" "x" + e2 = getFileEntry "file2" "y" + p = (\e -> let (NormalFile dta _) = entryContent e + str = BS.Char8.unpack dta + in not . (=="y") $ str) + assertEqual "Unexpected result for filter" "xz" $ + entriesToString $ filterEntries p $ Next e1 $ Next e2 Done + assertEqual "Unexpected result for filter" "z" $ + entriesToString $ filterEntries p $ Done + assertEqual "Unexpected result for filter" "xf" $ + entriesToString $ filterEntries p $ Next e1 $ Next e2 $ Fail "f" + +filterMTest :: Assertion +filterMTest = do + let e1 = getFileEntry "file1" "x" + e2 = getFileEntry "file2" "y" + p = (\e -> let (NormalFile dta _) = entryContent e + str = BS.Char8.unpack dta + in tell "t" >> return (not . 
(=="y") $ str)) + + (r, w) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 Done + assertEqual "Unexpected result for filterM" "xz" $ entriesToString r + assertEqual "Unexpected result for filterM w" "tt" w + + (r1, w1) <- runWriterT $ filterEntriesM p $ Done + assertEqual "Unexpected result for filterM" "z" $ entriesToString r1 + assertEqual "Unexpected result for filterM w" "" w1 + + (r2, w2) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 $ Fail "f" + assertEqual "Unexpected result for filterM" "xf" $ entriesToString r2 + assertEqual "Unexpected result for filterM w" "tt" w2 + +getFileEntry :: FilePath -> [Char] -> Entry +getFileEntry pth dta = + simpleEntry tp $ NormalFile dta' $ BS.length dta' + where tp = case toTarPath False pth of + Right tp' -> tp' + Left e -> error e + dta' = BS.Char8.pack dta + +entriesToString :: Entries String -> String +entriesToString = + foldEntries (\e acc -> let (NormalFile dta _) = entryContent e + str = BS.Char8.unpack dta + in str ++ acc) "z" id diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/UserConfig.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/UserConfig.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/UserConfig.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Distribution/Client/UserConfig.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,112 @@ +{-# LANGUAGE CPP #-} +module UnitTests.Distribution.Client.UserConfig + ( tests + ) where + +import Control.Exception (bracket) +import Control.Monad (replicateM_) +import Data.List (sort, nub) +#if !MIN_VERSION_base(4,8,0) +import Data.Monoid +#endif +import System.Directory (doesFileExist, + getCurrentDirectory, getTemporaryDirectory) +import System.FilePath (()) + +import Test.Tasty +import Test.Tasty.HUnit + +import Distribution.Client.Config +import Distribution.Utils.NubList (fromNubList) +import Distribution.Client.Setup (GlobalFlags (..), InstallFlags (..)) +import Distribution.Client.Utils (removeExistingFile) +import Distribution.Simple.Setup (Flag (..), ConfigFlags (..), fromFlag) +import Distribution.Simple.Utils (withTempDirectory) +import Distribution.Verbosity (silent) + +tests :: [TestTree] +tests = [ testCase "nullDiffOnCreate" nullDiffOnCreateTest + , testCase "canDetectDifference" canDetectDifference + , testCase "canUpdateConfig" canUpdateConfig + , testCase "doubleUpdateConfig" doubleUpdateConfig + , testCase "newDefaultConfig" newDefaultConfig + ] + +nullDiffOnCreateTest :: Assertion +nullDiffOnCreateTest = bracketTest $ \configFile -> do + -- Create a new default config file in our test directory. + _ <- loadConfig silent (Flag configFile) + -- Now we read it in and compare it against the default. + diff <- userConfigDiff $ globalFlags configFile + assertBool (unlines $ "Following diff should be empty:" : diff) $ null diff + + +canDetectDifference :: Assertion +canDetectDifference = bracketTest $ \configFile -> do + -- Create a new default config file in our test directory. 
+  _ <- loadConfig silent (Flag configFile)
+  appendFile configFile "verbose: 0\n"
+  diff <- userConfigDiff $ globalFlags configFile
+  assertBool (unlines $ "Should detect a difference:" : diff) $
+    diff == [ "+ verbose: 0" ]
+
+
+canUpdateConfig :: Assertion
+canUpdateConfig = bracketTest $ \configFile -> do
+  -- Write a trivial cabal file.
+  writeFile configFile "tests: True\n"
+  -- Update the config file.
+  userConfigUpdate silent $ globalFlags configFile
+  -- Load it again.
+  updated <- loadConfig silent (Flag configFile)
+  assertBool ("Field 'tests' should be True") $
+    fromFlag (configTests $ savedConfigureFlags updated)
+
+
+doubleUpdateConfig :: Assertion
+doubleUpdateConfig = bracketTest $ \configFile -> do
+  -- Create a new default config file in our test directory.
+  _ <- loadConfig silent (Flag configFile)
+  -- Update it twice.
+  replicateM_ 2 . userConfigUpdate silent $ globalFlags configFile
+  -- Load it again.
+  updated <- loadConfig silent (Flag configFile)
+
+  assertBool ("Field 'remote-repo' doesn't contain duplicates") $
+    listUnique (map show . fromNubList . globalRemoteRepos $ savedGlobalFlags updated)
+  assertBool ("Field 'extra-prog-path' doesn't contain duplicates") $
+    listUnique (map show . fromNubList . configProgramPathExtra $ savedConfigureFlags updated)
+  assertBool ("Field 'build-summary' doesn't contain duplicates") $
+    listUnique (map show . fromNubList . installSummaryFile $ savedInstallFlags updated)
+
+
+newDefaultConfig :: Assertion
+newDefaultConfig = do
+  sysTmpDir <- getTemporaryDirectory
+  withTempDirectory silent sysTmpDir "cabal-test" $ \tmpDir -> do
+    let configFile = tmpDir </> "tmp.config"
+    _ <- createDefaultConfigFile silent configFile
+    exists <- doesFileExist configFile
+    assertBool ("Config file should be written to " ++ configFile) exists
+
+
+globalFlags :: FilePath -> GlobalFlags
+globalFlags configFile = mempty { globalConfigFile = Flag configFile }
+
+
+listUnique :: Ord a => [a] -> Bool
+listUnique xs =
+    let sorted = sort xs
+    in nub sorted == xs
+
+
+bracketTest :: (FilePath -> IO ()) -> Assertion
+bracketTest =
+    bracket testSetup testTearDown
+  where
+    testSetup :: IO FilePath
+    testSetup = fmap (</> "test-user-config") getCurrentDirectory
+
+    testTearDown :: FilePath -> IO ()
+    testTearDown configFile =
+      mapM_ removeExistingFile [configFile, configFile ++ ".backup"]
diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Options.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Options.hs
--- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests/Options.hs 1970-01-01 00:00:00.000000000 +0000
+++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests/Options.hs 2016-12-23 10:35:31.000000000 +0000
@@ -0,0 +1,41 @@
+{-# LANGUAGE DeriveDataTypeable #-}
+
+module UnitTests.Options ( OptionShowSolverLog(..)
+                         , OptionMtimeChangeDelay(..)
+ , extraOptions ) + where + +import Data.Proxy +import Data.Typeable + +import Test.Tasty.Options + +{------------------------------------------------------------------------------- + Test options +-------------------------------------------------------------------------------} + +extraOptions :: [OptionDescription] +extraOptions = + [ Option (Proxy :: Proxy OptionShowSolverLog) + , Option (Proxy :: Proxy OptionMtimeChangeDelay) + ] + +newtype OptionShowSolverLog = OptionShowSolverLog Bool + deriving Typeable + +instance IsOption OptionShowSolverLog where + defaultValue = OptionShowSolverLog False + parseValue = fmap OptionShowSolverLog . safeRead + optionName = return "show-solver-log" + optionHelp = return "Show full log from the solver" + optionCLParser = flagCLParser Nothing (OptionShowSolverLog True) + +newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int + deriving Typeable + +instance IsOption OptionMtimeChangeDelay where + defaultValue = OptionMtimeChangeDelay 0 + parseValue = fmap OptionMtimeChangeDelay . safeRead + optionName = return "mtime-change-delay" + optionHelp = return $ "How long to wait before attempting to detect" + ++ "file modification, in microseconds" diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests.hs cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests.hs --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/cabal-install-1.24.0.2/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/cabal-install-1.24.0.2/tests/UnitTests.hs 2016-12-23 10:35:31.000000000 +0000 @@ -0,0 +1,106 @@ +{-# LANGUAGE ScopedTypeVariables #-} + +module Main + where + +import Test.Tasty + +import Control.Monad +import Data.Time.Clock +import System.FilePath + +import Distribution.Simple.Utils +import Distribution.Verbosity + +import Distribution.Client.Compat.Time + +import qualified UnitTests.Distribution.Client.Compat.Time +import qualified UnitTests.Distribution.Client.Dependency.Modular.PSQ +import qualified UnitTests.Distribution.Client.Dependency.Modular.Solver +import qualified UnitTests.Distribution.Client.FileMonitor +import qualified UnitTests.Distribution.Client.Glob +import qualified UnitTests.Distribution.Client.GZipUtils +import qualified UnitTests.Distribution.Client.Sandbox +import qualified UnitTests.Distribution.Client.Sandbox.Timestamp +import qualified UnitTests.Distribution.Client.Tar +import qualified UnitTests.Distribution.Client.Targets +import qualified UnitTests.Distribution.Client.UserConfig +import qualified UnitTests.Distribution.Client.ProjectConfig + +import UnitTests.Options + + +tests :: Int -> TestTree +tests mtimeChangeCalibrated = + askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) -> + let mtimeChange = if mtimeChangeProvided /= 0 + then mtimeChangeProvided + else mtimeChangeCalibrated + in + testGroup "Unit Tests" + [ testGroup "UnitTests.Distribution.Client.Compat.Time" $ + UnitTests.Distribution.Client.Compat.Time.tests mtimeChange + , testGroup "UnitTests.Distribution.Client.Dependency.Modular.PSQ" + UnitTests.Distribution.Client.Dependency.Modular.PSQ.tests + , testGroup "UnitTests.Distribution.Client.Dependency.Modular.Solver" + UnitTests.Distribution.Client.Dependency.Modular.Solver.tests + , testGroup "UnitTests.Distribution.Client.FileMonitor" $ + UnitTests.Distribution.Client.FileMonitor.tests mtimeChange + , testGroup "UnitTests.Distribution.Client.Glob" + 
UnitTests.Distribution.Client.Glob.tests + , testGroup "Distribution.Client.GZipUtils" + UnitTests.Distribution.Client.GZipUtils.tests + , testGroup "Distribution.Client.Sandbox" + UnitTests.Distribution.Client.Sandbox.tests + , testGroup "Distribution.Client.Sandbox.Timestamp" + UnitTests.Distribution.Client.Sandbox.Timestamp.tests + , testGroup "Distribution.Client.Tar" + UnitTests.Distribution.Client.Tar.tests + , testGroup "Distribution.Client.Targets" + UnitTests.Distribution.Client.Targets.tests + , testGroup "UnitTests.Distribution.Client.UserConfig" + UnitTests.Distribution.Client.UserConfig.tests + , testGroup "UnitTests.Distribution.Client.ProjectConfig" + UnitTests.Distribution.Client.ProjectConfig.tests + ] + +main :: IO () +main = do + mtimeChangeDelay <- calibrateMtimeChangeDelay + defaultMainWithIngredients + (includingOptions extraOptions : defaultIngredients) + (tests mtimeChangeDelay) + +-- Based on code written by Neill Mitchell for Shake. See +-- 'sleepFileTimeCalibrate' in 'Test.Type'. The returned delay is never smaller +-- than 10 ms, but never larger than 1 second. +calibrateMtimeChangeDelay :: IO Int +calibrateMtimeChangeDelay = do + withTempDirectory silent "." "calibration-" $ \dir -> do + let fileName = dir "probe" + mtimes <- forM [1..25] $ \(i::Int) -> time $ do + writeFile fileName $ show i + t0 <- getModTime fileName + let spin j = do + writeFile fileName $ show (i,j) + t1 <- getModTime fileName + unless (t0 < t1) (spin $ j + 1) + spin (0::Int) + let mtimeChange = maximum mtimes + mtimeChange' = min 1000000 $ (max 10000 mtimeChange) * 2 + notice normal $ "File modification time resolution calibration completed, " + ++ "maximum delay observed: " + ++ (show . toMillis $ mtimeChange ) ++ " ms. " + ++ "Will be using delay of " ++ (show . toMillis $ mtimeChange') + ++ " for test runs." + return mtimeChange' + where + toMillis :: Int -> Double + toMillis x = fromIntegral x / 1000.0 + + time :: IO () -> IO Int + time act = do + t0 <- getCurrentTime + act + t1 <- getCurrentTime + return . ceiling $! (t1 `diffUTCTime` t0) * 1e6 -- microseconds diff -Nru cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/hackage-security-0.5.2.2/hackage-security.cabal cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/hackage-security-0.5.2.2/hackage-security.cabal --- cabal-install-1.24-1.24.0.1+git20161105.0.b8f7afb/src/hackage-security-0.5.2.2/hackage-security.cabal 2016-11-07 10:02:59.000000000 +0000 +++ cabal-install-1.24-1.24.0.2+git20161208.0.c5ebf12/src/hackage-security-0.5.2.2/hackage-security.cabal 2016-12-23 10:35:41.000000000 +0000 @@ -1,245 +1,249 @@ -name: hackage-security -version: 0.5.2.2 -synopsis: Hackage security library -description: The hackage security library provides both server and - client utilities for securing the Hackage package server - (). It is based on The Update - Framework (), a set of - recommendations developed by security researchers at - various universities in the US as well as developers on the - Tor project (). - . - The current implementation supports only index signing, - thereby enabling untrusted mirrors. It does not yet provide - facilities for author package signing. - . - The library has two main entry points: - "Hackage.Security.Client" is the main entry point for - clients (the typical example being @cabal@), and - "Hackage.Security.Server" is the main entry point for - servers (the typical example being @hackage-server@). 
-license: BSD3 -license-file: LICENSE -author: Edsko de Vries -maintainer: edsko@well-typed.com -copyright: Copyright 2015-2016 Well-Typed LLP -category: Distribution -homepage: https://github.com/well-typed/hackage-security -bug-reports: https://github.com/well-typed/hackage-security/issues -build-type: Simple -cabal-version: >=1.10 - -extra-source-files: - ChangeLog.md - -source-repository head - type: git - location: https://github.com/well-typed/hackage-security.git - -flag base48 - description: Are we using base 4.8 or later? - manual: False - -flag use-network-uri - description: Are we using network-uri? - manual: False - -Flag old-directory - description: Use directory < 1.2 and old-time - manual: False - default: False - -library - -- Most functionality is exported through the top-level entry points .Client - -- and .Server; the other exported modules are intended for qualified imports. - exposed-modules: Hackage.Security.Client - Hackage.Security.Client.Formats - Hackage.Security.Client.Repository - Hackage.Security.Client.Repository.Cache - Hackage.Security.Client.Repository.Local - Hackage.Security.Client.Repository.Remote - Hackage.Security.Client.Repository.HttpLib - Hackage.Security.Client.Verify - Hackage.Security.JSON - Hackage.Security.Key.Env - Hackage.Security.Server - Hackage.Security.Trusted - Hackage.Security.TUF.FileMap - Hackage.Security.Util.Checked - Hackage.Security.Util.IO - Hackage.Security.Util.Lens - Hackage.Security.Util.Path - Hackage.Security.Util.Pretty - Hackage.Security.Util.Some - Text.JSON.Canonical - other-modules: Hackage.Security.Key - Hackage.Security.Trusted.TCB - Hackage.Security.TUF - Hackage.Security.TUF.Common - Hackage.Security.TUF.FileInfo - Hackage.Security.TUF.Header - Hackage.Security.TUF.Layout.Cache - Hackage.Security.TUF.Layout.Index - Hackage.Security.TUF.Layout.Repo - Hackage.Security.TUF.Mirrors - Hackage.Security.TUF.Paths - Hackage.Security.TUF.Patterns - Hackage.Security.TUF.Root - Hackage.Security.TUF.Signed - Hackage.Security.TUF.Snapshot - Hackage.Security.TUF.Targets - Hackage.Security.TUF.Timestamp - Hackage.Security.Util.Base64 - Hackage.Security.Util.JSON - Hackage.Security.Util.Stack - Hackage.Security.Util.TypedEmbedded - Prelude - -- We support ghc 7.4 (bundled with Cabal 1.14) and up - build-depends: base >= 4.5 && < 5, - base16-bytestring >= 0.1.1 && < 0.2, - base64-bytestring >= 1.0 && < 1.1, - bytestring >= 0.9 && < 0.11, - Cabal >= 1.14 && < 1.26, - containers >= 0.4 && < 0.6, - directory >= 1.1.0.2 && < 1.3, - ed25519 >= 0.0 && < 0.1, - filepath >= 1.2 && < 1.5, - mtl >= 2.2 && < 2.3, - parsec >= 3.1 && < 3.2, - pretty >= 1.0 && < 1.2, - cryptohash-sha256 >= 0.11 && < 0.12, - -- 0.4.2 introduces TarIndex, 0.4.4 introduces more - -- functionality, 0.5.0 changes type of serialise - tar >= 0.5 && < 0.6, - time >= 1.2 && < 1.7, - transformers >= 0.4 && < 0.6, - zlib >= 0.5 && < 0.7, - -- whatever versions are bundled with ghc: - template-haskell, - ghc-prim - if flag(old-directory) - build-depends: directory < 1.2, old-time >= 1 && < 1.2 - else - build-depends: directory >= 1.2 - hs-source-dirs: src - default-language: Haskell2010 - default-extensions: DefaultSignatures - DeriveDataTypeable - DeriveFunctor - FlexibleContexts - FlexibleInstances - GADTs - GeneralizedNewtypeDeriving - KindSignatures - MultiParamTypeClasses - NamedFieldPuns - NoMonomorphismRestriction - RankNTypes - RecordWildCards - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeFamilies - TypeOperators - ViewPatterns - 
other-extensions: BangPatterns - CPP - OverlappingInstances - PackageImports - UndecidableInstances - -- use the new stage1/cross-compile-friendly Quotes subset of TH for new GHCs - if impl(ghc >= 8.0) - -- place holder until Hackage allows to edit in the new extension token - -- other-extensions: TemplateHaskellQuotes - other-extensions: - else - other-extensions: TemplateHaskell - - ghc-options: -Wall - - if flag(base48) - build-depends: base >= 4.8 - else - build-depends: old-locale >= 1.0 - - -- The URI type got split out off the network package after version 2.5, and - -- moved to a separate network-uri package. Since we don't need the rest of - -- network here, it would suffice to rely only on network-uri: - -- - -- > if flag(use-network-uri) - -- > build-depends: network-uri >= 2.6 && < 2.7 - -- > else - -- > build-depends: network >= 2.5 && < 2.6 - -- - -- However, if we did the same in hackage-security-HTTP, Cabal would consider - -- those two flag choices (hackage-security:use-network-uri and - -- hackage-security-HTTP:use-network-uri) to be completely independent; but - -- they aren't: if it links hackage-security against network-uri and - -- hackage-security-HTTP against network, we will get type errors when - -- hackage-security-HTTP tries to pass a URI to hackage-security. - -- - -- It might seem we can solve this problem by re-exporting the URI type in - -- hackage-security and avoid the dependency in hackage-security-HTTP - -- altogether. However, this merely shifts the problem: hackage-security-HTTP - -- relies on the HTTP library which--surprise!--makes the same choice between - -- depending on network or network-uri. Cabal will not notice that we cannot - -- build hackage-security and hackage-security-HTTP against network-uri but - -- HTTP against network. - -- - -- We solve the problem by explicitly relying on network-2.6 when choosing - -- network-uri. This dependency is redundant, strictly speaking. However, it - -- serves as a proxy for forcing flag choices: since all packages in a - -- solution must be linked against the same version of network, having one - -- version of network in one branch of the conditional and another version of - -- network in the other branch forces the choice to be consistent throughout. - -- (Note that the HTTP library does the same thing, though in this case the - -- dependency in network is not redundant.) - if flag(use-network-uri) - build-depends: network-uri >= 2.6 && < 2.7, - network >= 2.6 && < 2.7 - else - build-depends: network >= 2.5 && < 2.6 - - if impl(ghc >= 7.8) - other-extensions: RoleAnnotations - - if impl(ghc >= 7.10) - other-extensions: AllowAmbiguousTypes --- StaticPointers --- ^^^ Temporarily disabled because Hackage doesn't know yet about this --- extension and will therefore reject this package. 
- -test-suite TestSuite - type: exitcode-stdio-1.0 - main-is: TestSuite.hs - other-modules: TestSuite.HttpMem - TestSuite.InMemCache - TestSuite.InMemRepo - TestSuite.InMemRepository - TestSuite.JSON - TestSuite.PrivateKeys - TestSuite.Util.StrictMVar - build-depends: base, - Cabal, - containers, - HUnit, - bytestring, - hackage-security, - network-uri, - tar, - tasty, - tasty-hunit, - tasty-quickcheck, - QuickCheck, - temporary, - time, - zlib - hs-source-dirs: tests - default-language: Haskell2010 - default-extensions: FlexibleContexts - GADTs - KindSignatures - RankNTypes - RecordWildCards - ScopedTypeVariables - ghc-options: -Wall +name: hackage-security +version: 0.5.2.2 +x-revision: 1 +-- xrevision:1 integrates +-- https://github.com/well-typed/hackage-security/commit/e4bff90a82a588ff2d0beedfc50d5fdf75861d48 +synopsis: Hackage security library +description: The hackage security library provides both server and + client utilities for securing the Hackage package server + (). It is based on The Update + Framework (), a set of + recommendations developed by security researchers at + various universities in the US as well as developers on the + Tor project (). + . + The current implementation supports only index signing, + thereby enabling untrusted mirrors. It does not yet provide + facilities for author package signing. + . + The library has two main entry points: + "Hackage.Security.Client" is the main entry point for + clients (the typical example being @cabal@), and + "Hackage.Security.Server" is the main entry point for + servers (the typical example being @hackage-server@). +license: BSD3 +license-file: LICENSE +author: Edsko de Vries +maintainer: edsko@well-typed.com +copyright: Copyright 2015-2016 Well-Typed LLP +category: Distribution +homepage: https://github.com/well-typed/hackage-security +bug-reports: https://github.com/well-typed/hackage-security/issues +build-type: Simple +cabal-version: >=1.10 + +extra-source-files: + ChangeLog.md + +source-repository head + type: git + location: https://github.com/well-typed/hackage-security.git + +flag base48 + description: Are we using base 4.8 or later? + manual: False + +flag use-network-uri + description: Are we using network-uri? + manual: False + +Flag old-directory + description: Use directory < 1.2 and old-time + manual: False + default: False + + +library + -- Most functionality is exported through the top-level entry points .Client + -- and .Server; the other exported modules are intended for qualified imports. 
+ exposed-modules: Hackage.Security.Client + Hackage.Security.Client.Formats + Hackage.Security.Client.Repository + Hackage.Security.Client.Repository.Cache + Hackage.Security.Client.Repository.Local + Hackage.Security.Client.Repository.Remote + Hackage.Security.Client.Repository.HttpLib + Hackage.Security.Client.Verify + Hackage.Security.JSON + Hackage.Security.Key.Env + Hackage.Security.Server + Hackage.Security.Trusted + Hackage.Security.TUF.FileMap + Hackage.Security.Util.Checked + Hackage.Security.Util.IO + Hackage.Security.Util.Lens + Hackage.Security.Util.Path + Hackage.Security.Util.Pretty + Hackage.Security.Util.Some + Text.JSON.Canonical + other-modules: Hackage.Security.Key + Hackage.Security.Trusted.TCB + Hackage.Security.TUF + Hackage.Security.TUF.Common + Hackage.Security.TUF.FileInfo + Hackage.Security.TUF.Header + Hackage.Security.TUF.Layout.Cache + Hackage.Security.TUF.Layout.Index + Hackage.Security.TUF.Layout.Repo + Hackage.Security.TUF.Mirrors + Hackage.Security.TUF.Paths + Hackage.Security.TUF.Patterns + Hackage.Security.TUF.Root + Hackage.Security.TUF.Signed + Hackage.Security.TUF.Snapshot + Hackage.Security.TUF.Targets + Hackage.Security.TUF.Timestamp + Hackage.Security.Util.Base64 + Hackage.Security.Util.JSON + Hackage.Security.Util.Stack + Hackage.Security.Util.TypedEmbedded + Prelude + -- We support ghc 7.4 (bundled with Cabal 1.14) and up + build-depends: base >= 4.5 && < 5, + base16-bytestring >= 0.1.1 && < 0.2, + base64-bytestring >= 1.0 && < 1.1, + bytestring >= 0.9 && < 0.11, + Cabal >= 1.14 && < 1.26, + containers >= 0.4 && < 0.6, + directory >= 1.1.0.2 && < 1.4, + ed25519 >= 0.0 && < 0.1, + filepath >= 1.2 && < 1.5, + mtl >= 2.2 && < 2.3, + parsec >= 3.1 && < 3.2, + pretty >= 1.0 && < 1.2, + cryptohash-sha256 >= 0.11 && < 0.12, + -- 0.4.2 introduces TarIndex, 0.4.4 introduces more + -- functionality, 0.5.0 changes type of serialise + tar >= 0.5 && < 0.6, + time >= 1.2 && < 1.7, + transformers >= 0.4 && < 0.6, + zlib >= 0.5 && < 0.7, + -- whatever versions are bundled with ghc: + template-haskell, + ghc-prim + if flag(old-directory) + build-depends: directory < 1.2, old-time >= 1 && < 1.2 + else + build-depends: directory >= 1.2 + hs-source-dirs: src + default-language: Haskell2010 + default-extensions: DefaultSignatures + DeriveDataTypeable + DeriveFunctor + FlexibleContexts + FlexibleInstances + GADTs + GeneralizedNewtypeDeriving + KindSignatures + MultiParamTypeClasses + NamedFieldPuns + NoMonomorphismRestriction + RankNTypes + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeFamilies + TypeOperators + ViewPatterns + other-extensions: BangPatterns + CPP + OverlappingInstances + PackageImports + UndecidableInstances + -- use the new stage1/cross-compile-friendly Quotes subset of TH for new GHCs + if impl(ghc >= 8.0) + -- place holder until Hackage allows to edit in the new extension token + -- other-extensions: TemplateHaskellQuotes + other-extensions: + else + other-extensions: TemplateHaskell + + ghc-options: -Wall + + if flag(base48) + build-depends: base >= 4.8 + else + build-depends: old-locale >= 1.0 + + -- The URI type got split out off the network package after version 2.5, and + -- moved to a separate network-uri package. 
Since we don't need the rest of + -- network here, it would suffice to rely only on network-uri: + -- + -- > if flag(use-network-uri) + -- > build-depends: network-uri >= 2.6 && < 2.7 + -- > else + -- > build-depends: network >= 2.5 && < 2.6 + -- + -- However, if we did the same in hackage-security-HTTP, Cabal would consider + -- those two flag choices (hackage-security:use-network-uri and + -- hackage-security-HTTP:use-network-uri) to be completely independent; but + -- they aren't: if it links hackage-security against network-uri and + -- hackage-security-HTTP against network, we will get type errors when + -- hackage-security-HTTP tries to pass a URI to hackage-security. + -- + -- It might seem we can solve this problem by re-exporting the URI type in + -- hackage-security and avoid the dependency in hackage-security-HTTP + -- altogether. However, this merely shifts the problem: hackage-security-HTTP + -- relies on the HTTP library which--surprise!--makes the same choice between + -- depending on network or network-uri. Cabal will not notice that we cannot + -- build hackage-security and hackage-security-HTTP against network-uri but + -- HTTP against network. + -- + -- We solve the problem by explicitly relying on network-2.6 when choosing + -- network-uri. This dependency is redundant, strictly speaking. However, it + -- serves as a proxy for forcing flag choices: since all packages in a + -- solution must be linked against the same version of network, having one + -- version of network in one branch of the conditional and another version of + -- network in the other branch forces the choice to be consistent throughout. + -- (Note that the HTTP library does the same thing, though in this case the + -- dependency in network is not redundant.) + if flag(use-network-uri) + build-depends: network-uri >= 2.6 && < 2.7, + network >= 2.6 && < 2.7 + else + build-depends: network >= 2.5 && < 2.6 + + if impl(ghc >= 7.8) + other-extensions: RoleAnnotations + + if impl(ghc >= 7.10) + other-extensions: AllowAmbiguousTypes +-- StaticPointers +-- ^^^ Temporarily disabled because Hackage doesn't know yet about this +-- extension and will therefore reject this package. + +test-suite TestSuite + type: exitcode-stdio-1.0 + main-is: TestSuite.hs + other-modules: TestSuite.HttpMem + TestSuite.InMemCache + TestSuite.InMemRepo + TestSuite.InMemRepository + TestSuite.JSON + TestSuite.PrivateKeys + TestSuite.Util.StrictMVar + build-depends: base, + Cabal, + containers, + HUnit, + bytestring, + hackage-security, + network-uri, + tar, + tasty, + tasty-hunit, + tasty-quickcheck, + QuickCheck, + temporary, + time, + zlib + hs-source-dirs: tests + default-language: Haskell2010 + default-extensions: FlexibleContexts + GADTs + KindSignatures + RankNTypes + RecordWildCards + ScopedTypeVariables + ghc-options: -Wall
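
Aside (not part of the patch above): the new UnitTests/Options.hs and UnitTests.hs hunks use tasty's custom-option machinery so that a command-line flag can override the calibrated mtime-change delay. The following is a minimal, self-contained Haskell sketch of that pattern; the module, option type, and test names are invented for illustration, and tasty-hunit is assumed only to give the tree a leaf test.

{-# LANGUAGE DeriveDataTypeable #-}
module Main (main) where

import Data.Proxy    (Proxy (..))
import Data.Typeable (Typeable)
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.Options

-- A custom tasty option, analogous to OptionMtimeChangeDelay above;
-- 0 means "fall back to the calibrated value".
newtype MtimeDelay = MtimeDelay Int deriving Typeable

instance IsOption MtimeDelay where
  defaultValue = MtimeDelay 0
  parseValue   = fmap MtimeDelay . safeRead
  optionName   = return "mtime-change-delay"
  optionHelp   = return "Delay in microseconds (0 = use the calibrated value)"

-- 'askOption' makes the command-line value visible inside the test tree;
-- a non-zero value overrides the calibrated default, mirroring 'tests' above.
tree :: Int -> TestTree
tree calibrated = askOption $ \(MtimeDelay provided) ->
  let delay = if provided /= 0 then provided else calibrated
  in testCase "delay is positive" $ assertBool "expected > 0" (delay > 0)

main :: IO ()
main = defaultMainWithIngredients
  (includingOptions [Option (Proxy :: Proxy MtimeDelay)] : defaultIngredients)
  (tree 100000)  -- stand-in for what a calibration step would return

Under these assumptions, registering the option via includingOptions makes it available as --mtime-change-delay on the test executable's command line, with 0 falling back to the calibrated value; this mirrors how the patched UnitTests.hs combines calibrateMtimeChangeDelay with the option defined in UnitTests/Options.hs.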