author     Jasper Van der Jeugt <m@jaspervdj.be>  2017-06-19 11:57:23 +0200
committer  Jasper Van der Jeugt <m@jaspervdj.be>  2017-06-19 11:57:23 +0200
commit     67ecff7ad383640bc73d64edc2506c7cc648a134 (patch)
tree       6d328e43c3ab86c29a2d775fabaa23618c16fb51 /src
parent     2df3209bafa08e6b77ee4a8598fc503269513527 (diff)
download   hakyll-67ecff7ad383640bc73d64edc2506c7cc648a134.tar.gz

Move src/ to lib/, put Init.hs in src/
Diffstat (limited to 'src')
-rw-r--r--  src/Data/List/Extended.hs | 15
-rw-r--r--  src/Data/Yaml/Extended.hs | 24
-rw-r--r--  src/Hakyll.hs | 62
-rw-r--r--  src/Hakyll/Check.hs | 290
-rw-r--r--  src/Hakyll/Commands.hs | 160
-rw-r--r--  src/Hakyll/Core/Compiler.hs | 189
-rw-r--r--  src/Hakyll/Core/Compiler/Internal.hs | 265
-rw-r--r--  src/Hakyll/Core/Compiler/Require.hs | 121
-rw-r--r--  src/Hakyll/Core/Configuration.hs | 134
-rw-r--r--  src/Hakyll/Core/Dependencies.hs | 146
-rw-r--r--  src/Hakyll/Core/File.hs | 93
-rw-r--r--  src/Hakyll/Core/Identifier.hs | 80
-rw-r--r--  src/Hakyll/Core/Identifier/Pattern.hs | 322
-rw-r--r--  src/Hakyll/Core/Item.hs | 63
-rw-r--r--  src/Hakyll/Core/Item/SomeItem.hs | 23
-rw-r--r--  src/Hakyll/Core/Logger.hs | 97
-rw-r--r--  src/Hakyll/Core/Metadata.hs | 138
-rw-r--r--  src/Hakyll/Core/Provider.hs | 43
-rw-r--r--  src/Hakyll/Core/Provider/Internal.hs | 202
-rw-r--r--  src/Hakyll/Core/Provider/Metadata.hs | 151
-rw-r--r--  src/Hakyll/Core/Provider/MetadataCache.hs | 62
-rw-r--r--  src/Hakyll/Core/Routes.hs | 194
-rw-r--r--  src/Hakyll/Core/Rules.hs | 223
-rw-r--r--  src/Hakyll/Core/Rules/Internal.hs | 109
-rw-r--r--  src/Hakyll/Core/Runtime.hs | 276
-rw-r--r--  src/Hakyll/Core/Store.hs | 197
-rw-r--r--  src/Hakyll/Core/UnixFilter.hs | 159
-rw-r--r--  src/Hakyll/Core/Util/File.hs | 56
-rw-r--r--  src/Hakyll/Core/Util/Parser.hs | 32
-rw-r--r--  src/Hakyll/Core/Util/String.hs | 78
-rw-r--r--  src/Hakyll/Core/Writable.hs | 56
-rw-r--r--  src/Hakyll/Main.hs | 165
-rw-r--r--  src/Hakyll/Preview/Poll.hs | 119
-rw-r--r--  src/Hakyll/Preview/Server.hs | 35
-rw-r--r--  src/Hakyll/Web/CompressCss.hs | 86
-rw-r--r--  src/Hakyll/Web/Feed.hs | 135
-rw-r--r--  src/Hakyll/Web/Html.hs | 184
-rw-r--r--  src/Hakyll/Web/Html/RelativizeUrls.hs | 52
-rw-r--r--  src/Hakyll/Web/Paginate.hs | 153
-rw-r--r--  src/Hakyll/Web/Pandoc.hs | 164
-rw-r--r--  src/Hakyll/Web/Pandoc/Biblio.hs | 115
-rw-r--r--  src/Hakyll/Web/Pandoc/Binary.hs | 32
-rw-r--r--  src/Hakyll/Web/Pandoc/FileType.hs | 74
-rw-r--r--  src/Hakyll/Web/Redirect.hs | 87
-rw-r--r--  src/Hakyll/Web/Tags.hs | 344
-rw-r--r--  src/Hakyll/Web/Template.hs | 154
-rw-r--r--  src/Hakyll/Web/Template/Context.hs | 379
-rw-r--r--  src/Hakyll/Web/Template/Internal.hs | 203
-rw-r--r--  src/Hakyll/Web/Template/Internal/Element.hs | 298
-rw-r--r--  src/Hakyll/Web/Template/Internal/Trim.hs | 95
-rw-r--r--  src/Hakyll/Web/Template/List.hs | 91
-rw-r--r--  src/Init.hs | 96
52 files changed, 96 insertions, 7025 deletions
diff --git a/src/Data/List/Extended.hs b/src/Data/List/Extended.hs
deleted file mode 100644
index 485cba8..0000000
--- a/src/Data/List/Extended.hs
+++ /dev/null
@@ -1,15 +0,0 @@
-module Data.List.Extended
- ( module Data.List
- , breakWhen
- ) where
-
-import Data.List
-
--- | Like 'break', but can act on the entire tail of the list.
-breakWhen :: ([a] -> Bool) -> [a] -> ([a], [a])
-breakWhen predicate = go []
- where
- go buf [] = (reverse buf, [])
- go buf (x : xs)
- | predicate (x : xs) = (reverse buf, x : xs)
- | otherwise = go (x : buf) xs
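For reference, a quick illustrative sketch (not part of the patch) of how the deleted breakWhen behaves; the predicate sees the whole remaining tail rather than a single element (isPrefixOf comes from Data.List):

> breakWhen ("--" `isPrefixOf`) "foo--bar"   -- ("foo", "--bar")
> breakWhen (const True)        "foo--bar"   -- ("", "foo--bar")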
diff --git a/src/Data/Yaml/Extended.hs b/src/Data/Yaml/Extended.hs
deleted file mode 100644
index c940ff7..0000000
--- a/src/Data/Yaml/Extended.hs
+++ /dev/null
@@ -1,24 +0,0 @@
-module Data.Yaml.Extended
- ( module Data.Yaml
- , toString
- , toList
- ) where
-
-import qualified Data.Text as T
-import qualified Data.Vector as V
-import Data.Yaml
-import Data.Scientific
-
-toString :: Value -> Maybe String
-toString (String t) = Just (T.unpack t)
-toString (Bool True) = Just "true"
-toString (Bool False) = Just "false"
--- | Make sure that numeric fields containing integer numbers are shown as
--- | integers (i.e., "42" instead of "42.0").
-toString (Number d) | isInteger d = Just (formatScientific Fixed (Just 0) d)
- | otherwise = Just (show d)
-toString _ = Nothing
-
-toList :: Value -> Maybe [Value]
-toList (Array a) = Just (V.toList a)
-toList _ = Nothing
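An illustrative sketch of the deleted toString behaviour (not part of the patch): integral YAML numbers render without a trailing ".0".

> toString (Number 42)    -- Just "42"  (not Just "42.0")
> toString (Number 1.5)   -- Just "1.5"
> toString (Bool True)    -- Just "true"
> toString Null           -- Nothing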
diff --git a/src/Hakyll.hs b/src/Hakyll.hs
deleted file mode 100644
index 7b64bcb..0000000
--- a/src/Hakyll.hs
+++ /dev/null
@@ -1,62 +0,0 @@
---------------------------------------------------------------------------------
--- | Top-level module exporting all modules that are interesting for the user
-{-# LANGUAGE CPP #-}
-module Hakyll
- ( module Hakyll.Core.Compiler
- , module Hakyll.Core.Configuration
- , module Hakyll.Core.File
- , module Hakyll.Core.Identifier
- , module Hakyll.Core.Identifier.Pattern
- , module Hakyll.Core.Item
- , module Hakyll.Core.Metadata
- , module Hakyll.Core.Routes
- , module Hakyll.Core.Rules
- , module Hakyll.Core.UnixFilter
- , module Hakyll.Core.Util.File
- , module Hakyll.Core.Util.String
- , module Hakyll.Core.Writable
- , module Hakyll.Main
- , module Hakyll.Web.CompressCss
- , module Hakyll.Web.Feed
- , module Hakyll.Web.Html
- , module Hakyll.Web.Html.RelativizeUrls
- , module Hakyll.Web.Pandoc
- , module Hakyll.Web.Paginate
- , module Hakyll.Web.Pandoc.Biblio
- , module Hakyll.Web.Pandoc.FileType
- , module Hakyll.Web.Redirect
- , module Hakyll.Web.Tags
- , module Hakyll.Web.Template
- , module Hakyll.Web.Template.Context
- , module Hakyll.Web.Template.List
- ) where
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Configuration
-import Hakyll.Core.File
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Routes
-import Hakyll.Core.Rules
-import Hakyll.Core.UnixFilter
-import Hakyll.Core.Util.File
-import Hakyll.Core.Util.String
-import Hakyll.Core.Writable
-import Hakyll.Main
-import Hakyll.Web.CompressCss
-import Hakyll.Web.Feed
-import Hakyll.Web.Html
-import Hakyll.Web.Html.RelativizeUrls
-import Hakyll.Web.Paginate
-import Hakyll.Web.Pandoc
-import Hakyll.Web.Pandoc.Biblio
-import Hakyll.Web.Pandoc.FileType
-import Hakyll.Web.Redirect
-import Hakyll.Web.Tags
-import Hakyll.Web.Template
-import Hakyll.Web.Template.Context
-import Hakyll.Web.Template.List
diff --git a/src/Hakyll/Check.hs b/src/Hakyll/Check.hs
deleted file mode 100644
index da77bac..0000000
--- a/src/Hakyll/Check.hs
+++ /dev/null
@@ -1,290 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Check
- ( Check (..)
- , check
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar,
- readMVar)
-import Control.Exception (SomeAsyncException (..),
- SomeException (..), throw, try)
-import Control.Monad (foldM, forM_)
-import Control.Monad.Reader (ReaderT, ask, runReaderT)
-import Control.Monad.State (StateT, get, modify, runStateT)
-import Control.Monad.Trans (liftIO)
-import Control.Monad.Trans.Resource (runResourceT)
-import Data.ByteString.Char8 (unpack)
-import Data.List (isPrefixOf)
-import qualified Data.Map.Lazy as Map
-import Network.URI (unEscapeString)
-import System.Directory (doesDirectoryExist,
- doesFileExist)
-import System.Exit (ExitCode (..))
-import System.FilePath (takeDirectory, takeExtension,
- (</>))
-import qualified Text.HTML.TagSoup as TS
-
-
---------------------------------------------------------------------------------
-#ifdef CHECK_EXTERNAL
-import Data.List (intercalate)
-import Data.Typeable (cast)
-import Data.Version (versionBranch)
-import GHC.Exts (fromString)
-import qualified Network.HTTP.Conduit as Http
-import qualified Network.HTTP.Types as Http
-import qualified Paths_hakyll as Paths_hakyll
-#endif
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Configuration
-import Hakyll.Core.Logger (Logger)
-import qualified Hakyll.Core.Logger as Logger
-import Hakyll.Core.Util.File
-import Hakyll.Web.Html
-
-
---------------------------------------------------------------------------------
-data Check = All | InternalLinks
- deriving (Eq, Ord, Show)
-
-
---------------------------------------------------------------------------------
-check :: Configuration -> Logger -> Check -> IO ExitCode
-check config logger check' = do
- ((), state) <- runChecker checkDestination config logger check'
- failed <- countFailedLinks state
- return $ if failed > 0 then ExitFailure 1 else ExitSuccess
-
-
---------------------------------------------------------------------------------
-countFailedLinks :: CheckerState -> IO Int
-countFailedLinks state = foldM addIfFailure 0 (Map.elems state)
- where addIfFailure failures mvar = do
- checkerWrite <- readMVar mvar
- return $ failures + checkerFaulty checkerWrite
-
-
---------------------------------------------------------------------------------
-data CheckerRead = CheckerRead
- { checkerConfig :: Configuration
- , checkerLogger :: Logger
- , checkerCheck :: Check
- }
-
-
---------------------------------------------------------------------------------
-data CheckerWrite = CheckerWrite
- { checkerFaulty :: Int
- , checkerOk :: Int
- } deriving (Show)
-
-
---------------------------------------------------------------------------------
-instance Monoid CheckerWrite where
- mempty = CheckerWrite 0 0
- mappend (CheckerWrite f1 o1) (CheckerWrite f2 o2) =
- CheckerWrite (f1 + f2) (o1 + o2)
-
-
---------------------------------------------------------------------------------
-type CheckerState = Map.Map URL (MVar CheckerWrite)
-
-
---------------------------------------------------------------------------------
-type Checker a = ReaderT CheckerRead (StateT CheckerState IO) a
-
-
---------------------------------------------------------------------------------
-type URL = String
-
-
---------------------------------------------------------------------------------
-runChecker :: Checker a -> Configuration -> Logger -> Check
- -> IO (a, CheckerState)
-runChecker checker config logger check' = do
- let read' = CheckerRead
- { checkerConfig = config
- , checkerLogger = logger
- , checkerCheck = check'
- }
- Logger.flush logger
- runStateT (runReaderT checker read') Map.empty
-
-
---------------------------------------------------------------------------------
-checkDestination :: Checker ()
-checkDestination = do
- config <- checkerConfig <$> ask
- files <- liftIO $ getRecursiveContents
- (const $ return False) (destinationDirectory config)
-
- let htmls =
- [ destinationDirectory config </> file
- | file <- files
- , takeExtension file == ".html"
- ]
-
- forM_ htmls checkFile
-
-
---------------------------------------------------------------------------------
-checkFile :: FilePath -> Checker ()
-checkFile filePath = do
- logger <- checkerLogger <$> ask
- contents <- liftIO $ readFile filePath
- Logger.header logger $ "Checking file " ++ filePath
-
- let urls = getUrls $ TS.parseTags contents
- forM_ urls $ \url -> do
- Logger.debug logger $ "Checking link " ++ url
- m <- liftIO newEmptyMVar
- checkUrlIfNeeded filePath (canonicalizeUrl url) m
- where
- -- Check scheme-relative links
- canonicalizeUrl url = if schemeRelative url then "http:" ++ url else url
- schemeRelative = isPrefixOf "//"
-
-
---------------------------------------------------------------------------------
-checkUrlIfNeeded :: FilePath -> URL -> MVar CheckerWrite -> Checker ()
-checkUrlIfNeeded filepath url m = do
- logger <- checkerLogger <$> ask
- needsCheck <- (== All) . checkerCheck <$> ask
- checked <- (url `Map.member`) <$> get
- if not needsCheck || checked
- then Logger.debug logger "Already checked, skipping"
- else do modify $ Map.insert url m
- checkUrl filepath url
-
-
---------------------------------------------------------------------------------
-checkUrl :: FilePath -> URL -> Checker ()
-checkUrl filePath url
- | isExternal url = checkExternalUrl url
- | hasProtocol url = skip url $ Just "Unknown protocol, skipping"
- | otherwise = checkInternalUrl filePath url
- where
- validProtoChars = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ "+-."
- hasProtocol str = case break (== ':') str of
- (proto, ':' : _) -> all (`elem` validProtoChars) proto
- _ -> False
-
-
---------------------------------------------------------------------------------
-ok :: URL -> Checker ()
-ok url = putCheckResult url mempty {checkerOk = 1}
-
-
---------------------------------------------------------------------------------
-skip :: URL -> Maybe String -> Checker ()
-skip url maybeReason = do
- logger <- checkerLogger <$> ask
- case maybeReason of
- Nothing -> return ()
- Just reason -> Logger.debug logger reason
- putCheckResult url mempty {checkerOk = 1}
-
-
---------------------------------------------------------------------------------
-faulty :: URL -> Maybe String -> Checker ()
-faulty url reason = do
- logger <- checkerLogger <$> ask
- Logger.error logger $ "Broken link to " ++ show url ++ explanation
- putCheckResult url mempty {checkerFaulty = 1}
- where
- formatExplanation = (" (" ++) . (++ ")")
- explanation = maybe "" formatExplanation reason
-
-
---------------------------------------------------------------------------------
-putCheckResult :: URL -> CheckerWrite -> Checker ()
-putCheckResult url result = do
- state <- get
- let maybeMVar = Map.lookup url state
- case maybeMVar of
- Just m -> liftIO $ putMVar m result
- Nothing -> do
- logger <- checkerLogger <$> ask
- Logger.debug logger "Failed to find existing entry for checked URL"
-
-
---------------------------------------------------------------------------------
-checkInternalUrl :: FilePath -> URL -> Checker ()
-checkInternalUrl base url = case url' of
- "" -> ok url
- _ -> do
- config <- checkerConfig <$> ask
- let dest = destinationDirectory config
- dir = takeDirectory base
- filePath
- | "/" `isPrefixOf` url' = dest ++ url'
- | otherwise = dir </> url'
-
- exists <- checkFileExists filePath
- if exists then ok url else faulty url Nothing
- where
- url' = stripFragments $ unEscapeString url
-
-
---------------------------------------------------------------------------------
-checkExternalUrl :: URL -> Checker ()
-#ifdef CHECK_EXTERNAL
-checkExternalUrl url = do
- result <- requestExternalUrl url
- case result of
- Left (SomeException e) ->
- case (cast e :: Maybe SomeAsyncException) of
- Just ae -> throw ae
- _ -> faulty url (Just $ showException e)
- Right _ -> ok url
- where
- -- Convert exception to a concise form
- showException e = case cast e of
- Just (Http.HttpExceptionRequest _ e') -> show e'
- _ -> head $ words $ show e
-
-requestExternalUrl :: URL -> Checker (Either SomeException Bool)
-requestExternalUrl url = liftIO $ try $ do
- mgr <- Http.newManager Http.tlsManagerSettings
- runResourceT $ do
- request <- Http.parseRequest url
- response <- Http.http (settings request) mgr
- let code = Http.statusCode (Http.responseStatus response)
- return $ code >= 200 && code < 300
- where
- -- Add additional request info
- settings r = r
- { Http.method = "HEAD"
- , Http.redirectCount = 10
- , Http.requestHeaders = ("User-Agent", ua) : Http.requestHeaders r
- }
-
- -- Nice user agent info
- ua = fromString $ "hakyll-check/" ++
- (intercalate "." $ map show $ versionBranch Paths_hakyll.version)
-#else
-checkExternalUrl url = skip url Nothing
-#endif
-
-
---------------------------------------------------------------------------------
--- | Wraps doesFileExist, also checks for index.html
-checkFileExists :: FilePath -> Checker Bool
-checkFileExists filePath = liftIO $ do
- file <- doesFileExist filePath
- dir <- doesDirectoryExist filePath
- case (file, dir) of
- (True, _) -> return True
- (_, True) -> doesFileExist $ filePath </> "index.html"
- _ -> return False
-
-
---------------------------------------------------------------------------------
-stripFragments :: String -> String
-stripFragments = takeWhile (not . flip elem ['?', '#'])
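Illustrative behaviour of the deleted stripFragments helper, which the internal-link check uses to drop query strings and fragments before looking files up on disk (the example URLs are hypothetical):

> stripFragments "guide.html#install"    -- "guide.html"
> stripFragments "search.html?q=hakyll"  -- "search.html"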
diff --git a/src/Hakyll/Commands.hs b/src/Hakyll/Commands.hs
deleted file mode 100644
index 6763fe7..0000000
--- a/src/Hakyll/Commands.hs
+++ /dev/null
@@ -1,160 +0,0 @@
- --------------------------------------------------------------------------------
--- | Implementation of Hakyll commands: build, preview...
-{-# LANGUAGE CPP #-}
-module Hakyll.Commands
- ( build
- , check
- , clean
- , preview
- , rebuild
- , server
- , deploy
- , watch
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Concurrent
-import System.Exit (ExitCode, exitWith)
-
---------------------------------------------------------------------------------
-import qualified Hakyll.Check as Check
-import Hakyll.Core.Configuration
-import Hakyll.Core.Logger (Logger)
-import qualified Hakyll.Core.Logger as Logger
-import Hakyll.Core.Rules
-import Hakyll.Core.Rules.Internal
-import Hakyll.Core.Runtime
-import Hakyll.Core.Util.File
-
---------------------------------------------------------------------------------
-#ifdef WATCH_SERVER
-import Hakyll.Preview.Poll (watchUpdates)
-#endif
-
-#ifdef PREVIEW_SERVER
-import Hakyll.Preview.Server
-#endif
-
-#ifdef mingw32_HOST_OS
-import Control.Monad (void)
-import System.IO.Error (catchIOError)
-#endif
-
-
---------------------------------------------------------------------------------
--- | Build the site
-build :: Configuration -> Logger -> Rules a -> IO ExitCode
-build conf logger rules = fst <$> run conf logger rules
-
-
---------------------------------------------------------------------------------
--- | Run the checker and exit
-check :: Configuration -> Logger -> Check.Check -> IO ExitCode
-check = Check.check
-
-
---------------------------------------------------------------------------------
--- | Remove the output directories
-clean :: Configuration -> Logger -> IO ()
-clean conf logger = do
- remove $ destinationDirectory conf
- remove $ storeDirectory conf
- remove $ tmpDirectory conf
- where
- remove dir = do
- Logger.header logger $ "Removing " ++ dir ++ "..."
- removeDirectory dir
-
-
---------------------------------------------------------------------------------
--- | Preview the site
-preview :: Configuration -> Logger -> Rules a -> Int -> IO ()
-#ifdef PREVIEW_SERVER
-preview conf logger rules port = do
- deprecatedMessage
- watch conf logger "0.0.0.0" port True rules
- where
- deprecatedMessage = mapM_ putStrLn [ "The preview command has been deprecated."
- , "Use the watch command for recompilation and serving."
- ]
-#else
-preview _ _ _ _ = previewServerDisabled
-#endif
-
-
---------------------------------------------------------------------------------
--- | Watch and recompile for changes
-
-watch :: Configuration -> Logger -> String -> Int -> Bool -> Rules a -> IO ()
-#ifdef WATCH_SERVER
-watch conf logger host port runServer rules = do
-#ifndef mingw32_HOST_OS
- _ <- forkIO $ watchUpdates conf update
-#else
- -- Force windows users to compile with -threaded flag, as otherwise
- -- thread is blocked indefinitely.
- catchIOError (void $ forkOS $ watchUpdates conf update) $ do
- fail $ "Hakyll.Commands.watch: Could not start update watching " ++
- "thread. Did you compile with -threaded flag?"
-#endif
- server'
- where
- update = do
- (_, ruleSet) <- run conf logger rules
- return $ rulesPattern ruleSet
- loop = threadDelay 100000 >> loop
- server' = if runServer then server conf logger host port else loop
-#else
-watch _ _ _ _ _ _ = watchServerDisabled
-#endif
-
---------------------------------------------------------------------------------
--- | Rebuild the site
-rebuild :: Configuration -> Logger -> Rules a -> IO ExitCode
-rebuild conf logger rules =
- clean conf logger >> build conf logger rules
-
---------------------------------------------------------------------------------
--- | Start a server
-server :: Configuration -> Logger -> String -> Int -> IO ()
-#ifdef PREVIEW_SERVER
-server conf logger host port = do
- let destination = destinationDirectory conf
- staticServer logger destination host port
-#else
-server _ _ _ _ = previewServerDisabled
-#endif
-
-
---------------------------------------------------------------------------------
--- | Upload the site
-deploy :: Configuration -> IO ExitCode
-deploy conf = deploySite conf conf
-
-
---------------------------------------------------------------------------------
--- | Print a warning message about the preview serving not being enabled
-#ifndef PREVIEW_SERVER
-previewServerDisabled :: IO ()
-previewServerDisabled =
- mapM_ putStrLn
- [ "PREVIEW SERVER"
- , ""
- , "The preview server is not enabled in the version of Hakyll. To"
- , "enable it, set the flag to True and recompile Hakyll."
- , "Alternatively, use an external tool to serve your site directory."
- ]
-#endif
-
-#ifndef WATCH_SERVER
-watchServerDisabled :: IO ()
-watchServerDisabled =
- mapM_ putStrLn
- [ "WATCH SERVER"
- , ""
- , "The watch server is not enabled in the version of Hakyll. To"
- , "enable it, set the flag to True and recompile Hakyll."
- , "Alternatively, use an external tool to serve your site directory."
- ]
-#endif
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
deleted file mode 100644
index 42b24d6..0000000
--- a/src/Hakyll/Core/Compiler.hs
+++ /dev/null
@@ -1,189 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE ScopedTypeVariables #-}
-module Hakyll.Core.Compiler
- ( Compiler
- , getUnderlying
- , getUnderlyingExtension
- , makeItem
- , getRoute
- , getResourceBody
- , getResourceString
- , getResourceLBS
- , getResourceFilePath
-
- , Internal.Snapshot
- , saveSnapshot
- , Internal.load
- , Internal.loadSnapshot
- , Internal.loadBody
- , Internal.loadSnapshotBody
- , Internal.loadAll
- , Internal.loadAllSnapshots
-
- , cached
- , unsafeCompiler
- , debugCompiler
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (when, unless)
-import Data.Binary (Binary)
-import Data.ByteString.Lazy (ByteString)
-import Data.Typeable (Typeable)
-import System.Environment (getProgName)
-import System.FilePath (takeExtension)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import qualified Hakyll.Core.Compiler.Require as Internal
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Logger as Logger
-import Hakyll.Core.Provider
-import Hakyll.Core.Routes
-import qualified Hakyll.Core.Store as Store
-
-
---------------------------------------------------------------------------------
--- | Get the underlying identifier.
-getUnderlying :: Compiler Identifier
-getUnderlying = compilerUnderlying <$> compilerAsk
-
-
---------------------------------------------------------------------------------
--- | Get the extension of the underlying identifier. Returns something like
--- @".html"@
-getUnderlyingExtension :: Compiler String
-getUnderlyingExtension = takeExtension . toFilePath <$> getUnderlying
-
-
---------------------------------------------------------------------------------
-makeItem :: a -> Compiler (Item a)
-makeItem x = do
- identifier <- getUnderlying
- return $ Item identifier x
-
-
---------------------------------------------------------------------------------
--- | Get the route for a specified item
-getRoute :: Identifier -> Compiler (Maybe FilePath)
-getRoute identifier = do
- provider <- compilerProvider <$> compilerAsk
- routes <- compilerRoutes <$> compilerAsk
- -- Note that this makes us dependent on that identifier: when the metadata
- -- of that item changes, the route may change, hence we have to recompile
- (mfp, um) <- compilerUnsafeIO $ runRoutes routes provider identifier
- when um $ compilerTellDependencies [IdentifierDependency identifier]
- return mfp
-
-
---------------------------------------------------------------------------------
--- | Get the full contents of the matched source file as a string,
--- but without metadata preamble, if there was one.
-getResourceBody :: Compiler (Item String)
-getResourceBody = getResourceWith resourceBody
-
-
---------------------------------------------------------------------------------
--- | Get the full contents of the matched source file as a string.
-getResourceString :: Compiler (Item String)
-getResourceString = getResourceWith resourceString
-
-
---------------------------------------------------------------------------------
--- | Get the full contents of the matched source file as a lazy bytestring.
-getResourceLBS :: Compiler (Item ByteString)
-getResourceLBS = getResourceWith resourceLBS
-
-
---------------------------------------------------------------------------------
--- | Get the file path of the resource we are compiling
-getResourceFilePath :: Compiler FilePath
-getResourceFilePath = do
- provider <- compilerProvider <$> compilerAsk
- id' <- compilerUnderlying <$> compilerAsk
- return $ resourceFilePath provider id'
-
-
---------------------------------------------------------------------------------
--- | Overloadable function for 'getResourceString' and 'getResourceLBS'
-getResourceWith :: (Provider -> Identifier -> IO a) -> Compiler (Item a)
-getResourceWith reader = do
- provider <- compilerProvider <$> compilerAsk
- id' <- compilerUnderlying <$> compilerAsk
- let filePath = toFilePath id'
- if resourceExists provider id'
- then compilerUnsafeIO $ Item id' <$> reader provider id'
- else fail $ error' filePath
- where
- error' fp = "Hakyll.Core.Compiler.getResourceWith: resource " ++
- show fp ++ " not found"
-
-
---------------------------------------------------------------------------------
--- | Save a snapshot of the item. This function returns the same item, which
--- is convenient for building '>>=' chains.
-saveSnapshot :: (Binary a, Typeable a)
- => Internal.Snapshot -> Item a -> Compiler (Item a)
-saveSnapshot snapshot item = do
- store <- compilerStore <$> compilerAsk
- logger <- compilerLogger <$> compilerAsk
- compilerUnsafeIO $ do
- Logger.debug logger $ "Storing snapshot: " ++ snapshot
- Internal.saveSnapshot store snapshot item
-
- -- Signal that we saved the snapshot.
- Compiler $ \_ -> return $ CompilerSnapshot snapshot (return item)
-
-
---------------------------------------------------------------------------------
-cached :: (Binary a, Typeable a)
- => String
- -> Compiler a
- -> Compiler a
-cached name compiler = do
- id' <- compilerUnderlying <$> compilerAsk
- store <- compilerStore <$> compilerAsk
- provider <- compilerProvider <$> compilerAsk
-
- -- Give a better error message when the resource is not there at all.
- unless (resourceExists provider id') $ fail $ itDoesntEvenExist id'
-
- let modified = resourceModified provider id'
- if modified
- then do
- x <- compiler
- compilerUnsafeIO $ Store.set store [name, show id'] x
- return x
- else do
- compilerTellCacheHits 1
- x <- compilerUnsafeIO $ Store.get store [name, show id']
- progName <- compilerUnsafeIO getProgName
- case x of Store.Found x' -> return x'
- _ -> fail $ error' progName
- where
- error' progName =
- "Hakyll.Core.Compiler.cached: Cache corrupt! " ++
- "Try running: " ++ progName ++ " clean"
-
- itDoesntEvenExist id' =
- "Hakyll.Core.Compiler.cached: You are trying to (perhaps " ++
- "indirectly) use `cached` on a non-existing resource: there " ++
- "is no file backing " ++ show id'
-
-
---------------------------------------------------------------------------------
-unsafeCompiler :: IO a -> Compiler a
-unsafeCompiler = compilerUnsafeIO
-
-
---------------------------------------------------------------------------------
--- | Compiler for debugging purposes
-debugCompiler :: String -> Compiler ()
-debugCompiler msg = do
- logger <- compilerLogger <$> compilerAsk
- compilerUnsafeIO $ Logger.debug logger msg
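For context, a minimal sketch of how saveSnapshot is typically used from a site's Rules (illustrative only; match, route, setExtension, pandocCompiler and relativizeUrls live in other Hakyll modules, and the "posts/*" pattern is hypothetical):

> match "posts/*" $ do
>     route $ setExtension "html"
>     compile $ pandocCompiler
>         >>= saveSnapshot "content"   -- can be reloaded later with loadSnapshot
>         >>= relativizeUrls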
diff --git a/src/Hakyll/Core/Compiler/Internal.hs b/src/Hakyll/Core/Compiler/Internal.hs
deleted file mode 100644
index 7b1df83..0000000
--- a/src/Hakyll/Core/Compiler/Internal.hs
+++ /dev/null
@@ -1,265 +0,0 @@
---------------------------------------------------------------------------------
--- | Internally used compiler module
-{-# LANGUAGE FlexibleInstances #-}
-{-# LANGUAGE GADTs #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE MultiParamTypeClasses #-}
-module Hakyll.Core.Compiler.Internal
- ( -- * Types
- Snapshot
- , CompilerRead (..)
- , CompilerWrite (..)
- , CompilerResult (..)
- , Compiler (..)
- , runCompiler
-
- -- * Core operations
- , compilerTell
- , compilerAsk
- , compilerThrow
- , compilerCatch
- , compilerResult
- , compilerUnsafeIO
-
- -- * Utilities
- , compilerTellDependencies
- , compilerTellCacheHits
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative (Alternative (..))
-import Control.Exception (SomeException, handle)
-import Control.Monad (forM_)
-import Control.Monad.Except (MonadError (..))
-import Data.Set (Set)
-import qualified Data.Set as S
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Configuration
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Logger (Logger)
-import qualified Hakyll.Core.Logger as Logger
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider
-import Hakyll.Core.Routes
-import Hakyll.Core.Store
-
-
---------------------------------------------------------------------------------
--- | Whilst compiling an item, it is possible to save multiple snapshots of it, and
--- not just the final result.
-type Snapshot = String
-
-
---------------------------------------------------------------------------------
--- | Environment in which a compiler runs
-data CompilerRead = CompilerRead
- { -- | Main configuration
- compilerConfig :: Configuration
- , -- | Underlying identifier
- compilerUnderlying :: Identifier
- , -- | Resource provider
- compilerProvider :: Provider
- , -- | List of all known identifiers
- compilerUniverse :: Set Identifier
- , -- | Site routes
- compilerRoutes :: Routes
- , -- | Compiler store
- compilerStore :: Store
- , -- | Logger
- compilerLogger :: Logger
- }
-
-
---------------------------------------------------------------------------------
-data CompilerWrite = CompilerWrite
- { compilerDependencies :: [Dependency]
- , compilerCacheHits :: Int
- } deriving (Show)
-
-
---------------------------------------------------------------------------------
-instance Monoid CompilerWrite where
- mempty = CompilerWrite [] 0
- mappend (CompilerWrite d1 h1) (CompilerWrite d2 h2) =
- CompilerWrite (d1 ++ d2) (h1 + h2)
-
-
---------------------------------------------------------------------------------
-data CompilerResult a where
- CompilerDone :: a -> CompilerWrite -> CompilerResult a
- CompilerSnapshot :: Snapshot -> Compiler a -> CompilerResult a
- CompilerError :: [String] -> CompilerResult a
- CompilerRequire :: (Identifier, Snapshot) -> Compiler a -> CompilerResult a
-
-
---------------------------------------------------------------------------------
--- | A monad which lets you compile items and takes care of dependency tracking
--- for you.
-newtype Compiler a = Compiler
- { unCompiler :: CompilerRead -> IO (CompilerResult a)
- }
-
-
---------------------------------------------------------------------------------
-instance Functor Compiler where
- fmap f (Compiler c) = Compiler $ \r -> do
- res <- c r
- return $ case res of
- CompilerDone x w -> CompilerDone (f x) w
- CompilerSnapshot s c' -> CompilerSnapshot s (fmap f c')
- CompilerError e -> CompilerError e
- CompilerRequire i c' -> CompilerRequire i (fmap f c')
- {-# INLINE fmap #-}
-
-
---------------------------------------------------------------------------------
-instance Monad Compiler where
- return x = Compiler $ \_ -> return $ CompilerDone x mempty
- {-# INLINE return #-}
-
- Compiler c >>= f = Compiler $ \r -> do
- res <- c r
- case res of
- CompilerDone x w -> do
- res' <- unCompiler (f x) r
- return $ case res' of
- CompilerDone y w' -> CompilerDone y (w `mappend` w')
- CompilerSnapshot s c' -> CompilerSnapshot s $ do
- compilerTell w -- Save dependencies!
- c'
- CompilerError e -> CompilerError e
- CompilerRequire i c' -> CompilerRequire i $ do
- compilerTell w -- Save dependencies!
- c'
-
- CompilerSnapshot s c' -> return $ CompilerSnapshot s (c' >>= f)
- CompilerError e -> return $ CompilerError e
- CompilerRequire i c' -> return $ CompilerRequire i (c' >>= f)
- {-# INLINE (>>=) #-}
-
- fail = compilerThrow . return
- {-# INLINE fail #-}
-
-
---------------------------------------------------------------------------------
-instance Applicative Compiler where
- pure x = return x
- {-# INLINE pure #-}
-
- f <*> x = f >>= \f' -> fmap f' x
- {-# INLINE (<*>) #-}
-
-
---------------------------------------------------------------------------------
-instance MonadMetadata Compiler where
- getMetadata = compilerGetMetadata
- getMatches = compilerGetMatches
-
-
---------------------------------------------------------------------------------
-instance MonadError [String] Compiler where
- throwError = compilerThrow
- catchError = compilerCatch
-
-
---------------------------------------------------------------------------------
-runCompiler :: Compiler a -> CompilerRead -> IO (CompilerResult a)
-runCompiler compiler read' = handle handler $ unCompiler compiler read'
- where
- handler :: SomeException -> IO (CompilerResult a)
- handler e = return $ CompilerError [show e]
-
-
---------------------------------------------------------------------------------
-instance Alternative Compiler where
- empty = compilerThrow []
- x <|> y = compilerCatch x $ \es -> do
- logger <- compilerLogger <$> compilerAsk
- forM_ es $ \e -> compilerUnsafeIO $ Logger.debug logger $
- "Hakyll.Core.Compiler.Internal: Alternative failed: " ++ e
- y
- {-# INLINE (<|>) #-}
-
-
---------------------------------------------------------------------------------
-compilerAsk :: Compiler CompilerRead
-compilerAsk = Compiler $ \r -> return $ CompilerDone r mempty
-{-# INLINE compilerAsk #-}
-
-
---------------------------------------------------------------------------------
-compilerTell :: CompilerWrite -> Compiler ()
-compilerTell deps = Compiler $ \_ -> return $ CompilerDone () deps
-{-# INLINE compilerTell #-}
-
-
---------------------------------------------------------------------------------
-compilerThrow :: [String] -> Compiler a
-compilerThrow es = Compiler $ \_ -> return $ CompilerError es
-{-# INLINE compilerThrow #-}
-
-
---------------------------------------------------------------------------------
-compilerCatch :: Compiler a -> ([String] -> Compiler a) -> Compiler a
-compilerCatch (Compiler x) f = Compiler $ \r -> do
- res <- x r
- case res of
- CompilerDone res' w -> return (CompilerDone res' w)
- CompilerSnapshot s c -> return (CompilerSnapshot s (compilerCatch c f))
- CompilerError e -> unCompiler (f e) r
- CompilerRequire i c -> return (CompilerRequire i (compilerCatch c f))
-{-# INLINE compilerCatch #-}
-
-
---------------------------------------------------------------------------------
--- | Put the result back in a compiler
-compilerResult :: CompilerResult a -> Compiler a
-compilerResult x = Compiler $ \_ -> return x
-{-# INLINE compilerResult #-}
-
-
---------------------------------------------------------------------------------
-compilerUnsafeIO :: IO a -> Compiler a
-compilerUnsafeIO io = Compiler $ \_ -> do
- x <- io
- return $ CompilerDone x mempty
-{-# INLINE compilerUnsafeIO #-}
-
-
---------------------------------------------------------------------------------
-compilerTellDependencies :: [Dependency] -> Compiler ()
-compilerTellDependencies ds = do
- logger <- compilerLogger <$> compilerAsk
- forM_ ds $ \d -> compilerUnsafeIO $ Logger.debug logger $
- "Hakyll.Core.Compiler.Internal: Adding dependency: " ++ show d
- compilerTell mempty {compilerDependencies = ds}
-{-# INLINE compilerTellDependencies #-}
-
-
---------------------------------------------------------------------------------
-compilerTellCacheHits :: Int -> Compiler ()
-compilerTellCacheHits ch = compilerTell mempty {compilerCacheHits = ch}
-{-# INLINE compilerTellCacheHits #-}
-
-
---------------------------------------------------------------------------------
-compilerGetMetadata :: Identifier -> Compiler Metadata
-compilerGetMetadata identifier = do
- provider <- compilerProvider <$> compilerAsk
- compilerTellDependencies [IdentifierDependency identifier]
- compilerUnsafeIO $ resourceMetadata provider identifier
-
-
---------------------------------------------------------------------------------
-compilerGetMatches :: Pattern -> Compiler [Identifier]
-compilerGetMatches pattern = do
- universe <- compilerUniverse <$> compilerAsk
- let matching = filterMatches pattern $ S.toList universe
- set' = S.fromList matching
- compilerTellDependencies [PatternDependency pattern set']
- return matching
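The Alternative instance above logs each failure at debug level and then runs the fallback compiler, so a sketch like the following (illustrative only; getResourceBody and makeItem come from Hakyll.Core.Compiler) falls back to an empty item when the first compiler fails:

> getResourceBody <|> makeItem ""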
diff --git a/src/Hakyll/Core/Compiler/Require.hs b/src/Hakyll/Core/Compiler/Require.hs
deleted file mode 100644
index c9373bf..0000000
--- a/src/Hakyll/Core/Compiler/Require.hs
+++ /dev/null
@@ -1,121 +0,0 @@
---------------------------------------------------------------------------------
-module Hakyll.Core.Compiler.Require
- ( Snapshot
- , save
- , saveSnapshot
- , load
- , loadSnapshot
- , loadBody
- , loadSnapshotBody
- , loadAll
- , loadAllSnapshots
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (when)
-import Data.Binary (Binary)
-import qualified Data.Set as S
-import Data.Typeable
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Store (Store)
-import qualified Hakyll.Core.Store as Store
-
-
---------------------------------------------------------------------------------
-save :: (Binary a, Typeable a) => Store -> Item a -> IO ()
-save store item = saveSnapshot store final item
-
-
---------------------------------------------------------------------------------
--- | Save a specific snapshot of an item, so you can load it later using
--- 'loadSnapshot'.
-saveSnapshot :: (Binary a, Typeable a)
- => Store -> Snapshot -> Item a -> IO ()
-saveSnapshot store snapshot item =
- Store.set store (key (itemIdentifier item) snapshot) (itemBody item)
-
-
---------------------------------------------------------------------------------
--- | Load an item compiled elsewhere. If the required item is not yet compiled,
--- the build system will take care of that automatically.
-load :: (Binary a, Typeable a) => Identifier -> Compiler (Item a)
-load id' = loadSnapshot id' final
-
-
---------------------------------------------------------------------------------
--- | Require a specific snapshot of an item.
-loadSnapshot :: (Binary a, Typeable a)
- => Identifier -> Snapshot -> Compiler (Item a)
-loadSnapshot id' snapshot = do
- store <- compilerStore <$> compilerAsk
- universe <- compilerUniverse <$> compilerAsk
-
- -- Quick check for better error messages
- when (id' `S.notMember` universe) $ fail notFound
-
- compilerTellDependencies [IdentifierDependency id']
- compilerResult $ CompilerRequire (id', snapshot) $ do
- result <- compilerUnsafeIO $ Store.get store (key id' snapshot)
- case result of
- Store.NotFound -> fail notFound
- Store.WrongType e r -> fail $ wrongType e r
- Store.Found x -> return $ Item id' x
- where
- notFound =
- "Hakyll.Core.Compiler.Require.load: " ++ show id' ++
- " (snapshot " ++ snapshot ++ ") was not found in the cache, " ++
- "the cache might be corrupted or " ++
- "the item you are referring to might not exist"
- wrongType e r =
- "Hakyll.Core.Compiler.Require.load: " ++ show id' ++
- " (snapshot " ++ snapshot ++ ") was found in the cache, " ++
- "but does not have the right type: expected " ++ show e ++
- " but got " ++ show r
-
-
---------------------------------------------------------------------------------
--- | A shortcut for only requiring the body of an item.
---
--- > loadBody = fmap itemBody . load
-loadBody :: (Binary a, Typeable a) => Identifier -> Compiler a
-loadBody id' = loadSnapshotBody id' final
-
-
---------------------------------------------------------------------------------
-loadSnapshotBody :: (Binary a, Typeable a)
- => Identifier -> Snapshot -> Compiler a
-loadSnapshotBody id' snapshot = fmap itemBody $ loadSnapshot id' snapshot
-
-
---------------------------------------------------------------------------------
--- | This function allows you to 'load' a dynamic list of items
-loadAll :: (Binary a, Typeable a) => Pattern -> Compiler [Item a]
-loadAll pattern = loadAllSnapshots pattern final
-
-
---------------------------------------------------------------------------------
-loadAllSnapshots :: (Binary a, Typeable a)
- => Pattern -> Snapshot -> Compiler [Item a]
-loadAllSnapshots pattern snapshot = do
- matching <- getMatches pattern
- mapM (\i -> loadSnapshot i snapshot) matching
-
-
---------------------------------------------------------------------------------
-key :: Identifier -> String -> [String]
-key identifier snapshot =
- ["Hakyll.Core.Compiler.Require", show identifier, snapshot]
-
-
---------------------------------------------------------------------------------
-final :: Snapshot
-final = "_final"
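A small illustrative sketch of loading items compiled elsewhere (recentFirst comes from Hakyll.Web.Template.List; the "posts/*" pattern is hypothetical). The type annotation pins the item type that the cache lookup deserialises to:

> -- inside some other rule's Compiler block:
> posts <- recentFirst =<< (loadAll "posts/*" :: Compiler [Item String])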
diff --git a/src/Hakyll/Core/Configuration.hs b/src/Hakyll/Core/Configuration.hs
deleted file mode 100644
index 52b23ec..0000000
--- a/src/Hakyll/Core/Configuration.hs
+++ /dev/null
@@ -1,134 +0,0 @@
---------------------------------------------------------------------------------
--- | Exports a datastructure for the top-level hakyll configuration
-module Hakyll.Core.Configuration
- ( Configuration (..)
- , shouldIgnoreFile
- , defaultConfiguration
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Default (Default (..))
-import Data.List (isPrefixOf, isSuffixOf)
-import System.Directory (canonicalizePath)
-import System.Exit (ExitCode)
-import System.FilePath (isAbsolute, normalise, takeFileName)
-import System.IO.Error (catchIOError)
-import System.Process (system)
-
-
---------------------------------------------------------------------------------
-data Configuration = Configuration
- { -- | Directory in which the output is written
- destinationDirectory :: FilePath
- , -- | Directory where hakyll's internal store is kept
- storeDirectory :: FilePath
- , -- | Directory in which some temporary files will be kept
- tmpDirectory :: FilePath
- , -- | Directory where hakyll finds the files to compile. This is @.@ by
- -- default.
- providerDirectory :: FilePath
- , -- | Function to determine ignored files
- --
- -- In 'defaultConfiguration', the following files are ignored:
- --
- -- * files starting with a @.@
- --
- -- * files starting with a @#@
- --
- -- * files ending with a @~@
- --
- -- * files ending with @.swp@
- --
- -- Note that the files in 'destinationDirectory' and 'storeDirectory' will
- -- also be ignored. Note that this is the configuration parameter, if you
- -- want to use the test, you should use 'shouldIgnoreFile'.
- --
- ignoreFile :: FilePath -> Bool
- , -- | Here, you can plug in a system command to upload/deploy your site.
- --
- -- Example:
- --
- -- > rsync -ave 'ssh -p 2217' _site jaspervdj@jaspervdj.be:hakyll
- --
- -- You can execute this by using
- --
- -- > ./site deploy
- --
- deployCommand :: String
- , -- | Function to deploy the site from Haskell.
- --
- -- By default, this command executes the shell command stored in
- -- 'deployCommand'. If you override it, 'deployCommand' will not
--- be used implicitly.
- --
- -- The 'Configuration' object is passed as a parameter to this
- -- function.
- --
- deploySite :: Configuration -> IO ExitCode
- , -- | Use an in-memory cache for items. This is faster but uses more
- -- memory.
- inMemoryCache :: Bool
- , -- | Override default host for preview server. Default is "127.0.0.1",
- -- which binds only on the loopback address.
- -- One can also override the host as a command line argument:
- -- ./site preview -h "0.0.0.0"
- previewHost :: String
- , -- | Override default port for preview server. Default is 8000.
- -- One can also override the port as a command line argument:
- -- ./site preview -p 1234
- previewPort :: Int
- }
-
---------------------------------------------------------------------------------
-instance Default Configuration where
- def = defaultConfiguration
-
---------------------------------------------------------------------------------
--- | Default configuration for a hakyll application
-defaultConfiguration :: Configuration
-defaultConfiguration = Configuration
- { destinationDirectory = "_site"
- , storeDirectory = "_cache"
- , tmpDirectory = "_cache/tmp"
- , providerDirectory = "."
- , ignoreFile = ignoreFile'
- , deployCommand = "echo 'No deploy command specified' && exit 1"
- , deploySite = system . deployCommand
- , inMemoryCache = True
- , previewHost = "127.0.0.1"
- , previewPort = 8000
- }
- where
- ignoreFile' path
- | "." `isPrefixOf` fileName = True
- | "#" `isPrefixOf` fileName = True
- | "~" `isSuffixOf` fileName = True
- | ".swp" `isSuffixOf` fileName = True
- | otherwise = False
- where
- fileName = takeFileName path
-
-
---------------------------------------------------------------------------------
--- | Check if a file should be ignored
-shouldIgnoreFile :: Configuration -> FilePath -> IO Bool
-shouldIgnoreFile conf path = orM
- [ inDir (destinationDirectory conf)
- , inDir (storeDirectory conf)
- , inDir (tmpDirectory conf)
- , return (ignoreFile conf path')
- ]
- where
- path' = normalise path
- absolute = isAbsolute path
-
- inDir dir
- | absolute = do
- dir' <- catchIOError (canonicalizePath dir) (const $ return dir)
- return $ dir' `isPrefixOf` path'
- | otherwise = return $ dir `isPrefixOf` path'
-
- orM :: [IO Bool] -> IO Bool
- orM [] = return False
- orM (x : xs) = x >>= \b -> if b then return True else orM xs
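A minimal sketch of overriding defaultConfiguration (illustrative only, assuming the usual Rules/Routes DSL (match, route, idRoute), hakyllWith from Hakyll.Main and compressCssCompiler from Hakyll.Web.CompressCss; the directory name, port and glob are hypothetical):

> config :: Configuration
> config = defaultConfiguration
>     { providerDirectory = "content"
>     , previewPort       = 8001
>     }
>
> main :: IO ()
> main = hakyllWith config $
>     match "css/*" $ do
>         route   idRoute
>         compile compressCssCompiler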
diff --git a/src/Hakyll/Core/Dependencies.hs b/src/Hakyll/Core/Dependencies.hs
deleted file mode 100644
index 4a51b9c..0000000
--- a/src/Hakyll/Core/Dependencies.hs
+++ /dev/null
@@ -1,146 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE DeriveDataTypeable #-}
-module Hakyll.Core.Dependencies
- ( Dependency (..)
- , DependencyFacts
- , outOfDate
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (foldM, forM_, unless, when)
-import Control.Monad.Reader (ask)
-import Control.Monad.RWS (RWS, runRWS)
-import qualified Control.Monad.State as State
-import Control.Monad.Writer (tell)
-import Data.Binary (Binary (..), getWord8,
- putWord8)
-import Data.List (find)
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Maybe (fromMaybe)
-import Data.Set (Set)
-import qualified Data.Set as S
-import Data.Typeable (Typeable)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-
-
---------------------------------------------------------------------------------
-data Dependency
- = PatternDependency Pattern (Set Identifier)
- | IdentifierDependency Identifier
- deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary Dependency where
- put (PatternDependency p is) = putWord8 0 >> put p >> put is
- put (IdentifierDependency i) = putWord8 1 >> put i
- get = getWord8 >>= \t -> case t of
- 0 -> PatternDependency <$> get <*> get
- 1 -> IdentifierDependency <$> get
- _ -> error "Data.Binary.get: Invalid Dependency"
-
-
---------------------------------------------------------------------------------
-type DependencyFacts = Map Identifier [Dependency]
-
-
---------------------------------------------------------------------------------
-outOfDate
- :: [Identifier] -- ^ All known identifiers
- -> Set Identifier -- ^ Initially out-of-date resources
- -> DependencyFacts -- ^ Old dependency facts
- -> (Set Identifier, DependencyFacts, [String])
-outOfDate universe ood oldFacts =
- let (_, state, logs) = runRWS rws universe (DependencyState oldFacts ood)
- in (dependencyOod state, dependencyFacts state, logs)
- where
- rws = do
- checkNew
- checkChangedPatterns
- bruteForce
-
-
---------------------------------------------------------------------------------
-data DependencyState = DependencyState
- { dependencyFacts :: DependencyFacts
- , dependencyOod :: Set Identifier
- } deriving (Show)
-
-
---------------------------------------------------------------------------------
-type DependencyM a = RWS [Identifier] [String] DependencyState a
-
-
---------------------------------------------------------------------------------
-markOod :: Identifier -> DependencyM ()
-markOod id' = State.modify $ \s ->
- s {dependencyOod = S.insert id' $ dependencyOod s}
-
-
---------------------------------------------------------------------------------
-dependenciesFor :: Identifier -> DependencyM [Identifier]
-dependenciesFor id' = do
- facts <- dependencyFacts <$> State.get
- return $ concatMap dependenciesFor' $ fromMaybe [] $ M.lookup id' facts
- where
- dependenciesFor' (IdentifierDependency i) = [i]
- dependenciesFor' (PatternDependency _ is) = S.toList is
-
-
---------------------------------------------------------------------------------
-checkNew :: DependencyM ()
-checkNew = do
- universe <- ask
- facts <- dependencyFacts <$> State.get
- forM_ universe $ \id' -> unless (id' `M.member` facts) $ do
- tell [show id' ++ " is out-of-date because it is new"]
- markOod id'
-
-
---------------------------------------------------------------------------------
-checkChangedPatterns :: DependencyM ()
-checkChangedPatterns = do
- facts <- M.toList . dependencyFacts <$> State.get
- forM_ facts $ \(id', deps) -> do
- deps' <- foldM (go id') [] deps
- State.modify $ \s -> s
- {dependencyFacts = M.insert id' deps' $ dependencyFacts s}
- where
- go _ ds (IdentifierDependency i) = return $ IdentifierDependency i : ds
- go id' ds (PatternDependency p ls) = do
- universe <- ask
- let ls' = S.fromList $ filterMatches p universe
- if ls == ls'
- then return $ PatternDependency p ls : ds
- else do
- tell [show id' ++ " is out-of-date because a pattern changed"]
- markOod id'
- return $ PatternDependency p ls' : ds
-
-
---------------------------------------------------------------------------------
-bruteForce :: DependencyM ()
-bruteForce = do
- todo <- ask
- go todo
- where
- go todo = do
- (todo', changed) <- foldM check ([], False) todo
- when changed (go todo')
-
- check (todo, changed) id' = do
- deps <- dependenciesFor id'
- ood <- dependencyOod <$> State.get
- case find (`S.member` ood) deps of
- Nothing -> return (id' : todo, changed)
- Just d -> do
- tell [show id' ++ " is out-of-date because " ++
- show d ++ " is out-of-date"]
- markOod id'
- return (todo, True)
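For reference, a tiny illustrative sketch of outOfDate: with no recorded facts, every identifier is reported as out-of-date because it is new (S and M abbreviate Data.Set and Data.Map):

> let universe = map fromFilePath ["a.md", "b.md"]
>     (ood, _facts, msgs) = outOfDate universe S.empty M.empty
> -- ood  == S.fromList universe
> -- msgs == ["a.md is out-of-date because it is new", ...]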
diff --git a/src/Hakyll/Core/File.hs b/src/Hakyll/Core/File.hs
deleted file mode 100644
index 49af659..0000000
--- a/src/Hakyll/Core/File.hs
+++ /dev/null
@@ -1,93 +0,0 @@
---------------------------------------------------------------------------------
--- | Exports simple compilers to just copy files
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Core.File
- ( CopyFile (..)
- , copyFileCompiler
- , TmpFile (..)
- , newTmpFile
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Binary (Binary (..))
-import Data.Typeable (Typeable)
-#if MIN_VERSION_directory(1,2,6)
-import System.Directory (copyFileWithMetadata)
-#else
-import System.Directory (copyFile)
-#endif
-import System.Directory (doesFileExist,
- renameFile)
-import System.FilePath ((</>))
-import System.Random (randomIO)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Configuration
-import Hakyll.Core.Item
-import Hakyll.Core.Provider
-import qualified Hakyll.Core.Store as Store
-import Hakyll.Core.Util.File
-import Hakyll.Core.Writable
-
-
---------------------------------------------------------------------------------
--- | This will copy any file directly by using a system call
-newtype CopyFile = CopyFile FilePath
- deriving (Binary, Eq, Ord, Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Writable CopyFile where
-#if MIN_VERSION_directory(1,2,6)
- write dst (Item _ (CopyFile src)) = copyFileWithMetadata src dst
-#else
- write dst (Item _ (CopyFile src)) = copyFile src dst
-#endif
---------------------------------------------------------------------------------
-copyFileCompiler :: Compiler (Item CopyFile)
-copyFileCompiler = do
- identifier <- getUnderlying
- provider <- compilerProvider <$> compilerAsk
- makeItem $ CopyFile $ resourceFilePath provider identifier
-
-
---------------------------------------------------------------------------------
-newtype TmpFile = TmpFile FilePath
- deriving (Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary TmpFile where
- put _ = return ()
- get = error $
- "Hakyll.Core.File.TmpFile: You tried to load a TmpFile, however, " ++
- "this is not possible since these are deleted as soon as possible."
-
-
---------------------------------------------------------------------------------
-instance Writable TmpFile where
- write dst (Item _ (TmpFile fp)) = renameFile fp dst
-
-
---------------------------------------------------------------------------------
--- | Create a tmp file
-newTmpFile :: String -- ^ Suffix and extension
- -> Compiler TmpFile -- ^ Resulting tmp path
-newTmpFile suffix = do
- path <- mkPath
- compilerUnsafeIO $ makeDirectories path
- debugCompiler $ "newTmpFile " ++ path
- return $ TmpFile path
- where
- mkPath = do
- rand <- compilerUnsafeIO $ randomIO :: Compiler Int
- tmp <- tmpDirectory . compilerConfig <$> compilerAsk
- let path = tmp </> Store.hash [show rand] ++ "-" ++ suffix
- exists <- compilerUnsafeIO $ doesFileExist path
- if exists then mkPath else return path
diff --git a/src/Hakyll/Core/Identifier.hs b/src/Hakyll/Core/Identifier.hs
deleted file mode 100644
index 777811c..0000000
--- a/src/Hakyll/Core/Identifier.hs
+++ /dev/null
@@ -1,80 +0,0 @@
---------------------------------------------------------------------------------
--- | An identifier is a type used to uniquely identify an item. An identifier is
--- conceptually similar to a file path. Examples of identifiers are:
---
--- * @posts/foo.markdown@
---
--- * @index@
---
--- * @error/404@
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Core.Identifier
- ( Identifier
- , fromFilePath
- , toFilePath
- , identifierVersion
- , setVersion
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.DeepSeq (NFData (..))
-import Data.List (intercalate)
-import System.FilePath (dropTrailingPathSeparator, splitPath)
-
-
---------------------------------------------------------------------------------
-import Data.Binary (Binary (..))
-import Data.Typeable (Typeable)
-import GHC.Exts (IsString, fromString)
-
-
---------------------------------------------------------------------------------
-data Identifier = Identifier
- { identifierVersion :: Maybe String
- , identifierPath :: String
- } deriving (Eq, Ord, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary Identifier where
- put (Identifier v p) = put v >> put p
- get = Identifier <$> get <*> get
-
-
---------------------------------------------------------------------------------
-instance IsString Identifier where
- fromString = fromFilePath
-
-
---------------------------------------------------------------------------------
-instance NFData Identifier where
- rnf (Identifier v p) = rnf v `seq` rnf p `seq` ()
-
-
---------------------------------------------------------------------------------
-instance Show Identifier where
- show i = case identifierVersion i of
- Nothing -> toFilePath i
- Just v -> toFilePath i ++ " (" ++ v ++ ")"
-
-
---------------------------------------------------------------------------------
--- | Parse an identifier from a string
-fromFilePath :: String -> Identifier
-fromFilePath = Identifier Nothing .
- intercalate "/" . filter (not . null) . split'
- where
- split' = map dropTrailingPathSeparator . splitPath
-
-
---------------------------------------------------------------------------------
--- | Convert an identifier to a relative 'FilePath'
-toFilePath :: Identifier -> FilePath
-toFilePath = identifierPath
-
-
---------------------------------------------------------------------------------
-setVersion :: Maybe String -> Identifier -> Identifier
-setVersion v i = i {identifierVersion = v}
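A minimal sketch (not part of the diff) exercising the identifier API defined above: build identifiers from file paths, attach a version, and convert back.

import Hakyll.Core.Identifier

main :: IO ()
main = do
    let post = fromFilePath "posts/foo.markdown"
        pdf  = setVersion (Just "pdf") post
    putStrLn (toFilePath post)      -- posts/foo.markdown
    print (identifierVersion pdf)   -- Just "pdf"
    print pdf                       -- posts/foo.markdown (pdf)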
diff --git a/src/Hakyll/Core/Identifier/Pattern.hs b/src/Hakyll/Core/Identifier/Pattern.hs
deleted file mode 100644
index 47ad21b..0000000
--- a/src/Hakyll/Core/Identifier/Pattern.hs
+++ /dev/null
@@ -1,322 +0,0 @@
---------------------------------------------------------------------------------
--- | As 'Identifier' is used to specify a single item, a 'Pattern' is used to
--- specify a list of items.
---
--- In most cases, globs are used for patterns.
---
--- A very simple example of such a pattern is @\"foo\/bar\"@. This pattern will
--- only match the exact @foo\/bar@ identifier.
---
--- To match more than one identifier, there are different captures that one can
--- use:
---
--- * @\"*\"@: matches at most one element of an identifier;
---
--- * @\"**\"@: matches one or more elements of an identifier.
---
--- Some examples:
---
--- * @\"foo\/*\"@ will match @\"foo\/bar\"@ and @\"foo\/foo\"@, but not
--- @\"foo\/bar\/qux\"@;
---
--- * @\"**\"@ will match any identifier;
---
--- * @\"foo\/**\"@ will match @\"foo\/bar\"@ and @\"foo\/bar\/qux\"@, but not
--- @\"bar\/foo\"@;
---
--- * @\"foo\/*.html\"@ will match all HTML files in the @\"foo\/\"@ directory.
---
--- The 'capture' function allows the user to get access to the elements captured
--- by the capture elements in the pattern.
-module Hakyll.Core.Identifier.Pattern
- ( -- * The pattern type
- Pattern
-
- -- * Creating patterns
- , fromGlob
- , fromList
- , fromRegex
- , fromVersion
- , hasVersion
- , hasNoVersion
-
- -- * Composing patterns
- , (.&&.)
- , (.||.)
- , complement
-
- -- * Applying patterns
- , matches
- , filterMatches
-
- -- * Capturing strings
- , capture
- , fromCapture
- , fromCaptures
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Arrow ((&&&), (>>>))
-import Control.Monad (msum)
-import Data.Binary (Binary (..), getWord8, putWord8)
-import Data.List (inits, isPrefixOf, tails)
-import Data.Maybe (isJust)
-import Data.Set (Set)
-import qualified Data.Set as S
-
-
---------------------------------------------------------------------------------
-import GHC.Exts (IsString, fromString)
-import Text.Regex.TDFA ((=~))
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-
-
---------------------------------------------------------------------------------
--- | Elements of a glob pattern
-data GlobComponent
- = Capture
- | CaptureMany
- | Literal String
- deriving (Eq, Show)
-
-
---------------------------------------------------------------------------------
-instance Binary GlobComponent where
- put Capture = putWord8 0
- put CaptureMany = putWord8 1
- put (Literal s) = putWord8 2 >> put s
-
- get = getWord8 >>= \t -> case t of
- 0 -> pure Capture
- 1 -> pure CaptureMany
- 2 -> Literal <$> get
- _ -> error "Data.Binary.get: Invalid GlobComponent"
-
-
---------------------------------------------------------------------------------
--- | Type that allows matching on identifiers
-data Pattern
- = Everything
- | Complement Pattern
- | And Pattern Pattern
- | Glob [GlobComponent]
- | List (Set Identifier)
- | Regex String
- | Version (Maybe String)
- deriving (Show)
-
-
---------------------------------------------------------------------------------
-instance Binary Pattern where
- put Everything = putWord8 0
- put (Complement p) = putWord8 1 >> put p
- put (And x y) = putWord8 2 >> put x >> put y
- put (Glob g) = putWord8 3 >> put g
- put (List is) = putWord8 4 >> put is
- put (Regex r) = putWord8 5 >> put r
- put (Version v) = putWord8 6 >> put v
-
- get = getWord8 >>= \t -> case t of
- 0 -> pure Everything
- 1 -> Complement <$> get
- 2 -> And <$> get <*> get
- 3 -> Glob <$> get
- 4 -> List <$> get
- 5 -> Regex <$> get
- _ -> Version <$> get
-
-
---------------------------------------------------------------------------------
-instance IsString Pattern where
- fromString = fromGlob
-
-
---------------------------------------------------------------------------------
-instance Monoid Pattern where
- mempty = Everything
- mappend = (.&&.)
-
-
---------------------------------------------------------------------------------
--- | Parse a pattern from a string
-fromGlob :: String -> Pattern
-fromGlob = Glob . parse'
- where
- parse' str =
- let (chunk, rest) = break (`elem` "\\*") str
- in case rest of
- ('\\' : x : xs) -> Literal (chunk ++ [x]) : parse' xs
- ('*' : '*' : xs) -> Literal chunk : CaptureMany : parse' xs
- ('*' : xs) -> Literal chunk : Capture : parse' xs
- xs -> Literal chunk : Literal xs : []
-
-
---------------------------------------------------------------------------------
--- | Create a 'Pattern' from a list of 'Identifier's it should match.
---
--- /Warning/: use this carefully with 'hasNoVersion' and 'hasVersion'. The
--- 'Identifier's in the list /already/ have versions assigned, and the pattern
--- will then only match the intersection of both versions.
---
--- A more concrete example,
---
--- > fromList ["foo.markdown"] .&&. hasVersion "pdf"
---
--- will not match anything! The @"foo.markdown"@ 'Identifier' has no version
--- assigned, so the LHS of '.&&.' will only match this 'Identifier' with no
--- version. The RHS only matches 'Identifier's with version set to @"pdf"@ --
--- hence, this pattern matches nothing.
---
--- The correct way to use this is:
---
--- > fromList $ map (setVersion $ Just "pdf") ["foo.markdown"]
-fromList :: [Identifier] -> Pattern
-fromList = List . S.fromList
-
-
---------------------------------------------------------------------------------
--- | Create a 'Pattern' from a regex
---
--- Example:
---
--- > fromRegex "^foo/[^x]*$"
-fromRegex :: String -> Pattern
-fromRegex = Regex
-
-
---------------------------------------------------------------------------------
--- | Create a pattern which matches all items with the given version.
-fromVersion :: Maybe String -> Pattern
-fromVersion = Version
-
-
---------------------------------------------------------------------------------
--- | Specify a version, e.g.
---
--- > "foo/*.markdown" .&&. hasVersion "pdf"
-hasVersion :: String -> Pattern
-hasVersion = fromVersion . Just
-
-
---------------------------------------------------------------------------------
--- | Match only if the identifier has no version set, e.g.
---
--- > "foo/*.markdown" .&&. hasNoVersion
-hasNoVersion :: Pattern
-hasNoVersion = fromVersion Nothing
-
-
---------------------------------------------------------------------------------
--- | '&&' for patterns: the given identifier must match both subterms
-(.&&.) :: Pattern -> Pattern -> Pattern
-x .&&. y = And x y
-infixr 3 .&&.
-
-
---------------------------------------------------------------------------------
--- | '||' for patterns: the given identifier must match any subterm
-(.||.) :: Pattern -> Pattern -> Pattern
-x .||. y = complement (complement x `And` complement y) -- De Morgan's law
-infixr 2 .||.
-
-
---------------------------------------------------------------------------------
--- | Inverts a pattern, e.g.
---
--- > complement "foo/bar.html"
---
--- will match /anything/ except @\"foo\/bar.html\"@
-complement :: Pattern -> Pattern
-complement = Complement
-
-
---------------------------------------------------------------------------------
--- | Check if an identifier matches a pattern
-matches :: Pattern -> Identifier -> Bool
-matches Everything _ = True
-matches (Complement p) i = not $ matches p i
-matches (And x y) i = matches x i && matches y i
-matches (Glob p) i = isJust $ capture (Glob p) i
-matches (List l) i = i `S.member` l
-matches (Regex r) i = toFilePath i =~ r
-matches (Version v) i = identifierVersion i == v
-
-
---------------------------------------------------------------------------------
--- | Given a list of identifiers, retain only those that match the given pattern
-filterMatches :: Pattern -> [Identifier] -> [Identifier]
-filterMatches = filter . matches
-
-
---------------------------------------------------------------------------------
--- | Split a list at every possible point, generate a list of (init, tail)
--- cases. The result is sorted with inits decreasing in length.
-splits :: [a] -> [([a], [a])]
-splits = inits &&& tails >>> uncurry zip >>> reverse
-
-
---------------------------------------------------------------------------------
--- | Match a glob against a pattern, generating a list of captures
-capture :: Pattern -> Identifier -> Maybe [String]
-capture (Glob p) i = capture' p (toFilePath i)
-capture _ _ = Nothing
-
-
---------------------------------------------------------------------------------
--- | Internal version of 'capture'
-capture' :: [GlobComponent] -> String -> Maybe [String]
-capture' [] [] = Just [] -- An empty match
-capture' [] _ = Nothing -- No match
-capture' (Literal l : ms) str
- -- Match the literal against the string
- | l `isPrefixOf` str = capture' ms $ drop (length l) str
- | otherwise = Nothing
-capture' (Capture : ms) str =
- -- Match until the next /
- let (chunk, rest) = break (== '/') str
- in msum $ [ fmap (i :) (capture' ms (t ++ rest)) | (i, t) <- splits chunk ]
-capture' (CaptureMany : ms) str =
- -- Match everything
- msum $ [ fmap (i :) (capture' ms t) | (i, t) <- splits str ]
-
-
---------------------------------------------------------------------------------
--- | Create an identifier from a pattern by filling in the captures with a given
--- string
---
--- Example:
---
--- > fromCapture (fromGlob "tags/*") "foo"
---
--- Result:
---
--- > "tags/foo"
-fromCapture :: Pattern -> String -> Identifier
-fromCapture pattern = fromCaptures pattern . repeat
-
-
---------------------------------------------------------------------------------
--- | Create an identifier from a pattern by filling in the captures with the
--- given list of strings
-fromCaptures :: Pattern -> [String] -> Identifier
-fromCaptures (Glob p) = fromFilePath . fromCaptures' p
-fromCaptures _ = error $
- "Hakyll.Core.Identifier.Pattern.fromCaptures: fromCaptures only works " ++
- "on simple globs!"
-
-
---------------------------------------------------------------------------------
--- | Internally used version of 'fromCaptures'
-fromCaptures' :: [GlobComponent] -> [String] -> String
-fromCaptures' [] _ = mempty
-fromCaptures' (m : ms) [] = case m of
- Literal l -> l `mappend` fromCaptures' ms []
- _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures': "
- ++ "identifier list exhausted"
-fromCaptures' (m : ms) ids@(i : is) = case m of
- Literal l -> l `mappend` fromCaptures' ms ids
- _ -> i `mappend` fromCaptures' ms is
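A minimal sketch (not part of the diff) of the glob machinery above: matching, capturing, composing with '.&&.', and rebuilding an identifier from a capture. OverloadedStrings lets string literals stand in for patterns via the 'IsString' instance.

{-# LANGUAGE OverloadedStrings #-}
import Hakyll.Core.Identifier         (fromFilePath)
import Hakyll.Core.Identifier.Pattern

main :: IO ()
main = do
    let p = fromGlob "posts/*.markdown"
    print $ matches p (fromFilePath "posts/hello.markdown")   -- True
    print $ capture p (fromFilePath "posts/hello.markdown")   -- Just ["hello"]
    print $ fromCapture (fromGlob "tags/*") "haskell"         -- tags/haskell
    print $ matches ("posts/**" .&&. hasNoVersion)
                    (fromFilePath "posts/2017/foo.markdown")  -- True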
diff --git a/src/Hakyll/Core/Item.hs b/src/Hakyll/Core/Item.hs
deleted file mode 100644
index e05df42..0000000
--- a/src/Hakyll/Core/Item.hs
+++ /dev/null
@@ -1,63 +0,0 @@
---------------------------------------------------------------------------------
--- | An item is a combination of some content and its 'Identifier'. This way, we
--- can still use the 'Identifier' to access metadata.
-{-# LANGUAGE DeriveDataTypeable #-}
-module Hakyll.Core.Item
- ( Item (..)
- , itemSetBody
- , withItemBody
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Binary (Binary (..))
-import Data.Foldable (Foldable (..))
-import Data.Typeable (Typeable)
-import Prelude hiding (foldr)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Identifier
-
-
---------------------------------------------------------------------------------
-data Item a = Item
- { itemIdentifier :: Identifier
- , itemBody :: a
- } deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Functor Item where
- fmap f (Item i x) = Item i (f x)
-
-
---------------------------------------------------------------------------------
-instance Foldable Item where
- foldr f z (Item _ x) = f x z
-
-
---------------------------------------------------------------------------------
-instance Traversable Item where
- traverse f (Item i x) = Item i <$> f x
-
-
---------------------------------------------------------------------------------
-instance Binary a => Binary (Item a) where
- put (Item i x) = put i >> put x
- get = Item <$> get <*> get
-
-
---------------------------------------------------------------------------------
-itemSetBody :: a -> Item b -> Item a
-itemSetBody x (Item i _) = Item i x
-
-
---------------------------------------------------------------------------------
--- | Perform a compiler action on the item body. This is the same as 'traverse',
--- but looks less intimidating.
---
--- > withItemBody = traverse
-withItemBody :: (a -> Compiler b) -> Item a -> Compiler (Item b)
-withItemBody = traverse
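Because 'Item' is a Functor and Traversable, plain 'fmap' rewrites the body while 'withItemBody' threads a 'Compiler' action through it. A small sketch, not part of the diff; the function names are illustrative.

import Data.Char (toUpper)
import Hakyll

-- run a Compiler action over the item body
shout :: Item String -> Compiler (Item String)
shout = withItemBody (return . map toUpper)

-- pure transformation of the item body
retitle :: Item String -> Item String
retitle = fmap ("# " ++)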
diff --git a/src/Hakyll/Core/Item/SomeItem.hs b/src/Hakyll/Core/Item/SomeItem.hs
deleted file mode 100644
index c5ba0df..0000000
--- a/src/Hakyll/Core/Item/SomeItem.hs
+++ /dev/null
@@ -1,23 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE ExistentialQuantification #-}
-module Hakyll.Core.Item.SomeItem
- ( SomeItem (..)
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Binary (Binary)
-import Data.Typeable (Typeable)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Item
-import Hakyll.Core.Writable
-
-
---------------------------------------------------------------------------------
--- | An existential type, mostly for internal usage.
-data SomeItem = forall a.
- (Binary a, Typeable a, Writable a) => SomeItem (Item a)
- deriving (Typeable)
diff --git a/src/Hakyll/Core/Logger.hs b/src/Hakyll/Core/Logger.hs
deleted file mode 100644
index 6f950a6..0000000
--- a/src/Hakyll/Core/Logger.hs
+++ /dev/null
@@ -1,97 +0,0 @@
---------------------------------------------------------------------------------
--- | Produce pretty, thread-safe logs
-module Hakyll.Core.Logger
- ( Verbosity (..)
- , Logger
- , new
- , flush
- , error
- , header
- , message
- , debug
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Concurrent (forkIO)
-import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
-import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar)
-import Control.Monad (forever)
-import Control.Monad.Trans (MonadIO, liftIO)
-import Prelude hiding (error)
-
-
---------------------------------------------------------------------------------
-data Verbosity
- = Error
- | Message
- | Debug
- deriving (Eq, Ord, Show)
-
-
---------------------------------------------------------------------------------
--- | Logger structure. Very complicated.
-data Logger = Logger
- { loggerChan :: Chan (Maybe String) -- ^ Nothing marks the end
- , loggerSync :: MVar () -- ^ Used for sync on quit
- , loggerSink :: String -> IO () -- ^ Out sink
- , loggerVerbosity :: Verbosity -- ^ Verbosity
- }
-
-
---------------------------------------------------------------------------------
--- | Create a new logger
-new :: Verbosity -> IO Logger
-new vbty = do
- logger <- Logger <$>
- newChan <*> newEmptyMVar <*> pure putStrLn <*> pure vbty
- _ <- forkIO $ loggerThread logger
- return logger
- where
- loggerThread logger = forever $ do
- msg <- readChan $ loggerChan logger
- case msg of
- -- Stop: sync
- Nothing -> putMVar (loggerSync logger) ()
- -- Print and continue
- Just m -> loggerSink logger m
-
-
---------------------------------------------------------------------------------
--- | Flush the logger (blocks until flushed)
-flush :: Logger -> IO ()
-flush logger = do
- writeChan (loggerChan logger) Nothing
- () <- takeMVar $ loggerSync logger
- return ()
-
-
---------------------------------------------------------------------------------
-string :: MonadIO m
- => Logger -- ^ Logger
- -> Verbosity -- ^ Verbosity of the string
-       -> String     -- ^ Message to log
- -> m () -- ^ No result
-string l v m
- | loggerVerbosity l >= v = liftIO $ writeChan (loggerChan l) (Just m)
- | otherwise = return ()
-
-
---------------------------------------------------------------------------------
-error :: MonadIO m => Logger -> String -> m ()
-error l m = string l Error $ " [ERROR] " ++ m
-
-
---------------------------------------------------------------------------------
-header :: MonadIO m => Logger -> String -> m ()
-header l = string l Message
-
-
---------------------------------------------------------------------------------
-message :: MonadIO m => Logger -> String -> m ()
-message l m = string l Message $ " " ++ m
-
-
---------------------------------------------------------------------------------
-debug :: MonadIO m => Logger -> String -> m ()
-debug l m = string l Debug $ " [DEBUG] " ++ m
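A minimal sketch (not part of the diff) of the logger above: create it at 'Message' verbosity, emit a few lines, and 'flush' before exiting so the background thread drains its channel.

import qualified Hakyll.Core.Logger as Logger

main :: IO ()
main = do
    logger <- Logger.new Logger.Message
    Logger.header  logger "Building site"
    Logger.message logger "compiled posts/foo.markdown"
    Logger.debug   logger "dropped: below the configured verbosity"
    Logger.flush   logger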
diff --git a/src/Hakyll/Core/Metadata.hs b/src/Hakyll/Core/Metadata.hs
deleted file mode 100644
index 1cf536e..0000000
--- a/src/Hakyll/Core/Metadata.hs
+++ /dev/null
@@ -1,138 +0,0 @@
---------------------------------------------------------------------------------
-module Hakyll.Core.Metadata
- ( Metadata
- , lookupString
- , lookupStringList
-
- , MonadMetadata (..)
- , getMetadataField
- , getMetadataField'
- , makePatternDependency
-
- , BinaryMetadata (..)
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Arrow (second)
-import Control.Monad (forM)
-import Data.Binary (Binary (..), getWord8,
- putWord8, Get)
-import qualified Data.HashMap.Strict as HMS
-import qualified Data.Set as S
-import qualified Data.Text as T
-import qualified Data.Vector as V
-import qualified Data.Yaml.Extended as Yaml
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-
-
---------------------------------------------------------------------------------
-type Metadata = Yaml.Object
-
-
---------------------------------------------------------------------------------
-lookupString :: String -> Metadata -> Maybe String
-lookupString key meta = HMS.lookup (T.pack key) meta >>= Yaml.toString
-
-
---------------------------------------------------------------------------------
-lookupStringList :: String -> Metadata -> Maybe [String]
-lookupStringList key meta =
- HMS.lookup (T.pack key) meta >>= Yaml.toList >>= mapM Yaml.toString
-
-
---------------------------------------------------------------------------------
-class Monad m => MonadMetadata m where
- getMetadata :: Identifier -> m Metadata
- getMatches :: Pattern -> m [Identifier]
-
- getAllMetadata :: Pattern -> m [(Identifier, Metadata)]
- getAllMetadata pattern = do
- matches' <- getMatches pattern
- forM matches' $ \id' -> do
- metadata <- getMetadata id'
- return (id', metadata)
-
-
---------------------------------------------------------------------------------
-getMetadataField :: MonadMetadata m => Identifier -> String -> m (Maybe String)
-getMetadataField identifier key = do
- metadata <- getMetadata identifier
- return $ lookupString key metadata
-
-
---------------------------------------------------------------------------------
--- | Version of 'getMetadataField' which throws an error if the field does not
--- exist.
-getMetadataField' :: MonadMetadata m => Identifier -> String -> m String
-getMetadataField' identifier key = do
- field <- getMetadataField identifier key
- case field of
- Just v -> return v
- Nothing -> fail $ "Hakyll.Core.Metadata.getMetadataField': " ++
- "Item " ++ show identifier ++ " has no metadata field " ++ show key
-
-
---------------------------------------------------------------------------------
-makePatternDependency :: MonadMetadata m => Pattern -> m Dependency
-makePatternDependency pattern = do
- matches' <- getMatches pattern
- return $ PatternDependency pattern (S.fromList matches')
-
-
---------------------------------------------------------------------------------
--- | Newtype wrapper for serialization.
-newtype BinaryMetadata = BinaryMetadata
- {unBinaryMetadata :: Metadata}
-
-
-instance Binary BinaryMetadata where
- put (BinaryMetadata obj) = put (BinaryYaml $ Yaml.Object obj)
- get = do
- BinaryYaml (Yaml.Object obj) <- get
- return $ BinaryMetadata obj
-
-
---------------------------------------------------------------------------------
-newtype BinaryYaml = BinaryYaml {unBinaryYaml :: Yaml.Value}
-
-
---------------------------------------------------------------------------------
-instance Binary BinaryYaml where
- put (BinaryYaml yaml) = case yaml of
- Yaml.Object obj -> do
- putWord8 0
- let list :: [(T.Text, BinaryYaml)]
- list = map (second BinaryYaml) $ HMS.toList obj
- put list
-
- Yaml.Array arr -> do
- putWord8 1
- let list = map BinaryYaml (V.toList arr) :: [BinaryYaml]
- put list
-
- Yaml.String s -> putWord8 2 >> put s
- Yaml.Number n -> putWord8 3 >> put n
- Yaml.Bool b -> putWord8 4 >> put b
- Yaml.Null -> putWord8 5
-
- get = do
- tag <- getWord8
- case tag of
- 0 -> do
- list <- get :: Get [(T.Text, BinaryYaml)]
- return $ BinaryYaml $ Yaml.Object $
- HMS.fromList $ map (second unBinaryYaml) list
-
- 1 -> do
- list <- get :: Get [BinaryYaml]
- return $ BinaryYaml $
- Yaml.Array $ V.fromList $ map unBinaryYaml list
-
- 2 -> BinaryYaml . Yaml.String <$> get
- 3 -> BinaryYaml . Yaml.Number <$> get
- 4 -> BinaryYaml . Yaml.Bool <$> get
- 5 -> return $ BinaryYaml Yaml.Null
- _ -> fail "Data.Binary.get: Invalid Binary Metadata"
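A minimal sketch (not part of the diff) of the accessors above: decode a small YAML snippet into a 'Metadata' object and query it. The literal YAML is only an example.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Yaml as Yaml
import Hakyll.Core.Metadata (Metadata, lookupString, lookupStringList)

main :: IO ()
main =
    case Yaml.decodeEither' "title: Hello\ntags: [haskell, hakyll]\n"
            :: Either Yaml.ParseException Metadata of
        Left err   -> print err
        Right meta -> do
            print $ lookupString "title" meta      -- Just "Hello"
            print $ lookupStringList "tags" meta   -- Just ["haskell","hakyll"]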
diff --git a/src/Hakyll/Core/Provider.hs b/src/Hakyll/Core/Provider.hs
deleted file mode 100644
index 384f5b1..0000000
--- a/src/Hakyll/Core/Provider.hs
+++ /dev/null
@@ -1,43 +0,0 @@
---------------------------------------------------------------------------------
--- | This module provides a wrapper API around the file system which does some
--- caching.
-module Hakyll.Core.Provider
- ( -- * Constructing resource providers
- Internal.Provider
- , newProvider
-
- -- * Querying resource properties
- , Internal.resourceList
- , Internal.resourceExists
- , Internal.resourceFilePath
- , Internal.resourceModified
- , Internal.resourceModificationTime
-
- -- * Access to raw resource content
- , Internal.resourceString
- , Internal.resourceLBS
-
- -- * Access to metadata and body content
- , Internal.resourceMetadata
- , Internal.resourceBody
- ) where
-
-
---------------------------------------------------------------------------------
-import qualified Hakyll.Core.Provider.Internal as Internal
-import qualified Hakyll.Core.Provider.MetadataCache as Internal
-import Hakyll.Core.Store (Store)
-
-
---------------------------------------------------------------------------------
--- | Create a resource provider
-newProvider :: Store -- ^ Store to use
- -> (FilePath -> IO Bool) -- ^ Should we ignore this file?
- -> FilePath -- ^ Search directory
- -> IO Internal.Provider -- ^ Resulting provider
-newProvider store ignore directory = do
- -- Delete metadata cache where necessary
- p <- Internal.newProvider store ignore directory
- mapM_ (Internal.resourceInvalidateMetadataCache p) $
- filter (Internal.resourceModified p) $ Internal.resourceList p
- return p
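A minimal sketch (not part of the diff) of building a provider and listing what it found. The store path, ignore predicate, and search directory are arbitrary illustrative choices, assuming 'Store.new' takes the in-memory flag and a directory as the runtime code later in this diff uses it.

import qualified Hakyll.Core.Provider as Provider
import qualified Hakyll.Core.Store    as Store

main :: IO ()
main = do
    store    <- Store.new True "_cache"                   -- in-memory cache enabled
    provider <- Provider.newProvider store (\_ -> return False) "site"
    mapM_ print (Provider.resourceList provider)          -- every file found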
diff --git a/src/Hakyll/Core/Provider/Internal.hs b/src/Hakyll/Core/Provider/Internal.hs
deleted file mode 100644
index c298653..0000000
--- a/src/Hakyll/Core/Provider/Internal.hs
+++ /dev/null
@@ -1,202 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Core.Provider.Internal
- ( ResourceInfo (..)
- , Provider (..)
- , newProvider
-
- , resourceList
- , resourceExists
-
- , resourceFilePath
- , resourceString
- , resourceLBS
-
- , resourceModified
- , resourceModificationTime
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.DeepSeq (NFData (..), deepseq)
-import Control.Monad (forM)
-import Data.Binary (Binary (..))
-import qualified Data.ByteString.Lazy as BL
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Maybe (fromMaybe)
-import Data.Set (Set)
-import qualified Data.Set as S
-import Data.Time (Day (..), UTCTime (..))
-import Data.Typeable (Typeable)
-import System.Directory (getModificationTime)
-import System.FilePath (addExtension, (</>))
-
-
---------------------------------------------------------------------------------
-#if !MIN_VERSION_directory(1,2,0)
-import Data.Time (readTime)
-import System.Locale (defaultTimeLocale)
-import System.Time (formatCalendarTime, toCalendarTime)
-#endif
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-import Hakyll.Core.Store (Store)
-import qualified Hakyll.Core.Store as Store
-import Hakyll.Core.Util.File
-
-
---------------------------------------------------------------------------------
--- | Because UTCTime doesn't have a Binary instance...
-newtype BinaryTime = BinaryTime {unBinaryTime :: UTCTime}
- deriving (Eq, NFData, Ord, Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary BinaryTime where
- put (BinaryTime (UTCTime (ModifiedJulianDay d) dt)) =
- put d >> put (toRational dt)
-
- get = fmap BinaryTime $ UTCTime
- <$> (ModifiedJulianDay <$> get)
- <*> (fromRational <$> get)
-
-
---------------------------------------------------------------------------------
-data ResourceInfo = ResourceInfo
- { resourceInfoModified :: BinaryTime
- , resourceInfoMetadata :: Maybe Identifier
- } deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary ResourceInfo where
- put (ResourceInfo mtime meta) = put mtime >> put meta
- get = ResourceInfo <$> get <*> get
-
-
---------------------------------------------------------------------------------
-instance NFData ResourceInfo where
- rnf (ResourceInfo mtime meta) = rnf mtime `seq` rnf meta `seq` ()
-
-
---------------------------------------------------------------------------------
--- | Responsible for retrieving and listing resources
-data Provider = Provider
-    { -- | Top of the provided directory
- providerDirectory :: FilePath
- , -- | A list of all files found
- providerFiles :: Map Identifier ResourceInfo
- , -- | A list of the files from the previous run
- providerOldFiles :: Map Identifier ResourceInfo
- , -- | Underlying persistent store for caching
- providerStore :: Store
- } deriving (Show)
-
-
---------------------------------------------------------------------------------
--- | Create a resource provider
-newProvider :: Store -- ^ Store to use
- -> (FilePath -> IO Bool) -- ^ Should we ignore this file?
- -> FilePath -- ^ Search directory
- -> IO Provider -- ^ Resulting provider
-newProvider store ignore directory = do
- list <- map fromFilePath <$> getRecursiveContents ignore directory
- let universe = S.fromList list
- files <- fmap (maxmtime . M.fromList) $ forM list $ \identifier -> do
- rInfo <- getResourceInfo directory universe identifier
- return (identifier, rInfo)
-
- -- Get the old files from the store, and then immediately replace them by
- -- the new files.
- oldFiles <- fromMaybe mempty . Store.toMaybe <$> Store.get store oldKey
- oldFiles `deepseq` Store.set store oldKey files
-
- return $ Provider directory files oldFiles store
- where
- oldKey = ["Hakyll.Core.Provider.Internal.newProvider", "oldFiles"]
-
- -- Update modified if metadata is modified
- maxmtime files = flip M.map files $ \rInfo@(ResourceInfo mtime meta) ->
- let metaMod = fmap resourceInfoModified $ meta >>= flip M.lookup files
- in rInfo {resourceInfoModified = maybe mtime (max mtime) metaMod}
-
-
---------------------------------------------------------------------------------
-getResourceInfo :: FilePath -> Set Identifier -> Identifier -> IO ResourceInfo
-getResourceInfo directory universe identifier = do
- mtime <- fileModificationTime $ directory </> toFilePath identifier
- return $ ResourceInfo (BinaryTime mtime) $
- if mdRsc `S.member` universe then Just mdRsc else Nothing
- where
- mdRsc = fromFilePath $ flip addExtension "metadata" $ toFilePath identifier
-
-
---------------------------------------------------------------------------------
-resourceList :: Provider -> [Identifier]
-resourceList = M.keys . providerFiles
-
-
---------------------------------------------------------------------------------
--- | Check if a given resource exists
-resourceExists :: Provider -> Identifier -> Bool
-resourceExists provider =
- (`M.member` providerFiles provider) . setVersion Nothing
-
-
---------------------------------------------------------------------------------
-resourceFilePath :: Provider -> Identifier -> FilePath
-resourceFilePath p i = providerDirectory p </> toFilePath i
-
-
---------------------------------------------------------------------------------
--- | Get the raw body of a resource as a string
-resourceString :: Provider -> Identifier -> IO String
-resourceString p i = readFile $ resourceFilePath p i
-
-
---------------------------------------------------------------------------------
--- | Get the raw body of a resource as a lazy bytestring
-resourceLBS :: Provider -> Identifier -> IO BL.ByteString
-resourceLBS p i = BL.readFile $ resourceFilePath p i
-
-
---------------------------------------------------------------------------------
--- | A resource is modified if it or its metadata has changed
-resourceModified :: Provider -> Identifier -> Bool
-resourceModified p r = case (ri, oldRi) of
- (Nothing, _) -> False
- (Just _, Nothing) -> True
- (Just n, Just o) ->
- resourceInfoModified n > resourceInfoModified o ||
- resourceInfoMetadata n /= resourceInfoMetadata o
- where
- normal = setVersion Nothing r
- ri = M.lookup normal (providerFiles p)
- oldRi = M.lookup normal (providerOldFiles p)
-
-
---------------------------------------------------------------------------------
-resourceModificationTime :: Provider -> Identifier -> UTCTime
-resourceModificationTime p i =
- case M.lookup (setVersion Nothing i) (providerFiles p) of
- Just ri -> unBinaryTime $ resourceInfoModified ri
- Nothing -> error $
- "Hakyll.Core.Provider.Internal.resourceModificationTime: " ++
- "resource " ++ show i ++ " does not exist"
-
-
---------------------------------------------------------------------------------
-fileModificationTime :: FilePath -> IO UTCTime
-fileModificationTime fp = do
-#if MIN_VERSION_directory(1,2,0)
- getModificationTime fp
-#else
- ct <- toCalendarTime =<< getModificationTime fp
- let str = formatCalendarTime defaultTimeLocale "%s" ct
- return $ readTime defaultTimeLocale "%s" str
-#endif
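Given a 'Provider' (see the sketch after Hakyll/Core/Provider.hs above), the modification bookkeeping here can be queried directly. A small sketch, not part of the diff; the helper name is illustrative.

import Hakyll.Core.Identifier        (Identifier)
import Hakyll.Core.Provider.Internal

-- resources whose content or metadata changed since the previous run
changedResources :: Provider -> [Identifier]
changedResources p = filter (resourceModified p) (resourceList p)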
diff --git a/src/Hakyll/Core/Provider/Metadata.hs b/src/Hakyll/Core/Provider/Metadata.hs
deleted file mode 100644
index 6285ce1..0000000
--- a/src/Hakyll/Core/Provider/Metadata.hs
+++ /dev/null
@@ -1,151 +0,0 @@
---------------------------------------------------------------------------------
--- | Internal module to parse metadata
-{-# LANGUAGE BangPatterns #-}
-{-# LANGUAGE RecordWildCards #-}
-module Hakyll.Core.Provider.Metadata
- ( loadMetadata
- , parsePage
-
- , MetadataException (..)
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Arrow (second)
-import Control.Exception (Exception, throwIO)
-import Control.Monad (guard)
-import qualified Data.ByteString as B
-import qualified Data.ByteString.Char8 as BC
-import Data.List.Extended (breakWhen)
-import qualified Data.Map as M
-import Data.Maybe (fromMaybe)
-import Data.Monoid ((<>))
-import qualified Data.Text as T
-import qualified Data.Text.Encoding as T
-import qualified Data.Yaml as Yaml
-import Hakyll.Core.Identifier
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider.Internal
-import System.IO as IO
-
-
---------------------------------------------------------------------------------
-loadMetadata :: Provider -> Identifier -> IO (Metadata, Maybe String)
-loadMetadata p identifier = do
- hasHeader <- probablyHasMetadataHeader fp
- (md, body) <- if hasHeader
- then second Just <$> loadMetadataHeader fp
- else return (mempty, Nothing)
-
- emd <- case mi of
- Nothing -> return mempty
- Just mi' -> loadMetadataFile $ resourceFilePath p mi'
-
- return (md <> emd, body)
- where
- normal = setVersion Nothing identifier
- fp = resourceFilePath p identifier
- mi = M.lookup normal (providerFiles p) >>= resourceInfoMetadata
-
-
---------------------------------------------------------------------------------
-loadMetadataHeader :: FilePath -> IO (Metadata, String)
-loadMetadataHeader fp = do
- fileContent <- readFile fp
- case parsePage fileContent of
- Right x -> return x
- Left err -> throwIO $ MetadataException fp err
-
-
---------------------------------------------------------------------------------
-loadMetadataFile :: FilePath -> IO Metadata
-loadMetadataFile fp = do
- fileContent <- B.readFile fp
- let errOrMeta = Yaml.decodeEither' fileContent
- either (fail . show) return errOrMeta
-
-
---------------------------------------------------------------------------------
--- | Check if a file "probably" has a metadata header. The main goal of this is
--- to exclude binary files (which are unlikely to start with "---").
-probablyHasMetadataHeader :: FilePath -> IO Bool
-probablyHasMetadataHeader fp = do
- handle <- IO.openFile fp IO.ReadMode
- bs <- BC.hGet handle 1024
- IO.hClose handle
- return $ isMetadataHeader bs
- where
- isMetadataHeader bs =
- let pre = BC.takeWhile (\x -> x /= '\n' && x /= '\r') bs
- in BC.length pre >= 3 && BC.all (== '-') pre
-
-
---------------------------------------------------------------------------------
--- | Parse the page metadata and body.
-splitMetadata :: String -> (Maybe String, String)
-splitMetadata str0 = fromMaybe (Nothing, str0) $ do
- guard $ leading >= 3
- let !str1 = drop leading str0
- guard $ all isNewline (take 1 str1)
- let !(!meta, !content0) = breakWhen isTrailing str1
- guard $ not $ null content0
- let !content1 = drop (leading + 1) content0
- !content2 = dropWhile isNewline $ dropWhile isInlineSpace content1
- -- Adding this newline fixes the line numbers reported by the YAML parser.
- -- It's a bit ugly but it works.
- return (Just ('\n' : meta), content2)
- where
- -- Parse the leading "---"
- !leading = length $ takeWhile (== '-') str0
-
- -- Predicate to recognize the trailing "---" or "..."
- isTrailing [] = False
- isTrailing (x : xs) =
- isNewline x && length (takeWhile isDash xs) == leading
-
- -- Characters
- isNewline c = c == '\n' || c == '\r'
- isDash c = c == '-' || c == '.'
- isInlineSpace c = c == '\t' || c == ' '
-
-
---------------------------------------------------------------------------------
-parseMetadata :: String -> Either Yaml.ParseException Metadata
-parseMetadata = Yaml.decodeEither' . T.encodeUtf8 . T.pack
-
-
---------------------------------------------------------------------------------
-parsePage :: String -> Either Yaml.ParseException (Metadata, String)
-parsePage fileContent = case mbMetaBlock of
- Nothing -> return (mempty, content)
- Just metaBlock -> case parseMetadata metaBlock of
- Left err -> Left err
- Right meta -> return (meta, content)
- where
- !(!mbMetaBlock, !content) = splitMetadata fileContent
-
-
---------------------------------------------------------------------------------
--- | Thrown in the IO monad if things go wrong. Provides a nice-ish error
--- message.
-data MetadataException = MetadataException FilePath Yaml.ParseException
-
-
---------------------------------------------------------------------------------
-instance Exception MetadataException
-
-
---------------------------------------------------------------------------------
-instance Show MetadataException where
- show (MetadataException fp err) =
- fp ++ ": " ++ Yaml.prettyPrintParseException err ++ hint
-
- where
- hint = case err of
- Yaml.InvalidYaml (Just (Yaml.YamlParseException {..}))
- | yamlProblem == problem -> "\n" ++
- "Hint: if the metadata value contains characters such\n" ++
- "as ':' or '-', try enclosing it in quotes."
- _ -> ""
-
- problem = "mapping values are not allowed in this context"
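A minimal sketch (not part of the diff) of 'parsePage' on an in-memory document with a "---"-delimited header.

import Hakyll.Core.Metadata          (lookupString)
import Hakyll.Core.Provider.Metadata (parsePage)

main :: IO ()
main = do
    let doc = unlines ["---", "title: Hello", "---", "", "Body text."]
    case parsePage doc of
        Left err           -> print err
        Right (meta, body) -> do
            print (lookupString "title" meta)   -- Just "Hello"
            putStr body                         -- Body text.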
diff --git a/src/Hakyll/Core/Provider/MetadataCache.hs b/src/Hakyll/Core/Provider/MetadataCache.hs
deleted file mode 100644
index 46dbf3e..0000000
--- a/src/Hakyll/Core/Provider/MetadataCache.hs
+++ /dev/null
@@ -1,62 +0,0 @@
---------------------------------------------------------------------------------
-module Hakyll.Core.Provider.MetadataCache
- ( resourceMetadata
- , resourceBody
- , resourceInvalidateMetadataCache
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (unless)
-import Hakyll.Core.Identifier
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider.Internal
-import Hakyll.Core.Provider.Metadata
-import qualified Hakyll.Core.Store as Store
-
-
---------------------------------------------------------------------------------
-resourceMetadata :: Provider -> Identifier -> IO Metadata
-resourceMetadata p r
- | not (resourceExists p r) = return mempty
- | otherwise = do
- -- TODO keep time in md cache
- load p r
- Store.Found (BinaryMetadata md) <- Store.get (providerStore p)
- [name, toFilePath r, "metadata"]
- return md
-
-
---------------------------------------------------------------------------------
-resourceBody :: Provider -> Identifier -> IO String
-resourceBody p r = do
- load p r
- Store.Found bd <- Store.get (providerStore p)
- [name, toFilePath r, "body"]
- maybe (resourceString p r) return bd
-
-
---------------------------------------------------------------------------------
-resourceInvalidateMetadataCache :: Provider -> Identifier -> IO ()
-resourceInvalidateMetadataCache p r = do
- Store.delete (providerStore p) [name, toFilePath r, "metadata"]
- Store.delete (providerStore p) [name, toFilePath r, "body"]
-
-
---------------------------------------------------------------------------------
-load :: Provider -> Identifier -> IO ()
-load p r = do
- mmof <- Store.isMember store mdk
- unless mmof $ do
- (md, body) <- loadMetadata p r
- Store.set store mdk (BinaryMetadata md)
- Store.set store bk body
- where
- store = providerStore p
- mdk = [name, toFilePath r, "metadata"]
- bk = [name, toFilePath r, "body"]
-
-
---------------------------------------------------------------------------------
-name :: String
-name = "Hakyll.Core.Resource.Provider.MetadataCache"
diff --git a/src/Hakyll/Core/Routes.hs b/src/Hakyll/Core/Routes.hs
deleted file mode 100644
index 513725f..0000000
--- a/src/Hakyll/Core/Routes.hs
+++ /dev/null
@@ -1,194 +0,0 @@
---------------------------------------------------------------------------------
--- | Once a target is compiled, the user usually wants to save it to the disk.
--- This is where the 'Routes' type comes in; it determines where a certain
--- target should be written.
---
--- Suppose we have an item @foo\/bar.markdown@. We can render this to
--- @foo\/bar.html@ using:
---
--- > route "foo/bar.markdown" (setExtension ".html")
---
--- If we do not want to change the extension, we can use 'idRoute', the simplest
--- route available:
---
--- > route "foo/bar.markdown" idRoute
---
--- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
---
--- Note that the extension says nothing about the content! If you set the
--- extension to @.html@, it is your own responsibility to ensure that the
--- content is indeed HTML.
---
--- Finally, some special cases:
---
--- * If there is no route for an item, this item will not be routed, so it will
--- not appear in your site directory.
---
--- * If an item matches multiple routes, the first rule will be chosen.
-{-# LANGUAGE Rank2Types #-}
-module Hakyll.Core.Routes
- ( UsedMetadata
- , Routes
- , runRoutes
- , idRoute
- , setExtension
- , matchRoute
- , customRoute
- , constRoute
- , gsubRoute
- , metadataRoute
- , composeRoutes
- ) where
-
-
---------------------------------------------------------------------------------
-import System.FilePath (replaceExtension)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider
-import Hakyll.Core.Util.String
-
-
---------------------------------------------------------------------------------
--- | When you run a route, it's useful to know whether or not it used
--- metadata. This allows us to do more granular dependency analysis.
-type UsedMetadata = Bool
-
-
---------------------------------------------------------------------------------
-data RoutesRead = RoutesRead
- { routesProvider :: Provider
- , routesUnderlying :: Identifier
- }
-
-
---------------------------------------------------------------------------------
--- | Type used for a route
-newtype Routes = Routes
- { unRoutes :: RoutesRead -> Identifier -> IO (Maybe FilePath, UsedMetadata)
- }
-
-
---------------------------------------------------------------------------------
-instance Monoid Routes where
- mempty = Routes $ \_ _ -> return (Nothing, False)
- mappend (Routes f) (Routes g) = Routes $ \p id' -> do
- (mfp, um) <- f p id'
- case mfp of
- Nothing -> g p id'
- Just _ -> return (mfp, um)
-
-
---------------------------------------------------------------------------------
--- | Apply a route to an identifier
-runRoutes :: Routes -> Provider -> Identifier
- -> IO (Maybe FilePath, UsedMetadata)
-runRoutes routes provider identifier =
- unRoutes routes (RoutesRead provider identifier) identifier
-
-
---------------------------------------------------------------------------------
--- | A route that uses the identifier as filepath. For example, the target with
--- ID @foo\/bar@ will be written to the file @foo\/bar@.
-idRoute :: Routes
-idRoute = customRoute toFilePath
-
-
---------------------------------------------------------------------------------
--- | Set (or replace) the extension of a route.
---
--- Example:
---
--- > runRoutes (setExtension "html") "foo/bar"
---
--- Result:
---
--- > Just "foo/bar.html"
---
--- Example:
---
--- > runRoutes (setExtension "html") "posts/the-art-of-trolling.markdown"
---
--- Result:
---
--- > Just "posts/the-art-of-trolling.html"
-setExtension :: String -> Routes
-setExtension extension = customRoute $
- (`replaceExtension` extension) . toFilePath
-
-
---------------------------------------------------------------------------------
--- | Apply the route if the identifier matches the given pattern, fail
--- otherwise
-matchRoute :: Pattern -> Routes -> Routes
-matchRoute pattern (Routes route) = Routes $ \p id' ->
- if matches pattern id' then route p id' else return (Nothing, False)
-
-
---------------------------------------------------------------------------------
--- | Create a custom route. This should almost always be used with
--- 'matchRoute'
-customRoute :: (Identifier -> FilePath) -> Routes
-customRoute f = Routes $ const $ \id' -> return (Just (f id'), False)
-
-
---------------------------------------------------------------------------------
--- | A route that always gives the same result. Obviously, you should only use
--- this for a single compilation rule.
-constRoute :: FilePath -> Routes
-constRoute = customRoute . const
-
-
---------------------------------------------------------------------------------
--- | Create a gsub route
---
--- Example:
---
--- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
---
--- Result:
---
--- > Just "tags/bar.xml"
-gsubRoute :: String -- ^ Pattern
- -> (String -> String) -- ^ Replacement
- -> Routes -- ^ Resulting route
-gsubRoute pattern replacement = customRoute $
- replaceAll pattern replacement . toFilePath
-
-
---------------------------------------------------------------------------------
--- | Get access to the metadata in order to determine the route
-metadataRoute :: (Metadata -> Routes) -> Routes
-metadataRoute f = Routes $ \r i -> do
- metadata <- resourceMetadata (routesProvider r) (routesUnderlying r)
- unRoutes (f metadata) r i
-
-
---------------------------------------------------------------------------------
--- | Compose routes so that @f \`composeRoutes\` g@ is more or less equivalent
--- to @g . f@.
---
--- Example:
---
--- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
--- > in runRoutes routes "tags/rss/bar"
---
--- Result:
---
--- > Just "tags/bar.xml"
---
--- If the first route given fails, Hakyll will not apply the second route.
-composeRoutes :: Routes -- ^ First route to apply
- -> Routes -- ^ Second route to apply
- -> Routes -- ^ Resulting route
-composeRoutes (Routes f) (Routes g) = Routes $ \p i -> do
- (mfp, um) <- f p i
- case mfp of
- Nothing -> return (Nothing, um)
- Just fp -> do
- (mfp', um') <- g p (fromFilePath fp)
- return (mfp', um || um')
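A small sketch (not part of the diff) combining several of the route builders above: prefer a slug stored in the item's metadata, otherwise drop a "drafts/" prefix and switch the extension. All names are illustrative.

import Hakyll

postRoute :: Routes
postRoute = metadataRoute $ \meta ->
    case lookupString "slug" meta of
        Just slug -> constRoute ("posts/" ++ slug ++ ".html")
        Nothing   -> gsubRoute "drafts/" (const "")
                         `composeRoutes` setExtension "html"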
diff --git a/src/Hakyll/Core/Rules.hs b/src/Hakyll/Core/Rules.hs
deleted file mode 100644
index 41b9a73..0000000
--- a/src/Hakyll/Core/Rules.hs
+++ /dev/null
@@ -1,223 +0,0 @@
---------------------------------------------------------------------------------
--- | This module provides a declarative DSL in which the user can specify the
--- different rules used to run the compilers.
---
--- The convention is to simply list all routes and compilation rules in the
--- 'Rules' monad.
---
--- A typical usage example would be:
---
--- > main = hakyll $ do
--- > match "posts/*" $ do
--- > route (setExtension "html")
--- > compile someCompiler
--- > match "css/*" $ do
--- > route idRoute
--- > compile compressCssCompiler
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Core.Rules
- ( Rules
- , match
- , matchMetadata
- , create
- , version
- , compile
- , route
-
- -- * Advanced usage
- , preprocess
- , Dependency (..)
- , rulesExtraDependencies
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad.Reader (ask, local)
-import Control.Monad.State (get, modify, put)
-import Control.Monad.Trans (liftIO)
-import Control.Monad.Writer (censor, tell)
-import Data.Maybe (fromMaybe)
-import qualified Data.Set as S
-
-
---------------------------------------------------------------------------------
-import Data.Binary (Binary)
-import Data.Typeable (Typeable)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Item.SomeItem
-import Hakyll.Core.Metadata
-import Hakyll.Core.Routes
-import Hakyll.Core.Rules.Internal
-import Hakyll.Core.Writable
-
-
---------------------------------------------------------------------------------
--- | Add a route
-tellRoute :: Routes -> Rules ()
-tellRoute route' = Rules $ tell $ RuleSet route' mempty mempty mempty
-
-
---------------------------------------------------------------------------------
--- | Add a number of compilers
-tellCompilers :: [(Identifier, Compiler SomeItem)] -> Rules ()
-tellCompilers compilers = Rules $ tell $ RuleSet mempty compilers mempty mempty
-
-
---------------------------------------------------------------------------------
--- | Add resources
-tellResources :: [Identifier] -> Rules ()
-tellResources resources' = Rules $ tell $
- RuleSet mempty mempty (S.fromList resources') mempty
-
-
---------------------------------------------------------------------------------
--- | Add a pattern
-tellPattern :: Pattern -> Rules ()
-tellPattern pattern = Rules $ tell $ RuleSet mempty mempty mempty pattern
-
-
---------------------------------------------------------------------------------
-flush :: Rules ()
-flush = Rules $ do
- mcompiler <- rulesCompiler <$> get
- case mcompiler of
- Nothing -> return ()
- Just compiler -> do
- matches' <- rulesMatches <$> ask
- version' <- rulesVersion <$> ask
- route' <- fromMaybe mempty . rulesRoute <$> get
-
- -- The version is possibly not set correctly at this point (yet)
- let ids = map (setVersion version') matches'
-
- {-
- ids <- case fromLiteral pattern of
- Just id' -> return [setVersion version' id']
- Nothing -> do
- ids <- unRules $ getMatches pattern
- unRules $ tellResources ids
- return $ map (setVersion version') ids
- -}
-
- -- Create a fast pattern for routing that matches exactly the
- -- compilers created in the block given to match
- let fastPattern = fromList ids
-
- -- Write out the compilers and routes
- unRules $ tellRoute $ matchRoute fastPattern route'
- unRules $ tellCompilers $ [(id', compiler) | id' <- ids]
-
- put $ emptyRulesState
-
-
---------------------------------------------------------------------------------
-matchInternal :: Pattern -> Rules [Identifier] -> Rules () -> Rules ()
-matchInternal pattern getIDs rules = do
- tellPattern pattern
- flush
- ids <- getIDs
- tellResources ids
- Rules $ local (setMatches ids) $ unRules $ rules >> flush
- where
- setMatches ids env = env {rulesMatches = ids}
-
---------------------------------------------------------------------------------
-match :: Pattern -> Rules () -> Rules ()
-match pattern = matchInternal pattern $ getMatches pattern
-
-
---------------------------------------------------------------------------------
-matchMetadata :: Pattern -> (Metadata -> Bool) -> Rules () -> Rules ()
-matchMetadata pattern metadataPred = matchInternal pattern $
- map fst . filter (metadataPred . snd) <$> getAllMetadata pattern
-
-
---------------------------------------------------------------------------------
-create :: [Identifier] -> Rules () -> Rules ()
-create ids rules = do
- flush
- -- TODO Maybe check if the resources exist and call tellResources on that
- Rules $ local setMatches $ unRules $ rules >> flush
- where
- setMatches env = env {rulesMatches = ids}
-
-
---------------------------------------------------------------------------------
-version :: String -> Rules () -> Rules ()
-version v rules = do
- flush
- Rules $ local setVersion' $ unRules $ rules >> flush
- where
- setVersion' env = env {rulesVersion = Just v}
-
-
---------------------------------------------------------------------------------
--- | Add a compilation rule to the rules.
---
--- This instructs all resources to be compiled using the given compiler.
-compile :: (Binary a, Typeable a, Writable a) => Compiler (Item a) -> Rules ()
-compile compiler = Rules $ modify $ \s ->
- s {rulesCompiler = Just (fmap SomeItem compiler)}
-
-
---------------------------------------------------------------------------------
--- | Add a route.
---
--- This adds a route for all items matching the current pattern.
-route :: Routes -> Rules ()
-route route' = Rules $ modify $ \s -> s {rulesRoute = Just route'}
-
-
---------------------------------------------------------------------------------
--- | Execute an 'IO' action immediately while the rules are being evaluated.
--- This should be avoided if possible, but occasionally comes in useful.
-preprocess :: IO a -> Rules a
-preprocess = Rules . liftIO
-
-
---------------------------------------------------------------------------------
--- | Advanced usage: add extra dependencies to compilers. Basically this is
--- needed when you're doing unsafe tricky stuff in the rules monad, but you
--- still want correct builds.
---
--- A useful utility for this purpose is 'makePatternDependency'.
-rulesExtraDependencies :: [Dependency] -> Rules a -> Rules a
-rulesExtraDependencies deps rules =
- -- Note that we add the dependencies seemingly twice here. However, this is
- -- done so that 'rulesExtraDependencies' works both if we have something
- -- like:
- --
- -- > match "*.css" $ rulesExtraDependencies [foo] $ ...
- --
- -- and something like:
- --
- -- > rulesExtraDependencies [foo] $ match "*.css" $ ...
- --
- -- (1) takes care of the latter and (2) of the former.
- Rules $ censor fixRuleSet $ do
- x <- unRules rules
- fixCompiler
- return x
- where
- -- (1) Adds the dependencies to the compilers we are yet to create
- fixCompiler = modify $ \s -> case rulesCompiler s of
- Nothing -> s
- Just c -> s
- { rulesCompiler = Just $ compilerTellDependencies deps >> c
- }
-
- -- (2) Adds the dependencies to the compilers that are already in the ruleset
- fixRuleSet ruleSet = ruleSet
- { rulesCompilers =
- [ (i, compilerTellDependencies deps >> c)
- | (i, c) <- rulesCompilers ruleSet
- ]
- }
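A minimal sketch (not part of the diff) of 'rulesExtraDependencies' together with 'makePatternDependency': the generated archive page is rebuilt whenever any post changes. The archive compiler is only a placeholder.

{-# LANGUAGE OverloadedStrings #-}
import Hakyll

main :: IO ()
main = hakyll $ do
    match "posts/*" $ do
        route   $ setExtension "html"
        compile pandocCompiler

    postsDep <- makePatternDependency "posts/*"
    rulesExtraDependencies [postsDep] $
        create ["archive.html"] $ do
            route idRoute
            compile $ makeItem ("archive placeholder" :: String)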
diff --git a/src/Hakyll/Core/Rules/Internal.hs b/src/Hakyll/Core/Rules/Internal.hs
deleted file mode 100644
index 0641dcf..0000000
--- a/src/Hakyll/Core/Rules/Internal.hs
+++ /dev/null
@@ -1,109 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE Rank2Types #-}
-module Hakyll.Core.Rules.Internal
- ( RulesRead (..)
- , RuleSet (..)
- , RulesState (..)
- , emptyRulesState
- , Rules (..)
- , runRules
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad.Reader (ask)
-import Control.Monad.RWS (RWST, runRWST)
-import Control.Monad.Trans (liftIO)
-import qualified Data.Map as M
-import Data.Set (Set)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item.SomeItem
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider
-import Hakyll.Core.Routes
-
-
---------------------------------------------------------------------------------
-data RulesRead = RulesRead
- { rulesProvider :: Provider
- , rulesMatches :: [Identifier]
- , rulesVersion :: Maybe String
- }
-
-
---------------------------------------------------------------------------------
-data RuleSet = RuleSet
- { -- | Accumulated routes
- rulesRoutes :: Routes
- , -- | Accumulated compilers
- rulesCompilers :: [(Identifier, Compiler SomeItem)]
- , -- | A set of the actually used files
- rulesResources :: Set Identifier
- , -- | A pattern we can use to check if a file *would* be used. This is
- -- needed for the preview server.
- rulesPattern :: Pattern
- }
-
-
---------------------------------------------------------------------------------
-instance Monoid RuleSet where
- mempty = RuleSet mempty mempty mempty mempty
- mappend (RuleSet r1 c1 s1 p1) (RuleSet r2 c2 s2 p2) =
- RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2) (p1 .||. p2)
-
-
---------------------------------------------------------------------------------
-data RulesState = RulesState
- { rulesRoute :: Maybe Routes
- , rulesCompiler :: Maybe (Compiler SomeItem)
- }
-
-
---------------------------------------------------------------------------------
-emptyRulesState :: RulesState
-emptyRulesState = RulesState Nothing Nothing
-
-
---------------------------------------------------------------------------------
--- | The monad used to compose rules
-newtype Rules a = Rules
- { unRules :: RWST RulesRead RuleSet RulesState IO a
- } deriving (Monad, Functor, Applicative)
-
-
---------------------------------------------------------------------------------
-instance MonadMetadata Rules where
- getMetadata identifier = Rules $ do
- provider <- rulesProvider <$> ask
- liftIO $ resourceMetadata provider identifier
-
- getMatches pattern = Rules $ do
- provider <- rulesProvider <$> ask
- return $ filterMatches pattern $ resourceList provider
-
-
---------------------------------------------------------------------------------
--- | Run a Rules monad, resulting in a 'RuleSet'
-runRules :: Rules a -> Provider -> IO RuleSet
-runRules rules provider = do
- (_, _, ruleSet) <- runRWST (unRules rules) env emptyRulesState
-
- -- Ensure compiler uniqueness
- let ruleSet' = ruleSet
- { rulesCompilers = M.toList $
- M.fromListWith (flip const) (rulesCompilers ruleSet)
- }
-
- return ruleSet'
- where
- env = RulesRead
- { rulesProvider = provider
- , rulesMatches = []
- , rulesVersion = Nothing
- }
diff --git a/src/Hakyll/Core/Runtime.hs b/src/Hakyll/Core/Runtime.hs
deleted file mode 100644
index 16a5d9e..0000000
--- a/src/Hakyll/Core/Runtime.hs
+++ /dev/null
@@ -1,276 +0,0 @@
---------------------------------------------------------------------------------
-module Hakyll.Core.Runtime
- ( run
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (unless)
-import Control.Monad.Except (ExceptT, runExceptT, throwError)
-import Control.Monad.Reader (ask)
-import Control.Monad.RWS (RWST, runRWST)
-import Control.Monad.State (get, modify)
-import Control.Monad.Trans (liftIO)
-import Data.List (intercalate)
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Set (Set)
-import qualified Data.Set as S
-import System.Exit (ExitCode (..))
-import System.FilePath ((</>))
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Compiler.Require
-import Hakyll.Core.Configuration
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Item.SomeItem
-import Hakyll.Core.Logger (Logger)
-import qualified Hakyll.Core.Logger as Logger
-import Hakyll.Core.Provider
-import Hakyll.Core.Routes
-import Hakyll.Core.Rules.Internal
-import Hakyll.Core.Store (Store)
-import qualified Hakyll.Core.Store as Store
-import Hakyll.Core.Util.File
-import Hakyll.Core.Writable
-
-
---------------------------------------------------------------------------------
-run :: Configuration -> Logger -> Rules a -> IO (ExitCode, RuleSet)
-run config logger rules = do
- -- Initialization
- Logger.header logger "Initialising..."
- Logger.message logger "Creating store..."
- store <- Store.new (inMemoryCache config) $ storeDirectory config
- Logger.message logger "Creating provider..."
- provider <- newProvider store (shouldIgnoreFile config) $
- providerDirectory config
- Logger.message logger "Running rules..."
- ruleSet <- runRules rules provider
-
- -- Get old facts
- mOldFacts <- Store.get store factsKey
- let (oldFacts) = case mOldFacts of Store.Found f -> f
- _ -> mempty
-
- -- Build runtime read/state
- let compilers = rulesCompilers ruleSet
- read' = RuntimeRead
- { runtimeConfiguration = config
- , runtimeLogger = logger
- , runtimeProvider = provider
- , runtimeStore = store
- , runtimeRoutes = rulesRoutes ruleSet
- , runtimeUniverse = M.fromList compilers
- }
- state = RuntimeState
- { runtimeDone = S.empty
- , runtimeSnapshots = S.empty
- , runtimeTodo = M.empty
- , runtimeFacts = oldFacts
- }
-
- -- Run the program and fetch the resulting state
- result <- runExceptT $ runRWST build read' state
- case result of
- Left e -> do
- Logger.error logger e
- Logger.flush logger
- return (ExitFailure 1, ruleSet)
-
- Right (_, s, _) -> do
- Store.set store factsKey $ runtimeFacts s
-
- Logger.debug logger "Removing tmp directory..."
- removeDirectory $ tmpDirectory config
-
- Logger.flush logger
- return (ExitSuccess, ruleSet)
- where
- factsKey = ["Hakyll.Core.Runtime.run", "facts"]
-
-
---------------------------------------------------------------------------------
-data RuntimeRead = RuntimeRead
- { runtimeConfiguration :: Configuration
- , runtimeLogger :: Logger
- , runtimeProvider :: Provider
- , runtimeStore :: Store
- , runtimeRoutes :: Routes
- , runtimeUniverse :: Map Identifier (Compiler SomeItem)
- }
-
-
---------------------------------------------------------------------------------
-data RuntimeState = RuntimeState
- { runtimeDone :: Set Identifier
- , runtimeSnapshots :: Set (Identifier, Snapshot)
- , runtimeTodo :: Map Identifier (Compiler SomeItem)
- , runtimeFacts :: DependencyFacts
- }
-
-
---------------------------------------------------------------------------------
-type Runtime a = RWST RuntimeRead () RuntimeState (ExceptT String IO) a
-
-
---------------------------------------------------------------------------------
-build :: Runtime ()
-build = do
- logger <- runtimeLogger <$> ask
- Logger.header logger "Checking for out-of-date items"
- scheduleOutOfDate
- Logger.header logger "Compiling"
- pickAndChase
- Logger.header logger "Success"
-
-
---------------------------------------------------------------------------------
-scheduleOutOfDate :: Runtime ()
-scheduleOutOfDate = do
- logger <- runtimeLogger <$> ask
- provider <- runtimeProvider <$> ask
- universe <- runtimeUniverse <$> ask
- facts <- runtimeFacts <$> get
- todo <- runtimeTodo <$> get
-
- let identifiers = M.keys universe
- modified = S.fromList $ flip filter identifiers $
- resourceModified provider
-
- let (ood, facts', msgs) = outOfDate identifiers modified facts
- todo' = M.filterWithKey
- (\id' _ -> id' `S.member` ood) universe
-
- -- Print messages
- mapM_ (Logger.debug logger) msgs
-
- -- Update facts and todo items
- modify $ \s -> s
- { runtimeDone = runtimeDone s `S.union`
- (S.fromList identifiers `S.difference` ood)
- , runtimeTodo = todo `M.union` todo'
- , runtimeFacts = facts'
- }
-
-
---------------------------------------------------------------------------------
-pickAndChase :: Runtime ()
-pickAndChase = do
- todo <- runtimeTodo <$> get
- case M.minViewWithKey todo of
- Nothing -> return ()
- Just ((id', _), _) -> do
- chase [] id'
- pickAndChase
-
-
---------------------------------------------------------------------------------
-chase :: [Identifier] -> Identifier -> Runtime ()
-chase trail id'
- | id' `elem` trail = throwError $ "Hakyll.Core.Runtime.chase: " ++
- "Dependency cycle detected: " ++ intercalate " depends on "
- (map show $ dropWhile (/= id') (reverse trail) ++ [id'])
- | otherwise = do
- logger <- runtimeLogger <$> ask
- todo <- runtimeTodo <$> get
- provider <- runtimeProvider <$> ask
- universe <- runtimeUniverse <$> ask
- routes <- runtimeRoutes <$> ask
- store <- runtimeStore <$> ask
- config <- runtimeConfiguration <$> ask
- Logger.debug logger $ "Processing " ++ show id'
-
- let compiler = todo M.! id'
- read' = CompilerRead
- { compilerConfig = config
- , compilerUnderlying = id'
- , compilerProvider = provider
- , compilerUniverse = M.keysSet universe
- , compilerRoutes = routes
- , compilerStore = store
- , compilerLogger = logger
- }
-
- result <- liftIO $ runCompiler compiler read'
- case result of
- -- Rethrow error
- CompilerError [] -> throwError
- "Compiler failed but no info given, try running with -v?"
- CompilerError es -> throwError $ intercalate "; " es
-
- -- Signal that a snapshot was saved ->
- CompilerSnapshot snapshot c -> do
- -- Update info. The next 'chase' will pick us again at some
- -- point so we can continue then.
- modify $ \s -> s
- { runtimeSnapshots =
- S.insert (id', snapshot) (runtimeSnapshots s)
- , runtimeTodo = M.insert id' c (runtimeTodo s)
- }
-
- -- Huge success
- CompilerDone (SomeItem item) cwrite -> do
- -- Print some info
- let facts = compilerDependencies cwrite
- cacheHits
- | compilerCacheHits cwrite <= 0 = "updated"
- | otherwise = "cached "
- Logger.message logger $ cacheHits ++ " " ++ show id'
-
- -- Sanity check
- unless (itemIdentifier item == id') $ throwError $
- "The compiler yielded an Item with Identifier " ++
- show (itemIdentifier item) ++ ", but we were expecting " ++
- "an Item with Identifier " ++ show id' ++ " " ++
- "(you probably want to call makeItem to solve this problem)"
-
- -- Write if necessary
- (mroute, _) <- liftIO $ runRoutes routes provider id'
- case mroute of
- Nothing -> return ()
- Just route -> do
- let path = destinationDirectory config </> route
- liftIO $ makeDirectories path
- liftIO $ write path item
- Logger.debug logger $ "Routed to " ++ path
-
- -- Save! (For load)
- liftIO $ save store item
-
- -- Update state
- modify $ \s -> s
- { runtimeDone = S.insert id' (runtimeDone s)
- , runtimeTodo = M.delete id' (runtimeTodo s)
- , runtimeFacts = M.insert id' facts (runtimeFacts s)
- }
-
- -- Try something else first
- CompilerRequire dep c -> do
- -- Update the compiler so we don't execute it twice
- let (depId, depSnapshot) = dep
- done <- runtimeDone <$> get
- snapshots <- runtimeSnapshots <$> get
-
- -- Done if we either completed the entire item (runtimeDone) or
- -- if we previously saved the snapshot (runtimeSnapshots).
- let depDone =
- depId `S.member` done ||
- (depId, depSnapshot) `S.member` snapshots
-
- modify $ \s -> s
- { runtimeTodo = M.insert id'
- (if depDone then c else compilerResult result)
- (runtimeTodo s)
- }
-
- -- If the required item is already compiled, continue, or, start
- -- chasing that
- Logger.debug logger $ "Require " ++ show depId ++
- " (snapshot " ++ depSnapshot ++ "): " ++
- (if depDone then "OK" else "chasing")
- if depDone then chase trail id' else chase (id' : trail) depId
diff --git a/src/Hakyll/Core/Store.hs b/src/Hakyll/Core/Store.hs
deleted file mode 100644
index fdbcf11..0000000
--- a/src/Hakyll/Core/Store.hs
+++ /dev/null
@@ -1,197 +0,0 @@
---------------------------------------------------------------------------------
--- | A store for storing and retrieving items
-{-# LANGUAGE ExistentialQuantification #-}
-{-# LANGUAGE ScopedTypeVariables #-}
-module Hakyll.Core.Store
- ( Store
- , Result (..)
- , toMaybe
- , new
- , set
- , get
- , isMember
- , delete
- , hash
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Exception (IOException, handle)
-import qualified Crypto.Hash.MD5 as MD5
-import Data.Binary (Binary, decode, encodeFile)
-import qualified Data.ByteString as B
-import qualified Data.ByteString.Lazy as BL
-import qualified Data.Cache.LRU.IO as Lru
-import Data.List (intercalate)
-import Data.Maybe (isJust)
-import qualified Data.Text as T
-import qualified Data.Text.Encoding as T
-import Data.Typeable (TypeRep, Typeable, cast, typeOf)
-import System.Directory (createDirectoryIfMissing)
-import System.Directory (doesFileExist, removeFile)
-import System.FilePath ((</>))
-import System.IO (IOMode (..), hClose, openFile)
-import Text.Printf (printf)
-
-
---------------------------------------------------------------------------------
--- | Simple wrapper type
-data Box = forall a. Typeable a => Box a
-
-
---------------------------------------------------------------------------------
-data Store = Store
- { -- | All items are stored on the filesystem
- storeDirectory :: FilePath
- , -- | Optionally, items are also kept in-memory
- storeMap :: Maybe (Lru.AtomicLRU FilePath Box)
- }
-
-
---------------------------------------------------------------------------------
-instance Show Store where
- show _ = "<Store>"
-
-
---------------------------------------------------------------------------------
--- | Result of a store query
-data Result a
- = Found a -- ^ Found, result
- | NotFound -- ^ Not found
- | WrongType TypeRep TypeRep -- ^ Expected, true type
- deriving (Show, Eq)
-
-
---------------------------------------------------------------------------------
--- | Convert result to 'Maybe'
-toMaybe :: Result a -> Maybe a
-toMaybe (Found x) = Just x
-toMaybe _ = Nothing
-
-
---------------------------------------------------------------------------------
--- | Initialize the store
-new :: Bool -- ^ Use in-memory caching
- -> FilePath -- ^ Directory to use for hard disk storage
- -> IO Store -- ^ Store
-new inMemory directory = do
- createDirectoryIfMissing True directory
- ref <- if inMemory then Just <$> Lru.newAtomicLRU csize else return Nothing
- return Store
- { storeDirectory = directory
- , storeMap = ref
- }
- where
- csize = Just 500
-
-
---------------------------------------------------------------------------------
--- | Auxiliary: add an item to the in-memory cache
-cacheInsert :: Typeable a => Store -> String -> a -> IO ()
-cacheInsert (Store _ Nothing) _ _ = return ()
-cacheInsert (Store _ (Just lru)) key x =
- Lru.insert key (Box x) lru
-
-
---------------------------------------------------------------------------------
--- | Auxiliary: get an item from the in-memory cache
-cacheLookup :: forall a. Typeable a => Store -> String -> IO (Result a)
-cacheLookup (Store _ Nothing) _ = return NotFound
-cacheLookup (Store _ (Just lru)) key = do
- res <- Lru.lookup key lru
- return $ case res of
- Nothing -> NotFound
- Just (Box x) -> case cast x of
- Just x' -> Found x'
- Nothing -> WrongType (typeOf (undefined :: a)) (typeOf x)
-
-
---------------------------------------------------------------------------------
-cacheIsMember :: Store -> String -> IO Bool
-cacheIsMember (Store _ Nothing) _ = return False
-cacheIsMember (Store _ (Just lru)) key = isJust <$> Lru.lookup key lru
-
-
---------------------------------------------------------------------------------
--- | Auxiliary: delete an item from the in-memory cache
-cacheDelete :: Store -> String -> IO ()
-cacheDelete (Store _ Nothing) _ = return ()
-cacheDelete (Store _ (Just lru)) key = do
- _ <- Lru.delete key lru
- return ()
-
-
---------------------------------------------------------------------------------
--- | Store an item
-set :: (Binary a, Typeable a) => Store -> [String] -> a -> IO ()
-set store identifier value = do
- encodeFile (storeDirectory store </> key) value
- cacheInsert store key value
- where
- key = hash identifier
-
-
---------------------------------------------------------------------------------
--- | Load an item
-get :: (Binary a, Typeable a) => Store -> [String] -> IO (Result a)
-get store identifier = do
- -- First check the in-memory map
- ref <- cacheLookup store key
- case ref of
- -- Not found in the map, try the filesystem
- NotFound -> do
- exists <- doesFileExist path
- if not exists
- -- Not found in the filesystem either
- then return NotFound
- -- Found in the filesystem
- else do
- v <- decodeClose
- cacheInsert store key v
- return $ Found v
- -- Found in the in-memory map (or wrong type), just return
- s -> return s
- where
- key = hash identifier
- path = storeDirectory store </> key
-
- -- 'decodeFile' from Data.Binary which closes the file ASAP
- decodeClose = do
- h <- openFile path ReadMode
- lbs <- BL.hGetContents h
- BL.length lbs `seq` hClose h
- return $ decode lbs
-
-
---------------------------------------------------------------------------------
--- | Strictly check whether an item is present in the store
-isMember :: Store -> [String] -> IO Bool
-isMember store identifier = do
- inCache <- cacheIsMember store key
- if inCache then return True else doesFileExist path
- where
- key = hash identifier
- path = storeDirectory store </> key
-
-
---------------------------------------------------------------------------------
--- | Delete an item
-delete :: Store -> [String] -> IO ()
-delete store identifier = do
- cacheDelete store key
- deleteFile $ storeDirectory store </> key
- where
- key = hash identifier
-
-
---------------------------------------------------------------------------------
--- | Delete a file, ignoring the error if it does not exist
-deleteFile :: FilePath -> IO ()
-deleteFile = handle (\(_ :: IOException) -> return ()) . removeFile
-
-
---------------------------------------------------------------------------------
--- | Mostly meant for internal usage
-hash :: [String] -> String
-hash = concatMap (printf "%02x") . B.unpack .
- MD5.hash . T.encodeUtf8 . T.pack . intercalate "/"
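
For orientation, a minimal sketch of how this Store API fits together when used directly: keys are lists of strings, stored values need Binary and Typeable instances, and toMaybe collapses the Result. The "_cache" directory and the key below are made-up examples.

    import qualified Hakyll.Core.Store as Store

    storeDemo :: IO ()
    storeDemo = do
        -- 'True' puts the in-memory LRU cache in front of the on-disk files.
        store <- Store.new True "_cache"
        Store.set store ["demo", "greeting"] ("hello" :: String)
        result <- Store.get store ["demo", "greeting"] :: IO (Store.Result String)
        putStrLn $ case Store.toMaybe result of
            Just s  -> s
            Nothing -> "miss (or wrong type)"
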
diff --git a/src/Hakyll/Core/UnixFilter.hs b/src/Hakyll/Core/UnixFilter.hs
deleted file mode 100644
index 734d8d8..0000000
--- a/src/Hakyll/Core/UnixFilter.hs
+++ /dev/null
@@ -1,159 +0,0 @@
-{-# LANGUAGE CPP #-}
-
---------------------------------------------------------------------------------
--- | A Compiler that supports unix filters.
-module Hakyll.Core.UnixFilter
- ( unixFilter
- , unixFilterLBS
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Concurrent (forkIO)
-import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
-import Control.DeepSeq (deepseq)
-import Control.Monad (forM_)
-import Data.ByteString.Lazy (ByteString)
-import qualified Data.ByteString.Lazy as LB
-import Data.IORef (newIORef, readIORef, writeIORef)
-import System.Exit (ExitCode (..))
-import System.IO (Handle, hClose, hFlush, hGetContents,
- hPutStr, hSetEncoding, localeEncoding)
-import System.Process
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-
-
---------------------------------------------------------------------------------
--- | Use a unix filter as a compiler. For example, we could use the 'rev' program
--- as a compiler.
---
--- > rev :: Compiler (Item String)
--- > rev = getResourceString >>= withItemBody (unixFilter "rev" [])
---
--- A more realistic example: one can use this to call, for example, the sass
--- compiler on CSS files. More information about sass can be found here:
---
--- <http://sass-lang.com/>
---
--- The code is fairly straightforward, given that we use @.scss@ for sass:
---
--- > match "style.scss" $ do
--- > route $ setExtension "css"
--- > compile $ getResourceString >>=
--- > withItemBody (unixFilter "sass" ["-s", "--scss"]) >>=
--- > return . fmap compressCss
-unixFilter :: String -- ^ Program name
- -> [String] -- ^ Program args
- -> String -- ^ Program input
- -> Compiler String -- ^ Program output
-unixFilter = unixFilterWith writer reader
- where
- writer handle input = do
- hSetEncoding handle localeEncoding
- hPutStr handle input
- reader handle = do
- hSetEncoding handle localeEncoding
- out <- hGetContents handle
- deepseq out (return out)
-
-
---------------------------------------------------------------------------------
--- | Variant of 'unixFilter' that should be used for binary files
---
--- > match "music.wav" $ do
--- > route $ setExtension "ogg"
--- > compile $ getResourceLBS >>= withItemBody (unixFilterLBS "oggenc" ["-"])
-unixFilterLBS :: String -- ^ Program name
- -> [String] -- ^ Program args
- -> ByteString -- ^ Program input
- -> Compiler ByteString -- ^ Program output
-unixFilterLBS = unixFilterWith LB.hPutStr $ \handle -> do
- out <- LB.hGetContents handle
- LB.length out `seq` return out
-
-
---------------------------------------------------------------------------------
--- | Overloaded compiler
-unixFilterWith :: Monoid o
- => (Handle -> i -> IO ()) -- ^ Writer
- -> (Handle -> IO o) -- ^ Reader
- -> String -- ^ Program name
- -> [String] -- ^ Program args
- -> i -- ^ Program input
- -> Compiler o -- ^ Program output
-unixFilterWith writer reader programName args input = do
- debugCompiler ("Executing external program " ++ programName)
- (output, err, exitCode) <- unsafeCompiler $
- unixFilterIO writer reader programName args input
- forM_ (lines err) debugCompiler
- case exitCode of
- ExitSuccess -> return output
- ExitFailure e -> fail $
- "Hakyll.Core.UnixFilter.unixFilterWith: " ++
- unwords (programName : args) ++ " gave exit code " ++ show e
-
-
---------------------------------------------------------------------------------
--- | Internally used function
-unixFilterIO :: Monoid o
- => (Handle -> i -> IO ())
- -> (Handle -> IO o)
- -> String
- -> [String]
- -> i
- -> IO (o, String, ExitCode)
-unixFilterIO writer reader programName args input = do
-    -- The problem on Windows is that `proc` is unable to execute
-    -- batch stubs (e.g. anything created using 'gem install ...') even if
-    -- they are in `$PATH`. A solution is to execute the batch file explicitly
-    -- using `cmd /c batchfile`, but there is no reliable way to know where
-    -- said batchfile lives on the system. Hence, we detect Windows using CPP
-    -- and, instead of creating the process with `proc`, use `shell`, which
-    -- can execute everything `proc` can, as well as batch files.
-#ifdef mingw32_HOST_OS
- let pr = shell $ unwords (programName : args)
-#else
- let pr = proc programName args
-#endif
-
- (Just inh, Just outh, Just errh, pid) <-
- createProcess pr
- { std_in = CreatePipe
- , std_out = CreatePipe
- , std_err = CreatePipe
- }
-
- -- Create boxes
- lock <- newEmptyMVar
- outRef <- newIORef mempty
- errRef <- newIORef ""
-
- -- Write the input to the child pipe
- _ <- forkIO $ writer inh input >> hFlush inh >> hClose inh
-
- -- Read from stdout
- _ <- forkIO $ do
- out <- reader outh
- hClose outh
- writeIORef outRef out
- putMVar lock ()
-
- -- Read from stderr
- _ <- forkIO $ do
- hSetEncoding errh localeEncoding
- err <- hGetContents errh
- _ <- deepseq err (return err)
- hClose errh
- writeIORef errRef err
- putMVar lock ()
-
- -- Get exit code & return
- takeMVar lock
- takeMVar lock
- exitCode <- waitForProcess pid
- out <- readIORef outRef
- err <- readIORef errRef
- return (out, err, exitCode)
diff --git a/src/Hakyll/Core/Util/File.hs b/src/Hakyll/Core/Util/File.hs
deleted file mode 100644
index 9db6b11..0000000
--- a/src/Hakyll/Core/Util/File.hs
+++ /dev/null
@@ -1,56 +0,0 @@
---------------------------------------------------------------------------------
--- | A module containing various file utility functions
-module Hakyll.Core.Util.File
- ( makeDirectories
- , getRecursiveContents
- , removeDirectory
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (filterM, forM, when)
-import System.Directory (createDirectoryIfMissing,
- doesDirectoryExist, getDirectoryContents,
- removeDirectoryRecursive)
-import System.FilePath (takeDirectory, (</>))
-
-
---------------------------------------------------------------------------------
--- | Given a path to a file, try to make the path writable by making
--- all directories on the path.
-makeDirectories :: FilePath -> IO ()
-makeDirectories = createDirectoryIfMissing True . takeDirectory
-
-
---------------------------------------------------------------------------------
--- | Get all contents of a directory.
-getRecursiveContents :: (FilePath -> IO Bool) -- ^ Ignore this file/directory
- -> FilePath -- ^ Directory to search
- -> IO [FilePath] -- ^ List of files found
-getRecursiveContents ignore top = go ""
- where
- isProper x
- | x `elem` [".", ".."] = return False
- | otherwise = not <$> ignore x
-
- go dir = do
- dirExists <- doesDirectoryExist (top </> dir)
- if not dirExists
- then return []
- else do
- names <- filterM isProper =<< getDirectoryContents (top </> dir)
- paths <- forM names $ \name -> do
- let rel = dir </> name
- isDirectory <- doesDirectoryExist (top </> rel)
- if isDirectory
- then go rel
- else return [rel]
-
- return $ concat paths
-
-
---------------------------------------------------------------------------------
-removeDirectory :: FilePath -> IO ()
-removeDirectory fp = do
- e <- doesDirectoryExist fp
- when e $ removeDirectoryRecursive fp
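
As a small, hypothetical use of getRecursiveContents: the ignore callback receives each directory entry name, so a dot-prefix test skips hidden entries at any depth; "posts" is just an example directory.

    import Data.List (isPrefixOf)
    import Hakyll.Core.Util.File (getRecursiveContents)

    -- Returned paths are relative to the top directory that was passed in.
    listPosts :: IO [FilePath]
    listPosts = getRecursiveContents (\name -> return ("." `isPrefixOf` name)) "posts"
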
diff --git a/src/Hakyll/Core/Util/Parser.hs b/src/Hakyll/Core/Util/Parser.hs
deleted file mode 100644
index c4b2f8d..0000000
--- a/src/Hakyll/Core/Util/Parser.hs
+++ /dev/null
@@ -1,32 +0,0 @@
---------------------------------------------------------------------------------
--- | Parser utilities
-module Hakyll.Core.Util.Parser
- ( metadataKey
- , reservedKeys
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative ((<|>))
-import Control.Monad (guard, mzero, void)
-import qualified Text.Parsec as P
-import Text.Parsec.String (Parser)
-
-
---------------------------------------------------------------------------------
-metadataKey :: Parser String
-metadataKey = do
- -- Ensure trailing '-' binds to '$' if present.
- let hyphon = P.try $ do
- void $ P.char '-'
- x <- P.lookAhead P.anyChar
- guard $ x /= '$'
- pure '-'
-
- i <- (:) <$> P.letter <*> P.many (P.alphaNum <|> P.oneOf "_." <|> hyphon)
- if i `elem` reservedKeys then mzero else return i
-
-
---------------------------------------------------------------------------------
-reservedKeys :: [String]
-reservedKeys = ["if", "else", "endif", "for", "sep", "endfor", "partial"]
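
A hypothetical way to run metadataKey on its own with Parsec (the source name "<metadata>" is arbitrary): "author-name" parses in full, "if" fails because it is reserved, and on "key-$x" the parser stops before the hyphen so the '-' is left for the '$'.

    import qualified Text.Parsec as P
    import Hakyll.Core.Util.Parser (metadataKey)

    parseKey :: String -> Either P.ParseError String
    parseKey = P.parse metadataKey "<metadata>"
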
diff --git a/src/Hakyll/Core/Util/String.hs b/src/Hakyll/Core/Util/String.hs
deleted file mode 100644
index 23bdd39..0000000
--- a/src/Hakyll/Core/Util/String.hs
+++ /dev/null
@@ -1,78 +0,0 @@
-{-# LANGUAGE FlexibleContexts #-}
---------------------------------------------------------------------------------
--- | Miscellaneous string manipulation functions.
-module Hakyll.Core.Util.String
- ( trim
- , replaceAll
- , splitAll
- , needlePrefix
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Char (isSpace)
-import Data.List (isPrefixOf)
-import Data.Maybe (listToMaybe)
-import Text.Regex.TDFA ((=~~))
-
-
---------------------------------------------------------------------------------
--- | Trim a string (drop spaces, tabs and newlines at both sides).
-trim :: String -> String
-trim = reverse . trim' . reverse . trim'
- where
- trim' = dropWhile isSpace
-
-
---------------------------------------------------------------------------------
--- | A simple (but inefficient) regex replace function
-replaceAll :: String -- ^ Pattern
- -> (String -> String) -- ^ Replacement (called on capture)
- -> String -- ^ Source string
- -> String -- ^ Result
-replaceAll pattern f source = replaceAll' source
- where
- replaceAll' src = case listToMaybe (src =~~ pattern) of
- Nothing -> src
- Just (o, l) ->
- let (before, tmp) = splitAt o src
- (capture, after) = splitAt l tmp
- in before ++ f capture ++ replaceAll' after
-
-
---------------------------------------------------------------------------------
--- | A simple regex split function. The resulting list will contain no empty
--- strings.
-splitAll :: String -- ^ Pattern
- -> String -- ^ String to split
- -> [String] -- ^ Result
-splitAll pattern = filter (not . null) . splitAll'
- where
- splitAll' src = case listToMaybe (src =~~ pattern) of
- Nothing -> [src]
- Just (o, l) ->
- let (before, tmp) = splitAt o src
- in before : splitAll' (drop l tmp)
-
-
-
---------------------------------------------------------------------------------
--- | Find the first instance of needle (must be non-empty) in haystack. We
--- return the prefix of haystack before needle is matched.
---
--- Examples:
---
--- > needlePrefix "cd" "abcde" = Just "ab"
---
--- > needlePrefix "ab" "abc" = Just ""
---
--- > needlePrefix "ab" "xxab" = Just "xx"
---
--- > needlePrefix "a" "xx" = Nothing
-needlePrefix :: String -> String -> Maybe String
-needlePrefix needle haystack = go [] haystack
- where
- go _ [] = Nothing
- go acc xss@(x:xs)
- | needle `isPrefixOf` xss = Just $ reverse acc
- | otherwise = go (x : acc) xs
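
To make the behaviour concrete, a few hypothetical calls (regex syntax as accepted by Text.Regex.TDFA); the expected results are shown in comments.

    import Hakyll.Core.Util.String (replaceAll, splitAll, trim)

    stringExamples :: [String]
    stringExamples =
        [ trim "  hello\n"                              -- "hello"
        , replaceAll "[0-9]+" (const "N") "12 May 2017" -- "N May N"
        ] ++ splitAll "," "a, b,,c"                     -- ["a", " b", "c"]
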
diff --git a/src/Hakyll/Core/Writable.hs b/src/Hakyll/Core/Writable.hs
deleted file mode 100644
index cad6cf1..0000000
--- a/src/Hakyll/Core/Writable.hs
+++ /dev/null
@@ -1,56 +0,0 @@
---------------------------------------------------------------------------------
--- | Describes writable items; items that can be saved to the disk
-{-# LANGUAGE FlexibleInstances #-}
-{-# LANGUAGE TypeSynonymInstances #-}
-module Hakyll.Core.Writable
- ( Writable (..)
- ) where
-
-
---------------------------------------------------------------------------------
-import qualified Data.ByteString as SB
-import qualified Data.ByteString.Lazy as LB
-import Data.Word (Word8)
-import Text.Blaze.Html (Html)
-import Text.Blaze.Html.Renderer.String (renderHtml)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Item
-
-
---------------------------------------------------------------------------------
--- | Describes an item that can be saved to the disk
-class Writable a where
- -- | Save an item to the given filepath
- write :: FilePath -> Item a -> IO ()
-
-
---------------------------------------------------------------------------------
-instance Writable () where
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-instance Writable [Char] where
- write p = writeFile p . itemBody
-
-
---------------------------------------------------------------------------------
-instance Writable SB.ByteString where
- write p = SB.writeFile p . itemBody
-
-
---------------------------------------------------------------------------------
-instance Writable LB.ByteString where
- write p = LB.writeFile p . itemBody
-
-
---------------------------------------------------------------------------------
-instance Writable [Word8] where
- write p = write p . fmap SB.pack
-
-
---------------------------------------------------------------------------------
-instance Writable Html where
- write p = write p . fmap renderHtml
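
Instances for user-defined types follow the same shape; a minimal sketch with a hypothetical Sitemap wrapper whose body is already rendered text:

    import Hakyll.Core.Item (itemBody)
    import Hakyll.Core.Writable (Writable (..))

    newtype Sitemap = Sitemap { sitemapText :: String }

    instance Writable Sitemap where
        write p = writeFile p . sitemapText . itemBody
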
diff --git a/src/Hakyll/Main.hs b/src/Hakyll/Main.hs
deleted file mode 100644
index b5c645f..0000000
--- a/src/Hakyll/Main.hs
+++ /dev/null
@@ -1,165 +0,0 @@
---------------------------------------------------------------------------------
--- | Module providing the main hakyll function and command-line argument parsing
-{-# LANGUAGE CPP #-}
-
-module Hakyll.Main
- ( hakyll
- , hakyllWith
- , hakyllWithArgs
- , hakyllWithExitCode
- ) where
-
-
---------------------------------------------------------------------------------
-import System.Environment (getProgName)
-import System.Exit (ExitCode (ExitSuccess), exitWith)
-import System.IO.Unsafe (unsafePerformIO)
-
-
---------------------------------------------------------------------------------
-import Data.Monoid ((<>))
-import qualified Options.Applicative as OA
-
-
---------------------------------------------------------------------------------
-import qualified Hakyll.Check as Check
-import qualified Hakyll.Commands as Commands
-import qualified Hakyll.Core.Configuration as Config
-import qualified Hakyll.Core.Logger as Logger
-import Hakyll.Core.Rules
-
-
---------------------------------------------------------------------------------
--- | This is usually the function with which the user runs the hakyll compiler
-hakyll :: Rules a -> IO ()
-hakyll = hakyllWith Config.defaultConfiguration
-
---------------------------------------------------------------------------------
--- | A variant of 'hakyll' which allows the user to specify a custom
--- configuration
-hakyllWith :: Config.Configuration -> Rules a -> IO ()
-hakyllWith conf rules = hakyllWithExitCode conf rules >>= exitWith
-
---------------------------------------------------------------------------------
--- | A variant of 'hakyll' which returns an 'ExitCode'
-hakyllWithExitCode :: Config.Configuration -> Rules a -> IO ExitCode
-hakyllWithExitCode conf rules = do
- args <- defaultParser conf
- hakyllWithExitCodeAndArgs conf args rules
-
---------------------------------------------------------------------------------
--- | A variant of 'hakyll' which expects a 'Configuration' and command-line
--- 'Options'. This gives freedom to implement your own parsing.
-hakyllWithArgs :: Config.Configuration -> Options -> Rules a -> IO ()
-hakyllWithArgs conf args rules =
- hakyllWithExitCodeAndArgs conf args rules >>= exitWith
-
---------------------------------------------------------------------------------
-hakyllWithExitCodeAndArgs :: Config.Configuration ->
- Options -> Rules a -> IO ExitCode
-hakyllWithExitCodeAndArgs conf args rules = do
- let args' = optCommand args
- verbosity' = if verbosity args then Logger.Debug else Logger.Message
- check =
- if internal_links args' then Check.InternalLinks else Check.All
-
- logger <- Logger.new verbosity'
- invokeCommands args' conf check logger rules
-
---------------------------------------------------------------------------------
-defaultParser :: Config.Configuration -> IO Options
-defaultParser conf =
- OA.customExecParser (OA.prefs OA.showHelpOnError)
- (OA.info (OA.helper <*> optionParser conf)
- (OA.fullDesc <> OA.progDesc
- (progName ++ " - Static site compiler created with Hakyll")))
-
-
---------------------------------------------------------------------------------
-invokeCommands :: Command -> Config.Configuration ->
- Check.Check -> Logger.Logger -> Rules a -> IO ExitCode
-invokeCommands args conf check logger rules =
- case args of
- Build -> Commands.build conf logger rules
- Check _ -> Commands.check conf logger check >> ok
- Clean -> Commands.clean conf logger >> ok
- Deploy -> Commands.deploy conf
- Preview p -> Commands.preview conf logger rules p >> ok
- Rebuild -> Commands.rebuild conf logger rules
- Server _ _ -> Commands.server conf logger (host args) (port args) >> ok
- Watch _ p s -> Commands.watch conf logger (host args) p (not s) rules >> ok
- where
- ok = return ExitSuccess
-
-
---------------------------------------------------------------------------------
-
-data Options = Options {verbosity :: Bool, optCommand :: Command}
- deriving (Show)
-
-data Command
- = Build
- | Check {internal_links :: Bool}
- | Clean
- | Deploy
- | Preview {port :: Int}
- | Rebuild
- | Server {host :: String, port :: Int}
- | Watch {host :: String, port :: Int, no_server :: Bool }
- deriving (Show)
-
-optionParser :: Config.Configuration -> OA.Parser Options
-optionParser conf = Options <$> verboseParser <*> commandParser conf
- where
- verboseParser = OA.switch (OA.long "verbose" <> OA.short 'v' <> OA.help "Run in verbose mode")
-
-
-commandParser :: Config.Configuration -> OA.Parser Command
-commandParser conf = OA.subparser $ foldr ((<>) . produceCommand) mempty commands
- where
- portParser = OA.option OA.auto (OA.long "port" <> OA.help "Port to listen on" <> OA.value (Config.previewPort conf))
- hostParser = OA.strOption (OA.long "host" <> OA.help "Host to bind on" <> OA.value (Config.previewHost conf))
-
- produceCommand (c,a,b) = OA.command c (OA.info (OA.helper <*> a) (b))
-
- commands =
- [ ( "build"
- , pure Build
- , OA.fullDesc <> OA.progDesc "Generate the site"
- )
- , ( "check"
- , pure Check <*> OA.switch (OA.long "internal-links" <> OA.help "Check internal links only")
- , OA.fullDesc <> OA.progDesc "Validate the site output"
- )
- , ( "clean"
- , pure Clean
- , OA.fullDesc <> OA.progDesc "Clean up and remove cache"
- )
- , ( "deploy"
- , pure Deploy
- , OA.fullDesc <> OA.progDesc "Upload/deploy your site"
- )
- , ( "preview"
- , pure Preview <*> portParser
- , OA.fullDesc <> OA.progDesc "[DEPRECATED] Please use the watch command"
- )
- , ( "rebuild"
- , pure Rebuild
- , OA.fullDesc <> OA.progDesc "Clean and build again"
- )
- , ( "server"
- , pure Server <*> hostParser <*> portParser
- , OA.fullDesc <> OA.progDesc "Start a preview server"
- )
- , ( "watch"
- , pure Watch <*> hostParser <*> portParser <*> OA.switch (OA.long "no-server" <> OA.help "Disable the built-in web server")
- , OA.fullDesc <> OA.progDesc "Autocompile on changes and start a preview server. You can watch and recompile without running a server with --no-server."
- )
- ]
-
-
---------------------------------------------------------------------------------
--- | This is necessary because not everyone calls their program the same...
-progName :: String
-progName = unsafePerformIO getProgName
-{-# NOINLINE progName #-}
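
For context, the option parser above is what a conventional site.hs ends up driving; a minimal sketch (the patterns and rules are made up, and OverloadedStrings is needed for the pattern literals):

    {-# LANGUAGE OverloadedStrings #-}
    import Hakyll

    main :: IO ()
    main = hakyll $ do
        match "css/*" $ do
            route   idRoute
            compile compressCssCompiler

        match "posts/*" $ do
            route   $ setExtension "html"
            compile pandocCompiler
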
diff --git a/src/Hakyll/Preview/Poll.hs b/src/Hakyll/Preview/Poll.hs
deleted file mode 100644
index e197d3f..0000000
--- a/src/Hakyll/Preview/Poll.hs
+++ /dev/null
@@ -1,119 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE CPP #-}
-module Hakyll.Preview.Poll
- ( watchUpdates
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Concurrent (forkIO)
-import Control.Concurrent.MVar (newEmptyMVar, takeMVar,
- tryPutMVar)
-import Control.Exception (AsyncException, fromException,
- handle, throw)
-import Control.Monad (forever, void, when)
-import System.Directory (canonicalizePath)
-import System.FilePath (pathSeparators)
-import System.FSNotify (Event (..), startManager,
- watchTree)
-
-#ifdef mingw32_HOST_OS
-import Control.Concurrent (threadDelay)
-import Control.Exception (IOException, throw, try)
-import System.Directory (doesFileExist)
-import System.Exit (exitFailure)
-import System.FilePath ((</>))
-import System.IO (Handle, IOMode (ReadMode),
- hClose, openFile)
-import System.IO.Error (isPermissionError)
-#endif
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Configuration
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-
-
---------------------------------------------------------------------------------
--- | A thread that watches for updates in a 'providerDirectory' and recompiles
--- a site as soon as any changes occur
-watchUpdates :: Configuration -> IO Pattern -> IO ()
-watchUpdates conf update = do
- let providerDir = providerDirectory conf
- shouldBuild <- newEmptyMVar
- pattern <- update
- fullProviderDir <- canonicalizePath $ providerDirectory conf
- manager <- startManager
-
- let allowed event = do
- -- Absolute path of the changed file. This must be inside provider
- -- dir, since that's the only dir we're watching.
- let path = eventPath event
- relative = dropWhile (`elem` pathSeparators) $
- drop (length fullProviderDir) path
- identifier = fromFilePath relative
-
- shouldIgnore <- shouldIgnoreFile conf path
- return $ not shouldIgnore && matches pattern identifier
-
- -- This thread continually watches the `shouldBuild` MVar and builds
- -- whenever a value is present.
- _ <- forkIO $ forever $ do
- event <- takeMVar shouldBuild
- handle
- (\e -> case fromException e of
- Nothing -> putStrLn (show e)
- Just async -> throw (async :: AsyncException))
- (update' event providerDir)
-
- -- Send an event whenever something occurs so that the thread described
- -- above will do a build.
- void $ watchTree manager providerDir (not . isRemove) $ \event -> do
- allowed' <- allowed event
- when allowed' $ void $ tryPutMVar shouldBuild event
- where
-#ifndef mingw32_HOST_OS
- update' _ _ = void update
-#else
- update' event provider = do
- let path = provider </> eventPath event
- -- on windows, a 'Modified' event is also sent on file deletion
- fileExists <- doesFileExist path
-
- when fileExists . void $ waitOpen path ReadMode (\_ -> update) 10
-
- -- continuously attempts to open the file in between sleep intervals
- -- handler is run only once it is able to open the file
- waitOpen :: FilePath -> IOMode -> (Handle -> IO r) -> Integer -> IO r
- waitOpen _ _ _ 0 = do
- putStrLn "[ERROR] Failed to retrieve modified file for regeneration"
- exitFailure
- waitOpen path mode handler retries = do
- res <- try $ openFile path mode :: IO (Either IOException Handle)
- case res of
- Left ex -> if isPermissionError ex
- then do
- threadDelay 100000
- waitOpen path mode handler (retries - 1)
- else throw ex
- Right h -> do
- handled <- handler h
- hClose h
- return handled
-#endif
-
-
---------------------------------------------------------------------------------
-eventPath :: Event -> FilePath
-eventPath evt = evtPath evt
- where
- evtPath (Added p _) = p
- evtPath (Modified p _) = p
- evtPath (Removed p _) = p
-
-
---------------------------------------------------------------------------------
-isRemove :: Event -> Bool
-isRemove (Removed _ _) = True
-isRemove _ = False
diff --git a/src/Hakyll/Preview/Server.hs b/src/Hakyll/Preview/Server.hs
deleted file mode 100644
index a84016a..0000000
--- a/src/Hakyll/Preview/Server.hs
+++ /dev/null
@@ -1,35 +0,0 @@
---------------------------------------------------------------------------------
--- | Implements a basic static file server for previewing the generated site
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Preview.Server
- ( staticServer
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.String
-import qualified Network.Wai.Handler.Warp as Warp
-import qualified Network.Wai.Application.Static as Static
-import qualified Network.Wai as Wai
-import Network.HTTP.Types.Status (Status)
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Logger (Logger)
-import qualified Hakyll.Core.Logger as Logger
-
-staticServer :: Logger -- ^ Logger
- -> FilePath -- ^ Directory to serve
- -> String -- ^ Host to bind on
- -> Int -- ^ Port to listen on
- -> IO () -- ^ Blocks forever
-staticServer logger directory host port = do
- Logger.header logger $ "Listening on http://" ++ host ++ ":" ++ show port
- Warp.runSettings warpSettings $
- Static.staticApp (Static.defaultFileServerSettings directory)
- where
- warpSettings = Warp.setLogger noLog
- $ Warp.setHost (fromString host)
- $ Warp.setPort port Warp.defaultSettings
-
-noLog :: Wai.Request -> Status -> Maybe Integer -> IO ()
-noLog _ _ _ = return ()
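
A small sketch of calling staticServer directly, outside of the server/watch commands, assuming the site has already been generated into "_site" (host and port are arbitrary):

    import qualified Hakyll.Core.Logger as Logger
    import Hakyll.Preview.Server (staticServer)

    main :: IO ()
    main = do
        logger <- Logger.new Logger.Message
        staticServer logger "_site" "127.0.0.1" 8000
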
diff --git a/src/Hakyll/Web/CompressCss.hs b/src/Hakyll/Web/CompressCss.hs
deleted file mode 100644
index 9f61534..0000000
--- a/src/Hakyll/Web/CompressCss.hs
+++ /dev/null
@@ -1,86 +0,0 @@
---------------------------------------------------------------------------------
--- | Module used for CSS compression. The compression is currently fairly
--- simple, but typically reduces the number of bytes by about 25%.
-module Hakyll.Web.CompressCss
- ( compressCssCompiler
- , compressCss
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.List (isPrefixOf)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-
-
---------------------------------------------------------------------------------
--- | Compiler form of 'compressCss'
-compressCssCompiler :: Compiler (Item String)
-compressCssCompiler = fmap compressCss <$> getResourceString
-
-
---------------------------------------------------------------------------------
--- | Compress CSS to speed up your site.
-compressCss :: String -> String
-compressCss = compressSeparators . stripComments . compressWhitespace
-
-
---------------------------------------------------------------------------------
--- | Compresses certain forms of separators.
-compressSeparators :: String -> String
-compressSeparators [] = []
-compressSeparators str
- | isConstant = head str : retainConstants compressSeparators (head str) (drop 1 str)
- | stripFirst = compressSeparators (drop 1 str)
- | stripSecond = compressSeparators (head str : (drop 2 str))
- | otherwise = head str : compressSeparators (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- stripFirst = or $ map (isOfPrefix str) $ [";;", ";}"] ++ (map (\c -> " " ++ c) separators)
- stripSecond = or $ map (isOfPrefix str) $ map (\c -> c ++ " ") separators
- separators = [" ", "{", "}", ":", ";", ",", ">", "+", "!"]
-
---------------------------------------------------------------------------------
--- | Compresses all whitespace.
-compressWhitespace :: String -> String
-compressWhitespace [] = []
-compressWhitespace str
- | isConstant = head str : retainConstants compressWhitespace (head str) (drop 1 str)
- | replaceOne = compressWhitespace (' ' : (drop 1 str))
- | replaceTwo = compressWhitespace (' ' : (drop 2 str))
- | otherwise = head str : compressWhitespace (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- replaceOne = or $ map (isOfPrefix str) ["\t", "\n", "\r"]
- replaceTwo = or $ map (isOfPrefix str) [" \t", " \n", " \r", " "]
-
---------------------------------------------------------------------------------
--- | Function that strips CSS comments away.
-stripComments :: String -> String
-stripComments [] = []
-stripComments str
- | isConstant = head str : retainConstants stripComments (head str) (drop 1 str)
- | isPrefixOf "/*" str = stripComments $ eatComments $ drop 2 str
- | otherwise = head str : stripComments (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- eatComments str'
- | null str' = []
- | isPrefixOf "*/" str' = drop 2 str'
- | otherwise = eatComments $ drop 1 str'
-
---------------------------------------------------------------------------------
--- | Helper function to handle string constants correctly.
-retainConstants :: (String -> String) -> Char -> String -> String
-retainConstants f delim str
- | null str = []
- | isPrefixOf [delim] str = head str : f (drop 1 str)
- | otherwise = head str : retainConstants f delim (drop 1 str)
-
---------------------------------------------------------------------------------
--- | Helper function to check whether the second string is a prefix of the first.
-isOfPrefix :: String -> String -> Bool
-isOfPrefix = flip isPrefixOf
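
A concrete illustration of the pipeline above, with whitespace collapsed, the comment stripped and separators tightened:

    import Hakyll.Web.CompressCss (compressCss)

    -- compressCss "a  {  color : red ;  } /* note */"  ==  "a{color:red}"
    compressed :: String
    compressed = compressCss "a  {  color : red ;  } /* note */"
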
diff --git a/src/Hakyll/Web/Feed.hs b/src/Hakyll/Web/Feed.hs
deleted file mode 100644
index 6c6fa76..0000000
--- a/src/Hakyll/Web/Feed.hs
+++ /dev/null
@@ -1,135 +0,0 @@
---------------------------------------------------------------------------------
--- | A module that allows easy rendering of RSS and Atom feeds.
---
--- The main rendering functions (@renderRss@, @renderAtom@) all assume that
--- you pass the list of items so that the most recent entry in the feed is the
--- first item in the list.
---
--- Also note that the context should have (at least) the following fields to
--- produce a correct feed:
---
--- - @$title$@: Title of the item
---
--- - @$description$@: Description to appear in the feed
---
--- - @$url$@: URL to the item - this is usually set automatically.
---
--- In addition, the posts should be named according to the rules for
--- 'Hakyll.Web.Template.Context.dateField'
-module Hakyll.Web.Feed
- ( FeedConfiguration (..)
- , renderRss
- , renderAtom
- ) where
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Item
-import Hakyll.Core.Util.String (replaceAll)
-import Hakyll.Web.Template
-import Hakyll.Web.Template.Context
-import Hakyll.Web.Template.List
-
-
---------------------------------------------------------------------------------
-import Paths_hakyll
-
-
---------------------------------------------------------------------------------
--- | This is a data structure to keep the configuration of a feed.
-data FeedConfiguration = FeedConfiguration
- { -- | Title of the feed.
- feedTitle :: String
- , -- | Description of the feed.
- feedDescription :: String
- , -- | Name of the feed author.
- feedAuthorName :: String
- , -- | Email of the feed author.
- feedAuthorEmail :: String
- , -- | Absolute root URL of the feed site (e.g. @http://jaspervdj.be@)
- feedRoot :: String
- } deriving (Show, Eq)
-
-
---------------------------------------------------------------------------------
--- | Abstract function to render any feed.
-renderFeed :: FilePath -- ^ Feed template
- -> FilePath -- ^ Item template
- -> FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Context for the items
- -> [Item String] -- ^ Input items
- -> Compiler (Item String) -- ^ Resulting item
-renderFeed feedPath itemPath config itemContext items = do
- feedTpl <- loadTemplate feedPath
- itemTpl <- loadTemplate itemPath
-
- protectedItems <- mapM (applyFilter protectCDATA) items
- body <- makeItem =<< applyTemplateList itemTpl itemContext' protectedItems
- applyTemplate feedTpl feedContext body
- where
- applyFilter :: (Monad m,Functor f) => (String -> String) -> f String -> m (f String)
- applyFilter tr str = return $ fmap tr str
- protectCDATA :: String -> String
- protectCDATA = replaceAll "]]>" (const "]]&gt;")
- -- Auxiliary: load a template from a datafile
- loadTemplate path = do
- file <- compilerUnsafeIO $ getDataFileName path
- unsafeReadTemplateFile file
-
- itemContext' = mconcat
- [ itemContext
- , constField "root" (feedRoot config)
- , constField "authorName" (feedAuthorName config)
- , constField "authorEmail" (feedAuthorEmail config)
- ]
-
- feedContext = mconcat
- [ bodyField "body"
- , constField "title" (feedTitle config)
- , constField "description" (feedDescription config)
- , constField "authorName" (feedAuthorName config)
- , constField "authorEmail" (feedAuthorEmail config)
- , constField "root" (feedRoot config)
- , urlField "url"
- , updatedField
- , missingField
- ]
-
- -- Take the first "updated" field from all items -- this should be the most
- -- recent.
- updatedField = field "updated" $ \_ -> case items of
- [] -> return "Unknown"
- (x : _) -> unContext itemContext' "updated" [] x >>= \cf -> case cf of
- ListField _ _ -> fail "Hakyll.Web.Feed.renderFeed: Internal error"
- StringField s -> return s
-
-
---------------------------------------------------------------------------------
--- | Render an RSS feed with a number of items.
-renderRss :: FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Item context
- -> [Item String] -- ^ Feed items
- -> Compiler (Item String) -- ^ Resulting feed
-renderRss config context = renderFeed
- "templates/rss.xml" "templates/rss-item.xml" config
- (makeItemContext "%a, %d %b %Y %H:%M:%S UT" context)
-
-
---------------------------------------------------------------------------------
--- | Render an Atom feed with a number of items.
-renderAtom :: FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Item context
- -> [Item String] -- ^ Feed items
- -> Compiler (Item String) -- ^ Resulting feed
-renderAtom config context = renderFeed
- "templates/atom.xml" "templates/atom-item.xml" config
- (makeItemContext "%Y-%m-%dT%H:%M:%SZ" context)
-
-
---------------------------------------------------------------------------------
--- | Copies @$updated$@ from @$published$@ if it is not already set.
-makeItemContext :: String -> Context a -> Context a
-makeItemContext fmt context = mconcat
- [dateField "published" fmt, context, dateField "updated" fmt]
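
Putting the pieces together, a conventional (hypothetical) Atom rule; it assumes the posts were compiled earlier with a "content" snapshot, and OverloadedStrings for the literals:

    {-# LANGUAGE OverloadedStrings #-}
    import Hakyll

    feedConfig :: FeedConfiguration
    feedConfig = FeedConfiguration
        { feedTitle       = "My blog"
        , feedDescription = "Latest posts"
        , feedAuthorName  = "Jane Doe"
        , feedAuthorEmail = "jane@example.com"
        , feedRoot        = "http://example.com"
        }

    feedRules :: Rules ()
    feedRules = create ["atom.xml"] $ do
        route idRoute
        compile $ do
            let feedCtx = defaultContext `mappend` bodyField "description"
            posts <- recentFirst =<< loadAllSnapshots "posts/*" "content"
            renderAtom feedConfig feedCtx posts
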
diff --git a/src/Hakyll/Web/Html.hs b/src/Hakyll/Web/Html.hs
deleted file mode 100644
index 6b7ec88..0000000
--- a/src/Hakyll/Web/Html.hs
+++ /dev/null
@@ -1,184 +0,0 @@
---------------------------------------------------------------------------------
--- | Provides utilities to manipulate HTML pages
-module Hakyll.Web.Html
- ( -- * Generic
- withTags
-
- -- * Headers
- , demoteHeaders
-
- -- * Url manipulation
- , getUrls
- , withUrls
- , toUrl
- , toSiteRoot
- , isExternal
-
- -- * Stripping/escaping
- , stripTags
- , escapeHtml
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Char (digitToInt, intToDigit,
- isDigit, toLower)
-import Data.List (isPrefixOf)
-import qualified Data.Set as S
-import System.FilePath.Posix (joinPath, splitPath,
- takeDirectory)
-import Text.Blaze.Html (toHtml)
-import Text.Blaze.Html.Renderer.String (renderHtml)
-import qualified Text.HTML.TagSoup as TS
-import Network.URI (isUnreserved, escapeURIString)
-
-
---------------------------------------------------------------------------------
--- | Map over all tags in the document
-withTags :: (TS.Tag String -> TS.Tag String) -> String -> String
-withTags f = renderTags' . map f . parseTags'
-
-
---------------------------------------------------------------------------------
--- | Map every @h1@ to an @h2@, @h2@ to @h3@, etc.
-demoteHeaders :: String -> String
-demoteHeaders = withTags $ \tag -> case tag of
- TS.TagOpen t a -> TS.TagOpen (demote t) a
- TS.TagClose t -> TS.TagClose (demote t)
- t -> t
- where
- demote t@['h', n]
- | isDigit n = ['h', intToDigit (min 6 $ digitToInt n + 1)]
- | otherwise = t
- demote t = t
-
-
---------------------------------------------------------------------------------
-isUrlAttribute :: String -> Bool
-isUrlAttribute = (`elem` ["src", "href", "data", "poster"])
-
-
---------------------------------------------------------------------------------
-getUrls :: [TS.Tag String] -> [String]
-getUrls tags = [v | TS.TagOpen _ as <- tags, (k, v) <- as, isUrlAttribute k]
-
-
---------------------------------------------------------------------------------
--- | Apply a function to each URL on a webpage
-withUrls :: (String -> String) -> String -> String
-withUrls f = withTags tag
- where
- tag (TS.TagOpen s a) = TS.TagOpen s $ map attr a
- tag x = x
- attr (k, v) = (k, if isUrlAttribute k then f v else v)
-
-
---------------------------------------------------------------------------------
--- | Customized TagSoup renderer. The default TagSoup renderer escapes CSS
--- within style tags, and doesn't properly minimize.
-renderTags' :: [TS.Tag String] -> String
-renderTags' = TS.renderTagsOptions TS.RenderOptions
- { TS.optRawTag = (`elem` ["script", "style"]) . map toLower
- , TS.optMinimize = (`S.member` minimize) . map toLower
- , TS.optEscape = id
- }
- where
- -- A list of elements which must be minimized
- minimize = S.fromList
- [ "area", "br", "col", "embed", "hr", "img", "input", "meta", "link"
- , "param"
- ]
-
-
---------------------------------------------------------------------------------
--- | Customized TagSoup parser: do not decode any entities.
-parseTags' :: String -> [TS.Tag String]
-parseTags' = TS.parseTagsOptions (TS.parseOptions :: TS.ParseOptions String)
- { TS.optEntityData = \(str, b) -> [TS.TagText $ "&" ++ str ++ [';' | b]]
- , TS.optEntityAttrib = \(str, b) -> ("&" ++ str ++ [';' | b], [])
- }
-
-
---------------------------------------------------------------------------------
--- | Convert a filepath to a URL starting from the site root
---
--- Example:
---
--- > toUrl "foo/bar.html"
---
--- Result:
---
--- > "/foo/bar.html"
---
--- This also sanitizes the URL, e.g. converting spaces into '%20'
-toUrl :: FilePath -> String
-toUrl url = case url of
- ('/' : xs) -> '/' : sanitize xs
- xs -> '/' : sanitize xs
- where
-    -- Everything but unreserved characters should be escaped: we are
-    -- sanitising the path, so reserved characters that have a meaning in a
-    -- URI must not appear. `/` is special-cased, because it has a special
-    -- meaning in a FilePath as well as in a URI.
- sanitize = escapeURIString (\c -> c == '/' || isUnreserved c)
-
-
---------------------------------------------------------------------------------
--- | Get the relative URL to the site root, for a given (absolute) URL
-toSiteRoot :: String -> String
-toSiteRoot = emptyException . joinPath . map parent
- . filter relevant . splitPath . takeDirectory
- where
- parent = const ".."
- emptyException [] = "."
- emptyException x = x
- relevant "." = False
- relevant "/" = False
- relevant "./" = False
- relevant _ = True
-
-
---------------------------------------------------------------------------------
--- | Check if a URL links to an external HTTP(S) source
-isExternal :: String -> Bool
-isExternal url = any (flip isPrefixOf url) ["http://", "https://", "//"]
-
-
---------------------------------------------------------------------------------
--- | Strip all HTML tags from a string
---
--- Example:
---
--- > stripTags "<p>foo</p>"
---
--- Result:
---
--- > "foo"
---
--- This also works for incomplete tags
---
--- Example:
---
--- > stripTags "<p>foo</p"
---
--- Result:
---
--- > "foo"
-stripTags :: String -> String
-stripTags [] = []
-stripTags ('<' : xs) = stripTags $ drop 1 $ dropWhile (/= '>') xs
-stripTags (x : xs) = x : stripTags xs
-
-
---------------------------------------------------------------------------------
--- | HTML-escape a string
---
--- Example:
---
--- > escapeHtml "Me & Dean"
---
--- Result:
---
--- > "Me &amp; Dean"
-escapeHtml :: String -> String
-escapeHtml = renderHtml . toHtml
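
A few small, hypothetical uses of the helpers above (expected results in comments):

    import Hakyll.Web.Html

    -- demoteHeaders "<h1>Title</h1>"     == "<h2>Title</h2>"
    -- toSiteRoot "/posts/2017/foo.html"  == "../.."
    -- isExternal "//cdn.example.com/x"   == True

    -- Append a (made-up) cache-busting query string to every URL attribute.
    cacheBust :: String -> String
    cacheBust = withUrls (++ "?v=2")
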
diff --git a/src/Hakyll/Web/Html/RelativizeUrls.hs b/src/Hakyll/Web/Html/RelativizeUrls.hs
deleted file mode 100644
index 33b0c2c..0000000
--- a/src/Hakyll/Web/Html/RelativizeUrls.hs
+++ /dev/null
@@ -1,52 +0,0 @@
---------------------------------------------------------------------------------
--- | This module exposes a function which can relativize URLs on a webpage.
---
--- This means that one can deploy the resulting site on
--- @http:\/\/example.com\/@, but also on @http:\/\/example.com\/some-folder\/@
--- without having to change anything (simply copy over the files).
---
--- To use it, you should use absolute URLs from the site root everywhere. For
--- example, use
---
--- > <img src="/images/lolcat.png" alt="Funny zomgroflcopter" />
---
--- in a blogpost. When running this through the relativize-URLs module, this
--- will result in (supposing your blogpost is located at @\/posts\/foo.html@):
---
--- > <img src="../images/lolcat.png" alt="Funny zomgroflcopter" />
-module Hakyll.Web.Html.RelativizeUrls
- ( relativizeUrls
- , relativizeUrlsWith
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.List (isPrefixOf)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-import Hakyll.Web.Html
-
-
---------------------------------------------------------------------------------
--- | Compiler form of 'relativizeUrlsWith' which automatically picks the right root
--- path
-relativizeUrls :: Item String -> Compiler (Item String)
-relativizeUrls item = do
- route <- getRoute $ itemIdentifier item
- return $ case route of
- Nothing -> item
- Just r -> fmap (relativizeUrlsWith $ toSiteRoot r) item
-
-
---------------------------------------------------------------------------------
--- | Relativize URLs in HTML
-relativizeUrlsWith :: String -- ^ Path to the site root
- -> String -- ^ HTML to relativize
- -> String -- ^ Resulting HTML
-relativizeUrlsWith root = withUrls rel
- where
- isRel x = "/" `isPrefixOf` x && not ("//" `isPrefixOf` x)
- rel x = if isRel x then root ++ x else x
diff --git a/src/Hakyll/Web/Paginate.hs b/src/Hakyll/Web/Paginate.hs
deleted file mode 100644
index dd058f6..0000000
--- a/src/Hakyll/Web/Paginate.hs
+++ /dev/null
@@ -1,153 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Web.Paginate
- ( PageNumber
- , Paginate (..)
- , buildPaginateWith
- , paginateEvery
- , paginateRules
- , paginateContext
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative (empty)
-import Control.Monad (forM_, forM)
-import qualified Data.Map as M
-import qualified Data.Set as S
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Rules
-import Hakyll.Web.Html
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
-type PageNumber = Int
-
-
---------------------------------------------------------------------------------
--- | Data about paginators
-data Paginate = Paginate
- { paginateMap :: M.Map PageNumber [Identifier]
- , paginateMakeId :: PageNumber -> Identifier
- , paginateDependency :: Dependency
- }
-
-
---------------------------------------------------------------------------------
-paginateNumPages :: Paginate -> Int
-paginateNumPages = M.size . paginateMap
-
-
---------------------------------------------------------------------------------
-paginateEvery :: Int -> [a] -> [[a]]
-paginateEvery n = go
- where
- go [] = []
- go xs = let (y, ys) = splitAt n xs in y : go ys
-
-
---------------------------------------------------------------------------------
-buildPaginateWith
- :: MonadMetadata m
- => ([Identifier] -> m [[Identifier]]) -- ^ Group items into pages
- -> Pattern -- ^ Select items to paginate
- -> (PageNumber -> Identifier) -- ^ Identifiers for the pages
- -> m Paginate
-buildPaginateWith grouper pattern makeId = do
- ids <- getMatches pattern
- idGroups <- grouper ids
- let idsSet = S.fromList ids
- return Paginate
- { paginateMap = M.fromList (zip [1 ..] idGroups)
- , paginateMakeId = makeId
- , paginateDependency = PatternDependency pattern idsSet
- }
-
-
---------------------------------------------------------------------------------
-paginateRules :: Paginate -> (PageNumber -> Pattern -> Rules ()) -> Rules ()
-paginateRules paginator rules =
- forM_ (M.toList $ paginateMap paginator) $ \(idx, identifiers) ->
- rulesExtraDependencies [paginateDependency paginator] $
- create [paginateMakeId paginator idx] $
- rules idx $ fromList identifiers
-
-
---------------------------------------------------------------------------------
--- | Get the identifier for a certain page by passing in the page number.
-paginatePage :: Paginate -> PageNumber -> Maybe Identifier
-paginatePage pag pageNumber
- | pageNumber < 1 = Nothing
- | pageNumber > (paginateNumPages pag) = Nothing
- | otherwise = Just $ paginateMakeId pag pageNumber
-
-
---------------------------------------------------------------------------------
--- | A default paginate context which provides the following keys:
---
--- * @firstPageNum@
--- * @firstPageUrl@
--- * @previousPageNum@
--- * @previousPageUrl@
--- * @nextPageNum@
--- * @nextPageUrl@
--- * @lastPageNum@
--- * @lastPageUrl@
--- * @currentPageNum@
--- * @currentPageUrl@
--- * @numPages@
--- * @allPages@
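---
--- A sketch of how the pieces in this module typically fit together in a
--- site's rules (the pattern, page size, identifier scheme and template path
--- below are illustrative; 'sortRecentFirst' comes from
--- "Hakyll.Web.Template.List"):
---
--- > pag <- buildPaginateWith
--- >     (fmap (paginateEvery 10) . sortRecentFirst)
--- >     "posts/*"
--- >     (\n -> fromFilePath ("page/" ++ show n ++ ".html"))
--- >
--- > paginateRules pag $ \pageNum pat -> do
--- >     route idRoute
--- >     compile $ do
--- >         posts <- recentFirst =<< loadAll pat
--- >         let ctx = paginateContext pag pageNum `mappend`
--- >                   listField "posts" defaultContext (return posts) `mappend`
--- >                   defaultContext
--- >         makeItem "" >>= loadAndApplyTemplate "templates/page.html" ctx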
-paginateContext :: Paginate -> PageNumber -> Context a
-paginateContext pag currentPage = mconcat
- [ field "firstPageNum" $ \_ -> otherPage 1 >>= num
- , field "firstPageUrl" $ \_ -> otherPage 1 >>= url
- , field "previousPageNum" $ \_ -> otherPage (currentPage - 1) >>= num
- , field "previousPageUrl" $ \_ -> otherPage (currentPage - 1) >>= url
- , field "nextPageNum" $ \_ -> otherPage (currentPage + 1) >>= num
- , field "nextPageUrl" $ \_ -> otherPage (currentPage + 1) >>= url
- , field "lastPageNum" $ \_ -> otherPage lastPage >>= num
- , field "lastPageUrl" $ \_ -> otherPage lastPage >>= url
- , field "currentPageNum" $ \i -> thisPage i >>= num
- , field "currentPageUrl" $ \i -> thisPage i >>= url
- , constField "numPages" $ show $ paginateNumPages pag
- , Context $ \k _ i -> case k of
- "allPages" -> do
- let ctx =
- field "isCurrent" (\n -> if fst (itemBody n) == currentPage then return "true" else empty) `mappend`
- field "num" (num . itemBody) `mappend`
- field "url" (url . itemBody)
-
- list <- forM [1 .. lastPage] $
- \n -> if n == currentPage then thisPage i else otherPage n
- items <- mapM makeItem list
- return $ ListField ctx items
- _ -> do
- empty
-
- ]
- where
- lastPage = paginateNumPages pag
-
- thisPage i = return (currentPage, itemIdentifier i)
- otherPage n
- | n == currentPage = fail $ "This is the current page: " ++ show n
- | otherwise = case paginatePage pag n of
- Nothing -> fail $ "No such page: " ++ show n
- Just i -> return (n, i)
-
- num :: (Int, Identifier) -> Compiler String
- num = return . show . fst
-
- url :: (Int, Identifier) -> Compiler String
- url (n, i) = getRoute i >>= \mbR -> case mbR of
- Just r -> return $ toUrl r
- Nothing -> fail $ "No URL for page: " ++ show n
diff --git a/src/Hakyll/Web/Pandoc.hs b/src/Hakyll/Web/Pandoc.hs
deleted file mode 100644
index eec0a8a..0000000
--- a/src/Hakyll/Web/Pandoc.hs
+++ /dev/null
@@ -1,164 +0,0 @@
---------------------------------------------------------------------------------
--- | Module exporting convenient pandoc bindings
-module Hakyll.Web.Pandoc
- ( -- * The basic building blocks
- readPandoc
- , readPandocWith
- , writePandoc
- , writePandocWith
- , renderPandoc
- , renderPandocWith
-
- -- * Derived compilers
- , pandocCompiler
- , pandocCompilerWith
- , pandocCompilerWithTransform
- , pandocCompilerWithTransformM
-
- -- * Default options
- , defaultHakyllReaderOptions
- , defaultHakyllWriterOptions
- ) where
-
-
---------------------------------------------------------------------------------
-import qualified Data.Set as S
-import Text.Pandoc
-import Text.Pandoc.Error (PandocError (..))
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-import Hakyll.Web.Pandoc.FileType
-
-
---------------------------------------------------------------------------------
--- | Read a string using pandoc, with the default options
-readPandoc
- :: Item String -- ^ String to read
- -> Compiler (Item Pandoc) -- ^ Resulting document
-readPandoc = readPandocWith defaultHakyllReaderOptions
-
-
---------------------------------------------------------------------------------
--- | Read a string using pandoc, with the supplied options
-readPandocWith
- :: ReaderOptions -- ^ Parser options
- -> Item String -- ^ String to read
- -> Compiler (Item Pandoc) -- ^ Resulting document
-readPandocWith ropt item =
- case traverse (reader ropt (itemFileType item)) item of
- Left (ParseFailure err) -> fail $
- "Hakyll.Web.Pandoc.readPandocWith: parse failed: " ++ err
- Left (ParsecError _ err) -> fail $
- "Hakyll.Web.Pandoc.readPandocWith: parse failed: " ++ show err
- Right item' -> return item'
- where
- reader ro t = case t of
- DocBook -> readDocBook ro
- Html -> readHtml ro
- LaTeX -> readLaTeX ro
- LiterateHaskell t' -> reader (addExt ro Ext_literate_haskell) t'
- Markdown -> readMarkdown ro
- MediaWiki -> readMediaWiki ro
- OrgMode -> readOrg ro
- Rst -> readRST ro
- Textile -> readTextile ro
- _ -> error $
- "Hakyll.Web.readPandocWith: I don't know how to read a file of " ++
- "the type " ++ show t ++ " for: " ++ show (itemIdentifier item)
-
- addExt ro e = ro {readerExtensions = S.insert e $ readerExtensions ro}
-
-
---------------------------------------------------------------------------------
--- | Write a document (as HTML) using pandoc, with the default options
-writePandoc :: Item Pandoc -- ^ Document to write
- -> Item String -- ^ Resulting HTML
-writePandoc = writePandocWith defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | Write a document (as HTML) using pandoc, with the supplied options
-writePandocWith :: WriterOptions -- ^ Writer options for pandoc
- -> Item Pandoc -- ^ Document to write
- -> Item String -- ^ Resulting HTML
-writePandocWith wopt = fmap $ writeHtmlString wopt
-
-
---------------------------------------------------------------------------------
--- | Render the resource using pandoc
-renderPandoc :: Item String -> Compiler (Item String)
-renderPandoc =
- renderPandocWith defaultHakyllReaderOptions defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | Render the resource using pandoc
-renderPandocWith
- :: ReaderOptions -> WriterOptions -> Item String -> Compiler (Item String)
-renderPandocWith ropt wopt item =
- writePandocWith wopt <$> readPandocWith ropt item
-
-
---------------------------------------------------------------------------------
--- | Read a page and render it using pandoc
-pandocCompiler :: Compiler (Item String)
-pandocCompiler =
- pandocCompilerWith defaultHakyllReaderOptions defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | A version of 'pandocCompiler' which allows you to specify your own pandoc
--- options
-pandocCompilerWith :: ReaderOptions -> WriterOptions -> Compiler (Item String)
-pandocCompilerWith ropt wopt =
- cached "Hakyll.Web.Pandoc.pandocCompilerWith" $
- pandocCompilerWithTransform ropt wopt id
-
-
---------------------------------------------------------------------------------
--- | An extension of 'pandocCompilerWith' which allows you to specify a custom
--- pandoc transformation for the content
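---
--- For example, a transformation that demotes every header by one level could
--- be plugged in like this ('demoteHeaders' and 'myPostCompiler' are
--- illustrative names; 'walk' comes from pandoc's "Text.Pandoc.Walk"):
---
--- > import Text.Pandoc.Walk (walk)
--- >
--- > demoteHeaders :: Pandoc -> Pandoc
--- > demoteHeaders = walk $ \blk -> case blk of
--- >     Header n attr inlines -> Header (n + 1) attr inlines
--- >     other                 -> other
--- >
--- > myPostCompiler :: Compiler (Item String)
--- > myPostCompiler = pandocCompilerWithTransform
--- >     defaultHakyllReaderOptions defaultHakyllWriterOptions demoteHeaders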
-pandocCompilerWithTransform :: ReaderOptions -> WriterOptions
- -> (Pandoc -> Pandoc)
- -> Compiler (Item String)
-pandocCompilerWithTransform ropt wopt f =
- pandocCompilerWithTransformM ropt wopt (return . f)
-
-
---------------------------------------------------------------------------------
--- | Similar to 'pandocCompilerWithTransform', but the transformation
--- function is monadic. This is useful when you want the pandoc
--- transformation to use the 'Compiler' information such as routes,
--- metadata, etc
-pandocCompilerWithTransformM :: ReaderOptions -> WriterOptions
- -> (Pandoc -> Compiler Pandoc)
- -> Compiler (Item String)
-pandocCompilerWithTransformM ropt wopt f =
- writePandocWith wopt <$>
- (traverse f =<< readPandocWith ropt =<< getResourceBody)
-
-
---------------------------------------------------------------------------------
--- | The default reader options for pandoc parsing in hakyll
-defaultHakyllReaderOptions :: ReaderOptions
-defaultHakyllReaderOptions = def
- { -- The following option causes pandoc to read smart typography, a nice
- -- and free bonus.
- readerSmart = True
- }
-
-
---------------------------------------------------------------------------------
--- | The default writer options for pandoc rendering in hakyll
-defaultHakyllWriterOptions :: WriterOptions
-defaultHakyllWriterOptions = def
- { -- This option causes literate haskell to be written using '>' marks in
- -- html, which I think is a good default.
- writerExtensions = S.insert Ext_literate_haskell (writerExtensions def)
- , -- We want to have highlighting by default, to be compatible with earlier
- -- Hakyll releases
- writerHighlight = True
- }
diff --git a/src/Hakyll/Web/Pandoc/Biblio.hs b/src/Hakyll/Web/Pandoc/Biblio.hs
deleted file mode 100644
index dfe6d93..0000000
--- a/src/Hakyll/Web/Pandoc/Biblio.hs
+++ /dev/null
@@ -1,115 +0,0 @@
---------------------------------------------------------------------------------
--- | Wraps pandoc's bibliography handling
---
--- In order to add a bibliography, you will need a bibliography file (e.g.
--- @.bib@) and a CSL file (@.csl@). Both need to be compiled with their
--- respective compilers ('biblioCompiler' and 'cslCompiler'). Then, you can
--- refer to these files when you use 'readPandocBiblio'. This function also
--- takes the reader options for completeness -- you can use
--- 'defaultHakyllReaderOptions' if you're unsure.
--- 'pandocBiblioCompiler' is a convenience wrapper which works like 'pandocCompiler',
--- but also takes paths to compiled bibliography and csl files.
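---
--- A rules sketch (the file names and pattern are illustrative):
---
--- > match "bib/style.csl" $ compile cslCompiler
--- > match "bib/refs.bib"  $ compile biblioCompiler
--- >
--- > match "posts/*" $ do
--- >     route $ setExtension "html"
--- >     compile $ pandocBiblioCompiler "bib/style.csl" "bib/refs.bib"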
-{-# LANGUAGE Arrows #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Web.Pandoc.Biblio
- ( CSL
- , cslCompiler
- , Biblio (..)
- , biblioCompiler
- , readPandocBiblio
- , pandocBiblioCompiler
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (liftM, replicateM)
-import Data.Binary (Binary (..))
-import Data.Default (def)
-import Data.Typeable (Typeable)
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Writable
-import Hakyll.Web.Pandoc
-import Hakyll.Web.Pandoc.Binary ()
-import qualified Text.CSL as CSL
-import Text.CSL.Pandoc (processCites)
-import Text.Pandoc (Pandoc, ReaderOptions (..))
-
-
---------------------------------------------------------------------------------
-data CSL = CSL
- deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary CSL where
- put CSL = return ()
- get = return CSL
-
-
---------------------------------------------------------------------------------
-instance Writable CSL where
- -- Shouldn't be written.
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-cslCompiler :: Compiler (Item CSL)
-cslCompiler = makeItem CSL
-
-
---------------------------------------------------------------------------------
-newtype Biblio = Biblio [CSL.Reference]
- deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary Biblio where
- -- Ugly.
- get = do
- len <- get
- Biblio <$> replicateM len get
- put (Biblio rs) = put (length rs) >> mapM_ put rs
-
-
---------------------------------------------------------------------------------
-instance Writable Biblio where
- -- Shouldn't be written.
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-biblioCompiler :: Compiler (Item Biblio)
-biblioCompiler = do
- filePath <- toFilePath <$> getUnderlying
- makeItem =<< unsafeCompiler (Biblio <$> CSL.readBiblioFile filePath)
-
-
---------------------------------------------------------------------------------
-readPandocBiblio :: ReaderOptions
- -> Item CSL
- -> Item Biblio
- -> (Item String)
- -> Compiler (Item Pandoc)
-readPandocBiblio ropt csl biblio item = do
- -- Parse CSL file, if given
- style <- unsafeCompiler $ CSL.readCSLFile Nothing . toFilePath . itemIdentifier $ csl
-
- -- We need to know the citation keys and add them *before* actually parsing
- -- the page. If we don't do this, pandoc won't even consider them
- -- citations!
- let Biblio refs = itemBody biblio
- pandoc <- itemBody <$> readPandocWith ropt item
- let pandoc' = processCites style refs pandoc
-
- return $ fmap (const pandoc') item
-
---------------------------------------------------------------------------------
-pandocBiblioCompiler :: String -> String -> Compiler (Item String)
-pandocBiblioCompiler cslFileName bibFileName = do
- csl <- load $ fromFilePath cslFileName
- bib <- load $ fromFilePath bibFileName
- liftM writePandoc
- (getResourceBody >>= readPandocBiblio def csl bib)
diff --git a/src/Hakyll/Web/Pandoc/Binary.hs b/src/Hakyll/Web/Pandoc/Binary.hs
deleted file mode 100644
index 3c5b5a3..0000000
--- a/src/Hakyll/Web/Pandoc/Binary.hs
+++ /dev/null
@@ -1,32 +0,0 @@
-{-# OPTIONS_GHC -fno-warn-orphans #-}
-{-# LANGUAGE DeriveGeneric #-}
-module Hakyll.Web.Pandoc.Binary where
-
-import Data.Binary (Binary (..))
-
-import qualified Text.CSL as CSL
-import qualified Text.CSL.Reference as REF
-import qualified Text.CSL.Style as STY
-import Text.Pandoc
-
---------------------------------------------------------------------------------
--- orphans
-
-instance Binary Alignment
-instance Binary Block
-instance Binary CSL.Reference
-instance Binary Citation
-instance Binary CitationMode
-instance Binary Format
-instance Binary Inline
-instance Binary ListNumberDelim
-instance Binary ListNumberStyle
-instance Binary MathType
-instance Binary QuoteType
-instance Binary REF.CLabel
-instance Binary REF.CNum
-instance Binary REF.Literal
-instance Binary REF.RefDate
-instance Binary REF.RefType
-instance Binary STY.Agent
-instance Binary STY.Formatted
diff --git a/src/Hakyll/Web/Pandoc/FileType.hs b/src/Hakyll/Web/Pandoc/FileType.hs
deleted file mode 100644
index 3636e41..0000000
--- a/src/Hakyll/Web/Pandoc/FileType.hs
+++ /dev/null
@@ -1,74 +0,0 @@
---------------------------------------------------------------------------------
--- | A module dealing with pandoc file extensions and associated file types
-module Hakyll.Web.Pandoc.FileType
- ( FileType (..)
- , fileType
- , itemFileType
- ) where
-
-
---------------------------------------------------------------------------------
-import System.FilePath (splitExtension)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-
-
---------------------------------------------------------------------------------
--- | Datatype to represent the different file types Hakyll can deal with by
--- default
-data FileType
- = Binary
- | Css
- | DocBook
- | Html
- | LaTeX
- | LiterateHaskell FileType
- | Markdown
- | MediaWiki
- | OrgMode
- | PlainText
- | Rst
- | Textile
- deriving (Eq, Ord, Show, Read)
-
-
---------------------------------------------------------------------------------
--- | Get the file type for a certain file. The type is determined by extension.
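---
--- For example:
---
--- > fileType "posts/hello.markdown" == Markdown
--- > fileType "code/tutorial.lhs"    == LiterateHaskell Markdown
--- > fileType "paper.tex.lhs"        == LiterateHaskell LaTeX
--- > fileType "logo.png"             == Binary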
-fileType :: FilePath -> FileType
-fileType = uncurry fileType' . splitExtension
- where
- fileType' _ ".css" = Css
- fileType' _ ".dbk" = DocBook
- fileType' _ ".htm" = Html
- fileType' _ ".html" = Html
- fileType' f ".lhs" = LiterateHaskell $ case fileType f of
- -- If no extension is given, default to Markdown + LiterateHaskell
- Binary -> Markdown
- -- Otherwise, LaTeX + LiterateHaskell or whatever the user specified
- x -> x
- fileType' _ ".markdown" = Markdown
- fileType' _ ".mediawiki" = MediaWiki
- fileType' _ ".md" = Markdown
- fileType' _ ".mdn" = Markdown
- fileType' _ ".mdown" = Markdown
- fileType' _ ".mdwn" = Markdown
- fileType' _ ".mkd" = Markdown
- fileType' _ ".mkdwn" = Markdown
- fileType' _ ".org" = OrgMode
- fileType' _ ".page" = Markdown
- fileType' _ ".rst" = Rst
- fileType' _ ".tex" = LaTeX
- fileType' _ ".text" = PlainText
- fileType' _ ".textile" = Textile
- fileType' _ ".txt" = PlainText
- fileType' _ ".wiki" = MediaWiki
- fileType' _ _ = Binary -- Treat unknown files as binary
-
-
---------------------------------------------------------------------------------
--- | Get the file type for the current file
-itemFileType :: Item a -> FileType
-itemFileType = fileType . toFilePath . itemIdentifier
diff --git a/src/Hakyll/Web/Redirect.hs b/src/Hakyll/Web/Redirect.hs
deleted file mode 100644
index 4760cff..0000000
--- a/src/Hakyll/Web/Redirect.hs
+++ /dev/null
@@ -1,87 +0,0 @@
--- | Module used for generating HTML redirect pages. This allows renaming pages
--- to avoid breaking existing links without requiring server-side support for
--- formal 301 redirect status codes.
-module Hakyll.Web.Redirect
- ( Redirect (..)
- , createRedirects
- ) where
-
-import Control.Applicative ((<$>))
-import Control.Monad (forM_)
-import Data.Binary (Binary (..))
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Routes
-import Hakyll.Core.Rules
-import Hakyll.Core.Writable (Writable (..))
-
--- | This function exposes a higher-level interface compared to using the
--- 'Redirect' type manually.
---
--- This creates, using a database mapping broken URLs to working ones, HTML
--- files which will do HTML META tag redirect pages (since, as a static site, we
--- can't use web-server-level 301 redirects, and using JS is gross).
---
--- This is useful for sending people using old URLs to renamed versions, dealing
--- with common typos etc, and will increase site traffic. Such broken URLs can
--- be found by looking at server logs or by using Google Webmaster Tools.
--- Broken URLs must be valid Haskell strings, non-URL-escaped valid POSIX
--- filenames, and relative links, since they will be defined in a @hakyll.hs@
--- and during generation, written to disk with the filename corresponding to the
--- broken URLs. (Target URLs can be absolute or relative, but should be
--- URL-escaped.) So broken incoming links like <http://www.gwern.net/foo/> which
--- should be <http://www.gwern.net/foobar> cannot be fixed (since you cannot
--- create a HTML file named @"foo/"@ on disk, as that would be a directory).
---
--- An example of a valid association list would be:
---
--- > brokenLinks =
--- > [ ("projects.html", "http://github.com/gwern")
--- > , ("/Black-market archive", "Black-market%20archives")
--- > ]
---
--- In which case the functionality can then be used in `main` with a line like:
---
--- > version "redirects" $ createRedirects brokenLinks
---
--- The 'version' is recommended to separate these items from your other pages.
---
--- The on-disk files can then be uploaded with HTML mimetypes
--- (either explicitly by generating and uploading them separately, by
--- auto-detection of the filetype, or an upload tool defaulting to HTML
--- mimetype, such as calling @s3cmd@ with @--default-mime-type=text/html@) and
--- will redirect browsers and search engines going to the old/broken URLs.
---
--- See also <https://groups.google.com/d/msg/hakyll/sWc6zxfh-uM/fUpZPsFNDgAJ>.
-createRedirects :: [(Identifier, String)] -> Rules ()
-createRedirects redirects =
- forM_ redirects $ \(ident, to) ->
- create [ident] $ do
- route idRoute
- compile $ makeItem $! Redirect to
-
--- | This datatype can be used directly if you want a lower-level interface to
--- generate redirects. For example, if you want to redirect @foo.html@ to
--- @bar.jpg@, you can use:
---
--- > create ["foo.html"] $ do
--- > route idRoute
--- > compile $ makeItem $ Redirect "bar.jpg"
-data Redirect = Redirect
- { redirectTo :: String
- } deriving (Eq, Ord, Show)
-
-instance Binary Redirect where
- put (Redirect to) = put to
- get = Redirect <$> get
-
-instance Writable Redirect where
- write path = write path . fmap redirectToHtml
-
-redirectToHtml :: Redirect -> String
-redirectToHtml (Redirect working) =
- "<!DOCTYPE html><html><head><meta charset=\"utf-8\"/><meta name=\"generator\" content=\"hakyll\"/>" ++
- "<meta http-equiv=\"refresh\" content=\"0; url=" ++ working ++
- "\"><link rel=\"canonical\" href=\"" ++ working ++
- "\"><title>Permanent Redirect</title></head><body><p>The page has moved to: <a href=\"" ++ working ++
- "\">this page</a></p></body></html>"
diff --git a/src/Hakyll/Web/Tags.hs b/src/Hakyll/Web/Tags.hs
deleted file mode 100644
index 88119c2..0000000
--- a/src/Hakyll/Web/Tags.hs
+++ /dev/null
@@ -1,344 +0,0 @@
---------------------------------------------------------------------------------
--- | This module contains some specialized functions to deal with tags. It
--- assumes you follow some conventions.
---
--- We support two types of tags: tags and categories.
---
--- To use default tags, use 'buildTags'. Tags are placed in a comma-separated
--- metadata field like this:
---
--- > ---
--- > author: Philip K. Dick
--- > title: Do androids dream of electric sheep?
--- > tags: future, science fiction, humanoid
--- > ---
--- > The novel is set in a post-apocalyptic near future, where the Earth and
--- > its populations have been damaged greatly by Nuclear...
---
--- To use categories, use the 'buildCategories' function. Categories are
--- determined by the directory a page is in, for example, the post
---
--- > posts/coding/2010-01-28-hakyll-categories.markdown
---
--- will receive the @coding@ category.
---
--- Advanced users may implement custom systems using 'buildTagsWith' if desired.
---
--- In the above example, we would want to create a page which lists all pages in
--- the @coding@ category, for example, with the 'Identifier':
---
--- > tags/coding.html
---
--- This is where the first parameter of 'buildTags' and 'buildCategories' comes
--- in. In the above case, we used the function:
---
--- > fromCapture "tags/*.html" :: String -> Identifier
---
--- The 'tagsRules' function lets you generate such a page for each tag in the
--- 'Rules' monad.
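---
--- A sketch of the typical wiring (the patterns and template path are
--- illustrative):
---
--- > tags <- buildTags "posts/*" (fromCapture "tags/*.html")
--- >
--- > tagsRules tags $ \tag pat -> do
--- >     route idRoute
--- >     compile $ do
--- >         posts <- recentFirst =<< loadAll pat
--- >         let ctx = constField "title" ("Posts tagged " ++ tag) `mappend`
--- >                   listField "posts" defaultContext (return posts) `mappend`
--- >                   defaultContext
--- >         makeItem "" >>= loadAndApplyTemplate "templates/tag.html" ctx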
-{-# LANGUAGE Arrows #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Web.Tags
- ( Tags (..)
- , getTags
- , buildTagsWith
- , buildTags
- , buildCategories
- , tagsRules
- , renderTags
- , renderTagCloud
- , renderTagCloudWith
- , tagCloudField
- , tagCloudFieldWith
- , renderTagList
- , tagsField
- , tagsFieldWith
- , categoryField
- , sortTagsBy
- , caseInsensitiveTags
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Arrow ((&&&))
-import Control.Monad (foldM, forM, forM_, mplus)
-import Data.Char (toLower)
-import Data.List (intercalate, intersperse,
- sortBy)
-import qualified Data.Map as M
-import Data.Maybe (catMaybes, fromMaybe)
-import Data.Ord (comparing)
-import qualified Data.Set as S
-import System.FilePath (takeBaseName, takeDirectory)
-import Text.Blaze.Html (toHtml, toValue, (!))
-import Text.Blaze.Html.Renderer.String (renderHtml)
-import qualified Text.Blaze.Html5 as H
-import qualified Text.Blaze.Html5.Attributes as A
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Rules
-import Hakyll.Core.Util.String
-import Hakyll.Web.Html
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
--- | Data about tags
-data Tags = Tags
- { tagsMap :: [(String, [Identifier])]
- , tagsMakeId :: String -> Identifier
- , tagsDependency :: Dependency
- }
-
-
---------------------------------------------------------------------------------
--- | Obtain tags from a page in the default way: parse them from the @tags@
--- metadata field. This can either be a list or a comma-separated string.
-getTags :: MonadMetadata m => Identifier -> m [String]
-getTags identifier = do
- metadata <- getMetadata identifier
- return $ fromMaybe [] $
- (lookupStringList "tags" metadata) `mplus`
- (map trim . splitAll "," <$> lookupString "tags" metadata)
-
-
---------------------------------------------------------------------------------
--- | Obtain categories from a page.
-getCategory :: MonadMetadata m => Identifier -> m [String]
-getCategory = return . return . takeBaseName . takeDirectory . toFilePath
-
-
---------------------------------------------------------------------------------
--- | Higher-order function to read tags
-buildTagsWith :: MonadMetadata m
- => (Identifier -> m [String])
- -> Pattern
- -> (String -> Identifier)
- -> m Tags
-buildTagsWith f pattern makeId = do
- ids <- getMatches pattern
- tagMap <- foldM addTags M.empty ids
- let set' = S.fromList ids
- return $ Tags (M.toList tagMap) makeId (PatternDependency pattern set')
- where
- -- Create a tag map for one page
- addTags tagMap id' = do
- tags <- f id'
- let tagMap' = M.fromList $ zip tags $ repeat [id']
- return $ M.unionWith (++) tagMap tagMap'
-
-
---------------------------------------------------------------------------------
-buildTags :: MonadMetadata m => Pattern -> (String -> Identifier) -> m Tags
-buildTags = buildTagsWith getTags
-
-
---------------------------------------------------------------------------------
-buildCategories :: MonadMetadata m => Pattern -> (String -> Identifier)
- -> m Tags
-buildCategories = buildTagsWith getCategory
-
-
---------------------------------------------------------------------------------
-tagsRules :: Tags -> (String -> Pattern -> Rules ()) -> Rules ()
-tagsRules tags rules =
- forM_ (tagsMap tags) $ \(tag, identifiers) ->
- rulesExtraDependencies [tagsDependency tags] $
- create [tagsMakeId tags tag] $
- rules tag $ fromList identifiers
-
-
---------------------------------------------------------------------------------
--- | Render tags in HTML (the flexible higher-order function)
-renderTags :: (String -> String -> Int -> Int -> Int -> String)
- -- ^ Produce a tag item: tag, url, count, min count, max count
- -> ([String] -> String)
- -- ^ Join items
- -> Tags
- -- ^ Input tags
- -> Compiler String
-renderTags makeHtml concatHtml tags = do
- -- In tags' we create a list: [((tag, route), count)]
- tags' <- forM (tagsMap tags) $ \(tag, ids) -> do
- route' <- getRoute $ tagsMakeId tags tag
- return ((tag, route'), length ids)
-
- -- TODO: We actually need to tell a dependency here!
-
- let -- Absolute frequencies of the pages
- freqs = map snd tags'
-
- -- The minimum and maximum count found
- (min', max')
- | null freqs = (0, 1)
- | otherwise = (minimum &&& maximum) freqs
-
- -- Create a link for one item
- makeHtml' ((tag, url), count) =
- makeHtml tag (toUrl $ fromMaybe "/" url) count min' max'
-
- -- Render and return the HTML
- return $ concatHtml $ map makeHtml' tags'
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML
-renderTagCloud :: Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Compiler String
- -- ^ Rendered cloud
-renderTagCloud = renderTagCloudWith makeLink (intercalate " ")
- where
- makeLink minSize maxSize tag url count min' max' =
- -- Show the relative size of one 'count' in percent
- let diff = 1 + fromIntegral max' - fromIntegral min'
- relative = (fromIntegral count - fromIntegral min') / diff
- size = floor $ minSize + relative * (maxSize - minSize) :: Int
- in renderHtml $
- H.a ! A.style (toValue $ "font-size: " ++ show size ++ "%")
- ! A.href (toValue url)
- $ toHtml tag
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML
-renderTagCloudWith :: (Double -> Double ->
- String -> String -> Int -> Int -> Int -> String)
- -- ^ Render a single tag link
- -> ([String] -> String)
- -- ^ Concatenate links
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Compiler String
- -- ^ Rendered cloud
-renderTagCloudWith makeLink cat minSize maxSize =
- renderTags (makeLink minSize maxSize) cat
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML as a context
-tagCloudField :: String
- -- ^ Destination key
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Context a
- -- ^ Context
-tagCloudField key minSize maxSize tags =
- field key $ \_ -> renderTagCloud minSize maxSize tags
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML as a context
-tagCloudFieldWith :: String
- -- ^ Destination key
- -> (Double -> Double ->
- String -> String -> Int -> Int -> Int -> String)
- -- ^ Render a single tag link
- -> ([String] -> String)
- -- ^ Concatenate links
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Context a
- -- ^ Context
-tagCloudFieldWith key makeLink cat minSize maxSize tags =
- field key $ \_ -> renderTagCloudWith makeLink cat minSize maxSize tags
-
-
---------------------------------------------------------------------------------
--- | Render a simple tag list in HTML, with the tag count next to the item
--- TODO: Maybe produce a Context here
-renderTagList :: Tags -> Compiler (String)
-renderTagList = renderTags makeLink (intercalate ", ")
- where
- makeLink tag url count _ _ = renderHtml $
- H.a ! A.href (toValue url) $ toHtml (tag ++ " (" ++ show count ++ ")")
-
-
---------------------------------------------------------------------------------
--- | Render tags with links with custom functions to get tags and to
--- render links
-tagsFieldWith :: (Identifier -> Compiler [String])
- -- ^ Get the tags
- -> (String -> (Maybe FilePath) -> Maybe H.Html)
- -- ^ Render link for one tag
- -> ([H.Html] -> H.Html)
- -- ^ Concatenate tag links
- -> String
- -- ^ Destination field
- -> Tags
- -- ^ Tags structure
- -> Context a
- -- ^ Resulting context
-tagsFieldWith getTags' renderLink cat key tags = field key $ \item -> do
- tags' <- getTags' $ itemIdentifier item
- links <- forM tags' $ \tag -> do
- route' <- getRoute $ tagsMakeId tags tag
- return $ renderLink tag route'
-
- return $ renderHtml $ cat $ catMaybes $ links
-
-
---------------------------------------------------------------------------------
--- | Render tags with links
-tagsField :: String -- ^ Destination key
- -> Tags -- ^ Tags
- -> Context a -- ^ Context
-tagsField =
- tagsFieldWith getTags simpleRenderLink (mconcat . intersperse ", ")
-
-
---------------------------------------------------------------------------------
--- | Render the category in a link
-categoryField :: String -- ^ Destination key
- -> Tags -- ^ Tags
- -> Context a -- ^ Context
-categoryField =
- tagsFieldWith getCategory simpleRenderLink (mconcat . intersperse ", ")
-
-
---------------------------------------------------------------------------------
--- | Render one tag link
-simpleRenderLink :: String -> (Maybe FilePath) -> Maybe H.Html
-simpleRenderLink _ Nothing = Nothing
-simpleRenderLink tag (Just filePath) =
- Just $ H.a ! A.href (toValue $ toUrl filePath) $ toHtml tag
-
-
---------------------------------------------------------------------------------
--- | Sort tags using the supplied function. The first element of the tuple passed to
--- the comparing function is the actual tag name.
-sortTagsBy :: ((String, [Identifier]) -> (String, [Identifier]) -> Ordering)
- -> Tags -> Tags
-sortTagsBy f t = t {tagsMap = sortBy f (tagsMap t)}
-
-
---------------------------------------------------------------------------------
--- | Sample sorting function that compares tags case insensitively.
-caseInsensitiveTags :: (String, [Identifier]) -> (String, [Identifier])
- -> Ordering
-caseInsensitiveTags = comparing $ map toLower . fst
diff --git a/src/Hakyll/Web/Template.hs b/src/Hakyll/Web/Template.hs
deleted file mode 100644
index 2a9684b..0000000
--- a/src/Hakyll/Web/Template.hs
+++ /dev/null
@@ -1,154 +0,0 @@
--- | This module provides means for reading and applying 'Template's.
---
--- Templates are tools to convert items into a string. They are perfectly suited
--- for laying out your site.
---
--- Let's look at an example template:
---
--- > <html>
--- > <head>
--- > <title>My crazy homepage - $title$</title>
--- > </head>
--- > <body>
--- > <div id="header">
--- > <h1>My crazy homepage - $title$</h1>
--- > </div>
--- > <div id="content">
--- > $body$
--- > </div>
--- > <div id="footer">
--- > By reading this you agree that I now own your soul
--- > </div>
--- > </body>
--- > </html>
---
--- As you can see, the format is very simple -- @$key$@ is used to render the
--- @$key$@ field from the page, everything else is literally copied. If you want
--- to literally insert @\"$key$\"@ into your page (for example, when you're
--- writing a Hakyll tutorial) you can use
---
--- > <p>
--- > A literal $$key$$.
--- > </p>
---
--- Because of its simplicity, these templates can be used for more than HTML:
--- you could make, for example, CSS or JS templates as well.
---
--- Apart from interpolating @$key$@s from the 'Context' you can also
--- use the following macros:
---
--- * @$if(key)$@
---
--- > $if(key)$
--- > <b> Defined </b>
--- > $else$
--- > <b> Non-defined </b>
--- > $endif$
---
--- This example will print @Defined@ if @key@ is defined in the
--- context and @Non-defined@ otherwise. The @$else$@ clause is
--- optional.
---
--- * @$for(key)$@
---
--- The @for@ macro is used for enumerating 'Context' elements that are
--- lists, i.e. constructed using the 'listField' function. Assume that
--- in a context we have an element @listField \"key\" c itms@. Then
--- the snippet
---
--- > $for(key)$
--- > $x$
--- > $sep$,
--- > $endfor$
---
--- would, for each item @i@ in 'itms', look up @$x$@ in the context @c@
--- with item @i@, interpolate it, and join the resulting list with
--- @,@.
---
--- Another concrete example one may consider is the following. Given the
--- context
---
--- > listField "things" (field "thing" (return . itemBody))
--- > (sequence [makeItem "fruits", makeItem "vegetables"])
---
--- and a template
---
--- > I like
--- > $for(things)$
--- > fresh $thing$$sep$, and
--- > $endfor$
---
--- the resulting page would look like
---
--- > <p>
--- > I like
--- >
--- > fresh fruits, and
--- >
--- > fresh vegetables
--- > </p>
---
--- The @$sep$@ part can be omitted. Usually, you can get by using the
--- 'applyListTemplate' and 'applyJoinListTemplate' functions.
---
--- * @$partial(path)$@
---
--- Loads a template located in a separate file and interpolates it
--- under the current context.
---
--- Assuming that the file @test.html@ contains
---
--- > <b>$key$</b>
---
--- The result of rendering
---
--- > <p>
--- > $partial("test.html")$
--- > </p>
---
--- is the same as the result of rendering
---
--- > <p>
--- > <b>$key$</b>
--- > </p>
---
--- That is, calling @$partial$@ is equivalent to just copying and pasting
--- template code.
---
--- In the examples above you can see that the outputs contain a lot of leftover
--- whitespace that you may wish to remove. Using @'$-'@ or @'-$'@ instead of
--- @'$'@ in a macro strips all whitespace to the left or right of that clause
--- respectively. Given the context
---
--- > listField "counts" (field "count" (return . itemBody))
--- > (sequence [makeItem "3", makeItem "2", makeItem "1"])
---
--- and a template
---
--- > <p>
--- > $for(counts)-$
--- > $count$
--- > $-sep$...
--- > $-endfor$
--- > </p>
---
--- the resulting page would look like
---
--- > <p>
--- > 3...2...1
--- > </p>
---
-module Hakyll.Web.Template
- ( Template
- , templateBodyCompiler
- , templateCompiler
- , applyTemplate
- , loadAndApplyTemplate
- , applyAsTemplate
- , readTemplate
- , unsafeReadTemplateFile
- ) where
-
-
---------------------------------------------------------------------------------
-import Hakyll.Web.Template.Internal
diff --git a/src/Hakyll/Web/Template/Context.hs b/src/Hakyll/Web/Template/Context.hs
deleted file mode 100644
index b6c7994..0000000
--- a/src/Hakyll/Web/Template/Context.hs
+++ /dev/null
@@ -1,379 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE ExistentialQuantification #-}
-module Hakyll.Web.Template.Context
- ( ContextField (..)
- , Context (..)
- , field
- , boolField
- , constField
- , listField
- , listFieldWith
- , functionField
- , mapContext
-
- , defaultContext
- , bodyField
- , metadataField
- , urlField
- , pathField
- , titleField
- , snippetField
- , dateField
- , dateFieldWith
- , getItemUTC
- , getItemModificationTime
- , modificationTimeField
- , modificationTimeFieldWith
- , teaserField
- , teaserFieldWithSeparator
- , missingField
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative (Alternative (..))
-import Control.Monad (msum)
-import Data.List (intercalate)
-import Data.Time.Clock (UTCTime (..))
-import Data.Time.Format (formatTime)
-import qualified Data.Time.Format as TF
-import Data.Time.Locale.Compat (TimeLocale, defaultTimeLocale)
-import Hakyll.Core.Compiler
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider
-import Hakyll.Core.Util.String (needlePrefix, splitAll)
-import Hakyll.Web.Html
-import System.FilePath (splitDirectories, takeBaseName)
-
-
---------------------------------------------------------------------------------
--- | Mostly for internal usage
-data ContextField
- = StringField String
- | forall a. ListField (Context a) [Item a]
-
-
---------------------------------------------------------------------------------
--- | The 'Context' monoid. Please note that the order in which you
--- compose the items is important. For example in
---
--- > field "A" f1 <> field "A" f2
---
--- the first context will overwrite the second. This is especially
--- important when something is being composed with
--- 'metadataField' (or 'defaultContext'). If you want your context to be
--- overwritten by the metadata fields, compose it from the right:
---
--- @
--- 'metadataField' \<\> field \"date\" fDate
--- @
---
-newtype Context a = Context
- { unContext :: String -> [String] -> Item a -> Compiler ContextField
- }
-
-
---------------------------------------------------------------------------------
-instance Monoid (Context a) where
- mempty = missingField
- mappend (Context f) (Context g) = Context $ \k a i -> f k a i <|> g k a i
-
-
---------------------------------------------------------------------------------
-field' :: String -> (Item a -> Compiler ContextField) -> Context a
-field' key value = Context $ \k _ i -> if k == key then value i else empty
-
-
---------------------------------------------------------------------------------
--- | Constructs a new field in the 'Context'.
-field
- :: String -- ^ Key
- -> (Item a -> Compiler String) -- ^ Function that constructs a value based
- -- on the item
- -> Context a
-field key value = field' key (fmap StringField . value)
-
-
---------------------------------------------------------------------------------
--- | Creates a 'field' to use with the @$if()$@ template macro.
-boolField
- :: String
- -> (Item a -> Bool)
- -> Context a
-boolField name f = field name (\i -> if f i
- then pure (error $ unwords ["no string value for bool field:",name])
- else empty)
-
-
---------------------------------------------------------------------------------
--- | Creates a 'field' that does not depend on the 'Item'
-constField :: String -> String -> Context a
-constField key = field key . const . return
-
-
---------------------------------------------------------------------------------
-listField :: String -> Context a -> Compiler [Item a] -> Context b
-listField key c xs = listFieldWith key c (const xs)
-
-
---------------------------------------------------------------------------------
-listFieldWith
- :: String -> Context a -> (Item b -> Compiler [Item a]) -> Context b
-listFieldWith key c f = field' key $ fmap (ListField c) . f
-
-
---------------------------------------------------------------------------------
-functionField :: String -> ([String] -> Item a -> Compiler String) -> Context a
-functionField name value = Context $ \k args i ->
- if k == name
- then StringField <$> value args i
- else empty
-
-
---------------------------------------------------------------------------------
-mapContext :: (String -> String) -> Context a -> Context a
-mapContext f (Context c) = Context $ \k a i -> do
- fld <- c k a i
- case fld of
- StringField str -> return $ StringField (f str)
- ListField _ _ -> fail $
- "Hakyll.Web.Template.Context.mapContext: " ++
- "can't map over a ListField!"
-
---------------------------------------------------------------------------------
--- | A context that allows snippet inclusion. In the processed file, use it as:
---
--- > ...
--- > $snippet("path/to/snippet/")$
--- > ...
---
--- The contents of the included file will not be interpolated.
---
-snippetField :: Context String
-snippetField = functionField "snippet" f
- where
- f [contentsPath] _ = loadBody (fromFilePath contentsPath)
- f _ i = error $
- "Too many arguments to function 'snippet()' in item " ++
- show (itemIdentifier i)
-
---------------------------------------------------------------------------------
--- | A context that contains (in that order)
---
--- 1. A @$body$@ field
---
--- 2. Metadata fields
---
--- 3. A @$url$@ 'urlField'
---
--- 4. A @$path$@ 'pathField'
---
--- 5. A @$title$@ 'titleField'
-defaultContext :: Context String
-defaultContext =
- bodyField "body" `mappend`
- metadataField `mappend`
- urlField "url" `mappend`
- pathField "path" `mappend`
- titleField "title" `mappend`
- missingField
-
-
---------------------------------------------------------------------------------
-teaserSeparator :: String
-teaserSeparator = "<!--more-->"
-
-
---------------------------------------------------------------------------------
--- | Constructs a 'field' that contains the body of the item.
-bodyField :: String -> Context String
-bodyField key = field key $ return . itemBody
-
-
---------------------------------------------------------------------------------
--- | Map any field to its metadata value, if present
-metadataField :: Context a
-metadataField = Context $ \k _ i -> do
- value <- getMetadataField (itemIdentifier i) k
- maybe empty (return . StringField) value
-
-
---------------------------------------------------------------------------------
--- | Absolute url to the resulting item
-urlField :: String -> Context a
-urlField key = field key $
- fmap (maybe empty toUrl) . getRoute . itemIdentifier
-
-
---------------------------------------------------------------------------------
--- | Filepath of the underlying file of the item
-pathField :: String -> Context a
-pathField key = field key $ return . toFilePath . itemIdentifier
-
-
---------------------------------------------------------------------------------
--- | This title 'field' takes the basename of the underlying file by default
-titleField :: String -> Context a
-titleField = mapContext takeBaseName . pathField
-
-
---------------------------------------------------------------------------------
--- | When the metadata has a field called @published@ in one of the
--- following formats then this function can render the date.
---
--- * @Mon, 06 Sep 2010 00:01:00 +0000@
---
--- * @Mon, 06 Sep 2010 00:01:00 UTC@
---
--- * @Mon, 06 Sep 2010 00:01:00@
---
--- * @2010-09-06T00:01:00+0000@
---
--- * @2010-09-06T00:01:00Z@
---
--- * @2010-09-06T00:01:00@
---
--- * @2010-09-06 00:01:00+0000@
---
--- * @2010-09-06 00:01:00@
---
--- * @September 06, 2010 00:01 AM@
---
--- The following date-only formats are supported too (@00:00:00@ is assumed
--- for the time)
---
--- * @2010-09-06@
---
--- * @September 06, 2010@
---
--- Alternatively, when the metadata has a field called @path@ in a
--- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages)
--- and no @published@ metadata field set, this function can render
--- the date. This pattern matches file names or directory names
--- that begin with @yyyy-mm-dd@. For example:
--- @folder//yyyy-mm-dd-title//dist//main.extension@.
--- In case of multiple matches, the rightmost one is used.
-
-dateField :: String -- ^ Key in which the rendered date should be placed
- -> String -- ^ Format to use on the date
- -> Context a -- ^ Resulting context
-dateField = dateFieldWith defaultTimeLocale
-
-
---------------------------------------------------------------------------------
--- | This is an extended version of 'dateField' that allows you to
--- specify a time locale that is used for outputting the date. For more
--- details, see 'dateField'.
-dateFieldWith :: TimeLocale -- ^ Output time locale
- -> String -- ^ Destination key
- -> String -- ^ Format to use on the date
- -> Context a -- ^ Resulting context
-dateFieldWith locale key format = field key $ \i -> do
- time <- getItemUTC locale $ itemIdentifier i
- return $ formatTime locale format time
-
-
---------------------------------------------------------------------------------
--- | Parser to try to extract and parse the time from the @published@
--- field or from the filename. See 'dateField' for more information.
--- Exported for user convenience.
-getItemUTC :: MonadMetadata m
- => TimeLocale -- ^ Output time locale
- -> Identifier -- ^ Input page
- -> m UTCTime -- ^ Parsed UTCTime
-getItemUTC locale id' = do
- metadata <- getMetadata id'
- let tryField k fmt = lookupString k metadata >>= parseTime' fmt
- paths = splitDirectories $ toFilePath id'
-
- maybe empty' return $ msum $
- [tryField "published" fmt | fmt <- formats] ++
- [tryField "date" fmt | fmt <- formats] ++
- [parseTime' "%Y-%m-%d" $ intercalate "-" $ take 3 $ splitAll "-" fnCand | fnCand <- reverse paths]
- where
- empty' = fail $ "Hakyll.Web.Template.Context.getItemUTC: " ++
- "could not parse time for " ++ show id'
- parseTime' = parseTimeM True locale
- formats =
- [ "%a, %d %b %Y %H:%M:%S %Z"
- , "%Y-%m-%dT%H:%M:%S%Z"
- , "%Y-%m-%d %H:%M:%S%Z"
- , "%Y-%m-%d"
- , "%B %e, %Y %l:%M %p"
- , "%B %e, %Y"
- , "%b %d, %Y"
- ]
-
-
---------------------------------------------------------------------------------
--- | Get the time on which the actual file was last modified. This only works if
--- there actually is an underlying file, of course.
-getItemModificationTime
- :: Identifier
- -> Compiler UTCTime
-getItemModificationTime identifier = do
- provider <- compilerProvider <$> compilerAsk
- return $ resourceModificationTime provider identifier
-
-
---------------------------------------------------------------------------------
-modificationTimeField :: String -- ^ Key
- -> String -- ^ Format
- -> Context a -- ^ Resulting context
-modificationTimeField = modificationTimeFieldWith defaultTimeLocale
-
-
---------------------------------------------------------------------------------
-modificationTimeFieldWith :: TimeLocale -- ^ Time output locale
- -> String -- ^ Key
- -> String -- ^ Format
- -> Context a -- ^ Resulting context
-modificationTimeFieldWith locale key fmt = field key $ \i -> do
- mtime <- getItemModificationTime $ itemIdentifier i
- return $ formatTime locale fmt mtime
-
-
---------------------------------------------------------------------------------
--- | A context with a "teaser" key which contains a teaser of the item.
--- The item is loaded from the given snapshot (which should be saved
--- in the user code before any templates are applied).
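---
--- A sketch (the snapshot name, context and template path are illustrative):
--- save a snapshot of the rendered body before templating,
---
--- > compile $ pandocCompiler
--- >     >>= saveSnapshot "content"
--- >     >>= loadAndApplyTemplate "templates/post.html" postCtx
---
--- and then add @teaserField "teaser" "content"@ to the context of the page
--- that lists the teasers.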
-teaserField :: String -- ^ Key to use
- -> Snapshot -- ^ Snapshot to load
- -> Context String -- ^ Resulting context
-teaserField = teaserFieldWithSeparator teaserSeparator
-
-
---------------------------------------------------------------------------------
--- | A context with a "teaser" key which contains a teaser of the item, defined as
--- the snapshot content before the teaser separator. The item is loaded from the
--- given snapshot (which should be saved in the user code before any templates
--- are applied).
-teaserFieldWithSeparator :: String -- ^ Separator to use
- -> String -- ^ Key to use
- -> Snapshot -- ^ Snapshot to load
- -> Context String -- ^ Resulting context
-teaserFieldWithSeparator separator key snapshot = field key $ \item -> do
- body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
- case needlePrefix separator body of
- Nothing -> fail $
- "Hakyll.Web.Template.Context: no teaser defined for " ++
- show (itemIdentifier item)
- Just t -> return t
-
-
---------------------------------------------------------------------------------
-missingField :: Context a
-missingField = Context $ \k _ i -> fail $
- "Missing field $" ++ k ++ "$ in context for item " ++
- show (itemIdentifier i)
-
-parseTimeM :: Bool -> TimeLocale -> String -> String -> Maybe UTCTime
-#if MIN_VERSION_time(1,5,0)
-parseTimeM = TF.parseTimeM
-#else
-parseTimeM _ = TF.parseTime
-#endif
diff --git a/src/Hakyll/Web/Template/Internal.hs b/src/Hakyll/Web/Template/Internal.hs
deleted file mode 100644
index d0e4d47..0000000
--- a/src/Hakyll/Web/Template/Internal.hs
+++ /dev/null
@@ -1,203 +0,0 @@
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE ScopedTypeVariables #-}
-module Hakyll.Web.Template.Internal
- ( Template (..)
- , template
- , templateBodyCompiler
- , templateCompiler
- , applyTemplate
- , applyTemplate'
- , loadAndApplyTemplate
- , applyAsTemplate
- , readTemplate
- , unsafeReadTemplateFile
-
- , module Hakyll.Web.Template.Internal.Element
- , module Hakyll.Web.Template.Internal.Trim
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad.Except (MonadError (..))
-import Data.Binary (Binary)
-import Data.List (intercalate)
-import Data.Typeable (Typeable)
-import GHC.Exts (IsString (..))
-import Prelude hiding (id)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Writable
-import Hakyll.Web.Template.Context
-import Hakyll.Web.Template.Internal.Element
-import Hakyll.Web.Template.Internal.Trim
-
-
---------------------------------------------------------------------------------
--- | Datatype used for template substitutions.
-newtype Template = Template
- { unTemplate :: [TemplateElement]
- } deriving (Show, Eq, Binary, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Writable Template where
- -- Writing a template is impossible
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-instance IsString Template where
- fromString = readTemplate
-
-
---------------------------------------------------------------------------------
--- | Wrap the constructor to ensure trim is called.
-template :: [TemplateElement] -> Template
-template = Template . trim
-
-
---------------------------------------------------------------------------------
-readTemplate :: String -> Template
-readTemplate = Template . trim . readTemplateElems
-
---------------------------------------------------------------------------------
--- | Read a template, without metadata header
-templateBodyCompiler :: Compiler (Item Template)
-templateBodyCompiler = cached "Hakyll.Web.Template.templateBodyCompiler" $ do
- item <- getResourceBody
- file <- getResourceFilePath
- return $ fmap (template . readTemplateElemsFile file) item
-
---------------------------------------------------------------------------------
--- | Read complete file contents as a template
-templateCompiler :: Compiler (Item Template)
-templateCompiler = cached "Hakyll.Web.Template.templateCompiler" $ do
- item <- getResourceString
- file <- getResourceFilePath
- return $ fmap (template . readTemplateElemsFile file) item
-
-
---------------------------------------------------------------------------------
-applyTemplate :: Template -- ^ Template
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler (Item String) -- ^ Resulting item
-applyTemplate tpl context item = do
- body <- applyTemplate' (unTemplate tpl) context item
- return $ itemSetBody body item
-
-
---------------------------------------------------------------------------------
-applyTemplate'
- :: forall a.
- [TemplateElement] -- ^ Unwrapped Template
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler String -- ^ Resulting item
-applyTemplate' tes context x = go tes
- where
- context' :: String -> [String] -> Item a -> Compiler ContextField
- context' = unContext (context `mappend` missingField)
-
- go = fmap concat . mapM applyElem
-
- trimError = error $ "Hakyll.Web.Template.applyTemplate: template not " ++
- "fully trimmed."
-
- ---------------------------------------------------------------------------
-
- applyElem :: TemplateElement -> Compiler String
-
- applyElem TrimL = trimError
-
- applyElem TrimR = trimError
-
- applyElem (Chunk c) = return c
-
- applyElem (Expr e) = applyExpr e >>= getString e
-
- applyElem Escaped = return "$"
-
- applyElem (If e t mf) = (applyExpr e >> go t) `catchError` handler
- where
- handler _ = case mf of
- Nothing -> return ""
- Just f -> go f
-
- applyElem (For e b s) = applyExpr e >>= \cf -> case cf of
- StringField _ -> fail $
- "Hakyll.Web.Template.applyTemplateWith: expected ListField but " ++
- "got StringField for expr " ++ show e
- ListField c xs -> do
- sep <- maybe (return "") go s
- bs <- mapM (applyTemplate' b c) xs
- return $ intercalate sep bs
-
- applyElem (Partial e) = do
- p <- applyExpr e >>= getString e
- Template tpl' <- loadBody (fromFilePath p)
- applyTemplate' tpl' context x
-
- ---------------------------------------------------------------------------
-
- applyExpr :: TemplateExpr -> Compiler ContextField
-
- applyExpr (Ident (TemplateKey k)) = context' k [] x
-
- applyExpr (Call (TemplateKey k) args) = do
- args' <- mapM (\e -> applyExpr e >>= getString e) args
- context' k args' x
-
- applyExpr (StringLiteral s) = return (StringField s)
-
- ----------------------------------------------------------------------------
-
- getString _ (StringField s) = return s
- getString e (ListField _ _) = fail $
- "Hakyll.Web.Template.applyTemplateWith: expected StringField but " ++
- "got ListField for expr " ++ show e
-
-
---------------------------------------------------------------------------------
--- | The following pattern is so common:
---
--- > tpl <- loadBody "templates/foo.html"
--- > someCompiler
--- > >>= applyTemplate tpl context
---
--- that we provide a single function which does exactly this:
---
--- > someCompiler
--- > >>= loadAndApplyTemplate "templates/foo.html" context
-loadAndApplyTemplate :: Identifier -- ^ Template identifier
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler (Item String) -- ^ Resulting item
-loadAndApplyTemplate identifier context item = do
- tpl <- loadBody identifier
- applyTemplate tpl context item
-
-
---------------------------------------------------------------------------------
--- | It is also possible that you want to substitute @$key$@s within the body of
--- an item. This function does that by interpreting the item body as a template,
--- and then applying it to itself.
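---
--- A usage sketch (the use of 'getResourceBody' and 'defaultContext' here is
--- illustrative):
---
--- > compile $ getResourceBody >>= applyAsTemplate defaultContext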
-applyAsTemplate :: Context String -- ^ Context
- -> Item String -- ^ Item and template
- -> Compiler (Item String) -- ^ Resulting item
-applyAsTemplate context item =
- let tpl = template $ readTemplateElemsFile file (itemBody item)
- file = toFilePath $ itemIdentifier item
- in applyTemplate tpl context item
-
-
---------------------------------------------------------------------------------
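--- | Read a template directly from a file with 'unsafeCompiler', bypassing the
--- resource provider and Hakyll's dependency tracking (hence the name).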
-unsafeReadTemplateFile :: FilePath -> Compiler Template
-unsafeReadTemplateFile file = do
- tpl <- unsafeCompiler $ readFile file
- pure $ template $ readTemplateElemsFile file tpl
-
diff --git a/src/Hakyll/Web/Template/Internal/Element.hs b/src/Hakyll/Web/Template/Internal/Element.hs
deleted file mode 100644
index f564355..0000000
--- a/src/Hakyll/Web/Template/Internal/Element.hs
+++ /dev/null
@@ -1,298 +0,0 @@
---------------------------------------------------------------------------------
--- | Module containing the elements used in a template. A template is generally
--- just a list of these elements.
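---
--- For example (an illustrative sketch), a fragment such as
---
--- > <h1>$title$</h1>
---
--- parses to roughly @[Chunk "<h1>", Expr (Ident "title"), Chunk "</h1>"]@.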
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Web.Template.Internal.Element
- ( TemplateKey (..)
- , TemplateExpr (..)
- , TemplateElement (..)
- , templateElems
- , readTemplateElems
- , readTemplateElemsFile
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative ((<|>))
-import Control.Monad (void)
-import Data.Binary (Binary, get, getWord8, put, putWord8)
-import Data.List (intercalate)
-import Data.Maybe (isJust)
-import Data.Typeable (Typeable)
-import GHC.Exts (IsString (..))
-import qualified Text.Parsec as P
-import qualified Text.Parsec.String as P
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Util.Parser
-
-
---------------------------------------------------------------------------------
-newtype TemplateKey = TemplateKey String
- deriving (Binary, Show, Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance IsString TemplateKey where
- fromString = TemplateKey
-
-
---------------------------------------------------------------------------------
--- | Elements of a template.
-data TemplateElement
- = Chunk String
- | Expr TemplateExpr
- | Escaped
- -- expr, then, else
- | If TemplateExpr [TemplateElement] (Maybe [TemplateElement])
- -- expr, body, separator
- | For TemplateExpr [TemplateElement] (Maybe [TemplateElement])
- -- filename
- | Partial TemplateExpr
- | TrimL
- | TrimR
- deriving (Show, Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary TemplateElement where
- put (Chunk string) = putWord8 0 >> put string
- put (Expr e) = putWord8 1 >> put e
- put Escaped = putWord8 2
- put (If e t f) = putWord8 3 >> put e >> put t >> put f
- put (For e b s) = putWord8 4 >> put e >> put b >> put s
- put (Partial e) = putWord8 5 >> put e
- put TrimL = putWord8 6
- put TrimR = putWord8 7
-
- get = getWord8 >>= \tag -> case tag of
- 0 -> Chunk <$> get
- 1 -> Expr <$> get
- 2 -> pure Escaped
- 3 -> If <$> get <*> get <*> get
- 4 -> For <$> get <*> get <*> get
- 5 -> Partial <$> get
- 6 -> pure TrimL
- 7 -> pure TrimR
- _ -> error "Hakyll.Web.Template.Internal: Error reading cached template"
-
-
---------------------------------------------------------------------------------
--- | Expression in a template
-data TemplateExpr
- = Ident TemplateKey
- | Call TemplateKey [TemplateExpr]
- | StringLiteral String
- deriving (Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Show TemplateExpr where
- show (Ident (TemplateKey k)) = k
- show (Call (TemplateKey k) as) =
- k ++ "(" ++ intercalate ", " (map show as) ++ ")"
- show (StringLiteral s) = show s
-
-
---------------------------------------------------------------------------------
-instance Binary TemplateExpr where
- put (Ident k) = putWord8 0 >> put k
- put (Call k as) = putWord8 1 >> put k >> put as
- put (StringLiteral s) = putWord8 2 >> put s
-
- get = getWord8 >>= \tag -> case tag of
- 0 -> Ident <$> get
- 1 -> Call <$> get <*> get
- 2 -> StringLiteral <$> get
- _ -> error "Hakyll.Web.Template.Internal: Error reading cached template"
-
-
---------------------------------------------------------------------------------
-readTemplateElems :: String -> [TemplateElement]
-readTemplateElems = readTemplateElemsFile "{literal}"
-
-
---------------------------------------------------------------------------------
-readTemplateElemsFile :: FilePath -> String -> [TemplateElement]
-readTemplateElemsFile file input = case P.parse templateElems file input of
- Left err -> error $ "Cannot parse template: " ++ show err
- Right t -> t
-
-
---------------------------------------------------------------------------------
-templateElems :: P.Parser [TemplateElement]
-templateElems = mconcat <$> P.many (P.choice [ lift chunk
- , lift escaped
- , conditional
- , for
- , partial
- , expr
- ])
- where lift = fmap (:[])
-
-
---------------------------------------------------------------------------------
-chunk :: P.Parser TemplateElement
-chunk = Chunk <$> P.many1 (P.noneOf "$")
-
-
---------------------------------------------------------------------------------
-expr :: P.Parser [TemplateElement]
-expr = P.try $ do
- trimLExpr <- trimOpen
- e <- expr'
- trimRExpr <- trimClose
- return $ [TrimL | trimLExpr] ++ [Expr e] ++ [TrimR | trimRExpr]
-
-
---------------------------------------------------------------------------------
-expr' :: P.Parser TemplateExpr
-expr' = stringLiteral <|> call <|> ident
-
-
---------------------------------------------------------------------------------
-escaped :: P.Parser TemplateElement
-escaped = Escaped <$ P.try (P.string "$$")
-
-
---------------------------------------------------------------------------------
-trimOpen :: P.Parser Bool
-trimOpen = do
- void $ P.char '$'
- trimLIf <- P.optionMaybe $ P.try (P.char '-')
- pure $ isJust trimLIf
-
-
---------------------------------------------------------------------------------
-trimClose :: P.Parser Bool
-trimClose = do
- trimIfR <- P.optionMaybe $ P.try (P.char '-')
- void $ P.char '$'
- pure $ isJust trimIfR
-
-
---------------------------------------------------------------------------------
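--- | Parse an @$if(expr)$ ... $else$ ... $endif$@ block; the @$else$@ branch is
--- optional, and a @-@ next to a delimiter (as in @$-if(...)$@) requests
--- whitespace trimming.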
-conditional :: P.Parser [TemplateElement]
-conditional = P.try $ do
- -- if
- trimLIf <- trimOpen
- void $ P.string "if("
- e <- expr'
- void $ P.char ')'
- trimRIf <- trimClose
- -- then
- thenBranch <- templateElems
- -- else
- elseParse <- opt "else"
- -- endif
- trimLEnd <- trimOpen
- void $ P.string "endif"
- trimREnd <- trimClose
-
-    -- As else is optional, we need to sort out where any Trim_s need to go.
- let (thenBody, elseBody) = maybe (thenNoElse, Nothing) thenElse elseParse
- where thenNoElse =
- [TrimR | trimRIf] ++ thenBranch ++ [TrimL | trimLEnd]
-
- thenElse (trimLElse, elseBranch, trimRElse) = (thenB, elseB)
- where thenB = [TrimR | trimRIf]
- ++ thenBranch
- ++ [TrimL | trimLElse]
-
- elseB = Just $ [TrimR | trimRElse]
- ++ elseBranch
- ++ [TrimL | trimLEnd]
-
- pure $ [TrimL | trimLIf] ++ [If e thenBody elseBody] ++ [TrimR | trimREnd]
-
-
---------------------------------------------------------------------------------
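--- | Parse a @$for(expr)$ ... $sep$ ... $endfor$@ block; the @$sep$@ section is
--- optional.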
-for :: P.Parser [TemplateElement]
-for = P.try $ do
- -- for
- trimLFor <- trimOpen
- void $ P.string "for("
- e <- expr'
- void $ P.char ')'
- trimRFor <- trimClose
- -- body
- bodyBranch <- templateElems
- -- sep
- sepParse <- opt "sep"
- -- endfor
- trimLEnd <- trimOpen
- void $ P.string "endfor"
- trimREnd <- trimClose
-
-    -- As sep is optional, we need to sort out where any Trim_s need to go.
- let (forBody, sepBody) = maybe (forNoSep, Nothing) forSep sepParse
- where forNoSep =
- [TrimR | trimRFor] ++ bodyBranch ++ [TrimL | trimLEnd]
-
- forSep (trimLSep, sepBranch, trimRSep) = (forB, sepB)
- where forB = [TrimR | trimRFor]
- ++ bodyBranch
- ++ [TrimL | trimLSep]
-
- sepB = Just $ [TrimR | trimRSep]
- ++ sepBranch
- ++ [TrimL | trimLEnd]
-
- pure $ [TrimL | trimLFor] ++ [For e forBody sepBody] ++ [TrimR | trimREnd]
-
-
---------------------------------------------------------------------------------
-partial :: P.Parser [TemplateElement]
-partial = P.try $ do
- trimLPart <- trimOpen
- void $ P.string "partial("
- e <- expr'
- void $ P.char ')'
- trimRPart <- trimClose
-
- pure $ [TrimL | trimLPart] ++ [Partial e] ++ [TrimR | trimRPart]
-
-
---------------------------------------------------------------------------------
-ident :: P.Parser TemplateExpr
-ident = P.try $ Ident <$> key
-
-
---------------------------------------------------------------------------------
-call :: P.Parser TemplateExpr
-call = P.try $ do
- f <- key
- void $ P.char '('
- P.spaces
- as <- P.sepBy expr' (P.spaces >> P.char ',' >> P.spaces)
- P.spaces
- void $ P.char ')'
- return $ Call f as
-
-
---------------------------------------------------------------------------------
-stringLiteral :: P.Parser TemplateExpr
-stringLiteral = do
- void $ P.char '\"'
- str <- P.many $ do
- x <- P.noneOf "\""
- if x == '\\' then P.anyChar else return x
- void $ P.char '\"'
- return $ StringLiteral str
-
-
---------------------------------------------------------------------------------
-key :: P.Parser TemplateKey
-key = TemplateKey <$> metadataKey
-
-
---------------------------------------------------------------------------------
-opt :: String -> P.Parser (Maybe (Bool, [TemplateElement], Bool))
-opt clause = P.optionMaybe $ P.try $ do
- trimL <- trimOpen
- void $ P.string clause
- trimR <- trimClose
- branch <- templateElems
- pure (trimL, branch, trimR)
-
diff --git a/src/Hakyll/Web/Template/Internal/Trim.hs b/src/Hakyll/Web/Template/Internal/Trim.hs
deleted file mode 100644
index e416ff2..0000000
--- a/src/Hakyll/Web/Template/Internal/Trim.hs
+++ /dev/null
@@ -1,95 +0,0 @@
---------------------------------------------------------------------------------
--- | Module for trimming whitespace from templates.
-module Hakyll.Web.Template.Internal.Trim
- ( trim
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Char (isSpace)
-import Data.List (dropWhileEnd)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Web.Template.Internal.Element
-
-
---------------------------------------------------------------------------------
-trim :: [TemplateElement] -> [TemplateElement]
-trim = cleanse . canonicalize
-
-
---------------------------------------------------------------------------------
--- | Apply the Trim nodes to the Chunks.
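---
--- For instance (an illustrative case):
---
--- > cleanse [TrimR, Chunk "  foo"] == [Chunk "foo"]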
-cleanse :: [TemplateElement] -> [TemplateElement]
-cleanse = recurse cleanse . process
- where process [] = []
- process (TrimR:Chunk str:ts) = let str' = dropWhile isSpace str
- in if null str'
- then process ts
- -- Might need to TrimL.
- else process $ Chunk str':ts
-
- process (Chunk str:TrimL:ts) = let str' = dropWhileEnd isSpace str
- in if null str'
- then process ts
- else Chunk str':process ts
-
- process (t:ts) = t:process ts
-
---------------------------------------------------------------------------------
--- | Enforce the invariant that:
---
--- * Every 'TrimL' has a 'Chunk' to its left.
--- * Every 'TrimR' has a 'Chunk' to its right.
---
-canonicalize :: [TemplateElement] -> [TemplateElement]
-canonicalize = go
- where go t = let t' = redundant . swap $ dedupe t
- in if t == t' then t else go t'
-
-
---------------------------------------------------------------------------------
--- | Remove the 'TrimR' and 'TrimL's that are no-ops.
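---
--- For instance (an illustrative case), a leading 'TrimL' and a trailing
--- 'TrimR' have nothing to trim:
---
--- > redundant [TrimL, Chunk "foo", TrimR] == [Chunk "foo"]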
-redundant :: [TemplateElement] -> [TemplateElement]
-redundant = recurse redundant . process
- where -- Remove the leading 'TrimL's.
- process (TrimL:ts) = process ts
- -- Remove trailing 'TrimR's.
- process ts = foldr trailing [] ts
- where trailing TrimR [] = []
- trailing x xs = x:xs
-
-
---------------------------------------------------------------------------------
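--- | Swap a 'TrimR' that is immediately followed by a 'TrimL':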
--- >>> swap $ [TrimR, TrimL]
--- [TrimL, TrimR]
-swap :: [TemplateElement] -> [TemplateElement]
-swap = recurse swap . process
- where process [] = []
- process (TrimR:TrimL:ts) = TrimL:process (TrimR:ts)
- process (t:ts) = t:process ts
-
-
---------------------------------------------------------------------------------
--- | Remove 'TrimR' and 'TrimL' duplication.
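---
--- For instance (an illustrative case):
---
--- > dedupe [TrimR, TrimR, Chunk "x"] == [TrimR, Chunk "x"]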
-dedupe :: [TemplateElement] -> [TemplateElement]
-dedupe = recurse dedupe . process
- where process [] = []
- process (TrimR:TrimR:ts) = process (TrimR:ts)
- process (TrimL:TrimL:ts) = process (TrimL:ts)
- process (t:ts) = t:process ts
-
-
---------------------------------------------------------------------------------
--- | @'recurse' f t@ applies f to every '[TemplateElement]' in t.
-recurse :: ([TemplateElement] -> [TemplateElement])
- -> [TemplateElement]
- -> [TemplateElement]
-recurse _ [] = []
-recurse f (x:xs) = process x:recurse f xs
- where process y = case y of
- If e tb eb -> If e (f tb) (f <$> eb)
- For e t s -> For e (f t) (f <$> s)
- _ -> y
-
diff --git a/src/Hakyll/Web/Template/List.hs b/src/Hakyll/Web/Template/List.hs
deleted file mode 100644
index 4d769fc..0000000
--- a/src/Hakyll/Web/Template/List.hs
+++ /dev/null
@@ -1,91 +0,0 @@
---------------------------------------------------------------------------------
--- | Provides an easy way to combine several items in a list. The applications
--- are obvious:
---
--- * A post list on a blog
---
--- * An image list in a gallery
---
--- * A sitemap
-{-# LANGUAGE TupleSections #-}
-module Hakyll.Web.Template.List
- ( applyTemplateList
- , applyJoinTemplateList
- , chronological
- , recentFirst
- , sortChronological
- , sortRecentFirst
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (liftM)
-import Data.List (intersperse, sortBy)
-import Data.Ord (comparing)
-import Data.Time.Locale.Compat (defaultTimeLocale)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Web.Template
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
--- | Generate a string listing of pages, after applying a template to each
--- page.
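---
--- A usage sketch (the pattern, template path and 'defaultContext' are
--- illustrative):
---
--- > posts   <- recentFirst =<< loadAll "posts/*"
--- > itemTpl <- loadBody "templates/post-item.html"
--- > applyTemplateList itemTpl defaultContext posts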
-applyTemplateList :: Template
- -> Context a
- -> [Item a]
- -> Compiler String
-applyTemplateList = applyJoinTemplateList ""
-
-
---------------------------------------------------------------------------------
--- | Join a listing of pages with a string in between, after applying a template
--- to each page.
-applyJoinTemplateList :: String
- -> Template
- -> Context a
- -> [Item a]
- -> Compiler String
-applyJoinTemplateList delimiter tpl context items = do
- items' <- mapM (applyTemplate tpl context) items
- return $ concat $ intersperse delimiter $ map itemBody items'
-
-
---------------------------------------------------------------------------------
--- | Sort pages chronologically. Uses the same method as 'dateField' for
--- extracting the date.
-chronological :: MonadMetadata m => [Item a] -> m [Item a]
-chronological =
- sortByM $ getItemUTC defaultTimeLocale . itemIdentifier
- where
- sortByM :: (Monad m, Ord k) => (a -> m k) -> [a] -> m [a]
- sortByM f xs = liftM (map fst . sortBy (comparing snd)) $
- mapM (\x -> liftM (x,) (f x)) xs
-
-
---------------------------------------------------------------------------------
--- | The reverse of 'chronological'
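---
--- A typical usage sketch inside a compiler (the @posts/*@ pattern is
--- illustrative):
---
--- > posts <- recentFirst =<< loadAll "posts/*"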
-recentFirst :: MonadMetadata m => [Item a] -> m [Item a]
-recentFirst = liftM reverse . chronological
-
-
---------------------------------------------------------------------------------
--- | Version of 'chronological' which doesn't need the actual items.
-sortChronological
- :: MonadMetadata m => [Identifier] -> m [Identifier]
-sortChronological ids =
- liftM (map itemIdentifier) $ chronological [Item i () | i <- ids]
-
-
---------------------------------------------------------------------------------
--- | Version of 'recentFirst' which doesn't need the actual items.
-sortRecentFirst
- :: MonadMetadata m => [Identifier] -> m [Identifier]
-sortRecentFirst ids =
- liftM (map itemIdentifier) $ recentFirst [Item i () | i <- ids]
diff --git a/src/Init.hs b/src/Init.hs
new file mode 100644
index 0000000..71055f0
--- /dev/null
+++ b/src/Init.hs
@@ -0,0 +1,96 @@
+--------------------------------------------------------------------------------
+module Main
+ ( main
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Arrow (first)
+import Control.Monad (forM_)
+import Data.Char (isAlphaNum, isNumber)
+import Data.List (foldl', intercalate, isPrefixOf)
+import Data.Version (Version (..))
+import System.Directory (canonicalizePath, copyFile)
+import System.Environment (getArgs, getProgName)
+import System.Exit (exitFailure)
+import System.FilePath (splitDirectories, (</>))
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Util.File
+import Paths_hakyll
+
+
+--------------------------------------------------------------------------------
+main :: IO ()
+main = do
+ progName <- getProgName
+ args <- getArgs
+ srcDir <- getDataFileName "example"
+ files <- getRecursiveContents (const $ return False) srcDir
+
+ case args of
+        -- When the argument begins with a hyphen, the user most likely intends
+        -- to pass an option such as "--help", "-h" or "--version" rather than
+        -- create a directory with that name.
+        -- If dstDir begins with a hyphen, the guard below keeps us from
+        -- creating a directory with that name, so we fall through to the
+        -- second alternative, which prints usage info for the user.
+ [dstDir] | not ("-" `isPrefixOf` dstDir) -> do
+ forM_ files $ \file -> do
+ let dst = dstDir </> file
+ src = srcDir </> file
+ putStrLn $ "Creating " ++ dst
+ makeDirectories dst
+ copyFile src dst
+
+ name <- makeName dstDir
+ let cabalPath = dstDir </> name ++ ".cabal"
+ putStrLn $ "Creating " ++ cabalPath
+ createCabal cabalPath name
+ _ -> do
+ putStrLn $ "Usage: " ++ progName ++ " <directory>"
+ exitFailure
+
+-- | Figure out a good cabal package name from the given (existing) directory
+-- name
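+--
+-- For example (illustrative): a directory named "my blog!" yields the package
+-- name "my-blog", while a purely numeric name such as "2017" falls back to
+-- "site2017".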
+makeName :: FilePath -> IO String
+makeName dstDir = do
+ canonical <- canonicalizePath dstDir
+ return $ case safeLast (splitDirectories canonical) of
+ Nothing -> fallbackName
+ Just "/" -> fallbackName
+ Just x -> repair (fallbackName ++) id x
+ where
+ -- Package name repair code comes from
+ -- cabal-install.Distribution.Client.Init.Heuristics
+ repair invalid valid x = case dropWhile (not . isAlphaNum) x of
+ "" -> repairComponent ""
+ x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x'
+ in c ++ repairRest r
+ where repairComponent c | all isNumber c = invalid c
+ | otherwise = valid c
+ repairRest = repair id ('-' :)
+ fallbackName = "site"
+
+ safeLast = foldl' (\_ x -> Just x) Nothing
+
+createCabal :: FilePath -> String -> IO ()
+createCabal path name = do
+ writeFile path $ unlines [
+ "name: " ++ name
+ , "version: 0.1.0.0"
+ , "build-type: Simple"
+ , "cabal-version: >= 1.10"
+ , ""
+ , "executable site"
+ , " main-is: site.hs"
+ , " build-depends: base == 4.*"
+ , " , hakyll == " ++ version' ++ ".*"
+ , " ghc-options: -threaded"
+ , " default-language: Haskell2010"
+ ]
+ where
+ -- Major hakyll version
+ version' = intercalate "." . take 2 . map show $ versionBranch version