path: root/site.hs
author     Mirek Kratochvil <miroslav.kratochvil@uni.lu>  2023-07-21 10:54:45 +0200
committer  Mirek Kratochvil <miroslav.kratochvil@uni.lu>  2023-07-21 10:54:45 +0200
commit     435f1aff3c38b69d8e9ebc06ef43c4a0104bff2c (patch)
tree       dbaf7efec8328402888ed22dcd638339b3f76184 /site.hs
parent     83e6f4afb8f8ca434175f32f70981bb66c1b6b47 (diff)
download   reploy-435f1aff3c38b69d8e9ebc06ef43c4a0104bff2c.tar.gz
           reploy-435f1aff3c38b69d8e9ebc06ef43c4a0104bff2c.tar.bz2
rename the executable to reploy
Diffstat (limited to 'site.hs')
-rw-r--r--  site.hs  |  563 ----------
1 file changed, 0 insertions(+), 563 deletions(-)
diff --git a/site.hs b/site.hs
deleted file mode 100644
index 0c3a645..0000000
--- a/site.hs
+++ /dev/null
@@ -1,563 +0,0 @@
-{-
- - Copyright (C) 2023 University of Luxembourg
- -
- - Licensed under the Apache License, Version 2.0 (the "License"); you may not
- - use this file except in compliance with the License. You may obtain a copy
- - of the License from the LICENSE file in this repository, or at:
- -
- - http://www.apache.org/licenses/LICENSE-2.0
- -
- - Unless required by applicable law or agreed to in writing, software
- - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- - License for the specific language governing permissions and limitations
- - under the License.
- -}
-
-{-# LANGUAGE OverloadedStrings #-}
-
--- | The main deployment script.
-module Main where
-
-import Control.Monad ((>=>), filterM, join, unless, when)
-import Control.Monad.Extra (ifM, whenM)
-import Control.Monad.Trans.State.Lazy
-import qualified Data.Aeson as AE
-import qualified Data.Aeson.Key as K
-import qualified Data.Aeson.KeyMap as KM
-import qualified Data.ByteString.Lazy as B
-import Data.Digest.Pure.SHA (sha256, showDigest)
-import Data.Foldable (traverse_)
-import Data.List (inits, nub, sort)
-import Data.List.Extra (groupSort)
-import qualified Data.Map as M
-import qualified Data.Scientific
-import qualified Data.Set as S
-import Data.String (fromString)
-import qualified Data.Text as T
-import qualified Data.Text.Encoding
-import qualified Data.Text.IO as TIO
-import qualified Data.Yaml as Y
-import Hakyll.Core.Util.File (getRecursiveContents, makeDirectories)
-import Lens.Micro
-import Lens.Micro.Aeson
-import Lens.Micro.Mtl
-import qualified Options.Applicative
-import System.Directory (doesFileExist)
-import System.FilePath
- ( (</>)
- , isAbsolute
- , joinPath
- , splitDirectories
- , splitFileName
- , splitPath
- , takeDirectory
- , takeFileName
- )
-import qualified Text.Mustache as Mu
-import Text.Pandoc.Class (runIOorExplode)
-import qualified Text.Pandoc.Definition
-import Text.Pandoc.Readers.Markdown (readMarkdown)
-import Text.Pandoc.Writers (writePlain)
-import Text.Pandoc.Writers.HTML (writeHtml5String)
-import Text.Pandoc.Writers.Shared (toTableOfContents)
-import qualified Text.Parsec.Error
-
-import FormatOpts
-import Types
-import Utils
-
--- | Check if a given path should be sourced or not
-isSourceablePath :: FilePath -> Site Bool
-isSourceablePath fp = do
- notSource <- use notSourceDirs
- pure $ (&&) <$> hasSuffix ".md" . last <*> not . any (`elem` notSource) . init $
- splitDirectories fp
-
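--- Reading the point-free check in `isSourceablePath` above (it uses the
--- function applicative): for ps = splitDirectories fp it is equivalent to
--- hasSuffix ".md" (last ps) && not (any (`elem` notSource) (init ps)),
--- i.e. the file must end in ".md" and none of its parent directories may
--- appear in `notSourceDirs`.
-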
--- | Load the pages from a directory and add them to `pages`.
-sourcePages :: FilePath -> Site ()
-sourcePages fp =
- (io $ getRecursiveContents (pure . const False) fp) >>=
- filterM isSourceablePath >>=
- traverse_ (loadPage . (fp </>))
-
-{- | Extract `PageInfo` about a single page and save it into `pages` in
- - `SiteState`. -}
-loadPage :: FilePath -> Site ()
-loadPage fp = do
- io $ putStrLn $ "P <- " ++ fp
- txt <- io $ TIO.readFile fp
- {- tear out the metadata manually -}
- (T.take 4 txt == "---\n") `unless`
- error ("metadata block start missing in " ++ fp)
- let (meta, markdown) = T.breakOn "\n---\n" (T.drop 4 txt)
- T.null meta `when` error ("metadata block bad in " ++ fp)
- {- parse everything -}
- yml <- Y.decodeThrow $ Data.Text.Encoding.encodeUtf8 meta
- md <- io $ runIOorExplode $ readMarkdown markdownReadOpts (T.drop 5 markdown)
- {- find the main mount point for the page -}
- let mount =
- unAbsolute . T.unpack . just ("mount point of " ++ fp) $ yml ^?
- key "mount" .
- _String
- existing <- use $ pages . to (M.!? mount)
- case existing of
- Just pi ->
- error $ "mount for " ++ fp ++ " already exists from " ++ _pagePath pi
- _ -> pure ()
- {- save to the state -}
- pages %=
- M.insert mount PageInfo {_pagePath = fp, _pageMeta = yml, _pageDoc = md}
-
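--- For illustration, a page source accepted by `loadPage` starts with a YAML
--- metadata block delimited by "---" lines; "mount" is the only key required
--- here, and the concrete values below are hypothetical:
---
---   ---
---   mount: /about
---   title: About this site
---   ---
---
---   The Markdown body follows the closing delimiter.
-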
--- | Find which template to use for rendering a page.
-pageTemplate :: PageInfo -> Site FilePath
-pageTemplate pi = do
- dt <- use defaultTemplate
- pure . maybe dt T.unpack $ pi ^? pageMeta . key "template" . _String
-
--- | Collect all templates required for rendering the currently loaded pages.
-pageTemplates :: Site [FilePath]
-pageTemplates = do
- rt <- use redirectTemplate
- tt <- use tagTemplate
- lt <- use listTemplate
- nub . ([rt, tt, lt] ++) <$>
- (gets (^.. pages . traverse) >>= traverse pageTemplate)
-
--- | Compile a single template in a directory
-compileTemplate ::
- FilePath
- -> FilePath
- -> Site (Either Text.Parsec.Error.ParseError Mu.Template)
-compileTemplate templdir templ =
- io $ do
- putStrLn $ "T <- " ++ (templdir </> templ)
- Mu.automaticCompile [templdir] templ
-
--- | Load and compile the template set from a given directory.
-sourceTemplates :: FilePath -> Site ()
-sourceTemplates templdir = do
- ts <- pageTemplates
- templs' <- fmap sequence . traverse (compileTemplate templdir) $ ts
- case templs' of
- Left err -> error $ "template compilation: " ++ show err
- Right templs -> templates .= M.fromList (zip ts templs)
-
--- | Find the path to the "index.html" of a given mount.
-indexFilename :: FilePath -> Site FilePath
-indexFilename mount = do
- od <- use outputDir
- pure (od </> mount </> "index.html")
-
--- | Check that the page was not rendered before, and add it to the rendered set
-checkTarget :: FilePath -> Site ()
-checkTarget fp = do
- found <- S.member fp <$> use targets
- if found
- then error $ "colliding renders for page: " ++ fp
- else targets %= S.insert fp
-
--- | Prepend the root path to the given link
-rootUrl :: FilePath -> Site FilePath
-rootUrl fp = (</> unAbsolute fp) <$> use urlBase
-
--- | Process a single link pointing out from a page.
-processLink :: FilePath -> FilePath -> Site String
-processLink base l =
- if any (l `hasUriScheme`) ["http", "https", "ftp", "mailto"] || take 1 l ==
- "#"
- then pure l
- else if isAbsolute l
- then rootUrl l
- else installFile (base </> l) >>= rootUrl
-
--- | Map a page's mount point to its output file location.
--- (Pages are currently mounted just to the root.)
-pageFilename :: FilePath -> Site FilePath
-pageFilename = indexFilename
-
--- | Like `Mu.substitute`, but prints substitution errors (and eventually should throw on them)
-checkedSubstitute :: Mu.Template -> Y.Value -> IO T.Text
-checkedSubstitute t v = do
- let (es, txt) = Mu.checkedSubstitute t v
- traverse_ (putStrLn . ("Error: " ++) . show) es
- --null es `unless` error "template substitution problems"
- pure txt
-
--- | Add global information to page metadata for rendering (at this point just the url base)
-addGlobalMeta :: Y.Value -> Site Y.Value
-addGlobalMeta (Y.Object m) = do
- r <- fromString <$> use urlBase
- pure . Y.Object $ KM.insert "root" r m
-
--- | Get the expected timestamp file for a given filepath
-timestampFile :: FilePath -> Site FilePath
-timestampFile fp = do
- sfx <- use timestampSuffix
- pure . uncurry (</>) . fmap (++ sfx) . splitFileName $ fp
-
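--- For example, with a (hypothetical) `timestampSuffix` of ".timestamp",
--- `timestampFile` maps "pages/about.md" to "pages/about.md.timestamp".
-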
--- | If a timestamp file for the page exists, add the timestamp metadata.
-addTimeMeta :: PageInfo -> Y.Value -> Site Y.Value
-addTimeMeta pi m'@(Y.Object m)
- | "timestamp" `KM.member` m = pure m' -- do not overwrite the timestamp if present
- | otherwise = do
- tspath <- timestampFile $ pi ^. pagePath
- io $
- ifM
- (doesFileExist tspath)
- (do putStrLn $ "timestamp <- " ++ tspath
- ts <- Y.String <$> TIO.readFile tspath
- pure . Y.Object $ KM.insert "timestamp" ts m)
- (pure m')
-
--- | Add page-specific information to the metadata. Currently this just
--- expands the tags for rendering. Eventually it would be nice to source the
--- timestamps and possibly other info right here.
-addPageMeta :: PageInfo -> Y.Value -> Site Y.Value
-addPageMeta pi (Y.Object m) = do
- htagMeta <-
- traverse (makeHTagMeta tagLink) . sort $ pi ^.. pageMeta . key "tags" .
- values .
- _String .
- to T.unpack .
- to splitDirectories
- addTimeMeta pi . Y.Object $ KM.insert "htags" (Y.array htagMeta) m
-
--- | If enabled for the page (by config), add the TOC field
-addTOC :: PageInfo -> Text.Pandoc.Definition.Pandoc -> Y.Value -> Site Y.Value
-addTOC pi doc meta@(Y.Object meta') =
- let go n = do
- toc <-
- io . runIOorExplode $ writeHtml5String htmlWriteOpts $
- withPandocBlocks (pure . toTableOfContents (tocWriteOpts n)) doc
- pure . Y.Object $ KM.insert "toc" (Y.String toc) meta'
- in case ( pi ^? pageMeta . key "toc" . _Bool
- , join $ pi ^? pageMeta . key "toc" . _Number .
- to Data.Scientific.toBoundedInteger) of
- (Just False, _) -> pure meta
- (_, Nothing) -> go (3 :: Int)
- (_, Just n) -> go n
-
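--- In `addTOC` above: "toc: false" in the page metadata disables the table
--- of contents, a numeric "toc: n" passes n to `tocWriteOpts` (presumably
--- the TOC depth), and the default is 3.
-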
--- | Render a page using the current template.
-installPage :: FilePath -> PageInfo -> Site ()
-installPage mount pi = do
- tname <- pageTemplate pi
- templ <- use $ templates . to (M.! fromString tname)
- file <- pageFilename mount
- fixedUrlDoc <-
- walkURLs (processLink $ pi ^. pagePath . to takeDirectory) $ pi ^. pageDoc
- checkTarget file
- body <-
- io . runIOorExplode $ writeHtml5String htmlWriteOpts $
- addHeadingLinks "header-local-anchor" fixedUrlDoc
- let Y.Object meta' = pi ^. pageMeta
- meta = Y.Object $ KM.insert "body" (Y.String body) meta'
- meta <- addGlobalMeta meta >>= addTOC pi fixedUrlDoc >>= addPageMeta pi
- io $ do
- putStrLn $ "P -> " ++ file
- makeDirectories file
- checkedSubstitute templ meta >>= TIO.writeFile file
- installPageRedirects mount pi
-
-{- | Install a simple redirect handler page. -}
-installRedirect :: FilePath -> FilePath -> Site ()
-installRedirect target from = do
- tname <- use redirectTemplate
- templ <- use $ templates . to (M.! fromString tname)
- file <- indexFilename from
- checkTarget file
- io $ do
- putStrLn $ "@ -> " ++ file ++ " -> " ++ target
- makeDirectories file
- txt <-
- checkedSubstitute templ $ Y.object [("target", Y.String $ T.pack target)]
- TIO.writeFile file txt
-
--- | Install all redirects required by one page.
-installPageRedirects :: FilePath -> PageInfo -> Site ()
-installPageRedirects target pi = do
- traverse_
- (installRedirect target)
- (pi ^.. pageMeta . key "redirects" . values . _String . to T.unpack .
- to unAbsolute)
-
--- | Find the output path and site-relative location for the file of a given hash
-dataFilename :: String -> FilePath -> Site (FilePath, FilePath)
-dataFilename hash basename = do
- od <- use outputDir
- let (h1, h2) = splitAt 3 hash
- loc = "files" </> h1 </> h2 </> basename
- pure (od </> loc, loc)
-
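--- E.g. `dataFilename` for a digest starting "0f3a6d..." and basename
--- "logo.png" (values here are illustrative) yields the site-relative
--- location "files/0f3/a6d.../logo.png" together with the same path under
--- `outputDir`.
-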
--- | Copy a source file to the destination, making the necessary directories in the process.
-copy :: FilePath -> FilePath -> IO ()
-copy src dst = do
- makeDirectories dst
- B.readFile src >>= B.writeFile dst
-
-{- | Install a file. Files are installed into a single shared location. That
- - prevents file duplication and also gives some control over where the
- - files reside and what their names are. -}
-installFile :: FilePath -> Site FilePath
-installFile fp = do
- let basename = takeFileName fp
- hash <- showDigest . sha256 <$> io (B.readFile fp)
- (file, loc) <- dataFilename hash basename
- alreadyExists <- S.member (hash, basename) <$> use installs
- unless alreadyExists $ do
- installs %= S.insert (hash, basename)
- checkTarget file
- io $ do
- putStrLn $ "F -> " ++ fp ++ " -> " ++ file
- copy fp file
- pure loc
-
--- | Simply copy a strictly named asset.
-installAsset :: FilePath -> Site ()
-installAsset fp = do
- od <- use outputDir
- ad <- use assetDir
- let [src, dst] = map (</> fp) [ad, od]
- checkTarget dst
- io $ do
- putStrLn $ "A -> " ++ src ++ " -> " ++ dst
- copy src dst
-
--- | Copy all files from asset directory.
-installAssets :: Site ()
-installAssets =
- use assetDir >>=
- (io . getRecursiveContents (pure . const False) >=> traverse_ installAsset)
-
--- | Load tag names from a directory and add them to `tagNames`.
-sourceTagnames :: FilePath -> Site ()
-sourceTagnames fp =
- io
- (map (fp </>) . filter ((== "tagnames.yml") . last . splitPath) <$>
- getRecursiveContents (pure . const False) fp) >>=
- traverse_ sourceTagnameFile
-
--- | Load a single tag-name file for `sourceTagnames`
-sourceTagnameFile :: FilePath -> Site ()
-sourceTagnameFile fp = do
- yml' <-
- io $ do
- putStrLn $ "# <- " ++ fp
- Y.decodeFileEither fp
- case yml' of
- Left err ->
- error $ "Failed to load tagnames from " ++ fp ++ ": " ++ show err
- Right yml -> traverse_ add $ KM.toList (yml :: KM.KeyMap String)
- where add :: (KM.Key, String) -> Site ()
- add (k, v) =
- let go (Just ov) =
- if v == ov
- then Just ov
- else error
- ("conflicting tag names for tag " ++ K.toString k)
- go Nothing = Just v
- in tagNames %= M.alter go (K.toString k)
-
--- | Find the humanized name for a tag piece
-getTagName :: String -> Site String
-getTagName t = handleEmpty . maybe t id <$> use (tagNames . to (M.!? t))
- where
- handleEmpty "" = "all"
- handleEmpty x = x
-
--- | Get all tags from the pages of the site.
-sourceTags :: Site ()
-sourceTags = do
- sgat <-
- map
- (second $ map splitDirectories .
- (^.. pageMeta . key "tags" . values . _String . to T.unpack)) .
- M.assocs <$>
- use pages
- ehtags .= M.fromList (invExpandTags sgat)
- htags .= M.fromList (invTags sgat)
-
--- | Organize a list of pages with hierarchical tags to a list with
--- hierarchical tags with pages attached; with tags implying parents.
-invExpandTags :: [(FilePath, [[String]])] -> [([String], [FilePath])]
-invExpandTags x =
- map (fmap nub) $ groupSort [(t, p) | (p, htl) <- x, ht <- htl, t <- inits ht]
-
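--- A worked example:
---   invExpandTags [("p.md", [["a", "b"]])]
---     == [([], ["p.md"]), (["a"], ["p.md"]), (["a", "b"], ["p.md"])]
--- because every prefix of a hierarchical tag (including the empty root tag)
--- receives the page.
-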
--- | Like `invExpandTags` but without the expansion.
-invTags :: [(FilePath, [[String]])] -> [([String], [FilePath])]
-invTags x = map (fmap nub) $ groupSort [(t, p) | (p, htl) <- x, t <- htl]
-
--- | Get the destination for the tag page.
-tagFilename :: FilePath -> Site FilePath
-tagFilename tag = indexFilename $ "tag" </> tag
-
--- | Get the destination for the list page.
-listFilename :: FilePath -> Site FilePath
-listFilename tag = indexFilename $ "list" </> tag
-
--- | Fold the hierarchical tag bits to a slashed path.
-tagPath :: [String] -> FilePath
-tagPath = joinPath
-
--- | Make a link to the tag page
-tagLink :: [String] -> Site FilePath
-tagLink = rootUrl . ("tag" </>) . tagPath
-
--- | Fold the hierarchical tag bits to a slashed path.
-listPath :: [String] -> FilePath
-listPath = joinPath
-
--- | Make a link to the list page
-listLink :: [String] -> Site FilePath
-listLink = rootUrl . ("list" </>) . listPath
-
--- | Make metadata for printing out a single hierarchical tag (all levels clickable)
-makeHTagMeta :: ([String] -> Site FilePath) -> [String] -> Site Y.Value
-makeHTagMeta lf tag = do
- links <- traverse lf (inits tag)
- tags <- traverse getTagName ("" : tag)
- pure . Y.array $
- zipWith
- (\t l -> Y.object [("tag", fromString t), ("href", fromString l)])
- tags
- links
-
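--- E.g. for the tag ["projects", "web"] rendered with `tagLink`,
--- `makeHTagMeta` yields three entries: the name of "" (shown as "all"
--- unless overridden in a tagnames.yml) linking to tag/, the name of
--- "projects" linking to tag/projects, and the name of "web" linking to
--- tag/projects/web, each prefixed with the url base.
-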
--- | Make metadata for printing out a link to a page
-makePageLinkMeta :: FilePath -> Site Y.Value
-makePageLinkMeta mount = do
- link <- rootUrl mount
- meta <- use $ pages . to (M.! mount) . pageMeta
- pure $ Y.object [("href", fromString link), ("meta", meta)]
-
--- | Create the complete metadata structure for the template that renders a given categorical tag page
-makeTagMeta :: [String] -> Site Y.Value
-makeTagMeta tag = do
- taggedPages <- use $ htags . to (M.!? tag) . to (maybe [] id)
- subtags <-
- gets
- (^.. ehtags . to M.keys . each . filtered (not . null) .
- filtered ((== tag) . init))
- htagMeta <- makeHTagMeta tagLink tag
- subtagsMeta <- Y.array <$> traverse makeTagMeta subtags
- pagesMeta <- Y.array <$> traverse makePageLinkMeta taggedPages
- link <- tagLink tag
- listlink <- listLink tag
- tags <- Y.array . map fromString <$> traverse getTagName tag
- addGlobalMeta $
- Y.object
- [ ("href", fromString link)
- , ("tags", tags)
- , ("htag", htagMeta)
- , ("subtags", subtagsMeta)
- , ("pages", pagesMeta)
- , ("listhref", fromString listlink)
- ]
-
--- | Make metadata for printing out a single tag as-is, without levels
-makeHTagLinkMeta :: [String] -> Site Y.Value
-makeHTagLinkMeta tag = do
- link <- listLink tag
- tags <- Y.array . map fromString <$> traverse getTagName tag
- pure $ Y.object [("href", fromString link), ("tags", tags)]
-
--- | Create the structure for rendering a complete listing of one hierarchical tag.
-makeListMeta :: [String] -> Site Y.Value
-makeListMeta tag = do
- taggedPages <- use $ ehtags . to (M.! tag)
- subtags <-
- gets
- (^.. ehtags . to M.keys . each . filtered (not . null) .
- filtered ((== tag) . init))
- htagMeta <- makeHTagMeta listLink tag
- subtagsMeta <- Y.array <$> traverse makeHTagLinkMeta subtags
- pagesMeta <- Y.array <$> traverse makePageLinkMeta taggedPages
- tl <- tagLink tag
- addGlobalMeta $
- Y.object
- [ ("htag", htagMeta)
- , ("subtags", subtagsMeta)
- , ("pages", pagesMeta)
- , ("taghref", fromString tl)
- ]
-
--- | Render the tag page for a given tag.
-renderTag :: [String] -> Site ()
-renderTag tag = do
- tname <- use tagTemplate
- templ <- use $ templates . to (M.! fromString tname)
- file <- tagFilename (tagPath tag)
- checkTarget file
- meta <- makeTagMeta tag
- io $ do
- putStrLn $ "# -> " ++ file
- makeDirectories file
- checkedSubstitute templ meta >>= TIO.writeFile file
-
--- | Render all tag pages.
-renderTags :: Site ()
-renderTags = use (ehtags . to M.keys) >>= traverse_ renderTag
-
--- | Render the list page for a given tag.
-renderList :: [String] -> Site ()
-renderList tag = do
- tname <- use listTemplate
- templ <- use $ templates . to (M.! fromString tname)
- file <- listFilename (listPath tag)
- checkTarget file
- meta <- makeListMeta tag
- io $ do
- putStrLn $ "* -> " ++ file
- makeDirectories file
- checkedSubstitute templ meta >>= TIO.writeFile file
-
--- | Render all list pages.
-renderLists :: Site ()
-renderLists = use (ehtags . to M.keys) >>= traverse_ renderList
-
--- | Transform one mounted PageInfo into its base search data
-mkSearchData :: FilePath -> PageInfo -> Site [Y.Value]
-mkSearchData mount pi = do
- link <- rootUrl mount
- text <- io . runIOorExplode $ writePlain plainWriteOpts (pi ^. pageDoc)
- let title = pi ^? pageMeta . key "title" . _String
- -- TODO: unify retrieval of tags
- let tags =
- sort $ pi ^.. pageMeta . key "tags" . values . _String . to T.unpack
- tagnames <- traverse (traverse getTagName . splitDirectories) tags
- let tagarray = Y.array . map (Y.array . map fromString) $ tagnames
- if pi ^? pageMeta . key "search" . _Bool == Just False -- `off` gets parsed as bool
- then pure []
- else pure $
- [ Y.object
- [ ("link", fromString link)
- , ("title", maybe (fromString mount) Y.String title)
- , ("tags", tagarray)
- , ("text", Y.String text)
- ]
- ]
-
--- | Collect all pages' search data into the search data output file
-renderSearchData :: Site ()
-renderSearchData = use searchDataOut >>= traverse_ go
- where
- go out = do
- ps <- use (pages . to M.assocs) >>= traverse (uncurry mkSearchData)
- io $ do
- putStrLn $ "S -> " ++ out
- AE.encodeFile out $ Y.array (concat ps)
-
--- | Build the whole site.
-main = do
- init <- Options.Applicative.execParser siteOptions
- flip runStateT init $ do
- installAssets
- use sourceDirs >>= traverse sourcePages
- use sourceDirs >>= traverse sourceTagnames
- sourceTags
- use templateDir >>= sourceTemplates
- use pages >>= traverse (uncurry installPage) . M.assocs
- renderTags
- renderLists
- renderSearchData
- io $ putStrLn "OK"
- whenM (use dumpFinalState) $ get >>= io . print