{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.Response (
sendResponse
, fileRange -- for testing
, warpVersion
, defaultServerValue
) where
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
import Blaze.ByteString.Builder (fromByteString, Builder, flush)
import Blaze.ByteString.Builder.HTTP (chunkedTransferEncoding, chunkedTransferTerminator)
import Control.Applicative
import Control.Exception
import Data.Array ((!))
import Data.ByteString (ByteString)
import Data.Streaming.Blaze (newBlazeRecv, reuseBufferStrategy)
import qualified Data.ByteString as S
import Control.Monad (unless, when)
import qualified Data.ByteString.Char8 as B (pack)
import qualified Data.CaseInsensitive as CI
import Data.Function (on)
import Data.List (deleteBy)
import Data.Maybe (isJust, listToMaybe)
#if MIN_VERSION_base(4,5,0)
import Data.Monoid ((<>), mempty)
#else
import Data.Monoid (mappend, mempty)
#endif
import Data.Version (showVersion)
import qualified Network.HTTP.Types as H
import Network.Wai
import qualified Network.Wai.Handler.Warp.Date as D
import Network.Wai.Handler.Warp.Buffer (toBlazeBuffer)
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.IO (toBufIOWith)
import Network.Wai.Handler.Warp.ResponseHeader
import Network.Wai.Handler.Warp.RequestHeader (parseByteRanges)
import qualified Network.Wai.Handler.Warp.Timeout as T
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import Numeric (showInt)
import qualified Paths_warp
import qualified System.PosixCompat.Files as P
#if !MIN_VERSION_base(4,5,0)
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
-- $setup
-- >>> :set -XOverloadedStrings
mapRight :: (b -> c) -> Either a b -> Either a c
mapRight f eith = case eith of
Right x -> Right (f x)
Left l -> Left l
----------------------------------------------------------------
fileRange :: H.Status -> H.ResponseHeaders -> FilePath
-> Maybe FilePart -> Maybe HeaderValue
-> IO (Either IOException
(H.Status, H.ResponseHeaders, Integer, Integer))
fileRange s0 hs0 path Nothing mRange =
mapRight (fileRangeSized s0 hs0 Nothing mRange . fromIntegral . P.fileSize) <$>
try (P.getFileStatus path)
fileRange s0 hs0 _ mPart@(Just part) mRange =
return . Right $ fileRangeSized s0 hs0 mPart mRange size
where
size = filePartFileSize part
fileRangeSized :: H.Status -> H.ResponseHeaders
-> Maybe FilePart -> Maybe HeaderValue -> Integer
-> (H.Status, H.ResponseHeaders, Integer, Integer)
fileRangeSized s0 hs0 mPart mRange fileSize = (s, hs, beg, len)
where
(beg, end, len, isEntire) = checkPartRange fileSize mPart mRange
hs1 = addContentLength len hs0
hs | isEntire = hs1
| otherwise = addContentRange beg end fileSize hs1
s | isEntire = s0
| otherwise = H.status206
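-- Illustrative results, worked out by hand from the clauses below
-- (assuming a 100-byte file and OverloadedStrings; these are not doctests):
--
-- > checkPartRange 100 Nothing Nothing == (0, 99, 100, True)
-- > checkPartRange 100 Nothing (Just "bytes=10-19") == (10, 19, 10, False)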
checkPartRange :: Integer -> Maybe FilePart -> Maybe HeaderValue
-> (Integer, Integer, Integer, Bool)
checkPartRange fileSize = checkPart
where
checkPart Nothing Nothing = (0, fileSize - 1, fileSize, True)
checkPart Nothing (Just range) = case parseByteRanges range >>= listToMaybe of
-- Range is broken
Nothing -> (0, fileSize - 1, fileSize, True)
Just hrange -> checkRange hrange
-- Ignore Range if FilePart is specified.
-- We assume that an application handled Range and specified
-- FilePart.
checkPart (Just part) _ = (beg, end, len, isEntire)
where
beg = filePartOffset part
len = filePartByteCount part
end = beg + len - 1
isEntire = beg == 0 && len == fileSize
checkRange (H.ByteRangeFrom beg) = fromRange beg (fileSize - 1)
checkRange (H.ByteRangeFromTo beg end) = fromRange beg (min (fileSize - 1) end)
checkRange (H.ByteRangeSuffix count) = fromRange (max 0 (fileSize - count)) (fileSize - 1)
fromRange beg end = (beg, end, len, isEntire)
where
len = end - beg + 1
isEntire = beg == 0 && len == fileSize
----------------------------------------------------------------
-- | Sending an HTTP response to 'Connection' according to 'Response'.
--
-- Applications/middlewares MUST specify proper 'H.ResponseHeaders'
-- so that no inconsistency arises.
-- No header is deleted by this function.
--
-- In particular, applications/middlewares MUST take care of
-- Content-Length, Content-Range, and Transfer-Encoding,
-- because these are inserted, when necessary,
-- regardless of whether they already exist.
-- This function does not insert Content-Encoding; that is the
-- middleware's responsibility.
--
-- The Date and Server headers are added if they do not already exist
-- in the HTTP response header.
--
-- There are three basic APIs to create 'Response':
--
-- ['responseFile' :: 'H.Status' -> 'H.ResponseHeaders' -> 'FilePath' -> 'Maybe' 'FilePart' -> 'Response']
-- The HTTP response body is sent with sendfile() for the GET method;
-- no body is sent for the HEAD method.
-- Applications fall into two categories: simple and sophisticated.
-- Simple applications should pass 'Nothing' for
-- 'Maybe' 'FilePart': the size of the specified file is obtained
-- by disk access, and Range is then handled automatically.
-- Sophisticated applications should pass 'Just' for
-- 'Maybe' 'FilePart' and handle Range (and If-Range) by
-- themselves. In both cases,
-- Content-Length and Content-Range (if necessary) are automatically
-- added to the HTTP response header.
-- If Content-Length and Content-Range already exist in the HTTP
-- response header, they would cause inconsistency.
-- The status is also changed to 206 (Partial Content) if necessary.
--
-- ['responseBuilder' :: 'H.Status' -> 'H.ResponseHeaders' -> 'Builder' -> 'Response']
-- HTTP response body is created from 'Builder'.
-- Transfer-Encoding: chunked is used in HTTP/1.1.
--
-- ['responseStream' :: 'H.Status' -> 'H.ResponseHeaders' -> 'StreamingBody' -> 'Response']
-- HTTP response body is created from a 'StreamingBody' (a stream of 'Builder's).
-- Transfer-Encoding: chunked is used in HTTP/1.1.
--
-- ['responseRaw' :: ('IO' 'ByteString' -> ('ByteString' -> 'IO' ()) -> 'IO' ()) -> 'Response' -> 'Response']
-- No header is added and no Transfer-Encoding: is applied.
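--
-- As a rough sketch (not part of this module; it assumes OverloadedStrings
-- and an @index.html@ file on disk), an application whose responses end up
-- here might look like:
--
-- > app :: Application
-- > app _req respond = respond $ responseFile H.status200
-- >     [("Content-Type", "text/html")] "index.html" Nothing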
sendResponse :: ByteString -- ^ default server value
-> Connection
-> InternalInfo
-> Request -- ^ HTTP request.
-> IndexedHeader -- ^ Indexed header of HTTP request.
-> IO ByteString -- ^ source from client, for raw response
-> Response -- ^ HTTP response including status code and response header.
-> IO Bool -- ^ Returning True if the connection is persistent.
sendResponse defServer conn ii req reqidxhdr src response = do
hs <- addServerAndDate hs0
if hasBody s req then do
-- HEAD requests also come here, even though they carry no response body.
sendRsp conn ver s hs rsp
T.tickle th
return ret
else do
sendResponseNoBody conn ver s hs
T.tickle th
return isPersist
where
ver = httpVersion req
s = responseStatus response
hs0 = responseHeaders response
rspidxhdr = indexResponseHeader hs0
th = threadHandle ii
dc = dateCacher ii
addServerAndDate = addDate dc rspidxhdr . addServer defServer rspidxhdr
mRange = reqidxhdr ! idxRange
(isPersist,isChunked0) = infoFromRequest req reqidxhdr
isChunked = if isHead then False else isChunked0
(isKeepAlive, needsChunked) = infoFromResponse rspidxhdr (isPersist,isChunked)
isHead = requestMethod req == H.methodHead
rsp = case response of
ResponseFile _ _ path mPart -> RspFile path mPart mRange isHead (T.tickle th)
ResponseBuilder _ _ b -> RspBuilder b needsChunked
ResponseStream _ _ fb -> RspStream fb needsChunked th
ResponseRaw raw _ -> RspRaw raw src (T.tickle th)
ret = case response of
ResponseFile {} -> isPersist
ResponseBuilder {} -> isKeepAlive
ResponseStream {} -> isKeepAlive
ResponseRaw {} -> False
----------------------------------------------------------------
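-- Internal representation of how the response body will be delivered.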
data Rsp = RspFile FilePath (Maybe FilePart) (Maybe HeaderValue) Bool (IO ())
| RspBuilder Builder Bool
| RspStream StreamingBody Bool T.Handle
| RspRaw (IO ByteString -> (ByteString -> IO ()) -> IO ()) (IO ByteString) (IO ())
----------------------------------------------------------------
sendRsp :: Connection
-> H.HttpVersion
-> H.Status
-> H.ResponseHeaders
-> Rsp
-> IO ()
sendRsp conn ver s0 hs0 (RspFile path mPart mRange isHead hook) = do
ex <- fileRange s0 hs path mPart mRange
case ex of
Left _ex ->
#ifdef WARP_DEBUG
print _ex >>
#endif
sendRsp conn ver s2 hs2 (RspBuilder body True)
Right (s, hs1, beg, len)
| len >= 0 ->
if isHead then
sendRsp conn ver s hs1 (RspBuilder mempty False)
else do
lheader <- composeHeader ver s hs1
connSendFile conn path beg len hook [lheader]
| otherwise -> do
sendRsp conn ver H.status416
(filter (\(k, _) -> k /= "content-length") hs1)
(RspBuilder mempty True)
where
hs = addAcceptRanges hs0
s2 = H.status404
hs2 = replaceHeader H.hContentType "text/plain; charset=utf-8" hs0
body = fromByteString "File not found"
----------------------------------------------------------------
sendRsp conn ver s hs (RspBuilder body needsChunked) = do
header <- composeHeaderBuilder ver s hs needsChunked
let hdrBdy
| needsChunked = header <> chunkedTransferEncoding body
<> chunkedTransferTerminator
| otherwise = header <> body
buffer = connWriteBuffer conn
size = connBufferSize conn
toBufIOWith buffer size (connSendAll conn) hdrBdy
----------------------------------------------------------------
sendRsp conn ver s hs (RspStream streamingBody needsChunked th) = do
header <- composeHeaderBuilder ver s hs needsChunked
(recv, finish) <- newBlazeRecv $ reuseBufferStrategy
$ toBlazeBuffer (connWriteBuffer conn) (connBufferSize conn)
let send builder = do
popper <- recv builder
let loop = do
bs <- popper
unless (S.null bs) $ do
sendFragment conn th bs
loop
loop
sendChunk
| needsChunked = send . chunkedTransferEncoding
| otherwise = send
send header
streamingBody sendChunk (sendChunk flush)
when needsChunked $ send chunkedTransferTerminator
mbs <- finish
maybe (return ()) (sendFragment conn th) mbs
----------------------------------------------------------------
sendRsp conn _ _ _ (RspRaw withApp src tickle) =
withApp recv send
where
recv = do
bs <- src
unless (S.null bs) tickle
return bs
send bs = connSendAll conn bs >> tickle
----------------------------------------------------------------
sendResponseNoBody :: Connection
-> H.HttpVersion
-> H.Status
-> H.ResponseHeaders
-> IO ()
sendResponseNoBody conn ver s hs = composeHeader ver s hs >>= connSendAll conn
----------------------------------------------------------------
----------------------------------------------------------------
-- | Use 'connSendAll' to send this data while respecting timeout rules.
sendFragment :: Connection -> T.Handle -> ByteString -> IO ()
sendFragment Connection { connSendAll = send } th bs = do
T.resume th
send bs
T.pause th
-- We pause timeouts before passing control back to user code. This ensures
-- that a timeout will only ever be executed when Warp is in control. We
-- also make sure to resume the timeout after the completion of user code
-- so that we can kill idle connections.
----------------------------------------------------------------
infoFromRequest :: Request -> IndexedHeader -> (Bool -- isPersist
,Bool) -- isChunked
infoFromRequest req reqidxhdr = (checkPersist req reqidxhdr, checkChunk req)
checkPersist :: Request -> IndexedHeader -> Bool
checkPersist req reqidxhdr
| ver == H.http11 = checkPersist11 conn
| otherwise = checkPersist10 conn
where
ver = httpVersion req
conn = reqidxhdr ! idxConnection
checkPersist11 (Just x)
| CI.foldCase x == "close" = False
checkPersist11 _ = True
checkPersist10 (Just x)
| CI.foldCase x == "keep-alive" = True
checkPersist10 _ = False
checkChunk :: Request -> Bool
checkChunk req = httpVersion req == H.http11
----------------------------------------------------------------
-- Used for ResponseBuilder and ResponseStream.
-- Don't use this for ResponseFile since this logic does not fit
-- for ResponseFile. For instance, isKeepAlive should be True in some cases
-- even if the response header does not have Content-Length.
--
-- Content-Length is specified by a reverse proxy.
-- Note that CGI does not specify Content-Length.
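-- Illustration: for a persistent HTTP/1.1 request whose response carries no
-- Content-Length, this yields (isKeepAlive = True, needsChunked = True);
-- with a Content-Length present it yields (True, False).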
infoFromResponse :: IndexedHeader -> (Bool,Bool) -> (Bool,Bool)
infoFromResponse rspidxhdr (isPersist,isChunked) = (isKeepAlive, needsChunked)
where
needsChunked = isChunked && not hasLength
isKeepAlive = isPersist && (isChunked || hasLength)
hasLength = isJust $ rspidxhdr ! idxContentLength
----------------------------------------------------------------
hasBody :: H.Status -> Request -> Bool
hasBody s req = sc /= 204
&& sc /= 304
&& sc >= 200
where
sc = H.statusCode s
method = requestMethod req
----------------------------------------------------------------
addAcceptRanges :: H.ResponseHeaders -> H.ResponseHeaders
addAcceptRanges hdrs = (hAcceptRanges, "bytes") : hdrs
addTransferEncoding :: H.ResponseHeaders -> H.ResponseHeaders
addTransferEncoding hdrs = (hTransferEncoding, "chunked") : hdrs
addContentLength :: Integer -> H.ResponseHeaders -> H.ResponseHeaders
addContentLength cl hdrs = (H.hContentLength, len) : hdrs
where
len = B.pack $ show cl
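-- The rendered value has the form "bytes <beg>-<end>/<total>", e.g.
-- "bytes 0-99/100"; an unsatisfiable range (beg > end) renders as
-- "bytes */<total>".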
addContentRange :: Integer -> Integer -> Integer
-> H.ResponseHeaders -> H.ResponseHeaders
addContentRange beg end total hdrs = (hContentRange, range) : hdrs
where
range = B.pack
-- building with ShowS
$ 'b' : 'y': 't' : 'e' : 's' : ' '
: (if beg > end then ('*':) else
(showInt beg)
. ('-' :)
. (showInt end))
( '/'
: showInt total "")
addDate :: D.DateCache -> IndexedHeader -> H.ResponseHeaders -> IO H.ResponseHeaders
addDate dc rspidxhdr hdrs = case rspidxhdr ! idxDate of
Nothing -> do
gmtdate <- D.getDate dc
return $ (H.hDate, gmtdate) : hdrs
Just _ -> return hdrs
----------------------------------------------------------------
-- | The version of Warp.
warpVersion :: String
warpVersion = showVersion Paths_warp.version
defaultServerValue :: HeaderValue
defaultServerValue = B.pack $ "Warp/" ++ warpVersion
addServer :: HeaderValue -> IndexedHeader -> H.ResponseHeaders -> H.ResponseHeaders
addServer defaultServerValue' rspidxhdr hdrs = case rspidxhdr ! idxServer of
Nothing -> (hServer, defaultServerValue') : hdrs
_ -> hdrs
----------------------------------------------------------------
-- |
--
-- >>> replaceHeader "Content-Type" "new" [("content-type","old")]
-- [("Content-Type","new")]
replaceHeader :: H.HeaderName -> HeaderValue -> H.ResponseHeaders -> H.ResponseHeaders
replaceHeader k v hdrs = (k,v) : deleteBy ((==) `on` fst) (k,v) hdrs
----------------------------------------------------------------
composeHeaderBuilder :: H.HttpVersion -> H.Status -> H.ResponseHeaders -> Bool -> IO Builder
composeHeaderBuilder ver s hs True =
fromByteString <$> composeHeader ver s (addTransferEncoding hs)
composeHeaderBuilder ver s hs False =
fromByteString <$> composeHeader ver s hs
-- Source: jberryman/wai, warp/Network/Wai/Handler/Warp/Response.hs (MIT license, 16,454 bytes)
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE QuasiQuotes #-}
module Main
( main
) where
#ifdef USE_MICROLENS
import Lens.Micro
import Lens.Micro.Mtl
#else
import Control.Lens
#endif
import Control.Monad.State.Lazy
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as LB
import Data.Map (Map)
import qualified Data.Map as M
import Data.Time.Clock.POSIX (POSIXTime)
import qualified Data.Vector as V
import Text.RawString.QQ
import Text.XML
import Test.Tasty (defaultMain, testGroup)
import Test.Tasty.HUnit (testCase)
import Test.Tasty.HUnit ((@=?))
import Codec.Xlsx
import Codec.Xlsx.Formatted
import Codec.Xlsx.Types.Internal
import Codec.Xlsx.Types.Internal.CommentTable
import Codec.Xlsx.Types.Internal.CustomProperties
as CustomProperties
import Codec.Xlsx.Types.Internal.SharedStringTable
import AutoFilterTests
import Common
import CommonTests
import CondFmtTests
import Diff
import PivotTableTests
import DrawingTests
main :: IO ()
main = defaultMain $
testGroup "Tests"
[ testCase "write . read == id" $ do
let bs = fromXlsx testTime testXlsx
LB.writeFile "data-test.xlsx" bs
testXlsx @==? toXlsx (fromXlsx testTime testXlsx)
, testCase "write . fast-read == id" $ do
let bs = fromXlsx testTime testXlsx
LB.writeFile "data-test.xlsx" bs
testXlsx @==? toXlsxFast (fromXlsx testTime testXlsx)
, testCase "fromRows . toRows == id" $
testCellMap1 @=? fromRows (toRows testCellMap1)
, testCase "fromRight . parseStyleSheet . renderStyleSheet == id" $
testStyleSheet @==? fromRight (parseStyleSheet (renderStyleSheet testStyleSheet))
, testCase "correct shared strings parsing" $
[testSharedStringTable] @=? parseBS testStrings
, testCase "correct shared strings parsing: single underline" $
[withSingleUnderline testSharedStringTable] @=? parseBS testStringsWithSingleUnderline
, testCase "correct shared strings parsing: double underline" $
[withDoubleUnderline testSharedStringTable] @=? parseBS testStringsWithDoubleUnderline
, testCase "correct shared strings parsing even when one of the shared strings entry is just <t/>" $
[testSharedStringTableWithEmpty] @=? parseBS testStringsWithEmpty
, testCase "correct comments parsing" $
[testCommentTable] @=? parseBS testComments
, testCase "correct custom properties parsing" $
[testCustomProperties] @==? parseBS testCustomPropertiesXml
, testCase "proper results from `formatted`" $
testFormattedResult @==? testRunFormatted
, testCase "proper results from `formatWorkbook`" $
testFormatWorkbookResult @==? testFormatWorkbook
, testCase "formatted . toFormattedCells = id" $ do
let fmtd = formatted testFormattedCells minimalStyleSheet
testFormattedCells @==? toFormattedCells (formattedCellMap fmtd) (formattedMerges fmtd)
(formattedStyleSheet fmtd)
, testCase "proper results from `conditionalltyFormatted`" $
testCondFormattedResult @==? testRunCondFormatted
, testCase "toXlsxEither: properly formatted" $
Right testXlsx @==? toXlsxEither (fromXlsx testTime testXlsx)
, testCase "toXlsxEither: invalid format" $
Left InvalidZipArchive @==? toXlsxEither "this is not a valid XLSX file"
, CommonTests.tests
, CondFmtTests.tests
, PivotTableTests.tests
, DrawingTests.tests
, AutoFilterTests.tests
]
testXlsx :: Xlsx
testXlsx = Xlsx sheets minimalStyles definedNames customProperties DateBase1904
where
sheets =
[("List1", sheet1), ("Another sheet", sheet2), ("with pivot table", pvSheet)]
sheet1 = Worksheet cols rowProps testCellMap1 drawing ranges
sheetViews pageSetup cFormatting validations [] (Just autoFilter)
tables (Just protection) sharedFormulas
sharedFormulas =
M.fromList
[ (SharedFormulaIndex 0, SharedFormulaOptions (CellRef "A5:C5") (Formula "A4"))
, (SharedFormulaIndex 1, SharedFormulaOptions (CellRef "B6:C6") (Formula "B3+12"))
]
autoFilter = def & afRef ?~ CellRef "A1:E10"
& afFilterColumns .~ fCols
fCols = M.fromList [ (1, Filters DontFilterByBlank
[FilterValue "a", FilterValue "b",FilterValue "ZZZ"])
, (2, CustomFiltersAnd (CustomFilter FltrGreaterThanOrEqual "0")
(CustomFilter FltrLessThan "42"))]
tables =
[ Table
{ tblName = Just "Table1"
, tblDisplayName = "Table1"
, tblRef = CellRef "A3"
, tblColumns = [TableColumn "another text"]
, tblAutoFilter = Just (def & afRef ?~ CellRef "A3")
}
]
protection =
fullSheetProtection
{ _sprScenarios = False
, _sprLegacyPassword = Just $ legacyPassword "hard password"
}
sheet2 = def & wsCells .~ testCellMap2
pvSheet = sheetWithPvCells & wsPivotTables .~ [testPivotTable]
sheetWithPvCells = def & wsCells .~ testPivotSrcCells
rowProps = M.fromList [(1, RowProps { rowHeight = Just (CustomHeight 50)
, rowStyle = Just 3
, rowHidden = False
})]
cols = [ColumnsProperties 1 10 (Just 15) (Just 1) False False False]
drawing = Just $ testDrawing { _xdrAnchors = map resolve $ _xdrAnchors testDrawing }
resolve :: Anchor RefId RefId -> Anchor FileInfo ChartSpace
resolve Anchor {..} =
let obj =
case _anchObject of
Picture {..} ->
let blipFill = (_picBlipFill & bfpImageInfo ?~ fileInfo)
in Picture
{ _picMacro = _picMacro
, _picPublished = _picPublished
, _picNonVisual = _picNonVisual
, _picBlipFill = blipFill
, _picShapeProperties = _picShapeProperties
}
Graphic nv _ tr ->
Graphic nv testLineChartSpace tr
in Anchor
{ _anchAnchoring = _anchAnchoring
, _anchObject = obj
, _anchClientData = _anchClientData
}
fileInfo = FileInfo "dummy.png" "image/png" "fake contents"
ranges = [mkRange (1,1) (1,2), mkRange (2,2) (10, 5)]
minimalStyles = renderStyleSheet minimalStyleSheet
definedNames = DefinedNames [("SampleName", Nothing, "A10:A20")]
sheetViews = Just [sheetView1, sheetView2]
sheetView1 = def & sheetViewRightToLeft ?~ True
& sheetViewTopLeftCell ?~ CellRef "B5"
sheetView2 = def & sheetViewType ?~ SheetViewTypePageBreakPreview
& sheetViewWorkbookViewId .~ 5
& sheetViewSelection .~ [ def & selectionActiveCell ?~ CellRef "C2"
& selectionPane ?~ PaneTypeBottomRight
, def & selectionActiveCellId ?~ 1
& selectionSqref ?~ SqRef [ CellRef "A3:A10"
, CellRef "B1:G3"]
]
pageSetup = Just $ def & pageSetupBlackAndWhite ?~ True
& pageSetupCopies ?~ 2
& pageSetupErrors ?~ PrintErrorsDash
& pageSetupPaperSize ?~ PaperA4
customProperties = M.fromList [("some_prop", VtInt 42)]
cFormatting = M.fromList [(SqRef [CellRef "A1:B3"], rules1), (SqRef [CellRef "C1:C10"], rules2)]
cfRule c d = CfRule { _cfrCondition = c
, _cfrDxfId = Just d
, _cfrPriority = topCfPriority
, _cfrStopIfTrue = Nothing
}
rules1 = [ cfRule ContainsBlanks 1
, cfRule (ContainsText "foo") 2
, cfRule (CellIs (OpBetween (Formula "A1") (Formula "B10"))) 3
]
rules2 = [ cfRule ContainsErrors 3 ]
testCellMap1 :: CellMap
testCellMap1 = M.fromList [ ((1, 2), cd1_2), ((1, 5), cd1_5), ((1, 10), cd1_10)
, ((3, 1), cd3_1), ((3, 2), cd3_2), ((3, 3), cd3_3), ((3, 7), cd3_7)
, ((4, 1), cd4_1), ((4, 2), cd4_2), ((4, 3), cd4_3)
, ((5, 1), cd5_1), ((5, 2), cd5_2), ((5, 3), cd5_3)
, ((6, 2), cd6_2), ((6, 3), cd6_3)
]
where
cd v = def {_cellValue=Just v}
cd1_2 = cd (CellText "just a text, fließen, русский <> и & \"in quotes\"")
cd1_5 = cd (CellDouble 42.4567)
cd1_10 = cd (CellText "")
cd3_1 = cd (CellText "another text")
cd3_2 = def -- shouldn't it be skipped?
cd3_3 = def & cellValue ?~ CellError ErrorDiv0
& cellFormula ?~ simpleCellFormula "1/0"
cd3_7 = cd (CellBool True)
cd4_1 = cd (CellDouble 1)
cd4_2 = cd (CellDouble 123456789012345)
cd4_3 = (cd (CellDouble (1+2))) { _cellFormula =
Just $ simpleCellFormula "A4+B4<>11"
}
cd5_1 = def & cellFormula ?~ sharedFormulaByIndex (SharedFormulaIndex 0)
cd5_2 = def & cellFormula ?~ sharedFormulaByIndex (SharedFormulaIndex 0)
cd5_3 = def & cellFormula ?~ sharedFormulaByIndex (SharedFormulaIndex 0)
cd6_2 = def & cellFormula ?~ sharedFormulaByIndex (SharedFormulaIndex 1)
cd6_3 = def & cellFormula ?~ sharedFormulaByIndex (SharedFormulaIndex 1)
testCellMap2 :: CellMap
testCellMap2 = M.fromList [ ((1, 2), def & cellValue ?~ CellText "something here")
, ((3, 5), def & cellValue ?~ CellDouble 123.456)
, ((2, 4),
def & cellValue ?~ CellText "value"
& cellComment ?~ comment1
)
, ((10, 7),
def & cellValue ?~ CellText "value"
& cellComment ?~ comment2
)
, ((11, 4), def & cellComment ?~ comment3)
]
where
comment1 = Comment (XlsxText "simple comment") "bob" True
comment2 = Comment (XlsxRichText [rich1, rich2]) "alice" False
comment3 = Comment (XlsxText "comment for an empty cell") "bob" True
rich1 = def & richTextRunText.~ "Look ma!"
& richTextRunProperties ?~ (
def & runPropertiesBold ?~ True
& runPropertiesFont ?~ "Tahoma")
rich2 = def & richTextRunText .~ "It's blue!"
& richTextRunProperties ?~ (
def & runPropertiesItalic ?~ True
& runPropertiesColor ?~ (def & colorARGB ?~ "FF000080"))
testTime :: POSIXTime
testTime = 123
fromRight :: Show a => Either a b -> b
fromRight (Right b) = b
fromRight (Left x) = error $ "Right _ was expected but Left " ++ show x ++ " found"
testStyleSheet :: StyleSheet
testStyleSheet = minimalStyleSheet & styleSheetDxfs .~ [dxf1, dxf2, dxf3]
& styleSheetNumFmts .~ M.fromList [(164, "0.000")]
& styleSheetCellXfs %~ (++ [cellXf1, cellXf2])
where
dxf1 = def & dxfFont ?~ (def & fontBold ?~ True
& fontSize ?~ 12)
dxf2 = def & dxfFill ?~ (def & fillPattern ?~ (def & fillPatternBgColor ?~ red))
dxf3 = def & dxfNumFmt ?~ NumFmt 164 "0.000"
red = def & colorARGB ?~ "FFFF0000"
cellXf1 = def
{ _cellXfApplyNumberFormat = Just True
, _cellXfNumFmtId = Just 2 }
cellXf2 = def
{ _cellXfApplyNumberFormat = Just True
, _cellXfNumFmtId = Just 164 }
withSingleUnderline :: SharedStringTable -> SharedStringTable
withSingleUnderline = withUnderline FontUnderlineSingle
withDoubleUnderline :: SharedStringTable -> SharedStringTable
withDoubleUnderline = withUnderline FontUnderlineDouble
withUnderline :: FontUnderline -> SharedStringTable -> SharedStringTable
withUnderline u (SharedStringTable [text, XlsxRichText [rich1, RichTextRun (Just props) val]]) =
let newprops = props & runPropertiesUnderline .~ Just u
in SharedStringTable [text, XlsxRichText [rich1, RichTextRun (Just newprops) val]]
testSharedStringTable :: SharedStringTable
testSharedStringTable = SharedStringTable $ V.fromList items
where
items = [text, rich]
text = XlsxText "plain text"
rich = XlsxRichText [ RichTextRun Nothing "Just "
, RichTextRun (Just props) "example" ]
props = def & runPropertiesBold .~ Just True
& runPropertiesItalic .~ Just True
& runPropertiesSize .~ Just 10
& runPropertiesFont .~ Just "Arial"
& runPropertiesFontFamily .~ Just FontFamilySwiss
testSharedStringTableWithEmpty :: SharedStringTable
testSharedStringTableWithEmpty =
SharedStringTable $ V.fromList [XlsxText ""]
testCommentTable :: CommentTable
testCommentTable = CommentTable $ M.fromList
[ (CellRef "D4", Comment (XlsxRichText rich) "Bob" True)
, (CellRef "A2", Comment (XlsxText "Some comment here") "CBR" True) ]
where
rich = [ RichTextRun
{ _richTextRunProperties =
Just $ def & runPropertiesBold ?~ True
& runPropertiesCharset ?~ 1
& runPropertiesColor ?~ def -- TODO: why not Nothing here?
& runPropertiesFont ?~ "Calibri"
& runPropertiesScheme ?~ FontSchemeMinor
& runPropertiesSize ?~ 8.0
, _richTextRunText = "Bob:"}
, RichTextRun
{ _richTextRunProperties =
Just $ def & runPropertiesCharset ?~ 1
& runPropertiesColor ?~ def
& runPropertiesFont ?~ "Calibri"
& runPropertiesScheme ?~ FontSchemeMinor
& runPropertiesSize ?~ 8.0
, _richTextRunText = "Why such high expense?"}]
testStrings :: ByteString
testStrings = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<sst xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" count="2" uniqueCount="2">
<si><t>plain text</t></si>
<si><r><t>Just </t></r><r><rPr><b /><i />
<sz val="10"/><rFont val="Arial"/><family val="2"/></rPr><t>example</t></r></si>
</sst>
|]
testStringsWithSingleUnderline :: ByteString
testStringsWithSingleUnderline = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<sst xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" count="2" uniqueCount="2">
<si><t>plain text</t></si>
<si><r><t>Just </t></r><r><rPr><b /><i /><u />
<sz val="10"/><rFont val="Arial"/><family val="2"/></rPr><t>example</t></r></si>
</sst>
|]
testStringsWithDoubleUnderline :: ByteString
testStringsWithDoubleUnderline = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<sst xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" count="2" uniqueCount="2">
<si><t>plain text</t></si>
<si><r><t>Just </t></r><r><rPr><b /><i /><u val="double"/>
<sz val="10"/><rFont val="Arial"/><family val="2"/></rPr><t>example</t></r></si>
</sst>
|]
testStringsWithEmpty :: ByteString
testStringsWithEmpty = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<sst xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" count="2" uniqueCount="2">
<si><t/></si>
</sst>
|]
testComments :: ByteString
testComments = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<comments xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<authors>
<author>Bob</author>
<author>CBR</author>
</authors>
<commentList>
<comment ref="D4" authorId="0">
<text>
<r>
<rPr>
<b/><sz val="8"/><color indexed="81"/><rFont val="Calibri"/>
<charset val="1"/><scheme val="minor"/>
</rPr>
<t>Bob:</t>
</r>
<r>
<rPr>
<sz val="8"/><color indexed="81"/><rFont val="Calibri"/>
<charset val="1"/> <scheme val="minor"/>
</rPr>
<t xml:space="preserve">Why such high expense?</t>
</r>
</text>
</comment>
<comment ref="A2" authorId="1">
<text><t>Some comment here</t></text>
</comment>
</commentList>
</comments>
|]
testCustomProperties :: CustomProperties
testCustomProperties = CustomProperties.fromList
[ ("testTextProp", VtLpwstr "test text property value")
, ("prop2", VtLpwstr "222")
, ("bool", VtBool False)
, ("prop333", VtInt 1)
, ("decimal", VtDecimal 1.234) ]
testCustomPropertiesXml :: ByteString
testCustomPropertiesXml = [r|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Properties xmlns="http://schemas.openxmlformats.org/officeDocument/2006/custom-properties" xmlns:vt="http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes">
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="2" name="prop2">
<vt:lpwstr>222</vt:lpwstr>
</property>
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="3" name="prop333">
<vt:int>1</vt:int>
</property>
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="4" name="testTextProp">
<vt:lpwstr>test text property value</vt:lpwstr>
</property>
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="5" name="decimal">
<vt:decimal>1.234</vt:decimal>
</property>
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="6" name="bool">
<vt:bool>false</vt:bool>
</property>
<property fmtid="{D5CDD505-2E9C-101B-9397-08002B2CF9AE}" pid="7" name="blob">
<vt:blob>
ZXhhbXBs
ZSBibG9i
IGNvbnRl
bnRz
</vt:blob>
</property>
</Properties>
|]
testFormattedResult :: Formatted
testFormattedResult = Formatted cm styleSheet merges
where
cm = M.fromList [ ((1, 1), cell11)
, ((1, 2), cell12)
, ((2, 5), cell25) ]
cell11 = Cell
{ _cellStyle = Just 1
, _cellValue = Just (CellText "text at A1")
, _cellComment = Nothing
, _cellFormula = Nothing }
cell12 = Cell
{ _cellStyle = Just 2
, _cellValue = Just (CellDouble 1.23)
, _cellComment = Nothing
, _cellFormula = Nothing }
cell25 = Cell
{ _cellStyle = Just 3
, _cellValue = Just (CellDouble 1.23456)
, _cellComment = Nothing
, _cellFormula = Nothing }
merges = []
styleSheet =
minimalStyleSheet & styleSheetCellXfs %~ (++ [cellXf1, cellXf2, cellXf3])
& styleSheetFonts %~ (++ [font1, font2])
& styleSheetNumFmts .~ numFmts
nextFontId = length (minimalStyleSheet ^. styleSheetFonts)
cellXf1 = def
{ _cellXfApplyFont = Just True
, _cellXfFontId = Just nextFontId }
font1 = def
{ _fontName = Just "Calibri"
, _fontBold = Just True }
cellXf2 = def
{ _cellXfApplyFont = Just True
, _cellXfFontId = Just (nextFontId + 1)
, _cellXfApplyNumberFormat = Just True
, _cellXfNumFmtId = Just 164 }
font2 = def
{ _fontItalic = Just True }
cellXf3 = def
{ _cellXfApplyNumberFormat = Just True
, _cellXfNumFmtId = Just 2 }
numFmts = M.fromList [(164, "0.0000")]
testRunFormatted :: Formatted
testRunFormatted = formatted formattedCellMap minimalStyleSheet
where
formattedCellMap = flip execState def $ do
let font1 = def & fontBold ?~ True
& fontName ?~ "Calibri"
at (1, 1) ?= (def & formattedCell . cellValue ?~ CellText "text at A1"
& formattedFormat . formatFont ?~ font1)
at (1, 2) ?= (def & formattedCell . cellValue ?~ CellDouble 1.23
& formattedFormat . formatFont . non def . fontItalic ?~ True
& formattedFormat . formatNumberFormat ?~ fmtDecimalsZeroes 4)
at (2, 5) ?= (def & formattedCell . cellValue ?~ CellDouble 1.23456
& formattedFormat . formatNumberFormat ?~ StdNumberFormat Nf2Decimal)
testFormatWorkbookResult :: Xlsx
testFormatWorkbookResult = def & xlSheets .~ sheets
& xlStyles .~ renderStyleSheet style
where
testCellMap1 = M.fromList [((1, 1), Cell { _cellStyle = Nothing
, _cellValue = Just (CellText "text at A1 Sheet1")
, _cellComment = Nothing
, _cellFormula = Nothing })]
testCellMap2 = M.fromList [((2, 3), Cell { _cellStyle = Just 1
, _cellValue = Just (CellDouble 1.23456)
, _cellComment = Nothing
, _cellFormula = Nothing })]
sheets = [ ("Sheet1", def & wsCells .~ testCellMap1)
, ("Sheet2", def & wsCells .~ testCellMap2)
]
style = minimalStyleSheet & styleSheetNumFmts .~ M.fromList [(164, "DD.MM.YYYY")]
& styleSheetCellXfs .~ [cellXf1, cellXf2]
cellXf1 = def
& cellXfBorderId .~ Just 0
& cellXfFillId .~ Just 0
& cellXfFontId .~ Just 0
cellXf2 = def
{ _cellXfApplyNumberFormat = Just True
, _cellXfNumFmtId = Just 164 }
testFormatWorkbook :: Xlsx
testFormatWorkbook = formatWorkbook sheets minimalStyleSheet
where
sheetNames = ["Sheet1", "Sheet2"]
testFormattedCellMap1 = M.fromList [((1,1), (def & formattedCell . cellValue ?~ CellText "text at A1 Sheet1"))]
testFormattedCellMap2 = M.fromList [((2,3), (def & formattedCell . cellValue ?~ CellDouble 1.23456
& formattedFormat . formatNumberFormat ?~ (UserNumberFormat "DD.MM.YYYY")))]
sheets = zip sheetNames [testFormattedCellMap1, testFormattedCellMap2]
testCondFormattedResult :: CondFormatted
testCondFormattedResult = CondFormatted styleSheet formattings
where
styleSheet =
minimalStyleSheet & styleSheetDxfs .~ dxfs
dxfs = [ def & dxfFont ?~ (def & fontUnderline ?~ FontUnderlineSingle)
, def & dxfFont ?~ (def & fontStrikeThrough ?~ True)
, def & dxfFont ?~ (def & fontBold ?~ True) ]
formattings = M.fromList [ (SqRef [CellRef "A1:A2", CellRef "B2:B3"], [cfRule1, cfRule2])
, (SqRef [CellRef "C3:E10"], [cfRule1])
, (SqRef [CellRef "F1:G10"], [cfRule3]) ]
cfRule1 = CfRule
{ _cfrCondition = ContainsBlanks
, _cfrDxfId = Just 0
, _cfrPriority = 1
, _cfrStopIfTrue = Nothing }
cfRule2 = CfRule
{ _cfrCondition = BeginsWith "foo"
, _cfrDxfId = Just 1
, _cfrPriority = 1
, _cfrStopIfTrue = Nothing }
cfRule3 = CfRule
{ _cfrCondition = CellIs (OpGreaterThan (Formula "A1"))
, _cfrDxfId = Just 2
, _cfrPriority = 1
, _cfrStopIfTrue = Nothing }
testFormattedCells :: Map (Int, Int) FormattedCell
testFormattedCells = flip execState def $ do
at (1,1) ?= (def & formattedRowSpan .~ 5
& formattedColSpan .~ 5
& formattedFormat . formatBorder . non def . borderTop .
non def . borderStyleLine ?~ LineStyleDashed
& formattedFormat . formatBorder . non def . borderBottom .
non def . borderStyleLine ?~ LineStyleDashed)
at (10,2) ?= (def & formattedFormat . formatFont . non def . fontBold ?~ True)
testRunCondFormatted :: CondFormatted
testRunCondFormatted = conditionallyFormatted condFmts minimalStyleSheet
where
condFmts = flip execState def $ do
let cfRule1 = def & condfmtCondition .~ ContainsBlanks
& condfmtDxf . dxfFont . non def . fontUnderline ?~ FontUnderlineSingle
cfRule2 = def & condfmtCondition .~ BeginsWith "foo"
& condfmtDxf . dxfFont . non def . fontStrikeThrough ?~ True
cfRule3 = def & condfmtCondition .~ CellIs (OpGreaterThan (Formula "A1"))
& condfmtDxf . dxfFont . non def . fontBold ?~ True
at (CellRef "A1:A2") ?= [cfRule1, cfRule2]
at (CellRef "B2:B3") ?= [cfRule1, cfRule2]
at (CellRef "C3:E10") ?= [cfRule1]
at (CellRef "F1:G10") ?= [cfRule3]
validations :: Map SqRef DataValidation
validations = M.fromList
[ ( SqRef [CellRef "A1"], def
)
, ( SqRef [CellRef "A1", CellRef "B2:C3"], def
{ _dvAllowBlank = True
, _dvError = Just "incorrect data"
, _dvErrorStyle = ErrorStyleInformation
, _dvErrorTitle = Just "error title"
, _dvPrompt = Just "enter data"
, _dvPromptTitle = Just "prompt title"
, _dvShowDropDown = True
, _dvShowErrorMessage = True
, _dvShowInputMessage = True
, _dvValidationType = ValidationTypeList ["aaaa","bbbb","cccc"]
}
)
, ( SqRef [CellRef "A6", CellRef "I2"], def
{ _dvAllowBlank = False
, _dvError = Just "aaa"
, _dvErrorStyle = ErrorStyleWarning
, _dvErrorTitle = Just "bbb"
, _dvPrompt = Just "ccc"
, _dvPromptTitle = Just "ddd"
, _dvShowDropDown = False
, _dvShowErrorMessage = False
, _dvShowInputMessage = False
, _dvValidationType = ValidationTypeDecimal $ ValGreaterThan $ Formula "10"
}
)
, ( SqRef [CellRef "A7"], def
{ _dvAllowBlank = False
, _dvError = Just "aaa"
, _dvErrorStyle = ErrorStyleStop
, _dvErrorTitle = Just "bbb"
, _dvPrompt = Just "ccc"
, _dvPromptTitle = Just "ddd"
, _dvShowDropDown = False
, _dvShowErrorMessage = False
, _dvShowInputMessage = False
, _dvValidationType = ValidationTypeWhole $ ValNotBetween (Formula "10") (Formula "12")
}
)
]
-- Source: qrilka/xlsx, test/Main.hs (MIT license, 26,750 bytes)
module Y2017.M07.D05.Exercise where
-- below imports available via 1HaskellADay git repository
import Data.QBit
import Data.Numeral.QBits
{--
AHA! I'm BACK!
I'm BACK in the saddle again!
From the Mensa Genius Quiz-a-Day Book by Dr. Abbie F. Salny, July 3 problem:
The following multiplication example uses all the digits from 0 to 9 once and
only once (not counting the intermediate steps). Finish the problem. One number
has been filled in to get you started.
      x x x        (a)
        x 5        (b)
  ---------
  x x x x x        (c)
DO IT TO IT!
I LIKE TO MOVE IT, MOVE IT!
--}
multiplicationProblem :: Nums -> Nums -> Nums -> [(Nums, Nums, Nums)]
multiplicationProblem a b c = undefined
-- Source: geophf/1HaskellADay, exercises/HAD/Y2017/M07/D05/Exercise.hs (MIT license, 712 bytes)
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
module Api where
import Data.Aeson
import Data.Int (Int64)
import Data.Proxy
import Data.Text (Text)
import Database
import Database.Persist
import GHC.Generics
import Servant.API
import Servant.JS
data PersonTruthiness = PersonTruthiness
{ tVal :: Text
, total :: Int
} deriving (Show, ToJSON, Generic, Eq)
truthApi :: Proxy TruthApi
truthApi = Proxy
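-- The REST surface exposed to the site: list/search persons, fetch a
-- single person, list statements (optionally filtered by person name),
-- and per-person truthiness aggregates.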
type TruthApi =
ListPersons
:<|> FindPerson
:<|> ListStatements
:<|> ListTruthiness
type ListPersons =
"persons"
:> QueryParam "q" Text
:> Get '[JSON] [Entity Person]
type FindPerson =
"persons"
:> Capture "person_id" Int64
:> Get '[JSON] (Entity Person)
type ListStatements =
"statements"
:> QueryParam "person_name" Text
:> Get '[JSON] [PersonStatement]
type ListTruthiness =
"persons"
:> Capture "person_id" Int64
:> "truthiness"
:> Get '[JSON] [PersonTruthiness]
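-- Emit vanilla-JS client bindings for 'TruthApi' into the site's assets.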
generateJavaScript :: IO ()
generateJavaScript = writeJSForAPI (Proxy :: Proxy TruthApi) vanillaJS "../site/assets/api.js"
-- Source: tippenein/scrape_the_truth, src/Api.hs (MIT license, 1,138 bytes)
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module MarketData where
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Int
--import Data.Serialize (Serialize)
--import qualified Data.Serialize as S
import Control.Applicative
import Test.QuickCheck
--import Control.DeepSeq
data MarketData = MarketData {
ticker :: Text,
price :: Double,
volume :: Int32,
time :: Int64
} deriving (Eq, Show)
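-- Build a fixed-width (5-character) ticker, padding on the right with
-- 'A's; e.g. mkTicker 7 gives "S7AAA".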
mkTicker :: Int -> Text
mkTicker i = T.justifyLeft 5 'A' i'
where i' = T.pack ("S" ++ show i)
--instance Serialize MarketData where
-- put m = do
-- S.putByteString . T.encodeUtf16BE . ticker $ m
-- S.putFloat64be . price $ m
-- S.put . volume $ m
-- S.put . time $ m
-- get = MarketData <$> (T.decodeUtf16BE <$> S.getByteString 8)
-- <*> S.getFloat64be
-- <*> S.get
-- <*> S.get
instance Arbitrary MarketData where
arbitrary = MarketData <$> (mkTicker <$> choose (0,499))
<*> (getPositive <$> arbitrary)
<*> (getPositive <$> arbitrary)
<*> (getPositive <$> arbitrary)
--instance NFData MarketData where
-- Source: fhaust/pipes-eep, bench/MarketData.hs (MIT license, 1,256 bytes)
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLModElement
(js_setCite, setCite, js_getCite, getCite, js_setDateTime,
setDateTime, js_getDateTime, getDateTime, HTMLModElement,
castToHTMLModElement, gTypeHTMLModElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"cite\"] = $2;" js_setCite ::
HTMLModElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLModElement.cite Mozilla HTMLModElement.cite documentation>
setCite ::
(MonadIO m, ToJSString val) => HTMLModElement -> val -> m ()
setCite self val = liftIO (js_setCite (self) (toJSString val))
foreign import javascript unsafe "$1[\"cite\"]" js_getCite ::
HTMLModElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLModElement.cite Mozilla HTMLModElement.cite documentation>
getCite ::
(MonadIO m, FromJSString result) => HTMLModElement -> m result
getCite self = liftIO (fromJSString <$> (js_getCite (self)))
foreign import javascript unsafe "$1[\"dateTime\"] = $2;"
js_setDateTime :: HTMLModElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLModElement.dateTime Mozilla HTMLModElement.dateTime documentation>
setDateTime ::
(MonadIO m, ToJSString val) => HTMLModElement -> val -> m ()
setDateTime self val
= liftIO (js_setDateTime (self) (toJSString val))
foreign import javascript unsafe "$1[\"dateTime\"]" js_getDateTime
:: HTMLModElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLModElement.dateTime Mozilla HTMLModElement.dateTime documentation>
getDateTime ::
(MonadIO m, FromJSString result) => HTMLModElement -> m result
getDateTime self
= liftIO (fromJSString <$> (js_getDateTime (self))) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/HTMLModElement.hs | mit | 2,617 | 28 | 10 | 367 | 644 | 379 | 265 | 41 | 1 |
module System.Nix.Store.Remote.Protocol (
WorkerOp(..)
, simpleOp
, simpleOpArgs
, runOp
, runOpArgs
, runStore) where
import Control.Exception (bracket)
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Lazy as LBS
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString (recv)
import System.Nix.Store.Remote.Logger
import System.Nix.Store.Remote.Types
import System.Nix.Store.Remote.Util
import System.Nix.Util
protoVersion :: Int
protoVersion = 0x115
-- The major version is (protoVersion .&. 0xFF00) shifted right by 8,
-- the minor version is (protoVersion .&. 0x00FF); 0x115 is protocol 1.21.
workerMagic1 :: Int
workerMagic1 = 0x6e697863
workerMagic2 :: Int
workerMagic2 = 0x6478696f
sockPath :: String
sockPath = "/nix/var/nix/daemon-socket/socket"
data WorkerOp =
IsValidPath
| HasSubstitutes
| QueryReferrers
| AddToStore
| AddTextToStore
| BuildPaths
| EnsurePath
| AddTempRoot
| AddIndirectRoot
| SyncWithGC
| FindRoots
| SetOptions
| CollectGarbage
| QuerySubstitutablePathInfo
| QueryDerivationOutputs
| QueryAllValidPaths
| QueryFailedPaths
| ClearFailedPaths
| QueryPathInfo
| QueryDerivationOutputNames
| QueryPathFromHashPart
| QuerySubstitutablePathInfos
| QueryValidPaths
| QuerySubstitutablePaths
| QueryValidDerivers
| OptimiseStore
| VerifyStore
| BuildDerivation
| AddSignatures
| NarFromPath
| AddToStoreNar
| QueryMissing
deriving (Eq, Ord, Show)
opNum :: WorkerOp -> Int
opNum IsValidPath = 1
opNum HasSubstitutes = 3
opNum QueryReferrers = 6
opNum AddToStore = 7
opNum AddTextToStore = 8
opNum BuildPaths = 9
opNum EnsurePath = 10
opNum AddTempRoot = 11
opNum AddIndirectRoot = 12
opNum SyncWithGC = 13
opNum FindRoots = 14
opNum SetOptions = 19
opNum CollectGarbage = 20
opNum QuerySubstitutablePathInfo = 21
opNum QueryDerivationOutputs = 22
opNum QueryAllValidPaths = 23
opNum QueryFailedPaths = 24
opNum ClearFailedPaths = 25
opNum QueryPathInfo = 26
opNum QueryDerivationOutputNames = 28
opNum QueryPathFromHashPart = 29
opNum QuerySubstitutablePathInfos = 30
opNum QueryValidPaths = 31
opNum QuerySubstitutablePaths = 32
opNum QueryValidDerivers = 33
opNum OptimiseStore = 34
opNum VerifyStore = 35
opNum BuildDerivation = 36
opNum AddSignatures = 37
opNum NarFromPath = 38
opNum AddToStoreNar = 39
opNum QueryMissing = 40
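-- Run a worker operation that takes no arguments and returns a boolean.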
simpleOp :: WorkerOp -> MonadStore Bool
simpleOp op = do
simpleOpArgs op $ return ()
simpleOpArgs :: WorkerOp -> Put -> MonadStore Bool
simpleOpArgs op args = do
runOpArgs op args
err <- gotError
case err of
True -> do
Error _num msg <- head <$> getError
throwError $ BSC.unpack $ LBS.toStrict msg
False -> do
sockGetBool
runOp :: WorkerOp -> MonadStore ()
runOp op = runOpArgs op $ return ()
runOpArgs :: WorkerOp -> Put -> MonadStore ()
runOpArgs op args = do
-- Temporary hack for printing the messages destined for nix-daemon socket
when False $
liftIO $ LBS.writeFile "mytestfile2" $ runPut $ do
putInt $ opNum op
args
sockPut $ do
putInt $ opNum op
args
out <- processOutput
modify (++out)
err <- gotError
when err $ do
Error _num msg <- head <$> getError
throwError $ BSC.unpack $ LBS.toStrict msg
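-- Connect to the daemon socket, perform the handshake (exchange worker
-- magic numbers and protocol versions, see 'greet'), run the given
-- action, and return its result together with the collected log items.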
runStore :: MonadStore a -> IO (Either String a, [Logger])
runStore code = do
bracket (open sockPath) close run
where
open path = do
soc <- socket AF_UNIX Stream 0
connect soc (SockAddrUnix path)
return soc
greet = do
sockPut $ putInt workerMagic1
soc <- ask
vermagic <- liftIO $ recv soc 16
let (magic2, daemonProtoVersion) = flip runGet (LBS.fromStrict vermagic) $ (,) <$> getInt <*> getInt
unless (magic2 == workerMagic2) $ error "Worker magic 2 mismatch"
sockPut $ putInt protoVersion -- clientVersion
sockPut $ putInt (0 :: Int) -- affinity
sockPut $ putInt (0 :: Int) -- obsolete reserveSpace
processOutput
run sock =
flip runReaderT sock $ flip runStateT [] $ runExceptT (greet >> code)
-- Source: shlevy/hnix-store, hnix-store-remote/src/System/Nix/Store/Remote/Protocol.hs (MIT license, 4,680 bytes)
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Sproxy.Application
( sproxy
, redirect
) where
import Blaze.ByteString.Builder (toByteString)
import Blaze.ByteString.Builder.ByteString (fromByteString)
import Control.Exception
( Exception
, Handler(..)
, SomeException
, catches
, displayException
)
import qualified Data.Aeson as JSON
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.ByteString.Char8 (pack, unpack)
import Data.ByteString.Lazy (fromStrict)
import Data.Conduit (Flush(Chunk), mapOutput)
import qualified Data.HashMap.Strict as HM
import Data.List (find, partition)
import Data.Maybe (fromJust, fromMaybe)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Word (Word16)
import Data.Word8 (_colon)
import Foreign.C.Types (CTime(..))
import qualified Network.HTTP.Client as BE
import Network.HTTP.Client.Conduit (bodyReaderSource)
import Network.HTTP.Conduit (requestBodySourceChunkedIO, requestBodySourceIO)
import Network.HTTP.Types
( RequestHeaders
, ResponseHeaders
, methodGet
, methodPost
)
import Network.HTTP.Types.Header
( hConnection
, hContentLength
, hContentType
, hCookie
, hLocation
, hTransferEncoding
)
import Network.HTTP.Types.Status
( Status(..)
, badGateway502
, badRequest400
, forbidden403
, found302
, internalServerError500
, methodNotAllowed405
, movedPermanently301
, networkAuthenticationRequired511
, notFound404
, ok200
, seeOther303
, temporaryRedirect307
)
import Network.Socket (NameInfoFlag(NI_NUMERICHOST), getNameInfo)
import qualified Network.Wai as W
import Network.Wai.Conduit (responseSource, sourceRequestBody)
import System.FilePath.Glob (Pattern, match)
import System.Posix.Time (epochTime)
import Text.InterpolatedString.Perl6 (qc)
import Web.Cookie (Cookies, parseCookies, renderCookies)
import qualified Web.Cookie as WC
import Sproxy.Application.Cookie
( AuthCookie(..)
, AuthUser
, cookieDecode
, cookieEncode
, getEmail
, getEmailUtf8
, getFamilyNameUtf8
, getGivenNameUtf8
)
import Sproxy.Application.OAuth2.Common (OAuth2Client(..))
import qualified Sproxy.Application.State as State
import Sproxy.Config (BackendConf(..))
import qualified Sproxy.Logging as Log
import Sproxy.Server.DB (Database, userAccess, userExists, userGroups)
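-- Redirect every plain-HTTP request to its HTTPS counterpart on port p:
-- permanent (301) for GET, temporary (307) for other methods so request
-- bodies are preserved on replay.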
redirect :: Word16 -> W.Application
redirect p req resp =
case requestDomain req of
Nothing -> badRequest "missing host" req resp
Just domain -> do
Log.info $ "redirecting to " ++ show location ++ ": " ++ showReq req
resp $ W.responseBuilder status [(hLocation, location)] mempty
where status =
if W.requestMethod req == methodGet
then movedPermanently301
else temporaryRedirect307
newhost =
if p == 443
then domain
else domain <> ":" <> pack (show p)
location =
"https://" <> newhost <> W.rawPathInfo req <> W.rawQueryString req
sproxy ::
ByteString
-> Database
-> HM.HashMap Text OAuth2Client
-> [(Pattern, BackendConf, BE.Manager)]
-> W.Application
sproxy key db oa2 backends =
logException $ \req resp -> do
Log.debug $ "sproxy <<< " ++ showReq req
case requestDomain req of
Nothing -> badRequest "missing host" req resp
Just domain ->
case find (\(p, _, _) -> match p (unpack domain)) backends of
Nothing -> notFound "backend" req resp
Just (_, be, mgr) -> do
let cookieName = pack $ beCookieName be
cookieDomain = pack <$> beCookieDomain be
case W.pathInfo req of
["robots.txt"] -> get robots req resp
(".sproxy":proxy) ->
case proxy of
["logout"] ->
get (logout key cookieName cookieDomain) req resp
["oauth2", provider] ->
case HM.lookup provider oa2 of
Nothing -> notFound "OAuth2 provider" req resp
Just oa2c ->
get (oauth2callback key db (provider, oa2c) be) req resp
["access"] -> do
now <- Just <$> epochTime
case extractCookie key now cookieName req of
Nothing -> authenticationRequired key oa2 req resp
Just (authCookie, _) ->
post (checkAccess db authCookie) req resp
_ -> notFound "proxy" req resp
_ -> do
now <- Just <$> epochTime
case extractCookie key now cookieName req of
Nothing -> authenticationRequired key oa2 req resp
Just cs@(authCookie, _) ->
authorize db cs req >>= \case
Nothing -> forbidden authCookie req resp
Just req' -> forward mgr req' resp
robots :: W.Application
robots _ resp =
resp $
W.responseLBS
ok200
[(hContentType, "text/plain; charset=utf-8")]
"User-agent: *\nDisallow: /"
oauth2callback ::
ByteString
-> Database
-> (Text, OAuth2Client)
-> BackendConf
-> W.Application
oauth2callback key db (provider, oa2c) be req resp =
case param "code" of
Nothing -> badRequest "missing auth code" req resp
Just code ->
case param "state" of
Nothing -> badRequest "missing auth state" req resp
Just state ->
case State.decode key state of
Left msg -> badRequest ("invalid state: " ++ msg) req resp
Right url -> do
au <- oauth2Authenticate oa2c code (redirectURL req provider)
let email = getEmail au
Log.info $ "login " ++ show email ++ " by " ++ show provider
exists <- userExists db email
if exists
then authenticate key be au url req resp
else userNotFound au req resp
where
param p = do
(_, v) <- find ((==) p . fst) $ W.queryString req
v
-- XXX: RFC6265: the user agent MUST NOT attach more than one Cookie header field
extractCookie ::
ByteString
-> Maybe CTime
-> ByteString
-> W.Request
-> Maybe (AuthCookie, Cookies)
extractCookie key now name req = do
(_, cookies) <- find ((==) hCookie . fst) $ W.requestHeaders req
(auth, others) <- discriminate cookies
case cookieDecode key auth of
Left _ -> Nothing
Right cookie ->
if maybe True (acExpiry cookie >) now
then Just (cookie, others)
else Nothing
where
discriminate cs =
case partition ((==) name . fst) $ parseCookies cs of
((_, x):_, xs) -> Just (x, xs)
_ -> Nothing
authenticate ::
ByteString -> BackendConf -> AuthUser -> ByteString -> W.Application
authenticate key be user url _req resp = do
now <- epochTime
let domain = pack <$> beCookieDomain be
expiry = now + CTime (beCookieMaxAge be)
authCookie = AuthCookie {acUser = user, acExpiry = expiry}
cookie =
WC.def
{ WC.setCookieName = pack $ beCookieName be
, WC.setCookieHttpOnly = True
, WC.setCookiePath = Just "/"
, WC.setCookieSameSite = Nothing
, WC.setCookieSecure = True
, WC.setCookieValue = cookieEncode key authCookie
, WC.setCookieDomain = domain
, WC.setCookieExpires =
Just . posixSecondsToUTCTime . realToFrac $ expiry
}
resp $
W.responseLBS
seeOther303
[ (hLocation, url)
, ("Set-Cookie", toByteString $ WC.renderSetCookie cookie)
]
""
authorize ::
Database -> (AuthCookie, Cookies) -> W.Request -> IO (Maybe W.Request)
authorize db (authCookie, otherCookies) req = do
let user = acUser authCookie
domain = decodeUtf8 . fromJust $ requestDomain req
email = getEmail user
emailUtf8 = getEmailUtf8 user
familyUtf8 = getFamilyNameUtf8 user
givenUtf8 = getGivenNameUtf8 user
method = decodeUtf8 $ W.requestMethod req
path = decodeUtf8 $ W.rawPathInfo req
grps <- userGroups db email domain path method
if null grps
then return Nothing
else do
ip <-
pack . fromJust . fst <$>
getNameInfo [NI_NUMERICHOST] True False (W.remoteHost req)
return . Just $
req
{ W.requestHeaders =
HM.toList $
HM.insert "From" emailUtf8 $
HM.insert "X-Groups" (BS.intercalate "," $ encodeUtf8 <$> grps) $
HM.insert "X-Given-Name" givenUtf8 $
HM.insert "X-Family-Name" familyUtf8 $
HM.insert "X-Forwarded-Proto" "https" $
HM.insertWith (flip combine) "X-Forwarded-For" ip $
setCookies otherCookies $
HM.fromListWith combine $ W.requestHeaders req
}
where
combine a b = a <> "," <> b
setCookies [] = HM.delete hCookie
setCookies cs = HM.insert hCookie (toByteString . renderCookies $ cs)
checkAccess :: Database -> AuthCookie -> W.Application
checkAccess db authCookie req resp = do
let email = getEmail . acUser $ authCookie
domain = decodeUtf8 . fromJust $ requestDomain req
body <- W.strictRequestBody req
case JSON.eitherDecode' body of
Left err -> badRequest err req resp
Right inq -> do
Log.debug $ "access <<< " ++ show inq
tags <- userAccess db email domain inq
Log.debug $ "access >>> " ++ show tags
resp $
W.responseLBS
ok200
[(hContentType, "application/json")]
(JSON.encode tags)
-- XXX If something seems strange, think about HTTP/1.1 <-> HTTP/1.0.
-- FIXME For HTTP/1.0 backends we might need an option
-- FIXME in config file. HTTP Client does HTTP/1.1 by default.
forward :: BE.Manager -> W.Application
forward mgr req resp = do
let beReq =
BE.defaultRequest
{ BE.method = W.requestMethod req
, BE.path = W.rawPathInfo req
, BE.queryString = W.rawQueryString req
, BE.requestHeaders = modifyRequestHeaders $ W.requestHeaders req
, BE.redirectCount = 0
, BE.decompress = const False
, BE.requestBody =
case W.requestBodyLength req of
W.ChunkedBody ->
requestBodySourceChunkedIO (sourceRequestBody req)
W.KnownLength l ->
requestBodySourceIO (fromIntegral l) (sourceRequestBody req)
}
msg =
unpack (BE.method beReq <> " " <> BE.path beReq <> BE.queryString beReq)
Log.debug $ "BACKEND <<< " ++ msg ++ " " ++ show (BE.requestHeaders beReq)
BE.withResponse beReq mgr $ \res -> do
let status = BE.responseStatus res
headers = BE.responseHeaders res
body =
mapOutput (Chunk . fromByteString) . bodyReaderSource $
BE.responseBody res
logging =
if statusCode status `elem` [400, 500]
then Log.warn
else Log.debug
logging $
"BACKEND >>> " ++
show (statusCode status) ++ " on " ++ msg ++ " " ++ show headers ++ "\n"
resp $ responseSource status (modifyResponseHeaders headers) body
modifyRequestHeaders :: RequestHeaders -> RequestHeaders
modifyRequestHeaders = filter (\(n, _) -> n `notElem` ban)
where
ban =
[ hConnection
, hContentLength -- XXX This is set automtically before sending request to backend
, hTransferEncoding -- XXX Likewise
]
modifyResponseHeaders :: ResponseHeaders -> ResponseHeaders
modifyResponseHeaders = filter (\(n, _) -> n `notElem` ban)
where
ban =
[ hConnection
-- XXX WAI docs say we MUST NOT add (keep) Content-Length, Content-Range, and Transfer-Encoding,
-- XXX but we use streaming body, which may add Transfer-Encoding only.
-- XXX Thus we keep Content-* headers.
, hTransferEncoding
]
authenticationRequired ::
ByteString -> HM.HashMap Text OAuth2Client -> W.Application
authenticationRequired key oa2 req resp = do
Log.info $ "511 Unauthenticated: " ++ showReq req
resp $
W.responseLBS
networkAuthenticationRequired511
[(hContentType, "text/html; charset=utf-8")]
page
where
path =
if W.requestMethod req == methodGet
then W.rawPathInfo req <> W.rawQueryString req
else "/"
state =
State.encode key $
"https://" <> fromJust (W.requestHeaderHost req) <> path
authLink :: Text -> OAuth2Client -> ByteString -> ByteString
authLink provider oa2c html =
let u = oauth2AuthorizeURL oa2c state (redirectURL req provider)
d = pack $ oauth2Description oa2c
in [qc|{html}<p><a href="{u}">Authenticate with {d}</a></p>|]
authHtml = HM.foldrWithKey authLink "" oa2
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Authentication required</title>
</head>
<body style="text-align:center;">
<h1>Authentication required</h1>
{authHtml}
</body>
</html>
|]
forbidden :: AuthCookie -> W.Application
forbidden ac req resp = do
Log.info $ "403 Forbidden: " ++ show email ++ ": " ++ showReq req
resp $
W.responseLBS forbidden403 [(hContentType, "text/html; charset=utf-8")] page
where
email = getEmailUtf8 . acUser $ ac
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Access Denied</title>
</head>
<body>
<h1>Access Denied</h1>
<p>You are currently logged in as <strong>{email}</strong></p>
<p><a href="/.sproxy/logout">Logout</a></p>
</body>
</html>
|]
userNotFound :: AuthUser -> W.Application
userNotFound au _ resp = do
Log.info $ "404 User not found: " ++ show email
resp $
W.responseLBS notFound404 [(hContentType, "text/html; charset=utf-8")] page
where
email = getEmailUtf8 au
page =
fromStrict
[qc|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Access Denied</title>
</head>
<body>
<h1>Access Denied</h1>
<p>You are not allowed to login as <strong>{email}</strong></p>
<p><a href="/">Main page</a></p>
</body>
</html>
|]
logout :: ByteString -> ByteString -> Maybe ByteString -> W.Application
logout key cookieName cookieDomain req resp = do
let host = fromJust $ W.requestHeaderHost req
case extractCookie key Nothing cookieName req of
Nothing ->
resp $ W.responseLBS found302 [(hLocation, "https://" <> host)] ""
Just _ -> do
let cookie =
WC.def
{ WC.setCookieName = cookieName
, WC.setCookieHttpOnly = True
, WC.setCookiePath = Just "/"
, WC.setCookieSameSite = Just WC.sameSiteStrict
, WC.setCookieSecure = True
, WC.setCookieValue = "goodbye"
, WC.setCookieDomain = cookieDomain
, WC.setCookieExpires =
Just . posixSecondsToUTCTime . realToFrac $ CTime 0
}
resp $
W.responseLBS
found302
[ (hLocation, "https://" <> host)
, ("Set-Cookie", toByteString $ WC.renderSetCookie cookie)
]
""
badRequest :: String -> W.Application
badRequest msg req resp = do
Log.warn $ "400 Bad Request (" ++ msg ++ "): " ++ showReq req
resp $ W.responseLBS badRequest400 [] "Bad Request"
notFound :: String -> W.Application
notFound msg req resp = do
Log.warn $ "404 Not Found (" ++ msg ++ "): " ++ showReq req
resp $ W.responseLBS notFound404 [] "Not Found"
logException :: W.Middleware
logException app req resp =
catches (app req resp) [Handler badGateway, Handler internalError]
where
internalError :: SomeException -> IO W.ResponseReceived
internalError = response internalServerError500
badGateway :: BE.HttpException -> IO W.ResponseReceived
badGateway = response badGateway502
response :: Exception e => Status -> e -> IO W.ResponseReceived
response st e = do
Log.error $
show (statusCode st) ++
" " ++
unpack (statusMessage st) ++
": " ++ displayException e ++ " on " ++ showReq req
resp $
W.responseLBS
st
[(hContentType, "text/plain")]
(fromStrict $ statusMessage st)
get :: W.Middleware
get app req resp
| W.requestMethod req == methodGet = app req resp
| otherwise = do
Log.warn $ "405 Method Not Allowed: " ++ showReq req
resp $
W.responseLBS methodNotAllowed405 [("Allow", "GET")] "Method Not Allowed"
post :: W.Middleware
post app req resp
| W.requestMethod req == methodPost = app req resp
| otherwise = do
Log.warn $ "405 Method Not Allowed: " ++ showReq req
resp $
W.responseLBS methodNotAllowed405 [("Allow", "POST")] "Method Not Allowed"
redirectURL :: W.Request -> Text -> ByteString
redirectURL req provider =
"https://" <> fromJust (W.requestHeaderHost req) <> "/.sproxy/oauth2/" <>
encodeUtf8 provider
requestDomain :: W.Request -> Maybe ByteString
requestDomain req = do
h <- W.requestHeaderHost req
return . fst . BS.break (== _colon) $ h
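-- Added note (not part of the original source): 'requestDomain' simply cuts
-- the Host header at the first colon, so a hypothetical Host value of
-- "example.com:8443" yields Just "example.com", and a request without a
-- Host header yields Nothing.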
-- XXX: make sure not to reveal the cookie, which can be valid (!)
showReq :: W.Request -> String
showReq req =
unpack
(W.requestMethod req <> " " <>
fromMaybe "<no host>" (W.requestHeaderHost req) <>
W.rawPathInfo req <>
W.rawQueryString req <>
" ") ++
show (W.httpVersion req) ++
" " ++
show (fromMaybe "-" $ W.requestHeaderReferer req) ++
" " ++
show (fromMaybe "-" $ W.requestHeaderUserAgent req) ++
" from " ++ show (W.remoteHost req)
| ip1981/sproxy2 | src/Sproxy/Application.hs | mit | 17,806 | 0 | 30 | 4,887 | 4,779 | 2,487 | 2,292 | 440 | 12 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Niagra.Selector.Combinators
(
-- * Pseudoclass Combinators
active,
checked,
disabled,
empty,
enabled,
firstChild,
firstOfType,
focus,
hover,
inRange,
invalid,
lang,
lastChild,
lastOfType,
link,
not,
    nthChild,
    nthLastChild,
nthLastOfType,
nthOfType,
onlyOfType,
onlyChild,
optional,
outOfRange,
readOnly,
readWrite,
required,
root,
target,
valid,
visited,
-- * Pseudotype combinators
before,
after,
firstLetter,
firstLine,
selection
)
where
import Data.Niagra.Selector
import Data.String
import Data.Text (Text)
import Prelude hiding (not)
{- pseudoclasses -}
-- |Selects active elements.
active :: Selector
active = pseudoClass' "active"
-- |Selects checked elements.
checked :: Selector
checked = pseudoClass' "checked"
-- |Selects disabled elements.
disabled :: Selector
disabled = pseudoClass' "disabled"
-- |Selects elements that have no children.
empty :: Selector
empty = pseudoClass' "empty"
-- |Selects enabled elements.
enabled :: Selector
enabled = pseudoClass' "enabled"
-- |Selects elements that are the first child of their parent.
firstChild :: Selector
firstChild = pseudoClass' "first-child"
-- |Selects elements that are the first element
-- of its type that is a child of its parent.
firstOfType :: Selector
firstOfType = pseudoClass' "first-of-type"
-- |Selects elements that have focus.
focus :: Selector
focus = pseudoClass' "focus"
-- |Selects elements over which the mouse hovers.
hover :: Selector
hover = pseudoClass' "hover"
-- |Selects elements with a value within a specified range.
inRange :: Selector
inRange = pseudoClass' "in-range"
-- |Selects elements with an invalid value.
invalid :: Selector
invalid = pseudoClass' "invalid"
-- |Selects elements with a lang attribute starting with
-- the given language identifier.
lang :: Text -- ^ language identifier, eg en
-> Selector
lang = pseudoClass "lang" . Just . Raw
-- |Selects elements that are the last child of their parent.
lastChild :: Selector
lastChild = pseudoClass' "last-child"
-- |Selects elements that are the last of their type in their
-- parent's children.
lastOfType :: Selector
lastOfType = pseudoClass' "last-of-type"
-- |Selects unvisited links
link :: Selector
link = pseudoClass' "link"
-- |Selects every element that doesn't match the given selector.
not :: Selector -- ^ selector to not match
-> Selector
not = pseudoClass "not" . Just
-- |Selects elements that are the nth child of their parent.
nthChild :: Integer -- ^ n
-> Selector
nthChild = pseudoClass "nth-child" . Just . Raw . fromString . show
-- |Selects elements that are the nth child of their parent,
-- counting from the last child.
nthLastChild :: Integer -- ^ n
-> Selector
nthLastChild = pseudoClass "nth-last-child" . Just . Raw . fromString . show
-- |Selects elements that are the nth element of their type among their
-- parent's children, counting from the last child.
nthLastOfType :: Integer -- ^ n
-> Selector
nthLastOfType = pseudoClass "nth-last-of-type" . Just . Raw . fromString . show
-- |Selects elements that are the nth element of their type among their
-- parent's children.
nthOfType :: Integer -- ^ n
-> Selector
nthOfType = pseudoClass "nth-of-type" . Just . Raw . fromString . show
-- |Selects elements that are the only element of their type
-- in the children of their parents.
onlyOfType :: Selector
onlyOfType = pseudoClass' "only-of-type"
-- |Selects elements that are the only child of their parent.
onlyChild :: Selector
onlyChild = pseudoClass' "only-child"
-- |Selects elements with no @required@ attribute specified.
optional :: Selector
optional = pseudoClass' "optional"
-- |Selects elements with a value outside a specified range.
outOfRange :: Selector
outOfRange = pseudoClass' "out-of-range"
-- |Selects elements with a @readonly@ attribute specified.
readOnly :: Selector
readOnly = pseudoClass' "read-only"
-- |Selects elements with no @readonly@ attribute specified.
readWrite :: Selector
readWrite = pseudoClass' "read-write"
-- |Selects elements with a @required@ attribute specified.
required :: Selector
required = pseudoClass' "required"
-- |Selects the document's root element.
root :: Selector
root = pseudoClass' "root"
-- |Selects the current active element.
target :: Selector
target = pseudoClass' "target"
-- |Selects all elements with a valid value.
valid :: Selector
valid = pseudoClass' "valid"
-- |Selects all visited links.
visited :: Selector
visited = pseudoClass' "visited"
{- Pseudotype combinators -}
-- |Insert content after elements.
after :: Selector
after = pseudoType' "after"
-- |Insert content before elements.
before :: Selector
before = pseudoType' "before"
-- |Selects the first letter of an element.
firstLetter :: Selector
firstLetter = pseudoType' "first-letter"
-- |Selects the first line of an element.
firstLine :: Selector
firstLine = pseudoType' "first-line"
-- |Selects the portion of an element that's selected by a user.
selection :: Selector
selection = pseudoType' "selection" | fhsjaagshs/niagra | src/Data/Niagra/Selector/Combinators.hs | mit | 5,078 | 0 | 9 | 900 | 765 | 450 | 315 | 120 | 1 |
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE FlexibleInstances #-}
module Parametricity where
f1 :: a -> a
f1 x = x
f2 :: a -> b
f2 = const undefined
f2' :: a -> b
f2' = (\x -> case x of {})
f3 :: a -> b -> a
f3 x _ = x
f4 :: [a] -> [a]
f4 x = x
f5 :: (b -> c) -> (a -> b) -> (a -> c)
f5 f g = (\x -> f $ g x)
f6 :: (a -> a) -> a -> a
f6 f x = f x
data Foo = F Int | G Char deriving (Show)
instance Eq Foo where
(F i1) == (F i2) = i1 == i2
(G c1) == (G c2) = c1 == c2
_ == _ = False
foo1 /= foo2 = not (foo1 == foo2)
data Foo' = F' Int | G' Char
deriving (Eq, Ord, Show)
-- type class
class Listable a where
toList :: a -> [Int]
instance Listable Int where
toList x = [x]
instance Listable Bool where
toList True = [1]
toList False = [0]
instance Listable [Int] where
toList = id
data Tree a = Empty
| Node a (Tree a) (Tree a)
instance Listable (Tree Int) where
toList Empty = []
toList (Node val left right) = toList left ++ [val] ++ toList right
sumL :: (Listable a) => a -> Int
sumL x = sum (toList x)
foo :: (Listable a, Ord a) => a -> a -> Bool
foo x y = sum (toList x) == sum (toList y) || x < y
instance (Listable a, Listable b) => Listable (a, b) where
toList (x, y) = toList x ++ toList y
| harrisi/on-being-better | list-expansion/Haskell/Learning/Parametricity.hs | cc0-1.0 | 1,259 | 0 | 10 | 357 | 681 | 359 | 322 | 45 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : Inclusion of CASL_DL into CASL
Copyright : (c) Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (via Logic.Logic)
-}
module Comorphisms.CASL_DL2CASL
(
CASL_DL2CASL(..)
)
where
import Logic.Logic
import Logic.Comorphism
import Common.AS_Annotation
import Common.ProofTree
import Common.Result
import qualified Common.Lib.Rel as Rel
--CASL_DL = domain
import CASL_DL.PredefinedCASLAxioms
import CASL_DL.Logic_CASL_DL
import CASL_DL.AS_CASL_DL
import CASL_DL.Sign()
import CASL_DL.StatAna -- DLSign
import CASL_DL.Sublogics
--CASL = codomain
import CASL.Logic_CASL
import CASL.AS_Basic_CASL
import CASL.Sign
import CASL.Morphism
import CASL.Sublogic as Sublogic
import qualified Data.Set as Set
data CASL_DL2CASL = CASL_DL2CASL deriving Show
instance Language CASL_DL2CASL
instance Comorphism
CASL_DL2CASL -- comorphism
CASL_DL -- lid domain
CASL_DL_SL -- sublogics domain
DL_BASIC_SPEC -- Basic spec domain
DLFORMULA -- sentence domain
SYMB_ITEMS -- symbol items domain
SYMB_MAP_ITEMS -- symbol map items domain
DLSign -- signature domain
DLMor -- morphism domain
Symbol -- symbol domain
RawSymbol -- rawsymbol domain
ProofTree -- proof tree codomain
CASL -- lid codomain
CASL_Sublogics -- sublogics codomain
CASLBasicSpec -- Basic spec codomain
CASLFORMULA -- sentence codomain
SYMB_ITEMS -- symbol items codomain
SYMB_MAP_ITEMS -- symbol map items codomain
CASLSign -- signature codomain
CASLMor -- morphism codomain
Symbol -- symbol codomain
RawSymbol -- rawsymbol codomain
ProofTree -- proof tree domain
where
sourceLogic CASL_DL2CASL = CASL_DL
targetLogic CASL_DL2CASL = CASL
sourceSublogic CASL_DL2CASL = SROIQ
mapSublogic CASL_DL2CASL _ = Just $ Sublogic.caslTop
{ sub_features = LocFilSub
, cons_features = emptyMapConsFeature }
map_symbol CASL_DL2CASL _ = Set.singleton
map_sentence CASL_DL2CASL = trSentence
map_morphism CASL_DL2CASL = mapMor
map_theory CASL_DL2CASL = trTheory
isInclusionComorphism CASL_DL2CASL = True
has_model_expansion CASL_DL2CASL = True
-- ^ mapping of morphims, we just forget the
-- ^ additional features
mapMor :: DLMor -> Result CASLMor
mapMor inMor =
let
ms = trSign $ msource inMor
mt = trSign $ mtarget inMor
sm = sort_map inMor
fm = op_map inMor
pm = pred_map inMor
in return (embedMorphism () ms mt)
{ sort_map = sm
, op_map = fm
, pred_map = pm }
-- ^ we forget additional information in the signature
projectToCASL :: DLSign -> CASLSign
projectToCASL dls = dls
{
sentences = []
, extendedInfo = ()
}
-- ^ Thing is established as the TopSort of all sorts
-- ^ defined in the CASL_DL spec, a predefined signature
-- ^ is added
trSign :: DLSign -> CASLSign
trSign inSig =
let
inC = projectToCASL inSig `uniteCASLSign` predefSign
inSorts = sortSet inSig
inData = sortSet predefSign
in
inC
{
sortRel = Rel.insertKey thing
$ Rel.insertKey dataS
$ Set.fold (`Rel.insertDiffPair` dataS)
(Set.fold (`Rel.insertDiffPair` thing)
(sortRel inC) inSorts)
$ Set.delete dataS inData
}
-- ^ translation of the signature
-- ^ predefined axioms are added
-- Translation of theories
trTheory :: (DLSign, [Named (FORMULA DL_FORMULA)]) ->
Result (CASLSign, [Named (FORMULA ())])
trTheory (inSig, inForms) = do
outForms <- mapR (trNamedSentence inSig) inForms
return (trSign inSig, predefinedAxioms ++ outForms)
-- ^ translation of named sentences
trNamedSentence :: DLSign -> Named (FORMULA DL_FORMULA) ->
Result (Named (FORMULA ()))
trNamedSentence inSig inForm = do
outSen <- trSentence inSig $ sentence inForm
return $ mapNamed (const outSen) inForm
-- ^ translation of sentences
trSentence :: DLSign -> FORMULA DL_FORMULA -> Result (FORMULA ())
trSentence inSig inF =
case inF of
Quantification qf vs frm rn ->
do
outF <- trSentence inSig frm
return (Quantification qf vs outF rn)
Conjunction fns rn ->
do
outF <- mapR (trSentence inSig) fns
return (Conjunction outF rn)
Disjunction fns rn ->
do
outF <- mapR (trSentence inSig) fns
return (Disjunction outF rn)
Implication f1 f2 b rn ->
do
out1 <- trSentence inSig f1
out2 <- trSentence inSig f2
return (Implication out1 out2 b rn)
Equivalence f1 f2 rn ->
do
out1 <- trSentence inSig f1
out2 <- trSentence inSig f2
return (Equivalence out1 out2 rn)
Negation frm rn ->
do
outF <- trSentence inSig frm
return (Negation outF rn)
True_atom rn -> return (True_atom rn)
False_atom rn -> return (False_atom rn)
Predication pr trm rn ->
do
ot <- mapR (trTerm inSig) trm
return (Predication pr ot rn)
Definedness tm rn ->
do
ot <- trTerm inSig tm
return (Definedness ot rn)
Existl_equation t1 t2 rn ->
do
ot1 <- trTerm inSig t1
ot2 <- trTerm inSig t2
return (Existl_equation ot1 ot2 rn)
Strong_equation t1 t2 rn ->
do
ot1 <- trTerm inSig t1
ot2 <- trTerm inSig t2
return (Strong_equation ot1 ot2 rn)
Membership t1 st rn ->
do
ot <- trTerm inSig t1
return (Membership ot st rn)
Mixfix_formula trm ->
do
ot <- trTerm inSig trm
return (Mixfix_formula ot)
Unparsed_formula str rn ->
return (Unparsed_formula str rn)
Sort_gen_ax cstr ft ->
return (Sort_gen_ax cstr ft)
QuantOp _ _ _ -> fail "CASL_DL2CASL.QuantOp"
QuantPred _ _ _ -> fail "CASL_DL2CASL.QuantPred"
ExtFORMULA form ->
case form of
Cardinality _ _ _ _ _ _ ->
fail "Mapping of cardinality not implemented"
-- ^ translation of terms
trTerm :: DLSign -> TERM DL_FORMULA -> Result (TERM ())
trTerm inSig inF =
case inF of
Qual_var v s rn -> return (Qual_var v s rn)
Application os tms rn ->
do
ot <- mapR (trTerm inSig) tms
return (Application os ot rn)
Sorted_term trm st rn ->
do
ot <- trTerm inSig trm
return (Sorted_term ot st rn)
Cast trm st rn ->
do
ot <- trTerm inSig trm
return (Cast ot st rn)
Conditional t1 frm t2 rn ->
do
ot1 <- trTerm inSig t1
ot2 <- trTerm inSig t2
of1 <- trSentence inSig frm
return (Conditional ot1 of1 ot2 rn)
Unparsed_term str rn -> return (Unparsed_term str rn)
Mixfix_qual_pred ps -> return (Mixfix_qual_pred ps)
Mixfix_term trm ->
do
ot <- mapR (trTerm inSig) trm
return (Mixfix_term ot)
Mixfix_token tok -> return (Mixfix_token tok)
Mixfix_sorted_term st rn -> return (Mixfix_sorted_term st rn)
Mixfix_cast st rn -> return (Mixfix_cast st rn)
Mixfix_parenthesized trm rn ->
do
ot <- mapR (trTerm inSig) trm
return (Mixfix_parenthesized ot rn)
Mixfix_bracketed trm rn ->
do
ot <- mapR (trTerm inSig) trm
return (Mixfix_bracketed ot rn)
Mixfix_braced trm rn ->
do
ot <- mapR (trTerm inSig) trm
return (Mixfix_braced ot rn)
ExtTERM _ -> return $ ExtTERM ()
| nevrenato/Hets_Fork | Comorphisms/CASL_DL2CASL.hs | gpl-2.0 | 8,448 | 0 | 15 | 2,929 | 2,075 | 1,011 | 1,064 | 210 | 19 |
{-# LANGUAGE GeneralizedNewtypeDeriving, CPP #-}
{-
Copyright (C) 2010-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Pretty
Copyright : Copyright (C) 2010-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
A prettyprinting library for the production of text documents,
including wrapped text, indented blocks, and tables.
-}
module Text.Pandoc.Pretty (
Doc
, render
, cr
, blankline
, space
, text
, char
, prefixed
, flush
, nest
, hang
, beforeNonBlank
, nowrap
, offset
, height
, lblock
, cblock
, rblock
, (<>)
, (<+>)
, ($$)
, ($+$)
, isEmpty
, empty
, cat
, hcat
, hsep
, vcat
, vsep
, nestle
, chomp
, inside
, braces
, brackets
, parens
, quotes
, doubleQuotes
, charWidth
, realLength
)
where
import Data.Sequence (Seq, fromList, (<|), singleton, mapWithIndex, viewl, ViewL(..))
import Data.Foldable (toList)
import Data.List (intercalate)
import Data.Monoid
import Data.String
import Control.Monad.State
import Data.Char (isSpace)
data RenderState a = RenderState{
output :: [a] -- ^ In reverse order
, prefix :: String
, usePrefix :: Bool
, lineLength :: Maybe Int -- ^ 'Nothing' means no wrapping
, column :: Int
, newlines :: Int -- ^ Number of preceding newlines
}
type DocState a = State (RenderState a) ()
data D = Text Int String
| Block Int [String]
| Prefixed String Doc
| BeforeNonBlank Doc
| Flush Doc
| BreakingSpace
| CarriageReturn
| NewLine
| BlankLine
deriving (Show)
newtype Doc = Doc { unDoc :: Seq D }
deriving (Monoid, Show)
instance IsString Doc where
fromString = text
isBlank :: D -> Bool
isBlank BreakingSpace = True
isBlank CarriageReturn = True
isBlank NewLine = True
isBlank BlankLine = True
isBlank (Text _ (c:_)) = isSpace c
isBlank _ = False
-- | True if the document is empty.
isEmpty :: Doc -> Bool
isEmpty = null . toList . unDoc
-- | The empty document.
empty :: Doc
empty = mempty
#if MIN_VERSION_base(4,5,0)
-- (<>) is defined in Data.Monoid
#else
infixr 6 <>
-- | An infix synonym for 'mappend'.
-- @a <> b@ is the result of concatenating @a@ with @b@.
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
{-# INLINE (<>) #-}
#endif
-- | Concatenate a list of 'Doc's.
cat :: [Doc] -> Doc
cat = mconcat
-- | Same as 'cat'.
hcat :: [Doc] -> Doc
hcat = mconcat
-- | Concatenate a list of 'Doc's, putting breakable spaces
-- between them.
infixr 6 <+>
(<+>) :: Doc -> Doc -> Doc
(<+>) x y = if isEmpty x
then y
else if isEmpty y
then x
else x <> space <> y
-- | Same as 'cat', but putting breakable spaces between the
-- 'Doc's.
hsep :: [Doc] -> Doc
hsep = foldr (<+>) empty
infixr 5 $$
-- | @a $$ b@ puts @a@ above @b@.
($$) :: Doc -> Doc -> Doc
($$) x y = if isEmpty x
then y
else if isEmpty y
then x
else x <> cr <> y
infixr 5 $+$
-- | @a $+$ b@ puts @a@ above @b@, with a blank line between.
($+$) :: Doc -> Doc -> Doc
($+$) x y = if isEmpty x
then y
else if isEmpty y
then x
else x <> blankline <> y
-- | List version of '$$'.
vcat :: [Doc] -> Doc
vcat = foldr ($$) empty
-- | List version of '$+$'.
vsep :: [Doc] -> Doc
vsep = foldr ($+$) empty
-- | Removes leading blank lines from a 'Doc'.
nestle :: Doc -> Doc
nestle (Doc d) = Doc $ go d
where go x = case viewl x of
(BlankLine :< rest) -> go rest
(NewLine :< rest) -> go rest
_ -> x
-- | Chomps trailing blank space off of a 'Doc'.
chomp :: Doc -> Doc
chomp d = Doc (fromList dl')
where dl = toList (unDoc d)
dl' = reverse $ go $ reverse dl
go [] = []
go (BreakingSpace : xs) = go xs
go (CarriageReturn : xs) = go xs
go (NewLine : xs) = go xs
go (BlankLine : xs) = go xs
go (Prefixed s d' : xs) = Prefixed s (chomp d') : xs
go xs = xs
outp :: (IsString a, Monoid a)
=> Int -> String -> DocState a
outp off s | off < 0 = do -- offset < 0 means newline characters
st' <- get
let rawpref = prefix st'
when (column st' == 0 && usePrefix st' && not (null rawpref)) $ do
let pref = reverse $ dropWhile isSpace $ reverse rawpref
modify $ \st -> st{ output = fromString pref : output st
, column = column st + realLength pref }
modify $ \st -> st { output = fromString s : output st
, column = 0
, newlines = newlines st + 1 }
outp off s = do -- offset >= 0 (0 might be combining char)
st' <- get
let pref = prefix st'
when (column st' == 0 && usePrefix st' && not (null pref)) $ do
modify $ \st -> st{ output = fromString pref : output st
, column = column st + realLength pref }
modify $ \st -> st{ output = fromString s : output st
, column = column st + off
, newlines = 0 }
-- | Renders a 'Doc'. @render (Just n)@ will use
-- a line length of @n@ to reflow text on breakable spaces.
-- @render Nothing@ will not reflow text.
render :: (Monoid a, IsString a)
=> Maybe Int -> Doc -> a
render linelen doc = fromString . mconcat . reverse . output $
execState (renderDoc doc) startingState
where startingState = RenderState{
output = mempty
, prefix = ""
, usePrefix = True
, lineLength = linelen
, column = 0
, newlines = 2 }
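-- The following doctest-style sketch was added for illustration and is not
-- part of the original module; it shows how a breakable space is reflowed
-- into a newline only when a line length is given.
--
-- >>> render (Just 10) (text "hello" <+> text "world") :: String
-- "hello\nworld"
-- >>> render Nothing (text "hello" <+> text "world") :: String
-- "hello world"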
renderDoc :: (IsString a, Monoid a)
=> Doc -> DocState a
renderDoc = renderList . toList . unDoc
renderList :: (IsString a, Monoid a)
=> [D] -> DocState a
renderList [] = return ()
renderList (Text off s : xs) = do
outp off s
renderList xs
renderList (Prefixed pref d : xs) = do
st <- get
let oldPref = prefix st
put st{ prefix = prefix st ++ pref }
renderDoc d
modify $ \s -> s{ prefix = oldPref }
renderList xs
renderList (Flush d : xs) = do
st <- get
let oldUsePrefix = usePrefix st
put st{ usePrefix = False }
renderDoc d
modify $ \s -> s{ usePrefix = oldUsePrefix }
renderList xs
renderList (BeforeNonBlank d : xs) =
case xs of
(x:_) | isBlank x -> renderList xs
| otherwise -> renderDoc d >> renderList xs
[] -> renderList xs
renderList (BlankLine : xs) = do
st <- get
case output st of
_ | newlines st > 1 || null xs -> return ()
_ | column st == 0 -> do
outp (-1) "\n"
_ -> do
outp (-1) "\n"
outp (-1) "\n"
renderList xs
renderList (CarriageReturn : xs) = do
st <- get
if newlines st > 0 || null xs
then renderList xs
else do
outp (-1) "\n"
renderList xs
renderList (NewLine : xs) = do
outp (-1) "\n"
renderList xs
renderList (BreakingSpace : CarriageReturn : xs) = renderList (CarriageReturn:xs)
renderList (BreakingSpace : NewLine : xs) = renderList (NewLine:xs)
renderList (BreakingSpace : BlankLine : xs) = renderList (BlankLine:xs)
renderList (BreakingSpace : BreakingSpace : xs) = renderList (BreakingSpace:xs)
renderList (BreakingSpace : xs) = do
let isText (Text _ _) = True
isText (Block _ _) = True
isText _ = False
let isBreakingSpace BreakingSpace = True
isBreakingSpace _ = False
let xs' = dropWhile isBreakingSpace xs
let next = takeWhile isText xs'
st <- get
let off = sum $ map offsetOf next
case lineLength st of
Just l | column st + 1 + off > l -> do
outp (-1) "\n"
renderList xs'
_ -> do
outp 1 " "
renderList xs'
renderList (b1@Block{} : b2@Block{} : xs) =
renderList (mergeBlocks False b1 b2 : xs)
renderList (b1@Block{} : BreakingSpace : b2@Block{} : xs) =
renderList (mergeBlocks True b1 b2 : xs)
renderList (Block width lns : xs) = do
st <- get
let oldPref = prefix st
case column st - realLength oldPref of
n | n > 0 -> modify $ \s -> s{ prefix = oldPref ++ replicate n ' ' }
_ -> return ()
renderDoc $ blockToDoc width lns
modify $ \s -> s{ prefix = oldPref }
renderList xs
mergeBlocks :: Bool -> D -> D -> D
mergeBlocks addSpace (Block w1 lns1) (Block w2 lns2) =
Block (w1 + w2 + if addSpace then 1 else 0) $
zipWith (\l1 l2 -> pad w1 l1 ++ l2) (lns1 ++ empties) (map sp lns2 ++ empties)
where empties = replicate (abs $ length lns1 - length lns2) ""
pad n s = s ++ replicate (n - realLength s) ' '
sp "" = ""
sp xs = if addSpace then (' ' : xs) else xs
mergeBlocks _ _ _ = error "mergeBlocks tried on non-Block!"
blockToDoc :: Int -> [String] -> Doc
blockToDoc _ lns = text $ intercalate "\n" lns
offsetOf :: D -> Int
offsetOf (Text o _) = o
offsetOf (Block w _) = w
offsetOf BreakingSpace = 1
offsetOf _ = 0
-- | A literal string.
text :: String -> Doc
text = Doc . toChunks
where toChunks :: String -> Seq D
toChunks [] = mempty
toChunks s = case break (=='\n') s of
([], _:ys) -> NewLine <| toChunks ys
(xs, _:ys) -> Text (realLength xs) xs <|
(NewLine <| toChunks ys)
(xs, []) -> singleton $ Text (realLength xs) xs
-- | A character.
char :: Char -> Doc
char c = text [c]
-- | A breaking (reflowable) space.
space :: Doc
space = Doc $ singleton BreakingSpace
-- | A carriage return. Does nothing if we're at the beginning of
-- a line; otherwise inserts a newline.
cr :: Doc
cr = Doc $ singleton CarriageReturn
-- | Inserts a blank line unless one exists already.
-- (@blankline <> blankline@ has the same effect as @blankline@.)
-- If you want multiple blank lines, use @text "\\n\\n"@.
blankline :: Doc
blankline = Doc $ singleton BlankLine
-- | Uses the specified string as a prefix for every line of
-- the inside document (except the first, if not at the beginning
-- of the line).
prefixed :: String -> Doc -> Doc
prefixed pref doc = Doc $ singleton $ Prefixed pref doc
-- | Makes a 'Doc' flush against the left margin.
flush :: Doc -> Doc
flush doc = Doc $ singleton $ Flush doc
-- | Indents a 'Doc' by the specified number of spaces.
nest :: Int -> Doc -> Doc
nest ind = prefixed (replicate ind ' ')
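-- Added illustration (not from the original source): nesting prefixes every
-- line after a line break with the given number of spaces.
--
-- >>> render Nothing (nest 2 (text "a" $$ text "b")) :: String
-- "  a\n  b"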
-- | A hanging indent. @hang ind start doc@ prints @start@,
-- then @doc@, leaving an indent of @ind@ spaces on every
-- line but the first.
hang :: Int -> Doc -> Doc -> Doc
hang ind start doc = start <> nest ind doc
-- | @beforeNonBlank d@ conditionally includes @d@ unless it is
-- followed by blank space.
beforeNonBlank :: Doc -> Doc
beforeNonBlank d = Doc $ singleton (BeforeNonBlank d)
-- | Makes a 'Doc' non-reflowable.
nowrap :: Doc -> Doc
nowrap doc = Doc $ mapWithIndex replaceSpace $ unDoc doc
where replaceSpace _ BreakingSpace = Text 1 " "
replaceSpace _ x = x
-- | Returns the width of a 'Doc'.
offset :: Doc -> Int
offset d = case map realLength . lines . render Nothing $ d of
[] -> 0
os -> maximum os
block :: (String -> String) -> Int -> Doc -> Doc
block filler width = Doc . singleton . Block width .
map filler . chop width . render (Just width)
-- | @lblock n d@ is a block of width @n@ characters, with
-- text derived from @d@ and aligned to the left.
lblock :: Int -> Doc -> Doc
lblock = block id
-- | Like 'lblock' but aligned to the right.
rblock :: Int -> Doc -> Doc
rblock w = block (\s -> replicate (w - realLength s) ' ' ++ s) w
-- | Like 'lblock' but centered.
cblock :: Int -> Doc -> Doc
cblock w = block (\s -> replicate ((w - realLength s) `div` 2) ' ' ++ s) w
-- | Returns the height of a block or other 'Doc'.
height :: Doc -> Int
height = length . lines . render Nothing
chop :: Int -> String -> [String]
chop _ [] = []
chop n cs = case break (=='\n') cs of
(xs, ys) -> if len <= n
then case ys of
[] -> [xs]
(_:[]) -> [xs, ""]
(_:zs) -> xs : chop n zs
else take n xs : chop n (drop n xs ++ ys)
where len = realLength xs
-- | Encloses a 'Doc' inside a start and end 'Doc'.
inside :: Doc -> Doc -> Doc -> Doc
inside start end contents =
start <> contents <> end
-- | Puts a 'Doc' in curly braces.
braces :: Doc -> Doc
braces = inside (char '{') (char '}')
-- | Puts a 'Doc' in square brackets.
brackets :: Doc -> Doc
brackets = inside (char '[') (char ']')
-- | Puts a 'Doc' in parentheses.
parens :: Doc -> Doc
parens = inside (char '(') (char ')')
-- | Wraps a 'Doc' in single quotes.
quotes :: Doc -> Doc
quotes = inside (char '\'') (char '\'')
-- | Wraps a 'Doc' in double quotes.
doubleQuotes :: Doc -> Doc
doubleQuotes = inside (char '"') (char '"')
-- | Returns width of a character in a monospace font: 0 for a combining
-- character, 1 for a regular character, 2 for an East Asian wide character.
charWidth :: Char -> Int
charWidth c =
case c of
_ | c < '\x0300' -> 1
| c >= '\x0300' && c <= '\x036F' -> 0 -- combining
| c >= '\x0370' && c <= '\x10FC' -> 1
| c >= '\x1100' && c <= '\x115F' -> 2
| c >= '\x1160' && c <= '\x11A2' -> 1
| c >= '\x11A3' && c <= '\x11A7' -> 2
| c >= '\x11A8' && c <= '\x11F9' -> 1
| c >= '\x11FA' && c <= '\x11FF' -> 2
| c >= '\x1200' && c <= '\x2328' -> 1
| c >= '\x2329' && c <= '\x232A' -> 2
| c >= '\x232B' && c <= '\x2E31' -> 1
| c >= '\x2E80' && c <= '\x303E' -> 2
| c == '\x303F' -> 1
| c >= '\x3041' && c <= '\x3247' -> 2
| c >= '\x3248' && c <= '\x324F' -> 1 -- ambiguous
| c >= '\x3250' && c <= '\x4DBF' -> 2
| c >= '\x4DC0' && c <= '\x4DFF' -> 1
| c >= '\x4E00' && c <= '\xA4C6' -> 2
| c >= '\xA4D0' && c <= '\xA95F' -> 1
| c >= '\xA960' && c <= '\xA97C' -> 2
| c >= '\xA980' && c <= '\xABF9' -> 1
| c >= '\xAC00' && c <= '\xD7FB' -> 2
| c >= '\xD800' && c <= '\xDFFF' -> 1
| c >= '\xE000' && c <= '\xF8FF' -> 1 -- ambiguous
| c >= '\xF900' && c <= '\xFAFF' -> 2
| c >= '\xFB00' && c <= '\xFDFD' -> 1
| c >= '\xFE00' && c <= '\xFE0F' -> 1 -- ambiguous
| c >= '\xFE10' && c <= '\xFE19' -> 2
| c >= '\xFE20' && c <= '\xFE26' -> 1
| c >= '\xFE30' && c <= '\xFE6B' -> 2
| c >= '\xFE70' && c <= '\xFEFF' -> 1
| c >= '\xFF01' && c <= '\xFF60' -> 2
| c >= '\xFF61' && c <= '\x16A38' -> 1
| c >= '\x1B000' && c <= '\x1B001' -> 2
| c >= '\x1D000' && c <= '\x1F1FF' -> 1
| c >= '\x1F200' && c <= '\x1F251' -> 2
| c >= '\x1F300' && c <= '\x1F773' -> 1
| c >= '\x20000' && c <= '\x3FFFD' -> 2
| otherwise -> 1
-- | Get real length of string, taking into account combining and double-wide
-- characters.
realLength :: String -> Int
realLength = sum . map charWidth
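-- Added doctest-style examples (not in the original module): a combining
-- accent contributes zero columns, while CJK characters count as two.
--
-- >>> realLength "e\x0301"
-- 1
-- >>> realLength "日本語"
-- 6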
| nickbart1980/pandoc | src/Text/Pandoc/Pretty.hs | gpl-2.0 | 16,517 | 0 | 16 | 5,407 | 5,064 | 2,606 | 2,458 | 376 | 10 |
module Wfg (wfg,nds)
where
import qualified Data.List.Stream as S
import Utils
boundPoint :: Point -> Point -> Point
boundPoint [] [] = []
boundPoint (b:bound) (p:point) = min b p : boundPoint bound point
data Dominated = XbeatsY | YbeatsX | NeitherDominates deriving (Enum, Eq)
dom :: Dominated -> Point -> Point -> Dominated
dom NeitherDominates [] [] = XbeatsY
dom domsofar [] [] = domsofar
dom NeitherDominates (x:xx) (y:yy) | x > y = dom XbeatsY xx yy
| y > x = dom YbeatsX xx yy
| otherwise = dom NeitherDominates xx yy
dom XbeatsY (x:xx) (y:yy) | x >= y = dom XbeatsY xx yy
| y > x = NeitherDominates
dom YbeatsX (x:xx) (y:yy) | y >= x = dom YbeatsX xx yy
| x > y = NeitherDominates
filterDominated :: Point -> Points -> (Maybe Point, Points)
filterDominated dp [] = (Just dp, [])
filterDominated dp (p:ps) | domResult == XbeatsY = filterDominated dp ps
| domResult == YbeatsX = (Nothing, (p:ps))
| otherwise = (\(x, xx) -> (x, (p : xx))) (filterDominated dp ps)
where domResult = dom NeitherDominates dp p
nds :: Points -> Points
nds [] = []
nds (p : ps) = dropP (filterDominated p ps)
where dropP (Nothing, xs) = nds xs
dropP (Just x, xs) = x : nds xs
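-- Added usage sketch (not part of the original module), assuming 'Point' is
-- a list of numeric coordinates and that larger coordinates dominate, as the
-- 'dom' ordering above implies: in @nds [[1,1],[2,2],[3,1]]@ the point [1,1]
-- is dominated by [2,2] and is dropped, leaving the mutually non-dominated
-- front [[2,2],[3,1]].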
wfg :: Objective -> Point -> Points -> Volume
wfg ref bound ps = wfg' ref bound (S.sort ps)
wfg' :: Objective -> Point -> Points -> Volume
wfg' _ref _bound [] = 0
wfg' ref bound (p:ps) = exclhv ref bound p ps + wfg' ref bound ps
exclhv :: Objective -> Point -> Point -> Points -> Volume
exclhv ref _bound p [] = inclhv ref p
exclhv ref bound p ps = inclhv ref p - wfg' ref newbound (S.sort nonDomFront)
where newbound = boundPoint bound p
boundedFront = (S.map (boundPoint newbound) ps)
nonDomFront = nds boundedFront
inclhv :: Objective -> Point -> Volume
inclhv ref p = S.product [x - ref | x <- p]
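-- Added worked example (not in the original module), assuming, as the code
-- suggests, that 'Objective' is a single scalar reference value and 'Point'
-- a list of coordinates: with reference 0 and a sufficiently large bound,
-- the point [2,2] of the front [[1,1],[2,2]] has inclusive volume 4 and the
-- contribution of [1,1] lies entirely inside it, so
-- @wfg 0 [10,10] [[1,1],[2,2]]@ evaluates to 4.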
| lbradstreet/wfg-hs | Wfg.hs | gpl-2.0 | 1,942 | 2 | 12 | 520 | 889 | 452 | 437 | 41 | 2 |
{- |
Module : $Header$
Copyright : (c) Christian Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (ATP GUI)
call metis prover
-}
module SoftFOL.ProveMetis
( metisProver
, metisProveCMDLautomaticBatch
) where
import Common.AS_Annotation as AS_Anno
import Common.ProofTree
import Common.Timing
import Common.Utils
import Common.SZSOntology
import qualified Common.Result as Result
import qualified Control.Concurrent as Concurrent
import Control.Monad
import Data.List
import Data.Maybe
import GUI.GenericATP
import Interfaces.GenericATPState
import Logic.Prover
import Proofs.BatchProcessing
import SoftFOL.ProverState
import SoftFOL.Sign
import SoftFOL.Translate
import System.Directory
import System.Exit
-- | The Prover implementation.
metisProver :: Prover Sign Sentence SoftFOLMorphism () ProofTree
metisProver = mkAutomaticProver "metis" () metisGUI
metisProveCMDLautomaticBatch
{- |
Record for prover specific functions. This is used by both GUI and command
line interface.
-}
atpFun :: String -- ^ theory name
-> ATPFunctions Sign Sentence SoftFOLMorphism ProofTree SoftFOLProverState
atpFun thName = ATPFunctions
{ initialProverState = spassProverState
, atpTransSenName = transSenName
, atpInsertSentence = insertSentenceGen
, goalOutput = showTPTPProblem thName
, proverHelpText = ""
, batchTimeEnv = ""
, fileExtensions = FileExtensions
{ problemOutput = ".tptp"
, proverOutput = ".spass"
, theoryConfiguration = "" }
, runProver = runMetis
, createProverOptions = extraOpts }
{- |
Invokes the generic prover GUI.
-}
metisGUI :: String -- ^ theory name
-> Theory Sign Sentence ProofTree
{- ^ theory consisting of a SoftFOL.Sign.Sign
and a list of Named SoftFOL.Sign.Sentence -}
-> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
-> IO [ProofStatus ProofTree] -- ^ proof status for each goal
metisGUI thName th freedefs =
genericATPgui (atpFun thName) True (proverName metisProver) thName th
freedefs emptyProofTree
-- ** command line function
{- |
Implementation of 'Logic.Prover.proveCMDLautomaticBatch' which provides an
automatic command line interface to the Metis prover.
-}
metisProveCMDLautomaticBatch ::
Bool -- ^ True means include proved theorems
-> Bool -- ^ True means save problem file
-> Concurrent.MVar (Result.Result [ProofStatus ProofTree])
-- ^ used to store the result of the batch run
-> String -- ^ theory name
-> TacticScript -- ^ default tactic script
-> Theory Sign Sentence ProofTree {- ^ theory consisting of a
'SoftFOL.Sign.Sign' and a list of Named 'SoftFOL.Sign.Sentence' -}
-> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
-> IO (Concurrent.ThreadId, Concurrent.MVar ())
{- ^ fst: identifier of the batch thread for killing it
snd: MVar to wait for the end of the thread -}
metisProveCMDLautomaticBatch inclProvedThs saveProblem_batch resultMVar
thName defTS th freedefs =
genericCMDLautomaticBatch (atpFun thName) inclProvedThs saveProblem_batch
resultMVar (proverName metisProver) thName
(parseTacticScript batchTimeLimit [] defTS) th freedefs emptyProofTree
runMetis :: SoftFOLProverState
{- ^ logical part containing the input Sign and axioms and possibly
goals that have been proved earlier as additional axioms -}
-> GenericConfig ProofTree -- ^ configuration to use
-> Bool -- ^ True means save TPTP file
-> String -- ^ name of the theory in the DevGraph
-> AS_Anno.Named SPTerm -- ^ goal to prove
-> IO (ATPRetval, GenericConfig ProofTree)
-- ^ (retval, configuration with proof status and complete output)
runMetis sps cfg saveTPTP thName nGoal = do
let saveFile = basename thName ++ '_' : AS_Anno.senAttr nGoal ++ ".tptp"
prob <- showTPTPProblem thName sps nGoal []
when saveTPTP (writeFile saveFile prob)
timeTmpFile <- getTempFile prob saveFile
start <- getHetsTime
-- try timeout using perl
(ex, out, err) <- executeProcess "perl"
["-e", "alarm shift @ARGV; exec @ARGV"
, show (configTimeLimit cfg), "metis", timeTmpFile] ""
finish <- getHetsTime
let executetime = diffHetsTime finish start
newCfg = cfg
{ timeUsed = executetime
, proofStatus = (proofStatus cfg) {usedTime = executetime}}
finCfg = newCfg { resultOutput = lines $ out ++ err }
removeFile timeTmpFile
return $ case ex of
ExitSuccess ->
( ATPSuccess
, finCfg
{ proofStatus = (proofStatus finCfg)
{ usedAxioms = getAxioms sps
, goalStatus = getGoalStatus out }})
ExitFailure e -> if e == 14 then
( ATPTLimitExceeded
, newCfg
{ timeLimitExceeded = True
, resultOutput = ["TimeOut"] })
else (ATPError err, finCfg)
{-
mapping from SZS Status to Goalstatus
-}
getGoalStatus :: String -> GoalStatus
getGoalStatus l = let ll = lines l in
case mapMaybe (stripPrefix "SZS status") ll of
[] -> Open (Reason ll)
z@(s : _) -> case words s of
w : _
| szsProved w -> Proved True
| szsDisproved w -> Disproved
_ -> Open (Reason z)
| nevrenato/HetsAlloy | SoftFOL/ProveMetis.hs | gpl-2.0 | 5,530 | 0 | 16 | 1,354 | 1,031 | 556 | 475 | 105 | 3 |
{-# LANGUAGE OverloadedStrings #-}
import System.Random
import Network
import Control.Monad
import qualified Data.ByteString.Char8 as B
import Control.Concurrent
import Foreign.Marshal.Alloc
import GHC.IO.Handle
import System.Environment
main = do
[host, port] <- getArgs
sh <- connectTo host $ PortNumber $ fromIntegral $ (read port :: Integer)
forM_ ([0..98]::[Int]) $ \_ -> forkIO (attack host (fromIntegral (read port)))
attack1 sh 0
attack host port = do
hnd <- connectTo host $ PortNumber port
start <- randomRIO (0,2^32)
attack1 hnd start
attack1 sock start = do
forkIO $ do
let sz = 1024*1024
buf <- mallocBytes sz
forever $ hGetBuf sock buf sz >> return ()
let loop n = do
hPutStr sock (msg n)
loop (n+1)
in loop start
msg :: Integer -> [Char]
msg n = "POST /register/robgssp" ++ (show n) ++ " HTTP/1.1\r\n\r\n"
| robgssp/uvb | names.hs | gpl-3.0 | 876 | 0 | 15 | 186 | 354 | 176 | 178 | 29 | 1 |
{- Format and construction of SecPAL proofs -}
module Logic.SecPAL.Proof where
import Logic.SecPAL.Language hiding (constraint, delegation)
import Logic.General.Constraints (C)
import Logic.SecPAL.Pretty
import Logic.General.Pretty()
import Logic.SecPAL.Context
import Data.List
import qualified Logic.SecPAL.Substitutions as S
import Debug.Trace
{- A proof is the application of a rule and the proof of its conditions -}
data Proof a = PStated { conclusion :: (Context, a) }
| PCond { conclusion :: (Context, a)
, ifs :: [[Proof Assertion]]
, constraint :: [Proof C]
, flatness :: Bool
}
| PCanSay { conclusion :: (Context, a)
, delegation :: [Proof Assertion]
, action :: [Proof Assertion]
}
| PCanActAs { conclusion :: (Context, a)
, renaming :: [Proof Assertion]
, renamed :: [Proof Assertion]
}
deriving (Show)
interferes :: Proof a -> Proof b -> Bool
a `interferes` b =
let ta = relevantVariables $ a
tb = relevantVariables $ b
result = ta `S.interferent` tb
in
if result
--then trace ( "@@@ "++pShow ta++" interferes with "++pShow tb ) True
then True
else False
interferent :: [Proof a] -> [Proof b] -> Bool
interferent xs ys = not.null$ [ (x,y)
| x <- xs
, y <- ys
, x `interferes` y
]
relevantVariables :: Proof b -> [S.Substitution]
relevantVariables PCanSay{ conclusion=c, action=as } =
let vAs = concatMap relevantVariables as
vC = theta . fst $ c
in [ x | x <- vAs
, y <- vC
, S.var x == S.var y
]
relevantVariables x = theta . fst . conclusion $ x
instance (PShow a) => PShow (Proof a) where
pShow prf =
let ac' = ac . fst . conclusion $ prf
--in "AC := " ++ pShow ac' ++ "\n" ++ pShow' 0 prf
in pShow' 0 prf
showCtx :: (PShow a, PShow b) => (a, b) -> String
showCtx (ctx, a) = pShow ctx ++" |= "++pShow a
pShow' :: (PShow a) => Int -> Proof a -> String
pShow' n (PStated stm) =
let statement = showCtx stm
proven = replicate (length statement) '-'
in
intercalate "\n" $ map (replicate (n*2) ' ' ++) [statement, proven]
pShow' n PCond{conclusion=cc, ifs=[], constraint=c} =
intercalate "\n" [ replicate (n*2) ' ' ++ showCtx cc
, pShow' (n+1) (head c)
]
pShow' n PCond{conclusion=cc, ifs=is, constraint=c} =
intercalate "\n" [ replicate (n*2) ' ' ++ showCtx cc
, intercalate "\n" $ map (pShow' (n+1)) (head is)
, pShow' (n+1) (head c)
]
pShow' n PCanSay{conclusion=cc, delegation=de, action=a} =
intercalate "\n" [ replicate (n*2) ' ' ++ showCtx cc
, pShow' (n+1) $ head de
, pShow' (n+1) $ head a
]
pShow' n PCanActAs{conclusion=cc, renaming=r, renamed=q} =
intercalate "\n" [ replicate (n*2) ' ' ++ showCtx cc
, pShow' (n+1) $ head r
, pShow' (n+1) $ head q
]
makeCond :: (Context, Assertion)
-> [[Proof Assertion]]
-> [Proof C]
-> Bool
-> [Proof Assertion]
makeCond cc@(_, Assertion{ says=Claim{ conditions=conds }}) is cs flat
| not (null conds) && null is = []
| null cs = []
| not flat = []
| otherwise = [PCond cc is cs flat]
makeCanSay :: (Context, Assertion)
-> [ Proof Assertion ]
-> [ Proof Assertion ]
-> [ Proof Assertion ]
makeCanSay cc de a
| null de = []
| null a = []
| otherwise = [PCanSay cc de a]
makeCanActAs :: (Context, Assertion)
-> [ Proof Assertion ]
-> [ Proof Assertion ]
-> [ Proof Assertion ]
makeCanActAs cc delta q
| null delta = []
| null q = []
| otherwise = [PCanActAs cc delta q]
| bogwonch/SecPAL | src/Logic/SecPAL/Proof.hs | gpl-3.0 | 4,168 | 4 | 13 | 1,528 | 1,489 | 792 | 697 | 94 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import Test.Framework (defaultMain)
import Test.Framework.Providers.API
import System.FilePath
import System.Directory
import System.Process
import System.Exit
import Control.Exception
import Data.Typeable
import Prelude hiding (catch)
data CheckResult = CheckOk
-- ^ The property is true as far as we could check it
| CheckFailed String
-- ^ The property was not true. The string is the reason.
| CheckNoExpectedFailure
-- ^ We expected that a property would fail but it didn't
| CheckTimedOut
-- ^ The property timed out during execution
| CheckException String
-- ^ The property raised an exception during execution
deriving Typeable
instance Show CheckResult where
show CheckOk = "Ok, passed."
show (CheckFailed err) = "Failed: " ++ err
show CheckNoExpectedFailure = "No expected failure."
show CheckTimedOut = "Timed out."
show (CheckException err) = "Exception: " ++ err
instance Exception CheckResult
data CheckRunning = CheckRunning
deriving Show
instance TestResultlike CheckRunning CheckResult where
testSucceeded CheckOk = True
testSucceeded _ = False
data Expectation = ExpectSuccess | ExpectFailure
data Check = Check { chk_test :: String
, chk_deps :: [String]
, chk_expect :: Expectation }
instance Testlike CheckRunning CheckResult Check where
runTest topts Check{..} = runImprovingIO $ do
yieldImprovement CheckRunning
result <- maybeTimeoutImprovingIO (unK $ topt_timeout topts) $
liftIO (dkrun chk_test chk_deps chk_expect)
return (result `orElse` CheckTimedOut)
testTypeName _ = "Type Checking Runs"
dkrun :: FilePath -> [FilePath] -> Expectation -> IO CheckResult
dkrun dk deps exp = do
cwd <- getCurrentDirectory
setCurrentDirectory $ cwd </> "t"
result <- catch (do
let compile = rawSystem "../dist/build/dedukti/dedukti" [dk <.> "dk"]
run = rawSystem "../scripts/dkrun" $ (dk <.> "dko") : map (<.> "dko") deps
test (return ()) fail compile
case exp of
ExpectSuccess -> test (return CheckOk) fail run
ExpectFailure -> test (return CheckNoExpectedFailure) (const $ return CheckOk) run)
(\e -> return (e :: CheckResult))
setCurrentDirectory cwd
return result
where test s f m = m >>= \code -> case code of
ExitSuccess -> s
ExitFailure x -> f x
fail x = throw $ CheckFailed $ "exit status " ++ show x
check :: TestName -> [String] -> Test
check name deps = Test name (Check name deps ExpectSuccess)
checkFailure :: TestName -> [String] -> Test
checkFailure name deps = Test name (Check name deps ExpectFailure)
-- * Actual tests.
main = defaultMain tests
tests = [ testGroup "Smoke tests"
[ check "nat" []
, check "coc" []
, check "logic" ["coc"]
, check "peano" ["logic", "coc"]
, checkFailure "bug" [] ] ]
| mboes/dedukti | Test.hs | gpl-3.0 | 3,104 | 0 | 18 | 860 | 790 | 411 | 379 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module TestUtils where
import Test.Microspec
import Prelude hiding ((<*), (*>))
import Data.List (sort)
import Sound.Tidal.Context
import qualified Data.Map.Strict as Map
-- | Compare the events of two patterns using the given arc
compareP :: (Ord a, Show a) => Arc -> Pattern a -> Pattern a -> Property
compareP a p p' = (sort $ query (stripContext p) $ State a Map.empty) `shouldBe` (sort $ query (stripContext p') $ State a Map.empty)
-- | Like @compareP@, but tries to 'defragment' the events
comparePD :: (Ord a) => Arc -> Pattern a -> Pattern a -> Bool
comparePD a p p' = compareDefrag es es'
where es = query (stripContext p) (State a Map.empty)
es' = query (stripContext p') (State a Map.empty)
-- | Like @compareP@, but for control patterns, with some tolerance for floating point error
compareTol :: Arc -> ControlPattern -> ControlPattern -> Bool
compareTol a p p' = (sort $ queryArc (stripContext p) a) ~== (sort $ queryArc (stripContext p') a)
-- | Utility to create a pattern from a String
ps :: String -> Pattern String
ps = parseBP_E
stripContext :: Pattern a -> Pattern a
stripContext = setContext $ Context []
| d0kt0r0/Tidal | test/TestUtils.hs | gpl-3.0 | 1,184 | 0 | 11 | 220 | 383 | 204 | 179 | 19 | 1 |
{-# LANGUAGE UnicodeSyntax, OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts, ConstraintKinds, TypeSynonymInstances, FlexibleInstances #-}
--
-- implements ToMarkup for diff tree so that you can call Blaze toHtml on a
-- diff tree.
--
module ExtRep.DiffTreeToHtml (RenderableTree) where
import BaseImport
import Data.List
import Data.Ord
import Data.Tree.Class
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Diff.Content
import Diff.DiffTree
-- convenience constraint synonym
type RenderableTree t ξ = (ContentTree t ξ, ToMarkup (Data ξ))
{-
produce an HTML ul structure to mirror input tree, e.g.
ul
li RootClass
ul
li field₁
li field₂
li ClassX
ul
li f₁
li ClassY
ul
li f₁
li text = rendered content node; rendering as follows:
<span class="unchanged inner"> (or new or deleted; inner|leaf to indicate node depth)
<span class="nodeName">…</span>
<span class="payload">…</span>
</span>
for Unchanged, New, or Deleted diff node. Changed diff node rendered as:
<span class="changed leaf"> (or inner)
<span class="nodeName">…</span>
<span class="payload">…</span>
<span class="newPayload">…</span>
</span>
-}
instance RenderableTree t ξ ⇒ ToMarkup (DiffTree t ξ) where
toMarkup t = renderForest [t] -- NB Blaze toHtml function is an alias for this:
-- type Html = Markup, toHtml = toMarkup
renderTree ∷ RenderableTree t ξ ⇒ DiffTree t ξ → Html
renderTree t = li (renderNode t >> renderForest (getChildren t))
renderForest ∷ RenderableTree t ξ ⇒ [DiffTree t ξ] → Html
renderForest [] = return ()
renderForest ts = ul $ mapM_ renderTree (sortBy cmp ts)
where
cmp = comparing (diffContentNodeId ∘ getNode)
renderNode ∷ RenderableTree t ξ ⇒ DiffTree t ξ → Html
renderNode t = itm ∘ getNode $ t
where
nic ∷ String → AttributeValue -- node indicator class
nic clazz = toValue $ (clazz++) $ if isLeaf t then " leaf" else " inner"
itm (New x) = wrapS (nic "new") x
itm (Deleted x) = wrapS (nic "deleted") x
itm (Unchanged x) = wrapS (nic "unchanged") x
itm (Changed old new) = mkSpan (nic "changed") $
(mkNameValueSpan old >> mkSpanH "newPayload" (payload new))
mkSpan clazz = H.span ! A.class_ clazz
mkSpanH clazz content = mkSpan clazz $ (toHtml content)
mkNameValueSpan x = mkSpanH "nodeName" (nodeName x) >> mkSpanH "payload" (payload x)
wrapS clazz x = mkSpan clazz $ mkNameValueSpan x
| c0c0n3/audidoza | app/ExtRep/DiffTreeToHtml.hs | gpl-3.0 | 2,778 | 0 | 12 | 728 | 581 | 303 | 278 | 34 | 5 |
module Problems01thru10 where
import Data.List (group)
-- Problem 1
-- Find the last element of a list
last' :: [a] -> a
last' [] = undefined
last' [x] = x
last' (x:xs) = last' xs
last'' :: [a] -> a
last'' xs
| null xs = undefined
| otherwise = xs !! (n-1)
where n = length xs
-- Problem 2
-- Find the last but one element of a list.
penultimate :: [a] -> a
penultimate xs
| n < 2 = undefined
| otherwise = xs !! (n-2)
where n = length xs
-- Problem 3
-- Find the K'th element of a list. The first element in the list is number 1.
elementAt :: [a] -> Int -> a
elementAt xs k
| n < k = undefined
| otherwise = xs !! (k-1)
where n = length xs
-- Problem 4
-- Find the number of elements of a list.
length' :: Num b => [a] -> b
length' = foldl (\acc x -> acc + 1) 0
length'' :: Num b => [a] -> b
length'' [] = 0
length'' (x:xs) = 1 + length'' xs
-- Problem 5
-- Reverse a list.
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = (reverse' xs) ++ [x]
reverse'' :: [a] -> [a]
reverse'' = foldr (\x acc -> acc++[x]) []
reverse''' :: [a] -> [a]
reverse''' = foldl (flip (:)) []
-- Problem 6
-- Find out whether a list is a palindrome. A palindrome can be read forward or
-- backward; e.g. (x a m a x).
isPalindrome :: Eq a => [a] -> Bool
isPalindrome xs = xs == reverse xs
-- Problem 7
-- Flatten a nested list structure.
-- Transform a list, possibly holding lists as elements into a `flat' list by
-- replacing each list with its elements (recursively).
data NestedList a = Elem a | List [NestedList a]
flatten :: NestedList a -> [a]
flatten (Elem x) = [x]
flatten (List []) = []
flatten (List (x:xs)) = flatten x ++ flatten (List xs)
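-- Added doctest-style example (not in the original file):
--
-- >>> flatten (List [Elem 1, List [Elem 2, List [Elem 3, Elem 4], Elem 5]])
-- [1,2,3,4,5]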
-- Problem 8
-- Eliminate consecutive duplicates of list elements.
-- If a list contains repeated elements they should be replaced with a single
-- copy of the element. The order of the elements should not be changed.
compress :: Eq a => [a] -> [a]
compress [] = []
compress [x] = [x]
compress (x:xs)
| x == head xs = compress xs
| otherwise = x:compress xs
-- Problem 9
-- Pack consecutive duplicates of list elements into sublists. If a list
-- contains repeated elements they should be placed in separate sublists.
pack :: Eq a => [a] -> [[a]]
pack = foldr collect [[]]
where collect :: (Eq a) => a -> [[a]] -> [[a]]
collect x (a:acc)
| null a = [x]:acc
| x == head a = (x:a):acc
| otherwise = [x]:a:acc
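-- Added doctest-style example (not in the original file):
--
-- >>> pack "aaaabccaadeeee"
-- ["aaaa","b","cc","aa","d","eeee"]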
-- Problem 10
-- Run-length encoding of a list. Use the result of problem P09 to implement
-- the so-called run-length encoding data compression method. Consecutive
-- duplicates of elements are encoded as lists (N E) where N is the number of
-- duplicates of the element E.
encode :: Eq a => [a] -> [(Int, a)]
encode = map (\x -> (length x, head x)) . group
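-- Added doctest-style example (not in the original file):
--
-- >>> encode "aaaabccaadeeee"
-- [(4,'a'),(1,'b'),(2,'c'),(2,'a'),(1,'d'),(4,'e')]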
| zcesur/h99 | src/Problems01thru10.hs | gpl-3.0 | 2,853 | 0 | 11 | 703 | 960 | 516 | 444 | 55 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigtableAdmin.Projects.Instances.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for an instance resource. Returns an
-- empty policy if an instance exists but does not have a policy set.
--
-- /See:/ <https://cloud.google.com/bigtable/ Cloud Bigtable Admin API Reference> for @bigtableadmin.projects.instances.getIamPolicy@.
module Network.Google.Resource.BigtableAdmin.Projects.Instances.GetIAMPolicy
(
-- * REST Resource
ProjectsInstancesGetIAMPolicyResource
-- * Creating a Request
, projectsInstancesGetIAMPolicy
, ProjectsInstancesGetIAMPolicy
-- * Request Lenses
, pigipXgafv
, pigipUploadProtocol
, pigipAccessToken
, pigipUploadType
, pigipPayload
, pigipResource
, pigipCallback
) where
import Network.Google.BigtableAdmin.Types
import Network.Google.Prelude
-- | A resource alias for @bigtableadmin.projects.instances.getIamPolicy@ method which the
-- 'ProjectsInstancesGetIAMPolicy' request conforms to.
type ProjectsInstancesGetIAMPolicyResource =
"v2" :>
CaptureMode "resource" "getIamPolicy" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] GetIAMPolicyRequest :>
Post '[JSON] Policy
-- | Gets the access control policy for an instance resource. Returns an
-- empty policy if an instance exists but does not have a policy set.
--
-- /See:/ 'projectsInstancesGetIAMPolicy' smart constructor.
data ProjectsInstancesGetIAMPolicy =
ProjectsInstancesGetIAMPolicy'
{ _pigipXgafv :: !(Maybe Xgafv)
, _pigipUploadProtocol :: !(Maybe Text)
, _pigipAccessToken :: !(Maybe Text)
, _pigipUploadType :: !(Maybe Text)
, _pigipPayload :: !GetIAMPolicyRequest
, _pigipResource :: !Text
, _pigipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesGetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pigipXgafv'
--
-- * 'pigipUploadProtocol'
--
-- * 'pigipAccessToken'
--
-- * 'pigipUploadType'
--
-- * 'pigipPayload'
--
-- * 'pigipResource'
--
-- * 'pigipCallback'
projectsInstancesGetIAMPolicy
:: GetIAMPolicyRequest -- ^ 'pigipPayload'
-> Text -- ^ 'pigipResource'
-> ProjectsInstancesGetIAMPolicy
projectsInstancesGetIAMPolicy pPigipPayload_ pPigipResource_ =
ProjectsInstancesGetIAMPolicy'
{ _pigipXgafv = Nothing
, _pigipUploadProtocol = Nothing
, _pigipAccessToken = Nothing
, _pigipUploadType = Nothing
, _pigipPayload = pPigipPayload_
, _pigipResource = pPigipResource_
, _pigipCallback = Nothing
}
-- | V1 error format.
pigipXgafv :: Lens' ProjectsInstancesGetIAMPolicy (Maybe Xgafv)
pigipXgafv
= lens _pigipXgafv (\ s a -> s{_pigipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pigipUploadProtocol :: Lens' ProjectsInstancesGetIAMPolicy (Maybe Text)
pigipUploadProtocol
= lens _pigipUploadProtocol
(\ s a -> s{_pigipUploadProtocol = a})
-- | OAuth access token.
pigipAccessToken :: Lens' ProjectsInstancesGetIAMPolicy (Maybe Text)
pigipAccessToken
= lens _pigipAccessToken
(\ s a -> s{_pigipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pigipUploadType :: Lens' ProjectsInstancesGetIAMPolicy (Maybe Text)
pigipUploadType
= lens _pigipUploadType
(\ s a -> s{_pigipUploadType = a})
-- | Multipart request metadata.
pigipPayload :: Lens' ProjectsInstancesGetIAMPolicy GetIAMPolicyRequest
pigipPayload
= lens _pigipPayload (\ s a -> s{_pigipPayload = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
pigipResource :: Lens' ProjectsInstancesGetIAMPolicy Text
pigipResource
= lens _pigipResource
(\ s a -> s{_pigipResource = a})
-- | JSONP
pigipCallback :: Lens' ProjectsInstancesGetIAMPolicy (Maybe Text)
pigipCallback
= lens _pigipCallback
(\ s a -> s{_pigipCallback = a})
instance GoogleRequest ProjectsInstancesGetIAMPolicy
where
type Rs ProjectsInstancesGetIAMPolicy = Policy
type Scopes ProjectsInstancesGetIAMPolicy =
'["https://www.googleapis.com/auth/bigtable.admin",
"https://www.googleapis.com/auth/bigtable.admin.cluster",
"https://www.googleapis.com/auth/bigtable.admin.instance",
"https://www.googleapis.com/auth/cloud-bigtable.admin",
"https://www.googleapis.com/auth/cloud-bigtable.admin.cluster",
"https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsInstancesGetIAMPolicy'{..}
= go _pigipResource _pigipXgafv _pigipUploadProtocol
_pigipAccessToken
_pigipUploadType
_pigipCallback
(Just AltJSON)
_pigipPayload
bigtableAdminService
where go
= buildClient
(Proxy ::
Proxy ProjectsInstancesGetIAMPolicyResource)
mempty
| brendanhay/gogol | gogol-bigtableadmin/gen/Network/Google/Resource/BigtableAdmin/Projects/Instances/GetIAMPolicy.hs | mpl-2.0 | 6,149 | 0 | 16 | 1,333 | 795 | 466 | 329 | 122 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Flows.TransitionRouteGroups.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the specified TransitionRouteGroup. Note: You should always
-- train a flow prior to sending it queries. See the [training
-- documentation](https:\/\/cloud.google.com\/dialogflow\/cx\/docs\/concept\/training).
--
-- /See:/ <https://cloud.google.com/dialogflow/ Dialogflow API Reference> for @dialogflow.projects.locations.agents.flows.transitionRouteGroups.patch@.
module Network.Google.Resource.DialogFlow.Projects.Locations.Agents.Flows.TransitionRouteGroups.Patch
(
-- * REST Resource
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatchResource
-- * Creating a Request
, projectsLocationsAgentsFlowsTransitionRouteGroupsPatch
, ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
-- * Request Lenses
, plaftrgpXgafv
, plaftrgpLanguageCode
, plaftrgpUploadProtocol
, plaftrgpUpdateMask
, plaftrgpAccessToken
, plaftrgpUploadType
, plaftrgpPayload
, plaftrgpName
, plaftrgpCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.locations.agents.flows.transitionRouteGroups.patch@ method which the
-- 'ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch' request conforms to.
type ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatchResource
=
"v3" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "languageCode" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "updateMask" GFieldMask :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleCloudDialogflowCxV3TransitionRouteGroup
:>
Patch '[JSON]
GoogleCloudDialogflowCxV3TransitionRouteGroup
-- | Updates the specified TransitionRouteGroup. Note: You should always
-- train a flow prior to sending it queries. See the [training
-- documentation](https:\/\/cloud.google.com\/dialogflow\/cx\/docs\/concept\/training).
--
-- /See:/ 'projectsLocationsAgentsFlowsTransitionRouteGroupsPatch' smart constructor.
data ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch =
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch'
{ _plaftrgpXgafv :: !(Maybe Xgafv)
, _plaftrgpLanguageCode :: !(Maybe Text)
, _plaftrgpUploadProtocol :: !(Maybe Text)
, _plaftrgpUpdateMask :: !(Maybe GFieldMask)
, _plaftrgpAccessToken :: !(Maybe Text)
, _plaftrgpUploadType :: !(Maybe Text)
, _plaftrgpPayload :: !GoogleCloudDialogflowCxV3TransitionRouteGroup
, _plaftrgpName :: !Text
, _plaftrgpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plaftrgpXgafv'
--
-- * 'plaftrgpLanguageCode'
--
-- * 'plaftrgpUploadProtocol'
--
-- * 'plaftrgpUpdateMask'
--
-- * 'plaftrgpAccessToken'
--
-- * 'plaftrgpUploadType'
--
-- * 'plaftrgpPayload'
--
-- * 'plaftrgpName'
--
-- * 'plaftrgpCallback'
projectsLocationsAgentsFlowsTransitionRouteGroupsPatch
:: GoogleCloudDialogflowCxV3TransitionRouteGroup -- ^ 'plaftrgpPayload'
-> Text -- ^ 'plaftrgpName'
-> ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
projectsLocationsAgentsFlowsTransitionRouteGroupsPatch pPlaftrgpPayload_ pPlaftrgpName_ =
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch'
{ _plaftrgpXgafv = Nothing
, _plaftrgpLanguageCode = Nothing
, _plaftrgpUploadProtocol = Nothing
, _plaftrgpUpdateMask = Nothing
, _plaftrgpAccessToken = Nothing
, _plaftrgpUploadType = Nothing
, _plaftrgpPayload = pPlaftrgpPayload_
, _plaftrgpName = pPlaftrgpName_
, _plaftrgpCallback = Nothing
}
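-- A rough usage sketch (illustrative only; the resource name below is a
-- placeholder, and a 'googleCloudDialogflowCxV3TransitionRouteGroup' smart
-- constructor is assumed to exist in "Network.Google.DialogFlow.Types"):
--
-- > projectsLocationsAgentsFlowsTransitionRouteGroupsPatch
-- >   googleCloudDialogflowCxV3TransitionRouteGroup
-- >   "projects/p/locations/l/agents/a/flows/f/transitionRouteGroups/g"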
-- | V1 error format.
plaftrgpXgafv :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Xgafv)
plaftrgpXgafv
= lens _plaftrgpXgafv
(\ s a -> s{_plaftrgpXgafv = a})
-- | The language of the following fields in \`TransitionRouteGroup\`: *
-- \`TransitionRouteGroup.transition_routes.trigger_fulfillment.messages\`
-- *
-- \`TransitionRouteGroup.transition_routes.trigger_fulfillment.conditional_cases\`
-- If not specified, the agent\'s default language is used. [Many
-- languages](https:\/\/cloud.google.com\/dialogflow\/cx\/docs\/reference\/language)
-- are supported. Note: languages must be enabled in the agent before they
-- can be used.
plaftrgpLanguageCode :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Text)
plaftrgpLanguageCode
= lens _plaftrgpLanguageCode
(\ s a -> s{_plaftrgpLanguageCode = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plaftrgpUploadProtocol :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Text)
plaftrgpUploadProtocol
= lens _plaftrgpUploadProtocol
(\ s a -> s{_plaftrgpUploadProtocol = a})
-- | The mask to control which fields get updated.
plaftrgpUpdateMask :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe GFieldMask)
plaftrgpUpdateMask
= lens _plaftrgpUpdateMask
(\ s a -> s{_plaftrgpUpdateMask = a})
-- | OAuth access token.
plaftrgpAccessToken :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Text)
plaftrgpAccessToken
= lens _plaftrgpAccessToken
(\ s a -> s{_plaftrgpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plaftrgpUploadType :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Text)
plaftrgpUploadType
= lens _plaftrgpUploadType
(\ s a -> s{_plaftrgpUploadType = a})
-- | Multipart request metadata.
plaftrgpPayload :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch GoogleCloudDialogflowCxV3TransitionRouteGroup
plaftrgpPayload
= lens _plaftrgpPayload
(\ s a -> s{_plaftrgpPayload = a})
-- | The unique identifier of the transition route group.
-- TransitionRouteGroups.CreateTransitionRouteGroup populates the name
-- automatically. Format:
-- \`projects\/\/locations\/\/agents\/\/flows\/\/transitionRouteGroups\/\`.
plaftrgpName :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch Text
plaftrgpName
= lens _plaftrgpName (\ s a -> s{_plaftrgpName = a})
-- | JSONP
plaftrgpCallback :: Lens' ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch (Maybe Text)
plaftrgpCallback
= lens _plaftrgpCallback
(\ s a -> s{_plaftrgpCallback = a})
instance GoogleRequest
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
where
type Rs
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
= GoogleCloudDialogflowCxV3TransitionRouteGroup
type Scopes
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatch'{..}
= go _plaftrgpName _plaftrgpXgafv
_plaftrgpLanguageCode
_plaftrgpUploadProtocol
_plaftrgpUpdateMask
_plaftrgpAccessToken
_plaftrgpUploadType
_plaftrgpCallback
(Just AltJSON)
_plaftrgpPayload
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsAgentsFlowsTransitionRouteGroupsPatchResource)
mempty
| brendanhay/gogol | gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Locations/Agents/Flows/TransitionRouteGroups/Patch.hs | mpl-2.0 | 8,733 | 0 | 18 | 1,721 | 955 | 561 | 394 | 151 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module TransportUDP
(startNodeComm)
where
import Blockchain (Block, BlockData)
import CommandDispatcher
import Logging (consensusFollower)
import Control.Concurrent (forkIO)
import Data.Monoid ((<>))
import Network.Multicast as NM (multicastReceiver,
multicastSender)
import Network.Socket as N (HostName, PortNumber, SockAddr,
Socket)
import Network.Socket.ByteString as N (recvFrom, sendTo)
import System.Log.Logger (infoM)
startNodeComm :: CommandDispatcher -> HostName -> PortNumber -> IO ()
startNodeComm (CommandDispatcher handleConsensusMessage getMsgsToSendToConsensusNodes sendToConsensusNodes _ _ isValid) host port = do
_ <- infoN host port "startNodeComm: ENTER"
(sendSock, sendAddr) <- multicastSender host port
recSock <- multicastReceiver host port
forkIO $ send host port sendSock sendAddr getMsgsToSendToConsensusNodes
forkIO $ rec host port recSock sendSock sendAddr handleConsensusMessage sendToConsensusNodes isValid
infoN host port "startNodeComm: EXIT"
return ()
rec :: HostName -> PortNumber -> Socket -> Socket -> SockAddr
-> HandleConsensusMessage
-> (BlockData -> IO ())
-> (Block -> IO (Maybe String))
-> IO ()
rec host port recSock sendSock sendAddr handleConsensusMessage sendToConsensusNodes isValid = do
infoN host port "rec: waiting"
(msg,addr) <- N.recvFrom recSock 1024
infoN host port ("rec: from: " <> show addr <> " " <> show msg)
handleConsensusMessage host port sendToConsensusNodes isValid msg
rec host port recSock sendSock sendAddr handleConsensusMessage sendToConsensusNodes isValid
-- Read from sendToConsensusNodes and broadcast
send :: HostName -> PortNumber -> Socket -> SockAddr -> IO BlockData -> IO () -- TODO ByteString
send host port sock addr getMsgsToSendToConsensusNodes = do
infoN host port "send: waiting"
msg <- getMsgsToSendToConsensusNodes
infoN host port ("send: " ++ show msg)
sendTo sock msg addr
send host port sock addr getMsgsToSendToConsensusNodes
infoN :: HostName -> PortNumber -> String -> IO Int
infoN h p msg = do
infoM consensusFollower ("T " <> h <> ":" <> show p <> " " <> msg)
return 1 -- to match sendTo
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/blockchain/blockchain-framework-DELETE/src/TransportUDP.hs | unlicense | 2,451 | 0 | 17 | 621 | 644 | 324 | 320 | 45 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
module DeployR.Types where
import Data.Aeson.Types
import GHC.Generics
import Data.Text(Text)
import qualified Data.Text as T
import Data.Map(Map)
import qualified Data.Map as Map
import Data.List(isPrefixOf)
import Data.Maybe(catMaybes, maybeToList)
import Servant.API -- for instances
import DeployR.RObject
---------------------------------
-- Data types for the deployR API
-- | generic response type for DeployR.
-- The responses are largely the same, but may contain additional
-- fields in particular cases. Modelled by a type argument.
data DRResponse a =
DRSuccess { -- if success is "true", read result payload and cookie
drCall :: Text
, drCookie :: Maybe Text
, drExtra :: a -- Extra data, often more than one thing
-- This extra field will probably end up ugly for *JSON instances.
-- The other option is to repeat all fields over and over...
}
| DRError { -- if success is "false": error, retrieve message and code
drError :: Text
, drErrCode :: Int
, drCookie :: Maybe Text
}
deriving (Eq, Show, Read, Generic)
-- | helper to dive into object hierarchies
-- I thought withObject would do this (one level), but found out it does not.
-- The Value is verified to be an Object, then the named fields from
-- it are selected in order, all expected to be nested object. The extractA
-- function is applied to the innermost object to yield the desired result a.
inPath :: [Text] -> (Object -> Parser a) -> Value -> Parser a
inPath fs extractA = withObject ("path " ++ T.unpack (T.intercalate "." fs))
(foldr descend extractA fs)
where descend :: Text -> (Object -> Parser a) -> Object -> Parser a
descend n f o = o .: n >>= withObject (T.unpack $ T.unwords ["object",n]) f
-- meaning: "parse an object called n inside o, then do f with it"
{-
with an Object -> Parser a function:
(Object -> Parser a) -> Text -> (Object -> Parser a)
\ extractO name ->
withObject (printf "object with %s" name)
(\o -> o .: name >>= extract)
-}
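-- A hedged usage sketch (the path and field below are only illustrative):
--
-- > inPath ["deployr", "response"] (.: "call") value
--
-- parses @value.deployr.response.call@ and fails if any intermediate level
-- is missing or is not an object.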
-- | The JSON parser for DRResponse a uses a parseJSONPayload parser for a.
instance (FromJSONPayload a) => FromJSON (DRResponse a) where
parseJSON = inPath ["deployr", "response"] parseResponse
where parseResponse re = do
ok <- re .: "success"
if ok then DRSuccess <$> re .: "call"
<*> re .:? "httpcookie"
<*> parseJSONPayload re
else DRError <$> re .: "error"
<*> re .: "errorCode"
<*> re .:? "httpcookie"
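-- For orientation, a schematic success response this parser accepts
-- (field values invented for illustration; the object must also carry
-- whatever extra members 'parseJSONPayload' reads for the payload type):
--
-- > { "deployr": { "response":
-- >     { "success": true, "call": "...", "httpcookie": "...", ... } } }
--
-- When @"success"@ is @false@, the @error@ and @errorCode@ fields are read
-- instead.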
-- | a special type class for the payload in a DeployR response. If this
-- was simply using FromJSON, one would need to create bogus parsers that
-- could not be used outside the context of the parseJSON :: Value ->
-- DRResponse instance. For instance, the payload might consist of multiple
-- members of the response object, which we can model as tuples here.
class FromJSONPayload a where
parseJSONPayload :: Object -> Parser a -- used inside the DRResponse parser
-- Response payload data
-- no payload in response
instance FromJSONPayload () where
parseJSONPayload _ = return ()
-- | response to login
data DRUser = DRUser {
username :: Text
, displayname :: Text
, cookie :: Maybe Text
, permissions :: Object
-- this is the "user" structure, "limits" struture has been omitted
}
deriving (Eq, Show, Read, Generic)
instance FromJSON DRUser
instance FromJSONPayload DRUser where
parseJSONPayload r = r .: "user" >>= parseJSON
-- These field names are returned in ALL_CAPS... contrary to the docs.
-- Omitting this type for now, we don't expect to need it anyway.
-- data DRPermissions = DRPermissions {
-- scriptManager :: Bool
-- , powerUser :: Bool
-- , packageManager :: Bool
-- , administrator :: Bool
-- , basicUser :: Bool
-- }
-- deriving (Eq, Show, Read, Generic)
-- instance FromJSON DRPermissions
-- | Files in the repository, represented in responses
data RepoFile = RepoFile {
filename :: FilePath
, directory :: FilePath
, descr :: Text
, length :: Int
, authors :: [Text]
, shared :: Bool
, restricted :: Bool
, url :: Text -- TODO use URL type
-- , type :: Text -- application/octet-stream or text/plain, mostly
-- plus some version and access stuff which was not modelled
}
deriving (Eq, Show, Read, Generic)
instance FromJSON RepoFile
instance FromJSONPayload RepoFile where
parseJSONPayload r = r .: "repository" >>= (.: "file") >>= parseJSON
instance FromJSONPayload [RepoFile] where
parseJSONPayload r = r .: "repository" >>= (.: "files") >>= parseJSON
-- | "script" alias for repository files, to parse payload in responses
newtype RepoScript = Script RepoFile
deriving (Eq, Show, Read, Generic)
instance FromJSON RepoScript where
parseJSON o = Script <$> parseJSON o
instance FromJSONPayload [RepoScript] where
parseJSONPayload r = r .: "repository" >>=
(.: "scripts") >>= \fs ->
map Script <$> (parseJSON fs:: Parser [RepoFile])
-- | Files in a project directory
data ProjectFile = ProjectFile {
filename :: FilePath
, descr :: Maybe Text
, length :: Int
, url :: Text -- TODO use URL type
-- , type :: Text -- application/octet-stream or text/plain, mostly
-- ... more fields: lastmodified, category, not implemented
}
deriving (Eq, Show, Read, Generic)
instance FromJSON ProjectFile
instance FromJSONPayload ProjectFile where
parseJSONPayload r = r .: "directory" >>= (.: "file") >>= parseJSON
-- project directory listing:
instance FromJSONPayload [ProjectFile] where
parseJSONPayload r = r .: "directory" >>= (.: "files") >>= parseJSON
-- | Result of executing code (literal or a script)
data ExecResult = ExecResult {
interrupted :: Bool
, project :: Maybe DRProject
, objects :: Map Text RObject
, execution :: DRExecution
-- , files :: [RepoFile]
}
deriving (Eq, Show, Read, Generic)
instance FromJSON ExecResult
instance FromJSONPayload ExecResult where
parseJSONPayload r = do
interrupted <- r .: "interrupted"
project <- r .:? "project" >>= \maybeP ->
case maybeP of
Just p -> fmap Just (parseJSON p)
Nothing -> return Nothing
execution <- r .: "execution" >>= parseJSON
objects <- r .: "workspace" >>= (.: "objects") >>= parseObjects
return ExecResult{..}
data DRProject = DRProject {
project :: Text
, name :: Maybe Text
, descr :: Maybe Text
-- , a few more...
}
deriving (Eq, Show, Read, Generic)
instance FromJSON DRProject
instance FromJSONPayload DRProject where
parseJSONPayload r = r .: "project" >>= parseJSON
data DRExecution = DRExecution {
console :: Text
, code :: Text
-- , and more things
}
deriving (Eq, Show, Read, Generic)
instance FromJSON DRExecution
-- | Result of listing a workspace (resembles ExecResult)
data WSObjects = WSObjects {
project :: DRProject -- compulsory in WS listing
, objects :: Map Text RObject
}
deriving (Eq, Show, Read, Generic)
instance FromJSON WSObjects
instance FromJSONPayload WSObjects where
parseJSONPayload r = do
project <- r .: "project" >>= parseJSON
objects <- r .: "workspace" >>= (.: "objects") >>= parseObjects
return WSObjects{..}
--------------------------------------------------
-- input data (will be used in ReqBody FormUrlEncoded)
------------------------------------------------------------
-- helpers
-- | include optional fields if present, in ToFormUrlEncoded. Field content is
-- transformed to text
optional :: (value -> Text) -> (Text, Maybe value) -> Maybe (Text, Text)
optional convert (name, Nothing) = Nothing
optional convert (name, Just v) = Just (name, convert v)
-- TODO could use a type class for value -> Text
-- | optional text fields (passed through directly if present)
optionalText = optional id
optionalBool = optional (\b -> if b then "true" else "false")
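-- Illustrative behaviour (values invented):
--
-- > optionalBool ("disableautosave", Just True) == Just ("disableautosave", "true")
-- > optionalText ("usercookie", Nothing)        == Nothing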
-- a little CPP hack to avoid repeating myself all the time
-- #define FIELD( S ) ( #S , S)
-- alas, GHC does not support stringification...
----------------------------------------
-- form data types, replicating required deployR input format exactly
-- tired of writing this over and over again; we always use JSON anyway
data Format = FormatJSON deriving (Eq, Generic) -- will this work?
instance Show Format where show _ = "json"
instance Read Format where readsPrec _ input
| "json" `isPrefixOf` input
= [(FormatJSON, drop 4 input)]
| otherwise = []
instance ToJSON Format
instance ToHttpApiData Format
where toUrlPiece = T.pack . show
toQueryParam = toUrlPiece
-- | even simpler: include this in every POST and request
formatEncoded :: (Text, Text)
formatEncoded = ("format", "json")
-- | user login, with password
data LoginData = LoginData {
format :: Format -- requires ghc-8 (duplicate record field names)
, username :: Text
, password :: Text
, disableautosave :: Maybe Bool
}
deriving (Eq, Show, Read, Generic)
instance ToJSON LoginData
instance ToFormUrlEncoded LoginData where
toFormUrlEncoded LoginData{..} =
formatEncoded:
[ ("username", username)
, ("password", password)
]
++ maybeToList (optionalBool ("disableautosave", disableautosave))
-- | user logout
data LogoutData = LogoutData {
format :: Format -- "json"
, usercookie :: Maybe Text -- not used
}
deriving (Eq, Show, Read, Generic)
instance ToJSON LogoutData
instance ToFormUrlEncoded LogoutData where
toFormUrlEncoded LogoutData{..} =
formatEncoded:
maybeToList (optionalText ("usercookie", usercookie))
-- | executing code in a project context
-- we have omitted a number of parameters here
data ExecCode = ExecCode {
format :: Format
, project :: Text
, code :: Text
-- pre-execution parameters
, inputs :: Maybe Text
-- on-execution parameters
, phantom :: Maybe Bool
, echooff :: Maybe Bool
, consoleoff :: Maybe Bool
, artifactsoff :: Maybe Bool
-- post-execution parameters
, robjects :: Maybe [Text]
}
deriving (Eq, Show, Read, Generic)
instance ToJSON ExecCode
instance ToFormUrlEncoded ExecCode where
toFormUrlEncoded ExecCode{..} =
formatEncoded :
concat
[[ ("project", project)
, ("code", code)
]
, maybeToList (optional (T.intercalate ",") ("robjects", robjects))
, catMaybes $ map optionalBool
[ ("phantom", phantom)
, ("echooff", echooff)
, ("consoleoff", consoleoff)
, ("artifactsoff", artifactsoff)
]
]
-- | executing a script from the repository in a project context
-- The API call would allow multiple scripts, or even external files, to be
-- executed in a chain. We limit the API to one script from the repository,
-- and omit a number of parameters here (as in ExecCode)
data ExecScript = ExecScript {
format :: Format
, project :: Text
, filename :: Text
, author :: Text
, directory :: Maybe Text
-- pre-execution parameters
, inputs :: Maybe Text
-- on-execution parameters
, phantom :: Maybe Bool
, echooff :: Maybe Bool
, consoleoff :: Maybe Bool
, artifactsoff :: Maybe Bool
-- post-execution parameters
, robjects :: Maybe [Text]
}
deriving (Eq, Show, Read, Generic)
instance ToJSON ExecScript
instance ToFormUrlEncoded ExecScript where
toFormUrlEncoded ExecScript{..} =
formatEncoded :
concat
[[ ("project", project)
, ("filename", filename)
, ("author", author) ]
, maybeToList (optionalText ("inputs", inputs))
, maybeToList (optionalText ("directory", directory))
, catMaybes $ map optionalBool [ ("phantom", phantom)
, ("echooff", echooff)
, ("consoleoff", consoleoff)
, ("artifactsoff", artifactsoff)
]
, maybeToList (optional (T.intercalate ",") ("robjects", robjects))
]
-- | specifying a project, for execute/flush, workspace/list,
-- directory/list in project API).
-- We only support flushing the entire execution history
data RqProject = RqProject {
format :: Format
, project :: Text
}
deriving (Eq, Show, Read, Generic)
instance ToJSON RqProject
instance ToFormUrlEncoded RqProject where
toFormUrlEncoded RqProject{..} =
[ formatEncoded, ("project", project) ]
-- | specifying a directory to be created (or deleted)
-- With some optional boolean fields, this could be used for update.
-- With a destination field, this could be used for move, copy, or rename
-- (none of this is implemented yet)
data RqDir = RqDir {
format :: Format
, directory :: Text
}
deriving (Eq, Show, Read, Generic)
instance ToJSON RqDir
instance ToFormUrlEncoded RqDir where
toFormUrlEncoded RqDir{..} =
[ formatEncoded, ("directory", directory) ]
| jberthold/deployR-hs | src/DeployR/Types.hs | apache-2.0 | 13,810 | 0 | 16 | 3,574 | 2,677 | 1,511 | 1,166 | 238 | 2 |
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module LambdaQuest.SystemF.Type
(TypeT(..,TyInt,TyReal,TyBool,TyUnit),Type
,TermT(..),Term
,isValue
,BindingT(..),Binding
,getTypeFromContext
,module LambdaQuest.Common.Type
) where
import LambdaQuest.Common.Type hiding (genPrimTypeOf)
import Data.Void
data TypeT a = TyPrim !PrimType
| TyArr (TypeT a) (TypeT a)
| TyRef !Int String -- type variable (de Bruijn index)
| TyAll String (TypeT a) -- type abstraction (forall)
| TyExtra !a
deriving (Show,Functor,Foldable,Traversable)
pattern TyInt = TyPrim PTyInt
pattern TyReal = TyPrim PTyReal
pattern TyBool = TyPrim PTyBool
pattern TyUnit = TyPrim PTyUnit
data TermT a = TPrimValue !PrimValue -- primitive value
| TAbs String (TypeT a) (TermT a) -- lambda abstraction
| TTyAbs String (TermT a) -- type abstraction
| TRef !Int String -- variable (de Bruijn index)
| TApp (TermT a) (TermT a) -- function application
| TTyApp (TermT a) (TypeT a) -- type application
| TLet String (TermT a) (TermT a) -- let-in
| TIf (TermT a) (TermT a) (TermT a) -- if-then-else
deriving (Show,Functor,Foldable,Traversable)
data BindingT a = VarBind String (TypeT a) -- variable binding (name, type)
| TyVarBind String -- type variable binding (name)
| AnonymousBind -- placeholder for function type
deriving (Eq,Show)
type Type = TypeT Void
type Term = TermT Void
type Binding = BindingT Void
isValue :: TermT a -> Bool
isValue t = case t of
TPrimValue _ -> True
TAbs _ _ _ -> True
TTyAbs _ _ -> True
TApp (TPrimValue (PVBuiltinBinary _)) x -> isValue x -- partial application
_ -> False
instance (Eq a) => Eq (TypeT a) where
TyPrim p == TyPrim p' = p == p'
TyArr s t == TyArr s' t' = s == s' && t == t'
TyRef i _ == TyRef i' _ = i == i'
TyAll _ t == TyAll _ t' = t == t' -- ignore type variable name
TyExtra x == TyExtra x' = x == x'
_ == _ = False
instance (Eq a) => Eq (TermT a) where
TPrimValue p == TPrimValue p' = p == p'
TAbs _ t x == TAbs _ t' x' = t == t' && x == x' -- ignore variable name
TTyAbs _ x == TTyAbs _ x' = x == x' -- ignore type variable name
TRef i _ == TRef i' _ = i == i'
TApp s t == TApp s' t' = s == s' && t == t'
TTyApp s t == TTyApp s' t' = s == s' && t == t'
TLet _ s t == TLet _ s' t' = s == s' && t == t'
TIf s t u == TIf s' t' u' = s == s' && t == t' && u == u'
_ == _ = False
getTypeFromContext :: (Show a) => [BindingT a] -> Int -> TypeT a
getTypeFromContext ctx i
| i < length ctx = case ctx !! i of
VarBind _ t -> t
b -> error ("TRef: expected a variable binding, found " ++ show b)
| otherwise = error "TRef: index out of bounds"
| minoki/LambdaQuest | src/LambdaQuest/SystemF/Type.hs | bsd-3-clause | 3,129 | 2 | 12 | 1,013 | 1,083 | 551 | 532 | 79 | 5 |
----------------------------------------------------------------------------
-- |
-- Module : Language.Core.Interpreter.Util
-- Copyright : (c) Carlos López-Camey, University of Freiburg
-- License : BSD-3
--
-- Maintainer : [email protected]
-- Stability : stable
--
--
-- Useful combinators to use in Language.Core.Interpreter or even
-- Language.Core.Interpreter.Libraries.{GHC.*}
--
-- Show instances for data types in Language.Core.Interpreter.Structures
-----------------------------------------------------------------------------
module Language.Core.Interpreter.Util where
--------------------------------------------------------------------------------
-- base type funs
import Data.Either(partitionEithers,rights)
import Data.List(findIndices,intersperse)
import Prelude hiding (showList)
--------------------------------------------------------------------------------
import DART.CmdLine(debugM)
import DART.Util.StringUtils(separateWithSpaces,separateWithCommas)
import Language.Core.Interpreter
import Language.Core.Interpreter.Apply
--import Language.Core.Interpreter.Structures
-- | A function that ignores its parameters and returns a value
-- the parenthesis in the signature have no effects and are only here to understand better
return' :: Value -> (Id -> Env -> IM Value)
return' v = \_ -> \_ -> return v
-- | Some values contain heap addresses; in order to pretty-print such a value
-- we must look up the actual value in the heap
showValue :: Value -> IM String
showValue (TyConApp tc ptrs) = do
--io $ putStrLn "tyconapp.."
showTyConApp tc ptrs
showValue val = do
io $ putStrLn "not tyconapp?"
{- case val of
tc@(TyCon tycon ty_name) -> do
io . putStrLn $ "TyCon " ++ (show tycon) ++ ty_name
io . putStrLn . show$ tc
io $ putStrLn "1"
SumType _ -> io $ putStrLn "2"
MkListOfValues _ -> io $ putStrLn "3"
FreeTypeVariable _ -> io $ putStrLn "4"
Pointer _ -> io $ putStrLn "5"
Pair _ _ -> io $ putStrLn "6"
Fun _ _ -> io $ putStrLn "7"
String _ -> io $ putStrLn "8"
Char _ -> io $ putStrLn "9"
Boolean _ -> io $ putStrLn "10"
Rat _ -> io $ putStrLn "11"
Wrong _ -> io $ putStrLn "41"
TyConApp _ b -> io $ putStrLn " ;)"
Num _ -> io $ putStrLn "12"
_ -> do io $ putStrLn "Otra cosa" -}
return $ show val
-- | Function called by showValue that handles the showing of a type constructor application.
-- Special cases include the List and the Tuple constructors.
-- As the semantics dictate, showing forces evaluation of the data constructor's arguments
showTyConApp :: DataCon-> [Pointer] -> IM String
showTyConApp (MkDataCon "ghc-prim:GHC.Types.[]" _ _) [] = return "[]" -- empty list
showTyConApp (MkDataCon "ghc-prim:GHC.Types.:" (ty:_) _) ptrs = showList ty ptrs
showTyConApp (MkDataCon "ghc-prim:GHC.Types.:" [] ((_,tvar_ty):_)) ptrs = showList tvar_ty ptrs
showTyConApp (MkDataCon "ghc-prim:GHC.Tuple.Z2T" _ _) [x,y] = do
x_str <- evalPtr x >>= showValue
y_str <- evalPtr y >>= showValue
return $ show (x_str,y_str)
showTyConApp tycon@(MkDataCon datacon_name' signature applied_types) ptrs = do
debugM $ " Constructor " ++ (show datacon_name') ++ " , signature: " ++ (show signature) ++ ", applied: " ++ (show applied_types) ++ ", to pointers: " ++ show ptrs
io $ putStrLn " Now"
vals <- mapM evalPtr ptrs -- [Value]
mapM (showValue) vals >>= debugM . show
whnf_strings <- mapM showValue' vals -- [String]
return $ let
tycon_name = idName datacon_name'
arg_strings = separateWithSpaces whnf_strings
in tycon_name ++ " " ++ arg_strings
where
    -- Should we wrap a value in parentheses? Wrap the tycon apps! (iff they have applied vals)
showValue' :: Value -> IM String
showValue' t@(TyConApp tycon []) = return . idName . datacon_name $ tycon
showValue' t@(TyConApp _ _) = showValue t >>= return . wrapInParenthesis
showValue' v = showValue v
evalPtr :: Pointer -> IM Value
evalPtr = flip eval []
-- | Function in charge of showing the application of the type constructor "ghc-prim:GHC.Types.:"
showList :: Ty -> [Pointer] -> IM String
showList ty ptrs = do
io $ putStrLn " LIST"
elem_strs <- mapM (showPtr ?? showValue) ptrs
case ty of
Tvar("ghc-prim:GHC.Types.Char") -> return $ "\"" ++ map (!! 1) elem_strs ++ "\""
_ -> return $ "[" ++ separateWithCommas elem_strs ++ "]"
-- If we find an empty list, we ought not show it as [] but rather as the empty string
where
-- If we find another list, don't show the []
showValue' :: Value -> IM String
showValue' t@(TyConApp (MkDataCon "ghc-prim:GHC.Types.[]" _ _) []) = return ""
showValue' t@(TyConApp (MkDataCon "ghc-prim:GHC.Types.[]" _ _) [ptr]) = (showPtr ?? showValue') ptr
showValue' t@(TyConApp (MkDataCon "ghc-prim:GHC.Types.:" _ _) ptrs) = mapM (showPtr ?? showValue') ptrs >>= return . separateWithSpaces
showValue' t@(TyCon (MkDataCon "ghc-prim:GHC.Types.[]" [] _) "ghc-prim:GHC.Types.[]")= return ""
showValue' v = showValue v
-- from the operators of the package `lens`
(??) :: Functor f => f (a -> b) -> a -> f b
fab ?? a = fmap ($ a) fab
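-- Behaviour sketch (illustrative, using the list functor):
--
-- > [(+1), (*2)] ?? 10 == [11, 20]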
showPtr :: Pointer -> (Value -> IM String) -> IM String
showPtr ptr showValue' = evalPtr ptr >>= showValue'
wrapInParenthesis :: String -> String
wrapInParenthesis s = "(" ++ s ++ ")"
-- | Take a qualified name and return only its last name. E.g. idName "main.Module.A" = "A"
idName :: Id -> String
idName id = let
name = drop (lastDotIndex id + 1) id -- name with a possible parenthesis at the end
in case name of
[] -> id
":" -> "(:)"
_ -> if (last name == ')')
then init name
else name
where
isDot = ((==) '.')
dotIndexes = findIndices isDot
lastDotIndex s =
case dotIndexes s of
[] -> 0
idxs -> last idxs
-- last . dotIndexes
| kmels/dart-haskell | src/Language/Core/Interpreter/Util.hs | bsd-3-clause | 5,970 | 0 | 15 | 1,255 | 1,263 | 656 | 607 | 74 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module ETA.CodeGen.Monad
(CgEnv(..),
CgState(..),
CodeGen(..),
crashDoc,
debugDoc,
printDoc,
debugState,
debug,
withSequel,
emit,
initCg,
getCgLoc,
getCodeWithResult,
newTemp,
newIdLoc,
peekNextLocal,
setNextLocal,
newLocal,
newLabel,
setNextLabel,
getSequel,
getSelfLoop,
setSuperClass,
getSuperClass,
setClosureClass,
withSelfLoop,
withMethod,
getModClass,
getClass,
addBinding,
addBindings,
setBindings,
printBindings,
defineMethod,
defineMethods,
defineField,
defineFields,
getCgIdInfo,
newTypeClosure,
newExportedClosure,
newHiddenClosure,
newClosure,
classFromCgState,
runCodeGen,
addInitStep,
forkClosureBody,
forkLneBody,
forkAlts,
unimplemented,
getDynFlags)
where
import ETA.Main.DynFlags
import ETA.BasicTypes.Module
import ETA.BasicTypes.VarEnv
import ETA.BasicTypes.Id
import ETA.BasicTypes.Name
import ETA.Utils.Outputable hiding ((<>))
import ETA.Utils.FastString
import ETA.Types.TyCon
import Data.Monoid((<>))
import Data.List
import Data.Maybe (fromMaybe)
import Data.Text hiding (foldl, length, concatMap, map, intercalate)
import Control.Monad (liftM, ap, when, forM)
import Control.Monad.State (MonadState(..), get, gets, modify)
import Control.Monad.Reader (MonadReader(..), ask, asks, local)
import Control.Monad.IO.Class
import qualified Data.ByteString.Lazy as B
import Codec.JVM
import ETA.CodeGen.Types
import ETA.CodeGen.Closure
import ETA.CodeGen.Name
import ETA.CodeGen.ArgRep
import ETA.Debug
import ETA.Util
import ETA.Utils.Digraph
data CgEnv =
CgEnv { cgQClassName :: !Text
, cgModule :: !Module
, cgDynFlags :: !DynFlags
, cgSequel :: !Sequel
, cgSelfLoop :: !(Maybe SelfLoopInfo) }
data CgState =
CgState { cgBindings :: !CgBindings
-- Accumulating
, cgCompiledClosures :: ![ClassFile]
, cgClassInitCode :: ![Node FieldRef Code]
-- Top-level definitions
, cgAccessFlags :: [AccessFlag]
, cgMethodDefs :: ![MethodDef]
, cgFieldDefs :: ![FieldDef]
, cgClassName :: !Text
, cgSuperClassName :: !(Maybe Text)
-- Current method
, cgCode :: !Code
, cgNextLocal :: Int
, cgNextLabel :: Int }
instance Show CgState where
show CgState {..} = "cgClassName: " ++ show cgClassName ++ "\n"
++ "cgClassInitCode: " ++ show cgClassInitCode ++ "\n"
++ "cgMethodDefs: " ++ show cgMethodDefs ++ "\n"
++ "cgFieldDefs: " ++ show cgFieldDefs ++ "\n"
++ "cgSuperClassName: " ++ show cgSuperClassName ++ "\n"
++ "cgCompiledClosures: \n" ++ (intercalate "\n" . map show $ cgCompiledClosures)
newtype CodeGen a = CG { unCG :: CgEnv -> CgState -> IO (CgState, a) }
instance Functor CodeGen where
fmap = liftM
instance Applicative CodeGen where
pure = return
(<*>) = ap
instance Monad CodeGen where
{-# INLINE return #-}
return x = CG $ \_ s -> return (s, x)
{-# INLINE (>>=) #-}
m >>= f = CG $ \e s -> do
(!s0, !x) <- unCG m e s
unCG (f x) e s0
instance MonadState CgState CodeGen where
{-# INLINE state #-}
state action = CG $ \_ s -> case action s of
(!a, !s') -> return (s', a)
instance MonadReader CgEnv CodeGen where
{-# INLINE ask #-}
ask = CG $ \env s -> return (s, env)
{-# INLINE local #-}
local f action = CG $ \env s -> unCG action (f env) s
instance HasModule CodeGen where
getModule = asks cgModule
instance HasDynFlags CodeGen where
getDynFlags = asks cgDynFlags
instance MonadIO CodeGen where
{-# INLINE liftIO #-}
liftIO io = CG $ \_ s -> io >>= (\a -> return (s, a))
initCg :: DynFlags -> Module -> (CgEnv, CgState)
initCg dflags mod =
(CgEnv { cgModule = mod
, cgQClassName = className
, cgDynFlags = dflags
, cgSequel = Return
, cgSelfLoop = Nothing },
CgState { cgBindings = emptyVarEnv
, cgCode = mempty
, cgAccessFlags = [Public, Super]
, cgMethodDefs = []
, cgFieldDefs = []
, cgClassInitCode = mempty
, cgClassName = className
, cgCompiledClosures = []
, cgSuperClassName = Nothing
, cgNextLocal = 0
, cgNextLabel = 0 })
where className = moduleJavaClass mod
emit :: Code -> CodeGen ()
emit code = modify $ \s@CgState { cgCode } -> s { cgCode = cgCode <> code }
peekNextLocal :: CodeGen Int
peekNextLocal = gets cgNextLocal
peekNextLabel :: CodeGen Int
peekNextLabel = gets cgNextLabel
newLabel :: CodeGen Label
newLabel = do
next <- peekNextLabel
modify $ \s@CgState { cgNextLabel } ->
s { cgNextLabel = cgNextLabel + 1}
return $ mkLabel next
newLocal :: FieldType -> CodeGen Int
newLocal ft = do
next <- peekNextLocal
modify $ \s@CgState { cgNextLocal } ->
s { cgNextLocal = cgNextLocal + fieldSz}
return next
where fieldSz = fieldSize ft
setNextLocal :: Int -> CodeGen ()
setNextLocal n = modify $ \s -> s { cgNextLocal = n }
setNextLabel :: Int -> CodeGen ()
setNextLabel n = modify $ \s -> s { cgNextLabel = n }
getMethodCode :: CodeGen Code
getMethodCode = gets cgCode
setMethodCode :: Code -> CodeGen ()
setMethodCode code = modify $ \s -> s { cgCode = code }
getClass :: CodeGen Text
getClass = gets cgClassName
getModClass :: CodeGen Text
getModClass = asks cgQClassName
getInitCode :: CodeGen Code
getInitCode = gets (foldMap (\(a, _, _) -> a) . flattenSCCs . stronglyConnCompG . graphFromEdgedVertices . cgClassInitCode)
getBindings :: CodeGen CgBindings
getBindings = gets cgBindings
setBindings :: CgBindings -> CodeGen ()
setBindings bindings = modify $ \s -> s { cgBindings = bindings }
getCgIdInfo :: Id -> CodeGen CgIdInfo
getCgIdInfo id = do
localBindings <- getBindings
case lookupVarEnv localBindings id of
Just info -> return info
Nothing -> do
curMod <- getModule
let name = idName id
-- TODO: Change this back.
let mod = fromMaybe (pprPanic "getCgIdInfo: no module" (ppr id)) $ nameModule_maybe name
--let mod = fromMaybe curMod $ nameModule_maybe name
dflags <- getDynFlags
if mod /= curMod then return . mkCgIdInfo dflags id $ mkLFImported id
else return . mkCgIdInfo dflags id $ mkLFImported id
-- TODO: Change this back.
-- crashDoc $ str "getCgIdInfo[not external name]:" <+> ppr id
printBindings :: CodeGen ()
printBindings = do
bindings <- getBindings
debugDoc $ str "printBindings" <+> ppr bindings
addBinding :: CgIdInfo -> CodeGen ()
addBinding cgIdInfo = do
bindings <- getBindings
setBindings $ extendVarEnv bindings (cgId cgIdInfo) cgIdInfo
addBindings :: [CgIdInfo] -> CodeGen ()
addBindings newCgIdInfos = do
bindings <- getBindings
let newBindings = foldl
(\binds info -> extendVarEnv binds (cgId info) info)
bindings
newCgIdInfos
setBindings newBindings
mergeCompiledClosures :: [ClassFile] -> CodeGen ()
mergeCompiledClosures classFiles = modify $ \s@CgState{..} ->
s { cgCompiledClosures = classFiles ++ cgCompiledClosures }
addCompiledClosure :: ClassFile -> CodeGen ()
addCompiledClosure classFile = modify $ \s@CgState{..} ->
s { cgCompiledClosures = classFile : cgCompiledClosures }
defineMethod :: MethodDef -> CodeGen ()
defineMethod md = modify $ \s@CgState{..} ->
s { cgMethodDefs = md : cgMethodDefs }
defineMethods :: [MethodDef] -> CodeGen ()
defineMethods mds = modify $ \s@CgState{..} ->
s { cgMethodDefs = mds ++ cgMethodDefs }
defineField :: FieldDef -> CodeGen ()
defineField md = modify $ \s@CgState{..} ->
s { cgFieldDefs = md : cgFieldDefs }
defineFields :: [FieldDef] -> CodeGen ()
defineFields md = modify $ \s@CgState{..} ->
s { cgFieldDefs = md ++ cgFieldDefs }
newExportedClosure, newHiddenClosure
:: Text
-> Text
-> CodeGen a
-> CodeGen (a, CgState)
newExportedClosure = newClosure [Public, Super, Final]
newHiddenClosure = newClosure [Private, Super, Final]
newTypeClosure
:: Text
-> Text
-> CodeGen ((), CgState)
newTypeClosure thisClass superClass =
newClosure [Public, Abstract, Super] thisClass superClass $
defineMethod $ mkDefaultConstructor thisClass superClass
newClosure
:: [AccessFlag]
-> Text
-> Text
-> CodeGen a
-> CodeGen (a, CgState)
newClosure accessFlags clName superClassName genCode =
newClosureGeneric $ do
setAccessFlags accessFlags
setClosureClass clName
setSuperClass superClassName
genCode
setAccessFlags :: [AccessFlag] -> CodeGen ()
setAccessFlags accessFlags = modify $ \s -> s { cgAccessFlags = accessFlags }
setSuperClass :: Text -> CodeGen ()
setSuperClass superClassName =
modify $ \s -> s { cgSuperClassName = Just superClassName }
-- NOTE: We make an assumption that we never directly derive from
-- java.lang.Object
getSuperClass :: CodeGen Text
getSuperClass = fmap (expectJust "getSuperClass") . gets $ cgSuperClassName
setClosureClass :: Text -> CodeGen ()
setClosureClass clName = do
modClass <- getModClass
let qClName = qualifiedName modClass clName
modify $ \s -> s { cgClassName = qClName }
-- NOTE: Changes made to class generation state are forgotten after
-- the body is executed
newClosureGeneric :: CodeGen a -> CodeGen (a, CgState)
newClosureGeneric genCode = do
state0@CgState
{ cgAccessFlags = a
, cgMethodDefs = b
, cgFieldDefs = c
, cgClassName = d
, cgSuperClassName = e } <- get
-- TODO: Ensure the proper state is reset.
modify $ \s -> s { cgAccessFlags = [Public, Super, Final]
, cgMethodDefs = []
, cgFieldDefs = []
, cgClassName = mempty
, cgSuperClassName = Nothing }
result <- genCode
state1 <- get
let compiledClosure = classFromCgState state1
-- TODO: Ensure the state is restored properly
modify $ \s -> s { cgAccessFlags = a
, cgMethodDefs = b
, cgFieldDefs = c
, cgClassName = d
, cgSuperClassName = e }
addCompiledClosure compiledClosure
return (result, state1)
classFromCgState :: CgState -> ClassFile
classFromCgState CgState {..} =
mkClassFile java7 cgAccessFlags cgClassName cgSuperClassName []
cgFieldDefs cgMethodDefs
runCodeGen :: CgEnv -> CgState -> CodeGen a -> IO [ClassFile]
runCodeGen env state codeGenAction = do
let codeGenActionPlus = do
codeGenAction
modClass <- getModClass
initCode <- getInitCode
defineMethod $ mkMethodDef modClass
[Public, Static] "<clinit>" [] void (initCode <> vreturn)
(state'@CgState {..}, _) <- unCG codeGenActionPlus env state
-- NOTE: addInnerClasses is to ensure that any unused data types/closures
-- are added to the constant pool
let compiledModuleClass =
addInnerClasses cgCompiledClosures $
classFromCgState state'
return (compiledModuleClass : cgCompiledClosures)
addInitStep :: Node FieldRef Code -> CodeGen ()
addInitStep code = modify $ \s@CgState{..} ->
s { cgClassInitCode = cgClassInitCode <> [ code ] }
-- NOTE: New bindings generated by the body are forgotten after
-- the body is executed
forkClosureBody :: CodeGen a -> CodeGen (a, CgState)
forkClosureBody body =
local (\env -> env { cgSequel = Return
, cgSelfLoop = Nothing })
. newClosureGeneric $ do
oldBindings <- getBindings
result <- body
setBindings oldBindings
return result
withMethod :: [AccessFlag] -> Text -> [FieldType] -> ReturnType -> CodeGen () -> CodeGen MethodDef
withMethod accessFlags name fts rt body = do
oldCode <- getMethodCode
oldNextLocal <- peekNextLocal
oldNextLabel <- peekNextLabel
setMethodCode mempty
setNextLocal 2
setNextLabel 0
body
emit vreturn
clsName <- getClass
newCode <- getMethodCode
let methodDef = mkMethodDef clsName accessFlags name fts rt newCode
defineMethod methodDef
setMethodCode oldCode
setNextLocal oldNextLocal
setNextLabel oldNextLabel
return methodDef
withSelfLoop :: SelfLoopInfo -> CodeGen a -> CodeGen a
withSelfLoop selfLoopInfo =
local (\env -> env { cgSelfLoop = Just selfLoopInfo })
unimplemented :: String -> CodeGen a
unimplemented msg = do
liftIO . putStrLn $ "Not implemented: " ++ msg
return undefined
getSequel :: CodeGen Sequel
getSequel = asks cgSequel
getSelfLoop :: CodeGen (Maybe SelfLoopInfo)
getSelfLoop = asks cgSelfLoop
newTemp :: Bool -> FieldType -> CodeGen CgLoc
newTemp isClosure ft = do
n <- newLocal ft
return $ LocLocal isClosure ft n
-- TODO: Verify that this does what is intended
getCodeWithResult :: CodeGen a -> CodeGen (a, Code)
getCodeWithResult gen = do
state1 <- get
modify $ \s -> s { cgCode = mempty }
a <- gen
state2 <- get
put $ state2 { cgCode = cgCode state1 }
return (a, cgCode state2)
newIdLoc :: NonVoid Id -> CodeGen CgLoc
newIdLoc (NonVoid id) = newTemp (isGcPtrRep rep) (primRepFieldType rep)
where rep = idPrimRep id
getCgLoc :: NonVoid Id -> CodeGen CgLoc
getCgLoc (NonVoid id) = do
info <- getCgIdInfo id
return $ cgLocation info
forkAlts :: [(a, CodeGen ())] -> CodeGen [(a, Code)]
forkAlts alts =
forM alts $ \(val, altCode) -> do
code <- forkLneBody altCode
return (val, code)
withSequel :: Sequel -> CodeGen a -> CodeGen a
withSequel sequel = local (\env -> env { cgSequel = sequel, cgSelfLoop = Nothing })
forkLneBody :: CodeGen () -> CodeGen Code
forkLneBody body = do
oldBindings <- getBindings
oldCode <- getMethodCode
oldNextLocal <- peekNextLocal
setMethodCode mempty
body
newCode <- getMethodCode
setMethodCode oldCode
setNextLocal oldNextLocal
setBindings oldBindings
return newCode
debug :: String -> CodeGen ()
debug msg = do
dflags <- getDynFlags
when (verbosity dflags > 1) $
liftIO $ putStrLn msg
debugDoc :: SDoc -> CodeGen ()
debugDoc sdoc = do
dflags <- getDynFlags
when (verbosity dflags > 1) $
liftIO . putStrLn $ showSDocDump dflags sdoc
printDoc :: SDoc -> CodeGen ()
printDoc sdoc = do
dflags <- getDynFlags
liftIO . putStrLn $ showSDocDump dflags sdoc
debugState :: CodeGen ()
debugState = do
dflags <- getDynFlags
bindings <- getBindings
when (verbosity dflags > 1) $
debugDoc $ str "cgBindings: " <+> ppr bindings
crashDoc :: SDoc -> CodeGen a
crashDoc sdoc = do
debugDoc sdoc
error "crash"
| alexander-at-github/eta | compiler/ETA/CodeGen/Monad.hs | bsd-3-clause | 14,767 | 0 | 22 | 3,619 | 4,358 | 2,295 | 2,063 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude
myabs x = if x `gt` 0 then x else 0 `minus` x
absList xs = map myabs xs
prop1 = map (liquidAssertB . (`geq` 0)) $ absList $ map choose [1..]
numAbs x = if x > 0 then x else (0 - x)
numAbsList = map numAbs
prop2 = map (liquidAssertB . (>= 0)) $ numAbsList $ map choose [1..]
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/poslist.hs | bsd-3-clause | 386 | 0 | 10 | 93 | 160 | 90 | 70 | 9 | 2 |
-- | This primarily exports the CStorable typeclass, which may have its
-- methods automatically defaulted if it has a Generic instance.
-- Then, this instance can be transferred via the `Storable' constructor.
module Foreign.CStorable
(CStorable(..),
StorableWrap(..)
) where
import Foreign.CStorable.TypeClass
import Foreign.CStorable.BaseInstances
import Foreign.Storable
import Foreign.Ptr
-- | Applying the `Storable' constructor to something which is Storable
-- gives it a corresponding CStorable instance.
newtype StorableWrap a = Storable a
-- | Translates a Storable instance to a CStorable instance
instance (Storable a) => CStorable (StorableWrap a) where
cPeek p = fmap Storable $ peek (castPtr p)
cPoke p (Storable x) = poke (castPtr p) x
cAlignment (Storable x) = alignment x
cSizeOf (Storable x) = sizeOf x
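-- A minimal usage sketch (assumes a Storable element such as @CInt@ from
-- "Foreign.C.Types"; not part of this module):
--
-- > cSizeOf    (Storable (0 :: CInt)) == sizeOf    (0 :: CInt)
-- > cAlignment (Storable (0 :: CInt)) == alignment (0 :: CInt)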
| maurer/c-storable-deriving | Foreign/CStorable.hs | bsd-3-clause | 862 | 0 | 9 | 159 | 167 | 93 | 74 | 13 | 0 |
-- | The module is used as a convenient starting point. It re-exports:
--
-- * "Music.Diatonic.Note"
--
-- * "Music.Diatonic.Interval"
--
-- * "Music.Diatonic.Degree"
--
-- * "Music.Diatonic.Quality"
--
-- * "Music.Diatonic.Equivalence"
module Music.Diatonic (
module Music.Diatonic.Note,
module Music.Diatonic.Interval,
module Music.Diatonic.Degree,
module Music.Diatonic.Quality,
module Music.Diatonic.Equivalence
) where
import Music.Diatonic.Note
import Music.Diatonic.Interval
import Music.Diatonic.Quality
import Music.Diatonic.Equivalence
import Music.Diatonic.Degree
| xpika/music-diatonic | Music/Diatonic.hs | bsd-3-clause | 600 | 0 | 5 | 83 | 84 | 61 | 23 | 11 | 0 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.Trafo.Substitution
-- Copyright : [2012] Manuel M T Chakravarty, Gabriele Keller, Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.Trafo.Substitution (
-- * Renaming & Substitution
inline, substitute, compose,
-- * Weakening
weakenA, weakenEA, weakenFA,
weakenE, weakenFE,
weakenByA, weakenByEA, weakenByFA,
weakenByE, weakenByFE,
-- * Shrinking
shrinkE, shrinkFE,
shrinkA, shrinkAfun, shrinkOpenAcc,
-- * Rebuilding
rebuildA, rebuildAfun, rebuildOpenAcc,
rebuildE, rebuildEA,
rebuildFA,
) where
import Data.Array.Accelerate.AST
import Data.Array.Accelerate.Tuple
import Data.Array.Accelerate.Analysis.Match
import Data.Array.Accelerate.Array.Sugar ( Elt, Arrays )
import Prelude hiding ( exp )
-- NOTE: [Renaming and Substitution]
--
-- To do things like renaming and substitution, we need some operation on
-- variables that we push structurally through terms, applying to each variable.
-- We have a type preserving but environment changing operation:
--
-- v :: forall t. Idx env t -> f env' aenv t
--
-- The crafty bit is that 'f' might represent variables (for renaming) or terms
-- (for substitutions). The demonic forall, --- which is to say that the
-- quantifier is in a position which gives us obligation, not opportunity ---
-- forces us to respect type: when pattern matching detects the variable we care
-- about, happily we discover that it has the type we must respect. The demon is
-- not so free to mess with us as one might fear at first.
--
-- We then lift this to an operation which traverses terms and rebuild them
-- after applying 'v' to the variables:
--
-- rebuild v :: OpenExp env aenv t -> OpenExp env' aenv t
--
-- The Syntactic class tells us what we need to know about 'f' if we want to be
-- able to rebuild terms. In essence, the crucial functionality is to propagate
-- a class of operations on variables that is closed under shifting.
--
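-- As a hedged illustration (not part of the original note): instantiating
-- 'v' with a variable-to-variable function gives pure renaming, e.g.
--
-- > rebuildE (Var . SuccIdx)
-- >   :: PreOpenExp acc env aenv t -> PreOpenExp acc (env, s) aenv t
--
-- which is essentially how 'weakenE' below is implemented.
--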
infixr `compose`
infixr `substitute`
-- | Replace the first variable with the given expression. The environment
-- shrinks.
--
inline :: Elt t
=> PreOpenExp acc (env, s) aenv t
-> PreOpenExp acc env aenv s
-> PreOpenExp acc env aenv t
inline f g = rebuildE (subTop g) f
where
subTop :: Elt t => PreOpenExp acc env aenv s -> Idx (env, s) t -> PreOpenExp acc env aenv t
subTop s ZeroIdx = s
subTop _ (SuccIdx ix) = Var ix
-- | Replace an expression that uses the top environment variable with another.
-- The result of the first is let bound into the second.
--
substitute :: (Elt b, Elt c)
=> PreOpenExp acc (env, b) aenv c
-> PreOpenExp acc (env, a) aenv b
-> PreOpenExp acc (env, a) aenv c
substitute f g
| Var ZeroIdx <- g = f -- don't rebind an identity function
| otherwise = Let g $ rebuildE split f
where
split :: Elt c => Idx (env,b) c -> PreOpenExp acc ((env,a),b) aenv c
split ZeroIdx = Var ZeroIdx
split (SuccIdx ix) = Var (SuccIdx (SuccIdx ix))
-- | Composition of unary functions.
--
compose :: Elt c
=> PreOpenFun acc env aenv (b -> c)
-> PreOpenFun acc env aenv (a -> b)
-> PreOpenFun acc env aenv (a -> c)
compose (Lam (Body f)) (Lam (Body g)) = Lam . Body $ substitute f g
compose _ _ = error "compose: impossible evaluation"
-- NOTE: [Weakening]
--
-- Weakening is something we usually take for granted: every time you learn a
-- new word, old sentences still make sense. If a conclusion is justified by a
-- hypothesis, it is still justified if you add more hypotheses. Similarly, a
-- term remains in scope if you bind more (fresh) variables. Weakening is the
-- operation of shifting things from one scope to a larger scope in which new
-- things have become meaningful, but no old things have vanished.
--
-- When we use a named representation (or HOAS) we get weakening for free. But
-- in the de Bruijn representation weakening takes work: you have to shift all
-- variable references to make room for the new bindings.
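--
-- A hedged illustration (not part of the original note): under 'weakenE' the
-- term @Var ZeroIdx@ becomes @Var (SuccIdx ZeroIdx)@, i.e. the same variable
-- now refers past one additional binder.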
--
-- Functions to increase the scope of scalar or array environments of OpenAcc
-- tied expressions by a single index.
--
weakenA :: OpenAcc aenv t -> OpenAcc (aenv, s) t
weakenA = weakenByA SuccIdx
weakenE :: OpenExp env aenv t -> OpenExp (env, s) aenv t
weakenE = weakenByE SuccIdx
weakenEA :: OpenExp env aenv t -> OpenExp env (aenv,s) t
weakenEA = weakenByEA SuccIdx
weakenFA :: OpenFun env aenv t -> OpenFun env (aenv,s) t
weakenFA = weakenByFA SuccIdx
weakenFE :: OpenFun env aenv t -> OpenFun (env,s) aenv t
weakenFE = weakenByFE SuccIdx
-- Weakening functions parameterised by an index manipulation
--
weakenByA :: (forall t'. Idx aenv t' -> Idx aenv' t') -> OpenAcc aenv t -> OpenAcc aenv' t
weakenByA k = rebuildOpenAcc (Avar . k)
weakenByE :: (forall t'. Idx env t' -> Idx env' t') -> OpenExp env aenv t -> OpenExp env' aenv t
weakenByE k = rebuildE (Var . k)
weakenByEA :: (forall t'. Idx aenv t' -> Idx aenv' t') -> OpenExp env aenv t -> OpenExp env aenv' t
weakenByEA k = rebuildEA rebuildOpenAcc (Avar . k)
weakenByFA :: (forall t'. Idx aenv t' -> Idx aenv' t') -> OpenFun env aenv t -> OpenFun env aenv' t
weakenByFA k = rebuildFA rebuildOpenAcc (Avar . k)
weakenByFE :: (forall t'. Idx env t' -> Idx env' t') -> OpenFun env aenv t -> OpenFun env' aenv t
weakenByFE k = rebuildFE (Var . k)
-- Simultaneous Substitution ===================================================
--
-- Scalar expressions
-- ------------------
-- SEE: [Renaming and Substitution]
-- SEE: [Weakening]
--
class SyntacticExp f where
varIn :: Elt t => Idx env t -> f acc env aenv t
expOut :: Elt t => f acc env aenv t -> PreOpenExp acc env aenv t
weakenExp :: Elt t => f acc env aenv t -> f acc (env, s) aenv t
newtype IdxE (acc :: * -> * -> *) env aenv t = IE { unIE :: Idx env t }
instance SyntacticExp IdxE where
varIn = IE
expOut = Var . unIE
weakenExp = IE . SuccIdx . unIE
instance SyntacticExp PreOpenExp where
varIn = Var
expOut = id
weakenExp = rebuildE (weakenExp . IE)
shiftE
:: (SyntacticExp f, Elt t)
=> (forall t'. Elt t' => Idx env t' -> f acc env' aenv t')
-> Idx (env, s) t
-> f acc (env', s) aenv t
shiftE _ ZeroIdx = varIn ZeroIdx
shiftE v (SuccIdx ix) = weakenExp (v ix)
rebuildE
:: SyntacticExp f
=> (forall t'. Elt t' => Idx env t' -> f acc env' aenv t')
-> PreOpenExp acc env aenv t
-> PreOpenExp acc env' aenv t
rebuildE v exp =
case exp of
Let a b -> Let (rebuildE v a) (rebuildE (shiftE v) b)
Var ix -> expOut (v ix)
Const c -> Const c
Tuple tup -> Tuple (rebuildTE v tup)
Prj tup e -> Prj tup (rebuildE v e)
IndexNil -> IndexNil
IndexCons sh sz -> IndexCons (rebuildE v sh) (rebuildE v sz)
IndexHead sh -> IndexHead (rebuildE v sh)
IndexTail sh -> IndexTail (rebuildE v sh)
IndexAny -> IndexAny
IndexSlice x ix sh -> IndexSlice x (rebuildE v ix) (rebuildE v sh)
IndexFull x ix sl -> IndexFull x (rebuildE v ix) (rebuildE v sl)
ToIndex sh ix -> ToIndex (rebuildE v sh) (rebuildE v ix)
FromIndex sh ix -> FromIndex (rebuildE v sh) (rebuildE v ix)
Cond p t e -> Cond (rebuildE v p) (rebuildE v t) (rebuildE v e)
Iterate n f x -> Iterate (rebuildE v n) (rebuildE (shiftE v) f) (rebuildE v x)
PrimConst c -> PrimConst c
PrimApp f x -> PrimApp f (rebuildE v x)
Index a sh -> Index a (rebuildE v sh)
LinearIndex a i -> LinearIndex a (rebuildE v i)
Shape a -> Shape a
ShapeSize sh -> ShapeSize (rebuildE v sh)
Intersect s t -> Intersect (rebuildE v s) (rebuildE v t)
rebuildTE
:: SyntacticExp f
=> (forall t'. Elt t' => Idx env t' -> f acc env' aenv t')
-> Tuple (PreOpenExp acc env aenv) t
-> Tuple (PreOpenExp acc env' aenv) t
rebuildTE v tup =
case tup of
NilTup -> NilTup
SnocTup t e -> rebuildTE v t `SnocTup` rebuildE v e
rebuildFE
:: SyntacticExp f
=> (forall t'. Elt t' => Idx env t' -> f acc env' aenv t')
-> PreOpenFun acc env aenv t
-> PreOpenFun acc env' aenv t
rebuildFE v fun =
case fun of
Body e -> Body (rebuildE v e)
Lam f -> Lam (rebuildFE (shiftE v) f)
-- Array expressions
-- -----------------
type RebuildAcc acc =
forall aenv aenv' f a. SyntacticAcc f
=> (forall a'. Arrays a' => Idx aenv a' -> f acc aenv' a')
-> acc aenv a
-> acc aenv' a
class SyntacticAcc f where
avarIn :: Arrays t => Idx aenv t -> f acc aenv t
accOut :: Arrays t => f acc aenv t -> PreOpenAcc acc aenv t
weakenAcc :: Arrays t => RebuildAcc acc -> f acc aenv t -> f acc (aenv, s) t
newtype IdxA (acc :: * -> * -> *) aenv t = IA { unIA :: Idx aenv t }
instance SyntacticAcc IdxA where
avarIn = IA
accOut = Avar . unIA
weakenAcc _ = IA . SuccIdx . unIA
instance SyntacticAcc PreOpenAcc where
avarIn = Avar
accOut = id
weakenAcc k = rebuildA k (weakenAcc k . IA)
rebuildOpenAcc
:: SyntacticAcc f
=> (forall t'. Arrays t' => Idx aenv t' -> f OpenAcc aenv' t')
-> OpenAcc aenv t
-> OpenAcc aenv' t
rebuildOpenAcc v (OpenAcc acc) = OpenAcc (rebuildA rebuildOpenAcc v acc)
shiftA
:: (SyntacticAcc f, Arrays t)
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> Idx (aenv, s) t
-> f acc (aenv', s) t
shiftA _ _ ZeroIdx = avarIn ZeroIdx
shiftA k v (SuccIdx ix) = weakenAcc k (v ix)
rebuildA
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> PreOpenAcc acc aenv t
-> PreOpenAcc acc aenv' t
rebuildA rebuild v acc =
case acc of
Alet a b -> Alet (rebuild v a) (rebuild (shiftA rebuild v) b)
Avar ix -> accOut (v ix)
Atuple tup -> Atuple (rebuildATA rebuild v tup)
Aprj tup a -> Aprj tup (rebuild v a)
Apply f a -> Apply f (rebuild v a)
Acond p t e -> Acond (rebuildEA rebuild v p) (rebuild v t) (rebuild v e)
Use a -> Use a
Unit e -> Unit (rebuildEA rebuild v e)
Reshape e a -> Reshape (rebuildEA rebuild v e) (rebuild v a)
Generate e f -> Generate (rebuildEA rebuild v e) (rebuildFA rebuild v f)
Transform sh ix f a -> Transform (rebuildEA rebuild v sh) (rebuildFA rebuild v ix) (rebuildFA rebuild v f) (rebuild v a)
Replicate sl slix a -> Replicate sl (rebuildEA rebuild v slix) (rebuild v a)
Slice sl a slix -> Slice sl (rebuild v a) (rebuildEA rebuild v slix)
Map f a -> Map (rebuildFA rebuild v f) (rebuild v a)
ZipWith f a1 a2 -> ZipWith (rebuildFA rebuild v f) (rebuild v a1) (rebuild v a2)
Fold f z a -> Fold (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a)
Fold1 f a -> Fold1 (rebuildFA rebuild v f) (rebuild v a)
FoldSeg f z a s -> FoldSeg (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a) (rebuild v s)
Fold1Seg f a s -> Fold1Seg (rebuildFA rebuild v f) (rebuild v a) (rebuild v s)
Scanl f z a -> Scanl (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a)
Scanl' f z a -> Scanl' (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a)
Scanl1 f a -> Scanl1 (rebuildFA rebuild v f) (rebuild v a)
Scanr f z a -> Scanr (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a)
Scanr' f z a -> Scanr' (rebuildFA rebuild v f) (rebuildEA rebuild v z) (rebuild v a)
Scanr1 f a -> Scanr1 (rebuildFA rebuild v f) (rebuild v a)
Permute f1 a1 f2 a2 -> Permute (rebuildFA rebuild v f1) (rebuild v a1) (rebuildFA rebuild v f2) (rebuild v a2)
Backpermute sh f a -> Backpermute (rebuildEA rebuild v sh) (rebuildFA rebuild v f) (rebuild v a)
Stencil f b a -> Stencil (rebuildFA rebuild v f) b (rebuild v a)
Stencil2 f b1 a1 b2 a2
-> Stencil2 (rebuildFA rebuild v f) b1 (rebuild v a1) b2 (rebuild v a2)
Foreign ff afun as -> Foreign ff afun (rebuild v as)
-- Rebuilding array computations
--
rebuildAfun
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> PreOpenAfun acc aenv t
-> PreOpenAfun acc aenv' t
rebuildAfun k v afun =
case afun of
Abody b -> Abody (k v b)
Alam f -> Alam (rebuildAfun k (shiftA k v) f)
rebuildATA
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> Atuple (acc aenv) t
-> Atuple (acc aenv') t
rebuildATA k v atup =
case atup of
NilAtup -> NilAtup
SnocAtup t a -> rebuildATA k v t `SnocAtup` k v a
-- Rebuilding scalar expressions
--
rebuildEA
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> PreOpenExp acc env aenv t
-> PreOpenExp acc env aenv' t
rebuildEA k v exp =
case exp of
Let a b -> Let (rebuildEA k v a) (rebuildEA k v b)
Var ix -> Var ix
Const c -> Const c
Tuple tup -> Tuple (rebuildTA k v tup)
Prj tup e -> Prj tup (rebuildEA k v e)
IndexNil -> IndexNil
IndexCons sh sz -> IndexCons (rebuildEA k v sh) (rebuildEA k v sz)
IndexHead sh -> IndexHead (rebuildEA k v sh)
IndexTail sh -> IndexTail (rebuildEA k v sh)
IndexAny -> IndexAny
IndexSlice x ix sh -> IndexSlice x (rebuildEA k v ix) (rebuildEA k v sh)
IndexFull x ix sl -> IndexFull x (rebuildEA k v ix) (rebuildEA k v sl)
ToIndex sh ix -> ToIndex (rebuildEA k v sh) (rebuildEA k v ix)
FromIndex sh ix -> FromIndex (rebuildEA k v sh) (rebuildEA k v ix)
Cond p t e -> Cond (rebuildEA k v p) (rebuildEA k v t) (rebuildEA k v e)
Iterate n f x -> Iterate (rebuildEA k v n) (rebuildEA k v f) (rebuildEA k v x)
PrimConst c -> PrimConst c
PrimApp f x -> PrimApp f (rebuildEA k v x)
Index a sh -> Index (k v a) (rebuildEA k v sh)
LinearIndex a i -> LinearIndex (k v a) (rebuildEA k v i)
Shape a -> Shape (k v a)
ShapeSize sh -> ShapeSize (rebuildEA k v sh)
Intersect s t -> Intersect (rebuildEA k v s) (rebuildEA k v t)
rebuildTA
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> Tuple (PreOpenExp acc env aenv) t
-> Tuple (PreOpenExp acc env aenv') t
rebuildTA k v tup =
case tup of
NilTup -> NilTup
SnocTup t e -> rebuildTA k v t `SnocTup` rebuildEA k v e
rebuildFA
:: SyntacticAcc f
=> RebuildAcc acc
-> (forall t'. Arrays t' => Idx aenv t' -> f acc aenv' t')
-> PreOpenFun acc env aenv t
-> PreOpenFun acc env aenv' t
rebuildFA k v fun =
case fun of
Body e -> Body (rebuildEA k v e)
Lam f -> Lam (rebuildFA k v f)
-- Shrinking ===================================================================
--
-- The shrinking substitution arises as a restriction of beta-reduction to cases
-- where the bound variable is used zero (dead-code elimination) or one (linear
-- inlining) times.
--
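-- An illustration of the two cases, added here for exposition (not part of
-- the original module), written informally with the constructors used below:
--
--   Let bnd (Const c)                  ~~>  Const c        (zero uses: dead code)
--   Let bnd (ShapeSize (Var ZeroIdx))  ~~>  ShapeSize bnd  (one use: inlined)
--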
-- Scalar expressions
-- ------------------
shrinkE :: PreOpenExp acc env aenv t -> PreOpenExp acc env aenv t
shrinkE exp =
case exp of
Let bnd body
| Var _ <- bnd -> shrinkE (inline body bnd)
| usesOfE ZeroIdx body' <= lIMIT -> shrinkE (inline body' bnd')
| otherwise -> Let bnd' body'
where
bnd' = shrinkE bnd
body' = shrinkE body
-- Allow inlining and dead-code elimination
--
lIMIT = 1
--
Var idx -> Var idx
Const c -> Const c
Tuple t -> Tuple (shrinkTE t)
Prj tup e -> Prj tup (shrinkE e)
IndexNil -> IndexNil
IndexCons sl sz -> IndexCons (shrinkE sl) (shrinkE sz)
IndexHead sh -> IndexHead (shrinkE sh)
IndexTail sh -> IndexTail (shrinkE sh)
IndexSlice x ix sh -> IndexSlice x (shrinkE ix) (shrinkE sh)
IndexFull x ix sl -> IndexFull x (shrinkE ix) (shrinkE sl)
IndexAny -> IndexAny
ToIndex sh ix -> ToIndex (shrinkE sh) (shrinkE ix)
FromIndex sh i -> FromIndex (shrinkE sh) (shrinkE i)
Cond p t e -> Cond (shrinkE p) (shrinkE t) (shrinkE e)
Iterate n f x -> Iterate (shrinkE n) (shrinkE f) (shrinkE x)
PrimConst c -> PrimConst c
PrimApp f x -> PrimApp f (shrinkE x)
Index a sh -> Index a (shrinkE sh)
LinearIndex a i -> LinearIndex a (shrinkE i)
Shape a -> Shape a
ShapeSize sh -> ShapeSize (shrinkE sh)
Intersect sh sz -> Intersect (shrinkE sh) (shrinkE sz)
shrinkFE
:: PreOpenFun acc env aenv f
-> PreOpenFun acc env aenv f
shrinkFE fun =
case fun of
Body e -> Body (shrinkE e)
Lam f -> Lam (shrinkFE f)
shrinkTE
:: Tuple (PreOpenExp acc env aenv) t
-> Tuple (PreOpenExp acc env aenv) t
shrinkTE tup =
case tup of
NilTup -> NilTup
SnocTup t e -> SnocTup (shrinkTE t) (shrinkE e)
usesOfE :: forall acc env aenv s t. Idx env s -> PreOpenExp acc env aenv t -> Int
usesOfE idx exp =
case exp of
Let bnd body -> usesOfE idx bnd + usesOfE (SuccIdx idx) body
Var idx'
| Just REFL <- matchIdx idx idx' -> 1
| otherwise -> 0
Const _ -> 0
Tuple t -> usesOfTE idx t
Prj _ e -> usesOfE idx e
IndexNil -> 0
IndexCons sl sz -> usesOfE idx sl + usesOfE idx sz
IndexHead sh -> usesOfE idx sh
IndexTail sh -> usesOfE idx sh
IndexSlice _ ix sh -> usesOfE idx ix + usesOfE idx sh
IndexFull _ ix sl -> usesOfE idx ix + usesOfE idx sl
IndexAny -> 0
ToIndex sh ix -> usesOfE idx sh + usesOfE idx ix
FromIndex sh i -> usesOfE idx sh + usesOfE idx i
Cond p t e -> usesOfE idx p + usesOfE idx t + usesOfE idx e
Iterate n f x -> usesOfE idx n + usesOfE idx x + usesOfE (SuccIdx idx) f
PrimConst _ -> 0
PrimApp _ x -> usesOfE idx x
Index _ sh -> usesOfE idx sh
LinearIndex _ i -> usesOfE idx i
Shape _ -> 0
ShapeSize sh -> usesOfE idx sh
Intersect sh sz -> usesOfE idx sh + usesOfE idx sz
usesOfTE :: Idx env s -> Tuple (PreOpenExp acc env aenv) t -> Int
usesOfTE idx tup =
case tup of
NilTup -> 0
SnocTup t e -> usesOfTE idx t + usesOfE idx e
-- Array expressions
-- -----------------
type UsesOfAcc acc = forall aenv s t. Idx aenv s -> acc aenv t -> Int
type ShrinkAcc acc = forall aenv a. acc aenv a -> acc aenv a
shrinkOpenAcc :: OpenAcc aenv a -> OpenAcc aenv a
shrinkOpenAcc (OpenAcc pacc) =
OpenAcc (shrinkA rebuildOpenAcc shrinkOpenAcc usesOfOpenAcc pacc)
-- TLM: Shrinking of array expressions is currently specialised to OpenAcc
-- because we need to unwrap terms to do further substitution and
-- shrinking at the Alet case.
--
shrinkA
:: RebuildAcc OpenAcc
-> ShrinkAcc OpenAcc
-> UsesOfAcc OpenAcc
-> PreOpenAcc OpenAcc aenv t
-> PreOpenAcc OpenAcc aenv t
shrinkA k s u pacc =
let subTop :: Arrays t => PreOpenAcc acc aenv s -> Idx (aenv,s) t -> PreOpenAcc acc aenv t
subTop t ZeroIdx = t
subTop _ (SuccIdx idx) = Avar idx
in
case pacc of
Alet bnd@(OpenAcc pbnd) body@(OpenAcc pbody)
| Avar _ <- pbnd -> shrinkA k s u (rebuildA k (subTop pbnd) pbody)
| u ZeroIdx body' <= lIMIT -> shrinkA k s u (rebuildA k (subTop pbnd') pbody')
| otherwise -> Alet bnd' body'
where
bnd'@(OpenAcc pbnd') = s bnd
body'@(OpenAcc pbody') = s body
-- Allow only dead code elimination, otherwise we might inline array
-- computations directly into scalar expressions, and later stages rely
-- on there being only variables embedded in scalar expressions.
--
lIMIT = 0
--
Avar ix -> Avar ix
Atuple tup -> Atuple (shrinkATA s tup)
Aprj tup a -> Aprj tup (s a)
Apply f a -> Apply f (s a)
Acond p t e -> Acond (shrinkEA s p) (s t) (s e)
Use a -> Use a
Unit e -> Unit (shrinkEA s e)
Reshape e a -> Reshape (shrinkEA s e) (s a)
Generate e f -> Generate (shrinkEA s e) (shrinkFA s f)
Transform sh ix f a -> Transform (shrinkEA s sh) (shrinkFA s ix) (shrinkFA s f) (s a)
Replicate sl slix a -> Replicate sl (shrinkEA s slix) (s a)
Slice sl a slix -> Slice sl (s a) (shrinkEA s slix)
Map f a -> Map (shrinkFA s f) (s a)
ZipWith f a1 a2 -> ZipWith (shrinkFA s f) (s a1) (s a2)
Fold f z a -> Fold (shrinkFA s f) (shrinkEA s z) (s a)
Fold1 f a -> Fold1 (shrinkFA s f) (s a)
FoldSeg f z a b -> FoldSeg (shrinkFA s f) (shrinkEA s z) (s a) (s b)
Fold1Seg f a b -> Fold1Seg (shrinkFA s f) (s a) (s b)
Scanl f z a -> Scanl (shrinkFA s f) (shrinkEA s z) (s a)
Scanl' f z a -> Scanl' (shrinkFA s f) (shrinkEA s z) (s a)
Scanl1 f a -> Scanl1 (shrinkFA s f) (s a)
Scanr f z a -> Scanr (shrinkFA s f) (shrinkEA s z) (s a)
Scanr' f z a -> Scanr' (shrinkFA s f) (shrinkEA s z) (s a)
Scanr1 f a -> Scanr1 (shrinkFA s f) (s a)
Permute f1 a1 f2 a2 -> Permute (shrinkFA s f1) (s a1) (shrinkFA s f2) (s a2)
Backpermute sh f a -> Backpermute (shrinkEA s sh) (shrinkFA s f) (s a)
Stencil f b a -> Stencil (shrinkFA s f) b (s a)
Stencil2 f b1 a1 b2 a2
-> Stencil2 (shrinkFA s f) b1 (s a1) b2 (s a2)
Foreign ff afun a -> Foreign ff afun $ s a
shrinkAfun :: ShrinkAcc acc -> PreOpenAfun acc aenv t -> PreOpenAfun acc aenv t
shrinkAfun s afun =
case afun of
Abody b -> Abody (s b)
Alam f -> Alam (shrinkAfun s f)
shrinkATA :: ShrinkAcc acc -> Atuple (acc aenv) t -> Atuple (acc aenv) t
shrinkATA s atup =
case atup of
NilAtup -> NilAtup
SnocAtup t a -> shrinkATA s t `SnocAtup` s a
shrinkFA :: ShrinkAcc acc -> PreOpenFun acc env aenv t -> PreOpenFun acc env aenv t
shrinkFA s fun =
case fun of
Body b -> Body (shrinkEA s b)
Lam f -> Lam (shrinkFA s f)
shrinkEA :: ShrinkAcc acc -> PreOpenExp acc env aenv t -> PreOpenExp acc env aenv t
shrinkEA s exp =
case exp of
Let bnd body -> Let (shrinkEA s bnd) (shrinkEA s body)
Var idx -> Var idx
Const c -> Const c
Tuple t -> Tuple (shrinkTA s t)
Prj tup e -> Prj tup (shrinkEA s e)
IndexNil -> IndexNil
IndexCons sl sz -> IndexCons (shrinkEA s sl) (shrinkEA s sz)
IndexHead sh -> IndexHead (shrinkEA s sh)
IndexTail sh -> IndexTail (shrinkEA s sh)
IndexSlice x ix sh -> IndexSlice x (shrinkEA s ix) (shrinkEA s sh)
IndexFull x ix sl -> IndexFull x (shrinkEA s ix) (shrinkEA s sl)
IndexAny -> IndexAny
ToIndex sh ix -> ToIndex (shrinkEA s sh) (shrinkEA s ix)
FromIndex sh i -> FromIndex (shrinkEA s sh) (shrinkEA s i)
Cond p t e -> Cond (shrinkEA s p) (shrinkEA s t) (shrinkEA s e)
Iterate n f x -> Iterate (shrinkEA s n) (shrinkEA s f) (shrinkEA s x)
PrimConst c -> PrimConst c
PrimApp f x -> PrimApp f (shrinkEA s x)
Index a sh -> Index (s a) (shrinkEA s sh)
LinearIndex a i -> LinearIndex (s a) (shrinkEA s i)
Shape a -> Shape (s a)
ShapeSize sh -> ShapeSize (shrinkEA s sh)
Intersect sh sz -> Intersect (shrinkEA s sh) (shrinkEA s sz)
shrinkTA :: ShrinkAcc acc -> Tuple (PreOpenExp acc env aenv) t -> Tuple (PreOpenExp acc env aenv) t
shrinkTA s tup =
case tup of
NilTup -> NilTup
SnocTup t e -> shrinkTA s t `SnocTup` shrinkEA s e
usesOfOpenAcc :: Idx aenv s -> OpenAcc aenv t -> Int
usesOfOpenAcc idx (OpenAcc acc) = usesOfA usesOfOpenAcc idx acc
usesOfA :: UsesOfAcc acc -> Idx aenv s -> PreOpenAcc acc aenv t -> Int
usesOfA u idx acc =
case acc of
Alet bnd body -> u idx bnd + u (SuccIdx idx) body
Avar idx'
| Just REFL <- matchIdx idx idx' -> 1
| otherwise -> 0
Atuple tup -> usesOfATA u idx tup
Aprj _ a -> u idx a
Apply _ a -> u idx a
Acond p t e -> usesOfEA u idx p + u idx t + u idx e
Use _ -> 0
Unit e -> usesOfEA u idx e
Reshape e a -> usesOfEA u idx e + u idx a
Generate e f -> usesOfEA u idx e + usesOfFA u idx f
Transform sh ix f a -> usesOfEA u idx sh + usesOfFA u idx ix + usesOfFA u idx f + u idx a
Replicate _ slix a -> usesOfEA u idx slix + u idx a
Slice _ a slix -> usesOfEA u idx slix + u idx a
Map f a -> usesOfFA u idx f + u idx a
ZipWith f a1 a2 -> usesOfFA u idx f + u idx a1 + u idx a2
Fold f z a -> usesOfFA u idx f + usesOfEA u idx z + u idx a
Fold1 f a -> usesOfFA u idx f + u idx a
FoldSeg f z a s -> usesOfFA u idx f + usesOfEA u idx z + u idx a + u idx s
Fold1Seg f a s -> usesOfFA u idx f + u idx a + u idx s
Scanl f z a -> usesOfFA u idx f + usesOfEA u idx z + u idx a
Scanl' f z a -> usesOfFA u idx f + usesOfEA u idx z + u idx a
Scanl1 f a -> usesOfFA u idx f + u idx a
Scanr f z a -> usesOfFA u idx f + usesOfEA u idx z + u idx a
Scanr' f z a -> usesOfFA u idx f + usesOfEA u idx z + u idx a
Scanr1 f a -> usesOfFA u idx f + u idx a
Permute f1 a1 f2 a2 -> usesOfFA u idx f1 + u idx a1 + usesOfFA u idx f2 + u idx a2
Backpermute sh f a -> usesOfEA u idx sh + usesOfFA u idx f + u idx a
Stencil f _ a -> usesOfFA u idx f + u idx a
Stencil2 f _ a1 _ a2-> usesOfFA u idx f + u idx a1 + u idx a2
Foreign _ _ a -> u idx a
usesOfATA :: UsesOfAcc acc -> Idx aenv s -> Atuple (acc aenv) t -> Int
usesOfATA s idx atup =
case atup of
NilAtup -> 0
SnocAtup t a -> usesOfATA s idx t + s idx a
usesOfEA :: UsesOfAcc acc -> Idx aenv a -> PreOpenExp acc env aenv t -> Int
usesOfEA s idx exp =
case exp of
Let bnd body -> usesOfEA s idx bnd + usesOfEA s idx body
Var _ -> 0
Const _ -> 0
Tuple t -> usesOfTA s idx t
Prj _ e -> usesOfEA s idx e
IndexNil -> 0
IndexCons sl sz -> usesOfEA s idx sl + usesOfEA s idx sz
IndexHead sh -> usesOfEA s idx sh
IndexTail sh -> usesOfEA s idx sh
IndexSlice _ ix sh -> usesOfEA s idx ix + usesOfEA s idx sh
IndexFull _ ix sl -> usesOfEA s idx ix + usesOfEA s idx sl
IndexAny -> 0
ToIndex sh ix -> usesOfEA s idx sh + usesOfEA s idx ix
FromIndex sh i -> usesOfEA s idx sh + usesOfEA s idx i
Cond p t e -> usesOfEA s idx p + usesOfEA s idx t + usesOfEA s idx e
Iterate n f x -> usesOfEA s idx n + usesOfEA s idx f + usesOfEA s idx x
PrimConst _ -> 0
PrimApp _ x -> usesOfEA s idx x
Index a sh -> s idx a + usesOfEA s idx sh
LinearIndex a i -> s idx a + usesOfEA s idx i
Shape a -> s idx a
ShapeSize sh -> usesOfEA s idx sh
Intersect sh sz -> usesOfEA s idx sh + usesOfEA s idx sz
usesOfTA :: UsesOfAcc acc -> Idx aenv a -> Tuple (PreOpenExp acc env aenv) t -> Int
usesOfTA s idx tup =
case tup of
NilTup -> 0
SnocTup t e -> usesOfTA s idx t + usesOfEA s idx e
usesOfFA :: UsesOfAcc acc -> Idx aenv a -> PreOpenFun acc env aenv f -> Int
usesOfFA s idx fun =
case fun of
Body e -> usesOfEA s idx e
Lam f -> usesOfFA s idx f
| robeverest/accelerate | Data/Array/Accelerate/Trafo/Substitution.hs | bsd-3-clause | 28,760 | 0 | 15 | 9,142 | 11,164 | 5,361 | 5,803 | 533 | 31 |
{-# LANGUAGE StandaloneDeriving, GeneralizedNewtypeDeriving #-}
module Data.TrieMap.ReverseMap.Tests where
import Data.TrieMap.ReverseMap ()
import Data.TrieMap.WordMap ()
import Data.TrieMap.Modifiers
import Data.Word
import qualified Data.TrieMap.TrieKey.Tests as TrieKeyTests
import Test.QuickCheck
deriving instance Arbitrary a => Arbitrary (Rev a)
tests :: Property
tests = TrieKeyTests.tests "Data.TrieMap.ReverseMap" (Rev (0 :: Word)) | lowasser/TrieMap | Data/TrieMap/ReverseMap/Tests.hs | bsd-3-clause | 446 | 0 | 8 | 48 | 103 | 63 | 40 | 11 | 1 |
{-|
Module : Algebra.Function
Description : Boilerplate code.
Copyright : (c) Janthelme, 2016
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
Boilerplate code to translate @a->b@ functions into corresponding @'Relvar.Elem'->'Relvar.Elem'@ functions (see 'LiftElem') and also into @['Relvar.Elem']->'Relvar.Elem'@ functions for use with the function 'Algebra.extend'.
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
-- usage
-- provides ways to produce [Elem]->Elem functions
-- Warning: A lot of boilerplate code. FIXME: use generics or dynamic instead (see http://stackoverflow.com/questions/27393968/from-a-b-to-mytype-mytype)
-- CHECK: use TH?
module Algebra.Function
( module Relvar,
-- * Classes
LiftElem (..)
, LiftElem2 (..)
, LiftElem3 (..)
, BoolFun (..)
, BoolFun2 (..)
, BoolFun3 (..)
) where
-- import Debug.Trace
import Relvar
import Data.Text (Text)
import Data.Time (Day, UTCTime)
import Data.ByteString (ByteString)
import Data.Maybe (fromMaybe)
import qualified Data.Map.Strict as Map (fromList, findWithDefault)
-- | Lifting functions into the "Elem domain"
-- Use this class to translate f :: a -> b into the corresponding (Elem -> Elem) function
class (Elementable a, Elementable b) => LiftElem a b where
liftLeft' :: (a -> b) -> (Elem -> Maybe b)
liftLeft' f x = case fromElem x of
Nothing -> Nothing
Just x' -> Just (f x')
  -- needed by 'liftBoolFun' below, which in turn is needed to use 'restrict' with unary filter functions
liftLeft :: (a -> b) -> ([Elem] -> Maybe b)
liftLeft f [] = error "Empty argument list"
liftLeft f (x:xs) = (liftLeft' f) x
liftEl' :: (a -> b) -> (Elem -> Elem)
liftEl' f x = case (liftLeft' f) x of
Nothing -> Nil
Just z -> toElem z
liftEl :: (a -> b) -> ([Elem] -> Elem)
liftEl f (x:xs) = (liftEl' f) x
liftEl f [] = Nil
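-- Illustrative usage, added for exposition (not part of the original module).
-- 'strLen' is a made-up name; the lift relies on the 'Elementable' instances
-- for String and Int from "Relvar" and on the boilerplate 'LiftElem'
-- instances declared further below.
strLen :: [Elem] -> Elem
strLen = liftEl (length :: String -> Int)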
-- | Lifting functions into the "Elem domain"
-- Use this class to translate f :: a1 -> a2 -> b into the corresponding (Elem -> Elem -> Elem) function
class (Elementable a1, Elementable a2, Elementable b) => LiftElem2 a1 a2 b where
liftLeft2' :: (a1 -> a2 -> b) -> (Elem -> Elem -> Maybe b)
liftLeft2' f x y = case (fromElem x, fromElem y) of
(Just x', Just y') -> Just (f x' y')
otherwise -> Nothing
  -- needed by 'liftBoolFun2' below, which in turn is needed to use 'restrict' with binary filter functions
liftLeft2 :: (a1 -> a2 -> b) -> ([Elem] -> Maybe b)
liftLeft2 f [] = error "Empty argument list"
liftLeft2 f (x:[]) = error "Argument list: Need 2 or more elements"
liftLeft2 f (x:y:xs) = (liftLeft2' f) x y
liftEl2' :: (a1 -> a2 -> b) -> (Elem -> Elem -> Elem)
liftEl2' f x y = case (liftLeft2' f) x y of
Nothing -> Nil
Just z -> toElem z
liftEl2 :: (a1 -> a2 -> b) -> ([Elem] -> Elem)
liftEl2 f (x:y:xs) = (liftEl2' f) x y
liftEl2 f _ = Nil
-- | Lifting functions into the "Elem domain"
-- Use this class to translate f :: a1 -> a2 -> a3 -> b into the corresponding (Elem -> Elem -> Elem -> Elem) function
class (Elementable a1, Elementable a2, Elementable a3, Elementable b) => LiftElem3 a1 a2 a3 b where
liftLeft3' :: (a1 -> a2 -> a3 -> b) -> (Elem -> Elem -> Elem -> Maybe b)
liftLeft3' f x1 x2 x3 = case (fromElem x1, fromElem x2, fromElem x3) of
(Just x1', Just x2', Just x3') -> Just (f x1' x2' x3')
otherwise -> Nothing
  -- needed by 'liftBoolFun3' below, which in turn is needed to use 'restrict' with ternary filter functions
liftLeft3 :: (a1 -> a2 -> a3 -> b) -> ([Elem] -> Maybe b)
liftLeft3 f [] = error "Empty argument list"
liftLeft3 f (x1:[]) = error "Argument list: Need 3 or more elements"
liftLeft3 f (x1:x2:[]) = error "Argument list: Need 3 or more elements"
liftLeft3 f (x1:x2:x3:xs) = (liftLeft3' f) x1 x2 x3
liftEl3' :: (a1 -> a2 -> a3 -> b) -> (Elem -> Elem -> Elem -> Elem)
liftEl3' f x1 x2 x3 = case (liftLeft3' f) x1 x2 x3 of
Nothing -> Nil
Just z -> toElem z
liftEl3 :: (a1 -> a2 -> a3 -> b) -> ([Elem] -> Elem)
liftEl3 f (x1:x2:x3:xs) = (liftEl3' f) x1 x2 x3
liftEl3 f _ = Nil
-- ADD ADDITIONAL CLASSES FOR HIGHER ARITY
-- | "Lifting" (a->Bool) functions to ([Elem]->Bool) functions.
-- This is a convenience function when using 'Relvar.restrict' function.
class LiftElem a Bool => BoolFun a where
liftBoolFun :: (a -> Bool) -> ([Elem] -> Bool)
liftBoolFun f x = fromMaybe False $ liftLeft f x
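-- Illustrative usage, added for exposition; the name and the threshold are
-- made up. A lifted predicate like this is what 'Relvar.restrict' expects.
isAdult :: [Elem] -> Bool
isAdult = liftBoolFun ((>= 18) :: Int -> Bool)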
-- | "Lifting" (a1->a2->Bool) functions to ([Elem]->Bool) functions.
-- This is a convenience function when using 'Relvar.restrict' function.
class LiftElem2 a1 a2 Bool => BoolFun2 a1 a2 where
liftBoolFun2 :: (a1 -> a2 -> Bool) -> ([Elem] -> Bool)
liftBoolFun2 f x = fromMaybe False $ liftLeft2 f x
-- | "Lifting" (a1->a2->a3->Bool) functions to ([Elem]->Bool) functions.
-- This is a convenience function when using 'Relvar.restrict' function.
class LiftElem3 a1 a2 a3 Bool => BoolFun3 a1 a2 a3 where
liftBoolFun3 :: (a1 -> a2 -> a3 -> Bool) -> ([Elem] -> Bool)
liftBoolFun3 f x = fromMaybe False $ liftLeft3 f x
-- ADD ADDITIONAL CLASSES FOR HIGHER ARITY
-- FIXME: no instance for arrays (A x) and tuples (T2 x) and (T3 x)
-- == START : Code generated by genBoilerCode1 ==
instance BoolFun Bool
instance BoolFun Char
instance BoolFun String
instance BoolFun Text
instance BoolFun Int
instance BoolFun Integer
instance BoolFun Double
instance BoolFun Day
instance BoolFun UTCTime
instance BoolFun ByteString
instance BoolFun Relvar
instance Elementable a => LiftElem Bool a
instance Elementable a => LiftElem Char a
instance Elementable a => LiftElem String a
instance Elementable a => LiftElem Text a
instance Elementable a => LiftElem Int a
instance Elementable a => LiftElem Integer a
instance Elementable a => LiftElem Double a
instance Elementable a => LiftElem Day a
instance Elementable a => LiftElem UTCTime a
instance Elementable a => LiftElem ByteString a
instance Elementable a => LiftElem Relvar a
-- == END : Code generated by genBoilerCode1 ==
-- == START : Code generated by genBoilerCode2 ==
instance BoolFun2 Bool Bool
instance BoolFun2 Bool Char
instance BoolFun2 Bool String
instance BoolFun2 Bool Text
instance BoolFun2 Bool Int
instance BoolFun2 Bool Integer
instance BoolFun2 Bool Double
instance BoolFun2 Bool Day
instance BoolFun2 Bool UTCTime
instance BoolFun2 Bool ByteString
instance BoolFun2 Bool Relvar
instance BoolFun2 Char Bool
instance BoolFun2 Char Char
instance BoolFun2 Char String
instance BoolFun2 Char Text
instance BoolFun2 Char Int
instance BoolFun2 Char Integer
instance BoolFun2 Char Double
instance BoolFun2 Char Day
instance BoolFun2 Char UTCTime
instance BoolFun2 Char ByteString
instance BoolFun2 Char Relvar
instance BoolFun2 String Bool
instance BoolFun2 String Char
instance BoolFun2 String String
instance BoolFun2 String Text
instance BoolFun2 String Int
instance BoolFun2 String Integer
instance BoolFun2 String Double
instance BoolFun2 String Day
instance BoolFun2 String UTCTime
instance BoolFun2 String ByteString
instance BoolFun2 String Relvar
instance BoolFun2 Text Bool
instance BoolFun2 Text Char
instance BoolFun2 Text String
instance BoolFun2 Text Text
instance BoolFun2 Text Int
instance BoolFun2 Text Integer
instance BoolFun2 Text Double
instance BoolFun2 Text Day
instance BoolFun2 Text UTCTime
instance BoolFun2 Text ByteString
instance BoolFun2 Text Relvar
instance BoolFun2 Int Bool
instance BoolFun2 Int Char
instance BoolFun2 Int String
instance BoolFun2 Int Text
instance BoolFun2 Int Int
instance BoolFun2 Int Integer
instance BoolFun2 Int Double
instance BoolFun2 Int Day
instance BoolFun2 Int UTCTime
instance BoolFun2 Int ByteString
instance BoolFun2 Int Relvar
instance BoolFun2 Integer Bool
instance BoolFun2 Integer Char
instance BoolFun2 Integer String
instance BoolFun2 Integer Text
instance BoolFun2 Integer Int
instance BoolFun2 Integer Integer
instance BoolFun2 Integer Double
instance BoolFun2 Integer Day
instance BoolFun2 Integer UTCTime
instance BoolFun2 Integer ByteString
instance BoolFun2 Integer Relvar
instance BoolFun2 Double Bool
instance BoolFun2 Double Char
instance BoolFun2 Double String
instance BoolFun2 Double Text
instance BoolFun2 Double Int
instance BoolFun2 Double Integer
instance BoolFun2 Double Double
instance BoolFun2 Double Day
instance BoolFun2 Double UTCTime
instance BoolFun2 Double ByteString
instance BoolFun2 Double Relvar
instance BoolFun2 Day Bool
instance BoolFun2 Day Char
instance BoolFun2 Day String
instance BoolFun2 Day Text
instance BoolFun2 Day Int
instance BoolFun2 Day Integer
instance BoolFun2 Day Double
instance BoolFun2 Day Day
instance BoolFun2 Day UTCTime
instance BoolFun2 Day ByteString
instance BoolFun2 Day Relvar
instance BoolFun2 UTCTime Bool
instance BoolFun2 UTCTime Char
instance BoolFun2 UTCTime String
instance BoolFun2 UTCTime Text
instance BoolFun2 UTCTime Int
instance BoolFun2 UTCTime Integer
instance BoolFun2 UTCTime Double
instance BoolFun2 UTCTime Day
instance BoolFun2 UTCTime UTCTime
instance BoolFun2 UTCTime ByteString
instance BoolFun2 UTCTime Relvar
instance BoolFun2 ByteString Bool
instance BoolFun2 ByteString Char
instance BoolFun2 ByteString String
instance BoolFun2 ByteString Text
instance BoolFun2 ByteString Int
instance BoolFun2 ByteString Integer
instance BoolFun2 ByteString Double
instance BoolFun2 ByteString Day
instance BoolFun2 ByteString UTCTime
instance BoolFun2 ByteString ByteString
instance BoolFun2 ByteString Relvar
instance BoolFun2 Relvar Bool
instance BoolFun2 Relvar Char
instance BoolFun2 Relvar String
instance BoolFun2 Relvar Text
instance BoolFun2 Relvar Int
instance BoolFun2 Relvar Integer
instance BoolFun2 Relvar Double
instance BoolFun2 Relvar Day
instance BoolFun2 Relvar UTCTime
instance BoolFun2 Relvar ByteString
instance BoolFun2 Relvar Relvar
instance Elementable a => LiftElem2 Bool Bool a
instance Elementable a => LiftElem2 Bool Char a
instance Elementable a => LiftElem2 Bool String a
instance Elementable a => LiftElem2 Bool Text a
instance Elementable a => LiftElem2 Bool Int a
instance Elementable a => LiftElem2 Bool Integer a
instance Elementable a => LiftElem2 Bool Double a
instance Elementable a => LiftElem2 Bool Day a
instance Elementable a => LiftElem2 Bool UTCTime a
instance Elementable a => LiftElem2 Bool ByteString a
instance Elementable a => LiftElem2 Bool Relvar a
instance Elementable a => LiftElem2 Char Bool a
instance Elementable a => LiftElem2 Char Char a
instance Elementable a => LiftElem2 Char String a
instance Elementable a => LiftElem2 Char Text a
instance Elementable a => LiftElem2 Char Int a
instance Elementable a => LiftElem2 Char Integer a
instance Elementable a => LiftElem2 Char Double a
instance Elementable a => LiftElem2 Char Day a
instance Elementable a => LiftElem2 Char UTCTime a
instance Elementable a => LiftElem2 Char ByteString a
instance Elementable a => LiftElem2 Char Relvar a
instance Elementable a => LiftElem2 String Bool a
instance Elementable a => LiftElem2 String Char a
instance Elementable a => LiftElem2 String String a
instance Elementable a => LiftElem2 String Text a
instance Elementable a => LiftElem2 String Int a
instance Elementable a => LiftElem2 String Integer a
instance Elementable a => LiftElem2 String Double a
instance Elementable a => LiftElem2 String Day a
instance Elementable a => LiftElem2 String UTCTime a
instance Elementable a => LiftElem2 String ByteString a
instance Elementable a => LiftElem2 String Relvar a
instance Elementable a => LiftElem2 Text Bool a
instance Elementable a => LiftElem2 Text Char a
instance Elementable a => LiftElem2 Text String a
instance Elementable a => LiftElem2 Text Text a
instance Elementable a => LiftElem2 Text Int a
instance Elementable a => LiftElem2 Text Integer a
instance Elementable a => LiftElem2 Text Double a
instance Elementable a => LiftElem2 Text Day a
instance Elementable a => LiftElem2 Text UTCTime a
instance Elementable a => LiftElem2 Text ByteString a
instance Elementable a => LiftElem2 Text Relvar a
instance Elementable a => LiftElem2 Int Bool a
instance Elementable a => LiftElem2 Int Char a
instance Elementable a => LiftElem2 Int String a
instance Elementable a => LiftElem2 Int Text a
instance Elementable a => LiftElem2 Int Int a
instance Elementable a => LiftElem2 Int Integer a
instance Elementable a => LiftElem2 Int Double a
instance Elementable a => LiftElem2 Int Day a
instance Elementable a => LiftElem2 Int UTCTime a
instance Elementable a => LiftElem2 Int ByteString a
instance Elementable a => LiftElem2 Int Relvar a
instance Elementable a => LiftElem2 Integer Bool a
instance Elementable a => LiftElem2 Integer Char a
instance Elementable a => LiftElem2 Integer String a
instance Elementable a => LiftElem2 Integer Text a
instance Elementable a => LiftElem2 Integer Int a
instance Elementable a => LiftElem2 Integer Integer a
instance Elementable a => LiftElem2 Integer Double a
instance Elementable a => LiftElem2 Integer Day a
instance Elementable a => LiftElem2 Integer UTCTime a
instance Elementable a => LiftElem2 Integer ByteString a
instance Elementable a => LiftElem2 Integer Relvar a
instance Elementable a => LiftElem2 Double Bool a
instance Elementable a => LiftElem2 Double Char a
instance Elementable a => LiftElem2 Double String a
instance Elementable a => LiftElem2 Double Text a
instance Elementable a => LiftElem2 Double Int a
instance Elementable a => LiftElem2 Double Integer a
instance Elementable a => LiftElem2 Double Double a
instance Elementable a => LiftElem2 Double Day a
instance Elementable a => LiftElem2 Double UTCTime a
instance Elementable a => LiftElem2 Double ByteString a
instance Elementable a => LiftElem2 Double Relvar a
instance Elementable a => LiftElem2 Day Bool a
instance Elementable a => LiftElem2 Day Char a
instance Elementable a => LiftElem2 Day String a
instance Elementable a => LiftElem2 Day Text a
instance Elementable a => LiftElem2 Day Int a
instance Elementable a => LiftElem2 Day Integer a
instance Elementable a => LiftElem2 Day Double a
instance Elementable a => LiftElem2 Day Day a
instance Elementable a => LiftElem2 Day UTCTime a
instance Elementable a => LiftElem2 Day ByteString a
instance Elementable a => LiftElem2 Day Relvar a
instance Elementable a => LiftElem2 UTCTime Bool a
instance Elementable a => LiftElem2 UTCTime Char a
instance Elementable a => LiftElem2 UTCTime String a
instance Elementable a => LiftElem2 UTCTime Text a
instance Elementable a => LiftElem2 UTCTime Int a
instance Elementable a => LiftElem2 UTCTime Integer a
instance Elementable a => LiftElem2 UTCTime Double a
instance Elementable a => LiftElem2 UTCTime Day a
instance Elementable a => LiftElem2 UTCTime UTCTime a
instance Elementable a => LiftElem2 UTCTime ByteString a
instance Elementable a => LiftElem2 UTCTime Relvar a
instance Elementable a => LiftElem2 ByteString Bool a
instance Elementable a => LiftElem2 ByteString Char a
instance Elementable a => LiftElem2 ByteString String a
instance Elementable a => LiftElem2 ByteString Text a
instance Elementable a => LiftElem2 ByteString Int a
instance Elementable a => LiftElem2 ByteString Integer a
instance Elementable a => LiftElem2 ByteString Double a
instance Elementable a => LiftElem2 ByteString Day a
instance Elementable a => LiftElem2 ByteString UTCTime a
instance Elementable a => LiftElem2 ByteString ByteString a
instance Elementable a => LiftElem2 ByteString Relvar a
instance Elementable a => LiftElem2 Relvar Bool a
instance Elementable a => LiftElem2 Relvar Char a
instance Elementable a => LiftElem2 Relvar String a
instance Elementable a => LiftElem2 Relvar Text a
instance Elementable a => LiftElem2 Relvar Int a
instance Elementable a => LiftElem2 Relvar Integer a
instance Elementable a => LiftElem2 Relvar Double a
instance Elementable a => LiftElem2 Relvar Day a
instance Elementable a => LiftElem2 Relvar UTCTime a
instance Elementable a => LiftElem2 Relvar ByteString a
instance Elementable a => LiftElem2 Relvar Relvar a
-- == END : Code generated by genBoilerCode2 ==
-------------------------
-- boilerplate code generation -- CHECK: use TH?
-------------------------
-- update these lists with Elem definitions (eg when adding a new basic type)
genStrTy = ["Bool", "Char", "String", "Text", "Int", "Integer", "Double", "Day", "UTCTime", "ByteString", "Relvar"]
genStrEl = ["B", "C", "S", "T", "I", "J", "D", "DD", "DT", "BS", "R"]
genTypeMap = Map.fromList $ zip genStrTy genStrEl
-- boilerplate functions
-- unary functions
genBoolFun1 :: String -> String
genBoolFun1 strTy1 = "instance BoolFun " ++ strTy1
genLiftElem1 :: String -> String
genLiftElem1 strTy1 = "instance Elementable a => LiftElem " ++ strTy1 ++ " a"
-- boilerplate code generation for unary functions (run genBoilerCode1, copy stdout and paste it into the code)
genBoilerCode1 :: IO()
genBoilerCode1 = do
-- let pairs = [(x,y) | x <- genStrTy, y <- genStrTy]
putStrLn "-- == START : Code generated by genBoilerCode1 =="
mapM_ putStrLn $ map genBoolFun1 genStrTy
mapM_ putStrLn $ map genLiftElem1 genStrTy
putStrLn "-- == END : Code generated by genBoilerCode1 =="
-- binary functions
genBoolFun2 :: (String, String) -> String
genBoolFun2 (strTy1,strTy2) = "instance BoolFun2 " ++ strTy1 ++ " " ++ strTy2
genLiftElem2 :: (String, String) -> String
genLiftElem2 (strTy1,strTy2) = "instance Elementable a => LiftElem2 " ++ strTy1 ++ " " ++ strTy2 ++ " a"
-- boilerplate code generation for binary functions (run genBoilerCode2, copy stdout and paste it into the code)
genBoilerCode2 :: IO()
genBoilerCode2 = do
let pairs = [(x,y) | x <- genStrTy, y <- genStrTy]
xs = ["-- == START : Code generated by genBoilerCode2 =="]
++ map genBoolFun2 pairs
++ map genLiftElem2 pairs
             ++ ["-- == END : Code generated by genBoilerCode2 =="]
mapM_ putStrLn xs
| JAnthelme/relation-tool | src/Algebra/Function.hs | bsd-3-clause | 17,849 | 0 | 13 | 3,083 | 5,416 | 2,575 | 2,841 | 364 | 1 |
module ReaderT
(
) where
import Control.Applicative
import Control.Monad
newtype ReaderT r m a =
ReaderT { runReaderT :: r -> m a}
instance Functor m => Functor (ReaderT r m) where
fmap f (ReaderT rma) = ReaderT $
(fmap . fmap) f rma
-- ^ ^
-- | |
-- apply f inside the monad
-- compose f with the reader function
instance Applicative m => Applicative (ReaderT r m) where
pure = ReaderT . pure . pure
(ReaderT rmab) <*> (ReaderT rma) =
ReaderT $ liftA2 (<*>) rmab rma
instance Monad m => Monad (ReaderT r m) where
return = pure
(ReaderT rma) >>= artmb =
ReaderT $ \r ->
rma r >>= (\a -> runReaderT (artmb a) r)
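-- A small usage sketch, added for exposition; 'greet' is a made-up name and
-- is not part of the original exercise.
greet :: ReaderT String IO ()
greet = ReaderT $ \name -> putStrLn ("hello, " ++ name)
-- runReaderT greet "world" prints "hello, world"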
| nicklawls/haskellbook | src/ReaderT.hs | bsd-3-clause | 726 | 0 | 13 | 236 | 251 | 133 | 118 | 18 | 0 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.Vulkan.CommandBufferBuilding where
import Graphics.Vulkan.Buffer( VkBuffer(..)
)
import Graphics.Vulkan.Pass( VkDependencyFlagBits(..)
, VkFramebuffer(..)
, VkRenderPass(..)
, VkAccessFlags(..)
, VkDependencyFlags(..)
, VkAccessFlagBits(..)
)
import Graphics.Vulkan.Event( VkEvent(..)
)
import Graphics.Vulkan.Pipeline( VkPipelineStageFlagBits(..)
, VkPipelineStageFlags(..)
, VkPipeline(..)
, VkPipelineBindPoint(..)
)
import Data.Word( Word64
, Word32
)
import Foreign.Ptr( Ptr
, castPtr
, plusPtr
)
import Graphics.Vulkan.DescriptorSet( VkDescriptorSet(..)
)
import Graphics.Vulkan.CommandBuffer( VkCommandBuffer(..)
)
import Data.Int( Int32
)
import Data.Vector.Fixed.Cont( ToPeano
)
import Data.Bits( Bits
, FiniteBits
)
import Foreign.Storable( Storable(..)
)
import Data.Void( Void
)
import Graphics.Vulkan.PipelineLayout( VkPipelineLayout(..)
)
import Graphics.Vulkan.Shader( VkShaderStageFlagBits(..)
, VkShaderStageFlags(..)
)
import Graphics.Vulkan.Sampler( VkFilter(..)
)
import Graphics.Vulkan.Image( VkImage(..)
, VkImageLayout(..)
, VkImageAspectFlagBits(..)
, VkImageSubresourceRange(..)
, VkImageAspectFlags(..)
)
import Graphics.Vulkan.Query( VkQueryResultFlagBits(..)
, VkQueryControlFlagBits(..)
, VkQueryControlFlags(..)
, VkQueryPool(..)
, VkQueryResultFlags(..)
)
import Data.Vector.Fixed.Storable( Vec
)
import Graphics.Vulkan.OtherTypes( VkImageMemoryBarrier(..)
, VkMemoryBarrier(..)
, VkBufferMemoryBarrier(..)
)
import Graphics.Vulkan.Core( VkExtent3D(..)
, VkDeviceSize(..)
, VkExtent2D(..)
, VkFlags(..)
, VkOffset2D(..)
, VkOffset3D(..)
, VkRect2D(..)
, VkViewport(..)
, VkStructureType(..)
)
import Foreign.C.Types( CFloat
, CFloat(..)
)
-- ** vkCmdPushConstants
foreign import ccall "vkCmdPushConstants" vkCmdPushConstants ::
VkCommandBuffer ->
VkPipelineLayout ->
VkShaderStageFlags -> Word32 -> Word32 -> Ptr Void -> IO ()
-- ** vkCmdSetStencilWriteMask
foreign import ccall "vkCmdSetStencilWriteMask" vkCmdSetStencilWriteMask ::
VkCommandBuffer -> VkStencilFaceFlags -> Word32 -> IO ()
-- ** vkCmdBindIndexBuffer
foreign import ccall "vkCmdBindIndexBuffer" vkCmdBindIndexBuffer ::
VkCommandBuffer -> VkBuffer -> VkDeviceSize -> VkIndexType -> IO ()
-- ** vkCmdResetQueryPool
foreign import ccall "vkCmdResetQueryPool" vkCmdResetQueryPool ::
VkCommandBuffer -> VkQueryPool -> Word32 -> Word32 -> IO ()
-- ** vkCmdResolveImage
foreign import ccall "vkCmdResolveImage" vkCmdResolveImage ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
VkImage -> VkImageLayout -> Word32 -> Ptr VkImageResolve -> IO ()
-- ** vkCmdBindPipeline
foreign import ccall "vkCmdBindPipeline" vkCmdBindPipeline ::
VkCommandBuffer -> VkPipelineBindPoint -> VkPipeline -> IO ()
-- ** vkCmdBindVertexBuffers
foreign import ccall "vkCmdBindVertexBuffers" vkCmdBindVertexBuffers ::
VkCommandBuffer ->
Word32 -> Word32 -> Ptr VkBuffer -> Ptr VkDeviceSize -> IO ()
-- ** vkCmdDraw
foreign import ccall "vkCmdDraw" vkCmdDraw ::
VkCommandBuffer -> Word32 -> Word32 -> Word32 -> Word32 -> IO ()
data VkImageCopy =
VkImageCopy{ vkSrcSubresource :: VkImageSubresourceLayers
, vkSrcOffset :: VkOffset3D
, vkDstSubresource :: VkImageSubresourceLayers
, vkDstOffset :: VkOffset3D
, vkExtent :: VkExtent3D
}
deriving (Eq)
instance Storable VkImageCopy where
sizeOf ~_ = 68
alignment ~_ = 4
peek ptr = VkImageCopy <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 28)
<*> peek (ptr `plusPtr` 44)
<*> peek (ptr `plusPtr` 56)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSrcSubresource (poked :: VkImageCopy))
*> poke (ptr `plusPtr` 16) (vkSrcOffset (poked :: VkImageCopy))
*> poke (ptr `plusPtr` 28) (vkDstSubresource (poked :: VkImageCopy))
*> poke (ptr `plusPtr` 44) (vkDstOffset (poked :: VkImageCopy))
*> poke (ptr `plusPtr` 56) (vkExtent (poked :: VkImageCopy))
-- ** vkCmdNextSubpass
foreign import ccall "vkCmdNextSubpass" vkCmdNextSubpass ::
VkCommandBuffer -> VkSubpassContents -> IO ()
-- ** vkCmdEndQuery
foreign import ccall "vkCmdEndQuery" vkCmdEndQuery ::
VkCommandBuffer -> VkQueryPool -> Word32 -> IO ()
-- ** vkCmdSetScissor
foreign import ccall "vkCmdSetScissor" vkCmdSetScissor ::
VkCommandBuffer -> Word32 -> Word32 -> Ptr VkRect2D -> IO ()
-- ** vkCmdSetEvent
foreign import ccall "vkCmdSetEvent" vkCmdSetEvent ::
VkCommandBuffer -> VkEvent -> VkPipelineStageFlags -> IO ()
-- ** vkCmdCopyImageToBuffer
foreign import ccall "vkCmdCopyImageToBuffer" vkCmdCopyImageToBuffer ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
VkBuffer -> Word32 -> Ptr VkBufferImageCopy -> IO ()
-- ** vkCmdDispatchIndirect
foreign import ccall "vkCmdDispatchIndirect" vkCmdDispatchIndirect ::
VkCommandBuffer -> VkBuffer -> VkDeviceSize -> IO ()
-- ** vkCmdBeginQuery
foreign import ccall "vkCmdBeginQuery" vkCmdBeginQuery ::
VkCommandBuffer ->
VkQueryPool -> Word32 -> VkQueryControlFlags -> IO ()
-- ** vkCmdEndRenderPass
foreign import ccall "vkCmdEndRenderPass" vkCmdEndRenderPass ::
VkCommandBuffer -> IO ()
-- ** vkCmdFillBuffer
foreign import ccall "vkCmdFillBuffer" vkCmdFillBuffer ::
VkCommandBuffer ->
VkBuffer -> VkDeviceSize -> VkDeviceSize -> Word32 -> IO ()
data VkClearRect =
VkClearRect{ vkRect :: VkRect2D
, vkBaseArrayLayer :: Word32
, vkLayerCount :: Word32
}
deriving (Eq)
instance Storable VkClearRect where
sizeOf ~_ = 24
alignment ~_ = 4
peek ptr = VkClearRect <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 20)
poke ptr poked = poke (ptr `plusPtr` 0) (vkRect (poked :: VkClearRect))
*> poke (ptr `plusPtr` 16) (vkBaseArrayLayer (poked :: VkClearRect))
*> poke (ptr `plusPtr` 20) (vkLayerCount (poked :: VkClearRect))
-- ** vkCmdWaitEvents
foreign import ccall "vkCmdWaitEvents" vkCmdWaitEvents ::
VkCommandBuffer ->
Word32 ->
Ptr VkEvent ->
VkPipelineStageFlags ->
VkPipelineStageFlags ->
Word32 ->
Ptr VkMemoryBarrier ->
Word32 ->
Ptr VkBufferMemoryBarrier ->
Word32 -> Ptr VkImageMemoryBarrier -> IO ()
-- ** vkCmdClearColorImage
foreign import ccall "vkCmdClearColorImage" vkCmdClearColorImage ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
Ptr VkClearColorValue ->
Word32 -> Ptr VkImageSubresourceRange -> IO ()
-- ** VkIndexType
newtype VkIndexType = VkIndexType Int32
deriving (Eq, Storable)
pattern VK_INDEX_TYPE_UINT16 = VkIndexType 0
pattern VK_INDEX_TYPE_UINT32 = VkIndexType 1
data VkBufferImageCopy =
VkBufferImageCopy{ vkBufferOffset :: VkDeviceSize
, vkBufferRowLength :: Word32
, vkBufferImageHeight :: Word32
, vkImageSubresource :: VkImageSubresourceLayers
, vkImageOffset :: VkOffset3D
, vkImageExtent :: VkExtent3D
}
deriving (Eq)
instance Storable VkBufferImageCopy where
sizeOf ~_ = 56
alignment ~_ = 8
peek ptr = VkBufferImageCopy <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 44)
poke ptr poked = poke (ptr `plusPtr` 0) (vkBufferOffset (poked :: VkBufferImageCopy))
*> poke (ptr `plusPtr` 8) (vkBufferRowLength (poked :: VkBufferImageCopy))
*> poke (ptr `plusPtr` 12) (vkBufferImageHeight (poked :: VkBufferImageCopy))
*> poke (ptr `plusPtr` 16) (vkImageSubresource (poked :: VkBufferImageCopy))
*> poke (ptr `plusPtr` 32) (vkImageOffset (poked :: VkBufferImageCopy))
*> poke (ptr `plusPtr` 44) (vkImageExtent (poked :: VkBufferImageCopy))
-- ** vkCmdSetDepthBounds
foreign import ccall "vkCmdSetDepthBounds" vkCmdSetDepthBounds ::
VkCommandBuffer -> CFloat -> CFloat -> IO ()
-- ** vkCmdCopyBufferToImage
foreign import ccall "vkCmdCopyBufferToImage" vkCmdCopyBufferToImage ::
VkCommandBuffer ->
VkBuffer ->
VkImage ->
VkImageLayout -> Word32 -> Ptr VkBufferImageCopy -> IO ()
-- ** vkCmdDrawIndexedIndirect
foreign import ccall "vkCmdDrawIndexedIndirect" vkCmdDrawIndexedIndirect ::
VkCommandBuffer ->
VkBuffer -> VkDeviceSize -> Word32 -> Word32 -> IO ()
-- ** vkCmdUpdateBuffer
foreign import ccall "vkCmdUpdateBuffer" vkCmdUpdateBuffer ::
VkCommandBuffer ->
VkBuffer -> VkDeviceSize -> VkDeviceSize -> Ptr Word32 -> IO ()
-- ** vkCmdCopyImage
foreign import ccall "vkCmdCopyImage" vkCmdCopyImage ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
VkImage -> VkImageLayout -> Word32 -> Ptr VkImageCopy -> IO ()
-- ** vkCmdWriteTimestamp
foreign import ccall "vkCmdWriteTimestamp" vkCmdWriteTimestamp ::
VkCommandBuffer ->
VkPipelineStageFlagBits -> VkQueryPool -> Word32 -> IO ()
data VkImageSubresourceLayers =
VkImageSubresourceLayers{ vkAspectMask :: VkImageAspectFlags
, vkMipLevel :: Word32
, vkBaseArrayLayer :: Word32
, vkLayerCount :: Word32
}
deriving (Eq)
instance Storable VkImageSubresourceLayers where
sizeOf ~_ = 16
alignment ~_ = 4
peek ptr = VkImageSubresourceLayers <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 12)
poke ptr poked = poke (ptr `plusPtr` 0) (vkAspectMask (poked :: VkImageSubresourceLayers))
*> poke (ptr `plusPtr` 4) (vkMipLevel (poked :: VkImageSubresourceLayers))
*> poke (ptr `plusPtr` 8) (vkBaseArrayLayer (poked :: VkImageSubresourceLayers))
*> poke (ptr `plusPtr` 12) (vkLayerCount (poked :: VkImageSubresourceLayers))
-- ** vkCmdDrawIndexed
foreign import ccall "vkCmdDrawIndexed" vkCmdDrawIndexed ::
VkCommandBuffer ->
Word32 -> Word32 -> Word32 -> Int32 -> Word32 -> IO ()
-- ** vkCmdSetDepthBias
foreign import ccall "vkCmdSetDepthBias" vkCmdSetDepthBias ::
VkCommandBuffer -> CFloat -> CFloat -> CFloat -> IO ()
-- ** vkCmdDrawIndirect
foreign import ccall "vkCmdDrawIndirect" vkCmdDrawIndirect ::
VkCommandBuffer ->
VkBuffer -> VkDeviceSize -> Word32 -> Word32 -> IO ()
data VkClearDepthStencilValue =
VkClearDepthStencilValue{ vkDepth :: CFloat
, vkStencil :: Word32
}
deriving (Eq)
instance Storable VkClearDepthStencilValue where
sizeOf ~_ = 8
alignment ~_ = 4
peek ptr = VkClearDepthStencilValue <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
poke ptr poked = poke (ptr `plusPtr` 0) (vkDepth (poked :: VkClearDepthStencilValue))
*> poke (ptr `plusPtr` 4) (vkStencil (poked :: VkClearDepthStencilValue))
data VkBufferCopy =
VkBufferCopy{ vkSrcOffset :: VkDeviceSize
, vkDstOffset :: VkDeviceSize
, vkSize :: VkDeviceSize
}
deriving (Eq)
instance Storable VkBufferCopy where
sizeOf ~_ = 24
alignment ~_ = 8
peek ptr = VkBufferCopy <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSrcOffset (poked :: VkBufferCopy))
*> poke (ptr `plusPtr` 8) (vkDstOffset (poked :: VkBufferCopy))
*> poke (ptr `plusPtr` 16) (vkSize (poked :: VkBufferCopy))
-- ** vkCmdClearAttachments
foreign import ccall "vkCmdClearAttachments" vkCmdClearAttachments ::
VkCommandBuffer ->
Word32 ->
Ptr VkClearAttachment -> Word32 -> Ptr VkClearRect -> IO ()
-- ** vkCmdSetViewport
foreign import ccall "vkCmdSetViewport" vkCmdSetViewport ::
VkCommandBuffer -> Word32 -> Word32 -> Ptr VkViewport -> IO ()
-- ** vkCmdCopyBuffer
foreign import ccall "vkCmdCopyBuffer" vkCmdCopyBuffer ::
VkCommandBuffer ->
VkBuffer -> VkBuffer -> Word32 -> Ptr VkBufferCopy -> IO ()
-- ** vkCmdBindDescriptorSets
foreign import ccall "vkCmdBindDescriptorSets" vkCmdBindDescriptorSets ::
VkCommandBuffer ->
VkPipelineBindPoint ->
VkPipelineLayout ->
Word32 ->
Word32 -> Ptr VkDescriptorSet -> Word32 -> Ptr Word32 -> IO ()
-- ** vkCmdSetLineWidth
foreign import ccall "vkCmdSetLineWidth" vkCmdSetLineWidth ::
VkCommandBuffer -> CFloat -> IO ()
-- ** vkCmdExecuteCommands
foreign import ccall "vkCmdExecuteCommands" vkCmdExecuteCommands ::
VkCommandBuffer -> Word32 -> Ptr VkCommandBuffer -> IO ()
data VkRenderPassBeginInfo =
VkRenderPassBeginInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkRenderPass :: VkRenderPass
, vkFramebuffer :: VkFramebuffer
, vkRenderArea :: VkRect2D
, vkClearValueCount :: Word32
, vkPClearValues :: Ptr VkClearValue
}
deriving (Eq)
instance Storable VkRenderPassBeginInfo where
sizeOf ~_ = 64
alignment ~_ = 8
peek ptr = VkRenderPassBeginInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 48)
<*> peek (ptr `plusPtr` 56)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 16) (vkRenderPass (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 24) (vkFramebuffer (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 32) (vkRenderArea (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 48) (vkClearValueCount (poked :: VkRenderPassBeginInfo))
*> poke (ptr `plusPtr` 56) (vkPClearValues (poked :: VkRenderPassBeginInfo))
-- ** vkCmdSetStencilCompareMask
foreign import ccall "vkCmdSetStencilCompareMask" vkCmdSetStencilCompareMask ::
VkCommandBuffer -> VkStencilFaceFlags -> Word32 -> IO ()
data VkImageBlit =
VkImageBlit{ vkSrcSubresource :: VkImageSubresourceLayers
, vkSrcOffsets :: Vec (ToPeano 2) VkOffset3D
, vkDstSubresource :: VkImageSubresourceLayers
, vkDstOffsets :: Vec (ToPeano 2) VkOffset3D
}
deriving (Eq)
instance Storable VkImageBlit where
sizeOf ~_ = 80
alignment ~_ = 4
peek ptr = VkImageBlit <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 40)
<*> peek (ptr `plusPtr` 56)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSrcSubresource (poked :: VkImageBlit))
*> poke (ptr `plusPtr` 16) (vkSrcOffsets (poked :: VkImageBlit))
*> poke (ptr `plusPtr` 40) (vkDstSubresource (poked :: VkImageBlit))
*> poke (ptr `plusPtr` 56) (vkDstOffsets (poked :: VkImageBlit))
data VkClearAttachment =
VkClearAttachment{ vkAspectMask :: VkImageAspectFlags
, vkColorAttachment :: Word32
, vkClearValue :: VkClearValue
}
deriving (Eq)
instance Storable VkClearAttachment where
sizeOf ~_ = 24
alignment ~_ = 4
peek ptr = VkClearAttachment <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 4)
<*> peek (ptr `plusPtr` 8)
poke ptr poked = poke (ptr `plusPtr` 0) (vkAspectMask (poked :: VkClearAttachment))
*> poke (ptr `plusPtr` 4) (vkColorAttachment (poked :: VkClearAttachment))
*> poke (ptr `plusPtr` 8) (vkClearValue (poked :: VkClearAttachment))
-- | // Union allowing specification of color or depth and stencil values. Actual value selected is based on attachment being cleared.
data VkClearValue = VkColor VkClearColorValue
| VkDepthStencil VkClearDepthStencilValue
deriving (Eq)
-- | _Note_: peek is undefined as we wouldn't know which constructor to use
instance Storable VkClearValue where
sizeOf ~_ = 16
alignment ~_ = 4
peek ~_ = error "peek@VkClearValue"
poke ptr poked = case poked of
VkColor e -> poke (castPtr ptr) e
VkDepthStencil e -> poke (castPtr ptr) e
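-- Illustrative only (not part of the generated bindings): selecting the
-- depth/stencil arm of the union above, e.g. for a depth-only clear.
-- 'clearDepthOne' is a made-up name.
clearDepthOne :: VkClearValue
clearDepthOne = VkDepthStencil (VkClearDepthStencilValue 1.0 0)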
-- ** VkStencilFaceFlags
newtype VkStencilFaceFlagBits = VkStencilFaceFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkStencilFaceFlagBits
type VkStencilFaceFlags = VkStencilFaceFlagBits
-- | Front face
pattern VK_STENCIL_FACE_FRONT_BIT = VkStencilFaceFlagBits 0x1
-- | Back face
pattern VK_STENCIL_FACE_BACK_BIT = VkStencilFaceFlagBits 0x2
-- | Front and back faces
pattern VK_STENCIL_FRONT_AND_BACK = VkStencilFaceFlagBits 0x3
-- | // Union allowing specification of floating point, integer, or unsigned integer color data. Actual value selected is based on image/attachment being cleared.
data VkClearColorValue = VkFloat (Vec (ToPeano 4) CFloat)
| VkInt (Vec (ToPeano 4) Int32)
| VkUint (Vec (ToPeano 4) Word32)
deriving (Eq)
-- | _Note_: peek is undefined as we wouldn't know which constructor to use
instance Storable VkClearColorValue where
sizeOf ~_ = 16
alignment ~_ = 4
peek ~_ = error "peek@VkClearColorValue"
poke ptr poked = case poked of
VkFloat e -> poke (castPtr ptr) e
VkInt e -> poke (castPtr ptr) e
VkUint e -> poke (castPtr ptr) e
-- ** VkSubpassContents
newtype VkSubpassContents = VkSubpassContents Int32
deriving (Eq, Storable)
pattern VK_SUBPASS_CONTENTS_INLINE = VkSubpassContents 0
pattern VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = VkSubpassContents 1
-- ** vkCmdCopyQueryPoolResults
foreign import ccall "vkCmdCopyQueryPoolResults" vkCmdCopyQueryPoolResults ::
VkCommandBuffer ->
VkQueryPool ->
Word32 ->
Word32 ->
VkBuffer ->
VkDeviceSize -> VkDeviceSize -> VkQueryResultFlags -> IO ()
-- ** vkCmdBlitImage
foreign import ccall "vkCmdBlitImage" vkCmdBlitImage ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
VkImage ->
VkImageLayout -> Word32 -> Ptr VkImageBlit -> VkFilter -> IO ()
-- ** vkCmdSetBlendConstants
foreign import ccall "vkCmdSetBlendConstants" vkCmdSetBlendConstants ::
VkCommandBuffer -> Ptr CFloat -> IO ()
-- ** vkCmdClearDepthStencilImage
foreign import ccall "vkCmdClearDepthStencilImage" vkCmdClearDepthStencilImage ::
VkCommandBuffer ->
VkImage ->
VkImageLayout ->
Ptr VkClearDepthStencilValue ->
Word32 -> Ptr VkImageSubresourceRange -> IO ()
data VkImageResolve =
VkImageResolve{ vkSrcSubresource :: VkImageSubresourceLayers
, vkSrcOffset :: VkOffset3D
, vkDstSubresource :: VkImageSubresourceLayers
, vkDstOffset :: VkOffset3D
, vkExtent :: VkExtent3D
}
deriving (Eq)
instance Storable VkImageResolve where
sizeOf ~_ = 68
alignment ~_ = 4
peek ptr = VkImageResolve <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 28)
<*> peek (ptr `plusPtr` 44)
<*> peek (ptr `plusPtr` 56)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSrcSubresource (poked :: VkImageResolve))
*> poke (ptr `plusPtr` 16) (vkSrcOffset (poked :: VkImageResolve))
*> poke (ptr `plusPtr` 28) (vkDstSubresource (poked :: VkImageResolve))
*> poke (ptr `plusPtr` 44) (vkDstOffset (poked :: VkImageResolve))
*> poke (ptr `plusPtr` 56) (vkExtent (poked :: VkImageResolve))
-- ** vkCmdDispatch
foreign import ccall "vkCmdDispatch" vkCmdDispatch ::
VkCommandBuffer -> Word32 -> Word32 -> Word32 -> IO ()
-- ** vkCmdSetStencilReference
foreign import ccall "vkCmdSetStencilReference" vkCmdSetStencilReference ::
VkCommandBuffer -> VkStencilFaceFlags -> Word32 -> IO ()
-- ** vkCmdPipelineBarrier
foreign import ccall "vkCmdPipelineBarrier" vkCmdPipelineBarrier ::
VkCommandBuffer ->
VkPipelineStageFlags ->
VkPipelineStageFlags ->
VkDependencyFlags ->
Word32 ->
Ptr VkMemoryBarrier ->
Word32 ->
Ptr VkBufferMemoryBarrier ->
Word32 -> Ptr VkImageMemoryBarrier -> IO ()
-- ** vkCmdBeginRenderPass
foreign import ccall "vkCmdBeginRenderPass" vkCmdBeginRenderPass ::
VkCommandBuffer ->
Ptr VkRenderPassBeginInfo -> VkSubpassContents -> IO ()
-- ** vkCmdResetEvent
foreign import ccall "vkCmdResetEvent" vkCmdResetEvent ::
VkCommandBuffer -> VkEvent -> VkPipelineStageFlags -> IO ()
| oldmanmike/vulkan | src/Graphics/Vulkan/CommandBufferBuilding.hs | bsd-3-clause | 23,206 | 0 | 18 | 7,031 | 5,448 | 3,014 | 2,434 | 424 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( getApplicationDev
, appMain
, develMain
, makeFoundation
-- * for DevelMain
, getApplicationRepl
, shutdownApp
-- * for GHCI
, handler
, db
) where
import Control.Monad.Logger (liftLoc, runLoggingT)
import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr,
pgPoolSize, runSqlPool)
import Import
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
defaultShouldDisplayException,
runSettings, setHost,
setOnException, setPort, getPort)
import Network.Wai.Middleware.RequestLogger (Destination (Logger),
IPAddrSource (..),
OutputFormat (..), destination,
mkRequestLogger, outputFormat)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet,
toLogStr)
-- Import all relevant handler modules here.
import Handler.Admin
import Handler.Common
import Handler.Profile
import Handler.Root
import Handler.S3
import Handler.Wiki
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in BISocie.hs. Please see
-- the comments there for more details.
mkYesodDispatch "App" resourcesApp
-- | This function allocates resources (such as a database connection pool),
-- performs initialization and returns a foundation datatype value. This is also
-- the place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
-- Some basic initializations: HTTP connection manager, logger, and static
-- subsite.
appHttpManager <- newManager
appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
appStatic <-
(if appMutableStatic appSettings then staticDevel else static)
(appStaticDir appSettings)
-- We need a log function to create a connection pool. We need a connection
-- pool to create our foundation. And we need our foundation to get a
-- logging function. To get out of this loop, we initially create a
-- temporary foundation without a real connection pool, get a log function
-- from there, and then create the real foundation.
let mkFoundation appConnPool = App {..}
-- The App {..} syntax is an example of record wild cards. For more
-- information, see:
-- https://ocharles.org.uk/blog/posts/2014-12-04-record-wildcards.html
tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
logFunc = messageLoggerSource tempFoundation appLogger
-- Create the database connection pool
pool <- flip runLoggingT logFunc $ createPostgresqlPool
(pgConnStr $ appDatabaseConf appSettings)
(pgPoolSize $ appDatabaseConf appSettings)
-- Perform database migration using our application's logging settings.
runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc
-- Return the foundation
return $ mkFoundation pool
-- | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and
-- applying some additional middlewares.
makeApplication :: App -> IO Application
makeApplication foundation = do
logWare <- mkRequestLogger def
{ outputFormat =
if appDetailedRequestLogging $ appSettings foundation
then Detailed True
else Apache
(if appIpFromHeader $ appSettings foundation
then FromFallback
else FromSocket)
, destination = Logger $ loggerSet $ appLogger foundation
}
-- Create the WAI application and apply middlewares
appPlain <- toWaiAppPlain foundation
return $ logWare $ defaultMiddlewaresNoLogging appPlain
-- | Warp settings for the given foundation value.
warpSettings :: App -> Settings
warpSettings foundation =
setPort (appPort $ appSettings foundation)
$ setHost (appHost $ appSettings foundation)
$ setOnException (\_req e ->
when (defaultShouldDisplayException e) $ messageLoggerSource
foundation
(appLogger foundation)
$(qLocation >>= liftLoc)
"yesod"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
defaultSettings
-- | For yesod devel, return the Warp settings and WAI Application.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
settings <- getAppSettings
foundation <- makeFoundation settings
wsettings <- getDevSettings $ warpSettings foundation
app <- makeApplication foundation
return (wsettings, app)
getAppSettings :: IO AppSettings
getAppSettings = loadAppSettings [configSettingsYml] [] useEnv
-- | main function for use by yesod devel
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | The @main@ function for an executable running this site.
appMain :: IO ()
appMain = do
-- Get the settings from all relevant sources
settings <- loadAppSettingsArgs
-- fall back to compile-time values, set to [] to require values at runtime
[configSettingsYmlValue]
-- allow environment variables to override
useEnv
-- Generate the foundation from the settings
foundation <- makeFoundation settings
-- Generate a WAI Application from the foundation
app <- makeApplication foundation
-- Run the application with Warp
runSettings (warpSettings foundation) app
--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
getApplicationRepl :: IO (Int, App, Application)
getApplicationRepl = do
settings <- getAppSettings
foundation <- makeFoundation settings
wsettings <- getDevSettings $ warpSettings foundation
app1 <- makeApplication foundation
return (getPort wsettings, foundation, app1)
shutdownApp :: App -> IO ()
shutdownApp _ = return ()
---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------
-- | Run a handler
handler :: Handler a -> IO a
handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h
-- | Run DB queries
db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a
db = handler . runDB
| cutsea110/Kestrel | Application.hs | bsd-3-clause | 6,819 | 0 | 16 | 1,755 | 1,031 | 553 | 478 | -1 | -1 |
module LaTeXGrapher.Data.Function
(
Function(..)
, FunctionData(..)
, defineFunction
, FExpression(..)
, EPoint
) where
import Data.List(intersperse)
import LaTeXGrapher.Data.Expression
import LaTeXGrapher.Data.Tabular
data Function = Function
{
name :: String
, functionData :: FunctionData
}
data FunctionData = FunctionData
{
expression :: [FExpression]
, arguments :: [String]
, special :: [Expr]
}
| DefinedFunction
{
function :: [Double] -> Double
}
data FExpression = FExpression Expr | ConditionalFExpression Expr Expr
type EPoint = (Expr,Expr)
defineFunction :: String -> ([Double] -> Double) -> Function
defineFunction n f = Function n (DefinedFunction f)
instance Show Function where
show (Function n (DefinedFunction _)) = "(" ++ n ++ ")"
show (Function n (FunctionData es as _)) = showTabular (rows es)
where d = (n ++ "(" ++ (concat (intersperse ", " as)) ++ ") = ")
rows :: [FExpression] -> [[String]]
rows (v:vs) = h : ts
where h = d : (showFExpression v)
ts = ["" : (showFExpression x) | x <- vs]
instance Show FExpression where
show e = concat (showFExpression e)
showFExpression (FExpression e) = [show e]
showFExpression (ConditionalFExpression e c) = [show e, " | " ++ show c]
| fryguybob/LaTeXGrapher | src/LaTeXGrapher/Data/Function.hs | bsd-3-clause | 1,373 | 0 | 14 | 363 | 471 | 263 | 208 | 35 | 1 |
module Game.Server.Player(
PlayerMapping
, playersCollection
, ServerPlayer
, ServerPlayerExt(..)
, PlayerShoots
) where
import Control.Monad
import Control.Monad.IO.Class
import Data.Align
import Data.Map.Strict (Map)
import Data.Monoid
import Data.These
import Linear
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Game.GoreAndAsh
import Game.GoreAndAsh.Logging
import Game.GoreAndAsh.Network
import Game.GoreAndAsh.Sync
import Game.GoreAndAsh.Time
import Game.Bullet
import Game.Monad
import Game.Player
-- | Contains mappings between player ids, peers and player payload
type PlayerMapping = (Map PlayerId ServerPlayer, Map AppPeer PlayerId)
-- | Requests from players to create bullets
type PlayerShoots t = Event t (Map PlayerId CreateBullet)
-- | Shared players collection
playersCollection :: forall t m . MonadApp t m
  => Event t (Map PlayerId PlayerId) -- ^ Fires when a bullet hit occurs (killed => killer)
-> m (Dynamic t PlayerMapping, PlayerShoots t)
playersCollection hitsE = do
-- we need a player counter to generate ids
playerCounterRef <- newExternalRef (0 :: Int)
playerCounter <- externalRefDynamic playerCounterRef
  -- retrieve necessary events
connPeerE <- peerConnected
discPeerE <- peerDisconnected
rec
    -- Let's calculate update actions for the players collection
    -- updE :: Event t (Map PlayerId (Maybe AppPeer))
updE <- performEvent $ ffor (align connPeerE discPeerE) $ \e -> do
i <- sample . current $ playerCounter
case e of
This connPeer -> do
modifyExternalRef playerCounterRef $ \v -> (v+1, ())
return $ M.singleton (PlayerId i) (Just connPeer)
That discPeer -> do
players <- sample . current $ fmap snd playersMappingDyn
return $ case M.lookup discPeer players of
Nothing -> mempty
Just i' -> M.singleton i' Nothing
These connPeer discPeer -> if connPeer == discPeer then return mempty
else do
modifyExternalRef playerCounterRef $ \v -> (v+1, ())
players <- sample . current $ fmap snd playersMappingDyn
let
delMap :: Map PlayerId (Maybe AppPeer)
delMap = case M.lookup discPeer players of
Nothing -> mempty
Just i' -> M.singleton i' Nothing
return $ M.insert (PlayerId i) (Just connPeer) delMap
-- collection primitive, note recursive dependency
colorRoller <- makeColorRoller
let playerWrapper i = player colorRoller (mkHitE i) (mkKills i) i
peers <- networkPeers
let filterPeer _ _ = () -- Don't send peer to clients
collReses <- hostCollection playerCollectionId peers mempty updE filterPeer playerWrapper
let playersMapDyn = joinDynThroughMap $ fmap fst <$> collReses
shootsEvents = switch . current $ mergeMap . fmap snd <$> collReses
-- post processing to get peer-id map
let playersMappingDyn :: Dynamic t PlayerMapping
playersMappingDyn = do
playersMap <- playersMapDyn
let elems = M.toList $ playerPeer . playerCustom <$> playersMap
peersMap = M.fromList . fmap (\(i, p) -> (p, i)) $ elems
return (playersMap, peersMap)
return (playersMappingDyn, shootsEvents)
where
    -- Construct event that a particular player is hit by a bullet
mkHitE i = fforMaybe hitsE $ \m -> if M.member i m then Just () else Nothing
    -- Construct event that a particular player killed another player
mkKills i = fforMaybe hitsE $ \m -> let n = length . filter (== i) . M.elems $ m
in if n > 0 then Just n else Nothing
-- | Extension of shared player with server private data
type ServerPlayer = Player ServerPlayerExt
-- | Player server private data
data ServerPlayerExt = ServerPlayerExt {
playerPeer :: AppPeer
} deriving (Show)
-- | Player component
player :: forall t m . MonadApp t m
=> ItemRoller t (V3 Double) -- ^ Roller of colors
-> Event t () -- ^ Hit event from bullet
-> Event t Int -- ^ Event about count of murders of another players
-> PlayerId -- ^ Player ID that is simulated
-> AppPeer -- ^ Player peer
-- | Returns dynamic player and event when user requests bullet creation
-> m (Dynamic t ServerPlayer, Event t CreateBullet)
player colorRoller hitE killsE i peer = do
-- Initialisation
buildE <- getPostBuild
logInfoE $ ffor buildE $ const $ "Player " <> showl i <> " is spawned!"
_ <- performEvent_ $ ffor buildE $ const $ liftIO $ snd colorRoller
  -- Local simulation of player (creation and score accumulation)
c <- sample . current $ fst colorRoller
playerDyn <- simulatePlayer $ initialPlayer c
-- Sync player with clients
playerDyn' <- syncPlayer playerDyn
-- Process commands for client-server communication
let yourIdMsgE = ffor buildE $ const [YourPlayerId i]
let commandsE = yourIdMsgE
shootE <- syncCommands commandsE playerDyn'
-- Print and return state and control events
-- printPlayer i playerDyn'
return (playerDyn', shootE)
where
initialPlayer c = Player {
playerPos = initialPosition
, playerColor = c
, playerSpeed = 50
, playerSize = 5
, playerScore = 0
, playerCustom = ServerPlayerExt {
playerPeer = peer
}
}
initialPosition = V2 0 0
-- | Reactimate player with local server logic.
simulatePlayer :: ServerPlayer -> m (Dynamic t ServerPlayer)
simulatePlayer Player{..} = do
score <- collectPlayerScore
return $ Player
<$> pure playerPos
<*> pure playerColor
<*> pure playerSpeed
<*> pure playerSize
<*> score
<*> pure playerCustom
-- | Collect events about player murders
collectPlayerScore :: m (Dynamic t Int)
collectPlayerScore = foldDyn (+) 0 killsE
-- synchronisation of state
syncPlayer :: Dynamic t ServerPlayer -> m (Dynamic t ServerPlayer)
syncPlayer pdyn = fmap join $ syncWithName (show i) pdyn $ do
allPeers <- networkPeers
let otherPeers = S.delete peer <$> allPeers
posDyn <- syncPosition $ playerSpeed <$> pdyn
      -- unreliable fast sync
posDyn' <- alignWithFps 60 posDyn
_ <- syncToClients otherPeers playerPosId ReliableMessage posDyn'
-- reliable control
-- tickE <- tickEvery (realToFrac (1 :: Double))
-- _ <- syncToClientsManual otherPeers playerPosId ReliableMessage posDyn tickE
-- other rare-changing fields
_ <- syncToClients allPeers playerColorId ReliableMessage $ playerColor <$> pdyn
_ <- syncToClients allPeers playerSpeedId ReliableMessage $ playerSpeed <$> pdyn
_ <- syncToClients allPeers playerSizeId ReliableMessage $ playerSize <$> pdyn
_ <- syncToClients allPeers playerScoreId ReliableMessage $ playerScore <$> pdyn
return $ do
pos <- posDyn
p <- pdyn
return $ p { playerPos = pos }
    -- Synchronise the position from the client, rejecting updates when the player moves too fast
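    -- A rough illustration (hypothetical numbers, assuming the defaults used in
    -- this module: dt = 0.5 s, epsylon = 0.1 and playerSpeed = 50 units/s): the
    -- permitted speed is 50 * sqrt 2 * 1.1 ~ 77.8 units/s, so covering more
    -- than roughly 39 units within one 0.5 s window rolls the position back.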
syncPosition :: Dynamic t Double -> m (Dynamic t (V2 Double))
syncPosition spdDyn = do
      let dt = 0.5 :: Double -- ^ Interval between cheat checks
epsylon = 0.1 :: Double -- ^ Accuracy of checking
rec
oldPosDyn <- lookPast (realToFrac dt) initialPosition rejectE posDyn
let posRejectE = flip push (updated oldPosDyn) $ \oldPos -> do
pos <- sample . current $ posDyn
spd <- sample . current $ spdDyn
let absSpeed = norm (pos - oldPos) / dt
expectedSpeed = spd * sqrt 2 * (1 + epsylon) -- diagonal movement
return $ if absSpeed > expectedSpeed
then Just oldPos
else Nothing
respawnE = ffor hitE $ const initialPosition
rejectE = leftmost [respawnE, posRejectE]
(posDyn, _) <- syncFromClient playerPosId (return $ pure 0) rejectE peer
return posDyn
-- process network messages for player
syncCommands commandsE playerDyn = do
-- listen requests for id
msgE <- receiveFromClient playerCommandId peer
let respE = fforMaybe msgE $ \case
RequestPlayerId -> Just [YourPlayerId i]
_ -> Nothing
shootE = flip push msgE $ \case
PlayerShoot v -> do
Player{..} <- sample . current $ playerDyn
let dpos = (realToFrac $ playerSize * 1.5) * normalize v
return $ Just CreateBullet {
createBulletPos = playerPos + dpos
, createBulletDir = v
, createBulletPlayer = i
, createBulletVel = 100
}
_ -> return Nothing
-- send commands/responses to peer
_ <- sendToClientMany playerCommandId ReliableMessage (commandsE <> respE) peer
limitRate playerShootRatio shootE
-- | Create item roller for player colors
makeColorRoller :: MonadGame t m => m (ItemRoller t (V3 Double))
makeColorRoller = itemRoller $ NE.fromList [
V3 1 0 0
, V3 0 1 0
, V3 0 0 1
, V3 1 1 0
, V3 1 0 1
, V3 0 1 1
]
| Teaspot-Studio/gore-and-ash-demo | src/server/Game/Server/Player.hs | bsd-3-clause | 9,158 | 0 | 27 | 2,447 | 2,341 | 1,182 | 1,159 | -1 | -1 |
module Lib
( parseInput
, DateNumOrDate
, dateNumOrDateToString
, dateToDateNum
, dateNumToDate
) where
import Text.ParserCombinators.Parsec hiding ((<|>))
import Control.Applicative
import Data.Time
type DateNum = Integer
type Date = Day
data DateNumOrDate = DateNum DateNum | Date Date
cwEpoch :: Date
cwEpoch = fromGregorian 1999 12 31
dateNumToDate :: DateNum -> Date
dateNumToDate dateNum = addDays dateNum cwEpoch
dateToDateNum :: Date -> DateNum
dateToDateNum date = diffDays date cwEpoch
dateNumOrDateToString :: DateNumOrDate -> String
dateNumOrDateToString (DateNum datenum) = showGregorian $ dateNumToDate datenum
dateNumOrDateToString (Date date) = show $ dateToDateNum date
parseInput :: String -> Maybe DateNumOrDate
parseInput inp = case parse datenumOrDate "" inp of
    Left _ -> Nothing
Right parsed -> Just parsed
{- Parsers
-
- Grammar for valid inputs (along with parser types):
-
- datenumOrDate = datenum | date :: DateNumOrDate
- datenum = -? digit+ eof :: Integer
- date = mdy | ymd :: Date (mdy before ymd, in order to make 01-01-01 be mdy)
- mdy = month sep day sep year eof :: Date
- ymd = year sep month sep day eof :: Date
- month = shortint :: Int
- day = shortint :: Int
- year = digit digit digit digit | digit digit :: Integer (Integer because that's what fromGregorian requires)
- shortint = digit digit | digit :: Int
- sep = [ /-]+ :: ()
-}
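-- Illustrative round trips (not part of the original module's tests; the
-- values follow from cwEpoch = 1999-12-31 and the parsers below):
--
-- >>> dateNumOrDateToString <$> parseInput "1"
-- Just "2000-01-01"
--
-- >>> dateNumOrDateToString <$> parseInput "2000-01-01"
-- Just "1"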
datenumOrDate = try (DateNum <$> datenum) <|> (Date <$> date) :: Parser DateNumOrDate
datenum = read <$> ((++) <$> option "" (string "-") <*> many1 digit) <* eof :: Parser Integer
date = try mdy <|> ymd :: Parser Date
mdy = (\m d y -> fromGregorian y m d) <$> month <* sep <*> day <* sep <*> year <* eof :: Parser Date
ymd = fromGregorian <$> year <* sep <*> month <* sep <*> day <* eof :: Parser Date
month = shortint :: Parser Int
day = shortint :: Parser Int
year = read <$> (try (count 4 digit) <|> ("20" ++) <$> count 2 digit) :: Parser Integer
shortint = read <$> (try (count 2 digit) <|> count 1 digit) :: Parser Int
sep = skipMany1 $ oneOf " /-" :: Parser ()
| mhweaver/daynum | src/Lib.hs | bsd-3-clause | 2,773 | 0 | 13 | 1,096 | 547 | 288 | 259 | 35 | 2 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Data.Typewriter.Variadic.Flip where
import Data.Typewriter.Core
-- TODO: this is really ad-hoc, need something more sensible here
type family Swap t :: *
type instance Swap (f (g x)) = g (f x)
class SwapF f g where
swap :: f (g x) -> Swap (f (g x))
instance SwapF ((->) a) ((->) b) where
swap = flip
instance SwapF ((,) a) ((,) b) where
swap (x, (y, z)) = (y, (x, z))
instance SwapF ((,) a) ((->) b) where
swap (x, f) = \y -> (x, f y)
instance SwapF ((,) a) Maybe where
swap (x, m) = fmap ((,) x) m
-- | Rotates the first N+1 arguments of a function. @rotate Z@ is id,
-- @rotate (S Z)@ is @flip@, @rotate (S (S Z))@ is @flip . (flip .)@, &c.
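--
-- An illustrative use (assuming 'Zero' and 'Succ' come from
-- "Data.Typewriter.Core"): @rotate (Succ Zero) (-) 1 5@ reduces to
-- @flip (-) 1 5@, i.e. @4@.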
class RotateN n t where
type Rotate n t :: *
rotate :: n -> t -> Rotate n t
instance RotateN Z t where
type Rotate Z t = t
rotate Zero x = x
instance ( RotateN n t, (g t') ~ Rotate n t
, SwapF f g, Functor f
) => RotateN (S n) (f t) where
type Rotate (S n) (f t) = Swap (f (Rotate n t))
rotate (Succ n) x = swap $ fmap (rotate n) x
| isomorphism/typewriter | Data/Typewriter/Variadic/Flip.hs | bsd-3-clause | 1,199 | 0 | 12 | 307 | 506 | 273 | 233 | 29 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module WebServerApi (getSumAction) where
import Web.Scotty
import Data.Monoid (mconcat)
import Control.Monad.Trans (liftIO)
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text as Text
import Database.PostgreSQL.Simple
import DatabaseApi
getSumAction :: Connection -> ActionM ()
getSumAction connection = do
beam <- param "word"
liftIO $ putStrLn "hello"
number <- liftIO $ fmap (LazyText.pack . show) $ getSumFromDb connection
html $ mconcat ["<h1>Scotty, ", beam, " And a number ", number, " me up!</h1>"]
| jorgen/scotty-postgres | src/WebServerApi.hs | bsd-3-clause | 583 | 0 | 13 | 96 | 159 | 89 | 70 | 15 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, CApiFFI #-}
{-# OPTIONS_HADDOCK hide #-}
#ifdef ghcjs_HOST_OS
{-# LANGUAGE JavaScriptFFI #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : System.Posix.Internals
-- Copyright : (c) The University of Glasgow, 1992-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (requires POSIX)
--
-- POSIX support layer for the standard libraries.
-- This library is built on *every* platform, including Win32.
--
-- Non-posix compliant in order to support the following features:
-- * S_ISSOCK (no sockets in POSIX)
--
-----------------------------------------------------------------------------
module System.Posix.Internals where
#include "HsBaseConfig.h"
import System.Posix.Types
import Foreign
import Foreign.C
-- import Data.Bits
import Data.Maybe
#if !defined(HTYPE_TCFLAG_T)
import System.IO.Error
#endif
import GHC.Base
import GHC.Num
import GHC.Real
import GHC.IO
import GHC.IO.IOMode
import GHC.IO.Exception
import GHC.IO.Device
#ifndef mingw32_HOST_OS
import {-# SOURCE #-} GHC.IO.Encoding (getFileSystemEncoding)
import qualified GHC.Foreign as GHC
#endif
-- ---------------------------------------------------------------------------
-- Debugging the base package
puts :: String -> IO ()
puts s = withCAStringLen (s ++ "\n") $ \(p, len) -> do
-- In reality should be withCString, but assume ASCII to avoid loop
-- if this is called by GHC.Foreign
_ <- c_write 1 (castPtr p) (fromIntegral len)
return ()
-- ---------------------------------------------------------------------------
-- Types
data {-# CTYPE "struct flock" #-} CFLock
data {-# CTYPE "struct group" #-} CGroup
data {-# CTYPE "struct lconv" #-} CLconv
data {-# CTYPE "struct passwd" #-} CPasswd
data {-# CTYPE "struct sigaction" #-} CSigaction
data {-# CTYPE "sigset_t" #-} CSigset
data {-# CTYPE "struct stat" #-} CStat
data {-# CTYPE "struct termios" #-} CTermios
data {-# CTYPE "struct tm" #-} CTm
data {-# CTYPE "struct tms" #-} CTms
data {-# CTYPE "struct utimbuf" #-} CUtimbuf
data {-# CTYPE "struct utsname" #-} CUtsname
type FD = CInt
-- ---------------------------------------------------------------------------
-- stat()-related stuff
fdFileSize :: FD -> IO Integer
fdFileSize fd =
allocaBytes sizeof_stat $ \ p_stat -> do
throwErrnoIfMinus1Retry_ "fileSize" $
c_fstat fd p_stat
c_mode <- st_mode p_stat :: IO CMode
if not (s_isreg c_mode)
then return (-1)
else do
c_size <- st_size p_stat
return (fromIntegral c_size)
fileType :: FilePath -> IO IODeviceType
fileType file =
allocaBytes sizeof_stat $ \ p_stat -> do
withFilePath file $ \p_file -> do
throwErrnoIfMinus1Retry_ "fileType" $
c_stat p_file p_stat
statGetType p_stat
-- NOTE: On Win32 platforms, this will only work with file descriptors
-- referring to file handles. i.e., it'll fail for socket FDs.
fdStat :: FD -> IO (IODeviceType, CDev, CIno)
fdStat fd =
allocaBytes sizeof_stat $ \ p_stat -> do
throwErrnoIfMinus1Retry_ "fdType" $
c_fstat fd p_stat
ty <- statGetType p_stat
dev <- st_dev p_stat
ino <- st_ino p_stat
return (ty,dev,ino)
fdType :: FD -> IO IODeviceType
fdType fd = do (ty,_,_) <- fdStat fd; return ty
statGetType :: Ptr CStat -> IO IODeviceType
statGetType p_stat = do
c_mode <- st_mode p_stat :: IO CMode
case () of
_ | s_isdir c_mode -> return Directory
| s_isfifo c_mode || s_issock c_mode || s_ischr c_mode
-> return Stream
| s_isreg c_mode -> return RegularFile
-- Q: map char devices to RawDevice too?
| s_isblk c_mode -> return RawDevice
| otherwise -> ioError ioe_unknownfiletype
ioe_unknownfiletype :: IOException
ioe_unknownfiletype = IOError Nothing UnsupportedOperation "fdType"
"unknown file type"
Nothing
Nothing
fdGetMode :: FD -> IO IOMode
#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS)
fdGetMode _ = do
-- We don't have a way of finding out which flags are set on FDs
-- on Windows, so make a handle that thinks that anything goes.
let flags = o_RDWR
#else
fdGetMode fd = do
flags <- throwErrnoIfMinus1Retry "fdGetMode"
(c_fcntl_read fd const_f_getfl)
#endif
let
wH = (flags .&. o_WRONLY) /= 0
aH = (flags .&. o_APPEND) /= 0
rwH = (flags .&. o_RDWR) /= 0
mode
| wH && aH = AppendMode
| wH = WriteMode
| rwH = ReadWriteMode
| otherwise = ReadMode
return mode
#ifdef mingw32_HOST_OS
withFilePath :: FilePath -> (CWString -> IO a) -> IO a
withFilePath = withCWString
newFilePath :: FilePath -> IO CWString
newFilePath = newCWString
peekFilePath :: CWString -> IO FilePath
peekFilePath = peekCWString
#else
withFilePath :: FilePath -> (CString -> IO a) -> IO a
newFilePath :: FilePath -> IO CString
peekFilePath :: CString -> IO FilePath
peekFilePathLen :: CStringLen -> IO FilePath
withFilePath fp f = getFileSystemEncoding >>= \enc -> GHC.withCString enc fp f
newFilePath fp = getFileSystemEncoding >>= \enc -> GHC.newCString enc fp
peekFilePath fp = getFileSystemEncoding >>= \enc -> GHC.peekCString enc fp
peekFilePathLen fp = getFileSystemEncoding >>= \enc -> GHC.peekCStringLen enc fp
#endif
-- ---------------------------------------------------------------------------
-- Terminal-related stuff
#if defined(HTYPE_TCFLAG_T)
setEcho :: FD -> Bool -> IO ()
setEcho fd on = do
tcSetAttr fd $ \ p_tios -> do
lflag <- c_lflag p_tios :: IO CTcflag
let new_lflag
| on = lflag .|. fromIntegral const_echo
| otherwise = lflag .&. complement (fromIntegral const_echo)
poke_c_lflag p_tios (new_lflag :: CTcflag)
getEcho :: FD -> IO Bool
getEcho fd = do
tcSetAttr fd $ \ p_tios -> do
lflag <- c_lflag p_tios :: IO CTcflag
return ((lflag .&. fromIntegral const_echo) /= 0)
setCooked :: FD -> Bool -> IO ()
setCooked fd cooked =
tcSetAttr fd $ \ p_tios -> do
-- turn on/off ICANON
lflag <- c_lflag p_tios :: IO CTcflag
let new_lflag | cooked = lflag .|. (fromIntegral const_icanon)
| otherwise = lflag .&. complement (fromIntegral const_icanon)
poke_c_lflag p_tios (new_lflag :: CTcflag)
-- set VMIN & VTIME to 1/0 respectively
when (not cooked) $ do
c_cc <- ptr_c_cc p_tios
let vmin = (c_cc `plusPtr` (fromIntegral const_vmin)) :: Ptr Word8
vtime = (c_cc `plusPtr` (fromIntegral const_vtime)) :: Ptr Word8
poke vmin 1
poke vtime 0
tcSetAttr :: FD -> (Ptr CTermios -> IO a) -> IO a
tcSetAttr fd fun = do
allocaBytes sizeof_termios $ \p_tios -> do
throwErrnoIfMinus1Retry_ "tcSetAttr"
(c_tcgetattr fd p_tios)
-- Save a copy of termios, if this is a standard file descriptor.
-- These terminal settings are restored in hs_exit().
when (fd <= 2) $ do
p <- get_saved_termios fd
when (p == nullPtr) $ do
saved_tios <- mallocBytes sizeof_termios
copyBytes saved_tios p_tios sizeof_termios
set_saved_termios fd saved_tios
-- tcsetattr() when invoked by a background process causes the process
-- to be sent SIGTTOU regardless of whether the process has TOSTOP set
-- in its terminal flags (try it...). This function provides a
-- wrapper which temporarily blocks SIGTTOU around the call, making it
-- transparent.
allocaBytes sizeof_sigset_t $ \ p_sigset -> do
allocaBytes sizeof_sigset_t $ \ p_old_sigset -> do
throwErrnoIfMinus1_ "sigemptyset" $
c_sigemptyset p_sigset
throwErrnoIfMinus1_ "sigaddset" $
c_sigaddset p_sigset const_sigttou
throwErrnoIfMinus1_ "sigprocmask" $
c_sigprocmask const_sig_block p_sigset p_old_sigset
r <- fun p_tios -- do the business
throwErrnoIfMinus1Retry_ "tcSetAttr" $
c_tcsetattr fd const_tcsanow p_tios
throwErrnoIfMinus1_ "sigprocmask" $
c_sigprocmask const_sig_setmask p_old_sigset nullPtr
return r
foreign import ccall unsafe "HsBase.h __hscore_get_saved_termios"
get_saved_termios :: CInt -> IO (Ptr CTermios)
foreign import ccall unsafe "HsBase.h __hscore_set_saved_termios"
set_saved_termios :: CInt -> (Ptr CTermios) -> IO ()
#else
-- 'raw' mode for Win32 means turn off 'line input' (=> buffering and
-- character translation for the console.) The Win32 API for doing
-- this is GetConsoleMode(), which also requires echoing to be disabled
-- when turning off 'line input' processing. Notice that turning off
-- 'line input' implies enter/return is reported as '\r' (and it won't
-- report that character until another character is input..odd.) This
-- latter feature doesn't sit too well with IO actions like IO.hGetLine..
-- consider yourself warned.
setCooked :: FD -> Bool -> IO ()
setCooked fd cooked = do
x <- set_console_buffering fd (if cooked then 1 else 0)
if (x /= 0)
then ioError (ioe_unk_error "setCooked" "failed to set buffering")
else return ()
ioe_unk_error :: String -> String -> IOException
ioe_unk_error loc msg
= ioeSetErrorString (mkIOError OtherError loc Nothing Nothing) msg
-- Note: echoing goes hand in hand with enabling 'line input' / raw-ness
-- for Win32 consoles, hence setEcho ends up being the inverse of setCooked.
setEcho :: FD -> Bool -> IO ()
setEcho fd on = do
x <- set_console_echo fd (if on then 1 else 0)
if (x /= 0)
then ioError (ioe_unk_error "setEcho" "failed to set echoing")
else return ()
getEcho :: FD -> IO Bool
getEcho fd = do
r <- get_console_echo fd
if (r == (-1))
then ioError (ioe_unk_error "getEcho" "failed to get echoing")
else return (r == 1)
foreign import ccall unsafe "consUtils.h set_console_buffering__"
set_console_buffering :: CInt -> CInt -> IO CInt
foreign import ccall unsafe "consUtils.h set_console_echo__"
set_console_echo :: CInt -> CInt -> IO CInt
foreign import ccall unsafe "consUtils.h get_console_echo__"
get_console_echo :: CInt -> IO CInt
foreign import ccall unsafe "consUtils.h is_console__"
is_console :: CInt -> IO CInt
#endif
-- ---------------------------------------------------------------------------
-- Turning on non-blocking for a file descriptor
setNonBlockingFD :: FD -> Bool -> IO ()
#if !defined(mingw32_HOST_OS) && !defined(ghcjs_HOST_OS)
setNonBlockingFD fd set = do
flags <- throwErrnoIfMinus1Retry "setNonBlockingFD"
(c_fcntl_read fd const_f_getfl)
let flags' | set = flags .|. o_NONBLOCK
| otherwise = flags .&. complement o_NONBLOCK
when (flags /= flags') $ do
-- An error when setting O_NONBLOCK isn't fatal: on some systems
-- there are certain file handles on which this will fail (eg. /dev/null
-- on FreeBSD) so we throw away the return code from fcntl_write.
_ <- c_fcntl_write fd const_f_setfl (fromIntegral flags')
return ()
#else
-- bogus defns for win32
setNonBlockingFD _ _ = return ()
#endif
-- -----------------------------------------------------------------------------
-- Set close-on-exec for a file descriptor
#if !defined(mingw32_HOST_OS) && !defined(ghcjs_HOST_OS)
setCloseOnExec :: FD -> IO ()
setCloseOnExec fd = do
throwErrnoIfMinus1_ "setCloseOnExec" $
c_fcntl_write fd const_f_setfd const_fd_cloexec
#endif
-- -----------------------------------------------------------------------------
-- foreign imports
#if !defined(mingw32_HOST_OS)
type CFilePath = CString
#else
type CFilePath = CWString
#endif
#if !defined(ghcjs_HOST_OS)
foreign import ccall unsafe "HsBase.h __hscore_open"
c_open :: CFilePath -> CInt -> CMode -> IO CInt
foreign import ccall safe "HsBase.h __hscore_open"
c_safe_open :: CFilePath -> CInt -> CMode -> IO CInt
foreign import ccall unsafe "HsBase.h __hscore_fstat"
c_fstat :: CInt -> Ptr CStat -> IO CInt
foreign import ccall unsafe "HsBase.h __hscore_lstat"
lstat :: CFilePath -> Ptr CStat -> IO CInt
#endif
{- Note: Win32 POSIX functions
Functions that are not part of the POSIX standards were
at some point deprecated by Microsoft. This deprecation
was performed by renaming the functions according to the
C++ ABI Section 17.6.4.3.2b. This was done to free up the
namespace of normal Windows programs since Windows isn't
POSIX compliant anyway.
These were working before since the RTS was re-exporting
these symbols under the undeprecated names. This is no longer
being done. See #11223
See https://msdn.microsoft.com/en-us/library/ms235384.aspx
for more.
However since we can't hope to get people to support Windows
packages we should support the deprecated names. See #12497
-}
#ifdef ghcjs_HOST_OS
foreign import javascript interruptible "h$base_access($1_1,$1_2,$2,$c);"
c_access :: CString -> CInt -> IO CInt
foreign import javascript interruptible "h$base_chmod($1_1,$1_2,$2,$c);"
c_chmod :: CString -> CMode -> IO CInt
foreign import javascript interruptible "h$base_close($1,$c);"
c_close :: CInt -> IO CInt
foreign import javascript interruptible "h$base_creat($1,$c);"
c_creat :: CString -> CMode -> IO CInt
foreign import javascript interruptible "h$base_dup($1,$c);"
c_dup :: CInt -> IO CInt
foreign import javascript interruptible "h$base_dup2($1,$2,$c);"
c_dup2 :: CInt -> CInt -> IO CInt
foreign import javascript interruptible "h$base_fstat($1,$2_1,$2_2,$c);" -- fixme wrong type
c_fstat :: CInt -> Ptr CStat -> IO CInt
foreign import javascript unsafe "$r = h$base_isatty($1);"
c_isatty :: CInt -> IO CInt
foreign import javascript interruptible "h$base_lseek($1,$2_1,$2_2,$3,$c);"
c_lseek :: CInt -> Int64 -> CInt -> IO Int64
foreign import javascript interruptible "h$base_lstat($1_1,$1_2,$2_1,$2_2,$c);" -- fixme wrong type
lstat :: CFilePath -> Ptr CStat -> IO CInt
foreign import javascript interruptible "h$base_open($1_1,$1_2,$2,$3,$c);"
c_open :: CFilePath -> CInt -> CMode -> IO CInt
foreign import javascript interruptible "h$base_open($1_1,$1_2,$2,$3,$c);"
c_safe_open :: CFilePath -> CInt -> CMode -> IO CInt
foreign import javascript interruptible "h$base_read($1,$2_1,$2_2,$3,$c);"
c_read :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import javascript interruptible "h$base_read($1,$2_1,$2_2,$3,$c);"
c_safe_read :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import javascript interruptible "h$base_stat($1_1,$1_2,$2_1,$2_2,$c);" -- fixme wrong type
c_stat :: CFilePath -> Ptr CStat -> IO CInt
foreign import javascript unsafe "$r = h$base_umask($1);"
c_umask :: CMode -> IO CMode
foreign import javascript interruptible "h$base_write($1,$2_1,$2_2,$3,$c);"
c_write :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import javascript interruptible "h$base_write($1,$2_1,$2_2,$3,$c);"
c_safe_write :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import javascript interruptible "h$base_ftruncate($1,$2_1,$2_2,$c);" -- fixme COff
c_ftruncate :: CInt -> Int64 -> IO CInt
foreign import javascript interruptible "h$base_unlink($1_1,$1_2,$c);"
c_unlink :: CString -> IO CInt
foreign import javascript unsafe "$r = h$base_getpid();"
c_getpid :: IO CPid
-- foreign import ccall unsafe "HsBase.h fork"
-- c_fork :: IO CPid
foreign import javascript interruptible "h$base_link($1_1,$1_2,$2_1,$2_2,$c);"
c_link :: CString -> CString -> IO CInt
foreign import javascript interruptible "h$base_mkfifo($1_1,$1_2,$2,$c);"
c_mkfifo :: CString -> CMode -> IO CInt
-- foreign import javascript interruptible "h$base_pipe($1_1,$1_2,$c);"
-- c_pipe :: Ptr CInt -> IO CInt
foreign import javascript unsafe "$r = h$base_sigemptyset($1_1,$1_2);"
c_sigemptyset :: Ptr CSigset -> IO CInt
foreign import javascript unsafe "$r = h$base_sigaddset($1_1,$1_2,$2);"
c_sigaddset :: Ptr CSigset -> CInt -> IO CInt
foreign import javascript unsafe "$r = h$base_sigprocmask($1,$2_1,$2_2,$3_1,$3_2);"
c_sigprocmask :: CInt -> Ptr CSigset -> Ptr CSigset -> IO CInt
foreign import javascript unsafe "$r = h$base_tcgetattr($1,$2_1,$2_2);"
c_tcgetattr :: CInt -> Ptr CTermios -> IO CInt
foreign import javascript unsafe "$r = h$base_tcsetattr($1,$2,$3_1,$3_2);"
c_tcsetattr :: CInt -> CInt -> Ptr CTermios -> IO CInt
foreign import javascript unsafe "$r = h$base_utime($1_1,$1_2,$2_1,$2_2);" -- should this be async?
c_utime :: CString -> Ptr CUtimbuf -> IO CInt
foreign import javascript interruptible "h$base_waitpid($1,$2_1,$2_2,$3,$c);"
c_waitpid :: CPid -> Ptr CInt -> CInt -> IO CPid
foreign import javascript unsafe "$r = h$base_o_rdonly;" o_RDONLY :: CInt
foreign import javascript unsafe "$r = h$base_o_wronly;" o_WRONLY :: CInt
foreign import javascript unsafe "$r = h$base_o_rdwr;" o_RDWR :: CInt
foreign import javascript unsafe "$r = h$base_o_append;" o_APPEND :: CInt
foreign import javascript unsafe "$r = h$base_o_creat;" o_CREAT :: CInt
foreign import javascript unsafe "$r = h$base_o_excl;" o_EXCL :: CInt
foreign import javascript unsafe "$r = h$base_o_trunc;" o_TRUNC :: CInt
foreign import javascript unsafe "$r = h$base_o_noctty;" o_NOCTTY :: CInt
foreign import javascript unsafe "$r = h$base_o_nonblock;" o_NONBLOCK :: CInt
foreign import javascript unsafe "$r = h$base_o_binary;" o_BINARY :: CInt
foreign import javascript unsafe "$r = h$base_c_s_isreg($1);" c_s_isreg :: CMode -> CInt
foreign import javascript unsafe "$r = h$base_c_s_ischr($1);" c_s_ischr :: CMode -> CInt
foreign import javascript unsafe "$r = h$base_c_s_isblk($1);" c_s_isblk :: CMode -> CInt
foreign import javascript unsafe "$r = h$base_c_s_isdir($1);" c_s_isdir :: CMode -> CInt
foreign import javascript unsafe "$r = h$base_c_s_isfifo($1);" c_s_isfifo :: CMode -> CInt
s_isreg :: CMode -> Bool
s_isreg cm = c_s_isreg cm /= 0
s_ischr :: CMode -> Bool
s_ischr cm = c_s_ischr cm /= 0
s_isblk :: CMode -> Bool
s_isblk cm = c_s_isblk cm /= 0
s_isdir :: CMode -> Bool
s_isdir cm = c_s_isdir cm /= 0
s_isfifo :: CMode -> Bool
s_isfifo cm = c_s_isfifo cm /= 0
foreign import javascript unsafe "$r = h$base_sizeof_stat;" sizeof_stat :: Int
foreign import javascript unsafe "h$base_st_mtime($1_1,$1_2)" st_mtime :: Ptr CStat -> IO CTime
foreign import javascript unsafe "$r1 = h$base_st_size($1_1,$1_2); $r2 = h$ret1;" st_size :: Ptr CStat -> IO Int64
foreign import javascript unsafe "$r = h$base_st_mode($1_1,$1_2);" st_mode :: Ptr CStat -> IO CMode
foreign import javascript unsafe "$r = h$base_st_dev($1_1,$1_2);" st_dev :: Ptr CStat -> IO CDev
foreign import javascript unsafe "$r1 = h$base_st_ino($1_1,$1_2); $r2 = h$ret1;" st_ino :: Ptr CStat -> IO CIno
foreign import javascript unsafe "$r = h$base_echo;" const_echo :: CInt
foreign import javascript unsafe "$r = h$base_tcsanow;" const_tcsanow :: CInt
foreign import javascript unsafe "$r = h$base_icanon;" const_icanon :: CInt
foreign import javascript unsafe "$r = h$base_vmin;" const_vmin :: CInt
foreign import javascript unsafe "$r = h$base_vtime;" const_vtime :: CInt
foreign import javascript unsafe "$r = h$base_sigttou;" const_sigttou :: CInt
foreign import javascript unsafe "$r = h$base_sig_block;" const_sig_block :: CInt
foreign import javascript unsafe "$r = h$base_sig_setmask;" const_sig_setmask :: CInt
foreign import javascript unsafe "$r = h$base_f_getfl;" const_f_getfl :: CInt
foreign import javascript unsafe "$r = h$base_f_setfl;" const_f_setfl :: CInt
foreign import javascript unsafe "$r = h$base_f_setfd;" const_f_setfd :: CInt
foreign import javascript unsafe "$r = h$base_fd_cloexec;" const_fd_cloexec :: CLong
foreign import javascript unsafe "$r = h$base_sizeof_termios;" sizeof_termios :: Int
foreign import javascript unsafe "$r = h$base_sizeof_sigset_t;" sizeof_sigset_t :: Int
foreign import javascript unsafe "$r = h$base_lflag($1_1,$1_2);" c_lflag :: Ptr CTermios -> IO CTcflag
foreign import javascript unsafe "h$base_poke_lflag($1_1,$1_2,$2);" poke_c_lflag :: Ptr CTermios -> CTcflag -> IO ()
foreign import javascript unsafe "$r1 = h$base_ptr_c_cc($1_1,$1_2); $r2 = h$ret_1;" ptr_c_cc :: Ptr CTermios -> IO (Ptr Word8)
s_issock :: CMode -> Bool
s_issock cmode = c_s_issock cmode /= 0
foreign import javascript unsafe "h$base_c_s_issock($1)" c_s_issock :: CMode -> CInt
foreign import javascript unsafe "$r = h$base_default_buffer_size;" dEFAULT_BUFFER_SIZE :: Int
foreign import javascript unsafe "$r = h$base_SEEK_CUR;" sEEK_CUR :: CInt
foreign import javascript unsafe "$r = h$base_SEEK_SET;" sEEK_SET :: CInt
foreign import javascript unsafe "$r = h$base_SEEK_END" sEEK_END :: CInt
-- fixme, unclear if these can be supported, remove?
foreign import javascript unsafe "$r = h$base_c_fcntl_read($1,$2)" c_fcntl_read :: CInt -> CInt -> IO CInt
foreign import javascript unsafe "$r = h$base_c_fcntl_write($1,$2,$3);" c_fcntl_write :: CInt -> CInt -> CLong -> IO CInt
foreign import javascript unsafe "$r = h$base_c_fcntl_lock($1,$2,$3_1,$3_2);" c_fcntl_lock :: CInt -> CInt -> Ptr CFLock -> IO CInt
#else
foreign import capi unsafe "unistd.h lseek"
c_lseek :: CInt -> COff -> CInt -> IO COff
foreign import ccall unsafe "HsBase.h access"
c_access :: CString -> CInt -> IO CInt
foreign import ccall unsafe "HsBase.h chmod"
c_chmod :: CString -> CMode -> IO CInt
foreign import ccall unsafe "HsBase.h close"
c_close :: CInt -> IO CInt
foreign import ccall unsafe "HsBase.h creat"
c_creat :: CString -> CMode -> IO CInt
foreign import ccall unsafe "HsBase.h dup"
c_dup :: CInt -> IO CInt
foreign import ccall unsafe "HsBase.h dup2"
c_dup2 :: CInt -> CInt -> IO CInt
foreign import ccall unsafe "HsBase.h isatty"
c_isatty :: CInt -> IO CInt
#if defined(mingw32_HOST_OS)
-- See Note: Windows types
foreign import capi unsafe "HsBase.h _read"
c_read :: CInt -> Ptr Word8 -> CUInt -> IO CInt
-- See Note: Windows types
foreign import capi safe "HsBase.h _read"
c_safe_read :: CInt -> Ptr Word8 -> CUInt -> IO CInt
foreign import ccall unsafe "HsBase.h _umask"
c_umask :: CMode -> IO CMode
-- See Note: Windows types
foreign import capi unsafe "HsBase.h _write"
c_write :: CInt -> Ptr Word8 -> CUInt -> IO CInt
-- See Note: Windows types
foreign import capi safe "HsBase.h _write"
c_safe_write :: CInt -> Ptr Word8 -> CUInt -> IO CInt
foreign import ccall unsafe "HsBase.h _pipe"
c_pipe :: Ptr CInt -> IO CInt
#else
-- We use CAPI as on some OSs (eg. Linux) this is wrapped by a macro
-- which redirects to the 64-bit-off_t versions when large file
-- support is enabled.
-- See Note: Windows types
foreign import capi unsafe "HsBase.h read"
c_read :: CInt -> Ptr Word8 -> CSize -> IO CSsize
-- See Note: Windows types
foreign import capi safe "HsBase.h read"
c_safe_read :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import ccall unsafe "HsBase.h umask"
c_umask :: CMode -> IO CMode
-- See Note: Windows types
foreign import capi unsafe "HsBase.h write"
c_write :: CInt -> Ptr Word8 -> CSize -> IO CSsize
-- See Note: Windows types
foreign import capi safe "HsBase.h write"
c_safe_write :: CInt -> Ptr Word8 -> CSize -> IO CSsize
foreign import ccall unsafe "HsBase.h pipe"
c_pipe :: Ptr CInt -> IO CInt
#endif
foreign import ccall unsafe "HsBase.h unlink"
c_unlink :: CString -> IO CInt
foreign import capi unsafe "HsBase.h utime"
c_utime :: CString -> Ptr CUtimbuf -> IO CInt
foreign import ccall unsafe "HsBase.h getpid"
c_getpid :: IO CPid
#if !defined(ghcjs_HOST_OS)
foreign import ccall unsafe "HsBase.h __hscore_stat"
c_stat :: CFilePath -> Ptr CStat -> IO CInt
foreign import ccall unsafe "HsBase.h __hscore_ftruncate"
c_ftruncate :: CInt -> COff -> IO CInt
#endif
#if !defined(mingw32_HOST_OS) && !defined(ghcjs_HOST_OS)
foreign import capi unsafe "HsBase.h fcntl"
c_fcntl_read :: CInt -> CInt -> IO CInt
foreign import capi unsafe "HsBase.h fcntl"
c_fcntl_write :: CInt -> CInt -> CLong -> IO CInt
foreign import capi unsafe "HsBase.h fcntl"
c_fcntl_lock :: CInt -> CInt -> Ptr CFLock -> IO CInt
foreign import ccall unsafe "HsBase.h fork"
c_fork :: IO CPid
foreign import ccall unsafe "HsBase.h link"
c_link :: CString -> CString -> IO CInt
-- capi is required at least on Android
foreign import capi unsafe "HsBase.h mkfifo"
c_mkfifo :: CString -> CMode -> IO CInt
foreign import capi unsafe "signal.h sigemptyset"
c_sigemptyset :: Ptr CSigset -> IO CInt
foreign import capi unsafe "signal.h sigaddset"
c_sigaddset :: Ptr CSigset -> CInt -> IO CInt
foreign import capi unsafe "signal.h sigprocmask"
c_sigprocmask :: CInt -> Ptr CSigset -> Ptr CSigset -> IO CInt
-- capi is required at least on Android
foreign import capi unsafe "HsBase.h tcgetattr"
c_tcgetattr :: CInt -> Ptr CTermios -> IO CInt
-- capi is required at least on Android
foreign import capi unsafe "HsBase.h tcsetattr"
c_tcsetattr :: CInt -> CInt -> Ptr CTermios -> IO CInt
foreign import ccall unsafe "HsBase.h waitpid"
c_waitpid :: CPid -> Ptr CInt -> CInt -> IO CPid
#endif
#if !defined(ghcjs_HOST_OS)
-- POSIX flags only:
foreign import ccall unsafe "HsBase.h __hscore_o_rdonly" o_RDONLY :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_wronly" o_WRONLY :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_rdwr" o_RDWR :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_append" o_APPEND :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_creat" o_CREAT :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_excl" o_EXCL :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_trunc" o_TRUNC :: CInt
-- non-POSIX flags.
foreign import ccall unsafe "HsBase.h __hscore_o_noctty" o_NOCTTY :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_nonblock" o_NONBLOCK :: CInt
foreign import ccall unsafe "HsBase.h __hscore_o_binary" o_BINARY :: CInt
foreign import capi unsafe "sys/stat.h S_ISREG" c_s_isreg :: CMode -> CInt
foreign import capi unsafe "sys/stat.h S_ISCHR" c_s_ischr :: CMode -> CInt
foreign import capi unsafe "sys/stat.h S_ISBLK" c_s_isblk :: CMode -> CInt
foreign import capi unsafe "sys/stat.h S_ISDIR" c_s_isdir :: CMode -> CInt
foreign import capi unsafe "sys/stat.h S_ISFIFO" c_s_isfifo :: CMode -> CInt
s_isreg :: CMode -> Bool
s_isreg cm = c_s_isreg cm /= 0
s_ischr :: CMode -> Bool
s_ischr cm = c_s_ischr cm /= 0
s_isblk :: CMode -> Bool
s_isblk cm = c_s_isblk cm /= 0
s_isdir :: CMode -> Bool
s_isdir cm = c_s_isdir cm /= 0
s_isfifo :: CMode -> Bool
s_isfifo cm = c_s_isfifo cm /= 0
foreign import ccall unsafe "HsBase.h __hscore_sizeof_stat" sizeof_stat :: Int
foreign import ccall unsafe "HsBase.h __hscore_st_mtime" st_mtime :: Ptr CStat -> IO CTime
#ifdef mingw32_HOST_OS
foreign import ccall unsafe "HsBase.h __hscore_st_size" st_size :: Ptr CStat -> IO Int64
#else
foreign import ccall unsafe "HsBase.h __hscore_st_size" st_size :: Ptr CStat -> IO COff
#endif
foreign import ccall unsafe "HsBase.h __hscore_st_mode" st_mode :: Ptr CStat -> IO CMode
foreign import ccall unsafe "HsBase.h __hscore_st_dev" st_dev :: Ptr CStat -> IO CDev
foreign import ccall unsafe "HsBase.h __hscore_st_ino" st_ino :: Ptr CStat -> IO CIno
foreign import ccall unsafe "HsBase.h __hscore_echo" const_echo :: CInt
foreign import ccall unsafe "HsBase.h __hscore_tcsanow" const_tcsanow :: CInt
foreign import ccall unsafe "HsBase.h __hscore_icanon" const_icanon :: CInt
foreign import ccall unsafe "HsBase.h __hscore_vmin" const_vmin :: CInt
foreign import ccall unsafe "HsBase.h __hscore_vtime" const_vtime :: CInt
foreign import ccall unsafe "HsBase.h __hscore_sigttou" const_sigttou :: CInt
foreign import ccall unsafe "HsBase.h __hscore_sig_block" const_sig_block :: CInt
foreign import ccall unsafe "HsBase.h __hscore_sig_setmask" const_sig_setmask :: CInt
foreign import ccall unsafe "HsBase.h __hscore_f_getfl" const_f_getfl :: CInt
foreign import ccall unsafe "HsBase.h __hscore_f_setfl" const_f_setfl :: CInt
foreign import ccall unsafe "HsBase.h __hscore_f_setfd" const_f_setfd :: CInt
foreign import ccall unsafe "HsBase.h __hscore_fd_cloexec" const_fd_cloexec :: CLong
#if defined(HTYPE_TCFLAG_T)
foreign import ccall unsafe "HsBase.h __hscore_sizeof_termios" sizeof_termios :: Int
foreign import ccall unsafe "HsBase.h __hscore_sizeof_sigset_t" sizeof_sigset_t :: Int
foreign import ccall unsafe "HsBase.h __hscore_lflag" c_lflag :: Ptr CTermios -> IO CTcflag
foreign import ccall unsafe "HsBase.h __hscore_poke_lflag" poke_c_lflag :: Ptr CTermios -> CTcflag -> IO ()
foreign import ccall unsafe "HsBase.h __hscore_ptr_c_cc" ptr_c_cc :: Ptr CTermios -> IO (Ptr Word8)
#endif
s_issock :: CMode -> Bool
#if !defined(mingw32_HOST_OS)
s_issock cmode = c_s_issock cmode /= 0
foreign import capi unsafe "sys/stat.h S_ISSOCK" c_s_issock :: CMode -> CInt
#else
s_issock _ = False
#endif
foreign import ccall unsafe "__hscore_bufsiz" dEFAULT_BUFFER_SIZE :: Int
foreign import capi unsafe "stdio.h value SEEK_CUR" sEEK_CUR :: CInt
foreign import capi unsafe "stdio.h value SEEK_SET" sEEK_SET :: CInt
foreign import capi unsafe "stdio.h value SEEK_END" sEEK_END :: CInt
#endif
#endif
{-
Note: Windows types
Windows' _read and _write have types that differ from POSIX. They take an
unsigned int for length and return a signed int where POSIX uses size_t and
ssize_t. Those are different on x86_64 and equivalent on x86. We import them
with the types in Microsoft's documentation which means that c_read,
c_safe_read, c_write and c_safe_write have different Haskell types depending on
the OS.
-}
| tolysz/prepare-ghcjs | spec-lts8/base/System/Posix/Internals.hs | bsd-3-clause | 30,172 | 330 | 20 | 5,888 | 5,279 | 2,680 | 2,599 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Appraiser where
import TPM
import VChanUtil
import Demo3Shared
import Provisioning
import CommTools
import Data.Word
import Data.Binary
import Data.ByteString.Lazy (ByteString, pack, append, empty, cons, fromStrict, length)
import Data.Bits
import qualified Data.Text as T
import Control.Monad
import Data.Digest.Pure.SHA (bytestringDigest, sha1)
import qualified Data.Map.Lazy as M (fromList, lookup, empty)
import qualified Data.ByteString.Char8 as Char8 (pack) --just for testing.
import System.IO
--withOpenSSL
mkTPMRequest :: [Word8] -> IO (TPM_PCR_SELECTION, TPM_NONCE)
mkTPMRequest xs = do
let max = 24 -- <- tpm_getcap_pcrs tpm --Maybe add this to assumptions?(24)
nonce <- nonce_create
let selection = tpm_pcr_selection max xs
return (selection, nonce)
mkMeasureReq :: [Int] -> DesiredEvidence
mkMeasureReq = map f
where f :: Int -> EvidenceDescriptor
f 0 = D0
f 1 = D1
        f 2 = D2
        -- anything else has no corresponding evidence descriptor in this demo
        f n = error $ "mkMeasureReq: unsupported evidence index " ++ show n
sendRequest :: Request -> IO LibXenVChan
sendRequest req = sendShared attId (WRequest req)
receiveResponse :: LibXenVChan -> IO (Either String Response)--Response
receiveResponse chan = do
eithershared <- receiveShared chan
case (eithershared) of
(Left err) -> return (Left err)
(Right (WResponse resp)) -> return (Right resp)
(Right x) -> return (Left ("Received unexpected type. I expected a 'Response' but here is what I received instead: " ++ (show x)))
--EVALUATION-------------------------------------
evaluate :: Request -> Response -> IO Demo3EvalResult
evaluate (Request d pcrSelect nonce)
(Response (EvidencePackage eList eNonce eSig) caCert@(Signed pubKey caSig) tpmQuote@(Quote pcrComposite qSig)) = do
caPublicKey <- readPubCA
let blobEvidence :: ByteString
blobEvidence = ePack eList eNonce caCert --pubKey
evBlobSha1 = bytestringDigest $ sha1 blobEvidence
quoteInfo :: TPM_QUOTE_INFO
quoteInfo = TPM_QUOTE_INFO (tpm_pcr_composite_hash $ pcrComposite) (TPM_NONCE evBlobSha1)
{- blobQuote :: ByteString
blobQuote = encode quoteInfo
-}
aikPublicKey = tpm_get_rsa_PublicKey pubKey
r1 = verify caPublicKey caCert
signedQuoteInfo = Signed quoteInfo qSig
size = tpm_key_pubsize pubKey
mod = tpm_key_pubmod pubKey
modLength = Data.ByteString.Lazy.length $ encode mod
exp = (tpm_key_pubexp pubKey)
blobSize = Data.ByteString.Lazy.length $ encode quoteInfo
qSigSize = Data.ByteString.Lazy.length $ qSig
shaBlobLen = Data.ByteString.Lazy.length evBlobSha1
nonceLength = Data.ByteString.Lazy.length $ encode eNonce
{-
putStrLn $ "Nonce Length: " ++ show nonceLength
putStrLn $ "Key Size: " ++ show size
putStrLn $ "Key Mod: " ++ show mod
putStrLn $ "Mod Length: " ++ show modLength
putStrLn $ "Key Exp: " ++ show exp
putStrLn $ "Blob Length: " ++ show blobSize
putStrLn $ "SHA1 Blob Len: " ++ show shaBlobLen
putStrLn $ "Quote Sig Length: " ++ show qSigSize
-}
let r2 = verify aikPublicKey signedQuoteInfo
r3 = nonce == eNonce
goldenPcrComposite <- readComp
let r4 = pcrComposite == goldenPcrComposite
ms = evaluateEvidence d eList
return (r1, r2, r3, r4, ms)
type Demo3EvalResult = (Bool, Bool, Bool, Bool, [MeasureEval])
showDemo3EvalResult :: Demo3EvalResult -> IO ()
showDemo3EvalResult (r1, r2, r3, r4, ms) =
let rs = [r1, r2, r3, r4] in do
putStrLn ""
zipWithM_ f evalStrings rs
mapM_ g ms
where
f :: String -> Bool -> IO ()
f s b = putStrLn $ s ++ show b
g :: MeasureEval -> IO ()
g (d, b) = putStrLn $ show d ++ ": " ++ show b
evalStrings :: [String]
evalStrings = [e1, e2, e3, e4]
e1 :: String
e1 = "CACert Signature: "
e2 :: String
e2 = "Quote Package Signature: "
e3 :: String
e3 = "Nonce: "
e4 :: String
e4 = "PCR values: "
type MeasureEval = (EvidenceDescriptor, Bool)
evaluateEvidence :: DesiredEvidence -> Evidence -> [MeasureEval]
evaluateEvidence = zipWith f
where
f :: EvidenceDescriptor -> EvidencePiece -> MeasureEval
f ed ep = case ed of
D0 -> let res = check 0 ep in
(D0, res)
D1 -> let res = check 1 ep in
(D1, res)
D2 -> let res = check 2 ep in
(D2, res)
check :: Int -> EvidencePiece -> Bool
check id ep = let expected = M.lookup id goldenMap in
case expected of
Nothing -> error (noGolden ++ show id)
Just goldEp -> goldEp == ep
goldenMap = M.fromList $ zip [0..2] expectedEvidence
expectedEvidence :: Evidence
expectedEvidence =
[M0 expectedM0Val , M1 expectedM1Val, M2 expectedM2Val]
expectedM0Val :: M0Rep
expectedM0Val = decodeFromTextL' (T.pack "357893594")
-- "560146190" --cons (bit 0) empty
expectedM1Val :: M1Rep
expectedM1Val = decodeFromTextL' (T.pack "560146190") --cons (bit 0) empty
expectedM2Val :: M2Rep
expectedM2Val = decodeFromTextL' (T.pack "929611828") --cons (bit 2) empty
readPubCA :: IO PubKey
readPubCA = do
handle <- openFile exportCAPubFileName ReadMode
pubKeyString <- hGetLine handle
let pubKey :: PubKey
pubKey = read pubKeyString
hClose handle
return pubKey
-- Error messages (only for debugging, at least for now)
quoteReceiveError :: String
quoteReceiveError = "Appraiser did not receive a Quote as expected"
noGolden :: String
noGolden = "No Golden Value for measurement #"
{-
m0Val :: M0Rep
m0Val = cons (bit 0) empty
m1Val :: M1Rep
m1Val = cons (bit 1) empty
-}
readPubEK :: IO TPM_PUBKEY
readPubEK = do
handle <- openFile exportEKFileName ReadMode
pubKeyString <- hGetLine handle
let pubKey :: TPM_PUBKEY
pubKey = read pubKeyString
hClose handle
return pubKey
{-
testRequest :: IO Request
testRequest = do
(pcrSelect, nonce) <- mkTPMRequest ([0..23]::[Word8])
let mReq = mkMeasureReq [0..2]
return (Request mReq pcrSelect nonce)
testResponse :: IO Response
testResponse = do
pubKey <- readPubEK
comp <- readComp
putStrLn $ show comp
let caCert = Signed pubKey m1Val
quote = Quote comp (fromStrict (Char8.pack "hello")) --m0Val
return $ Response evPack caCert quote
where evPack = EvidencePackage [M0 m0Val, M1 m1Val] (TPM_NONCE m1Val) m1Val
-}
{-
goldenFileName :: String
goldenFileName= "goldenPcrComosite.txt"
exportEKFileName = "attEKPubKey.txt"
-}
{-
readComp :: IO TPM_PCR_COMPOSITE
readComp = do
handle <- openFile goldenFileName ReadMode
compString <- hGetLine handle
let comp :: TPM_PCR_COMPOSITE
comp = read compString
hClose handle
return comp
-}
{-
sendPubKeyRequest :: Bool -> IO LibXenVChan
sendPubKeyRequest b = do
putStrLn $ "Appraiser Domain id: "++ show appId
chan <- client_init attId
putStrLn $ "\n" ++ "Appraiser Sending: "++
"PubKey Request: " ++ (show b) ++ "\n"
send chan $ b
return chan
-}
{-
receivePubKeyResponse :: LibXenVChan -> IO PubKeyResponse
receivePubKeyResponse chan = do
ctrlWait chan
resp :: PubKeyResponse <- receive chan --TODO: error handling?
putStrLn $ "\n" ++ "Appraiser Received: " ++ "Pubkey Response: "
++ show resp ++ "\n"
return resp
-}
| armoredsoftware/protocol | tpm/mainline/appraisal/Appraiser.hs | bsd-3-clause | 7,427 | 0 | 16 | 1,839 | 1,530 | 813 | 717 | 131 | 3 |
#include "Any/File.h"
main = print "Hello"
| alphaHeavy/hlint | tests/cpp-simple.hs | gpl-2.0 | 44 | 0 | 5 | 7 | 10 | 5 | 5 | 1 | 1 |
{-# LANGUAGE TemplateHaskell, TypeSynonymInstances, FlexibleInstances #-}
module Language.Java.Paragon.Annotated where
import Language.Haskell.TH
--import Language.Haskell.TH.Syntax
import Control.Monad ((<=<))
class Functor ast => Annotated ast where
ann :: ast l -> l
amap :: (l -> l) -> ast l -> ast l
modName :: String
modName = "Language.Java.Paragon.Annotated"
-- | Derive Annotated instances for the given datatype.
deriveAnn :: Name -> Q [Dec]
deriveAnn = deriveAnn' <=< reify
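-- A hypothetical usage sketch (the datatype below is illustrative only; the
-- first field of every constructor must be the annotation type variable, and
-- a 'Functor' instance, e.g. via @DeriveFunctor@, is needed for the
-- superclass):
--
-- > data Expr l = Lit l Int | Add l (Expr l) (Expr l)
-- >   deriving Functor
-- >
-- > $(deriveAnn ''Expr)  -- generates: instance Annotated Expr where ...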
-- | Derive Annotated instances for many datatypes.
deriveAnnMany :: [Name] -> Q [Dec]
deriveAnnMany = deriveAnnMany' <=< mapM reify
-- | Obtain Info values through a custom reification function. This is useful
-- when generating instances for datatypes that have not yet been declared.
deriveAnn' :: Info -> Q [Dec]
deriveAnn' = fmap (:[]) . deriveAnnOne
deriveAnnMany' :: [Info] -> Q [Dec]
deriveAnnMany' = mapM deriveAnnOne
deriveAnnOne :: Info -> Q Dec
deriveAnnOne i =
case i of
TyConI (DataD dcx n vsk cons _) ->
annInstance dcx n (map unTyVarBndr vsk) (map doCons cons)
TyConI (NewtypeD dcx n vsk con _) ->
annInstance dcx n (map unTyVarBndr vsk) [doCons con]
_ -> error (modName ++ ".deriveAnn: unhandled: " ++ pprint i)
where annInstance _ _ [] _ = error (modName ++ ".annInstance: unhandled " ++ pprint i)
annInstance dcx n vs cases =
let (anns, amaps) = unzip cases
in instanceD (ctxt dcx (init vs)) (conT ''Annotated `appT` typ n (init vs)) [funD 'ann anns, funD 'amap amaps]
typ n = foldl appT (conT n) . map varT
ctxt dcx = fmap (dcx ++) . cxt . map annPred
unTyVarBndr (PlainTV v) = v
unTyVarBndr (KindedTV v _) = v
annPred n = classP ''Annotated [varT n]
doCons :: Con -> (ClauseQ, ClauseQ)
doCons (NormalC c vs@((_,VarT n):sts)) =
let annDef = clause [conP c (varP n : map (const wildP) sts)] (normalB (varE n)) []
nams = [ mkName x | x <- zipWith (\_ i -> "x" ++ show i) vs ([0..]::[Int]) ]
f = mkName "f"
arg1 = [| $(varE f) $(varE (head nams)) |]
args = [ varE nam | nam <- tail nams ]
rhs = foldl appE (conE c) (arg1:args)
amapDef = clause [varP f, conP c (map varP nams)] (normalB rhs) []
in (annDef, amapDef)
doCons (RecC c sts) = doCons $ NormalC c [(s, t) | (_, s, t) <- sts]
-- doCons (InfixC sty1 c sty2) =
-- let con = [| conE c |]
-- left = [| lift $(varE (mkName "x0")) |]
-- right = [| lift $(varE (mkName "x1")) |]
-- e = [| infixApp $left $con $right |]
-- clause [infixP (varP (mkName "x0")) c (varP (mkName "x1"))] (normalB e) [] -}
doCons c = error (modName ++ ".doCons: Unhandled constructor: " ++ pprint c)
removeAnnotation :: Annotated ast => ast l -> ast ()
removeAnnotation = fmap (const ())
removeAnnotationMany :: Annotated ast => [ast l] -> [ast ()]
removeAnnotationMany = map removeAnnotation
| bvdelft/parac2 | src/Language/Java/Paragon/Annotated.hs | bsd-3-clause | 2,902 | 0 | 16 | 664 | 1,024 | 532 | 492 | 50 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
#ifndef ACCELERATE_DEBUG
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
#endif
-- |
-- Module : Data.Array.Accelerate.Debug.Flags
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Option parsing for debug flags
--
module Data.Array.Accelerate.Debug.Flags (
Flags, Mode,
acc_sharing, exp_sharing, fusion, simplify, flush_cache, fast_math, verbose,
dump_sharing, dump_simpl_stats, dump_simpl_iterations, dump_vectorisation,
dump_gc, dump_gc_stats, debug_cc, dump_cc, dump_asm, dump_exec, dump_sched,
accInit,
queryFlag, setFlag, setFlags, clearFlag, clearFlags,
when, unless,
) where
import Control.Monad.IO.Class
import Data.IORef
import Data.Label
import Data.List
import System.Environment
import System.IO.Unsafe
import Text.PrettyPrint hiding ( Mode )
import qualified Control.Monad as M ( when, unless )
import Foreign.C
import Foreign.Marshal
import Foreign.Ptr
import GHC.Foreign as GHC
import GHC.IO.Encoding ( getFileSystemEncoding )
import Debug.Trace
data FlagSpec flag = Option String -- external form
flag -- internal form
-- The runtime debug and control options supported by Accelerate. This is a bit
-- awkward, as we process both frontend as well as backend option flags, but
-- gives some control over error messages and overlapping options.
--
fclabels [d|
data Flags = Flags
{
-- Functionality and phase control
-- -------------------------------
--
-- These are Maybe types because they will only override the backend
-- options if the user specifies a value
--
acc_sharing :: !(Maybe Bool) -- recover sharing of array computations
, exp_sharing :: !(Maybe Bool) -- recover sharing of scalar expressions
, fusion :: !(Maybe Bool) -- fuse array expressions
, simplify :: !(Maybe Bool) -- simplify scalar expressions
-- , unfolding_use_threshold :: !(Maybe Int) -- the magic cut-off figure for inlining
, flush_cache :: !(Maybe Bool) -- delete persistent compilation cache(s)
, fast_math :: !(Maybe Bool) -- use faster, less precise math library operations
-- Debug trace
-- -----------
, verbose :: !Bool -- be very chatty
-- optimisation and simplification
, dump_sharing :: !Bool -- sharing recovery phase
, dump_simpl_stats :: !Bool -- statistics form fusion/simplification
, dump_simpl_iterations :: !Bool -- output from each simplifier iteration
, dump_vectorisation :: !Bool -- output from the vectoriser
-- garbage collection
, dump_gc :: !Bool -- dump GC trace
, dump_gc_stats :: !Bool -- output GC statistics
-- code generation / compilation
, debug_cc :: !Bool -- compile with debug symbols
, dump_cc :: !Bool -- compilation trace
, dump_asm :: !Bool -- dump generated code
-- execution
, dump_exec :: !Bool -- dump execution trace
, dump_sched :: !Bool -- dump scheduler trace
}
|]
allFlags :: [FlagSpec (Flags -> Flags)]
allFlags
= map (enable 'd') dflags
++ map (enable 'f') fflags ++ map (disable 'f') fflags
where
enable p (Option f go) = Option ('-':p:f) (go True)
disable p (Option f go) = Option ('-':p:"no-"++f) (go False)
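-- So, for instance, the "fusion" entry below yields both "-ffusion" and
-- "-fno-fusion", while the "verbose" entry yields only "-dverbose".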
-- These @-f\<blah\>@ phase control flags can be reversed with @-fno-\<blah\>@
--
fflags :: [FlagSpec (Bool -> Flags -> Flags)]
fflags =
[ Option "acc-sharing" (set' acc_sharing)
, Option "exp-sharing" (set' exp_sharing)
, Option "fusion" (set' fusion)
, Option "simplify" (set' simplify)
, Option "flush-cache" (set' flush_cache)
, Option "fast-math" (set' fast_math)
]
where
set' f v = set f (Just v)
-- These debugging flags default to off and can be enabled with @-d\<blah\>@
--
dflags :: [FlagSpec (Bool -> Flags -> Flags)]
dflags =
[ Option "verbose" (set verbose)
, Option "dump-sharing" (set dump_sharing)
, Option "dump-simpl-stats" (set dump_simpl_stats)
, Option "dump-simpl-iterations" (set dump_simpl_iterations)
, Option "dump-vectorisation" (set dump_vectorisation)
, Option "dump-gc" (set dump_gc)
, Option "dump-gc-stats" (set dump_gc_stats)
, Option "debug-cc" (set debug_cc)
, Option "dump-cc" (set dump_cc)
, Option "dump-asm" (set dump_asm)
, Option "dump-exec" (set dump_exec)
, Option "dump-sched" (set dump_sched)
]
class DebugFlag a where
def :: a
instance DebugFlag Bool where
{-# INLINE def #-}
def = False
instance DebugFlag (Maybe a) where
{-# INLINE def #-}
def = Nothing
-- | A bit of a hack to get the command line options processing out of the way.
--
-- We would like to have this automatically called once during program
-- initialisation, so that our command-line debug flags between +ACC .. [-ACC]
-- don't interfere with other programs.
--
-- Hacks beget hacks beget hacks...
--
accInit :: IO ()
#ifdef ACCELERATE_DEBUG
accInit = _flags `seq` return ()
#else
accInit = getUpdateArgs >> return ()
#endif
-- Initialise the debugging flags structure. This reads from both the command
-- line arguments as well as the environment variable "ACCELERATE_FLAGS".
-- Where applicable, options on the command line take precedence.
--
-- This is only available when compiled with debugging mode, because trying to
-- access it at any other time is an error.
--
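-- A usage sketch (flag spellings come from the tables above; the program
-- name is illustrative):
--
-- > ACCELERATE_FLAGS="-dverbose -fno-fusion" ./my-acc-program +ACC -ddump-cc -ACC
--
-- The section between @+ACC@ and @-ACC@ is parsed after the environment
-- variable, so the command-line flags win wherever the two overlap.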
#ifdef ACCELERATE_DEBUG
initialiseFlags :: IO Flags
initialiseFlags = do
argv <- getUpdateArgs
env <- maybe [] words `fmap` lookupEnv "ACCELERATE_FLAGS"
return $ parse (env ++ argv)
where
defaults = Flags def def def def def def def def def def def def def def def def def def
parse = foldl parse1 defaults
parse1 opts this =
case filter (\(Option flag _) -> this `isPrefixOf` flag) allFlags of
[Option _ go] -> go opts
[] -> trace unknown opts
alts -> case find (\(Option flag _) -> flag == this) alts of
Just (Option _ go) -> go opts
Nothing -> trace (ambiguous alts) opts
where
unknown = render $ text "Unknown option:" <+> quotes (text this)
ambiguous alts = render $
vcat [ text "Ambiguous option:" <+> quotes (text this)
, text ""
, text "Did you mean one of these?"
, nest 4 $ vcat (map (\(Option s _) -> text s) alts)
]
#endif
-- If the command line arguments include a section "+ACC ... [-ACC]" then return
-- that section, and update the command line arguments to not include that part.
--
getUpdateArgs :: IO [String]
getUpdateArgs = do
prog <- getProgName
argv <- getArgs
--
let (before, r1) = span (/= "+ACC") argv
(flags, r2) = span (/= "-ACC") $ dropWhile (== "+ACC") r1
after = dropWhile (== "-ACC") r2
--
#ifdef ACCELERATE_DEBUG
setProgArgv (prog : before ++ after)
#else
M.unless (null flags)
$ error "Data.Array.Accelerate: Debugging options are disabled. Install with -fdebug to enable them."
#endif
return flags
-- This is only defined in debug mode because to access it at any other time
-- should be an error.
--
#ifdef ACCELERATE_DEBUG
{-# NOINLINE _flags #-}
_flags :: IORef Flags
_flags = unsafePerformIO $ newIORef =<< initialiseFlags
#endif
{-# INLINE queryFlag #-}
queryFlag :: DebugFlag a => (Flags :-> a) -> IO a
#ifdef ACCELERATE_DEBUG
queryFlag f = get f `fmap` readIORef _flags
#else
queryFlag _ = return def
#endif
type Mode = Flags :-> Bool
setFlag, clearFlag :: Mode -> IO ()
setFlag f = setFlags [f]
clearFlag f = clearFlags [f]
setFlags, clearFlags :: [Mode] -> IO ()
#ifdef ACCELERATE_DEBUG
setFlags f = modifyIORef _flags (\opt -> foldr (flip set True) opt f)
clearFlags f = modifyIORef _flags (\opt -> foldr (flip set False) opt f)
#else
setFlags _ = return ()
clearFlags _ = return ()
#endif
-- | Conditional execution of a monadic debugging expression
--
{-# SPECIALISE when :: Mode -> IO () -> IO () #-}
when :: MonadIO m => Mode -> m () -> m ()
when f s = do
yes <- liftIO $ queryFlag f
M.when yes s
-- | The opposite of 'when'
--
{-# SPECIALISE unless :: Mode -> IO () -> IO () #-}
unless :: MonadIO m => Mode -> m () -> m ()
unless f s = do
yes <- liftIO $ queryFlag f
M.unless yes s
#ifdef ACCELERATE_DEBUG
-- Stolen from System.Environment
--
setProgArgv :: [String] -> IO ()
setProgArgv argv = do
enc <- getFileSystemEncoding
vs <- mapM (GHC.newCString enc) argv >>= newArray0 nullPtr
c_setProgArgv (genericLength argv) vs
foreign import ccall unsafe "setProgArgv"
c_setProgArgv :: CInt -> Ptr CString -> IO ()
#endif
| rrnewton/accelerate | Data/Array/Accelerate/Debug/Flags.hs | bsd-3-clause | 9,991 | 0 | 19 | 2,978 | 1,788 | 968 | 820 | 156 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.S3.HeadBucket
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation is useful to determine if a bucket exists and you have
-- permission to access it.
--
-- <http://docs.aws.amazon.com/AmazonS3/latest/API/HeadBucket.html>
module Network.AWS.S3.HeadBucket
(
-- * Request
HeadBucket
-- ** Request constructor
, headBucket
-- ** Request lenses
, hbBucket
-- * Response
, HeadBucketResponse
-- ** Response constructor
, headBucketResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.S3
import Network.AWS.S3.Types
import qualified GHC.Exts
newtype HeadBucket = HeadBucket
{ _hbBucket :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'HeadBucket' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'hbBucket' @::@ 'Text'
--
headBucket :: Text -- ^ 'hbBucket'
-> HeadBucket
headBucket p1 = HeadBucket
{ _hbBucket = p1
}
hbBucket :: Lens' HeadBucket Text
hbBucket = lens _hbBucket (\s a -> s { _hbBucket = a })
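-- A minimal usage sketch (the bucket name is illustrative; lens operators are
-- assumed to be re-exported by 'Network.AWS.Prelude'):
--
-- > headBucket "my-bucket" ^. hbBucket -- "my-bucket"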
data HeadBucketResponse = HeadBucketResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'HeadBucketResponse' constructor.
headBucketResponse :: HeadBucketResponse
headBucketResponse = HeadBucketResponse
instance ToPath HeadBucket where
toPath HeadBucket{..} = mconcat
[ "/"
, toText _hbBucket
]
instance ToQuery HeadBucket where
toQuery = const mempty
instance ToHeaders HeadBucket
instance ToXMLRoot HeadBucket where
toXMLRoot = const (namespaced ns "HeadBucket" [])
instance ToXML HeadBucket
instance AWSRequest HeadBucket where
type Sv HeadBucket = S3
type Rs HeadBucket = HeadBucketResponse
request = head
response = nullResponse HeadBucketResponse
| romanb/amazonka | amazonka-s3/gen/Network/AWS/S3/HeadBucket.hs | mpl-2.0 | 2,750 | 0 | 9 | 649 | 363 | 219 | 144 | 49 | 1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Foreign.Concurrent (module M) where
import "base" Foreign.Concurrent as M
| xwysp/codeworld | codeworld-base/src/Foreign/Concurrent.hs | apache-2.0 | 751 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright 2017 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
import CodeWorld.Message
import CodeWorld
import System.Clock
import Data.List
import Data.Maybe
import Text.Read
import Control.Monad
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as BS
import qualified Network.WebSockets as WS
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception
import Options.Applicative
import Text.Regex
connect :: Config -> WS.ClientApp a -> IO a
connect Config {..} = WS.runClient hostname port path
type Timestamp = Double
encodeEvent :: (Timestamp, Maybe Event) -> String
encodeEvent = show
decodeEvent :: String -> Maybe (Timestamp, Maybe Event)
decodeEvent = readMaybe
sendClientMessage :: Config -> WS.Connection -> ClientMessage -> IO ()
sendClientMessage config conn msg = do
when (debug config) $
putStrLn $ "→ " ++ show msg
WS.sendTextData conn (T.pack (show msg))
getServerMessage :: Config -> WS.Connection -> IO ServerMessage
getServerMessage config conn = do
msg <- WS.receiveData conn
case readMaybe (T.unpack msg) of
Just msg -> do
when (debug config) $
putStrLn $ "← " ++ show msg
return msg
Nothing -> fail "Invalid server message"
joinGame :: Config -> IO [ServerMessage]
joinGame config = connect config $ \conn -> do
sendClientMessage config conn (JoinGame (gameId config) "BOT")
JoinedAs _ _ <- getServerMessage config conn
waitForStart config conn
waitForStart :: Config -> WS.Connection -> IO [ServerMessage]
waitForStart config conn = go
where
go = do
m <- getServerMessage config conn
case m of
Started {} -> playGame config conn
_ -> go
playGame :: Config -> WS.Connection -> IO [ServerMessage]
playGame config conn = do
startTime <- getTime Monotonic
forever $ do
OutEvent pid eo <- getServerMessage config conn
when (pid == 0) $
case decodeEvent eo of
Just (t,mbEvent) -> do
let mbEvent' = modify <$> mbEvent
currentTime <- getTime Monotonic
let t' | Just ms <- delay config = max 0 (t + ms/1000)
| otherwise = timeSpecToS (currentTime - startTime)
sendClientMessage config conn (InEvent (show (t',mbEvent')))
Nothing -> putStrLn $ "Could not parse event: " ++ eo
where
modify e | not (invert config) = e
modify (KeyPress d) = KeyPress (inv d)
modify (KeyRelease d) = KeyRelease (inv d)
modify e = e
inv "Up" = "Down"
inv "Down" = "Up"
inv "Left" = "Right"
inv "Right" = "Left"
inv c = c
timeSpecToS ts = fromIntegral (sec ts) + fromIntegral (nsec ts) * 1E-9
data Config = Config
{ clients :: Int
, invert :: Bool
, delay :: Maybe Double
, hostname :: String
, port :: Int
, path :: String
, gameId :: GameId
, debug :: Bool
}
opts = info (helper <*> config)
( fullDesc
<> progDesc "CodeWorld simple bot"
  <> header "codeworld-game-bot - a simple mirroring bot for codeworld-gameserver")
where
config :: Parser Config
config = Config
<$> option auto
( long "clients"
<> short 'c'
<> showDefault
<> metavar "N"
<> value 1
<> help "Number of clients to simulate (>=1)" )
<*> switch
( long "invert"
<> showDefault
<> help "Return opposite direction" )
<*> optional (option auto
( long "delay"
<> showDefault
<> metavar "ms"
<> help "Use remote timestamp and adjust with this many milli seconds. Default is to use local time stamps. Can be negative."))
<*> strOption
( long "hostname"
<> showDefault
<> value "0.0.0.0"
<> metavar "HOSTNAME"
<> help "Hostname" )
<*> option auto
( long "port"
<> showDefault
<> metavar "PORT"
<> value 9160
<> help "Port" )
<*> strOption
( long "path"
<> showDefault
<> metavar "PATH"
<> value "gameserver"
<> help "Path")
<*> (T.pack <$> strOption
( long "gameid"
<> showDefault
<> metavar "ID"
<> help "The ID of the game to join (4 letters)"))
<*> switch
( long "debug"
<> showDefault
<> help "Show debugging output" )
main = do
config <- execParser opts
start <- getTime Monotonic
mapConcurrently id $ replicate (clients config) (joinGame config)
| venkat24/codeworld | codeworld-game-server/src/Bot.hs | apache-2.0 | 5,355 | 0 | 23 | 1,597 | 1,381 | 676 | 705 | 137 | 9 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
module Yesod.Core
( -- * Type classes
Yesod (..)
, YesodDispatch (..)
, YesodSubDispatch (..)
, RenderRoute (..)
, ParseRoute (..)
, RouteAttrs (..)
-- ** Breadcrumbs
, YesodBreadcrumbs (..)
, breadcrumbs
-- * Types
, Approot (..)
, FileUpload (..)
, ErrorResponse (..)
    -- * Utilities
, maybeAuthorized
, widgetToPageContent
-- * Defaults
, defaultErrorHandler
, defaultYesodMiddleware
, authorizationCheck
-- * Data types
, AuthResult (..)
, unauthorizedI
-- * Logging
, defaultMakeLogger
, defaultMessageLoggerSource
, defaultShouldLog
, defaultShouldLogIO
, formatLogMessage
, LogLevel (..)
, logDebug
, logInfo
, logWarn
, logError
, logOther
, logDebugS
, logInfoS
, logWarnS
, logErrorS
, logOtherS
-- * Sessions
, SessionBackend (..)
, customizeSessionCookies
, defaultClientSessionBackend
, envClientSessionBackend
, clientSessionBackend
, sslOnlySessions
, sslOnlyMiddleware
, clientSessionDateCacher
, loadClientSession
, Header(..)
-- * JS loaders
, ScriptLoadPosition (..)
, BottomOfHeadAsync
-- * Subsites
, MonadHandler (..)
, MonadWidget (..)
, getRouteToParent
, defaultLayoutSub
-- * Misc
, yesodVersion
, yesodRender
, Yesod.Core.runFakeHandler
-- * LiteApp
, module Yesod.Core.Internal.LiteApp
-- * Low-level
, yesodRunner
-- * Re-exports
, module Yesod.Core.Content
, module Yesod.Core.Dispatch
, module Yesod.Core.Handler
, module Yesod.Core.Widget
, module Yesod.Core.Json
, module Text.Shakespeare.I18N
, module Yesod.Core.Internal.Util
, module Text.Blaze.Html
, MonadTrans (..)
, MonadIO (..)
, MonadBase (..)
, MonadBaseControl
, MonadResource (..)
, MonadLogger
-- * Commonly referenced functions/datatypes
, Application
-- * Utilities
, showIntegral
, readIntegral
-- * Shakespeare
-- ** Hamlet
, hamlet
, shamlet
, xhamlet
, HtmlUrl
-- ** Julius
, julius
, JavascriptUrl
, renderJavascriptUrl
-- ** Cassius/Lucius
, cassius
, lucius
, CssUrl
, renderCssUrl
) where
import Yesod.Core.Content
import Yesod.Core.Dispatch
import Yesod.Core.Handler
import Yesod.Core.Class.Handler
import Yesod.Core.Widget
import Yesod.Core.Json
import Yesod.Core.Types
import Text.Shakespeare.I18N
import Yesod.Core.Internal.Util (formatW3 , formatRFC1123 , formatRFC822)
import Text.Blaze.Html (Html, toHtml, preEscapedToMarkup)
import Control.Monad.Logger
import Control.Monad.Trans.Class (MonadTrans (..))
import Yesod.Core.Internal.Session
import Yesod.Core.Internal.Run (yesodRunner)
import Yesod.Core.Class.Yesod
import Yesod.Core.Class.Dispatch
import Yesod.Core.Class.Breadcrumbs
import Yesod.Core.Internal.Run (yesodRender)
import qualified Yesod.Core.Internal.Run
import qualified Paths_yesod_core
import Data.Version (showVersion)
import Yesod.Routes.Class
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Base (MonadBase (..))
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Control.Monad.Trans.Resource (MonadResource (..))
import Yesod.Core.Internal.LiteApp
import Text.Hamlet
import Text.Cassius
import Text.Lucius
import Text.Julius
import Network.Wai (Application)
runFakeHandler :: (Yesod site, MonadIO m) =>
SessionMap
-> (site -> Logger)
-> site
-> HandlerT site IO a
-> m (Either ErrorResponse a)
runFakeHandler = Yesod.Core.Internal.Run.runFakeHandler
{-# DEPRECATED runFakeHandler "import runFakeHandler from Yesod.Core.Unsafe" #-}
-- | Return an 'Unauthorized' value, with the given i18n message.
unauthorizedI :: (MonadHandler m, RenderMessage (HandlerSite m) msg) => msg -> m AuthResult
unauthorizedI msg = do
mr <- getMessageRender
return $ Unauthorized $ mr msg
yesodVersion :: String
yesodVersion = showVersion Paths_yesod_core.version
-- | Return the same URL if the user is authorized to see it.
--
-- Built on top of 'isAuthorized'. This is useful for building pages that only
-- contain links to pages the user is allowed to see.
maybeAuthorized :: Yesod site
=> Route site
-> Bool -- ^ is this a write request?
-> HandlerT site IO (Maybe (Route site))
maybeAuthorized r isWrite = do
x <- isAuthorized r isWrite
return $ if x == Authorized then Just r else Nothing
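-- A usage sketch inside a handler (@ProfileR@ is a hypothetical route of the
-- surrounding site):
--
-- > mroute <- maybeAuthorized ProfileR False
-- > let profileLink = case mroute of
-- >       Just r  -> [whamlet|<a href=@{r}>Profile|]
-- >       Nothing -> mempty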
getRouteToParent :: Monad m => HandlerT child (HandlerT parent m) (Route child -> Route parent)
getRouteToParent = HandlerT $ return . handlerToParent
defaultLayoutSub :: Yesod parent
=> WidgetT child IO ()
-> HandlerT child (HandlerT parent IO) Html
defaultLayoutSub cwidget = widgetToParentWidget cwidget >>= lift . defaultLayout
showIntegral :: Integral a => a -> String
showIntegral x = show (fromIntegral x :: Integer)
readIntegral :: Num a => String -> Maybe a
readIntegral s =
case reads s of
(i, _):_ -> Just $ fromInteger i
[] -> Nothing
| andrewthad/yesod | yesod-core/Yesod/Core.hs | mit | 5,471 | 0 | 12 | 1,289 | 1,150 | 712 | 438 | 157 | 2 |
{-# LANGUAGE PolyKinds, TypeApplications, KindSignatures, DataKinds, GADTs
, TypeFamilies, RankNTypes #-}
module T12045c where
import Data.Kind
type family F a where
F @Type a = Bool
F @(Type -> Type) b = Char
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T12045c.hs | bsd-3-clause | 229 | 2 | 8 | 52 | 48 | 28 | 20 | -1 | -1 |
module TMVar where
import Control.Concurrent.STM hiding (TMVar, takeTMVar)
-- <<TMVar
newtype TMVar a = TMVar (TVar (Maybe a))
-- >>
newTMVar :: a -> STM (TMVar a)
newTMVar a = do
t <- newTVar (Just a)
return (TMVar t)
-- <<newEmptyTMVar
newEmptyTMVar :: STM (TMVar a)
newEmptyTMVar = do
t <- newTVar Nothing
return (TMVar t)
-- >>
-- <<takeTMVar
takeTMVar :: TMVar a -> STM a
takeTMVar (TMVar t) = do
m <- readTVar t -- <1>
case m of
Nothing -> retry -- <2>
Just a -> do
writeTVar t Nothing -- <3>
return a
-- >>
-- <<putTMVar
putTMVar :: TMVar a -> a -> STM ()
putTMVar (TMVar t) a = do
m <- readTVar t
case m of
Nothing -> do
writeTVar t (Just a)
return ()
Just _ -> retry
-- >>
-- <<takeEitherTMVar
takeEitherTMVar :: TMVar a -> TMVar b -> STM (Either a b)
takeEitherTMVar ma mb =
fmap Left (takeTMVar ma)
`orElse`
fmap Right (takeTMVar mb)
-- >>
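-- A small illustrative use (not part of the book text): fill one of two
-- TMVars and race them with 'takeEitherTMVar'.
--
-- > demo :: IO (Either Int String)
-- > demo = atomically $ do
-- >   a <- newEmptyTMVar
-- >   b <- newTMVar "ready"
-- >   takeEitherTMVar a b   -- Right "ready", because 'a' is empty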
| AndrewRademacher/parconc-examples | tmvar.hs | bsd-3-clause | 979 | 0 | 14 | 295 | 382 | 189 | 193 | 32 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fdefer-type-errors -O #-}
-- Oddly this bug was only triggered with -fdefer-type-errors
-- The -O ensures that the RULE is processed
module T14732 where
import Prelude hiding (zip, zipWith)
import Data.Kind (Type)
zipWith :: (a -> b -> c)
-> Bundle v a
-> Bundle v b
-> Bundle v c
zipWith = undefined
class GVector (v :: Type -> Type) a
instance GVector Vector a
data Bundle (v :: Type -> Type) a
data Vector a
class Unbox a
stream :: GVector v a => v a -> Bundle v a
{-# INLINE [1] stream #-}
stream = undefined
zip :: (Unbox a, Unbox b) => Vector a -> Vector b -> Vector (a, b)
{-# INLINE [1] zip #-}
zip = undefined
{-# RULES "stream/zip [Vector.Unboxed]" forall as bs .
stream (zip as bs) = zipWith (,) (stream as)
(stream bs) #-}
| sdiehl/ghc | testsuite/tests/typecheck/should_compile/T14732.hs | bsd-3-clause | 928 | 0 | 9 | 228 | 216 | 120 | 96 | -1 | -1 |
module Main where
foreign import ccall "power3" power3 :: Int -> Int
main = print $ power3 4
| sdiehl/ghc | testsuite/tests/rts/linker/T11223/power3.hs | bsd-3-clause | 95 | 0 | 6 | 20 | 32 | 18 | 14 | 3 | 1 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
import GHC.Prim
import GHC.Word
big :: Word
big = maxBound
carry :: Word
carry = case big of
W# w -> case plusWord2# w w of
(# hi, lo #) -> W# hi
main = print carry
| oldmanmike/ghc | testsuite/tests/codeGen/should_run/T9013.hs | bsd-3-clause | 217 | 0 | 11 | 52 | 76 | 40 | 36 | 10 | 1 |
{-# OPTIONS_GHC -fwarn-safe #-}
-- | Basic test to see if Safe warning flags compile
-- Warn if module is inferred safe
-- In this test the warning _shouldn't_ fire
module SafeFlags23 where
import System.IO.Unsafe
f :: Int
f = 1
| urbanslug/ghc | testsuite/tests/safeHaskell/flags/SafeFlags24.hs | bsd-3-clause | 231 | 0 | 4 | 44 | 24 | 17 | 7 | 5 | 1 |
data NBaum a = NBlatt a | NKnoten a [NBaum a] deriving (Show)
entferneBlaetter :: NBaum a -> NBaum a
entferneBlaetter (NKnoten s xs) = NKnoten s [entferneBlaetter x | x <- xs, not $ isLeaf x]
entferneBlaetter x = x
isLeaf :: NBaum a -> Bool
isLeaf (NBlatt _) = True
isLeaf (NKnoten _ []) = True
isLeaf s = False
main :: IO ()
-- main = putStrLn $ show $ entferneBlaetter $ NKnoten "hello" [NBlatt "leaf", NKnoten "empty" [], NKnoten "stuff" [NBlatt "leaf", NKnoten "stuff" [NBlatt "leaf"]]]
main = putStrLn $ show $ entferneBlaetter $ NKnoten "test" []
| michalc/haskell-experiments | tree.hs | mit | 587 | 0 | 9 | 135 | 187 | 94 | 93 | 10 | 1 |
main = do
print c
where
a = 1
b = 2
c = a + b
| shigemk2/haskell_abc | Hoge.hs | mit | 79 | 0 | 7 | 50 | 33 | 17 | 16 | 5 | 1 |
module Main where
import Data.List (sort, reverse)
type CountryName = String
type Bugs = Int
data Country = Country {
name :: CountryName,
bugs :: Bugs
} deriving (Show, Eq)
instance Ord Country where
(Country _ b1) `compare` (Country _ b2) = b1 `compare` b2
updateCountries :: [Country] -> CountryName -> [Country]
updateCountries [] country_name = [Country country_name 1]
updateCountries (x:xs) country_name = if name' == country_name
then x':xs
else x:(updateCountries xs country_name)
where x' = Country name' num'
name' = name x
num' = 1 + bugs x
multipleUpdateCountries :: [Country] -> [CountryName] -> [Country]
multipleUpdateCountries cs [] = cs
multipleUpdateCountries cs (x:xs) = multipleUpdateCountries cs' xs
where cs' = updateCountries cs x
popularCountries :: Int -> [Country] -> [Country]
popularCountries n cs = take n $ (reverse . sort) cs
main = do
undefined
| Valiev/contests | darkus/201506/Main.hs | mit | 1,131 | 0 | 8 | 389 | 342 | 186 | 156 | 26 | 2 |
module ArbitrarySum where
import Test.QuickCheck
import Data.Monoid
-- this module is designed to avoid the warning:
-- "No instance for (Arbitrary (Sum Int))"
-- see http://austinrochford.com/posts/2014-05-27-quickcheck-laws.html
instance (Arbitrary a) => Arbitrary (Sum a) where
arbitrary = Sum <$> arbitrary
-- my attempts to understand the above:
--instance (Arbitrary a) => Arbitrary (Sum a) where
-- arbitrary = fmap Sum arbitrary
--instance (Arbitrary a) => Arbitrary (Sum a) where
-- arbitrary = do
-- x <- arbitrary
-- return (Sum x)
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter15.hsproj/ArbitrarySum.hs | mit | 574 | 0 | 7 | 114 | 56 | 36 | 20 | 5 | 0 |
module Rebase.Data.Unique
(
module Data.Unique
)
where
import Data.Unique
| nikita-volkov/rebase | library/Rebase/Data/Unique.hs | mit | 77 | 0 | 5 | 12 | 20 | 13 | 7 | 4 | 0 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Solar.Storage.Context
( Context(..)
, noContext
, addToContext
, (~+=:)
-- * Context Wrappers
, contextWrap
, contextWrap2
, contextWrap3
, contextWrap4
, contextWrap5
, contextWrap6
, contextWrap7
, contextWrapS
, contextWrapS2
, contextWrapS3
, contextWrapS4
, contextWrapS5
, contextWrapS6
, contextWrapS7
)
where
import Solar.Storage.Types
import Control.Monad.Trans.RWS as R
import Data.Monoid(Monoid(..))
import qualified Data.Dynamic as D
import qualified Data.Map as Map
import qualified Data.Typeable as T
-- | Empty context constant
noContext :: Context
noContext = Context Map.empty
{-# INLINABLE noContext #-}
-- | Adds the type into the context map.
--
-- Note: Types are their own record and inserting the
-- same type twice will not preserve the old value of
-- that type.
addToContext :: (T.Typeable t)
=> Context -- ^ The old context
-> t -- ^ The new value
-> Context -- ^ The resulting value added to the context
addToContext c t = Context $ Map.insert (T.typeOf t) (D.toDyn t) (unwrapContext c)
{-# INLINABLE addToContext #-}
infixl 7 ~+=:
-- | infix version of 'addToContext'
(~+=:) :: (T.Typeable t) => Context -> t -> Context
c ~+=: t = addToContext c t
{-# INLINABLE (~+=:) #-}
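-- A minimal usage sketch (the stored values are arbitrary):
--
-- > let ctx = noContext ~+=: (42 :: Int) ~+=: ("label" :: String)
-- > contextWrap ctx (\n -> show (n :: Int)) "no Int stored"
--
-- The second line yields @"42"@; had no 'Int' been stored it would fall back
-- to @"no Int stored"@.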
contextWrap' :: (T.Typeable k)
=> k -- ^ The type of this value
-> Context -- ^ The context to be searched
-> (k -> a) -- ^ What to execute if found
-> a -- ^ Default if not found
-> a -- ^ Resulting action
contextWrap' k c f df =
case (Map.lookup key (unwrapContext c)) of
Nothing -> df
Just d ->
let path = D.fromDynamic d
in case path of
Nothing -> df
Just p -> f p
where
key = T.typeOf k
-- | Helps unwrap the context value desired.
-- If the value is not present, the default action will be executed.
contextWrap :: (T.Typeable k)
=> Context -- ^ Context anticipated to have feature
-> (k -> a)
-- ^ Given a successful context, give this Action / Result
-> a -- ^ Default Action for failure
-> a -- ^ Final Action / Result
contextWrap = contextWrap' undefined
-- | Stateful version of 'contextWrap'
contextWrapS :: (T.Typeable k, Monad m, Monoid b)
=> (k -> RWST a b Context m z) -- ^ Action to take
-> RWST a b Context m z -- ^ Default Action when context resolution fails
    -> RWST a b Context m z -- ^ Resulting Action
contextWrapS action defAction = do
c <- get
contextWrap c action defAction
contextWrap2 c f d = contextWrap c (\v -> contextWrap c (f v) d) d
contextWrap3 c f d = contextWrap c (\v -> contextWrap2 c (f v) d) d
contextWrap4 c f d = contextWrap c (\v -> contextWrap3 c (f v) d) d
contextWrap5 c f d = contextWrap c (\v -> contextWrap4 c (f v) d) d
contextWrap6 c f d = contextWrap c (\v -> contextWrap5 c (f v) d) d
contextWrap7 c f d = contextWrap c (\v -> contextWrap6 c (f v) d) d
contextWrapS2 f d = contextWrapS (\v -> contextWrapS (f v) d) d
contextWrapS3 f d = contextWrapS (\v -> contextWrapS2 (f v) d) d
contextWrapS4 f d = contextWrapS (\v -> contextWrapS3 (f v) d) d
contextWrapS5 f d = contextWrapS (\v -> contextWrapS4 (f v) d) d
contextWrapS6 f d = contextWrapS (\v -> contextWrapS5 (f v) d) d
contextWrapS7 f d = contextWrapS (\v -> contextWrapS6 (f v) d) d | Cordite-Studios/solar | solar-cells/Solar/Storage/Context.hs | mit | 3,613 | 0 | 13 | 1,040 | 1,011 | 546 | 465 | 76 | 3 |
{-# LANGUAGE FlexibleInstances #-}
module Bot.Component.Impl.History (
HistoryHandle
, Nick
, Message
, newHistoryHandle
, historyService
, getHistory
) where
import Bot.Component
import Bot.IO
import Control.Applicative
import Control.Concurrent
import Control.Monad.Error
import Control.Monad.State
import Prelude hiding (readFile)
import System.FilePath
import System.IO hiding (readFile)
import System.IO.Strict
import qualified Data.Sequence as S
type Nick = String
type Message = String
-- | The internal state of the history service. The datatype is opaque and
-- should only be accessed through the exposed API.
type HistoryInfo = S.Seq (Nick, Message)
-- | Opaque type for the HistoryInfo that is exposed externally.
type HistoryHandle = MVar HistoryInfo
-- | Creates a new HistoryHandle for use with the historyService component.
-- A reference to this is required in order to make API calls.
newHistoryHandle :: IO HistoryHandle
newHistoryHandle = liftIO newEmptyMVar
-- | The `BotComponent` portion of the history service. This service must be
-- included in the Bot otherwise all API calls will hang.
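--
-- A wiring sketch (the surrounding bot assembly is illustrative):
--
-- > handle <- newHistoryHandle -- in IO, before the bot is assembled
-- > -- include @historyService handle@ among the bot's components, then later:
-- > recent <- getHistory handle -- from any BotMonad action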
historyService :: HistoryHandle -> Bot Component
historyService handle = loadLog >> mkComponent action
where
action = onPrivMsg $ \message -> do
nick <- gets currentNick
logFile <- logPath
let logLine = nick ++ '\t' : message
liftIO $ do
modifyMVar_ handle (return . (S.|> (nick, message)))
withFile logFile AppendMode (`hPutStrLn` logLine)
logPath = do
directory <- gets dataDirectory
return (directory </> "log.txt")
loadLog = do
logFile <- logPath
liftIO $ do
log <- (map splitLine . lines <$> readFile logFile)
`catchError` const (return [])
putMVar handle (S.fromList log)
splitLine = (,)
<$> takeWhile (/= '\t')
<*> drop 1 . dropWhile (/= '\t')
getHistory :: (BotMonad m, MonadIO m)
=> HistoryHandle -> m (S.Seq (Nick, Message))
getHistory = liftBot . liftIO . readMVar
| numberten/zhenya_bot | Bot/Component/Impl/History.hs | mit | 2,205 | 0 | 18 | 598 | 483 | 266 | 217 | 49 | 1 |
module Anagram (anagramsFor) where
import Data.Char (ord, toUpper)
import Data.List (sort)
anagramsFor :: String -> [String] -> [String]
anagramsFor = filter . isAnagram
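-- An illustrative example (the word list is arbitrary): candidates of the
-- wrong length, with different letters, or identical to the subject are all
-- rejected.
--
-- >>> anagramsFor "listen" ["enlists", "google", "inlets", "banana"]
-- ["inlets"]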
isAnagram :: String -> String -> Bool
isAnagram xs ys
| length xs /= length ys = False
| sumAscii xs' /= sumAscii ys' = False
| xs' == ys' = False
| sort xs' /= sort ys' = False
| otherwise = True
where
xs' = map toUpper xs
ys' = map toUpper ys
sumAscii = sum . map ord
| vaibhav276/exercism_haskell | anagram/src/Anagram.hs | mit | 558 | 0 | 9 | 202 | 193 | 97 | 96 | 15 | 1 |
module Actions where
import Import
import DbFunctions
import Database.Persist.Sql
go :: Text -> Int64 -> Text -> Handler Text
go loc areaId urlHash = do
area <- lookAtArea areaId
case area of
Just area' -> do
let newAreaId = case loc of
"north" -> areaGo_north area'
"east" -> areaGo_east area'
"west" -> areaGo_west area'
"south" -> areaGo_south area'
_ -> Nothing
case newAreaId of
Just newAreaId' -> do
_ <- updateArea (fromIntegral newAreaId') urlHash
areaDescription <- lookAround (fromIntegral newAreaId') urlHash
return $ "You went " ++ loc ++ ". " ++ areaDescription
Nothing -> return "You cannot go there."
Nothing -> return "Invalid location."
useWith :: Text -> Int64 -> Text -> Handler Text
useWith obj areaId urlHash = do
let words' = words obj
case words' of
x:"with":xs -> do
obj <- lookAtItemByUnique (unwords xs) areaId
case obj of
Just obj' -> do
inv <- showInventory urlHash
case itemInInventoryByName inv x of
True -> do
case (x,(unwords xs)) of
("key", "chest") -> win "You found the magic secret!" urlHash
("pokeball", "missingno") -> die "of memory corruption" urlHash
_ -> return "Cannot use those two items together."
False -> return "The item has to be in your inventory."
Nothing -> return "No such item in this area."
_ -> return "Invalid input."
die :: Text -> Text -> Handler Text
die out urlHash = do
deletePlayer urlHash
return $ "You died because " ++ out ++ "! Visit the startpage to restart."
win :: Text -> Text -> Handler Text
win out urlHash = do
deletePlayer urlHash
return $ "You won! " ++ out ++ " Visit the startpage to restart."
eat :: Text -> Int64 -> Text -> Handler Text
eat obj areaId urlHash = do
item <- lookAtItemByUnique obj areaId
case item of
Just (Entity _ itemVal) -> do
case itemUse_action itemVal of
-- "eat nothing" -> return $ pack $ "You eat the " ++ (unpack $ itemName itemVal) ++ ". Nothing happens."
"eat die" -> die "you weren't listening" urlHash
_ -> return "This item is not edible."
Nothing -> return "No such item in this area."
--open :: Maybe Text -> Handler (Maybe (Entity Item_status))
--open obj = do
-- -- TODO
examine :: Text -> Int64 -> Handler Text
examine obj areaId = do
output <- lookAtItemByUnique obj areaId
case output of
Just (Entity _ itemVal) -> return $ itemItem_description itemVal
Nothing -> return "No such item in this area."
pickUp :: Text -> Int64 -> Text -> Handler Text
pickUp obj areaId urlHash = do
item <- lookAtItemByUnique obj areaId
itemsInInventory <- showInventory urlHash
case item of
Just (Entity itemKey itemVal) -> do
case itemInInventory itemsInInventory (Entity itemKey itemVal) of
True -> do return $ pack $ (unpack $ itemName itemVal) ++ " is already in your inventory."
False -> do
case itemTakeable itemVal of
True -> do
_ <- insertItemWithStatus obj areaId urlHash "inventory"
return $ pack $ "Picked up " ++ (unpack $ itemName itemVal) ++ "."
False -> return $ pack "This item is not takeable."
Nothing -> return "No such item in this Area."
inventory :: Text -> Handler Text
inventory urlHash = do
itemsInInventory <- showInventory urlHash
let items = toItemName "" itemsInInventory
case items of
[] -> return "No items in inventory."
_ -> return $ pack $ "Your inventory contains: " ++ items
lookAround :: Int64 -> Text -> Handler Text
lookAround areaId urlHash = do
areaDescription <- lookAtArea areaId
items <- showItemsInArea areaId
inv <- showInventory urlHash
case areaDescription of
Just areaVal ->
case items of
[] -> return $ areaArea_description areaVal
items' -> return $ pack $ (unpack $ areaArea_description areaVal) ++ "\nThere are the following items: " ++ (unpack $ toItemName "" (listMinus items' inv))
_ -> return "Wrong area ID."
-- helper functions
toItemName :: [Char] -> [Entity Item] -> [Char]
toItemName out ((Entity _ itemVal):x:xs) =
toItemName (out ++ (unpack $ itemName itemVal) ++ ", ") (x:xs)
toItemName out ((Entity _ itemVal):xs) =
toItemName (out ++ (unpack $ itemName itemVal)) xs
toItemName out _ =
out
itemInInventory :: [Entity Item] -> (Entity Item) -> Bool
itemInInventory [] _ = False
itemInInventory ((Entity itemKey' _):xs) (Entity itemKey itemVal) =
(itemKey == itemKey') || itemInInventory xs (Entity itemKey itemVal)
itemInInventoryByName :: [Entity Item] -> Text -> Bool
itemInInventoryByName [] _ = False
itemInInventoryByName ((Entity _ itemVal):xs) y =
((itemName itemVal) == y) || itemInInventoryByName xs y
listMinus :: [Entity Item] -> [Entity Item] -> [Entity Item]
listMinus items inv = filter (((not .) .) itemInInventory inv) items
getAreaId :: Text -> Handler Int64
getAreaId urlHash = do
player <- getPlayer urlHash
case player of
Just (Entity _ playerVal) -> return $ fromSqlKey $ player_statusArea_id playerVal
Nothing -> return 0
| total-git/missingno | yesodMissingNo/Actions.hs | mit | 5,731 | 19 | 29 | 1,809 | 1,665 | 778 | 887 | 119 | 7 |
module GHCJS.DOM.CSSPageRule (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/CSSPageRule.hs | mit | 41 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE OverloadedLists #-}
-- | Example using the pure scene graph to make a static scene.
module Main where
import qualified Linear as L
import qualified Iris.Backends.GLFW as W
import Iris.Camera
import Iris.SceneGraph
import Iris.Visuals.Line
main :: IO ()
main =
do win <- W.makeWindow "Line Plot" (640, 640)
canvas <- W.initGLFW win
lineNode <- lineInit $ LineSpec lineVerts (L.V3 0.2 0.5 1)
let cam = panZoomCamera { panZoomCenter = L.V2 1 2
, panZoomWidth = 10
, panZoomHeight = 7 }
camNode = transNode (cameraTrans cam) lineNode
scene = sceneRoot canvas camNode
W.mainLoop' win (drawGraph scene)
lineVerts :: LineVertices
lineVerts = [ L.V3 0 0 0
, L.V3 0 1 0
, L.V3 1 1 0
, L.V3 1 0 0
, L.V3 0 0 0
]
| jdreaver/iris | examples/Pure2D.hs | mit | 915 | 0 | 13 | 329 | 260 | 139 | 121 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | Dynamically look up available executables.
module Plugins
( Plugin
, pluginPrefix
, pluginName
, pluginSummary
, pluginProc
, Plugins
, findPlugins
, listPlugins
, lookupPlugin
, callPlugin
, PluginException (..)
) where
import Control.Applicative
import Control.Exception (Exception)
import Control.Monad
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State.Strict (StateT, get, put)
import Data.Conduit
import Data.Hashable (Hashable)
import Data.HashSet (HashSet)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashSet as HashSet
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Conduit.List as CL
import Data.Conduit.Lift (evalStateC)
import qualified Data.List as L
import Data.List.Split (splitOn)
import Data.Text (Text, pack, unpack)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import Data.Monoid
import System.Directory
import System.Process (CreateProcess, proc, readProcess, readProcessWithExitCode, createProcess, waitForProcess)
import System.FilePath ((</>), getSearchPath, splitExtension)
import System.Environment (getEnv)
import System.Exit (ExitCode (..))
-- | Represents a runnable plugin.
-- Plugins must be discovered via `findPlugins`.
data Plugin = Plugin
{ _pluginPrefix :: !Text
, _pluginName :: !Text
, _pluginSummary :: !Text
}
deriving (Show)
-- | The program being plugged into.
pluginPrefix :: Plugin -> Text
pluginPrefix = _pluginPrefix
-- | The name of this plugin (without the prefix).
pluginName :: Plugin -> Text
pluginName = _pluginName
-- | A summary of what this plugin does
pluginSummary :: Plugin -> Text
pluginSummary = _pluginSummary
-- | Describes how to create a process out of a plugin and arguments.
-- You may use System.Process and Data.Conduit.Process
-- to manage the process's stdin, stdout, and stderr in various ways.
pluginProc :: Plugin -> [String] -> CreateProcess
pluginProc = proc . pluginProcessName
-- Not exported
pluginProcessName :: Plugin -> String
pluginProcessName p = unpack $ pluginPrefix p <> "-" <> pluginName p
-- | Represents the plugins available to a given program.
-- See: `findPlugins`.
data Plugins = Plugins
{ _pluginsPrefix :: !Text
, _pluginsMap :: !(HashMap Text Plugin)
}
deriving (Show)
-- | Find the plugins for a given program by inspecting everything on the PATH.
-- Any program that is prefixed with the given name and responds
-- to the `--summary` flag by writing one line to stdout
-- is considered a plugin.
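-- For example (hypothetical names): with @findPlugins "stackage"@, an
-- executable @stackage-upload@ on the PATH that prints exactly one line when
-- run as @stackage-upload --summary@ is picked up as a plugin named
-- @"upload"@.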
findPlugins :: Text -> IO Plugins
findPlugins t = fmap (Plugins t)
$ discoverPlugins t
$$ awaitForever (toPlugin t)
=$ CL.fold insertPlugin HashMap.empty
where
insertPlugin m p = HashMap.insert (pluginName p) p m
toPlugin :: (MonadIO m) => Text -> Text -> Producer m Plugin
toPlugin prefix name = do
let proc = unpack $ prefix <> "-" <> name
(exit, out, _err) <- liftIO $ readProcessWithExitCode proc ["--summary"] ""
case exit of
ExitSuccess -> case T.lines (pack out) of
[summary] -> yield $ Plugin
{ _pluginPrefix = prefix
, _pluginName = name
, _pluginSummary = summary
}
_ -> return ()
_ -> return ()
-- | Things that can go wrong when using `callPlugin`.
data PluginException
= PluginNotFound !Plugins !Text
| PluginExitFailure !Plugin !Int
deriving (Show, Typeable)
instance Exception PluginException
-- | Look up a particular plugin by name.
lookupPlugin :: Plugins -> Text -> Maybe Plugin
lookupPlugin ps t = HashMap.lookup t $ _pluginsMap ps
-- | List the available plugins.
listPlugins :: Plugins -> [Plugin]
listPlugins = HashMap.elems . _pluginsMap
-- | A convenience wrapper around lookupPlugin and pluginProc.
-- The stdin, stdout, and stderr handles are all inherited by the plugin.
-- Throws PluginException.
callPlugin :: (MonadIO m, MonadThrow m)
=> Plugins -> Text -> [String] -> m ()
callPlugin ps name args = case lookupPlugin ps name of
Nothing -> throwM $ PluginNotFound ps name
Just plugin -> do
exit <- liftIO $ do
(_, _, _, process) <- createProcess $ pluginProc plugin args
waitForProcess process
case exit of
ExitFailure i -> throwM $ PluginExitFailure plugin i
ExitSuccess -> return ()
discoverPlugins :: MonadIO m => Text -> Producer m Text
discoverPlugins t
= getPathDirs
$= clNub -- unique dirs on path
$= awaitForever (executablesPrefixed $ unpack $ t <> "-")
$= CL.map pack
$= clNub -- unique executables
executablesPrefixed :: (MonadIO m) => FilePath -> FilePath -> Producer m FilePath
executablesPrefixed prefix dir
= pathToContents dir
$= CL.filter (L.isPrefixOf prefix)
$= clFilterM (fileExistsIn dir)
$= clFilterM (isExecutableIn dir)
$= CL.mapMaybe (L.stripPrefix prefix . dropExeExt)
-- | Drop the .exe extension if present
dropExeExt :: FilePath -> FilePath
dropExeExt fp
| y == ".exe" = x
| otherwise = fp
where
(x, y) = splitExtension fp
getPathDirs :: (MonadIO m) => Producer m FilePath
getPathDirs = liftIO getSearchPath >>= mapM_ yield
pathToContents :: (MonadIO m) => FilePath -> Producer m FilePath
pathToContents dir = do
exists <- liftIO $ doesDirectoryExist dir
when exists $ do
contents <- liftIO $ getDirectoryContents dir
CL.sourceList contents
fileExistsIn :: (MonadIO m) => FilePath -> FilePath -> m Bool
fileExistsIn dir file = liftIO $ doesFileExist $ dir </> file
isExecutableIn :: (MonadIO m) => FilePath -> FilePath -> m Bool
isExecutableIn dir file = liftIO $ do
perms <- getPermissions $ dir </> file
return (executable perms)
clFilterM :: Monad m => (a -> m Bool) -> Conduit a m a
clFilterM pred = awaitForever $ \a -> do
predPassed <- lift $ pred a
when predPassed $ yield a
clNub :: (Monad m, Eq a, Hashable a)
=> Conduit a m a
clNub = evalStateC HashSet.empty clNubState
clNubState :: (Monad m, Eq a, Hashable a)
=> Conduit a (StateT (HashSet a) m) a
clNubState = awaitForever $ \a -> do
seen <- lift get
unless (HashSet.member a seen) $ do
lift $ put $ HashSet.insert a seen
yield a
| fpco/stackage-cli | src/Plugins.hs | mit | 6,271 | 0 | 16 | 1,176 | 1,744 | 930 | 814 | 163 | 3 |
module Main (main) where
import qualified Data.Text as T
data Variable = Variable String
deriving (Show, Eq)
data Predicate = Predicate String
deriving (Show, Eq)
data Expr = Atomic Predicate [Variable]
| Not Expr
| Expr :|: Expr
| Expr :&: Expr
| Forall Variable Expr
| Exists Variable Expr
deriving (Show, Eq)
imp :: Expr -> Expr -> Expr
imp p q = Not p :|: q
iff :: Expr -> Expr -> Expr
iff p q = (p `imp` q) :&: (q `imp` p)
ap :: String -> [String] -> Expr
ap p xs = Atomic (Predicate p) (map Variable xs)
data PrenexBody = PAtomic Predicate [Variable]
| PNot PrenexBody
| POr PrenexBody PrenexBody
| PAnd PrenexBody PrenexBody
deriving (Show, Eq)
data Quantifier = QForall Variable | QExists Variable
deriving (Show, Eq)
data Prenex = Prenex [Quantifier] PrenexBody
deriving (Show, Eq)
flipQuantifier :: Quantifier -> Quantifier
flipQuantifier (QForall x) = QExists x
flipQuantifier (QExists x) = QForall x
prenex :: Expr -> Prenex
prenex (Atomic p xs) = Prenex [] (PAtomic p xs)
prenex (Not p) = let Prenex quants p' = prenex p
in Prenex (map flipQuantifier quants) (PNot p')
prenex (p :|: q) = let Prenex quantsp' p' = prenex p
Prenex quantsq' q' = prenex q
in Prenex (quantsp' ++ quantsq') (POr p' q')
prenex (p :&: q) = let Prenex quantsp' p' = prenex p
Prenex quantsq' q' = prenex q
in Prenex (quantsp' ++ quantsq') (PAnd p' q')
prenex (Forall x p) = let Prenex quantsp' p' = prenex p
in Prenex (QForall x:quantsp') p'
prenex (Exists x p) = let Prenex quantsp' p' = prenex p
in Prenex (QExists x:quantsp') p'
data SignedPrenexBody = SPAtomic Bool Predicate [Variable]
| SPOr SignedPrenexBody SignedPrenexBody
| SPAnd SignedPrenexBody SignedPrenexBody
deriving (Show, Eq)
dual :: SignedPrenexBody -> SignedPrenexBody
dual (SPAtomic b p xs) = SPAtomic (not b) p xs
dual (p `SPOr` q) = dual p `SPAnd` dual q
dual (p `SPAnd` q) = dual p `SPOr` dual q
makeSigned :: PrenexBody -> SignedPrenexBody
makeSigned (PAtomic p xs) = SPAtomic True p xs
makeSigned (p `POr` q) = makeSigned p `SPOr` makeSigned q
makeSigned (p `PAnd` q) = makeSigned p `SPAnd` makeSigned q
makeSigned (PNot p) = dual (makeSigned p)
data Signed = Signed Bool Predicate [Variable]
deriving (Show, Eq)
data Conjunction = Conjunction [Signed]
deriving (Show, Eq)
data Disjunction = Disjunction [Conjunction]
deriving (Show, Eq)
conj' :: Conjunction -> Conjunction -> Conjunction
Conjunction ps `conj'` Conjunction qs = Conjunction (ps ++ qs)
disj :: Disjunction -> Disjunction -> Disjunction
Disjunction dps `disj` Disjunction dqs = Disjunction (dps ++ dqs)
conj :: Disjunction -> Disjunction -> Disjunction
Disjunction dps `conj` Disjunction dqs = Disjunction [ps `conj'` qs | ps <- dps, qs <- dqs]
makeDNF :: SignedPrenexBody -> Disjunction
makeDNF (SPAtomic b p xs) = Disjunction [Conjunction [Signed b p xs]]
makeDNF (p `SPOr` q) = makeDNF p `disj` makeDNF q
makeDNF (p `SPAnd` q) = makeDNF p `conj` makeDNF q
data PrenexDNF = PrenexDNF [Quantifier] Disjunction
deriving (Show, Eq)
exprToPrenexDNF :: Expr -> PrenexDNF
exprToPrenexDNF p = let Prenex quants p' = prenex p
in PrenexDNF quants (makeDNF (makeSigned p'))
p = Not ((ap "A" [] :|: ap "B" []) :&: Forall (Variable "x") (ap "C" ["x"])) :|: (ap "D" [] :|: ap "E" [])
-- ~((A v B) & (forall x. C x)) v D v E
main = putStr $ show $ exprToPrenexDNF p
-- exists x. (~ A & ~ B) v (~ C x) v D v E
| IreneKnapp/Inductive | Haskell/Inductive.hs | gpl-3.0 | 3,820 | 0 | 12 | 1,082 | 1,458 | 752 | 706 | 81 | 1 |
module Shared.Input where
import qualified Graphics.UI.SDL as SDL
import Graphics.UI.SDL.Types
data KeyDirection = KeyUp | KeyDown | KeyLeft | KeyRight | KeyOther deriving (Show, Read)
data Key = Q | W | E | A | S | D | Unknown
type Input = Maybe SDL.Event
handleNoInput :: IO (Maybe SDL.Event) -> IO Bool
handleNoInput stream = do
maybeEvent <- stream
case maybeEvent of
Nothing -> return False
Just (SDL.QuitEvent _ _) -> return True
_ -> return False
handleKeyInput :: IO (Maybe SDL.Event) -> (KeyDirection -> IO a) -> IO Bool
handleKeyInput stream keyHandler = do
maybeEvent <- stream
case maybeEvent of
Nothing -> return False
Just (SDL.QuitEvent _ _) -> return True
Just (SDL.KeyboardEvent _ _ _ _ _ keysym) -> do
_ <- keyHandler $ keymap keysym
return False
_ -> return False
keymap :: SDL.Keysym -> KeyDirection
keymap (SDL.Keysym code _ _) = case code of
79 -> KeyRight
80 -> KeyLeft
81 -> KeyDown
82 -> KeyUp
_ -> KeyOther
getKey :: SDL.Keysym -> Key
getKey keysym = case keysymScancode keysym of
20 -> Q
26 -> W
8 -> E
4 -> A
22 -> S
7 -> D
_ -> Unknown
| oldmanmike/haskellSDL2Examples | src/shared/input.hs | gpl-2.0 | 1,220 | 0 | 14 | 353 | 454 | 230 | 224 | 39 | 7 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module PackagesNG.ParsePackage where
import Control.Applicative ((<*))
import qualified Data.Attoparsec.ByteString.Char8 as Atto (Parser (), char,
char, decimal,
endOfLine, isSpace,
many1, parseOnly,
sepBy, space, string,
takeByteString,
takeTill, takeWhile)
import qualified Data.ByteString as BS (ByteString (),
readFile)
import qualified Data.ByteString.Char8 as BSC (pack)
import qualified Data.Map.Strict as M (Map (), findWithDefault,
fromList, lookup)
import System.IO (hPutStrLn, stderr)
import PackagesNG.Portage (CPV (..), Metadata,
Package (..))
parseCPV :: Atto.Parser CPV
parseCPV = do
_ <- Atto.string "./"
category <- Atto.takeWhile (/= '/')
_ <- Atto.char '/'
-- TODO: this should be greedy
package <- Atto.takeWhile (/= '-')
_ <- Atto.char '-'
version <- Atto.takeByteString
return $ CPV category package version
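-- For illustration, @"./dev-haskell/attoparsec-0.13.2.2"@ parses into the
-- category @"dev-haskell"@, package @"attoparsec"@ and version @"0.13.2.2"@;
-- a package name that itself contains a hyphen (e.g. @ghc-events@) is
-- mis-split until the TODO above is addressed.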
parseLine :: Atto.Parser (BS.ByteString, BS.ByteString)
parseLine = do
variable <- Atto.takeWhile (/= '=')
_ <- Atto.char '='
value <- Atto.takeWhile (/= '\n')
return (variable, value)
parseMetadata :: Atto.Parser Metadata
parseMetadata = fmap M.fromList . Atto.many1 $ parseLine <* Atto.endOfLine
orElse :: Maybe a -> String -> Either String a
orElse Nothing err = Left err
orElse (Just a) _ = Right a
lookup' :: (Ord k, Show k) => k -> M.Map k a -> Either String a
lookup' k m = M.lookup k m `orElse` ("Missing field " ++ show k)
lookupWithDef :: Ord k => a -> k -> M.Map k a -> Either String a
lookupWithDef a k m = Right $ M.findWithDefault a k m
parseWord :: Atto.Parser BS.ByteString
parseWord = Atto.takeTill Atto.isSpace
parseWords :: Atto.Parser [BS.ByteString]
parseWords = parseWord `Atto.sepBy` Atto.space
parsePackage :: BS.ByteString -> BS.ByteString -> Either String Package
parsePackage cpvString metadataString = do
CPV category package version <- Atto.parseOnly parseCPV cpvString
metadata <- Atto.parseOnly parseMetadata metadataString
description <- lookup' "DESCRIPTION" metadata
eapis <- lookupWithDef "5" "EAPI" metadata
eapi <- Atto.parseOnly Atto.decimal eapis
homepages <- lookup' "HOMEPAGE" metadata
homepage <- Atto.parseOnly parseWords homepages
iuses <- lookupWithDef "" "IUSE" metadata
iuse <- Atto.parseOnly parseWords iuses
keywordss <- lookupWithDef "" "KEYWORDS" metadata
keywords <- Atto.parseOnly parseWords keywordss
licenses <- lookup' "LICENSE" metadata
license <- Atto.parseOnly parseWords licenses
srcUris <- lookupWithDef "" "SRC_URI" metadata
srcUri <- Atto.parseOnly parseWords srcUris
slot <- lookup' "SLOT" metadata
return $ Package category
package
version
description
eapi
homepage
iuse
keywords
license
srcUri
slot
printPackage :: FilePath -> IO ()
printPackage path = do
metadata <- BS.readFile path
let cpv = BSC.pack path
either (hPutStrLn stderr . ((path ++ ": ") ++ ))
print
(parsePackage cpv metadata)
| vikraman/packages-ng | parse-packages/src/PackagesNG/ParsePackage.hs | gpl-2.0 | 4,015 | 0 | 12 | 1,557 | 1,012 | 515 | 497 | 83 | 1 |
module State
( Sym, dosym
, gensym, push
)
where
-- state Monad ----------------------------------------------------
data Sym s a = Sym (s -> (s, a))
dosym :: Sym (Int, [s]) a -> (a, [s])
-- start computation, show effect
dosym (Sym f) = let ((_, x), r) = f (0, []) in (r, x)
instance Functor (Sym s) where
fmap f (Sym s) = Sym (\ c ->
let (d, a) = s c in (d, f a) )
instance Applicative (Sym s) where
    pure x = Sym (\ c -> (c, x))
    Sym f <*> Sym x = Sym (\ c ->
        let (d, g) = f c; (e, a) = x d in (e, g a) )
instance Monad (Sym s) where
return x = Sym (\ c -> (c, x))
Sym x >>= f = Sym (\ c ->
        -- forward state is this:
let (d, r) = x c; Sym y = f r; (e, s) = y d in (e, s) )
-- but we're using backward state (NOT)
-- let (d, s) = y c; Sym y = f r; (e, r) = x d in (e, s) )
-- used for symbol supply
gensym :: Sym (Int, a) String
gensym = Sym (\ (c,x) -> ((c+1,x), "$" ++ show c))
-- remember a result
push :: a -> Sym (b, [a]) ()
push x = Sym ( \ (c, xs) -> ((c, x : xs), () ))
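-- A small worked example (the computation is illustrative):
--
-- > dosym (do a <- gensym; _ <- gensym; push a; gensym)
--
-- starts the counter at 0 and evaluates to ("$2", ["$0"]): three symbols are
-- generated and the first one is remembered on the result stack.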
| jwaldmann/rx | src/State.hs | gpl-3.0 | 868 | 4 | 13 | 231 | 460 | 257 | 203 | 17 | 1 |
module Pipes.Network.TCP.UIO ((>^>), fromSocket, toSocket) where
import Control.Monad.Morph (MFunctor, hoist)
import Control.Monad.Trans.Class (MonadTrans, lift)
import Data.ByteString (ByteString)
import Data.Word (Word16)
import Pipes (Producer, Consumer, Effect, (>->))
import qualified Pipes.Network.TCP as T
import UnexceptionalIO.ExceptT (EUIO, fromIO)
(>^>) ∷ (MFunctor b, MonadTrans a, MonadTrans b, Monad i, Monad (b i), Monad (a i), Monad (b (a i))) ⇒
Producer x (a i) r → Consumer x (b i) r → Effect (b (a i)) r
(>^>) a b = hoist lift a >-> hoist (hoist lift) b
fromSocket ∷ T.Socket → Word16 → Producer ByteString EUIO ()
fromSocket sock = hoist fromIO . T.fromSocket sock . fromIntegral
toSocket ∷ T.Socket → Consumer ByteString EUIO r
toSocket = hoist fromIO . T.toSocket
| rimmington/pirandello | src/Pipes/Network/TCP/UIO.hs | gpl-3.0 | 812 | 0 | 12 | 127 | 350 | 193 | 157 | -1 | -1 |
module Utility where
import Control.Monad
import Control.Monad.Random
import Debug.Trace
import Text.Regex
randomPM1 :: (MonadRandom m) => m Double
randomPM1 = (+ (-1)) . (* 2) <$> getRandom
screen :: (Show a) => a -> a
screen = join traceShow
screenT :: (Show a) => String -> a -> a
screenT text value = (text ++ show value) `trace` value
splitNumbers :: String -> [Int]
splitNumbers = map read . splitRegex (mkRegex "[^0-9]+")
| mrlovre/super-memory | Pro5/src/Utility.hs | gpl-3.0 | 485 | 0 | 10 | 123 | 180 | 100 | 80 | 14 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module SNet.Network
( SNet
, Info (..)
, location
, createTasks
, Location (..)
, initChildNet
, spawnSNet
) where
import Control.Lens
import Control.Monad.State
import Data.Default
import SNet.Stream
newtype Location = Loc Int
data Info = Info {
_location :: Location,
_createTasks :: Bool
}
makeLenses ''Info
instance Default Info where
def = Info
{ _location = Loc 0
, _createTasks = True
}
type SNet = Stream -> StateT Info IO Stream
initChildNet :: SNet -> StateT Info IO Info
initChildNet net = do
info <- get
createTasks .= False
net undefined
createTasks .= True
return info
spawnSNet :: MonadIO m => SNet -> Info -> Stream -> m Stream
spawnSNet net info output = liftIO $ evalStateT (net output) info
| merijn/SNet2.0 | SNet/Network.hs | gpl-3.0 | 804 | 0 | 9 | 187 | 254 | 137 | 117 | 32 | 1 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
module Cereal where
import Data.Bits
import GHC.Generics
data Bit = O | I deriving Show
class Serialize a where
put :: a -> [Bit]
default put :: (Generic a, GSerialize (Rep a)) => a -> [Bit]
put a = gput (from a)
get :: [Bit] -> (a, [Bit])
default get :: (Generic a, GSerialize (Rep a)) => [Bit] -> (a, [Bit])
get xs = (to x, xs')
where (x, xs') = gget xs
class GSerialize f where
gput :: f a -> [Bit]
gget :: [Bit] -> (f a, [Bit])
-- | Unit: used for constructors without arguments
instance GSerialize U1 where
gput U1 = []
gget xs = (U1, xs)
-- | Products: encode multiple arguments to constructors
instance (GSerialize a, GSerialize b) => GSerialize (a :*: b) where
gput (a :*: b) = gput a ++ gput b
gget xs = (a :*: b, xs'')
where (a, xs') = gget xs
(b, xs'') = gget xs'
-- | Sums: encode choice between constructors
instance (GSerialize a, GSerialize b) => GSerialize (a :+: b) where
gput (L1 x) = O : gput x
gput (R1 x) = I : gput x
gget (O:xs) = (L1 x, xs')
where (x, xs') = gget xs
gget (I:xs) = (R1 x, xs')
where (x, xs') = gget xs
-- | Meta-information (constructor names, etc.)
instance (GSerialize a) => GSerialize (M1 i c a) where
gput (M1 x) = gput x
gget xs = (M1 x, xs')
where (x, xs') = gget xs
-- | Constants, additional parameters and recursion of kind *
instance (Serialize a) => GSerialize (K1 i a) where
gput (K1 x) = put x
gget xs = (K1 x, xs')
where (x, xs') = get xs
instance Serialize Bool where
put True = [I]
put False = [O]
get (I:xs) = (True, xs)
get (O:xs) = (False, xs)
--
-- Try it out. (Normally this would be in a separate module.)
--
data UserTree a = Node a (UserTree a) (UserTree a) | Leaf
deriving (Generic, Show)
instance (Serialize a) => Serialize (UserTree a)
main = do
let xs = put True
print (fst . get $ xs :: Bool)
let ys = put (Leaf :: UserTree Bool)
print (fst . get $ ys :: UserTree Bool)
let zs = put (Node False Leaf Leaf :: UserTree Bool)
print (fst . get $ zs :: UserTree Bool)
| capitanbatata/sandbox | scrapping-expressions-boilerplate/src/Cereal.hs | gpl-3.0 | 2,208 | 0 | 12 | 560 | 970 | 516 | 454 | 57 | 1 |
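-- Context assumed from the surrounding book text (not shown in this snippet):
-- 'I' is the identity functor, 'Lan' is the left Kan extension data type
-- (roughly @data Lan k d a = forall i. Lan (k i -> a) (d i)@), and
-- @Exp a b@ abbreviates @Lan ((,) a) I b@.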
toExp :: (a -> b) -> Exp a b
toExp f = Lan (f . fst) (I ())
fromExp :: Exp a b -> (a -> b)
fromExp (Lan f (I x)) = \a -> f (a, x)
| hmemcpy/milewski-ctfp-pdf | src/content/3.11/code/haskell/snippet08.hs | gpl-3.0 | 130 | 0 | 9 | 38 | 107 | 55 | 52 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Blogger.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Blogger.Types.Product where
import Network.Google.Blogger.Types.Sum
import Network.Google.Prelude
--
-- /See:/ 'postUserInfo' smart constructor.
data PostUserInfo = PostUserInfo'
{ _puiPostUserInfo :: !(Maybe PostPerUserInfo)
, _puiPost :: !(Maybe Post')
, _puiKind :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostUserInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'puiPostUserInfo'
--
-- * 'puiPost'
--
-- * 'puiKind'
postUserInfo
:: PostUserInfo
postUserInfo =
PostUserInfo'
{ _puiPostUserInfo = Nothing
, _puiPost = Nothing
, _puiKind = "blogger#postUserInfo"
}
-- | Information about a User for the Post.
puiPostUserInfo :: Lens' PostUserInfo (Maybe PostPerUserInfo)
puiPostUserInfo
= lens _puiPostUserInfo
(\ s a -> s{_puiPostUserInfo = a})
-- | The Post resource.
puiPost :: Lens' PostUserInfo (Maybe Post')
puiPost = lens _puiPost (\ s a -> s{_puiPost = a})
-- | The kind of this entity. Always blogger#postUserInfo
puiKind :: Lens' PostUserInfo Text
puiKind = lens _puiKind (\ s a -> s{_puiKind = a})
instance FromJSON PostUserInfo where
parseJSON
= withObject "PostUserInfo"
(\ o ->
PostUserInfo' <$>
(o .:? "post_user_info") <*> (o .:? "post") <*>
(o .:? "kind" .!= "blogger#postUserInfo"))
instance ToJSON PostUserInfo where
toJSON PostUserInfo'{..}
= object
(catMaybes
[("post_user_info" .=) <$> _puiPostUserInfo,
("post" .=) <$> _puiPost, Just ("kind" .= _puiKind)])
-- | The Post author\'s avatar.
--
-- /See:/ 'postAuthorImage' smart constructor.
newtype PostAuthorImage = PostAuthorImage'
{ _paiURL :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostAuthorImage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paiURL'
postAuthorImage
:: PostAuthorImage
postAuthorImage =
PostAuthorImage'
{ _paiURL = Nothing
}
-- | The Post author\'s avatar URL.
paiURL :: Lens' PostAuthorImage (Maybe Text)
paiURL = lens _paiURL (\ s a -> s{_paiURL = a})
instance FromJSON PostAuthorImage where
parseJSON
= withObject "PostAuthorImage"
(\ o -> PostAuthorImage' <$> (o .:? "url"))
instance ToJSON PostAuthorImage where
toJSON PostAuthorImage'{..}
= object (catMaybes [("url" .=) <$> _paiURL])
--
-- /See:/ 'postList' smart constructor.
data PostList = PostList'
{ _plEtag :: !(Maybe Text)
, _plNextPageToken :: !(Maybe Text)
, _plKind :: !Text
, _plItems :: !(Maybe [Post'])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plEtag'
--
-- * 'plNextPageToken'
--
-- * 'plKind'
--
-- * 'plItems'
postList
:: PostList
postList =
PostList'
{ _plEtag = Nothing
, _plNextPageToken = Nothing
, _plKind = "blogger#postList"
, _plItems = Nothing
}
-- | Etag of the response.
plEtag :: Lens' PostList (Maybe Text)
plEtag = lens _plEtag (\ s a -> s{_plEtag = a})
-- | Pagination token to fetch the next page, if one exists.
plNextPageToken :: Lens' PostList (Maybe Text)
plNextPageToken
= lens _plNextPageToken
(\ s a -> s{_plNextPageToken = a})
-- | The kind of this entity. Always blogger#postList
plKind :: Lens' PostList Text
plKind = lens _plKind (\ s a -> s{_plKind = a})
-- | The list of Posts for this Blog.
plItems :: Lens' PostList [Post']
plItems
= lens _plItems (\ s a -> s{_plItems = a}) . _Default
. _Coerce
instance FromJSON PostList where
parseJSON
= withObject "PostList"
(\ o ->
PostList' <$>
(o .:? "etag") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "blogger#postList")
<*> (o .:? "items" .!= mempty))
instance ToJSON PostList where
toJSON PostList'{..}
= object
(catMaybes
[("etag" .=) <$> _plEtag,
("nextPageToken" .=) <$> _plNextPageToken,
Just ("kind" .= _plKind), ("items" .=) <$> _plItems])
-- | Data about the comment this is in reply to.
--
-- /See:/ 'commentInReplyTo' smart constructor.
newtype CommentInReplyTo = CommentInReplyTo'
{ _cirtId :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentInReplyTo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cirtId'
commentInReplyTo
:: CommentInReplyTo
commentInReplyTo =
CommentInReplyTo'
{ _cirtId = Nothing
}
-- | The identifier of the parent of this comment.
cirtId :: Lens' CommentInReplyTo (Maybe Text)
cirtId = lens _cirtId (\ s a -> s{_cirtId = a})
instance FromJSON CommentInReplyTo where
parseJSON
= withObject "CommentInReplyTo"
(\ o -> CommentInReplyTo' <$> (o .:? "id"))
instance ToJSON CommentInReplyTo where
toJSON CommentInReplyTo'{..}
= object (catMaybes [("id" .=) <$> _cirtId])
-- | Data about the blog containing this comment.
--
-- /See:/ 'commentBlog' smart constructor.
newtype CommentBlog = CommentBlog'
{ _cbId :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentBlog' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbId'
commentBlog
:: CommentBlog
commentBlog =
CommentBlog'
{ _cbId = Nothing
}
-- | The identifier of the blog containing this comment.
cbId :: Lens' CommentBlog (Maybe Text)
cbId = lens _cbId (\ s a -> s{_cbId = a})
instance FromJSON CommentBlog where
parseJSON
= withObject "CommentBlog"
(\ o -> CommentBlog' <$> (o .:? "id"))
instance ToJSON CommentBlog where
toJSON CommentBlog'{..}
= object (catMaybes [("id" .=) <$> _cbId])
--
-- /See:/ 'pageviews' smart constructor.
data Pageviews = Pageviews'
{ _pKind :: !Text
, _pCounts :: !(Maybe [PageviewsCountsItem])
, _pBlogId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Pageviews' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pKind'
--
-- * 'pCounts'
--
-- * 'pBlogId'
pageviews
:: Pageviews
pageviews =
Pageviews'
{ _pKind = "blogger#page_views"
, _pCounts = Nothing
, _pBlogId = Nothing
}
-- | The kind of this entry. Always blogger#page_views
pKind :: Lens' Pageviews Text
pKind = lens _pKind (\ s a -> s{_pKind = a})
-- | The container of posts in this blog.
pCounts :: Lens' Pageviews [PageviewsCountsItem]
pCounts
= lens _pCounts (\ s a -> s{_pCounts = a}) . _Default
. _Coerce
-- | Blog Id
pBlogId :: Lens' Pageviews (Maybe Text)
pBlogId = lens _pBlogId (\ s a -> s{_pBlogId = a})
instance FromJSON Pageviews where
parseJSON
= withObject "Pageviews"
(\ o ->
Pageviews' <$>
(o .:? "kind" .!= "blogger#page_views") <*>
(o .:? "counts" .!= mempty)
<*> (o .:? "blogId"))
instance ToJSON Pageviews where
toJSON Pageviews'{..}
= object
(catMaybes
[Just ("kind" .= _pKind), ("counts" .=) <$> _pCounts,
("blogId" .=) <$> _pBlogId])
-- | The location for geotagged posts.
--
-- /See:/ 'postLocation' smart constructor.
data PostLocation = PostLocation'
{ _plSpan :: !(Maybe Text)
, _plLat :: !(Maybe (Textual Double))
, _plName :: !(Maybe Text)
, _plLng :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostLocation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plSpan'
--
-- * 'plLat'
--
-- * 'plName'
--
-- * 'plLng'
postLocation
:: PostLocation
postLocation =
PostLocation'
{ _plSpan = Nothing
, _plLat = Nothing
, _plName = Nothing
, _plLng = Nothing
}
-- | Location\'s viewport span. Can be used when rendering a map preview.
plSpan :: Lens' PostLocation (Maybe Text)
plSpan = lens _plSpan (\ s a -> s{_plSpan = a})
-- | Location\'s latitude.
plLat :: Lens' PostLocation (Maybe Double)
plLat
= lens _plLat (\ s a -> s{_plLat = a}) .
mapping _Coerce
-- | Location name.
plName :: Lens' PostLocation (Maybe Text)
plName = lens _plName (\ s a -> s{_plName = a})
-- | Location\'s longitude.
plLng :: Lens' PostLocation (Maybe Double)
plLng
= lens _plLng (\ s a -> s{_plLng = a}) .
mapping _Coerce
instance FromJSON PostLocation where
parseJSON
= withObject "PostLocation"
(\ o ->
PostLocation' <$>
(o .:? "span") <*> (o .:? "lat") <*> (o .:? "name")
<*> (o .:? "lng"))
instance ToJSON PostLocation where
toJSON PostLocation'{..}
= object
(catMaybes
[("span" .=) <$> _plSpan, ("lat" .=) <$> _plLat,
("name" .=) <$> _plName, ("lng" .=) <$> _plLng])
-- | The container of posts in this blog.
--
-- /See:/ 'blogPosts' smart constructor.
data BlogPosts = BlogPosts'
{ _bpTotalItems :: !(Maybe (Textual Int32))
, _bpItems :: !(Maybe [Post'])
, _bpSelfLink :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogPosts' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bpTotalItems'
--
-- * 'bpItems'
--
-- * 'bpSelfLink'
blogPosts
:: BlogPosts
blogPosts =
BlogPosts'
{ _bpTotalItems = Nothing
, _bpItems = Nothing
, _bpSelfLink = Nothing
}
-- | The count of posts in this blog.
bpTotalItems :: Lens' BlogPosts (Maybe Int32)
bpTotalItems
= lens _bpTotalItems (\ s a -> s{_bpTotalItems = a})
. mapping _Coerce
-- | The List of Posts for this Blog.
bpItems :: Lens' BlogPosts [Post']
bpItems
= lens _bpItems (\ s a -> s{_bpItems = a}) . _Default
. _Coerce
-- | The URL of the container for posts in this blog.
bpSelfLink :: Lens' BlogPosts (Maybe Text)
bpSelfLink
= lens _bpSelfLink (\ s a -> s{_bpSelfLink = a})
instance FromJSON BlogPosts where
parseJSON
= withObject "BlogPosts"
(\ o ->
BlogPosts' <$>
(o .:? "totalItems") <*> (o .:? "items" .!= mempty)
<*> (o .:? "selfLink"))
instance ToJSON BlogPosts where
toJSON BlogPosts'{..}
= object
(catMaybes
[("totalItems" .=) <$> _bpTotalItems,
("items" .=) <$> _bpItems,
("selfLink" .=) <$> _bpSelfLink])
--
-- /See:/ 'post' smart constructor.
data Post' = Post''
{ _posImages :: !(Maybe [PostImagesItem])
, _posStatus :: !(Maybe Text)
, _posEtag :: !(Maybe Text)
, _posReaderComments :: !(Maybe Text)
, _posLocation :: !(Maybe PostLocation)
, _posKind :: !Text
, _posPublished :: !(Maybe DateTime')
, _posURL :: !(Maybe Text)
, _posBlog :: !(Maybe PostBlog)
, _posCustomMetaData :: !(Maybe Text)
, _posContent :: !(Maybe Text)
, _posReplies :: !(Maybe PostReplies)
, _posSelfLink :: !(Maybe Text)
, _posAuthor :: !(Maybe PostAuthor)
, _posId :: !(Maybe Text)
, _posLabels :: !(Maybe [Text])
, _posUpdated :: !(Maybe DateTime')
, _posTitleLink :: !(Maybe Text)
, _posTitle :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Post' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'posImages'
--
-- * 'posStatus'
--
-- * 'posEtag'
--
-- * 'posReaderComments'
--
-- * 'posLocation'
--
-- * 'posKind'
--
-- * 'posPublished'
--
-- * 'posURL'
--
-- * 'posBlog'
--
-- * 'posCustomMetaData'
--
-- * 'posContent'
--
-- * 'posReplies'
--
-- * 'posSelfLink'
--
-- * 'posAuthor'
--
-- * 'posId'
--
-- * 'posLabels'
--
-- * 'posUpdated'
--
-- * 'posTitleLink'
--
-- * 'posTitle'
post
:: Post'
post =
Post''
{ _posImages = Nothing
, _posStatus = Nothing
, _posEtag = Nothing
, _posReaderComments = Nothing
, _posLocation = Nothing
, _posKind = "blogger#post"
, _posPublished = Nothing
, _posURL = Nothing
, _posBlog = Nothing
, _posCustomMetaData = Nothing
, _posContent = Nothing
, _posReplies = Nothing
, _posSelfLink = Nothing
, _posAuthor = Nothing
, _posId = Nothing
, _posLabels = Nothing
, _posUpdated = Nothing
, _posTitleLink = Nothing
, _posTitle = Nothing
}
-- | Display image for the Post.
posImages :: Lens' Post' [PostImagesItem]
posImages
= lens _posImages (\ s a -> s{_posImages = a}) .
_Default
. _Coerce
-- | Status of the post. Only set for admin-level requests
posStatus :: Lens' Post' (Maybe Text)
posStatus
= lens _posStatus (\ s a -> s{_posStatus = a})
-- | Etag of the resource.
posEtag :: Lens' Post' (Maybe Text)
posEtag = lens _posEtag (\ s a -> s{_posEtag = a})
-- | Comment control and display setting for readers of this post.
posReaderComments :: Lens' Post' (Maybe Text)
posReaderComments
= lens _posReaderComments
(\ s a -> s{_posReaderComments = a})
-- | The location for geotagged posts.
posLocation :: Lens' Post' (Maybe PostLocation)
posLocation
= lens _posLocation (\ s a -> s{_posLocation = a})
-- | The kind of this entity. Always blogger#post
posKind :: Lens' Post' Text
posKind = lens _posKind (\ s a -> s{_posKind = a})
-- | RFC 3339 date-time when this Post was published.
posPublished :: Lens' Post' (Maybe UTCTime)
posPublished
= lens _posPublished (\ s a -> s{_posPublished = a})
. mapping _DateTime
-- | The URL where this Post is displayed.
posURL :: Lens' Post' (Maybe Text)
posURL = lens _posURL (\ s a -> s{_posURL = a})
-- | Data about the blog containing this Post.
posBlog :: Lens' Post' (Maybe PostBlog)
posBlog = lens _posBlog (\ s a -> s{_posBlog = a})
-- | The JSON meta-data for the Post.
posCustomMetaData :: Lens' Post' (Maybe Text)
posCustomMetaData
= lens _posCustomMetaData
(\ s a -> s{_posCustomMetaData = a})
-- | The content of the Post. May contain HTML markup.
posContent :: Lens' Post' (Maybe Text)
posContent
= lens _posContent (\ s a -> s{_posContent = a})
-- | The container of comments on this Post.
posReplies :: Lens' Post' (Maybe PostReplies)
posReplies
= lens _posReplies (\ s a -> s{_posReplies = a})
-- | The API REST URL to fetch this resource from.
posSelfLink :: Lens' Post' (Maybe Text)
posSelfLink
= lens _posSelfLink (\ s a -> s{_posSelfLink = a})
-- | The author of this Post.
posAuthor :: Lens' Post' (Maybe PostAuthor)
posAuthor
= lens _posAuthor (\ s a -> s{_posAuthor = a})
-- | The identifier of this Post.
posId :: Lens' Post' (Maybe Text)
posId = lens _posId (\ s a -> s{_posId = a})
-- | The list of labels this Post was tagged with.
posLabels :: Lens' Post' [Text]
posLabels
= lens _posLabels (\ s a -> s{_posLabels = a}) .
_Default
. _Coerce
-- | RFC 3339 date-time when this Post was last updated.
posUpdated :: Lens' Post' (Maybe UTCTime)
posUpdated
= lens _posUpdated (\ s a -> s{_posUpdated = a}) .
mapping _DateTime
-- | The title link URL, similar to atom\'s related link.
posTitleLink :: Lens' Post' (Maybe Text)
posTitleLink
= lens _posTitleLink (\ s a -> s{_posTitleLink = a})
-- | The title of the Post.
posTitle :: Lens' Post' (Maybe Text)
posTitle = lens _posTitle (\ s a -> s{_posTitle = a})
instance FromJSON Post' where
parseJSON
= withObject "Post"
(\ o ->
Post'' <$>
(o .:? "images" .!= mempty) <*> (o .:? "status") <*>
(o .:? "etag")
<*> (o .:? "readerComments")
<*> (o .:? "location")
<*> (o .:? "kind" .!= "blogger#post")
<*> (o .:? "published")
<*> (o .:? "url")
<*> (o .:? "blog")
<*> (o .:? "customMetaData")
<*> (o .:? "content")
<*> (o .:? "replies")
<*> (o .:? "selfLink")
<*> (o .:? "author")
<*> (o .:? "id")
<*> (o .:? "labels" .!= mempty)
<*> (o .:? "updated")
<*> (o .:? "titleLink")
<*> (o .:? "title"))
instance ToJSON Post' where
toJSON Post''{..}
= object
(catMaybes
[("images" .=) <$> _posImages,
("status" .=) <$> _posStatus,
("etag" .=) <$> _posEtag,
("readerComments" .=) <$> _posReaderComments,
("location" .=) <$> _posLocation,
Just ("kind" .= _posKind),
("published" .=) <$> _posPublished,
("url" .=) <$> _posURL, ("blog" .=) <$> _posBlog,
("customMetaData" .=) <$> _posCustomMetaData,
("content" .=) <$> _posContent,
("replies" .=) <$> _posReplies,
("selfLink" .=) <$> _posSelfLink,
("author" .=) <$> _posAuthor, ("id" .=) <$> _posId,
("labels" .=) <$> _posLabels,
("updated" .=) <$> _posUpdated,
("titleLink" .=) <$> _posTitleLink,
("title" .=) <$> _posTitle])
--
-- /See:/ 'page' smart constructor.
data Page = Page'
{ _pagStatus :: !(Maybe Text)
, _pagEtag :: !(Maybe Text)
, _pagKind :: !Text
, _pagPublished :: !(Maybe DateTime')
, _pagURL :: !(Maybe Text)
, _pagBlog :: !(Maybe PageBlog)
, _pagContent :: !(Maybe Text)
, _pagSelfLink :: !(Maybe Text)
, _pagAuthor :: !(Maybe PageAuthor)
, _pagId :: !(Maybe Text)
, _pagUpdated :: !(Maybe DateTime')
, _pagTitle :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Page' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pagStatus'
--
-- * 'pagEtag'
--
-- * 'pagKind'
--
-- * 'pagPublished'
--
-- * 'pagURL'
--
-- * 'pagBlog'
--
-- * 'pagContent'
--
-- * 'pagSelfLink'
--
-- * 'pagAuthor'
--
-- * 'pagId'
--
-- * 'pagUpdated'
--
-- * 'pagTitle'
page
:: Page
page =
Page'
{ _pagStatus = Nothing
, _pagEtag = Nothing
, _pagKind = "blogger#page"
, _pagPublished = Nothing
, _pagURL = Nothing
, _pagBlog = Nothing
, _pagContent = Nothing
, _pagSelfLink = Nothing
, _pagAuthor = Nothing
, _pagId = Nothing
, _pagUpdated = Nothing
, _pagTitle = Nothing
}
-- | The status of the page for admin resources (either LIVE or DRAFT).
pagStatus :: Lens' Page (Maybe Text)
pagStatus
= lens _pagStatus (\ s a -> s{_pagStatus = a})
-- | Etag of the resource.
pagEtag :: Lens' Page (Maybe Text)
pagEtag = lens _pagEtag (\ s a -> s{_pagEtag = a})
-- | The kind of this entity. Always blogger#page
pagKind :: Lens' Page Text
pagKind = lens _pagKind (\ s a -> s{_pagKind = a})
-- | RFC 3339 date-time when this Page was published.
pagPublished :: Lens' Page (Maybe UTCTime)
pagPublished
= lens _pagPublished (\ s a -> s{_pagPublished = a})
. mapping _DateTime
-- | The URL that this Page is displayed at.
pagURL :: Lens' Page (Maybe Text)
pagURL = lens _pagURL (\ s a -> s{_pagURL = a})
-- | Data about the blog containing this Page.
pagBlog :: Lens' Page (Maybe PageBlog)
pagBlog = lens _pagBlog (\ s a -> s{_pagBlog = a})
-- | The body content of this Page, in HTML.
pagContent :: Lens' Page (Maybe Text)
pagContent
= lens _pagContent (\ s a -> s{_pagContent = a})
-- | The API REST URL to fetch this resource from.
pagSelfLink :: Lens' Page (Maybe Text)
pagSelfLink
= lens _pagSelfLink (\ s a -> s{_pagSelfLink = a})
-- | The author of this Page.
pagAuthor :: Lens' Page (Maybe PageAuthor)
pagAuthor
= lens _pagAuthor (\ s a -> s{_pagAuthor = a})
-- | The identifier for this resource.
pagId :: Lens' Page (Maybe Text)
pagId = lens _pagId (\ s a -> s{_pagId = a})
-- | RFC 3339 date-time when this Page was last updated.
pagUpdated :: Lens' Page (Maybe UTCTime)
pagUpdated
= lens _pagUpdated (\ s a -> s{_pagUpdated = a}) .
mapping _DateTime
-- | The title of this entity. This is the name displayed in the Admin user
-- interface.
pagTitle :: Lens' Page (Maybe Text)
pagTitle = lens _pagTitle (\ s a -> s{_pagTitle = a})
instance FromJSON Page where
parseJSON
= withObject "Page"
(\ o ->
Page' <$>
(o .:? "status") <*> (o .:? "etag") <*>
(o .:? "kind" .!= "blogger#page")
<*> (o .:? "published")
<*> (o .:? "url")
<*> (o .:? "blog")
<*> (o .:? "content")
<*> (o .:? "selfLink")
<*> (o .:? "author")
<*> (o .:? "id")
<*> (o .:? "updated")
<*> (o .:? "title"))
instance ToJSON Page where
toJSON Page'{..}
= object
(catMaybes
[("status" .=) <$> _pagStatus,
("etag" .=) <$> _pagEtag, Just ("kind" .= _pagKind),
("published" .=) <$> _pagPublished,
("url" .=) <$> _pagURL, ("blog" .=) <$> _pagBlog,
("content" .=) <$> _pagContent,
("selfLink" .=) <$> _pagSelfLink,
("author" .=) <$> _pagAuthor, ("id" .=) <$> _pagId,
("updated" .=) <$> _pagUpdated,
("title" .=) <$> _pagTitle])
-- | The locale this Blog is set to.
--
-- /See:/ 'blogLocale' smart constructor.
data BlogLocale = BlogLocale'
{ _blVariant :: !(Maybe Text)
, _blCountry :: !(Maybe Text)
, _blLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogLocale' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blVariant'
--
-- * 'blCountry'
--
-- * 'blLanguage'
blogLocale
:: BlogLocale
blogLocale =
BlogLocale'
{ _blVariant = Nothing
, _blCountry = Nothing
, _blLanguage = Nothing
}
-- | The language variant this blog is authored in.
blVariant :: Lens' BlogLocale (Maybe Text)
blVariant
= lens _blVariant (\ s a -> s{_blVariant = a})
-- | The country this blog\'s locale is set to.
blCountry :: Lens' BlogLocale (Maybe Text)
blCountry
= lens _blCountry (\ s a -> s{_blCountry = a})
-- | The language this blog is authored in.
blLanguage :: Lens' BlogLocale (Maybe Text)
blLanguage
= lens _blLanguage (\ s a -> s{_blLanguage = a})
instance FromJSON BlogLocale where
parseJSON
= withObject "BlogLocale"
(\ o ->
BlogLocale' <$>
(o .:? "variant") <*> (o .:? "country") <*>
(o .:? "language"))
instance ToJSON BlogLocale where
toJSON BlogLocale'{..}
= object
(catMaybes
[("variant" .=) <$> _blVariant,
("country" .=) <$> _blCountry,
("language" .=) <$> _blLanguage])
-- | The author of this Page.
--
-- /See:/ 'pageAuthor' smart constructor.
data PageAuthor = PageAuthor'
{ _paImage :: !(Maybe PageAuthorImage)
, _paURL :: !(Maybe Text)
, _paDisplayName :: !(Maybe Text)
, _paId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PageAuthor' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paImage'
--
-- * 'paURL'
--
-- * 'paDisplayName'
--
-- * 'paId'
pageAuthor
:: PageAuthor
pageAuthor =
PageAuthor'
{ _paImage = Nothing
, _paURL = Nothing
, _paDisplayName = Nothing
, _paId = Nothing
}
-- | The page author\'s avatar.
paImage :: Lens' PageAuthor (Maybe PageAuthorImage)
paImage = lens _paImage (\ s a -> s{_paImage = a})
-- | The URL of the Page creator\'s Profile page.
paURL :: Lens' PageAuthor (Maybe Text)
paURL = lens _paURL (\ s a -> s{_paURL = a})
-- | The display name.
paDisplayName :: Lens' PageAuthor (Maybe Text)
paDisplayName
= lens _paDisplayName
(\ s a -> s{_paDisplayName = a})
-- | The identifier of the Page creator.
paId :: Lens' PageAuthor (Maybe Text)
paId = lens _paId (\ s a -> s{_paId = a})
instance FromJSON PageAuthor where
parseJSON
= withObject "PageAuthor"
(\ o ->
PageAuthor' <$>
(o .:? "image") <*> (o .:? "url") <*>
(o .:? "displayName")
<*> (o .:? "id"))
instance ToJSON PageAuthor where
toJSON PageAuthor'{..}
= object
(catMaybes
[("image" .=) <$> _paImage, ("url" .=) <$> _paURL,
("displayName" .=) <$> _paDisplayName,
("id" .=) <$> _paId])
--
-- /See:/ 'blog' smart constructor.
data Blog = Blog'
{ _bStatus :: !(Maybe Text)
, _bKind :: !Text
, _bPages :: !(Maybe BlogPages)
, _bLocale :: !(Maybe BlogLocale)
, _bPublished :: !(Maybe DateTime')
, _bURL :: !(Maybe Text)
, _bCustomMetaData :: !(Maybe Text)
, _bSelfLink :: !(Maybe Text)
, _bName :: !(Maybe Text)
, _bId :: !(Maybe Text)
, _bUpdated :: !(Maybe DateTime')
, _bPosts :: !(Maybe BlogPosts)
, _bDescription :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Blog' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bStatus'
--
-- * 'bKind'
--
-- * 'bPages'
--
-- * 'bLocale'
--
-- * 'bPublished'
--
-- * 'bURL'
--
-- * 'bCustomMetaData'
--
-- * 'bSelfLink'
--
-- * 'bName'
--
-- * 'bId'
--
-- * 'bUpdated'
--
-- * 'bPosts'
--
-- * 'bDescription'
blog
:: Blog
blog =
Blog'
{ _bStatus = Nothing
, _bKind = "blogger#blog"
, _bPages = Nothing
, _bLocale = Nothing
, _bPublished = Nothing
, _bURL = Nothing
, _bCustomMetaData = Nothing
, _bSelfLink = Nothing
, _bName = Nothing
, _bId = Nothing
, _bUpdated = Nothing
, _bPosts = Nothing
, _bDescription = Nothing
}
-- | The status of the blog.
bStatus :: Lens' Blog (Maybe Text)
bStatus = lens _bStatus (\ s a -> s{_bStatus = a})
-- | The kind of this entry. Always blogger#blog
bKind :: Lens' Blog Text
bKind = lens _bKind (\ s a -> s{_bKind = a})
-- | The container of pages in this blog.
bPages :: Lens' Blog (Maybe BlogPages)
bPages = lens _bPages (\ s a -> s{_bPages = a})
-- | The locale this Blog is set to.
bLocale :: Lens' Blog (Maybe BlogLocale)
bLocale = lens _bLocale (\ s a -> s{_bLocale = a})
-- | RFC 3339 date-time when this blog was published.
bPublished :: Lens' Blog (Maybe UTCTime)
bPublished
= lens _bPublished (\ s a -> s{_bPublished = a}) .
mapping _DateTime
-- | The URL where this blog is published.
bURL :: Lens' Blog (Maybe Text)
bURL = lens _bURL (\ s a -> s{_bURL = a})
-- | The JSON custom meta-data for the Blog
bCustomMetaData :: Lens' Blog (Maybe Text)
bCustomMetaData
= lens _bCustomMetaData
(\ s a -> s{_bCustomMetaData = a})
-- | The API REST URL to fetch this resource from.
bSelfLink :: Lens' Blog (Maybe Text)
bSelfLink
= lens _bSelfLink (\ s a -> s{_bSelfLink = a})
-- | The name of this blog. This is displayed as the title.
bName :: Lens' Blog (Maybe Text)
bName = lens _bName (\ s a -> s{_bName = a})
-- | The identifier for this resource.
bId :: Lens' Blog (Maybe Text)
bId = lens _bId (\ s a -> s{_bId = a})
-- | RFC 3339 date-time when this blog was last updated.
bUpdated :: Lens' Blog (Maybe UTCTime)
bUpdated
= lens _bUpdated (\ s a -> s{_bUpdated = a}) .
mapping _DateTime
-- | The container of posts in this blog.
bPosts :: Lens' Blog (Maybe BlogPosts)
bPosts = lens _bPosts (\ s a -> s{_bPosts = a})
-- | The description of this blog. This is displayed underneath the title.
bDescription :: Lens' Blog (Maybe Text)
bDescription
= lens _bDescription (\ s a -> s{_bDescription = a})
instance FromJSON Blog where
parseJSON
= withObject "Blog"
(\ o ->
Blog' <$>
(o .:? "status") <*>
(o .:? "kind" .!= "blogger#blog")
<*> (o .:? "pages")
<*> (o .:? "locale")
<*> (o .:? "published")
<*> (o .:? "url")
<*> (o .:? "customMetaData")
<*> (o .:? "selfLink")
<*> (o .:? "name")
<*> (o .:? "id")
<*> (o .:? "updated")
<*> (o .:? "posts")
<*> (o .:? "description"))
instance ToJSON Blog where
toJSON Blog'{..}
= object
(catMaybes
[("status" .=) <$> _bStatus, Just ("kind" .= _bKind),
("pages" .=) <$> _bPages, ("locale" .=) <$> _bLocale,
("published" .=) <$> _bPublished,
("url" .=) <$> _bURL,
("customMetaData" .=) <$> _bCustomMetaData,
("selfLink" .=) <$> _bSelfLink,
("name" .=) <$> _bName, ("id" .=) <$> _bId,
("updated" .=) <$> _bUpdated,
("posts" .=) <$> _bPosts,
("description" .=) <$> _bDescription])
-- | The container of pages in this blog.
--
-- /See:/ 'blogPages' smart constructor.
data BlogPages = BlogPages'
{ _bpsTotalItems :: !(Maybe (Textual Int32))
, _bpsSelfLink :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogPages' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bpsTotalItems'
--
-- * 'bpsSelfLink'
blogPages
:: BlogPages
blogPages =
BlogPages'
{ _bpsTotalItems = Nothing
, _bpsSelfLink = Nothing
}
-- | The count of pages in this blog.
bpsTotalItems :: Lens' BlogPages (Maybe Int32)
bpsTotalItems
= lens _bpsTotalItems
(\ s a -> s{_bpsTotalItems = a})
. mapping _Coerce
-- | The URL of the container for pages in this blog.
bpsSelfLink :: Lens' BlogPages (Maybe Text)
bpsSelfLink
= lens _bpsSelfLink (\ s a -> s{_bpsSelfLink = a})
instance FromJSON BlogPages where
parseJSON
= withObject "BlogPages"
(\ o ->
BlogPages' <$>
(o .:? "totalItems") <*> (o .:? "selfLink"))
instance ToJSON BlogPages where
toJSON BlogPages'{..}
= object
(catMaybes
[("totalItems" .=) <$> _bpsTotalItems,
("selfLink" .=) <$> _bpsSelfLink])
-- | Data about the blog containing this Post.
--
-- /See:/ 'postBlog' smart constructor.
newtype PostBlog = PostBlog'
{ _pbId :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostBlog' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pbId'
postBlog
:: PostBlog
postBlog =
PostBlog'
{ _pbId = Nothing
}
-- | The identifier of the Blog that contains this Post.
pbId :: Lens' PostBlog (Maybe Text)
pbId = lens _pbId (\ s a -> s{_pbId = a})
instance FromJSON PostBlog where
parseJSON
= withObject "PostBlog"
(\ o -> PostBlog' <$> (o .:? "id"))
instance ToJSON PostBlog where
toJSON PostBlog'{..}
= object (catMaybes [("id" .=) <$> _pbId])
--
-- /See:/ 'pageList' smart constructor.
data PageList = PageList'
{ _pllEtag :: !(Maybe Text)
, _pllNextPageToken :: !(Maybe Text)
, _pllKind :: !Text
, _pllItems :: !(Maybe [Page])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PageList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pllEtag'
--
-- * 'pllNextPageToken'
--
-- * 'pllKind'
--
-- * 'pllItems'
pageList
:: PageList
pageList =
PageList'
{ _pllEtag = Nothing
, _pllNextPageToken = Nothing
, _pllKind = "blogger#pageList"
, _pllItems = Nothing
}
-- | Etag of the response.
pllEtag :: Lens' PageList (Maybe Text)
pllEtag = lens _pllEtag (\ s a -> s{_pllEtag = a})
-- | Pagination token to fetch the next page, if one exists.
pllNextPageToken :: Lens' PageList (Maybe Text)
pllNextPageToken
= lens _pllNextPageToken
(\ s a -> s{_pllNextPageToken = a})
-- | The kind of this entity. Always blogger#pageList
pllKind :: Lens' PageList Text
pllKind = lens _pllKind (\ s a -> s{_pllKind = a})
-- | The list of Pages for a Blog.
pllItems :: Lens' PageList [Page]
pllItems
= lens _pllItems (\ s a -> s{_pllItems = a}) .
_Default
. _Coerce
instance FromJSON PageList where
parseJSON
= withObject "PageList"
(\ o ->
PageList' <$>
(o .:? "etag") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "blogger#pageList")
<*> (o .:? "items" .!= mempty))
instance ToJSON PageList where
toJSON PageList'{..}
= object
(catMaybes
[("etag" .=) <$> _pllEtag,
("nextPageToken" .=) <$> _pllNextPageToken,
Just ("kind" .= _pllKind),
("items" .=) <$> _pllItems])
-- | This user\'s locale
--
-- /See:/ 'userLocale' smart constructor.
data UserLocale = UserLocale'
{ _ulVariant :: !(Maybe Text)
, _ulCountry :: !(Maybe Text)
, _ulLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UserLocale' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ulVariant'
--
-- * 'ulCountry'
--
-- * 'ulLanguage'
userLocale
:: UserLocale
userLocale =
UserLocale'
{ _ulVariant = Nothing
, _ulCountry = Nothing
, _ulLanguage = Nothing
}
-- | The user\'s language variant setting.
ulVariant :: Lens' UserLocale (Maybe Text)
ulVariant
= lens _ulVariant (\ s a -> s{_ulVariant = a})
-- | The user\'s country setting.
ulCountry :: Lens' UserLocale (Maybe Text)
ulCountry
= lens _ulCountry (\ s a -> s{_ulCountry = a})
-- | The user\'s language setting.
ulLanguage :: Lens' UserLocale (Maybe Text)
ulLanguage
= lens _ulLanguage (\ s a -> s{_ulLanguage = a})
instance FromJSON UserLocale where
parseJSON
= withObject "UserLocale"
(\ o ->
UserLocale' <$>
(o .:? "variant") <*> (o .:? "country") <*>
(o .:? "language"))
instance ToJSON UserLocale where
toJSON UserLocale'{..}
= object
(catMaybes
[("variant" .=) <$> _ulVariant,
("country" .=) <$> _ulCountry,
("language" .=) <$> _ulLanguage])
-- | The comment creator\'s avatar.
--
-- /See:/ 'commentAuthorImage' smart constructor.
newtype CommentAuthorImage = CommentAuthorImage'
{ _caiURL :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentAuthorImage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'caiURL'
commentAuthorImage
:: CommentAuthorImage
commentAuthorImage =
CommentAuthorImage'
{ _caiURL = Nothing
}
-- | The comment creator\'s avatar URL.
caiURL :: Lens' CommentAuthorImage (Maybe Text)
caiURL = lens _caiURL (\ s a -> s{_caiURL = a})
instance FromJSON CommentAuthorImage where
parseJSON
= withObject "CommentAuthorImage"
(\ o -> CommentAuthorImage' <$> (o .:? "url"))
instance ToJSON CommentAuthorImage where
toJSON CommentAuthorImage'{..}
= object (catMaybes [("url" .=) <$> _caiURL])
--
-- /See:/ 'user' smart constructor.
data User = User'
{ _uBlogs :: !(Maybe UserBlogs)
, _uKind :: !Text
, _uCreated :: !(Maybe DateTime')
, _uLocale :: !(Maybe UserLocale)
, _uURL :: !(Maybe Text)
, _uSelfLink :: !(Maybe Text)
, _uAbout :: !(Maybe Text)
, _uDisplayName :: !(Maybe Text)
, _uId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'User' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uBlogs'
--
-- * 'uKind'
--
-- * 'uCreated'
--
-- * 'uLocale'
--
-- * 'uURL'
--
-- * 'uSelfLink'
--
-- * 'uAbout'
--
-- * 'uDisplayName'
--
-- * 'uId'
user
:: User
user =
User'
{ _uBlogs = Nothing
, _uKind = "blogger#user"
, _uCreated = Nothing
, _uLocale = Nothing
, _uURL = Nothing
, _uSelfLink = Nothing
, _uAbout = Nothing
, _uDisplayName = Nothing
, _uId = Nothing
}
-- | The container of blogs for this user.
uBlogs :: Lens' User (Maybe UserBlogs)
uBlogs = lens _uBlogs (\ s a -> s{_uBlogs = a})
-- | The kind of this entity. Always blogger#user
uKind :: Lens' User Text
uKind = lens _uKind (\ s a -> s{_uKind = a})
-- | The timestamp of when this profile was created, in seconds since epoch.
uCreated :: Lens' User (Maybe UTCTime)
uCreated
= lens _uCreated (\ s a -> s{_uCreated = a}) .
mapping _DateTime
-- | This user\'s locale
uLocale :: Lens' User (Maybe UserLocale)
uLocale = lens _uLocale (\ s a -> s{_uLocale = a})
-- | The user\'s profile page.
uURL :: Lens' User (Maybe Text)
uURL = lens _uURL (\ s a -> s{_uURL = a})
-- | The API REST URL to fetch this resource from.
uSelfLink :: Lens' User (Maybe Text)
uSelfLink
= lens _uSelfLink (\ s a -> s{_uSelfLink = a})
-- | Profile summary information.
uAbout :: Lens' User (Maybe Text)
uAbout = lens _uAbout (\ s a -> s{_uAbout = a})
-- | The display name.
uDisplayName :: Lens' User (Maybe Text)
uDisplayName
= lens _uDisplayName (\ s a -> s{_uDisplayName = a})
-- | The identifier for this User.
uId :: Lens' User (Maybe Text)
uId = lens _uId (\ s a -> s{_uId = a})
instance FromJSON User where
parseJSON
= withObject "User"
(\ o ->
User' <$>
(o .:? "blogs") <*> (o .:? "kind" .!= "blogger#user")
<*> (o .:? "created")
<*> (o .:? "locale")
<*> (o .:? "url")
<*> (o .:? "selfLink")
<*> (o .:? "about")
<*> (o .:? "displayName")
<*> (o .:? "id"))
instance ToJSON User where
toJSON User'{..}
= object
(catMaybes
[("blogs" .=) <$> _uBlogs, Just ("kind" .= _uKind),
("created" .=) <$> _uCreated,
("locale" .=) <$> _uLocale, ("url" .=) <$> _uURL,
("selfLink" .=) <$> _uSelfLink,
("about" .=) <$> _uAbout,
("displayName" .=) <$> _uDisplayName,
("id" .=) <$> _uId])
-- | The container of blogs for this user.
--
-- /See:/ 'userBlogs' smart constructor.
newtype UserBlogs = UserBlogs'
{ _ubSelfLink :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UserBlogs' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ubSelfLink'
userBlogs
:: UserBlogs
userBlogs =
UserBlogs'
{ _ubSelfLink = Nothing
}
-- | The URL of the Blogs for this user.
ubSelfLink :: Lens' UserBlogs (Maybe Text)
ubSelfLink
= lens _ubSelfLink (\ s a -> s{_ubSelfLink = a})
instance FromJSON UserBlogs where
parseJSON
= withObject "UserBlogs"
(\ o -> UserBlogs' <$> (o .:? "selfLink"))
instance ToJSON UserBlogs where
toJSON UserBlogs'{..}
= object
(catMaybes [("selfLink" .=) <$> _ubSelfLink])
-- | The container of comments on this Post.
--
-- /See:/ 'postReplies' smart constructor.
data PostReplies = PostReplies'
{ _prTotalItems :: !(Maybe (Textual Int64))
, _prItems :: !(Maybe [Comment])
, _prSelfLink :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostReplies' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prTotalItems'
--
-- * 'prItems'
--
-- * 'prSelfLink'
postReplies
:: PostReplies
postReplies =
PostReplies'
{ _prTotalItems = Nothing
, _prItems = Nothing
, _prSelfLink = Nothing
}
-- | The count of comments on this post.
prTotalItems :: Lens' PostReplies (Maybe Int64)
prTotalItems
= lens _prTotalItems (\ s a -> s{_prTotalItems = a})
. mapping _Coerce
-- | The List of Comments for this Post.
prItems :: Lens' PostReplies [Comment]
prItems
= lens _prItems (\ s a -> s{_prItems = a}) . _Default
. _Coerce
-- | The URL of the comments on this post.
prSelfLink :: Lens' PostReplies (Maybe Text)
prSelfLink
= lens _prSelfLink (\ s a -> s{_prSelfLink = a})
instance FromJSON PostReplies where
parseJSON
= withObject "PostReplies"
(\ o ->
PostReplies' <$>
(o .:? "totalItems") <*> (o .:? "items" .!= mempty)
<*> (o .:? "selfLink"))
instance ToJSON PostReplies where
toJSON PostReplies'{..}
= object
(catMaybes
[("totalItems" .=) <$> _prTotalItems,
("items" .=) <$> _prItems,
("selfLink" .=) <$> _prSelfLink])
--
-- /See:/ 'blogList' smart constructor.
data BlogList = BlogList'
{ _blKind :: !Text
, _blItems :: !(Maybe [Blog])
, _blBlogUserInfos :: !(Maybe [BlogUserInfo])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blKind'
--
-- * 'blItems'
--
-- * 'blBlogUserInfos'
blogList
:: BlogList
blogList =
BlogList'
{ _blKind = "blogger#blogList"
, _blItems = Nothing
, _blBlogUserInfos = Nothing
}
-- | The kind of this entity. Always blogger#blogList
blKind :: Lens' BlogList Text
blKind = lens _blKind (\ s a -> s{_blKind = a})
-- | The list of Blogs this user has Authorship or Admin rights over.
blItems :: Lens' BlogList [Blog]
blItems
= lens _blItems (\ s a -> s{_blItems = a}) . _Default
. _Coerce
-- | Admin level list of blog per-user information
blBlogUserInfos :: Lens' BlogList [BlogUserInfo]
blBlogUserInfos
= lens _blBlogUserInfos
(\ s a -> s{_blBlogUserInfos = a})
. _Default
. _Coerce
instance FromJSON BlogList where
parseJSON
= withObject "BlogList"
(\ o ->
BlogList' <$>
(o .:? "kind" .!= "blogger#blogList") <*>
(o .:? "items" .!= mempty)
<*> (o .:? "blogUserInfos" .!= mempty))
instance ToJSON BlogList where
toJSON BlogList'{..}
= object
(catMaybes
[Just ("kind" .= _blKind), ("items" .=) <$> _blItems,
("blogUserInfos" .=) <$> _blBlogUserInfos])
-- | Data about the blog containing this Page.
--
-- /See:/ 'pageBlog' smart constructor.
newtype PageBlog = PageBlog'
{ _pId :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PageBlog' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pId'
pageBlog
:: PageBlog
pageBlog =
PageBlog'
{ _pId = Nothing
}
-- | The identifier of the blog containing this page.
pId :: Lens' PageBlog (Maybe Text)
pId = lens _pId (\ s a -> s{_pId = a})
instance FromJSON PageBlog where
parseJSON
= withObject "PageBlog"
(\ o -> PageBlog' <$> (o .:? "id"))
instance ToJSON PageBlog where
toJSON PageBlog'{..}
= object (catMaybes [("id" .=) <$> _pId])
-- | The author of this Post.
--
-- /See:/ 'postAuthor' smart constructor.
data PostAuthor = PostAuthor'
{ _paaImage :: !(Maybe PostAuthorImage)
, _paaURL :: !(Maybe Text)
, _paaDisplayName :: !(Maybe Text)
, _paaId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostAuthor' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paaImage'
--
-- * 'paaURL'
--
-- * 'paaDisplayName'
--
-- * 'paaId'
postAuthor
:: PostAuthor
postAuthor =
PostAuthor'
{ _paaImage = Nothing
, _paaURL = Nothing
, _paaDisplayName = Nothing
, _paaId = Nothing
}
-- | The Post author\'s avatar.
paaImage :: Lens' PostAuthor (Maybe PostAuthorImage)
paaImage = lens _paaImage (\ s a -> s{_paaImage = a})
-- | The URL of the Post creator\'s Profile page.
paaURL :: Lens' PostAuthor (Maybe Text)
paaURL = lens _paaURL (\ s a -> s{_paaURL = a})
-- | The display name.
paaDisplayName :: Lens' PostAuthor (Maybe Text)
paaDisplayName
= lens _paaDisplayName
(\ s a -> s{_paaDisplayName = a})
-- | The identifier of the Post creator.
paaId :: Lens' PostAuthor (Maybe Text)
paaId = lens _paaId (\ s a -> s{_paaId = a})
instance FromJSON PostAuthor where
parseJSON
= withObject "PostAuthor"
(\ o ->
PostAuthor' <$>
(o .:? "image") <*> (o .:? "url") <*>
(o .:? "displayName")
<*> (o .:? "id"))
instance ToJSON PostAuthor where
toJSON PostAuthor'{..}
= object
(catMaybes
[("image" .=) <$> _paaImage, ("url" .=) <$> _paaURL,
("displayName" .=) <$> _paaDisplayName,
("id" .=) <$> _paaId])
--
-- /See:/ 'postPerUserInfo' smart constructor.
data PostPerUserInfo = PostPerUserInfo'
{ _ppuiKind :: !Text
, _ppuiBlogId :: !(Maybe Text)
, _ppuiUserId :: !(Maybe Text)
, _ppuiHasEditAccess :: !(Maybe Bool)
, _ppuiPostId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostPerUserInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ppuiKind'
--
-- * 'ppuiBlogId'
--
-- * 'ppuiUserId'
--
-- * 'ppuiHasEditAccess'
--
-- * 'ppuiPostId'
postPerUserInfo
:: PostPerUserInfo
postPerUserInfo =
PostPerUserInfo'
{ _ppuiKind = "blogger#postPerUserInfo"
, _ppuiBlogId = Nothing
, _ppuiUserId = Nothing
, _ppuiHasEditAccess = Nothing
, _ppuiPostId = Nothing
}
-- | The kind of this entity. Always blogger#postPerUserInfo
ppuiKind :: Lens' PostPerUserInfo Text
ppuiKind = lens _ppuiKind (\ s a -> s{_ppuiKind = a})
-- | ID of the Blog that the post resource belongs to.
ppuiBlogId :: Lens' PostPerUserInfo (Maybe Text)
ppuiBlogId
= lens _ppuiBlogId (\ s a -> s{_ppuiBlogId = a})
-- | ID of the User.
ppuiUserId :: Lens' PostPerUserInfo (Maybe Text)
ppuiUserId
= lens _ppuiUserId (\ s a -> s{_ppuiUserId = a})
-- | True if the user has Author level access to the post.
ppuiHasEditAccess :: Lens' PostPerUserInfo (Maybe Bool)
ppuiHasEditAccess
= lens _ppuiHasEditAccess
(\ s a -> s{_ppuiHasEditAccess = a})
-- | ID of the Post resource.
ppuiPostId :: Lens' PostPerUserInfo (Maybe Text)
ppuiPostId
= lens _ppuiPostId (\ s a -> s{_ppuiPostId = a})
instance FromJSON PostPerUserInfo where
parseJSON
= withObject "PostPerUserInfo"
(\ o ->
PostPerUserInfo' <$>
(o .:? "kind" .!= "blogger#postPerUserInfo") <*>
(o .:? "blogId")
<*> (o .:? "userId")
<*> (o .:? "hasEditAccess")
<*> (o .:? "postId"))
instance ToJSON PostPerUserInfo where
toJSON PostPerUserInfo'{..}
= object
(catMaybes
[Just ("kind" .= _ppuiKind),
("blogId" .=) <$> _ppuiBlogId,
("userId" .=) <$> _ppuiUserId,
("hasEditAccess" .=) <$> _ppuiHasEditAccess,
("postId" .=) <$> _ppuiPostId])
--
-- /See:/ 'pageviewsCountsItem' smart constructor.
data PageviewsCountsItem = PageviewsCountsItem'
{ _pciTimeRange :: !(Maybe Text)
, _pciCount :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PageviewsCountsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pciTimeRange'
--
-- * 'pciCount'
pageviewsCountsItem
:: PageviewsCountsItem
pageviewsCountsItem =
PageviewsCountsItem'
{ _pciTimeRange = Nothing
, _pciCount = Nothing
}
-- | Time range the given count applies to
pciTimeRange :: Lens' PageviewsCountsItem (Maybe Text)
pciTimeRange
= lens _pciTimeRange (\ s a -> s{_pciTimeRange = a})
-- | Count of page views for the given time range
pciCount :: Lens' PageviewsCountsItem (Maybe Int64)
pciCount
= lens _pciCount (\ s a -> s{_pciCount = a}) .
mapping _Coerce
instance FromJSON PageviewsCountsItem where
parseJSON
= withObject "PageviewsCountsItem"
(\ o ->
PageviewsCountsItem' <$>
(o .:? "timeRange") <*> (o .:? "count"))
instance ToJSON PageviewsCountsItem where
toJSON PageviewsCountsItem'{..}
= object
(catMaybes
[("timeRange" .=) <$> _pciTimeRange,
("count" .=) <$> _pciCount])
--
-- /See:/ 'comment' smart constructor.
data Comment = Comment'
{ _cStatus :: !(Maybe Text)
, _cPost :: !(Maybe CommentPost)
, _cKind :: !Text
, _cPublished :: !(Maybe DateTime')
, _cBlog :: !(Maybe CommentBlog)
, _cContent :: !(Maybe Text)
, _cSelfLink :: !(Maybe Text)
, _cAuthor :: !(Maybe CommentAuthor)
, _cId :: !(Maybe Text)
, _cUpdated :: !(Maybe DateTime')
, _cInReplyTo :: !(Maybe CommentInReplyTo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Comment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cStatus'
--
-- * 'cPost'
--
-- * 'cKind'
--
-- * 'cPublished'
--
-- * 'cBlog'
--
-- * 'cContent'
--
-- * 'cSelfLink'
--
-- * 'cAuthor'
--
-- * 'cId'
--
-- * 'cUpdated'
--
-- * 'cInReplyTo'
comment
:: Comment
comment =
Comment'
{ _cStatus = Nothing
, _cPost = Nothing
, _cKind = "blogger#comment"
, _cPublished = Nothing
, _cBlog = Nothing
, _cContent = Nothing
, _cSelfLink = Nothing
, _cAuthor = Nothing
, _cId = Nothing
, _cUpdated = Nothing
, _cInReplyTo = Nothing
}
-- | The status of the comment (only populated for admin users)
cStatus :: Lens' Comment (Maybe Text)
cStatus = lens _cStatus (\ s a -> s{_cStatus = a})
-- | Data about the post containing this comment.
cPost :: Lens' Comment (Maybe CommentPost)
cPost = lens _cPost (\ s a -> s{_cPost = a})
-- | The kind of this entry. Always blogger#comment
cKind :: Lens' Comment Text
cKind = lens _cKind (\ s a -> s{_cKind = a})
-- | RFC 3339 date-time when this comment was published.
cPublished :: Lens' Comment (Maybe UTCTime)
cPublished
= lens _cPublished (\ s a -> s{_cPublished = a}) .
mapping _DateTime
-- | Data about the blog containing this comment.
cBlog :: Lens' Comment (Maybe CommentBlog)
cBlog = lens _cBlog (\ s a -> s{_cBlog = a})
-- | The actual content of the comment. May include HTML markup.
cContent :: Lens' Comment (Maybe Text)
cContent = lens _cContent (\ s a -> s{_cContent = a})
-- | The API REST URL to fetch this resource from.
cSelfLink :: Lens' Comment (Maybe Text)
cSelfLink
= lens _cSelfLink (\ s a -> s{_cSelfLink = a})
-- | The author of this Comment.
cAuthor :: Lens' Comment (Maybe CommentAuthor)
cAuthor = lens _cAuthor (\ s a -> s{_cAuthor = a})
-- | The identifier for this resource.
cId :: Lens' Comment (Maybe Text)
cId = lens _cId (\ s a -> s{_cId = a})
-- | RFC 3339 date-time when this comment was last updated.
cUpdated :: Lens' Comment (Maybe UTCTime)
cUpdated
= lens _cUpdated (\ s a -> s{_cUpdated = a}) .
mapping _DateTime
-- | Data about the comment this is in reply to.
cInReplyTo :: Lens' Comment (Maybe CommentInReplyTo)
cInReplyTo
= lens _cInReplyTo (\ s a -> s{_cInReplyTo = a})
instance FromJSON Comment where
parseJSON
= withObject "Comment"
(\ o ->
Comment' <$>
(o .:? "status") <*> (o .:? "post") <*>
(o .:? "kind" .!= "blogger#comment")
<*> (o .:? "published")
<*> (o .:? "blog")
<*> (o .:? "content")
<*> (o .:? "selfLink")
<*> (o .:? "author")
<*> (o .:? "id")
<*> (o .:? "updated")
<*> (o .:? "inReplyTo"))
instance ToJSON Comment where
toJSON Comment'{..}
= object
(catMaybes
[("status" .=) <$> _cStatus, ("post" .=) <$> _cPost,
Just ("kind" .= _cKind),
("published" .=) <$> _cPublished,
("blog" .=) <$> _cBlog, ("content" .=) <$> _cContent,
("selfLink" .=) <$> _cSelfLink,
("author" .=) <$> _cAuthor, ("id" .=) <$> _cId,
("updated" .=) <$> _cUpdated,
("inReplyTo" .=) <$> _cInReplyTo])
-- | Data about the post containing this comment.
--
-- /See:/ 'commentPost' smart constructor.
newtype CommentPost = CommentPost'
{ _cpId :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentPost' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpId'
commentPost
:: CommentPost
commentPost =
CommentPost'
{ _cpId = Nothing
}
-- | The identifier of the post containing this comment.
cpId :: Lens' CommentPost (Maybe Text)
cpId = lens _cpId (\ s a -> s{_cpId = a})
instance FromJSON CommentPost where
parseJSON
= withObject "CommentPost"
(\ o -> CommentPost' <$> (o .:? "id"))
instance ToJSON CommentPost where
toJSON CommentPost'{..}
= object (catMaybes [("id" .=) <$> _cpId])
--
-- /See:/ 'blogPerUserInfo' smart constructor.
data BlogPerUserInfo = BlogPerUserInfo'
{ _bpuiPhotosAlbumKey :: !(Maybe Text)
, _bpuiKind :: !Text
, _bpuiBlogId :: !(Maybe Text)
, _bpuiUserId :: !(Maybe Text)
, _bpuiRole :: !(Maybe Text)
, _bpuiHasAdminAccess :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogPerUserInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bpuiPhotosAlbumKey'
--
-- * 'bpuiKind'
--
-- * 'bpuiBlogId'
--
-- * 'bpuiUserId'
--
-- * 'bpuiRole'
--
-- * 'bpuiHasAdminAccess'
blogPerUserInfo
:: BlogPerUserInfo
blogPerUserInfo =
BlogPerUserInfo'
{ _bpuiPhotosAlbumKey = Nothing
, _bpuiKind = "blogger#blogPerUserInfo"
, _bpuiBlogId = Nothing
, _bpuiUserId = Nothing
, _bpuiRole = Nothing
, _bpuiHasAdminAccess = Nothing
}
-- | The Photo Album Key for the user when adding photos to the blog
bpuiPhotosAlbumKey :: Lens' BlogPerUserInfo (Maybe Text)
bpuiPhotosAlbumKey
= lens _bpuiPhotosAlbumKey
(\ s a -> s{_bpuiPhotosAlbumKey = a})
-- | The kind of this entity. Always blogger#blogPerUserInfo
bpuiKind :: Lens' BlogPerUserInfo Text
bpuiKind = lens _bpuiKind (\ s a -> s{_bpuiKind = a})
-- | ID of the Blog resource
bpuiBlogId :: Lens' BlogPerUserInfo (Maybe Text)
bpuiBlogId
= lens _bpuiBlogId (\ s a -> s{_bpuiBlogId = a})
-- | ID of the User
bpuiUserId :: Lens' BlogPerUserInfo (Maybe Text)
bpuiUserId
= lens _bpuiUserId (\ s a -> s{_bpuiUserId = a})
-- | Access permissions that the user has for the blog (ADMIN, AUTHOR, or
-- READER).
bpuiRole :: Lens' BlogPerUserInfo (Maybe Text)
bpuiRole = lens _bpuiRole (\ s a -> s{_bpuiRole = a})
-- | True if the user has Admin level access to the blog.
bpuiHasAdminAccess :: Lens' BlogPerUserInfo (Maybe Bool)
bpuiHasAdminAccess
= lens _bpuiHasAdminAccess
(\ s a -> s{_bpuiHasAdminAccess = a})
instance FromJSON BlogPerUserInfo where
parseJSON
= withObject "BlogPerUserInfo"
(\ o ->
BlogPerUserInfo' <$>
(o .:? "photosAlbumKey") <*>
(o .:? "kind" .!= "blogger#blogPerUserInfo")
<*> (o .:? "blogId")
<*> (o .:? "userId")
<*> (o .:? "role")
<*> (o .:? "hasAdminAccess"))
instance ToJSON BlogPerUserInfo where
toJSON BlogPerUserInfo'{..}
= object
(catMaybes
[("photosAlbumKey" .=) <$> _bpuiPhotosAlbumKey,
Just ("kind" .= _bpuiKind),
("blogId" .=) <$> _bpuiBlogId,
("userId" .=) <$> _bpuiUserId,
("role" .=) <$> _bpuiRole,
("hasAdminAccess" .=) <$> _bpuiHasAdminAccess])
--
-- /See:/ 'postUserInfosList' smart constructor.
data PostUserInfosList = PostUserInfosList'
{ _puilNextPageToken :: !(Maybe Text)
, _puilKind :: !Text
, _puilItems :: !(Maybe [PostUserInfo])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostUserInfosList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'puilNextPageToken'
--
-- * 'puilKind'
--
-- * 'puilItems'
postUserInfosList
:: PostUserInfosList
postUserInfosList =
PostUserInfosList'
{ _puilNextPageToken = Nothing
, _puilKind = "blogger#postUserInfosList"
, _puilItems = Nothing
}
-- | Pagination token to fetch the next page, if one exists.
puilNextPageToken :: Lens' PostUserInfosList (Maybe Text)
puilNextPageToken
= lens _puilNextPageToken
(\ s a -> s{_puilNextPageToken = a})
-- | The kind of this entity. Always blogger#postUserInfosList
puilKind :: Lens' PostUserInfosList Text
puilKind = lens _puilKind (\ s a -> s{_puilKind = a})
-- | The list of Posts with User information for the post, for this Blog.
puilItems :: Lens' PostUserInfosList [PostUserInfo]
puilItems
= lens _puilItems (\ s a -> s{_puilItems = a}) .
_Default
. _Coerce
instance FromJSON PostUserInfosList where
parseJSON
= withObject "PostUserInfosList"
(\ o ->
PostUserInfosList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "blogger#postUserInfosList")
<*> (o .:? "items" .!= mempty))
instance ToJSON PostUserInfosList where
toJSON PostUserInfosList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _puilNextPageToken,
Just ("kind" .= _puilKind),
("items" .=) <$> _puilItems])
-- | The author of this Comment.
--
-- /See:/ 'commentAuthor' smart constructor.
data CommentAuthor = CommentAuthor'
{ _caImage :: !(Maybe CommentAuthorImage)
, _caURL :: !(Maybe Text)
, _caDisplayName :: !(Maybe Text)
, _caId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentAuthor' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'caImage'
--
-- * 'caURL'
--
-- * 'caDisplayName'
--
-- * 'caId'
commentAuthor
:: CommentAuthor
commentAuthor =
CommentAuthor'
{ _caImage = Nothing
, _caURL = Nothing
, _caDisplayName = Nothing
, _caId = Nothing
}
-- | The comment creator\'s avatar.
caImage :: Lens' CommentAuthor (Maybe CommentAuthorImage)
caImage = lens _caImage (\ s a -> s{_caImage = a})
-- | The URL of the Comment creator\'s Profile page.
caURL :: Lens' CommentAuthor (Maybe Text)
caURL = lens _caURL (\ s a -> s{_caURL = a})
-- | The display name.
caDisplayName :: Lens' CommentAuthor (Maybe Text)
caDisplayName
= lens _caDisplayName
(\ s a -> s{_caDisplayName = a})
-- | The identifier of the Comment creator.
caId :: Lens' CommentAuthor (Maybe Text)
caId = lens _caId (\ s a -> s{_caId = a})
instance FromJSON CommentAuthor where
parseJSON
= withObject "CommentAuthor"
(\ o ->
CommentAuthor' <$>
(o .:? "image") <*> (o .:? "url") <*>
(o .:? "displayName")
<*> (o .:? "id"))
instance ToJSON CommentAuthor where
toJSON CommentAuthor'{..}
= object
(catMaybes
[("image" .=) <$> _caImage, ("url" .=) <$> _caURL,
("displayName" .=) <$> _caDisplayName,
("id" .=) <$> _caId])
--
-- /See:/ 'blogUserInfo' smart constructor.
data BlogUserInfo = BlogUserInfo'
{ _buiKind :: !Text
, _buiBlog :: !(Maybe Blog)
, _buiBlogUserInfo :: !(Maybe BlogPerUserInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BlogUserInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'buiKind'
--
-- * 'buiBlog'
--
-- * 'buiBlogUserInfo'
blogUserInfo
:: BlogUserInfo
blogUserInfo =
BlogUserInfo'
{ _buiKind = "blogger#blogUserInfo"
, _buiBlog = Nothing
, _buiBlogUserInfo = Nothing
}
-- | The kind of this entity. Always blogger#blogUserInfo
buiKind :: Lens' BlogUserInfo Text
buiKind = lens _buiKind (\ s a -> s{_buiKind = a})
-- | The Blog resource.
buiBlog :: Lens' BlogUserInfo (Maybe Blog)
buiBlog = lens _buiBlog (\ s a -> s{_buiBlog = a})
-- | Information about a User for the Blog.
buiBlogUserInfo :: Lens' BlogUserInfo (Maybe BlogPerUserInfo)
buiBlogUserInfo
= lens _buiBlogUserInfo
(\ s a -> s{_buiBlogUserInfo = a})
instance FromJSON BlogUserInfo where
parseJSON
= withObject "BlogUserInfo"
(\ o ->
BlogUserInfo' <$>
(o .:? "kind" .!= "blogger#blogUserInfo") <*>
(o .:? "blog")
<*> (o .:? "blog_user_info"))
instance ToJSON BlogUserInfo where
toJSON BlogUserInfo'{..}
= object
(catMaybes
[Just ("kind" .= _buiKind), ("blog" .=) <$> _buiBlog,
("blog_user_info" .=) <$> _buiBlogUserInfo])
-- | The page author\'s avatar.
--
-- /See:/ 'pageAuthorImage' smart constructor.
newtype PageAuthorImage = PageAuthorImage'
{ _pURL :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PageAuthorImage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pURL'
pageAuthorImage
:: PageAuthorImage
pageAuthorImage =
PageAuthorImage'
{ _pURL = Nothing
}
-- | The page author\'s avatar URL.
pURL :: Lens' PageAuthorImage (Maybe Text)
pURL = lens _pURL (\ s a -> s{_pURL = a})
instance FromJSON PageAuthorImage where
parseJSON
= withObject "PageAuthorImage"
(\ o -> PageAuthorImage' <$> (o .:? "url"))
instance ToJSON PageAuthorImage where
toJSON PageAuthorImage'{..}
= object (catMaybes [("url" .=) <$> _pURL])
--
-- /See:/ 'commentList' smart constructor.
data CommentList = CommentList'
{ _clEtag :: !(Maybe Text)
, _clNextPageToken :: !(Maybe Text)
, _clKind :: !Text
, _clItems :: !(Maybe [Comment])
, _clPrevPageToken :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'clEtag'
--
-- * 'clNextPageToken'
--
-- * 'clKind'
--
-- * 'clItems'
--
-- * 'clPrevPageToken'
commentList
:: CommentList
commentList =
CommentList'
{ _clEtag = Nothing
, _clNextPageToken = Nothing
, _clKind = "blogger#commentList"
, _clItems = Nothing
, _clPrevPageToken = Nothing
}
-- | Etag of the response.
clEtag :: Lens' CommentList (Maybe Text)
clEtag = lens _clEtag (\ s a -> s{_clEtag = a})
-- | Pagination token to fetch the next page, if one exists.
clNextPageToken :: Lens' CommentList (Maybe Text)
clNextPageToken
= lens _clNextPageToken
(\ s a -> s{_clNextPageToken = a})
-- | The kind of this entry. Always blogger#commentList
clKind :: Lens' CommentList Text
clKind = lens _clKind (\ s a -> s{_clKind = a})
-- | The List of Comments for a Post.
clItems :: Lens' CommentList [Comment]
clItems
= lens _clItems (\ s a -> s{_clItems = a}) . _Default
. _Coerce
-- | Pagination token to fetch the previous page, if one exists.
clPrevPageToken :: Lens' CommentList (Maybe Text)
clPrevPageToken
= lens _clPrevPageToken
(\ s a -> s{_clPrevPageToken = a})
instance FromJSON CommentList where
parseJSON
= withObject "CommentList"
(\ o ->
CommentList' <$>
(o .:? "etag") <*> (o .:? "nextPageToken") <*>
(o .:? "kind" .!= "blogger#commentList")
<*> (o .:? "items" .!= mempty)
<*> (o .:? "prevPageToken"))
instance ToJSON CommentList where
toJSON CommentList'{..}
= object
(catMaybes
[("etag" .=) <$> _clEtag,
("nextPageToken" .=) <$> _clNextPageToken,
Just ("kind" .= _clKind), ("items" .=) <$> _clItems,
("prevPageToken" .=) <$> _clPrevPageToken])
--
-- /See:/ 'postImagesItem' smart constructor.
newtype PostImagesItem = PostImagesItem'
{ _piiURL :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PostImagesItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'piiURL'
postImagesItem
:: PostImagesItem
postImagesItem =
PostImagesItem'
{ _piiURL = Nothing
}
piiURL :: Lens' PostImagesItem (Maybe Text)
piiURL = lens _piiURL (\ s a -> s{_piiURL = a})
instance FromJSON PostImagesItem where
parseJSON
= withObject "PostImagesItem"
(\ o -> PostImagesItem' <$> (o .:? "url"))
instance ToJSON PostImagesItem where
toJSON PostImagesItem'{..}
= object (catMaybes [("url" .=) <$> _piiURL])
| rueshyna/gogol | gogol-blogger/gen/Network/Google/Blogger/Types/Product.hs | mpl-2.0 | 69,208 | 0 | 30 | 19,689 | 16,650 | 9,536 | 7,114 | 1,717 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Reports.Activities.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of activities for a specific customer\'s account and
-- application such as the Admin console application or the Google Drive
-- application. For more information, see the guides for administrator and
-- Google Drive activity reports. For more information about the activity
-- report\'s parameters, see the activity parameters reference guides.
--
-- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @reports.activities.list@.
module Network.Google.Resource.Reports.Activities.List
(
-- * REST Resource
ActivitiesListResource
-- * Creating a Request
, activitiesList
, ActivitiesList
-- * Request Lenses
, alXgafv
, alGroupIdFilter
, alUploadProtocol
, alStartTime
, alAccessToken
, alFilters
, alUploadType
, alCustomerId
, alActorIPAddress
, alEndTime
, alApplicationName
, alPageToken
, alEventName
, alUserKey
, alMaxResults
, alOrgUnitId
, alCallback
) where
import Network.Google.Prelude
import Network.Google.Reports.Types
-- | A resource alias for @reports.activities.list@ method which the
-- 'ActivitiesList' request conforms to.
type ActivitiesListResource =
"admin" :>
"reports" :>
"v1" :>
"activity" :>
"users" :>
Capture "userKey" Text :>
"applications" :>
Capture "applicationName"
ActivitiesListApplicationName
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "groupIdFilter" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "startTime" Text :>
QueryParam "access_token" Text :>
QueryParam "filters" Text :>
QueryParam "uploadType" Text :>
QueryParam "customerId" Text :>
QueryParam "actorIpAddress" Text :>
QueryParam "endTime" Text :>
QueryParam "pageToken" Text :>
QueryParam "eventName" Text :>
QueryParam "maxResults"
(Textual Int32)
:>
QueryParam "orgUnitID" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] Activities
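-- Informally, a request built from this resource maps onto a URL of the shape
-- (query parameters abbreviated, values illustrative):
--
-- > GET /admin/reports/v1/activity/users/{userKey}/applications/{applicationName}
-- >       ?maxResults=...&orgUnitID=...&alt=json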
-- | Retrieves a list of activities for a specific customer\'s account and
-- application such as the Admin console application or the Google Drive
-- application. For more information, see the guides for administrator and
-- Google Drive activity reports. For more information about the activity
-- report\'s parameters, see the activity parameters reference guides.
--
-- /See:/ 'activitiesList' smart constructor.
data ActivitiesList =
ActivitiesList'
{ _alXgafv :: !(Maybe Xgafv)
, _alGroupIdFilter :: !(Maybe Text)
, _alUploadProtocol :: !(Maybe Text)
, _alStartTime :: !(Maybe Text)
, _alAccessToken :: !(Maybe Text)
, _alFilters :: !(Maybe Text)
, _alUploadType :: !(Maybe Text)
, _alCustomerId :: !(Maybe Text)
, _alActorIPAddress :: !(Maybe Text)
, _alEndTime :: !(Maybe Text)
, _alApplicationName :: !ActivitiesListApplicationName
, _alPageToken :: !(Maybe Text)
, _alEventName :: !(Maybe Text)
, _alUserKey :: !Text
, _alMaxResults :: !(Textual Int32)
, _alOrgUnitId :: !Text
, _alCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ActivitiesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alXgafv'
--
-- * 'alGroupIdFilter'
--
-- * 'alUploadProtocol'
--
-- * 'alStartTime'
--
-- * 'alAccessToken'
--
-- * 'alFilters'
--
-- * 'alUploadType'
--
-- * 'alCustomerId'
--
-- * 'alActorIPAddress'
--
-- * 'alEndTime'
--
-- * 'alApplicationName'
--
-- * 'alPageToken'
--
-- * 'alEventName'
--
-- * 'alUserKey'
--
-- * 'alMaxResults'
--
-- * 'alOrgUnitId'
--
-- * 'alCallback'
activitiesList
:: ActivitiesListApplicationName -- ^ 'alApplicationName'
-> Text -- ^ 'alUserKey'
-> ActivitiesList
activitiesList pAlApplicationName_ pAlUserKey_ =
ActivitiesList'
{ _alXgafv = Nothing
, _alGroupIdFilter = Nothing
, _alUploadProtocol = Nothing
, _alStartTime = Nothing
, _alAccessToken = Nothing
, _alFilters = Nothing
, _alUploadType = Nothing
, _alCustomerId = Nothing
, _alActorIPAddress = Nothing
, _alEndTime = Nothing
, _alApplicationName = pAlApplicationName_
, _alPageToken = Nothing
, _alEventName = Nothing
, _alUserKey = pAlUserKey_
, _alMaxResults = 1000
, _alOrgUnitId = ""
, _alCallback = Nothing
}
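-- A small usage sketch (not part of the generated code; @someApplicationName@
-- stands for a value of 'ActivitiesListApplicationName', and the lens
-- operators are assumed to be re-exported by 'Network.Google.Prelude'):
--
-- > req = activitiesList someApplicationName "all"
-- >         & alMaxResults .~ 50
-- >         & alStartTime ?~ "2010-10-28T10:26:35.000Z"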
-- | V1 error format.
alXgafv :: Lens' ActivitiesList (Maybe Xgafv)
alXgafv = lens _alXgafv (\ s a -> s{_alXgafv = a})
-- | Comma separated group ids (obfuscated) on which user activities are
-- filtered, i.e. the response will contain activities for only those users
-- that are a part of at least one of the group ids mentioned here. Format:
-- \"id:abc123,id:xyz456\"
alGroupIdFilter :: Lens' ActivitiesList (Maybe Text)
alGroupIdFilter
= lens _alGroupIdFilter
(\ s a -> s{_alGroupIdFilter = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
alUploadProtocol :: Lens' ActivitiesList (Maybe Text)
alUploadProtocol
= lens _alUploadProtocol
(\ s a -> s{_alUploadProtocol = a})
-- | Sets the beginning of the range of time shown in the report. The date is
-- in the RFC 3339 format, for example 2010-10-28T10:26:35.000Z. The report
-- returns all activities from \`startTime\` until \`endTime\`. The
-- \`startTime\` must be before the \`endTime\` (if specified) and the
-- current time when the request is made, or the API returns an error.
alStartTime :: Lens' ActivitiesList (Maybe Text)
alStartTime
= lens _alStartTime (\ s a -> s{_alStartTime = a})
-- | OAuth access token.
alAccessToken :: Lens' ActivitiesList (Maybe Text)
alAccessToken
= lens _alAccessToken
(\ s a -> s{_alAccessToken = a})
-- | The \`filters\` query string is a comma-separated list. The list is
-- composed of event parameters that are manipulated by relational
-- operators. Event parameters are in the form \`parameter1 name[parameter1
-- value],parameter2 name[parameter2 value],...\` These event parameters
-- are associated with a specific \`eventName\`. An empty report is
-- returned if the filtered request\'s parameter does not belong to the
-- \`eventName\`. For more information about \`eventName\` parameters, see
-- the list of event names for various applications above in
-- \`applicationName\`. In the following Admin Activity example, the \<>
-- operator is URL-encoded in the request\'s query string (%3C%3E):
-- GET...&eventName=CHANGE_CALENDAR_SETTING
-- &filters=NEW_VALUE%3C%3EREAD_ONLY_ACCESS In the following Drive example,
-- the list can be a view or edit event\'s \`doc_id\` parameter with a
-- value that is manipulated by an \'equal to\' (==) or \'not equal to\'
-- (\<>) relational operator. In the first example, the report returns each
-- edited document\'s \`doc_id\`. In the second example, the report returns
-- each viewed document\'s \`doc_id\` that equals the value 12345 and does
-- not return any viewed document\'s which have a \`doc_id\` value of
-- 98765. The \<> operator is URL-encoded in the request\'s query string
-- (%3C%3E): GET...&eventName=edit&filters=doc_id
-- GET...&eventName=view&filters=doc_id==12345,doc_id%3C%3E98765 The
-- relational operators include: - \`==\` - \'equal to\'. - \`\<>\` - \'not
-- equal to\'. It is URL-encoded (%3C%3E). - \`\<\` - \'less than\'. It is
-- URL-encoded (%3C). - \`\<=\` - \'less than or equal to\'. It is
-- URL-encoded (%3C=). - \`>\` - \'greater than\'. It is URL-encoded (%3E).
-- - \`>=\` - \'greater than or equal to\'. It is URL-encoded (%3E=).
-- *Note:* The API doesn\'t accept multiple values of a parameter. If a
-- particular parameter is supplied more than once in the API request, the
-- API only accepts the last value of that request parameter. In addition,
-- if an invalid request parameter is supplied in the API request, the API
-- ignores that request parameter and returns the response corresponding to
-- the remaining valid request parameters. If no parameters are requested,
-- all parameters are returned.
alFilters :: Lens' ActivitiesList (Maybe Text)
alFilters
= lens _alFilters (\ s a -> s{_alFilters = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
alUploadType :: Lens' ActivitiesList (Maybe Text)
alUploadType
= lens _alUploadType (\ s a -> s{_alUploadType = a})
-- | The unique ID of the customer to retrieve data for.
alCustomerId :: Lens' ActivitiesList (Maybe Text)
alCustomerId
= lens _alCustomerId (\ s a -> s{_alCustomerId = a})
-- | The Internet Protocol (IP) Address of host where the event was
-- performed. This is an additional way to filter a report\'s summary using
-- the IP address of the user whose activity is being reported. This IP
-- address may or may not reflect the user\'s physical location. For
-- example, the IP address can be the user\'s proxy server\'s address or a
-- virtual private network (VPN) address. This parameter supports both IPv4
-- and IPv6 address versions.
alActorIPAddress :: Lens' ActivitiesList (Maybe Text)
alActorIPAddress
= lens _alActorIPAddress
(\ s a -> s{_alActorIPAddress = a})
-- | Sets the end of the range of time shown in the report. The date is in
-- the RFC 3339 format, for example 2010-10-28T10:26:35.000Z. The default
-- value is the approximate time of the API request. An API report has
-- three basic time concepts: - *Date of the API\'s request for a report*:
-- When the API created and retrieved the report. - *Report\'s start time*:
-- The beginning of the timespan shown in the report. The \`startTime\`
-- must be before the \`endTime\` (if specified) and the current time when
-- the request is made, or the API returns an error. - *Report\'s end
-- time*: The end of the timespan shown in the report. For example, the
-- timespan of events summarized in a report can start in April and end in
-- May. The report itself can be requested in August. If the \`endTime\` is
-- not specified, the report returns all activities from the \`startTime\`
-- until the current time or the most recent 180 days if the \`startTime\`
-- is more than 180 days in the past.
alEndTime :: Lens' ActivitiesList (Maybe Text)
alEndTime
= lens _alEndTime (\ s a -> s{_alEndTime = a})
-- | Application name for which the events are to be retrieved.
alApplicationName :: Lens' ActivitiesList ActivitiesListApplicationName
alApplicationName
= lens _alApplicationName
(\ s a -> s{_alApplicationName = a})
-- | The token to specify next page. A report with multiple pages has a
-- \`nextPageToken\` property in the response. In your follow-on request
-- getting the next page of the report, enter the \`nextPageToken\` value
-- in the \`pageToken\` query string.
alPageToken :: Lens' ActivitiesList (Maybe Text)
alPageToken
= lens _alPageToken (\ s a -> s{_alPageToken = a})
-- | The name of the event being queried by the API. Each \`eventName\` is
-- related to a specific Google Workspace service or feature which the API
-- organizes into types of events. An example is the Google Calendar events
-- in the Admin console application\'s reports. The Calendar Settings
-- \`type\` structure has all of the Calendar \`eventName\` activities
-- reported by the API. When an administrator changes a Calendar setting,
-- the API reports this activity in the Calendar Settings \`type\` and
-- \`eventName\` parameters. For more information about \`eventName\` query
-- strings and parameters, see the list of event names for various
-- applications above in \`applicationName\`.
alEventName :: Lens' ActivitiesList (Maybe Text)
alEventName
= lens _alEventName (\ s a -> s{_alEventName = a})
-- | Represents the profile ID or the user email for which the data should be
-- filtered. Can be \`all\` for all information, or \`userKey\` for a
-- user\'s unique Google Workspace profile ID or their primary email
-- address. Must not be a deleted user. For a deleted user, call
-- \`users.list\` in Directory API with \`showDeleted=true\`, then use the
-- returned \`ID\` as the \`userKey\`.
alUserKey :: Lens' ActivitiesList Text
alUserKey
= lens _alUserKey (\ s a -> s{_alUserKey = a})
-- | Determines how many activity records are shown on each response page.
-- For example, if the request sets \`maxResults=1\` and the report has two
-- activities, the report has two pages. The response\'s \`nextPageToken\`
-- property has the token to the second page. The \`maxResults\` query
-- string is optional in the request. The default value is 1000.
alMaxResults :: Lens' ActivitiesList Int32
alMaxResults
= lens _alMaxResults (\ s a -> s{_alMaxResults = a})
. _Coerce
-- | ID of the organizational unit to report on. Activity records will be
-- shown only for users who belong to the specified organizational unit.
-- Data before Dec 17, 2018 doesn\'t appear in the filtered results.
alOrgUnitId :: Lens' ActivitiesList Text
alOrgUnitId
= lens _alOrgUnitId (\ s a -> s{_alOrgUnitId = a})
-- | JSONP
alCallback :: Lens' ActivitiesList (Maybe Text)
alCallback
= lens _alCallback (\ s a -> s{_alCallback = a})
instance GoogleRequest ActivitiesList where
type Rs ActivitiesList = Activities
type Scopes ActivitiesList =
'["https://www.googleapis.com/auth/admin.reports.audit.readonly"]
requestClient ActivitiesList'{..}
= go _alUserKey _alApplicationName _alXgafv
_alGroupIdFilter
_alUploadProtocol
_alStartTime
_alAccessToken
_alFilters
_alUploadType
_alCustomerId
_alActorIPAddress
_alEndTime
_alPageToken
_alEventName
(Just _alMaxResults)
(Just _alOrgUnitId)
_alCallback
(Just AltJSON)
reportsService
where go
= buildClient (Proxy :: Proxy ActivitiesListResource)
mempty
| brendanhay/gogol | gogol-admin-reports/gen/Network/Google/Resource/Reports/Activities/List.hs | mpl-2.0 | 15,536 | 0 | 31 | 3,686 | 1,682 | 999 | 683 | 224 | 1 |
{-#LANGUAGE OverloadedStrings#-}
module Data.P440.XML.Instances.Parse.ComplexTypes where
import qualified Data.P440.Domain.SimpleTypes as S
import qualified Data.P440.Domain.ComplexTypes as C
import Control.Applicative
import Data.P440.XML.Parse
-- Complex types from section 2.18
-- Bank (branch) details, БанкТип
банк name =
complex_ name
(C.Банк <$> (attribute "РегНом")
<*> (attribute "НомФил")
<*> (S.БИК <$> attribute "БИК")
<*> (attribute "НаимБанк")
<*> (S.ИННЮЛ <$> attribute "ИННБанк")
<*> (S.КПП <$> attribute "КППБанк"))
-- Bank of Russia institution details, УБРТип
убр name =
complex_ name
(C.УБР <$> (S.БИК <$> attribute "БИК")
<*> (attribute "НаимУБР")
<*> (attribute "ИННБР")
<*> (S.КПП <$> attribute "КППУБР"))
-- Tax authority details, СвНОТип
свНО' name =
complex'_ name
(C.СвНО <$> (S.ИФНС <$> attribute "КодНО")
<*> (attribute "НаимНО"))
-- Head (or deputy head) of the tax authority, РукНОТип
рукНО' name =
complex' name
(C.РукНО <$> attributeMaybe "КласЧин")
(\рукно -> рукно <$> фио' "ФИО")
-- Representative of the credit organization / Bank of Russia institution, ПредБТип
предБ' name =
complex' name
(C.ПредБ <$> attribute "Должность")
(\предб -> предб <$> фио' "ФИО")
-- Payer: organization, ПлЮЛТип
плЮЛ name =
complex_ name
(C.ПлЮЛ <$> (S.ИННЮЛ <$> attribute "ИННЮЛ")
<*> (S.КПП <$> attribute "КПП")
<*> (attribute "НаимЮЛ"))
-- Payer: sole proprietor / notary / lawyer, ПлИПТип
плИП name =
complex name
(C.ПлИП <$> (S.ИННФЛ <$> attribute "ИННИП"))
(\плип -> плип <$> фио' "ФИО")
-- Payer: individual, ПлФЛТип
плФЛ name =
complex name
(C.ПлФЛ <$> ((S.ИННФЛ <$>) <$> attributeMaybe "ИННФЛ"))
(\плфл -> плфл <$> фио' "ФИО")
-- Payer: individual (full details)
пфл name =
complex name
(C.ПФЛ <$> ((S.ИННФЛ <$>) <$> attributeMaybe "ИННФЛ")
<*> (S.Date <$> attribute "ДатаРожд")
<*> (attribute "МестоРожд")
<*> (attribute "КодДУЛ")
<*> (attribute "СерНомДок")
<*> ((S.Date <$>) <$> attributeMaybe "ДатаДок"))
(\пфл' -> пфл' <$> фио' "ФИО"
<*> адрРФ "АдрПлат")
-- Full name (surname, given name, patronymic), ФИОТип
фио' name =
complex'_ name
(C.ФИО <$> attribute "Фамилия"
<*> attribute "Имя"
<*> attributeMaybe "Отчество")
-- Address in the Russian Federation, АдрРФТип
адрРФ name =
complex_ name
(C.АдрРФ <$> attribute "Индекс"
<*> attribute "КодРегион"
<*> attributeMaybe "Район"
<*> attributeMaybe "Город"
<*> attributeMaybe "НаселПункт"
<*> attributeMaybe "Улица"
<*> attributeMaybe "Дом"
<*> attributeMaybe "Корпус"
<*> attributeMaybe "Кварт")
адрРФ' name =
complex'_ name
(C.АдрРФ <$> attribute "Индекс"
<*> attribute "КодРегион"
<*> attributeMaybe "Район"
<*> attributeMaybe "Город"
<*> attributeMaybe "НаселПункт"
<*> attributeMaybe "Улица"
<*> attributeMaybe "Дом"
<*> attributeMaybe "Корпус"
<*> attributeMaybe "Кварт")
| Macil-dev/p440 | src/Data/P440/XML/Instances/Parse/ComplexTypes.hs | unlicense | 4,267 | 0 | 17 | 1,131 | 1,178 | 586 | 592 | 82 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
module Nanocoin.Network.Cmd (
Cmd(..),
cmdProc
) where
{-
This module is necessary because the Process type cannot easily be the base
monad for some transformer stacks because Control.Distributed.Process does
not export the `Process` monad datatype. This means we cannot define certain
MonadX instances for the datatype and, thus, need to build architectural work
arounds for this problem since these transformer stacks do not support sending
cloud haskell messages to other processes within them.
For instance:
The RPC and CLI modules cannot have `Process` as the base monad in their
transformer stacks, so instead we have to pass a Control.Concurrent.Chan channel
to those functions where, whenever a message must be issued to the network from
these modules, they must feed a `Cmd` value into the channel which is then read
by the `Cmd` cloud-haskell process, and translated into a network Msg and finally
relayed to the network.
-}
import Protolude
import Control.Concurrent.Chan (Chan(..), readChan)
import Control.Distributed.Process.Lifted
import Data.Serialize (Serialize)
import Logger
import Nanocoin.Block (Block)
import Nanocoin.Transaction (Transaction)
import Nanocoin.Network.Node (NodeProcessM, nsendAllPeers, getSelfPeer)
import Nanocoin.Network.Message (Msg(..))
import Nanocoin.Network.Service (Service(Messaging))
data Cmd
= TransactionCmd Transaction
| BlockCmd Block
deriving (Eq, Show, Generic, Serialize)
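-- A minimal sketch of the intended flow (helper name hypothetical): an RPC or
-- CLI handler that cannot run inside 'Process' broadcasts a transaction by
-- writing a 'Cmd' to the shared channel; 'cmdProc' reads it and relays it to
-- the network:
--
-- > issueTransaction :: Chan Cmd -> Transaction -> IO ()
-- > issueTransaction chan tx = Control.Concurrent.Chan.writeChan chan (TransactionCmd tx)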
cmdProc :: Chan Cmd -> NodeProcessM ()
cmdProc chan =
forever $ do
handleCmd =<<
liftIO (readChan chan)
handleCmd :: Cmd -> NodeProcessM ()
handleCmd cmd = do
logInfoText $ "handleCmd: ReceivedCmd: " <> show cmd
selfPeer <- getSelfPeer
case cmd of
TransactionCmd tx ->
nsendAllPeers Messaging $
TransactionMsg tx
BlockCmd block ->
nsendAllPeers Messaging $
BlockMsg selfPeer block
| tdietert/nanocoin | src/Nanocoin/Network/Cmd.hs | apache-2.0 | 1,986 | 0 | 11 | 371 | 298 | 165 | 133 | 35 | 2 |
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Utilities for rewriting: e.g. inlining, specialisation, etc.
module CLaSH.Rewrite.Util where
import Control.DeepSeq
import Control.Lens (Lens', (%=), (+=), (^.))
import qualified Control.Lens as Lens
import qualified Control.Monad as Monad
import qualified Control.Monad.Reader as Reader
import qualified Control.Monad.State as State
import Control.Monad.Trans.Class (lift)
import qualified Control.Monad.Writer as Writer
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Lazy as HML
import qualified Data.HashMap.Strict as HMS
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Monoid as Monoid
import qualified Data.Set as Set
import qualified Data.Set.Lens as Lens
import Unbound.Generics.LocallyNameless (Fresh, bind,
embed, makeName, name2String,
rebind, rec, string2Name, unbind,
unembed, unrec)
import qualified Unbound.Generics.LocallyNameless as Unbound
import CLaSH.Core.DataCon (dataConInstArgTys)
import CLaSH.Core.FreeVars (termFreeIds, termFreeTyVars, typeFreeVars)
import CLaSH.Core.Pretty (showDoc)
import CLaSH.Core.Subst (substTm)
import CLaSH.Core.Term (LetBinding, Pat (..), Term (..),
TmName)
import CLaSH.Core.TyCon (TyCon, TyConName, tyConDataCons)
import CLaSH.Core.Type (KindOrType, Type (..),
TypeView (..), transparentTy,
typeKind, coreView)
import CLaSH.Core.Util (Delta, Gamma, collectArgs,
mkAbstraction, mkApps, mkId,
mkLams, mkTmApps, mkTyApps,
mkTyLams, mkTyVar, termType)
import CLaSH.Core.Var (Id, TyVar, Var (..))
import CLaSH.Netlist.Util (representableType)
import CLaSH.Rewrite.Types
import CLaSH.Util
-- | Lift an action working in the inner monad to the 'RewriteMonad'
liftR :: Monad m => m a -> RewriteMonad m a
liftR m = lift . lift . lift . lift $ m
-- | Lift an action working in the inner monad to the 'RewriteSession'
liftRS :: Monad m => m a -> RewriteSession m a
liftRS m = lift . lift . lift $ m
-- | Record if a transformation is succesfully applied
apply :: (Monad m, Functor m)
=> String -- ^ Name of the transformation
-> Rewrite m -- ^ Transformation to be applied
-> Rewrite m
apply name rewrite ctx expr = R $ do
lvl <- Lens.view dbgLevel
let before = showDoc expr
(expr', anyChanged) <- traceIf (lvl >= DebugAll) ("Trying: " ++ name ++ " on:\n" ++ before) $ Writer.listen $ runR $ rewrite ctx expr
let hasChanged = Monoid.getAny anyChanged
Monad.when hasChanged $ transformCounter += 1
let after = showDoc expr'
let expr'' = if hasChanged then expr' else expr
Monad.when (lvl > DebugNone && hasChanged) $ do
tcm <- Lens.use tcCache
beforeTy <- fmap transparentTy $ termType tcm expr
let beforeFTV = Lens.setOf termFreeTyVars expr
beforeFV <- Lens.setOf <$> localFreeIds <*> pure expr
afterTy <- fmap transparentTy $ termType tcm expr'
      let afterFTV = Lens.setOf termFreeTyVars expr'
afterFV <- Lens.setOf <$> localFreeIds <*> pure expr'
let newFV = Set.size afterFTV > Set.size beforeFTV ||
Set.size afterFV > Set.size beforeFV
Monad.when newFV $
error ( concat [ $(curLoc)
, "Error when applying rewrite ", name
, " to:\n" , before
, "\nResult:\n" ++ after ++ "\n"
, "Changes free variables from: ", show (beforeFTV,beforeFV)
, "\nto: ", show (afterFTV,afterFV)
]
)
traceIf ( beforeTy /= afterTy)
( concat [ $(curLoc)
, "Error when applying rewrite ", name
, " to:\n" , before
, "\nResult:\n" ++ after ++ "\n"
, "Changes type from:\n", showDoc beforeTy
, "\nto:\n", showDoc afterTy
]
) (return ())
Monad.when (lvl >= DebugApplied && not hasChanged && expr /= expr') $
error $ $(curLoc) ++ "Expression changed without notice(" ++ name ++ "): before" ++ before ++ "\nafter:\n" ++ after
traceIf (lvl >= DebugName && hasChanged) name $
traceIf (lvl >= DebugApplied && hasChanged) ("Changes when applying rewrite to:\n" ++ before ++ "\nResult:\n" ++ after ++ "\n") $
traceIf (lvl >= DebugAll && not hasChanged) ("No changes when applying rewrite " ++ name ++ " to:\n" ++ after ++ "\n") $
return expr''
-- | Perform a transformation on a Term
runRewrite :: (Monad m, Functor m)
=> String -- ^ Name of the transformation
-> Rewrite m -- ^ Transformation to perform
-> Term -- ^ Term to transform
-> RewriteSession m Term
runRewrite name rewrite expr = do
(expr',_) <- Writer.runWriterT . runR $ apply name rewrite [] expr
return expr'
-- | Evaluate a RewriteSession to its inner monad
runRewriteSession :: (Functor m, Monad m)
=> DebugLevel
-> RewriteState
-> RewriteSession m a
-> m a
runRewriteSession lvl st
= Unbound.runFreshMT
. fmap (\(a,s) -> traceIf True ("Applied " ++ show (s ^. transformCounter) ++ " transformations") a)
. (`State.runStateT` st)
. (`Reader.runReaderT` RE lvl)
-- | Notify that a transformation has changed the expression
setChanged :: Monad m => RewriteMonad m ()
setChanged = Writer.tell (Monoid.Any True)
-- | Identity function that additionally notifies that a transformation has
-- changed the expression
changed :: Monad m => a -> RewriteMonad m a
changed val = do
Writer.tell (Monoid.Any True)
return val
-- | Create a type and kind context out of a transformation context
contextEnv :: [CoreContext]
-> (Gamma, Delta)
contextEnv = go HML.empty HML.empty
where
go gamma delta [] = (gamma,delta)
go gamma delta (LetBinding ids:ctx) = go gamma' delta ctx
where
gamma' = foldl addToGamma gamma ids
go gamma delta (LetBody ids:ctx) = go gamma' delta ctx
where
gamma' = foldl addToGamma gamma ids
go gamma delta (LamBody lId:ctx) = go gamma' delta ctx
where
gamma' = addToGamma gamma lId
go gamma delta (TyLamBody tv:ctx) = go gamma delta' ctx
where
delta' = addToDelta delta tv
go gamma delta (CaseAlt ids:ctx) = go gamma' delta ctx
where
gamma' = foldl addToGamma gamma ids
go gamma delta (_:ctx) = go gamma delta ctx
addToGamma gamma (Id idName ty) = HML.insert idName (unembed ty) gamma
addToGamma _ _ = error $ $(curLoc) ++ "Adding TyVar to Gamma"
addToDelta delta (TyVar tvName ki) = HML.insert tvName (unembed ki) delta
addToDelta _ _ = error $ $(curLoc) ++ "Adding Id to Delta"
-- | Create a complete type and kind context out of the global binders and the
-- transformation context
mkEnv :: (Functor m, Monad m)
=> [CoreContext]
-> RewriteMonad m (Gamma, Delta)
mkEnv ctx = do
let (gamma,delta) = contextEnv ctx
tsMap <- fmap (HML.map fst) $ Lens.use bindings
let gamma' = tsMap `HML.union` gamma
return (gamma',delta)
-- | Make a new binder and variable reference for a term
mkTmBinderFor :: (Functor m, Fresh m, MonadUnique m)
=> HashMap TyConName TyCon -- ^ TyCon cache
-> String -- ^ Name of the new binder
-> Term -- ^ Term to bind
-> m (Id, Term)
mkTmBinderFor tcm name e = do
(Left r) <- mkBinderFor tcm name (Left e)
return r
-- | Make a new binder and variable reference for either a term or a type
mkBinderFor :: (Functor m, Monad m, MonadUnique m, Fresh m)
=> HashMap TyConName TyCon -- ^ TyCon cache
-> String -- ^ Name of the new binder
-> Either Term Type -- ^ Type or Term to bind
-> m (Either (Id,Term) (TyVar,Type))
mkBinderFor tcm name (Left term) =
Left <$> (mkInternalVar name =<< termType tcm term)
mkBinderFor tcm name (Right ty) = do
name' <- fmap (makeName name . toInteger) getUniqueM
let kind = typeKind tcm ty
return $ Right (TyVar name' (embed kind), VarTy kind name')
-- | Make a new, unique, identifier and corresponding variable reference
mkInternalVar :: (Functor m, Monad m, MonadUnique m)
=> String -- ^ Name of the identifier
-> KindOrType
-> m (Id,Term)
mkInternalVar name ty = do
name' <- fmap (makeName name . toInteger) getUniqueM
return (Id name' (embed ty),Var ty name')
-- | Inline the binders in a let-binding that have a certain property
inlineBinders :: Monad m
=> (LetBinding -> RewriteMonad m Bool) -- ^ Property test
-> Rewrite m
inlineBinders condition _ expr@(Letrec b) = R $ do
(xes,res) <- unbind b
(replace,others) <- partitionM condition (unrec xes)
case replace of
[] -> return expr
_ -> do
let (others',res') = substituteBinders replace others res
newExpr = case others' of
[] -> res'
_ -> Letrec (bind (rec others') res')
changed newExpr
inlineBinders _ _ e = return e
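-- A usage sketch (the property is illustrative, not prescribed): inline every
-- let-binding whose right-hand side is merely a reference to a local variable:
--
-- > inlineVarBinders :: (Functor m, Monad m) => Rewrite m
-- > inlineVarBinders = inlineBinders (isLocalVar . unembed . snd)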
-- | Substitute the RHS of the first set of Let-binders for references to the
-- first set of Let-binders in: the second set of Let-binders and the additional
-- term
substituteBinders :: [LetBinding] -- ^ Let-binders to substitute
-> [LetBinding] -- ^ Let-binders where substitution takes place
-> Term -- ^ Expression where substitution takes place
-> ([LetBinding],Term)
substituteBinders [] others res = (others,res)
substituteBinders ((bndr,valE):rest) others res = substituteBinders rest' others' res'
where
val = unembed valE
bndrName = varName bndr
selfRef = bndrName `elem` Lens.toListOf termFreeIds val
(res',rest',others') = if selfRef
then (res,rest,(bndr,valE):others)
else ( substTm (varName bndr) val res
, map (second ( embed
. substTm bndrName val
. unembed)
) rest
, map (second ( embed
. substTm bndrName val
. unembed)
) others
)
-- | Calculate the /local/ free variable of an expression: the free variables
-- that are not bound in the global environment.
localFreeIds :: (Applicative f, Lens.Contravariant f, Monad m)
=> RewriteMonad m ((TmName -> f TmName) -> Term -> f Term)
localFreeIds = do
globalBndrs <- Lens.use bindings
return ((termFreeIds . Lens.filtered (not . (`HML.member` globalBndrs))))
-- | Lift the binders in a let-binding to a global function that have a certain
-- property
liftBinders :: (Functor m, Monad m)
=> (LetBinding -> RewriteMonad m Bool) -- ^ Property test
-> Rewrite m
liftBinders condition ctx expr@(Letrec b) = R $ do
(xes,res) <- unbind b
(replace,others) <- partitionM condition (unrec xes)
case replace of
[] -> return expr
_ -> do
(gamma,delta) <- mkEnv (LetBinding (map fst $ unrec xes) : ctx)
replace' <- mapM (liftBinding gamma delta) replace
let (others',res') = substituteBinders replace' others res
newExpr = case others' of
[] -> res'
_ -> Letrec (bind (rec others') res')
changed newExpr
liftBinders _ _ e = return e
-- | Create a global function for a Let-binding and return a Let-binding where
-- the RHS is a reference to the new global function applied to the free
-- variables of the original RHS
liftBinding :: (Functor m, Monad m)
=> Gamma
-> Delta
-> LetBinding
-> RewriteMonad m LetBinding
liftBinding gamma delta (Id idName tyE,eE) = do
let ty = unembed tyE
e = unembed eE
-- Get all local FVs, excluding the 'idName' from the let-binding
let localFTVs = List.nub $ Lens.toListOf termFreeTyVars e
localFVs <- List.nub <$> (Lens.toListOf <$> localFreeIds <*> pure e)
let localFTVkinds = map (\k -> HML.lookupDefault (error $ $(curLoc) ++ show k ++ " not found") k delta) localFTVs
localFVs' = filter (/= idName) localFVs
localFVtys' = map (\k -> HML.lookupDefault (error $ $(curLoc) ++ show k ++ " not found") k gamma) localFVs'
-- Abstract expression over its local FVs
boundFTVs = zipWith mkTyVar localFTVkinds localFTVs
boundFVs = zipWith mkId localFVtys' localFVs'
-- Make a new global ID
tcm <- Lens.use tcCache
newBodyTy <- termType tcm $ mkTyLams (mkLams e boundFVs) boundFTVs
newBodyId <- fmap (makeName (name2String idName) . toInteger) getUniqueM
-- Make a new expression, consisting of the the lifted function applied to
-- its free variables
let newExpr = mkTmApps
(mkTyApps (Var newBodyTy newBodyId)
(zipWith VarTy localFTVkinds localFTVs))
(zipWith Var localFVtys' localFVs')
-- Substitute the recursive calls by the new expression
e' = substTm idName newExpr e
-- Create a new body that abstracts over the free variables
newBody = mkTyLams (mkLams e' boundFVs) boundFTVs
-- Add the created function to the list of global bindings
bindings %= HMS.insert newBodyId (newBodyTy,newBody)
-- Return the new binder
return (Id idName (embed ty), embed newExpr)
liftBinding _ _ _ = error $ $(curLoc) ++ "liftBinding: invalid core, expr bound to tyvar"
-- | Make a global function for a name-term tuple
mkFunction :: (Functor m, Monad m)
=> TmName -- ^ Name of the function
-> Term -- ^ Term bound to the function
-> RewriteMonad m (TmName,Type) -- ^ Name with a proper unique and the type of the function
mkFunction bndr body = do
tcm <- Lens.use tcCache
bodyTy <- termType tcm body
bodyId <- cloneVar bndr
addGlobalBind bodyId bodyTy body
return (bodyId,bodyTy)
-- | Add a function to the set of global binders
addGlobalBind :: (Functor m, Monad m)
=> TmName
-> Type
-> Term
-> RewriteMonad m ()
addGlobalBind vId ty body = (ty,body) `deepseq` bindings %= HMS.insert vId (ty,body)
-- | Create a new name out of the given name, but with another unique
cloneVar :: (Functor m, Monad m)
=> TmName
-> RewriteMonad m TmName
cloneVar name = fmap (makeName (name2String name) . toInteger) getUniqueM
-- | Test whether a term is a variable reference to a local binder
isLocalVar :: (Functor m, Monad m)
=> Term
-> RewriteMonad m Bool
isLocalVar (Var _ name)
= fmap (not . HML.member name)
$ Lens.use bindings
isLocalVar _ = return False
-- | Determine if a term cannot be represented in hardware
isUntranslatable :: (Functor m, Monad m)
=> Term
-> RewriteMonad m Bool
isUntranslatable tm = do
tcm <- Lens.use tcCache
not <$> (representableType <$> Lens.use typeTranslator <*> pure tcm <*> termType tcm tm)
-- | Is the Context a Lambda/Term-abstraction context?
isLambdaBodyCtx :: CoreContext
-> Bool
isLambdaBodyCtx (LamBody _) = True
isLambdaBodyCtx _ = False
-- | Make a binder that should not be referenced
mkWildValBinder :: (Functor m, Monad m, MonadUnique m)
=> Type
-> m Id
mkWildValBinder = fmap fst . mkInternalVar "wild"
-- | Make a case-decomposition that extracts a field out of a (Sum-of-)Product type
mkSelectorCase :: (Functor m, Monad m, MonadUnique m, Fresh m)
=> String -- ^ Name of the caller of this function
-> HashMap TyConName TyCon -- ^ TyCon cache
-> [CoreContext] -- ^ Transformation Context in which this function is called
-> Term -- ^ Subject of the case-composition
-> Int -- n'th DataCon
-> Int -- n'th field
-> m Term
mkSelectorCase caller tcm _ scrut dcI fieldI = do
scrutTy <- termType tcm scrut
let cantCreate loc info = error $ loc ++ "Can't create selector " ++ show (caller,dcI,fieldI) ++ " for: (" ++ showDoc scrut ++ " :: " ++ showDoc scrutTy ++ ")\nAdditional info: " ++ info
case coreView tcm scrutTy of
TyConApp tc args ->
case tyConDataCons (tcm HMS.! tc) of
[] -> cantCreate $(curLoc) ("TyCon has no DataCons: " ++ show tc ++ " " ++ showDoc tc)
dcs | dcI > length dcs -> cantCreate $(curLoc) "DC index exceeds max"
| otherwise -> do
let dc = indexNote ($(curLoc) ++ "No DC with tag: " ++ show (dcI-1)) dcs (dcI-1)
let fieldTys = dataConInstArgTys dc args
if fieldI >= length fieldTys
then cantCreate $(curLoc) "Field index exceed max"
else do
wildBndrs <- mapM mkWildValBinder fieldTys
let ty = indexNote ($(curLoc) ++ "No DC field#: " ++ show fieldI) fieldTys fieldI
selBndr <- mkInternalVar "sel" ty
let bndrs = take fieldI wildBndrs ++ [fst selBndr] ++ drop (fieldI+1) wildBndrs
pat = DataPat (embed dc) (rebind [] bndrs)
retVal = Case scrut ty [ bind pat (snd selBndr) ]
return retVal
_ -> cantCreate $(curLoc) ("Type of subject is not a datatype: " ++ showDoc scrutTy)
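-- Sketch of the shape of the generated term (constructor and binder names
-- illustrative): selecting field index 1 of a two-field constructor @MkPair@
-- from a scrutinee @p@ yields roughly
--
-- > case p of { MkPair wild sel -> sel }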
-- | Specialise an application on its argument
specialise :: (Functor m, State.MonadState s m)
=> Lens' s (Map.Map (TmName, Int, Either Term Type) (TmName,Type)) -- ^ Lens into previous specialisations
-> Lens' s (HashMap TmName Int) -- ^ Lens into the specialisation history
-> Lens' s Int -- ^ Lens into the specialisation limit
-> Bool
-> Rewrite m
specialise specMapLbl specHistLbl specLimitLbl doCheck ctx e = case e of
(TyApp e1 ty) -> specialise' specMapLbl specHistLbl specLimitLbl False ctx e (collectArgs e1) (Right ty)
(App e1 e2) -> specialise' specMapLbl specHistLbl specLimitLbl doCheck ctx e (collectArgs e1) (Left e2)
_ -> return e
-- | Specialise an application on its argument
specialise' :: (Functor m, State.MonadState s m)
=> Lens' s (Map.Map (TmName, Int, Either Term Type) (TmName,Type)) -- ^ Lens into previous specialisations
-> Lens' s (HashMap TmName Int) -- ^ Lens into specialisation history
-> Lens' s Int -- ^ Lens into the specialisation limit
-> Bool -- ^ Perform specialisation limit check
-> [CoreContext] -- Transformation context
-> Term -- ^ Original term
-> (Term, [Either Term Type]) -- ^ Function part of the term, split into root and applied arguments
-> Either Term Type -- ^ Argument to specialize on
-> R m Term
specialise' specMapLbl specHistLbl specLimitLbl doCheck ctx e (Var _ f, args) specArg = R $ do
lvl <- Lens.view dbgLevel
-- Create binders and variable references for free variables in 'specArg'
(specBndrs,specVars) <- specArgBndrsAndVars ctx specArg
let argLen = length args
specAbs = either (Left . (`mkAbstraction` specBndrs)) (Right . id) specArg
-- Determine if 'f' has already been specialized on 'specArg'
specM <- liftR $ fmap (Map.lookup (f,argLen,specAbs))
$ Lens.use specMapLbl
case specM of
-- Use previously specialized function
Just (fname,fty) ->
traceIf (lvl >= DebugApplied) ("Using previous specialization of " ++ showDoc f ++ " on " ++ (either showDoc showDoc) specAbs ++ ": " ++ showDoc fname) $
changed $ mkApps (Var fty fname) (args ++ specVars)
-- Create new specialized function
Nothing -> do
-- Determine if we can specialize f
bodyMaybe <- fmap (HML.lookup f) $ Lens.use bindings
case bodyMaybe of
Just (_,bodyTm) -> do
-- Determine if we see a sequence of specialisations on a growing argument
specHistM <- liftR $ fmap (HML.lookup f) (Lens.use specHistLbl)
specLim <- liftR $ Lens.use specLimitLbl
if doCheck && maybe False (> specLim) specHistM
then fail $ unlines [ "Hit specialisation limit on function `" ++ showDoc f ++ "'.\n"
, "The function `" ++ showDoc f ++ "' is most likely recursive, and looks like it is being indefinitely specialized on a growing argument.\n"
, "Body of `" ++ showDoc f ++ "':\n" ++ showDoc bodyTm ++ "\n"
, "Argument (in position: " ++ show argLen ++ ") that triggered termination:\n" ++ (either showDoc showDoc) specArg
]
else do
-- Make new binders for existing arguments
tcm <- Lens.use tcCache
(boundArgs,argVars) <- fmap (unzip . map (either (Left *** Left) (Right *** Right))) $
mapM (mkBinderFor tcm "pTS") args
-- Create specialized functions
let newBody = mkAbstraction (mkApps bodyTm (argVars ++ [specArg])) (boundArgs ++ specBndrs)
newf <- mkFunction f newBody
-- Remember specialization
liftR $ specHistLbl %= HML.insertWith (+) f 1
liftR $ specMapLbl %= Map.insert (f,argLen,specAbs) newf
-- use specialized function
let newExpr = mkApps ((uncurry . flip) Var newf) (args ++ specVars)
newf `deepseq` changed newExpr
Nothing -> return e
specialise' _ _ _ _ ctx _ (appE,args) (Left specArg) = R $ do
-- Create binders and variable references for free variables in 'specArg'
(specBndrs,specVars) <- specArgBndrsAndVars ctx (Left specArg)
-- Create specialized function
let newBody = mkAbstraction specArg specBndrs
newf <- mkFunction (string2Name "specF") newBody
-- Create specialized argument
let newArg = Left $ mkApps ((uncurry . flip) Var newf) specVars
-- Use specialized argument
let newExpr = mkApps appE (args ++ [newArg])
changed newExpr
specialise' _ _ _ _ _ e _ _ = return e
-- | Create binders and variable references for free variables in 'specArg'
specArgBndrsAndVars :: (Functor m, Monad m)
=> [CoreContext]
-> Either Term Type
-> RewriteMonad m ([Either Id TyVar],[Either Term Type])
specArgBndrsAndVars ctx specArg = do
let specFTVs = List.nub $ either (Lens.toListOf termFreeTyVars) (Lens.toListOf typeFreeVars) specArg
specFVs <- List.nub <$> either ((Lens.toListOf <$> localFreeIds <*>) . pure) (const (pure [])) specArg
(gamma,delta) <- mkEnv ctx
let (specTyBndrs,specTyVars) = unzip
$ map (\tv -> let ki = HML.lookupDefault (error $ $(curLoc) ++ show tv ++ " not found") tv delta
in (Right $ TyVar tv (embed ki), Right $ VarTy ki tv)) specFTVs
(specTmBndrs,specTmVars) = unzip
$ map (\tm -> let ty = HML.lookupDefault (error $ $(curLoc) ++ show tm ++ " not found") tm gamma
in (Left $ Id tm (embed ty), Left $ Var ty tm)) specFVs
return (specTyBndrs ++ specTmBndrs,specTyVars ++ specTmVars)
| christiaanb/clash-compiler | clash-lib/src/CLaSH/Rewrite/Util.hs | bsd-2-clause | 24,176 | 0 | 30 | 7,473 | 6,547 | 3,371 | 3,176 | 397 | 9 |
{-# LANGUAGE OverloadedStrings, LambdaCase #-}
module Halive where
import GHC
import Linker
import Packages
import DynFlags
import GHC.Paths
import Outputable
import Data.IORef
import Data.String (fromString)
import Control.Monad
import Control.Concurrent
import Control.Monad.IO.Class
import System.FSNotify
import System.FilePath
import qualified Filesystem.Path.CurrentOS as FSP
import SandboxPath
directoryWatcher :: IO (Chan Event)
directoryWatcher = do
let predicate event = case event of
Modified path _ -> FSP.extension (fromString path) `elem` map Just ["hs", "vert", "frag", "pd"]
_ -> False
eventChan <- newChan
_ <- forkIO $ withManager $ \manager -> do
-- start a watching job (in the background)
let watchDirectory = "."
_stopListening <- watchTreeChan
manager
watchDirectory
predicate
eventChan
-- Keep the watcher alive forever
forever $ threadDelay 10000000
return eventChan
recompiler :: FilePath -> [FilePath] -> IO ()
recompiler mainFileName importPaths' = withGHCSession mainFileName importPaths' $ do
mainThreadId <- liftIO myThreadId
{-
Watcher:
Tell the main thread to recompile.
If the main thread isn't done yet, kill it.
Compiler:
Wait for the signal to recompile.
Before recompiling & running, mark that we've started,
and after we're done running, mark that we're done.
-}
mainDone <- liftIO $ newIORef False
-- Start with a full MVar so we recompile right away.
recompile <- liftIO $ newMVar ()
-- Watch for changes and recompile whenever they occur
watcher <- liftIO directoryWatcher
_ <- liftIO . forkIO . forever $ do
_ <- readChan watcher
putMVar recompile ()
mainIsDone <- readIORef mainDone
unless mainIsDone $ killThread mainThreadId
-- Start up the app
forever $ do
_ <- liftIO $ takeMVar recompile
liftIO $ writeIORef mainDone False
recompileTargets
liftIO $ writeIORef mainDone True
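-- A typical entry point (paths hypothetical): recompile and re-run
-- @app/Main.hs@ whenever a watched file changes, with @src@ as an extra
-- import path:
--
-- > main :: IO ()
-- > main = recompiler "app/Main.hs" ["src"]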
withGHCSession :: FilePath -> [FilePath] -> Ghc () -> IO ()
withGHCSession mainFileName importPaths' action = do
defaultErrorHandler defaultFatalMessager defaultFlushOut $ runGhc (Just libdir) $ do
-- Add the main file's path to the import path list
let mainFilePath = dropFileName mainFileName
importPaths'' = mainFilePath:importPaths'
-- Get the default dynFlags
dflags0 <- getSessionDynFlags
-- If there's a sandbox, add its package DB
dflags1 <- liftIO getSandboxDb >>= \case
Nothing -> return dflags0
Just sandboxDB -> do
let pkgs = map PkgConfFile [sandboxDB]
return dflags0 { extraPkgConfs = (pkgs ++) . extraPkgConfs dflags0 }
-- Make sure we're configured for live-reload, and turn off the GHCi sandbox
-- since it breaks OpenGL/GUI usage
let dflags2 = dflags1 { hscTarget = HscInterpreted
, ghcLink = LinkInMemory
, ghcMode = CompManager
, importPaths = importPaths''
} `gopt_unset` Opt_GhciSandbox
-- We must set dynflags before calling initPackages or any other GHC API
_ <- setSessionDynFlags dflags2
-- Initialize the package database
(dflags3, _) <- liftIO $ initPackages dflags2
-- Initialize the dynamic linker
liftIO $ initDynLinker dflags3
-- Set the given filename as a compilation target
setTargets =<< sequence [guessTarget mainFileName Nothing]
action
-- Recompiles the current targets
recompileTargets :: Ghc ()
recompileTargets = handleSourceError printException $ do
-- Get the dependencies of the main target
graph <- depanal [] False
-- Reload the main target
loadSuccess <- load LoadAllTargets
unless (failed loadSuccess) $ do
-- We must parse and typecheck modules before they'll be available for usage
forM_ graph (typecheckModule <=< parseModule)
-- Load the dependencies of the main target
setContext $ map (IIModule . ms_mod_name) graph
-- Run the target file's "main" function
rr <- runStmt "main" RunToCompletion
case rr of
RunOk _ -> liftIO $ putStrLn "OK"
RunException exception -> liftIO $ print exception
RunBreak _ _ _ -> liftIO $ putStrLn "Breakpoint"
-- A helper from interactive-diagrams to print out GHC API values,
-- useful while debugging the API.
-- | Outputs any value that can be pretty-printed using the default style
output :: (GhcMonad m, MonadIO m) => Outputable a => a -> m ()
output a = do
dfs <- getSessionDynFlags
let style = defaultUserStyle
let cntx = initSDocContext dfs style
liftIO $ print $ runSDoc (ppr a) cntx
| nfjinjing/halive | exec/Halive.hs | bsd-2-clause | 5,035 | 0 | 21 | 1,472 | 993 | 490 | 503 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction, Rank2Types, RebindableSyntax #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-unused-do-bind #-}
module Language.Brainfuck.Emitter (emit) where
import Control.Monad (forM_)
import Control.Monad.Fix
import Control.Monad.Code
import Control.Monad.ConstantPool
import Control.Monad.Indexed
import Control.Monad.Indexed.Syntax hiding (return)
import Data.Int
import Language.Brainfuck.Command
import Prelude hiding (Monad (..))
emit :: ( MonadFix (m () ())
, MonadCode m
) => [Command] -> m () () (Label m ())
emit xs = do
label <- emitHeader
forM_ xs emitCommand
emitFooter
returnM label
emitHeader = do
label <- ldc 30000
newarray byte
astore 1
ldc 0
istore 2
returnM label
emitCommand (IncrementPointer x) = emitIncrementPointer x
emitCommand (IncrementByte x) = emitIncrementByte x
emitCommand OutputByte = emitOutputByte
emitCommand InputByte = emitInputByte
emitCommand (WhileNonzero xs) = emitWhileNonzero xs
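-- For orientation (a sketch; the constructors are defined in
-- "Language.Brainfuck.Command"): the Brainfuck program @+[.-]@ corresponds to
-- something like
--
-- > [IncrementByte 1, WhileNonzero [OutputByte, IncrementByte (-1)]]
--
-- and each command is lowered to JVM instructions by the emitters below.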
emitIncrementPointer = iinc 2
emitIncrementByte x = do
label <- aload 1
iload 2
dup2
baload
ldc x
iadd
i2b
bastore
returnM label
emitOutputByte = do
label <- getstatic "java/lang/System" "out" (L"java/io/PrintStream")
aload 1
iload 2
baload
invokevirtual "java/io/PrintStream" "write" (I)V
returnM label
emitInputByte = do
label <- aload 1
iload 2
getstatic "java/lang/System" "in" (L"java/io/InputStream")
invokevirtual "java/io/InputStream" "read" ()I
i2b
bastore
returnM label
emitWhileNonzero xs = do
(start, _) <- mfix (\ ~(_, end) -> do
start <- aload 1
iload 2
baload
ifeq end
forM_ xs emitCommand
goto start
end <- nop
returnM (start, end))
returnM start
emitFooter = do
label <- nop
getstatic "java/lang/System" "out" (L"java/io/PrintStream")
invokevirtual "java/io/PrintStream" "flush" ()V
return
returnM label | sonyandy/tnt | Language/Brainfuck/Emitter.hs | bsd-3-clause | 1,917 | 0 | 14 | 369 | 626 | 293 | 333 | 75 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
module Main where
import Control.Exception.Base
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad
import qualified Data.Attoparsec.ByteString.Lazy as AL
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (traverse_)
import Data.Maybe (catMaybes)
import Network.BitTorrent.Bencoding
import Network.BitTorrent.Client
import Network.BitTorrent.MetaInfo
import qualified Network.BitTorrent.RPCServer as RPC
import Network.BitTorrent.Types
import System.Environment
import System.Posix.Signals
import Web.Scotty
openTorrentFile :: String -> IO (Maybe MetaInfo)
openTorrentFile filename = do
contents <- BL.readFile filename
return $ AL.maybeResult (AL.parse value contents) >>= parseMetaInfo
main :: IO ()
main = do
args <- getArgs
print args
globalState <- newGlobalState 8035
torrents <- traverse openTorrentFile args
rpcServer <- async $ scotty 8036 (RPC.server globalState)
listener <- btListen globalState
traverse_ (forkIO . runTorrent globalState) (catMaybes torrents)
void $ installHandler sigINT (CatchOnce (cancel listener *> cancel rpcServer)) Nothing
void $ forkIO $ progressLogger globalState
void $ forkIO $ periodicCheckup globalState
(do wait listener
wait rpcServer)
`finally` cleanup globalState
cleanup :: GlobalState -> IO ()
cleanup globalState = do
torrents <- atomically $ readTVar $ globalStateTorrents globalState
traverse_ (stopTorrent globalState) torrents
putStrLn "quiting cleanup"
progressLogger :: GlobalState -> IO ()
progressLogger globalState = forever $ do
torrents <- atomically $ readTVar (globalStateTorrents globalState)
let printBitField state = do
bf <- atomically $ readTVar $ torrentStateBitField state
print bf
traverse_ printBitField torrents
threadDelay 5000000
periodicCheckup :: GlobalState -> IO ()
periodicCheckup globalState = forever $ do
threadDelay 1000000
torrents <- atomically $ readTVar (globalStateTorrents globalState)
traverse_ (flip writeChan Checkup . torrentStateSharedMessages) torrents
| farnoy/torrent | app/Main.hs | bsd-3-clause | 2,198 | 0 | 15 | 325 | 611 | 303 | 308 | 58 | 1 |
{-# LANGUAGE NoImplicitPrelude, RankNTypes, TemplateHaskell, QuasiQuotes, OverloadedStrings, ConstraintKinds, FlexibleInstances, FlexibleContexts, TypeFamilies, GADTs #-}
{-# LANGUAGE RecordWildCards #-}
module MediaSub.PlainAnnex where
import qualified Data.Conduit.List as CL
import MediaSub.Import
import MediaSub.Playlist
import MediaSub.GitTools
plainAnnex :: YesodMediaSub m => FilePath -> SectionId -> Section m
plainAnnex gitdir sid = defaultSection
{ secGetPlayables = recursiveFiles gitdir
, secGetContents = renderFiles sid . recursiveFiles gitdir
}
renderFiles :: YesodMediaSub m => SectionId -> Source (HandlerT m IO) Text -> SubHandler m TypedContent
renderFiles sid source = selectRep $
provideRep $ do
elements <- lift $ source $$ CL.consume
defaultLayoutMedia $
            [whamlet|
                <ul>
                    $forall element <- elements
                        <li>#{element}
                |]
-- | Recursively find all files
recursiveFiles :: FilePath -> FPS -> Source (HandlerT m IO) Text
recursiveFiles gitdir fps =
sourceGitFiles (fpToString gitdir) (fpsToFilePath fps)
$= CL.mapMaybe playableOnly
playableOnly :: GitFile -> Maybe Text
playableOnly (False, path) = Just path
playableOnly _ = Nothing
| SimSaladin/rnfssp | rnfssp-media/MediaSub/PlainAnnex.hs | bsd-3-clause | 1,210 | 0 | 10 | 250 | 276 | 144 | 132 | 24 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Pipes.PostgreSQL.Simple.SafeT (Format(..), toTable) where
import Control.Monad (void)
import Control.Monad.Catch (catchAll, throwM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.ByteString (ByteString)
import Data.String (fromString)
import qualified Database.PostgreSQL.Simple as Pg
import qualified Database.PostgreSQL.Simple.Copy as Pg
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Pipes
import qualified Pipes.Safe as Pipes
--------------------------------------------------------------------------------
-- | The PostgreSQL file format, used by the @COPY@ command
data Format = Text | Binary | CSV
deriving (Show)
--------------------------------------------------------------------------------
toTable
:: (MonadIO m, Pipes.MonadSafe m, Pipes.Base m ~ IO)
=> Pg.Connection
-> Format
-> String
-> Pipes.Consumer ByteString m a
toTable c fmt tblName = do
putCopyEnd <- Pipes.register (void $ Pg.putCopyEnd c)
Pipes.liftBase $ Pg.copy_ c $ fromString $ concat
[ "COPY ", tblName
, " FROM STDIN WITH (FORMAT " , show fmt, ")"
]
Pipes.for Pipes.cat (liftIO . Pg.putCopyData c)
`onException` (\e -> do Pipes.release putCopyEnd
Pipes.liftBase $ Pg.putCopyError c $
Text.encodeUtf8 . Text.pack $ show e)
where
action `onException` handler =
action `catchAll` \e -> handler e >> throwM e
{-# INLINABLE toTable #-}
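-- A usage sketch (table name and row source hypothetical): stream a list of
-- CSV-encoded rows into a table inside 'Pipes.Safe.runSafeT':
--
-- > loadRows :: Pg.Connection -> [ByteString] -> IO ()
-- > loadRows conn rows =
-- >   Pipes.Safe.runSafeT $ Pipes.runEffect $
-- >     Pipes.each rows Pipes.>-> toTable conn CSV "my_table"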
| ocharles/pipes-postgresql-simple | src/Pipes/PostgreSQL/Simple/SafeT.hs | bsd-3-clause | 1,530 | 0 | 17 | 295 | 404 | 230 | 174 | 32 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
import Control.Concurrent.STM
import Control.Monad.IO.Class
import Data.Aeson
import Data.Proxy
import GHC.Generics
import Network.Wai.Handler.Warp (run)
import Servant
import Servant.JQuery
import System.FilePath
-- * A simple Counter data type
newtype Counter = Counter { value :: Int }
deriving (Generic, Show, Num)
instance ToJSON Counter
-- * Shared counter operations
-- Creating a counter that starts from 0
newCounter :: IO (TVar Counter)
newCounter = newTVarIO 0
-- Increasing the counter by 1
counterPlusOne :: MonadIO m => TVar Counter -> m Counter
counterPlusOne counter = liftIO . atomically $ do
oldValue <- readTVar counter
let newValue = oldValue + 1
writeTVar counter newValue
return newValue
currentValue :: MonadIO m => TVar Counter -> m Counter
currentValue counter = liftIO $ readTVarIO counter
-- * Our API type
type TestApi = "counter" :> Post Counter -- endpoint for increasing the counter
:<|> "counter" :> Get Counter -- endpoint to get the current value
:<|> Raw -- used for serving static files
testApi :: Proxy TestApi
testApi = Proxy
-- * Server-side handler
-- where our static files reside
www :: FilePath
www = "examples/www"
-- defining handlers
server :: TVar Counter -> Server TestApi
server counter = counterPlusOne counter -- (+1) on the TVar
:<|> currentValue counter -- read the TVar
:<|> serveDirectory www -- serve static files
runServer :: TVar Counter -- ^ shared variable for the counter
-> Int -- ^ port the server should listen on
-> IO ()
runServer var port = run port (serve testApi $ server var)
-- * Generating the JQuery code
incCounterJS :<|> currentValueJS :<|> _ = jquery testApi
writeJS :: FilePath -> [AjaxReq] -> IO ()
writeJS fp functions = writeFile fp $
concatMap generateJS functions
main :: IO ()
main = do
-- write the JS code to www/api.js at startup
writeJS (www </> "api.js")
[ incCounterJS, currentValueJS ]
-- setup a shared counter
cnt <- newCounter
-- listen to requests on port 8080
runServer cnt 8080
| derekelkins/servant-jquery | examples/counter.hs | bsd-3-clause | 2,275 | 0 | 11 | 508 | 498 | 261 | 237 | 51 | 1 |
{-
- Claq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Claq.
- Claq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE FlexibleContexts #-}
module Language.Claq.Parser (circuit) where
import Control.Monad.Free
import Data.Functor ((<$>))
import Data.Maybe (catMaybes)
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.Language (GenLanguageDef, emptyDef)
import Text.Parsec.Token (GenTokenParser)
import qualified Text.Parsec.Token as P
import Data.ClassicalCircuit
import Language.Claq.Syntax
circuit :: Stream s m Char => ParsecT s u m [Stmt String]
circuit = do
whiteSpace
stmts <- catMaybes <$> semiSep1 (optionMaybe stmt)
eof
return stmts
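-- An example of the concrete syntax accepted by 'circuit' (the file contents
-- are made up for illustration):
--
-- > .inputs a, b;
-- > c = a & ~b;
-- > .outputs c ^ 1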
stmt :: Stream s m Char => ParsecT s u m (Stmt String)
stmt =
SInputs <$> inputsSpec
<|>
SOutputs <$> outputsSpec
<|>
do { v <- identifier
; _ <- symbol "="
; e <- expr
; return $ SEquation v e
}
<?> "statement"
inputsSpec :: Stream s m Char => ParsecT s u m [String]
inputsSpec =
do { reserved ".inputs"
; commaSep identifier
}
outputsSpec :: Stream s m Char => ParsecT s u m [Free ClaGate String]
outputsSpec =
do { reserved ".outputs"
; commaSep expr
}
expr :: Stream s m Char => ParsecT s u m (Free ClaGate String)
expr = buildExpressionParser opTable term
<?> "expression"
where
opTable =
[ [prefix "~" (Free . GNot)]
, [binary "&" (\e1 e2 -> Free $ GAnd e1 e2) AssocLeft]
, [binary "^" (\e1 e2 -> Free $ GXor e1 e2) AssocLeft]
, [binary "|" (\e1 e2 -> Free $ GOr e1 e2) AssocLeft]
]
binary name fun = Infix (do { reservedOp name; return fun })
prefix name fun = Prefix (do { reservedOp name; return fun })
postfix name fun = Postfix (do { reservedOp name; return fun })
term :: Stream s m Char => ParsecT s u m (Free ClaGate String)
term =
parens expr
<|>
do { v <- identifier
; return $ Pure v
}
<|>
do { n <- natural
; case n of
0 -> return $ Free $ GConst False
1 -> return $ Free $ GConst True
_ -> parserZero
}
<?> "simple expression"
identifier :: Stream s m Char => ParsecT s u m String
identifier = P.identifier lexer
reserved :: Stream s m Char => String -> ParsecT s u m ()
reserved = P.reserved lexer
reservedOp :: Stream s m Char => String -> ParsecT s u m ()
reservedOp = P.reservedOp lexer
natural :: Stream s m Char => ParsecT s u m Integer
natural = P.natural lexer
symbol :: Stream s m Char => String -> ParsecT s u m String
symbol = P.symbol lexer
whiteSpace :: Stream s m Char => ParsecT s u m ()
whiteSpace = P.whiteSpace lexer
parens :: Stream s m Char => ParsecT s u m a -> ParsecT s u m a
parens = P.parens lexer
semiSep1 :: Stream s m Char => ParsecT s u m a -> ParsecT s u m [a]
semiSep1 = P.semiSep1 lexer
commaSep :: Stream s m Char => ParsecT s u m a -> ParsecT s u m [a]
commaSep = P.commaSep lexer
lexer :: Stream s m Char => GenTokenParser s u m
lexer = P.makeTokenParser claqDef
claqDef :: Stream s m Char => GenLanguageDef s u m
claqDef = emptyDef
{ P.commentStart = "{-"
, P.commentEnd = "-}"
, P.commentLine = "--"
, P.nestedComments = True
, P.identStart = letter <|> oneOf "_."
, P.identLetter = alphaNum <|> oneOf "_.'"
, P.opStart = P.opLetter claqDef
, P.opLetter = oneOf ":!#$%&*+/<=>?@\\^|-~"
, P.reservedOpNames= ["~", "&", "^", "|"]
, P.reservedNames = [".inputs", ".outputs"]
, P.caseSensitive = True
}
| ti1024/claq | src/Language/Claq/Parser.hs | bsd-3-clause | 3,787 | 0 | 13 | 1,096 | 1,324 | 682 | 642 | 94 | 3 |
module Jana.Format where
import Prelude hiding (GT, LT, EQ, (<>))
import Data.List (intersperse)
import Text.PrettyPrint
import qualified Data.Map as Map
import Jana.Ast
commasep = hsep . punctuate (char ',')
formatType (Int _) = text "int"
formatType (Stack _) = text "stack"
formatIdent :: Ident -> Doc
formatIdent id = text (ident id)
formatLval :: Lval -> Doc
formatLval (Var id) = formatIdent id
formatLval (Lookup id expr) = formatIdent id <> brackets (formatExpr expr)
formatModOp AddEq = text "+="
formatModOp SubEq = text "-="
formatModOp XorEq = text "^="
-- Operators and their precedence
-- Should match the operator table in Jana.Parser
unaryOpMap = Map.fromList [
(Not, ("!", 5))
]
binOpMap = Map.fromList [
(Mul , ("*", 4))
, (Div , ("/", 4))
, (Mod , ("%", 4))
, (Add , ("+", 3))
, (Sub , ("-", 3))
, (GE , (">=", 2))
, (GT , (">", 2))
, (LE , ("<=", 2))
, (LT , ("<", 2))
, (EQ , ("=", 2))
, (NEQ , ("!=", 2))
, (And , ("&", 1))
, (Or , ("|", 1))
, (Xor , ("^", 1))
, (LAnd, ("&&", 0))
, (LOr , ("||", 0))
]
formatUnaryOp = text . fst . (unaryOpMap Map.!)
formatBinOp = text . fst . (binOpMap Map.!)
formatExpr = f 0
where f _ (Number num _) = integer num
f _ (Boolean True _) = text "true"
f _ (Boolean False _) = text "false"
f _ (LV lval _) = formatLval lval
f _ (Empty id _) = text "empty" <> parens (formatIdent id)
f _ (Top id _) = text "top" <> parens (formatIdent id)
f _ (Size id _) = text "size" <> parens (formatIdent id)
f _ (Nil _) = text "nil"
f d (UnaryOp op e) =
let opd = unaryOpPrec op in
parens' (d > opd) (formatUnaryOp op <> f opd e)
f d (BinOp op e1 e2) =
let opd = binOpPrec op in
parens' (d > opd) (f opd e1 <+> formatBinOp op <+> f opd e2)
unaryOpPrec = snd . (unaryOpMap Map.!)
binOpPrec = snd . (binOpMap Map.!)
parens' bool = if bool then parens else id
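-- For instance, with the precedences above an expression tree for a + b * c
-- prints without parentheses, while one for (a + b) * c prints as
-- "(a + b) * c": a child is parenthesised only when its operator binds more
-- loosely than the surrounding context.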
formatVdecl (Scalar typ id _) =
formatType typ <+> formatIdent id
formatVdecl (Array id size _) =
text "int" <+> formatIdent id <> brackets (formatSize size)
where formatSize (Just x) = integer x
formatSize Nothing = empty
formatStmts = vcat . map formatStmt
formatStmt (Assign modOp lval expr _) =
formatLval lval <+> formatModOp modOp <+> formatExpr expr
formatStmt (If e1 s1 s2 e2 _) =
text "if" <+> formatExpr e1 <+> text "then" $+$
nest 4 (formatStmts s1) $+$
elsePart $+$
text "fi" <+> formatExpr e2
where elsePart | null s2 = empty
| otherwise = text "else" $+$ nest 4 (formatStmts s2)
formatStmt (From e1 s1 s2 e2 _) =
text "from" <+> formatExpr e1 <+> keyword $+$
vcat inside $+$
text "until" <+> formatExpr e2
where (keyword:inside) = doPart ++ loopPart
doPart | null s1 = []
| otherwise = [text "do", nest 4 (formatStmts s1)]
loopPart | null s2 = [empty]
| otherwise = [text "loop", nest 4 (formatStmts s2)]
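-- A loop with only a do-part renders roughly as:
--
-- > from i = 0 do
-- >     x += i
-- >     i += 1
-- > until i = 10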
formatStmt (Push id1 id2 _) =
text "push" <> parens (formatIdent id1 <> comma <+> formatIdent id2)
formatStmt (Pop id1 id2 _) =
text "pop" <> parens (formatIdent id1 <> comma <+> formatIdent id2)
formatStmt (Local (typ1, id1, e1) s (typ2, id2, e2) _) =
text "local" <+> localDecl typ1 id1 e1 $+$
formatStmts s $+$
text "delocal" <+> localDecl typ2 id2 e2
where localDecl typ id expr =
formatType typ <+> formatIdent id <+> equals <+> formatExpr expr
formatStmt (Call id args _) =
text "call" <+> formatIdent id <> parens (commasep $ map formatIdent args)
formatStmt (Uncall id args _) =
text "uncall" <+> formatIdent id <> parens (commasep $ map formatIdent args)
formatStmt (Swap id1 id2 _) =
formatIdent id1 <+> text "<=>" <+> formatIdent id2
formatStmt (UserError msg _) =
text "error" <> parens (text (show msg))
formatStmt (Prints (Print str) _) =
text "print" <> parens (text (show str))
formatStmt (Prints (Printf str []) _) =
text "printf" <> parens (text (show str))
formatStmt (Prints (Printf str idents) _) =
text "printf" <> parens (text (show str) <> comma <+> commasep (map formatIdent idents))
formatStmt (Prints (Show idents) _) =
text "show" <> parens (commasep $ map formatIdent idents)
formatStmt (Skip _) =
text "skip"
formatStmtsAbbrv [] = empty
formatStmtsAbbrv [If {}] = text "..."
formatStmtsAbbrv [From {}] = text "..."
formatStmtsAbbrv [Local {}] = text "..."
formatStmtsAbbrv [s] = formatStmt s
formatStmtsAbbrv _ = text "..."
formatStmtAbbrv (If e1 s1 s2 e2 _) =
text "if" <+> formatExpr e1 <+> text "then" $+$
nest 4 (formatStmtsAbbrv s1) $+$
elsePart $+$
text "fi" <+> formatExpr e2
where elsePart | null s2 = empty
| otherwise = text "else" $+$ nest 4 (formatStmtsAbbrv s2)
formatStmtAbbrv (From e1 s1 s2 e2 _) =
text "from" <+> formatExpr e1 <+> keyword $+$
vcat inside $+$
text "until" <+> formatExpr e2
where (keyword:inside) = doPart ++ loopPart
doPart | null s1 = []
| otherwise = [text "do", nest 4 (formatStmtsAbbrv s1)]
loopPart | null s2 = [empty]
| otherwise = [text "loop", nest 4 (formatStmtsAbbrv s2)]
formatStmtAbbrv (Local (typ1, id1, e1) s (typ2, id2, e2) _) =
text "local" <+> localDecl typ1 id1 e1 $+$
formatStmtsAbbrv s $+$
text "delocal" <+> localDecl typ2 id2 e2
where localDecl typ id expr =
formatType typ <+> formatIdent id <+> equals <+> formatExpr expr
formatStmtAbbrv s = formatStmt s
formatMain (ProcMain vdecls body _) =
text "procedure main()" $+$
nest 4 (vcat (map formatVdecl vdecls) $+$
text "" $+$
formatStmts body)
formatParams = commasep . map formatVdecl
formatProc proc =
text "procedure" <+> formatIdent (procname proc) <>
parens (formatParams $ params proc) $+$
nest 4 (formatStmts $ body proc)
formatProgram (Program [main] procs) =
vcat (intersperse (text "") $ map formatProc procs) $+$
text "" $+$
formatMain main
instance Show Type where
show = render . formatType
instance Show Ident where
show = render . formatIdent
instance Show Lval where
show = render . formatLval
instance Show Expr where
show = render . formatExpr
instance Show Stmt where
show = render . formatStmt
instance Show Vdecl where
show = render . formatVdecl
instance Show Proc where
show = render . formatProc
instance Show ProcMain where
show = render . formatMain
instance Show Program where
show = render . formatProgram
| mbudde/jana | src/Jana/Format.hs | bsd-3-clause | 6,662 | 0 | 13 | 1,744 | 2,776 | 1,393 | 1,383 | 170 | 11 |
module Data.Text.Encoding where
encodeUtf8 = id
decodeUtf8 = id
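-- Hedged note: in this compatibility shim both conversions are the identity
-- (presumably because the shim's 'Text' is just 'String'); the real
-- Data.Text.Encoding converts between 'Text' and UTF-8-encoded 'ByteString'.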
| Soostone/string-compat | src/Data/Text/Encoding.hs | bsd-3-clause | 67 | 0 | 4 | 12 | 17 | 11 | 6 | 3 | 1 |
module Data.LFSR.Tap2 (
Tap2(..), tap2,
) where
import Data.Array (Array, array, (!))
data Tap2 = Tap2 { width :: Int
, bits :: (Int, Int)
} deriving Show
tapPair :: (Int, (Int, Int)) -> (Int, Tap2)
tapPair (i, bs) = (i, Tap2 { width = i, bits = bs })
tap2Table :: Array Int Tap2
tap2Table =
array (2, 768)
. map tapPair
$ [ (7, (7, 6))
, (15, (15, 14))
, (31, (31, 28))
, (63, (63, 62))
]
tap2 :: Int -> Tap2
tap2 = (tap2Table !)
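-- The table is sparse: only the widths listed above are populated, so
-- indexing any other width hits an undefined array element at runtime.
--
-- >>> tap2 31
-- Tap2 {width = 31, bits = (31,28)}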
| khibino/haskell-lfsr | src/Data/LFSR/Tap2.hs | bsd-3-clause | 502 | 0 | 9 | 161 | 242 | 152 | 90 | 18 | 1 |
module Main where
data List a = Cons a (List a) | Nil
deriving (Show)
sumList :: List Integer -> Integer
sumList lst = undefined -- must be left as the unguarded identifier 'undefined'

add :: Integer -> Integer -> Integer
add = (+)
zero,one :: Integer
zero = 0
one = 1
{-
check :: [Bool]
check = [check_a, check_b]
check_a, check_b :: Bool
check_a = 6 == foo (Cons 1 (Cons 2 (Cons 3 Nil)))
check_b = 3 == foo (Cons 1 (Cons 1 (Cons 1 Nil)))
main = do
print $ and check
-}
| sw17ch/Volcano | stash/spec.hs | bsd-3-clause | 475 | 0 | 8 | 110 | 91 | 54 | 37 | 10 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
module Rules.Type.Infer.Offline (
UnifyT
, ITOffline
) where
import Control.Monad (unless)
import Data.Functor.Identity (Identity(..))
import Data.List (tails)
import Data.Proxy (Proxy(..))
import Bound (Bound)
import Control.Lens (review)
import Control.Lens.Wrapped (_Wrapped, _Unwrapped)
import Control.Monad.Except (MonadError)
import Control.Monad.Writer (MonadWriter(..), WriterT, execWriterT, runWriterT)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as N
import qualified Data.Map as M
import Data.Bitransversable
import Data.Functor.Rec
import Ast.Type
import Ast.Term
import Ast.Error.Common
import Rules.Unification
import Rules.Type.Infer.Common
type UnifyT ki ty a = WriterT [UConstraint (TyAst ki ty) (TyAstVar a)]
mkInferType' :: (Ord a, UnificationContext e m (TyAst ki ty) (TyAstVar a), MonadError e m, AsUnknownTypeError e)
=> (Term ki ty pt tm a -> UnifyT ki ty a m (Type ki ty a))
-> ([UConstraint (TyAst ki ty) (TyAstVar a)] -> m (M.Map (TyAstVar a) (TyAst ki ty (TyAstVar a))))
-> Term ki ty pt tm a
-> m (Type ki ty a)
mkInferType' go unifyFn x = do
(ty, cs) <- runWriterT $ go x
let ty' = review _Unwrapped ty
s <- unifyFn cs
let ty'' = mapSubst _TyAstVar s ty'
return $ review _Wrapped ty''
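-- The "offline" strategy in a nutshell: inference only collects 'UCEq'
-- constraints through the 'WriterT' layer; 'mkInferType'' then hands the whole
-- constraint list to the unifier in one batch and applies the resulting
-- substitution to the inferred type.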
data ITOffline
mkCheck' :: MkInferTypeConstraint e w s r m ki ty a ITOffline
=> Proxy (MonadProxy e w s r m)
-> (Term ki ty pt tm a -> UnifyT ki ty a m (Type ki ty a))
-> ([UConstraint (TyAst ki ty) (TyAstVar a)] -> m (M.Map (TyAstVar a) (TyAst ki ty (TyAstVar a))))
-> Term ki ty pt tm a
-> Type ki ty a
-> m ()
mkCheck' m inferFn unifyFn x y = do
cs <- execWriterT $ (mkCheckType m (Proxy :: Proxy ITOffline) inferFn) x y
_ <- unifyFn cs
return ()
fixupNormalize :: (Type ki ty a -> Type ki ty a)
-> TyAst ki ty (TyAstVar a)
-> TyAst ki ty (TyAstVar a)
fixupNormalize fn =
review _Unwrapped .
fn .
review _Wrapped
fixupUnify :: Monad m
=> ([UConstraint (TyAst ki ty) (TyAstVar a)] -> m (M.Map (TyAstVar a) (TyAst ki ty (TyAstVar a))))
-> [UConstraint (Type ki ty) a]
-> m (M.Map a (Type ki ty a))
fixupUnify u =
let
fixConstraint (UCEq a1 a2) = UCEq (review _Unwrapped a1) (review _Unwrapped a2)
    fixMap = undefined -- conversion of the solved substitution is left undefined here
in
fmap fixMap .
u .
fmap fixConstraint
instance MkInferType ITOffline where
type MkInferTypeConstraint e w s r m ki ty a ITOffline =
( Ord a
, OrdRec ki
, OrdRec (ty ki)
, MonadError e m
, AsUnknownTypeError e
, AsOccursError e (TyAst ki ty) (TyAstVar a)
, AsUnificationMismatch e (TyAst ki ty) (TyAstVar a)
, AsUnificationExpectedEq e (TyAst ki ty) (TyAstVar a)
, Bound ki
, Bound (ty ki)
, Bitransversable ki
, Bitransversable (ty ki)
)
type InferTypeMonad m ki ty a ITOffline =
UnifyT ki ty a m
type MkInferTypeErrorList ki ty pt tm a ITOffline =
'[ ErrOccursError (TyAst ki ty) (TyAstVar a)
, ErrUnificationMismatch (TyAst ki ty) (TyAstVar a)
, ErrUnificationExpectedEq (TyAst ki ty) (TyAstVar a)
]
type MkInferTypeWarningList ki ty pt tm a ITOffline =
'[]
mkCheckType m i =
mkCheckType' (expectType m i)
expectType _ _ (ExpectedType ty1) (ActualType ty2) =
unless (ty1 == ty2) $
tell [UCEq (review _Unwrapped ty1) (review _Unwrapped ty2)]
expectTypeEq _ _ ty1 ty2 =
unless (ty1 == ty2) $
tell [UCEq (review _Unwrapped ty1) (review _Unwrapped ty2)]
expectTypeAllEq _ _ n@(ty :| tys) = do
unless (all (== ty) tys ) $
let
xss = tails . N.toList $ n
f [] = []
f (x : xs) = fmap (UCEq (review _Unwrapped x)) (fmap (review _Unwrapped) xs)
ws = xss >>= f
in
tell ws
return ty
prepareInferType pm pi inferKindFn normalizeFn ii =
let
n = fixupNormalize normalizeFn
u = mkUnify _TyAstVar n . iiUnifyRules $ ii
u' = fixupUnify u
pc = mkPCheck . iiPCheckRules $ ii
i = mkInferType inferKindFn normalizeFn pc . iiInferTypeRules $ ii
i' = mkInferType' i u
c = mkCheck' pm i u
in
InferTypeOutput u' i' c
| dalaing/type-systems | src/Rules/Type/Infer/Offline.hs | bsd-3-clause | 4,564 | 0 | 17 | 1,161 | 1,778 | 911 | 867 | -1 | -1 |
module SAWScript.Prover.RME where
import Control.Monad.IO.Class
import qualified Data.Map as Map
import qualified Data.RME as RME
import Verifier.SAW.FiniteValue
import qualified Verifier.SAW.Simulator.RME as RME
import SAWScript.Proof(Prop, propToSATQuery, propSize, CEX)
import SAWScript.Prover.SolverStats
import SAWScript.Prover.Util
import SAWScript.Value
-- | Bit-blast a proposition and check its validity using RME.
proveRME ::
Prop {- ^ A proposition to be proved -} ->
TopLevel (Maybe CEX, SolverStats)
proveRME goal = getSharedContext >>= \sc -> liftIO $
do satq <- propToSATQuery sc mempty goal
RME.withBitBlastedSATQuery sc Map.empty satq $ \lit shapes ->
let stats = solverStats "RME" (propSize goal)
in case RME.sat lit of
Nothing -> return (Nothing, stats)
Just cex -> do
let m = Map.fromList cex
let n = sum (map (sizeFiniteType . snd) shapes)
let bs = map (maybe False id . flip Map.lookup m) $ take n [0..]
let r = liftCexBB (map snd shapes) bs
case r of
Left err -> fail $ "Can't parse counterexample: " ++ err
Right vs
| length shapes == length vs -> do
let model = zip (map fst shapes) (map toFirstOrderValue vs)
return (Just model, stats)
| otherwise -> fail $ unwords ["RME SAT results do not match expected arguments", show shapes, show vs]
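-- Reading the result: 'Nothing' from 'RME.sat' means no satisfying assignment
-- (hence no counterexample) was found for the bit-blasted query, so the goal
-- is reported as proved; otherwise the assignment is decoded back into
-- first-order values and returned as a counterexample model.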
| GaloisInc/saw-script | src/SAWScript/Prover/RME.hs | bsd-3-clause | 1,508 | 0 | 30 | 440 | 451 | 231 | 220 | 31 | 3 |
module Module4.Task27 where
newtype Maybe' a = Maybe' { getMaybe :: Maybe a } deriving (Eq,Show)
instance Monoid a => Monoid (Maybe' a) where
mempty = Maybe' $ Just mempty
mappend (Maybe' Nothing) _ = Maybe' Nothing
mappend _ (Maybe' Nothing) = Maybe' Nothing
mappend (Maybe' a) (Maybe' b) = Maybe' (mappend a b)
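-- Unlike the standard 'Maybe' monoid, 'Nothing' is absorbing here and
-- @Just mempty@ is the unit, e.g. (doctest-style illustration):
--
-- >>> Maybe' (Just "ab") `mappend` Maybe' (Just "cd")
-- Maybe' {getMaybe = Just "abcd"}
--
-- >>> Maybe' (Just "ab") `mappend` Maybe' Nothing
-- Maybe' {getMaybe = Nothing}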
| dstarcev/stepic-haskell | src/Module4/Task27.hs | bsd-3-clause | 332 | 0 | 8 | 73 | 138 | 71 | 67 | 7 | 0 |
module VMA.Bracket
( brackets
) where
import Relude hiding ( Handle
, Type
)
import Data.Vector ( Vector )
import qualified Data.Map as Map
import qualified Data.Text.Extra as T
import Spec.Name
import Bracket
import Render.Element
import Render.Names
import Render.SpecInfo
import Error
import Marshal.Command
import Render.Utils
brackets
:: forall r
. (HasErr r, HasRenderParams r, HasSpecInfo r, HasRenderedNames r)
=> Vector MarshaledCommand
-> Sem r (Vector (CName, CName, RenderElement))
brackets marshaledCommands = context "brackets" $ do
let getMarshaledCommand =
let mcMap = Map.fromList
[ (mcName, m)
| m@MarshaledCommand {..} <- toList marshaledCommands
]
in \c -> note ("Unable to find marshaled command " <> show c) . (`Map.lookup` mcMap) $ c
autoBracket' :: BracketType -> CName -> CName -> CName -> Sem r Bracket
autoBracket' bracketType create destroy with = do
create' <- getMarshaledCommand create
destroy' <- getMarshaledCommand destroy
autoBracket bracketType create' destroy' with
bs <- sequenceV
[ autoBracket' BracketCPS "vmaCreateAllocator" "vmaDestroyAllocator" "vmaWithAllocator"
, autoBracket' BracketCPS "vmaCreatePool" "vmaDestroyPool" "vmaWithPool"
, autoBracket' BracketCPS "vmaAllocateMemory" "vmaFreeMemory" "vmaWithMemory"
, autoBracket' BracketCPS "vmaAllocateMemoryForBuffer" "vmaFreeMemory" "vmaWithMemoryForBuffer"
, autoBracket' BracketCPS "vmaAllocateMemoryForImage" "vmaFreeMemory" "vmaWithMemoryForImage"
, autoBracket' BracketCPS "vmaAllocateMemoryPages" "vmaFreeMemoryPages" "vmaWithMemoryPages"
, autoBracket' BracketCPS "vmaMapMemory" "vmaUnmapMemory" "vmaWithMappedMemory"
, autoBracket' BracketCPS "vmaDefragmentationBegin" "vmaDefragmentationEnd" "vmaWithDefragmentation"
, autoBracket' BracketBookend "vmaBeginDefragmentationPass" "vmaEndDefragmentationPass" "vmaUseDefragmentationPass"
, autoBracket' BracketCPS "vmaCreateBuffer" "vmaDestroyBuffer" "vmaWithBuffer"
, autoBracket' BracketCPS "vmaCreateImage" "vmaDestroyImage" "vmaWithImage"
, autoBracket' BracketCPS "vmaCreateVirtualBlock" "vmaDestroyVirtualBlock" "vmaWithVirtualBlock"
, autoBracket' BracketCPS "vmaVirtualAllocate" "vmaVirtualFree" "vmaWithVirtualAllocation"
]
fromList <$> traverseV (renderBracket paramName) bs
paramName :: Text -> Text
paramName = unReservedWord . T.lowerCaseFirst . dropVma
dropVma :: Text -> Text
dropVma t = if "vma" `T.isPrefixOf` T.toLower t
then T.dropWhile (== '_') . T.drop 2 $ t
else t
| expipiplus1/vulkan | generate-new/vma/VMA/Bracket.hs | bsd-3-clause | 2,912 | 0 | 21 | 742 | 593 | 311 | 282 | -1 | -1 |
{-# LANGUAGE CPP, BangPatterns, RecordWildCards, NamedFieldPuns,
DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving,
ScopedTypeVariables #-}
-- |
--
module Distribution.Client.ProjectBuilding (
BuildStatus(..),
BuildStatusMap,
BuildStatusRebuild(..),
BuildReason(..),
MonitorChangedReason(..),
rebuildTargetsDryRun,
rebuildTargets
) where
import Distribution.Client.PackageHash (renderPackageHashInputs)
import Distribution.Client.RebuildMonad
import Distribution.Client.ProjectConfig
import Distribution.Client.ProjectPlanning
import Distribution.Client.Types
( PackageLocation(..), GenericReadyPackage(..)
, InstalledPackageId, installedPackageId )
import Distribution.Client.InstallPlan
( GenericInstallPlan, GenericPlanPackage )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.DistDirLayout
import Distribution.Client.FileMonitor
import Distribution.Client.SetupWrapper
import Distribution.Client.JobControl
import Distribution.Client.FetchUtils
import Distribution.Client.GlobalFlags (RepoContext)
import qualified Distribution.Client.Tar as Tar
import Distribution.Client.Setup (filterConfigureFlags)
import Distribution.Client.SrcDist (allPackageSourceFiles)
import Distribution.Client.Utils (removeExistingFile)
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps (ComponentDeps)
import Distribution.Solver.Types.PackageFixedDeps
import Distribution.Package hiding (InstalledPackageId, installedPackageId)
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Simple.Program
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Command (CommandUI)
import qualified Distribution.Simple.Register as Cabal
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.LocalBuildInfo (ComponentName)
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Utils hiding (matchFileGlob)
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import Distribution.ParseUtils ( showPWarning )
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.ByteString.Lazy as LBS
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Control.Exception
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Data.List
import Data.Maybe
import System.FilePath
import System.IO
import System.Directory
import System.Exit (ExitCode)
------------------------------------------------------------------------------
-- * Overall building strategy.
------------------------------------------------------------------------------
--
-- We start with an 'ElaboratedInstallPlan' that has already been improved by
-- reusing packages from the store. So the remaining packages in the
-- 'InstallPlan.Configured' state are ones we either need to build or rebuild.
--
-- First, we do a preliminary dry run phase where we work out which packages
-- we really need to (re)build, and for the ones we do need to build which
-- build phase to start at.
------------------------------------------------------------------------------
-- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute?
------------------------------------------------------------------------------
-- We split things like this for a couple of reasons. Firstly, we need to be able
-- to do dry runs, and these need to be reasonably accurate in terms of
-- letting users know what (and why) things are going to be (re)built.
--
-- Given that we need to be able to do dry runs, it would not be great if
-- we had to repeat all the same work when we do it for real. Not only is
-- it duplicate work, but it's duplicate code which is likely to get out of
-- sync. So we do things only once. We preserve info we discover in the dry
-- run phase and rely on it later when we build things for real. This also
-- somewhat simplifies the build phase. So this way the dry run can't so
-- easily drift out of sync with the real thing since we're relying on the
-- info it produces.
--
-- An additional advantage is that it makes it easier to debug rebuild
-- errors (i.e. rebuilding too much or too little), since all the rebuild
-- decisions are made without making any state changes at the same time
-- (that would make it harder to reproduce the problem situation).
-- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan'
--
type BuildStatusMap = Map InstalledPackageId BuildStatus
-- | The build status for an individual package. That is, the state that the
-- package is in prior to initiating a (re)build.
--
-- It serves two purposes:
--
-- * For dry-run output, it lets us explain to the user if and why a package
-- is going to be (re)built.
--
-- * It tells us what step to start or resume building from, and carries
-- enough information for us to be able to do so.
--
data BuildStatus =
-- | The package is in the 'InstallPlan.PreExisting' state, so does not
-- need building.
BuildStatusPreExisting
-- | The package has not been downloaded yet, so it will have to be
-- downloaded, unpacked and built.
| BuildStatusDownload
-- | The package has not been unpacked yet, so it will have to be
-- unpacked and built.
| BuildStatusUnpack FilePath
-- | The package exists in a local dir already, and just needs building
-- or rebuilding. So this can only happen for 'BuildInplaceOnly' style
-- packages.
| BuildStatusRebuild FilePath BuildStatusRebuild
-- | The package exists in a local dir already, and is fully up to date.
-- So this package can be put into the 'InstallPlan.Installed' state
-- and it does not need to be built.
| BuildStatusUpToDate [InstalledPackageInfo] BuildSuccess
-- | For a package that is going to be built or rebuilt, the state it's in now.
--
-- So again, this tells us why a package needs to be rebuilt and what build
-- phases need to be run. The 'MonitorChangedReason' gives us details like
-- which file changed, which is mainly for high verbosity debug output.
--
data BuildStatusRebuild =
-- | The package configuration changed, so the configure and build phases
       -- need to be (re)run.
BuildStatusConfigure (MonitorChangedReason ())
-- | The configuration has not changed but the build phase needs to be
-- rerun. We record the reason the (re)build is needed.
--
-- The optional registration info here tells us if we've registered the
-- package already, or if we stil need to do that after building.
--
| BuildStatusBuild (Maybe [InstalledPackageInfo]) BuildReason
data BuildReason =
      -- | The dependencies of this package have been (re)built, so the build
-- phase needs to be rerun.
--
-- The optional registration info here tells us if we've registered the
      -- package already, or if we still need to do that after building.
--
BuildReasonDepsRebuilt
-- | Changes in files within the package (or first run or corrupt cache)
| BuildReasonFilesChanged (MonitorChangedReason ())
-- | An important special case is that no files have changed but the
-- set of components the /user asked to build/ has changed. We track the
-- set of components /we have built/, which of course only grows (until
-- some other change resets it).
--
-- The @Set 'ComponentName'@ is the set of components we have built
-- previously. When we update the monitor we take the union of the ones
-- we have built previously with the ones the user has asked for this
-- time and save those. See 'updatePackageBuildFileMonitor'.
--
| BuildReasonExtraTargets (Set ComponentName)
-- | Although we're not going to build any additional targets as a whole,
-- we're going to build some part of a component or run a repl or any
-- other action that does not result in additional persistent artifacts.
--
| BuildReasonEphemeralTargets
-- | Which 'BuildStatus' values indicate we'll have to do some build work of
-- some sort. In particular we use this as part of checking if any of a
-- package's deps have changed.
--
buildStatusRequiresBuild :: BuildStatus -> Bool
buildStatusRequiresBuild BuildStatusPreExisting = False
buildStatusRequiresBuild BuildStatusUpToDate {} = False
buildStatusRequiresBuild _ = True
-- | Do the dry run pass. This is a prerequisite of 'rebuildTargets'.
--
-- It gives us the 'BuildStatusMap' and also gives us an improved version of
-- the 'ElaboratedInstallPlan' with packages switched to the
-- 'InstallPlan.Installed' state when we find that they're already up to date.
--
rebuildTargetsDryRun :: DistDirLayout
-> ElaboratedInstallPlan
-> IO (ElaboratedInstallPlan, BuildStatusMap)
rebuildTargetsDryRun distDirLayout@DistDirLayout{..} = \installPlan -> do
-- Do the various checks to work out the 'BuildStatus' of each package
pkgsBuildStatus <- foldMInstallPlanDepOrder installPlan dryRunPkg
-- For 'BuildStatusUpToDate' packages, improve the plan by marking them as
-- 'InstallPlan.Installed'.
let installPlan' = improveInstallPlanWithUpToDatePackages
installPlan pkgsBuildStatus
return (installPlan', pkgsBuildStatus)
where
dryRunPkg :: ElaboratedPlanPackage
-> ComponentDeps [BuildStatus]
-> IO BuildStatus
dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus =
return BuildStatusPreExisting
dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do
mloc <- checkFetched (pkgSourceLocation pkg)
case mloc of
Nothing -> return BuildStatusDownload
Just (LocalUnpackedPackage srcdir) ->
-- For the case of a user-managed local dir, irrespective of the
-- build style, we build from that directory and put build
-- artifacts under the shared dist directory.
dryRunLocalPkg pkg depsBuildStatus srcdir
-- The three tarball cases are handled the same as each other,
-- though depending on the build style.
Just (LocalTarballPackage tarball) ->
dryRunTarballPkg pkg depsBuildStatus tarball
Just (RemoteTarballPackage _ tarball) ->
dryRunTarballPkg pkg depsBuildStatus tarball
Just (RepoTarballPackage _ _ tarball) ->
dryRunTarballPkg pkg depsBuildStatus tarball
dryRunPkg (InstallPlan.Processing {}) _ = unexpectedState
dryRunPkg (InstallPlan.Installed {}) _ = unexpectedState
dryRunPkg (InstallPlan.Failed {}) _ = unexpectedState
unexpectedState = error "rebuildTargetsDryRun: unexpected package state"
dryRunTarballPkg :: ElaboratedConfiguredPackage
-> ComponentDeps [BuildStatus]
-> FilePath
-> IO BuildStatus
dryRunTarballPkg pkg depsBuildStatus tarball =
case pkgBuildStyle pkg of
BuildAndInstall -> return (BuildStatusUnpack tarball)
BuildInplaceOnly -> do
-- TODO: [nice to have] use a proper file monitor rather than this dir exists test
exists <- doesDirectoryExist srcdir
if exists
then dryRunLocalPkg pkg depsBuildStatus srcdir
else return (BuildStatusUnpack tarball)
where
srcdir = distUnpackedSrcDirectory (packageId pkg)
dryRunLocalPkg :: ElaboratedConfiguredPackage
-> ComponentDeps [BuildStatus]
-> FilePath
-> IO BuildStatus
dryRunLocalPkg pkg depsBuildStatus srcdir = do
-- Go and do lots of I/O, reading caches and probing files to work out
-- if anything has changed
change <- checkPackageFileMonitorChanged
packageFileMonitor pkg srcdir depsBuildStatus
case change of
-- It did change, giving us 'BuildStatusRebuild' info on why
Left rebuild ->
return (BuildStatusRebuild srcdir rebuild)
-- No changes, the package is up to date. Use the saved build results.
Right (ipkgs, buildSuccess) ->
return (BuildStatusUpToDate ipkgs buildSuccess)
where
packageFileMonitor =
newPackageFileMonitor distDirLayout (packageId pkg)
-- | A specialised traversal over the packages in an install plan.
--
-- The packages are visited in dependency order, starting with packages with no
-- dependencies. The result for each package is accumulated into a 'Map' and
-- returned as the final result. In addition, when visiting a package, the
-- visiting function is passed the results for all the immediate package
-- dependencies. This can be used to propagate information from dependencies.
--
foldMInstallPlanDepOrder
:: forall m ipkg srcpkg iresult ifailure b.
(Monad m,
HasUnitId ipkg, PackageFixedDeps ipkg,
HasUnitId srcpkg, PackageFixedDeps srcpkg)
=> GenericInstallPlan ipkg srcpkg iresult ifailure
-> (GenericPlanPackage ipkg srcpkg iresult ifailure ->
ComponentDeps [b] -> m b)
-> m (Map InstalledPackageId b)
foldMInstallPlanDepOrder plan0 visit =
go Map.empty (InstallPlan.reverseTopologicalOrder plan0)
where
go :: Map InstalledPackageId b
-> [GenericPlanPackage ipkg srcpkg iresult ifailure]
-> m (Map InstalledPackageId b)
go !results [] = return results
go !results (pkg : pkgs) = do
-- we go in the right order so the results map has entries for all deps
let depresults :: ComponentDeps [b]
depresults =
fmap (map (\ipkgid -> let Just result = Map.lookup ipkgid results
in result))
(depends pkg)
result <- visit pkg depresults
let results' = Map.insert (installedPackageId pkg) result results
go results' pkgs
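-- Hedged illustration: 'dryRunPkg' above is the main client, but any
-- dependency-order accumulation fits this shape, e.g. (with a hypothetical
-- 'flagsFor'):
--
-- > flagsMap <- foldMInstallPlanDepOrder plan $ \pkg depFlags ->
-- >               return (flagsFor pkg <> mconcat (CD.flatDeps depFlags))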
improveInstallPlanWithUpToDatePackages :: ElaboratedInstallPlan
-> BuildStatusMap
-> ElaboratedInstallPlan
improveInstallPlanWithUpToDatePackages installPlan pkgsBuildStatus =
replaceWithPreInstalled installPlan
[ (installedPackageId pkg, ipkgs, buildSuccess)
| InstallPlan.Configured pkg
<- InstallPlan.reverseTopologicalOrder installPlan
, let ipkgid = installedPackageId pkg
Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus
, BuildStatusUpToDate ipkgs buildSuccess <- [pkgBuildStatus]
]
where
replaceWithPreInstalled =
foldl' (\plan (ipkgid, ipkgs, buildSuccess) ->
InstallPlan.preinstalled ipkgid
(find (\ipkg -> installedPackageId ipkg == ipkgid) ipkgs)
buildSuccess plan)
-----------------------------
-- Package change detection
--
-- | As part of the dry run for local unpacked packages we have to check if the
-- package config or files have changed. That is the purpose of
-- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'.
--
-- When a package is (re)built, the monitor must be updated to reflect the new
-- state of the package. Because we sometimes build without reconfiguring, the
-- state updates are split into two, one for package config changes and one
-- for other changes. This is the purpose of 'updatePackageConfigFileMonitor'
-- and 'updatePackageBuildFileMonitor'.
--
data PackageFileMonitor = PackageFileMonitor {
pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (),
pkgFileMonitorBuild :: FileMonitor (Set ComponentName) BuildSuccess,
pkgFileMonitorReg :: FileMonitor () [InstalledPackageInfo]
}
newPackageFileMonitor :: DistDirLayout -> PackageId -> PackageFileMonitor
newPackageFileMonitor DistDirLayout{distPackageCacheFile} pkgid =
PackageFileMonitor {
pkgFileMonitorConfig =
newFileMonitor (distPackageCacheFile pkgid "config"),
pkgFileMonitorBuild =
FileMonitor {
fileMonitorCacheFile = distPackageCacheFile pkgid "build",
fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt ->
componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt,
fileMonitorCheckIfOnlyValueChanged = True
},
pkgFileMonitorReg =
newFileMonitor (distPackageCacheFile pkgid "registration")
}
-- | Helper function for 'checkPackageFileMonitorChanged',
-- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'.
--
-- It selects the info from an 'ElaboratedConfiguredPackage' that is used by
-- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes.
--
packageFileMonitorKeyValues :: ElaboratedConfiguredPackage
-> (ElaboratedConfiguredPackage, Set ComponentName)
packageFileMonitorKeyValues pkg =
(pkgconfig, buildComponents)
where
-- The first part is the value used to guard (re)configuring the package.
-- That is, if this value changes then we will reconfigure.
-- The ElaboratedConfiguredPackage consists mostly (but not entirely) of
-- information that affects the (re)configure step. But those parts that
-- do not affect the configure step need to be nulled out. Those parts are
-- the specific targets that we're going to build.
--
pkgconfig = pkg {
pkgBuildTargets = [],
pkgReplTarget = Nothing,
pkgBuildHaddocks = False
}
-- The second part is the value used to guard the build step. So this is
-- more or less the opposite of the first part, as it's just the info about
-- what targets we're going to build.
--
buildComponents = pkgBuildTargetWholeComponents pkg
-- | Do all the checks on whether a package has changed and thus needs either
-- rebuilding or reconfiguring and rebuilding.
--
checkPackageFileMonitorChanged :: PackageFileMonitor
-> ElaboratedConfiguredPackage
-> FilePath
-> ComponentDeps [BuildStatus]
-> IO (Either BuildStatusRebuild
([InstalledPackageInfo],
BuildSuccess))
checkPackageFileMonitorChanged PackageFileMonitor{..}
pkg srcdir depsBuildStatus = do
--TODO: [nice to have] some debug-level message about file changes, like rerunIfChanged
configChanged <- checkFileMonitorChanged
pkgFileMonitorConfig srcdir pkgconfig
case configChanged of
MonitorChanged monitorReason ->
return (Left (BuildStatusConfigure monitorReason'))
where
monitorReason' = fmap (const ()) monitorReason
MonitorUnchanged () _
-- The configChanged here includes the identity of the dependencies,
-- so depsBuildStatus is just needed for the changes in the content
        -- of dependencies.
| any buildStatusRequiresBuild (CD.flatDeps depsBuildStatus) -> do
regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir ()
let mreg = changedToMaybe regChanged
return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt))
| otherwise -> do
buildChanged <- checkFileMonitorChanged
pkgFileMonitorBuild srcdir buildComponents
regChanged <- checkFileMonitorChanged
pkgFileMonitorReg srcdir ()
let mreg = changedToMaybe regChanged
case (buildChanged, regChanged) of
(MonitorChanged (MonitoredValueChanged prevBuildComponents), _) ->
return (Left (BuildStatusBuild mreg buildReason))
where
buildReason = BuildReasonExtraTargets prevBuildComponents
(MonitorChanged monitorReason, _) ->
return (Left (BuildStatusBuild mreg buildReason))
where
buildReason = BuildReasonFilesChanged monitorReason'
monitorReason' = fmap (const ()) monitorReason
(MonitorUnchanged _ _, MonitorChanged monitorReason) ->
            -- this should only happen if the file is corrupt or has been
-- manually deleted. We don't want to bother with another
-- phase just for this, so we'll reregister by doing a build.
return (Left (BuildStatusBuild Nothing buildReason))
where
buildReason = BuildReasonFilesChanged monitorReason'
monitorReason' = fmap (const ()) monitorReason
(MonitorUnchanged _ _, MonitorUnchanged _ _)
| pkgHasEphemeralBuildTargets pkg ->
return (Left (BuildStatusBuild mreg buildReason))
where
buildReason = BuildReasonEphemeralTargets
(MonitorUnchanged buildSuccess _, MonitorUnchanged ipkgs _) ->
return (Right (ipkgs, buildSuccess))
where
(pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg
changedToMaybe (MonitorChanged _) = Nothing
changedToMaybe (MonitorUnchanged x _) = Just x
updatePackageConfigFileMonitor :: PackageFileMonitor
-> FilePath
-> ElaboratedConfiguredPackage
-> IO ()
updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig}
srcdir pkg =
updateFileMonitor pkgFileMonitorConfig srcdir Nothing
[] pkgconfig ()
where
(pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg
updatePackageBuildFileMonitor :: PackageFileMonitor
-> FilePath
-> MonitorTimestamp
-> ElaboratedConfiguredPackage
-> BuildStatusRebuild
-> [FilePath]
-> BuildSuccess
-> IO ()
updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild}
srcdir timestamp pkg pkgBuildStatus
allSrcFiles buildSuccess =
updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp)
(map monitorFileHashed allSrcFiles)
buildComponents' buildSuccess
where
(_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg
-- If the only thing that's changed is that we're now building extra
-- components, then we can avoid later unnecessary rebuilds by saving the
-- total set of components that have been built, namely the union of the
-- existing ones plus the new ones. If files also changed this would be
-- the wrong thing to do. Note that we rely on the
-- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee
-- that it's /only/ the value that changed not any files that changed.
buildComponents' =
case pkgBuildStatus of
BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents)
-> buildComponents `Set.union` prevBuildComponents
_ -> buildComponents
updatePackageRegFileMonitor :: PackageFileMonitor
-> FilePath
-> [InstalledPackageInfo]
-> IO ()
updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg}
srcdir ipkgs =
updateFileMonitor pkgFileMonitorReg srcdir Nothing
[] () ipkgs
invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO ()
invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} =
removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg)
------------------------------------------------------------------------------
-- * Doing it: executing an 'ElaboratedInstallPlan'
------------------------------------------------------------------------------
-- | Build things for real.
--
-- It requires the 'BuildStatusMap' gathered by 'rebuildTargetsDryRun'.
--
rebuildTargets :: Verbosity
-> DistDirLayout
-> ElaboratedInstallPlan
-> ElaboratedSharedConfig
-> BuildStatusMap
-> BuildTimeSettings
-> IO ElaboratedInstallPlan
rebuildTargets verbosity
distDirLayout@DistDirLayout{..}
installPlan
sharedPackageConfig
pkgsBuildStatus
buildSettings@BuildTimeSettings{buildSettingNumJobs} = do
-- Concurrency control: create the job controller and concurrency limits
-- for downloading, building and installing.
jobControl <- if isParallelBuild then newParallelJobControl
else newSerialJobControl
buildLimit <- newJobLimit buildSettingNumJobs
installLock <- newLock -- serialise installation
cacheLock <- newLock -- serialise access to setup exe cache
--TODO: [code cleanup] eliminate setup exe cache
createDirectoryIfMissingVerbose verbosity False distBuildRootDirectory
createDirectoryIfMissingVerbose verbosity False distTempDirectory
-- Before traversing the install plan, pre-emptively find all packages that
-- will need to be downloaded and start downloading them.
asyncDownloadPackages verbosity withRepoCtx
installPlan pkgsBuildStatus $ \downloadMap ->
-- For each package in the plan, in dependency order, but in parallel...
executeInstallPlan verbosity jobControl installPlan $ \pkg ->
handle (return . BuildFailure) $ --TODO: review exception handling
let ipkgid = installedPackageId pkg
Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus in
rebuildTarget
verbosity
distDirLayout
buildSettings downloadMap
buildLimit installLock cacheLock
sharedPackageConfig
pkg
pkgBuildStatus
where
isParallelBuild = buildSettingNumJobs >= 2
withRepoCtx = projectConfigWithBuilderRepoContext verbosity
buildSettings
-- | Given all the context and resources, (re)build an individual package.
--
rebuildTarget :: Verbosity
-> DistDirLayout
-> BuildTimeSettings
-> AsyncDownloadMap
-> JobLimit -> Lock -> Lock
-> ElaboratedSharedConfig
-> ElaboratedReadyPackage
-> BuildStatus
-> IO BuildResult
rebuildTarget verbosity
distDirLayout@DistDirLayout{distBuildDirectory}
buildSettings downloadMap
buildLimit installLock cacheLock
sharedPackageConfig
rpkg@(ReadyPackage pkg)
pkgBuildStatus =
-- We rely on the 'BuildStatus' to decide which phase to start from:
case pkgBuildStatus of
BuildStatusDownload -> downloadPhase
BuildStatusUnpack tarball -> unpackTarballPhase tarball
BuildStatusRebuild srcdir status -> rebuildPhase status srcdir
-- TODO: perhaps re-nest the types to make these impossible
BuildStatusPreExisting {} -> unexpectedState
BuildStatusUpToDate {} -> unexpectedState
where
unexpectedState = error "rebuildTarget: unexpected package status"
downloadPhase = do
downsrcloc <- waitAsyncPackageDownload verbosity downloadMap pkg
case downsrcloc of
DownloadedTarball tarball -> unpackTarballPhase tarball
--TODO: [nice to have] git/darcs repos etc
unpackTarballPhase tarball =
withJobLimit buildLimit $
withTarballLocalDirectory
verbosity distDirLayout tarball
(packageId pkg) (pkgBuildStyle pkg)
(pkgDescriptionOverride pkg) $
case pkgBuildStyle pkg of
BuildAndInstall -> buildAndInstall
BuildInplaceOnly -> buildInplace buildStatus
where
buildStatus = BuildStatusConfigure MonitorFirstRun
-- Note that this really is rebuild, not build. It can only happen for
-- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages
-- would only start from download or unpack phases.
--
rebuildPhase buildStatus srcdir =
assert (pkgBuildStyle pkg == BuildInplaceOnly) $
withJobLimit buildLimit $
buildInplace buildStatus srcdir builddir
where
builddir = distBuildDirectory (packageId pkg)
buildAndInstall srcdir builddir =
buildAndInstallUnpackedPackage
verbosity distDirLayout
buildSettings installLock cacheLock
sharedPackageConfig
rpkg
srcdir builddir'
where
builddir' = makeRelative srcdir builddir
--TODO: [nice to have] ^^ do this relative stuff better
buildInplace buildStatus srcdir builddir =
--TODO: [nice to have] use a relative build dir rather than absolute
buildInplaceUnpackedPackage
verbosity distDirLayout
buildSettings cacheLock
sharedPackageConfig
rpkg
buildStatus
srcdir builddir
--TODO: [nice to have] do we need to use a with-style for the temp files for downloading http
-- packages, or are we going to cache them persistently?
type AsyncDownloadMap = Map (PackageLocation (Maybe FilePath))
(MVar DownloadedSourceLocation)
data DownloadedSourceLocation = DownloadedTarball FilePath
--TODO: [nice to have] git/darcs repos etc
downloadedSourceLocation :: PackageLocation FilePath
-> Maybe DownloadedSourceLocation
downloadedSourceLocation pkgloc =
case pkgloc of
RemoteTarballPackage _ tarball -> Just (DownloadedTarball tarball)
RepoTarballPackage _ _ tarball -> Just (DownloadedTarball tarball)
_ -> Nothing
-- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the
-- packages we have to download and fork off an async action to download them.
-- We download them in dependency order so that the ones we'll need
-- first are the ones we will start downloading first.
--
-- The body action is passed a map from those packages (identified by their
-- location) to a completion var for that package. So the body action should
-- look up the location and use 'waitAsyncPackageDownload' to get the result.
--
asyncDownloadPackages :: Verbosity
-> ((RepoContext -> IO ()) -> IO ())
-> ElaboratedInstallPlan
-> BuildStatusMap
-> (AsyncDownloadMap -> IO a)
-> IO a
asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body
| null pkgsToDownload = body Map.empty
| otherwise = do
--TODO: [research required] use parallel downloads? if so, use the fetchLimit
asyncDownloadVars <- mapM (\loc -> (,) loc <$> newEmptyMVar) pkgsToDownload
let downloadAction :: IO ()
downloadAction =
withRepoCtx $ \repoctx ->
forM_ asyncDownloadVars $ \(pkgloc, var) -> do
Just scrloc <- downloadedSourceLocation <$>
fetchPackage verbosity repoctx pkgloc
putMVar var scrloc
withAsync downloadAction $ \_ ->
body (Map.fromList asyncDownloadVars)
where
pkgsToDownload =
[ pkgSourceLocation pkg
| InstallPlan.Configured pkg
<- InstallPlan.reverseTopologicalOrder installPlan
, let ipkgid = installedPackageId pkg
Just pkgBuildStatus = Map.lookup ipkgid pkgsBuildStatus
, BuildStatusDownload <- [pkgBuildStatus]
]
-- | Check if a package needs downloading, and if so expect to find a download
-- in progress in the given 'AsyncDownloadMap' and wait on it to finish.
--
waitAsyncPackageDownload :: Verbosity
-> AsyncDownloadMap
-> ElaboratedConfiguredPackage
-> IO DownloadedSourceLocation
waitAsyncPackageDownload verbosity downloadMap pkg =
case Map.lookup (pkgSourceLocation pkg) downloadMap of
Just hnd -> do
debug verbosity $
"Waiting for download of " ++ display (packageId pkg) ++ " to finish"
--TODO: [required eventually] do the exception handling on download stuff
takeMVar hnd
Nothing ->
      fail "waitAsyncPackageDownload: package not being downloaded"
executeInstallPlan
:: forall ipkg srcpkg iresult.
(HasUnitId ipkg, PackageFixedDeps ipkg,
HasUnitId srcpkg, PackageFixedDeps srcpkg)
=> Verbosity
-> JobControl IO ( GenericReadyPackage srcpkg
, GenericBuildResult ipkg iresult BuildFailure )
-> GenericInstallPlan ipkg srcpkg iresult BuildFailure
-> ( GenericReadyPackage srcpkg
-> IO (GenericBuildResult ipkg iresult BuildFailure))
-> IO (GenericInstallPlan ipkg srcpkg iresult BuildFailure)
executeInstallPlan verbosity jobCtl plan0 installPkg =
tryNewTasks 0 plan0
where
tryNewTasks taskCount plan = do
case InstallPlan.ready plan of
[] | taskCount == 0 -> return plan
| otherwise -> waitForTasks taskCount plan
pkgs -> do
sequence_
[ do debug verbosity $ "Ready to install " ++ display pkgid
spawnJob jobCtl $ do
buildResult <- installPkg pkg
return (pkg, buildResult)
| pkg <- pkgs
, let pkgid = packageId pkg
]
let taskCount' = taskCount + length pkgs
plan' = InstallPlan.processing pkgs plan
waitForTasks taskCount' plan'
waitForTasks taskCount plan = do
debug verbosity $ "Waiting for install task to finish..."
(pkg, buildResult) <- collectJob jobCtl
let taskCount' = taskCount-1
plan' = updatePlan pkg buildResult plan
tryNewTasks taskCount' plan'
updatePlan :: GenericReadyPackage srcpkg
-> GenericBuildResult ipkg iresult BuildFailure
-> GenericInstallPlan ipkg srcpkg iresult BuildFailure
-> GenericInstallPlan ipkg srcpkg iresult BuildFailure
updatePlan pkg (BuildSuccess ipkgs buildSuccess) =
InstallPlan.completed (installedPackageId pkg)
mipkg
buildSuccess
where
mipkg = case (ipkgs, find (\ipkg -> installedPackageId ipkg
== installedPackageId pkg) ipkgs) of
([], _) -> Nothing
((_:_), Just ipkg) -> Just ipkg
((_:_), Nothing) ->
error $ "executeInstallPlan: package " ++ display (packageId pkg)
++ " was expected to register the unit "
++ display (installedPackageId pkg)
++ " but is actually registering the unit(s) "
++ intercalate ", " (map (display . installedPackageId) ipkgs)
updatePlan pkg (BuildFailure buildFailure) =
InstallPlan.failed (installedPackageId pkg) buildFailure depsFailure
where
depsFailure = DependentFailed (packageId pkg)
-- So this first pkgid failed for whatever reason (buildFailure).
-- All the other packages that depended on this pkgid, which we
-- now cannot build, we mark as failing due to 'DependentFailed'
-- which kind of means it was not their fault.
-- | Ensure that the package is unpacked in an appropriate directory, either
-- a temporary one or a persistent one under the shared dist directory.
--
withTarballLocalDirectory
:: Verbosity
-> DistDirLayout
-> FilePath
-> PackageId
-> BuildStyle
-> Maybe CabalFileText
-> (FilePath -> FilePath -> IO a)
-> IO a
withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..}
tarball pkgid buildstyle pkgTextOverride
buildPkg =
case buildstyle of
-- In this case we make a temp dir, unpack the tarball to there and
-- build and install it from that temp dir.
BuildAndInstall ->
withTempDirectory verbosity distTempDirectory
(display (packageName pkgid)) $ \tmpdir -> do
unpackPackageTarball verbosity tarball tmpdir
pkgid pkgTextOverride
let srcdir = tmpdir </> display pkgid
builddir = srcdir </> "dist"
buildPkg srcdir builddir
-- In this case we make sure the tarball has been unpacked to the
-- appropriate location under the shared dist dir, and then build it
-- inplace there
BuildInplaceOnly -> do
let srcrootdir = distUnpackedSrcRootDirectory
srcdir = distUnpackedSrcDirectory pkgid
builddir = distBuildDirectory pkgid
-- TODO: [nice to have] use a proper file monitor rather than this dir exists test
exists <- doesDirectoryExist srcdir
unless exists $ do
createDirectoryIfMissingVerbose verbosity False srcrootdir
unpackPackageTarball verbosity tarball srcrootdir
pkgid pkgTextOverride
moveTarballShippedDistDirectory verbosity distDirLayout
srcrootdir pkgid
buildPkg srcdir builddir
unpackPackageTarball :: Verbosity -> FilePath -> FilePath
-> PackageId -> Maybe CabalFileText
-> IO ()
unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride =
--TODO: [nice to have] switch to tar package and catch tar exceptions
annotateFailure UnpackFailed $ do
-- Unpack the tarball
--
info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..."
Tar.extractTarGzFile parentdir pkgsubdir tarball
-- Sanity check
--
exists <- doesFileExist cabalFile
when (not exists) $
die $ "Package .cabal file not found in the tarball: " ++ cabalFile
-- Overwrite the .cabal with the one from the index, when appropriate
--
case pkgTextOverride of
Nothing -> return ()
Just pkgtxt -> do
info verbosity $ "Updating " ++ display pkgname <.> "cabal"
++ " with the latest revision from the index."
writeFileAtomic cabalFile pkgtxt
where
cabalFile = parentdir </> pkgsubdir
</> display pkgname <.> "cabal"
pkgsubdir = display pkgid
pkgname = packageName pkgid
-- | This is a bit of a hacky workaround. A number of packages ship
-- pre-processed .hs files in a dist directory inside the tarball. We don't
-- use the standard 'dist' location, so unless we move this dist dir to the
-- right place we'll miss the shipped pre-processed files. This hacky
-- approach to shipped pre-processed files ought to be replaced by a proper
-- system, though we'll still need to keep this hack for older packages.
--
moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout
-> FilePath -> PackageId -> IO ()
moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory}
parentdir pkgid = do
distDirExists <- doesDirectoryExist tarballDistDir
when distDirExists $ do
debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '"
++ targetDistDir ++ "'"
--TODO: [nice to have] or perhaps better to copy, and use a file monitor
renameDirectory tarballDistDir targetDistDir
where
tarballDistDir = parentdir </> display pkgid </> "dist"
targetDistDir = distBuildDirectory pkgid
buildAndInstallUnpackedPackage :: Verbosity
-> DistDirLayout
-> BuildTimeSettings -> Lock -> Lock
-> ElaboratedSharedConfig
-> ElaboratedReadyPackage
-> FilePath -> FilePath
-> IO BuildResult
buildAndInstallUnpackedPackage verbosity
DistDirLayout{distTempDirectory}
BuildTimeSettings {
buildSettingNumJobs,
buildSettingLogFile
}
installLock cacheLock
pkgshared@ElaboratedSharedConfig {
pkgConfigPlatform = platform,
pkgConfigCompiler = compiler,
pkgConfigCompilerProgs = progdb
}
rpkg@(ReadyPackage pkg)
srcdir builddir = do
createDirectoryIfMissingVerbose verbosity False builddir
initLogFile
--TODO: [code cleanup] deal consistently with talking to older Setup.hs versions, much like
-- we do for ghc, with a proper options type and rendering step
-- which will also let us call directly into the lib, rather than always
-- going via the lib's command line interface, which would also allow
-- passing data like installed packages, compiler, and program db for a
-- quicker configure.
--TODO: [required feature] docs and tests
--TODO: [required feature] sudo re-exec
-- Configure phase
when isParallelBuild $
notice verbosity $ "Configuring " ++ display pkgid ++ "..."
annotateFailure ConfigureFailed $
setup configureCommand configureFlags
-- Build phase
when isParallelBuild $
notice verbosity $ "Building " ++ display pkgid ++ "..."
annotateFailure BuildFailed $
setup buildCommand buildFlags
-- Install phase
mipkg <-
criticalSection installLock $
annotateFailure InstallFailed $ do
--TODO: [research required] do we need the installLock for copying? can we not do that in
-- parallel? Isn't it just registering that we have to lock for?
       --TODO: [required eventually] need to lock installing this ipkg so other processes don't
-- stomp on our files, since we don't have ABI compat, not safe to replace
-- TODO: [required eventually] note that for nix-style installations it is not necessary to do
-- the 'withWin32SelfUpgrade' dance, but it would be necessary for a
-- shared bin dir.
-- Actual installation
setup Cabal.copyCommand copyFlags
LBS.writeFile
(InstallDirs.prefix (pkgInstallDirs pkg) </> "cabal-hash.txt") $
(renderPackageHashInputs (packageHashInputs pkgshared pkg))
-- here's where we could keep track of the installed files ourselves if
-- we wanted by calling copy to an image dir and then we would make a
-- manifest and move it to its final location
       --TODO: [nice to have] we should actually have it make an image in store/incoming and
-- then when it's done, move it to its final location, to reduce problems
-- with installs failing half-way. Could also register and then move.
if pkgRequiresRegistration pkg
then do
ipkgs <- generateInstalledPackageInfos
-- We register ourselves rather than via Setup.hs. We need to
-- grab and modify the InstalledPackageInfo. We decide what
-- the installed package id is, not the build system.
-- See Note [Updating installedUnitId]
let ipkgs' = case ipkgs of
-- Case A and B
[ipkg] -> [ipkg { Installed.installedUnitId = ipkgid }]
-- Case C
_ -> ipkgs
unless (any ((== ipkgid) . Installed.installedUnitId) ipkgs') $
die $ "the package " ++ display (packageId pkg) ++ " was expected "
                ++ "to produce registration info for the unit Id "
++ display ipkgid ++ " but it actually produced info for "
++ intercalate ", "
(map (display . Installed.installedUnitId) ipkgs')
forM_ ipkgs' $ \ipkg' ->
Cabal.registerPackage verbosity compiler progdb
HcPkg.MultiInstance
(pkgRegisterPackageDBStack pkg) ipkg'
return ipkgs'
else return []
--TODO: [required feature] docs and test phases
let docsResult = DocsNotTried
testsResult = TestsNotTried
return (BuildSuccess mipkg (BuildOk docsResult testsResult))
where
pkgid = packageId rpkg
ipkgid = installedPackageId rpkg
isParallelBuild = buildSettingNumJobs >= 2
configureCommand = Cabal.configureCommand defaultProgramConfiguration
configureFlags v = flip filterConfigureFlags v $
setupHsConfigureFlags rpkg pkgshared
verbosity builddir
buildCommand = Cabal.buildCommand defaultProgramConfiguration
buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir
generateInstalledPackageInfos :: IO [InstalledPackageInfo]
generateInstalledPackageInfos =
withTempInstalledPackageInfoFiles
verbosity distTempDirectory $ \pkgConfDest -> do
let registerFlags _ = setupHsRegisterFlags
pkg pkgshared
verbosity builddir
pkgConfDest
setup Cabal.registerCommand registerFlags
copyFlags _ = setupHsCopyFlags pkg pkgshared verbosity builddir
scriptOptions = setupHsScriptOptions rpkg pkgshared srcdir builddir
isParallelBuild cacheLock
setup :: CommandUI flags -> (Version -> flags) -> IO ()
setup cmd flags =
withLogging $ \mLogFileHandle ->
setupWrapper
verbosity
scriptOptions { useLoggingHandle = mLogFileHandle }
(Just (pkgDescription pkg))
cmd flags []
mlogFile =
case buildSettingLogFile of
Nothing -> Nothing
Just mkLogFile -> Just (mkLogFile compiler platform pkgid ipkgid)
initLogFile =
case mlogFile of
Nothing -> return ()
Just logFile -> do
createDirectoryIfMissing True (takeDirectory logFile)
exists <- doesFileExist logFile
when exists $ removeFile logFile
withLogging action =
case mlogFile of
Nothing -> action Nothing
Just logFile -> withFile logFile AppendMode (action . Just)
buildInplaceUnpackedPackage :: Verbosity
-> DistDirLayout
-> BuildTimeSettings -> Lock
-> ElaboratedSharedConfig
-> ElaboratedReadyPackage
-> BuildStatusRebuild
-> FilePath -> FilePath
-> IO BuildResult
buildInplaceUnpackedPackage verbosity
distDirLayout@DistDirLayout {
distTempDirectory,
distPackageCacheDirectory
}
BuildTimeSettings{buildSettingNumJobs}
cacheLock
pkgshared@ElaboratedSharedConfig {
pkgConfigCompiler = compiler,
pkgConfigCompilerProgs = progdb
}
rpkg@(ReadyPackage pkg)
buildStatus
srcdir builddir = do
--TODO: [code cleanup] there is duplication between the distdirlayout and the builddir here
-- builddir is not enough, we also need the per-package cachedir
createDirectoryIfMissingVerbose verbosity False builddir
createDirectoryIfMissingVerbose verbosity False (distPackageCacheDirectory pkgid)
createPackageDBIfMissing verbosity compiler progdb (pkgBuildPackageDBStack pkg)
-- Configure phase
--
whenReConfigure $ do
annotateFailure ConfigureFailed $
setup configureCommand configureFlags []
invalidatePackageRegFileMonitor packageFileMonitor
updatePackageConfigFileMonitor packageFileMonitor srcdir pkg
-- Build phase
--
let docsResult = DocsNotTried
testsResult = TestsNotTried
buildSuccess :: BuildSuccess
buildSuccess = BuildOk docsResult testsResult
whenRebuild $ do
timestamp <- beginUpdateFileMonitor
annotateFailure BuildFailed $
setup buildCommand buildFlags buildArgs
--TODO: [required eventually] this doesn't track file
--non-existence, so we could fail to rebuild if someone
--adds a new file which changes behavior.
allSrcFiles <- allPackageSourceFiles verbosity srcdir
updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp
pkg buildStatus
allSrcFiles buildSuccess
ipkgs <- whenReRegister $ annotateFailure InstallFailed $ do
-- Register locally
ipkgs <- if pkgRequiresRegistration pkg
then do
ipkgs <- generateInstalledPackageInfos
-- We register ourselves rather than via Setup.hs. We need to
-- grab and modify the InstalledPackageInfo. We decide what
-- the installed package id is, not the build system.
-- Note [Updating installedUnitId]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- This is a bit tricky. There are three variables we
-- care about:
--
-- 1. Does the Setup script we're interfacing with
-- support --ipid? (Only if version >= 1.23)
-- If not, we have to explicitly update the
-- the UID that was recorded.
--
-- 2. Does the Setup script we're interfacing with
-- support internal libraries? (Only if
-- version >= 1.25). If so, there may be
-- multiple IPIs... and it would be wrong to
-- update them all to the same UID (you need
-- to generate derived UIDs for each
-- subcomponent.)
--
-- 3. Does GHC require that the IPID be input at
-- configure time? (Only if GHC >= 8.0, which
-- also implies Cabal version >= 1.23, as earlier
-- Cabal's don't know how to do this properly).
-- If so, it is IMPERMISSIBLE to update the
-- UID that was recorded.
--
-- This means that there are three situations:
--
-- A. Cabal < 1.23
-- B. Cabal >= 1.23 && < 1.25
-- C. Cabal >= 1.25
--
-- We consider each in turn:
--
-- A. There is only ever one IPI, and we must
-- update it.
--
-- B. There is only ever one IPI, but because
-- --ipid is supported, the installedUnitId of
-- this IPI is ipkgid (so it's harmless to
-- overwrite).
--
-- C. There may be multiple IPIs, but because
-- --ipid is supported they always have the
-- right installedUnitIds.
--
let ipkgs' = case ipkgs of
-- Case A and B
[ipkg] -> [ipkg { Installed.installedUnitId = ipkgid }]
-- Case C
_ -> assert (any ((== ipkgid) . Installed.installedUnitId)
ipkgs) ipkgs
forM_ ipkgs' $ \ipkg' ->
Cabal.registerPackage verbosity compiler progdb HcPkg.NoMultiInstance
(pkgRegisterPackageDBStack pkg)
ipkg'
return ipkgs'
else return []
updatePackageRegFileMonitor packageFileMonitor srcdir ipkgs
return ipkgs
-- Repl phase
--
whenRepl $
annotateFailure BuildFailed $
setup replCommand replFlags replArgs
-- Haddock phase
whenHaddock $
annotateFailure BuildFailed $
setup haddockCommand haddockFlags []
return (BuildSuccess ipkgs buildSuccess)
where
pkgid = packageId rpkg
ipkgid = installedPackageId rpkg
isParallelBuild = buildSettingNumJobs >= 2
packageFileMonitor = newPackageFileMonitor distDirLayout pkgid
whenReConfigure action = case buildStatus of
BuildStatusConfigure _ -> action
_ -> return ()
whenRebuild action
| null (pkgBuildTargets pkg) = return ()
| otherwise = action
whenRepl action
| isNothing (pkgReplTarget pkg) = return ()
| otherwise = action
whenHaddock action
| pkgBuildHaddocks pkg = action
| otherwise = return ()
whenReRegister action = case buildStatus of
BuildStatusConfigure _ -> action
BuildStatusBuild Nothing _ -> action
BuildStatusBuild (Just ipkgs) _ -> return ipkgs
configureCommand = Cabal.configureCommand defaultProgramConfiguration
configureFlags v = flip filterConfigureFlags v $
setupHsConfigureFlags rpkg pkgshared
verbosity builddir
buildCommand = Cabal.buildCommand defaultProgramConfiguration
buildFlags _ = setupHsBuildFlags pkg pkgshared
verbosity builddir
buildArgs = setupHsBuildArgs pkg
replCommand = Cabal.replCommand defaultProgramConfiguration
replFlags _ = setupHsReplFlags pkg pkgshared
verbosity builddir
replArgs = setupHsReplArgs pkg
haddockCommand = Cabal.haddockCommand
haddockFlags _ = setupHsHaddockFlags pkg pkgshared
verbosity builddir
scriptOptions = setupHsScriptOptions rpkg pkgshared
srcdir builddir
isParallelBuild cacheLock
setup :: CommandUI flags -> (Version -> flags) -> [String] -> IO ()
setup cmd flags args =
setupWrapper verbosity
scriptOptions
(Just (pkgDescription pkg))
cmd flags args
generateInstalledPackageInfos :: IO [InstalledPackageInfo]
generateInstalledPackageInfos =
withTempInstalledPackageInfoFiles
verbosity distTempDirectory $ \pkgConfDest -> do
let registerFlags _ = setupHsRegisterFlags
pkg pkgshared
verbosity builddir
pkgConfDest
setup Cabal.registerCommand registerFlags []
-- helper
annotateFailure :: (String -> BuildFailure) -> IO a -> IO a
annotateFailure annotate action =
action `catches`
[ Handler $ \ioe -> handler (ioe :: IOException)
, Handler $ \exit -> handler (exit :: ExitCode)
]
where
handler :: Exception e => e -> IO a
handler = throwIO . annotate
#if MIN_VERSION_base(4,8,0)
. displayException
#else
. show
#endif
withTempInstalledPackageInfoFiles :: Verbosity -> FilePath
-> (FilePath -> IO ())
-> IO [InstalledPackageInfo]
withTempInstalledPackageInfoFiles verbosity tempdir action =
withTempDirectory verbosity tempdir "package-registration-" $ \dir -> do
-- make absolute since @action@ will often change directory
abs_dir <- canonicalizePath dir
let pkgConfDest = abs_dir </> "pkgConf"
action pkgConfDest
is_dir <- doesDirectoryExist pkgConfDest
let notHidden = not . isHidden
isHidden name = "." `isPrefixOf` name
if is_dir
then mapM (readPkgConf pkgConfDest) . sort . filter notHidden
=<< getDirectoryContents pkgConfDest
else fmap (:[]) $ readPkgConf "." pkgConfDest
where
pkgConfParseFailed :: Installed.PError -> IO a
pkgConfParseFailed perror =
die $ "Couldn't parse the output of 'setup register --gen-pkg-config':"
++ show perror
readPkgConf pkgConfDir pkgConfFile = do
(warns, ipkg) <- withUTF8FileContents (pkgConfDir </> pkgConfFile) $ \pkgConfStr ->
case Installed.parseInstalledPackageInfo pkgConfStr of
Installed.ParseFailed perror -> pkgConfParseFailed perror
Installed.ParseOk warns ipkg -> return (warns, ipkg)
unless (null warns) $
warn verbosity $ unlines (map (showPWarning pkgConfFile) warns)
return ipkg
| headprogrammingczar/cabal | cabal-install/Distribution/Client/ProjectBuilding.hs | bsd-3-clause | 58,703 | 0 | 27 | 18,286 | 8,108 | 4,201 | 3,907 | 807 | 12 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Pushbullet.Models.PushRequest (
Target(..),
Message(..),
PushRequest(..)
) where
import Control.Applicative ((<$>), (<*>), (<|>))
import Data.Aeson (FromJSON, ToJSON, object, parseJSON,
toJSON, (.:), (.:?), (.=))
import qualified Data.Aeson.Types as A
import Data.Monoid ((<>))
import qualified Data.Text as T
import GHC.Generics (Generic)
data Target = All
| Device T.Text
| Email T.Text
| Channel T.Text
| Client T.Text
deriving (Show, Eq, Generic)
targetPairs :: Target -> [A.Pair]
targetPairs All = []
targetPairs (Device t) = ["device_id" .= t]
targetPairs (Email t) = ["email" .= t]
targetPairs (Channel t) = ["channel_tag" .= t]
targetPairs (Client t) = ["client_iden" .= t]
instance ToJSON Target where
toJSON = object . targetPairs
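-- Illustrative usage (an added note, not part of the upstream module),
-- assuming @Data.Aeson.encode@ is in scope and @OverloadedStrings@ is enabled:
-- a 'Target' encodes to an object built from its 'targetPairs'.
--
-- >>> Data.Aeson.encode (Device "d1")
-- "{\"device_id\":\"d1\"}"
--
-- 'All' contributes no pairs, so it encodes to the empty object @{}@.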
instance FromJSON Target where
parseJSON (A.Object v) = (Device <?> "device_id")
<|> (Email <?> "email")
<|> (Channel <?> "channel_tag")
<|> (Client <?> "client_iden")
<|> return All
where
f <?> k = f <$> v .: k
parseJSON invalid = A.typeMismatch "Target" invalid
data Message = Note (Maybe T.Text) (Maybe T.Text)
| Link (Maybe T.Text) (Maybe T.Text) (Maybe T.Text)
deriving (Show, Eq, Generic)
messagePairs :: Message -> [A.Pair]
messagePairs (Note title body) = [
"type" .= ("note" :: T.Text),
"title" .= title,
"body" .= body
]
messagePairs (Link title body url) = [
"type" .= ("link" :: T.Text),
"title" .= title,
"body" .= body,
"url" .= url
]
instance ToJSON Message where
toJSON = object . messagePairs
instance FromJSON Message where
parseJSON (A.Object v) = do
(t :: T.Text) <- v .: "type"
case t of
"note" -> Note <$> v .:? "title" <*> v .:? "body"
"link" -> Link <$> v .:? "title" <*> v .:? "body" <*> v .:? "url"
_ -> fail "type should be one of \"note\" or \"link\"."
parseJSON invalid = A.typeMismatch "Message" invalid
data PushRequest = PushRequest Target Message deriving (Show, Eq, Generic)
instance ToJSON PushRequest where
toJSON (PushRequest t m) = object $ targetPairs t <> messagePairs m
instance FromJSON PushRequest where
parseJSON v = PushRequest <$> parseJSON v <*> parseJSON v
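-- Illustrative usage (an added note, not part of the upstream module): a
-- complete push is the union of target and message pairs, so a note sent to
-- a device could be built and serialised as below (field order in the
-- encoded output depends on the aeson version):
--
-- > Data.Aeson.encode (PushRequest (Device "d1") (Note (Just "Hi") (Just "There")))
--
-- Decoding applies both 'parseJSON' instances to the same JSON value, which
-- is why the target and message fields are read from one flat object.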
| joshbohde/pushbullet | src/Pushbullet/Models/PushRequest.hs | bsd-3-clause | 2,588 | 0 | 16 | 768 | 839 | 460 | 379 | 64 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
module Web.Browser.Windows
( openBrowserWindows
) where
import System.Win32.Types (INT, HANDLE, HINSTANCE, LPCTSTR,
                           handleToWord, nullPtr, withTString)

-- WINDOWS_CCONV selects the calling convention for the FFI import below
-- (stdcall on 32-bit, ccall on 64-bit); without this CPP definition the
-- module cannot compile. This is the standard pattern used by the Win32
-- bindings and is assumed to match the original source.
#if defined(i386_HOST_ARCH)
#define WINDOWS_CCONV stdcall
#elif defined(x86_64_HOST_ARCH)
#define WINDOWS_CCONV ccall
#else
#error Unknown mingw32 arch
#endif
openBrowserWindows :: String -> IO Bool
openBrowserWindows url =
withTString "open" $ \openStr ->
withTString url $ \urlStr ->
exitCodeToBool `fmap` c_ShellExecute nullPtr
openStr
urlStr
nullPtr
nullPtr
1
where exitCodeToBool hinst | handleToWord hinst > 32 = True
| otherwise = False
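-- Illustrative usage (an added note, not part of the original module):
--
-- > openBrowserWindows "https://www.haskell.org" >>= print
--
-- ShellExecuteW signals success with any return value greater than 32,
-- which is exactly what 'exitCodeToBool' checks above.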
-- https://msdn.microsoft.com/en-us/library/windows/desktop/bb762153(v=vs.85).aspx
foreign import WINDOWS_CCONV unsafe "windows.h ShellExecuteW"
c_ShellExecute :: HANDLE -- _In_opt_
-> LPCTSTR -- _In_opt_
-> LPCTSTR -- _In_
-> LPCTSTR -- _In_opt_
-> LPCTSTR -- _In_opt_
-> INT -- _In_
-> IO HINSTANCE
| rightfold/open-browser | lib/Web/Browser/Windows.hs | bsd-3-clause | 1,268 | 2 | 11 | 568 | 181 | 103 | 78 | -1 | -1 |