code (string, length 5–1.03M) | repo_name (string, length 5–90) | path (string, length 4–158) | license (15 classes) | size (int64, 5–1.03M) | n_ast_errors (int64, 0–53.9k) | ast_max_depth (int64, 2–4.17k) | n_whitespaces (int64, 0–365k) | n_ast_nodes (int64, 3–317k) | n_ast_terminals (int64, 1–171k) | n_ast_nonterminals (int64, 1–146k) | loc (int64, -1–37.3k) | cycloplexity (int64, -1–1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE OverloadedStrings #-}
module EDDA.Schema.ShipyardV2Test where
import Test.HUnit
import Data.Maybe (fromJust,isJust)
import Data.Aeson
import Data.Aeson.Types
import qualified Data.ByteString.Char8 as C
import qualified Data.HashSet as HS
import Control.Monad.Trans.Reader
import EDDA.Config
import EDDA.Schema.OutfittingV1 (parseOutfitting)
import EDDA.Schema.Util
import EDDA.Schema.Parser
import EDDA.Types
import EDDA.Test.Util
test1 :: Test
test1 = TestCase $ do conf <- readConf
val <- readJsonTestFile "test/EDDA/Schema/shipyard2.json"
maybeHeader <- runReaderT (parseHeader val) conf
assertBool "header couldn't be parsed" (isJust maybeHeader)
let header = fromJust maybeHeader
assertEqual "uploader id" "Dovacube" (headerUploaderId header)
assertEqual "software name" "E:D Market Connector [Windows]" (headerSoftwareName header)
assertEqual "software version" "2.1.7.0" (headerSoftwareVersion header)
maybeMessage <- runReaderT (parseMessage val) conf
assertBool "message couldn't be parsed" (isJust maybeMessage)
let shipyardInfo = fromJust maybeMessage
assertEqual "system name" "Gateway" (shipyardInfoSystemName shipyardInfo)
assertEqual "station name" "Dublin Citadel" (shipyardInfoStationName shipyardInfo)
assertEqual "ships" 3 (HS.size (shipyardInfoShips shipyardInfo))
shipyardV2Tests = [TestLabel "shipyard2 test" test1]
| troydm/edda | test/EDDA/Schema/ShipyardV2Test.hs | mit | 1,666 | 0 | 12 | 453 | 336 | 173 | 163 | 31 | 1 |
-- generate code for combinators
import Data.List (intersperse)
import Data.Char (toUpper)
import System.IO
type Var = Char
data Expr = EVar Var
| EApp Expr Expr
isVar, isApp :: Expr -> Bool
isVar (EVar _) = True
isVar _ = False
isApp (EApp _ _) = True
isApp _ = False
eVar :: Expr -> Var
eVar (EVar x) = x
instance Show Expr where
showsPrec _ e = se False e
where
se _ (EVar x) = showChar x
se False (EApp f x) = se False f . showChar ' ' . se True x
se True (EApp f x) = showChar '(' . se False f . showChar ' ' . se True x . showChar ')'
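-- Illustrative examples of the instance above:
-- show (EApp (EApp (EVar 'f') (EVar 'x')) (EVar 'y')) == "f x y"
-- show (EApp (EVar 'f') (EApp (EVar 'g') (EVar 'x'))) == "f (g x)"
-- i.e. application renders left-associatively and only argument applications
-- are parenthesised.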
type Instr = String
-- projection combinators
combGen :: (String, [Var], Expr) -> Bool -> CodeGen ()
combGen (c, args, expr@(EVar v)) True =
do bindArgs args expr
emitSet (arity - 1) ("a_" ++ show expr)
emitPop (arity - 1)
emit "c.get1().overwriteHole()"
emit "c.eval()"
emit "Node result = c.getTos()"
emit "c.get1().overwriteInd(result)"
emitSet 1 "result"
emitPop 1
emitUnwind
where
arity = length args
combGen (c, args, expr@(EVar v)) False =
do bindArgs args expr
emitPop arity
emit ("c.getTos().overwriteInd(" ++ result ++ ")")
emit ("c.setTos(" ++ result ++ ")")
emitUnwind
where
arity = length args
result = "a_" ++ show expr
-- combinators resulting in an application
combGen (c, args, expr@(EApp f a)) _ =
do bindArgs args expr
emit ("Node redex = " ++ getSpine arity)
f' <- buildNode f
a' <- buildNode a
emit ("redex.overwriteApp(" ++ f' ++ ", " ++ a' ++ ")")
emitSet (arity - 1) f'
emitPop (arity - 1)
emitUnwind
where
arity = length args
buildNode :: Expr -> CodeGen String
buildNode (EVar x) = return ("a_" ++ [x])
buildNode (EApp f a) = do f' <- buildNode f
a' <- buildNode a
r <- newVar "g"
emit ("Node " ++ r ++ " = c.mkApp(" ++ f' ++ ", " ++ a' ++ ")")
return r
bindArgs :: [Var] -> Expr -> CodeGen ()
bindArgs args expr = loop args 1
where loop [] _ = return ()
loop (v:vs) n = do bindArg v n; loop vs (n+1)
bindArg v n | occurs v expr = emit ("Node a_" ++ [v] ++ " = " ++ getArg n)
| otherwise = return ()
-- variable occurs in expression?
occurs :: Var -> Expr -> Bool
occurs v (EVar x) = v == x
occurs v (EApp f a) = occurs v f || occurs v a
combDefns = [
("S", "fgx", EApp (EApp (EVar 'f') (EVar 'x')) (EApp (EVar 'g') (EVar 'x'))),
("K", "cx", EVar 'c'),
("K'", "xc", EVar 'c'),
("I", "x", EVar 'x'),
("J", "fgx", EApp (EVar 'f') (EVar 'g')),
("J'", "kfgx", EApp (EApp (EVar 'k') (EVar 'f')) (EVar 'g')),
("C", "fxy", EApp (EApp (EVar 'f') (EVar 'y')) (EVar 'x')),
("B", "fgx", EApp (EVar 'f') (EApp (EVar 'g') (EVar 'x'))),
("B*", "cfgx", EApp (EVar 'c') (EApp (EVar 'f') (EApp (EVar 'g') (EVar 'x')))),
("B'", "cfgx", EApp (EApp (EVar 'c') (EVar 'f')) (EApp (EVar 'g') (EVar 'x'))),
("C'", "cfgx", EApp (EApp (EVar 'c') (EApp (EVar 'f') (EVar 'x'))) (EVar 'g')),
-- S' c f g x = c (f x) (g x)
("S'", "cfgx", EApp (EApp (EVar 'c') (EApp (EVar 'f') (EVar 'x'))) (EApp (EVar 'g') (EVar 'x'))),
("W", "fx", EApp (EApp (EVar 'f') (EVar 'x')) (EVar 'x'))
]
intOps = [("add", "+"), ("sub", "-"), ("mul", "*"),
("div", "/"), ("rem", "%"),
("Rsub", "-"), ("Rdiv", "/"), ("Rrem", "%")]
relOps = [("less", "PrimLess", "<"),
("greater", "PrimGreater", ">"),
("less_eq", "PrimLessEq", "<="),
("gr_eq", "PrimGreaterEq", ">="),
("eq", "PrimEq", "=="),
("neq", "PrimNeq", "!=")]
main = do mapM_ showDef combDefns
mapM_ (\(name, op) -> genBinOpInt name op (isRevOp name)) intOps
mapM_ (\(name, cname, op) -> genRelOp name cname op) relOps
where
isRevOp ('R':name) = True
isRevOp _ = False
showDef (c,a,e) = do putStr c
putStr " "
putStr (intersperse ' ' a)
putStr " = "
putStrLn (show e)
genCombs (c,a,e)
genCombs :: (String, [Var], Expr) -> IO ()
genCombs s@(c, a, e@(EVar _)) = do genComb s False
genComb s True
genCombs s = genComb s False
putCode :: Handle -> [Instr] -> IO ()
putCode h = mapM_ (\i -> if null i then return () else do hPutStr h " "; hPutStr h i; hPutStrLn h ";")
genComb :: (String, [Var], Expr) -> Bool -> IO ()
genComb (c,a,e) eval =
do h <- openFile fileName WriteMode
hPutStrLn h "package de.bokeh.skred.red;\n"
hPutStrLn h "/**"
hPutStrLn h (" * The " ++ c ++ " combinator.")
hPutStrLn h " * <p>"
hPutStrLn h " * Reduction rule:"
hPutStrLn h (" * " ++ c ++ " " ++ intersperse ' ' a ++ " ==> " ++ show e)
hPutStrLn h " */"
hPutStrLn h ("class " ++ className ++ " extends Function {\n")
hPutStrLn h (" public " ++ className ++ "() {")
hPutStrLn h (" super(\"" ++ c ++ "\", " ++ show arity ++ ");")
hPutStrLn h " }\n"
hPutStrLn h " @Override"
hPutStrLn h " Node exec(RedContext c) {"
let code = runCG (combGen (c,a,e) eval)
putCode h code
hPutStrLn h " }\n"
hPutStrLn h "}"
hClose h
where
arity = length a
className = "Comb" ++ map toJava c ++ if eval then "_Eval" else ""
fileName = className ++ ".java"
toJava :: Char -> Char
toJava '\'' = '1'
toJava '*' = 's'
toJava c = c
initCap :: String -> String
initCap "" = ""
initCap (c:cs) = toUpper c : cs
genBinOpInt :: String -> String -> Bool -> IO ()
genBinOpInt name op rev =
do h <- openFile fileName WriteMode
hPutStrLn h "package de.bokeh.skred.red;\n"
hPutStrLn h "/**"
hPutStrLn h (" * Int " ++ name)
hPutStrLn h " */"
hPutStrLn h ("class " ++ className ++ " extends Function {\n")
hPutStrLn h (" public " ++ className ++ "() {")
hPutStrLn h (" super(\"" ++ name ++ "\", 2);")
hPutStrLn h " }\n\n\
\ @Override\n\
\ Node exec(RedContext c) {\n\
\ c.rearrange2();\n\
\ c.eval();\n\
\ c.swap();\n\
\ c.eval();\n\
\ Node a2 = c.getTos();\n\
\ Node a1 = c.get1();\n\
\ int n1 = a1.intValue();\n\
\ int n2 = a2.intValue();"
hPutStrLn h (if rev then " int r = n2 " ++ op ++ " n1;"
else " int r = n1 " ++ op ++ " n2;")
hPutStrLn h " Node result = Int.valueOf(r);\n\
\ c.pop2();\n\
\ c.getTos().overwriteInd(result);\n\
\ c.setTos(result);\n\
\ return result;\n\
\ }\n\n\
\}"
hClose h
where
className = "Prim" ++ initCap name ++ "Int"
fileName = className ++ ".java"
genRelOp :: String -> String -> String -> IO ()
genRelOp name className op =
do h <- openFile fileName WriteMode
hPutStrLn h "package de.bokeh.skred.red;\n"
hPutStrLn h "/**"
hPutStrLn h (" * Relop " ++ name)
hPutStrLn h " */"
hPutStrLn h ("class " ++ className ++ " extends Function {\n")
hPutStrLn h (" public " ++ className ++ "() {")
hPutStrLn h (" super(\"" ++ name ++ "\", 2);")
hPutStrLn h " }\n\n\
\ @Override\n\
\ Node exec(RedContext c) {\n\
\ c.rearrange2();\n\
\ c.eval();\n\
\ c.swap();\n\
\ c.eval();\n\
\ Node a2 = c.getTos();\n\
\ Node a1 = c.get1();\n\
\ int n1 = a1.intValue();\n\
\ int n2 = a2.intValue();"
hPutStrLn h (" boolean r = n1 " ++ op ++ " n2;")
hPutStrLn h " Node result = Data.valueOf(r?1:0);\n\
\ c.pop2();\n\
\ c.getTos().overwriteInd(result);\n\
\ c.setTos(result);\n\
\ return result;\n\
\ }\n\n\
\}"
hClose h
where
fileName = className ++ ".java"
data CGState = CG Int [Instr]
newtype CodeGen a = CGM (CGState -> (CGState, a))
runCG :: CodeGen a -> [Instr]
runCG (CGM m) = let (CG _ is, _) = m (CG 1 []) in reverse is
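-- Illustrative example: runCG (emit "a" >> emit "b") == ["a", "b"],
-- i.e. instructions are collected in emission order.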
instance Functor CodeGen where
  fmap f (CGM m) = CGM (\st -> let (st', x) = m st in (st', f x))
instance Applicative CodeGen where
  pure x = CGM (\st -> (st, x))
  (CGM mf) <*> (CGM mx) = CGM (\s -> let (s', f) = mf s; (s'', x) = mx s' in (s'', f x))
instance Monad CodeGen where
  return = pure
  (CGM m) >>= f = CGM (\s -> let (s',x) = m s; CGM f' = f x in f' s')
emit :: Instr -> CodeGen ()
emit i = CGM (\(CG ns is) -> (CG ns (i:is), ()))
newVar :: String -> CodeGen String
newVar prefix = CGM (\(CG ns is) -> (CG (ns+1) is, prefix ++ show ns))
getCode :: CodeGen [Instr]
getCode = CGM (\st@(CG _ is) -> (st, reverse is))
mAX_SPECIALIZED_OFFSET :: Int
mAX_SPECIALIZED_OFFSET = 3
-- instruction helpers
emitPop :: Int -> CodeGen ()
emitPop 0 = return ()
emitPop n | n <= mAX_SPECIALIZED_OFFSET = emit ("c.pop" ++ shows n "()")
| otherwise = emit ("c.pop(" ++ shows n ")")
emitSet :: Int -> String -> CodeGen ()
emitSet 0 x = emit ("c.setTos(" ++ x ++ ")")
emitSet n x | n <= mAX_SPECIALIZED_OFFSET = emit ("c.set" ++ show n ++ "(" ++ x ++ ")")
| otherwise = emit ("c.set(" ++ show n ++ ", " ++ x ++ ")")
emitUnwind :: CodeGen ()
emitUnwind = emit "return null"
getSpine, getArg :: Int -> String
getSpine 0 = "c.getTos()"
getSpine n | n <= mAX_SPECIALIZED_OFFSET = "c.get" ++ show n ++ "()"
| otherwise = "c.get(" ++ show n ++ ")"
getArg n | n <= mAX_SPECIALIZED_OFFSET = "c.getArg" ++ show n ++ "()"
| otherwise = "c.getArg(" ++ show n ++ ")"
| bokesan/skred | tools/CombGen.hs | mit | 11,196 | 0 | 14 | 4,678 | 3,633 | 1,821 | 1,812 | 200 | 2 |
module Tuura.Fantasi.Main (main) where
import Tuura.Fantasi.Options
import qualified Pangraph.GraphML.Parser as P
import qualified Tuura.Fantasi.VHDL.Writer as VHDL
import Data.ByteString (readFile, writeFile)
import Prelude hiding (readFile, writeFile)
import Data.Maybe (maybe)
main :: IO ()
main = do
-- get arguments
options <- getOptions
let graphMLPath = optGraphML options
graphVHDLPath = optGraphName options
simulationEnvVhdlPath = optSimName options
-- parse graph
pangraph <- ((maybe (error "file or graph is malformed") id) . P.parse) <$> readFile graphMLPath
let graphVHDL = VHDL.writeGraph pangraph
let simEnvVHDL = VHDL.writeEnvironment pangraph
-- output vhdl graph
writeFile graphVHDLPath graphVHDL
-- output vhdl simulation environment
writeFile simulationEnvVhdlPath simEnvVHDL
| tuura/fantasi | src/fantasi/Tuura/Fantasi/Main.hs | mit | 896 | 0 | 14 | 196 | 207 | 113 | 94 | 18 | 1 |
{-# LANGUAGE CPP #-}
module Test.Hspec.Core.FailureReport (
FailureReport (..)
, writeFailureReport
, readFailureReport
) where
import Prelude ()
import Test.Hspec.Core.Compat
#ifndef __GHCJS__
import System.SetEnv (setEnv)
import Test.Hspec.Core.Util (safeTry)
#endif
import System.IO
import System.Directory
import Test.Hspec.Core.Util (Path)
import Test.Hspec.Core.Config.Definition (Config(..))
data FailureReport = FailureReport {
failureReportSeed :: Integer
, failureReportMaxSuccess :: Int
, failureReportMaxSize :: Int
, failureReportMaxDiscardRatio :: Int
, failureReportPaths :: [Path]
} deriving (Eq, Show, Read)
writeFailureReport :: Config -> FailureReport -> IO ()
writeFailureReport config report = case configFailureReport config of
Just file -> writeFile file (show report)
Nothing -> do
#ifdef __GHCJS__
-- ghcjs currently does not support setting environment variables
-- (https://github.com/ghcjs/ghcjs/issues/263). Since writing a failure report
-- into the environment is a non-essential feature we just disable this to be
-- able to run hspec test-suites with ghcjs at all. Should be reverted once
-- the issue is fixed.
return ()
#else
-- on Windows this can throw an exception when the input is too large, hence
-- we use `safeTry` here
safeTry (setEnv "HSPEC_FAILURES" $ show report) >>= either onError return
where
onError err = do
hPutStrLn stderr ("WARNING: Could not write environment variable HSPEC_FAILURES (" ++ show err ++ ")")
#endif
readFailureReport :: Config -> IO (Maybe FailureReport)
readFailureReport config = case configFailureReport config of
Just file -> do
exists <- doesFileExist file
if exists
then do
r <- readFile file
let report = readMaybe r
when (report == Nothing) $ do
hPutStrLn stderr ("WARNING: Could not read failure report from file " ++ show file ++ "!")
return report
else return Nothing
Nothing -> do
mx <- lookupEnv "HSPEC_FAILURES"
case mx >>= readMaybe of
Nothing -> do
hPutStrLn stderr "WARNING: Could not read environment variable HSPEC_FAILURES; `--rerun' is ignored!"
return Nothing
report -> return report
| hspec/hspec | hspec-core/src/Test/Hspec/Core/FailureReport.hs | mit | 2,326 | 0 | 20 | 543 | 423 | 227 | 196 | 46 | 4 |
{-# LANGUAGE TemplateHaskell #-}
{-| Implementation of the Ganeti confd types.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Confd.Types
( ConfdClient(..)
, ConfdRequestType(..)
, confdRequestTypeToRaw
, ConfdReqField(..)
, confdReqFieldToRaw
, ConfdReqQ(..)
, ConfdReplyStatus(..)
, confdReplyStatusToRaw
, ConfdNodeRole(..)
, confdNodeRoleToRaw
, ConfdErrorType(..)
, confdErrorTypeToRaw
, ConfdRequest(..)
, newConfdRequest
, ConfdReply(..)
, ConfdQuery(..)
, SignedMessage(..)
) where
import Text.JSON
import qualified Network.Socket as S
import qualified Ganeti.ConstantUtils as ConstantUtils
import Ganeti.Hash
import Ganeti.THH
import Ganeti.Utils (newUUID)
$(declareILADT "ConfdRequestType"
[ ("ReqPing", 0)
, ("ReqNodeRoleByName", 1)
, ("ReqNodePipByInstPip", 2)
, ("ReqClusterMaster", 3)
, ("ReqNodePipList", 4)
, ("ReqMcPipList", 5)
, ("ReqInstIpsList", 6)
, ("ReqNodeDrbd", 7)
, ("ReqNodeInstances", 8)
, ("ReqInstanceDisks", 9)
])
$(makeJSONInstance ''ConfdRequestType)
$(declareILADT "ConfdReqField"
[ ("ReqFieldName", 0)
, ("ReqFieldIp", 1)
, ("ReqFieldMNodePip", 2)
])
$(makeJSONInstance ''ConfdReqField)
-- Confd request query fields. These are used to narrow down queries.
-- These must be strings rather than integers, because json-encoding
-- converts them to strings anyway, as they're used as dict-keys.
$(buildObject "ConfdReqQ" "confdReqQ"
[ renameField "Ip" .
optionalField $
simpleField ConstantUtils.confdReqqIp [t| String |]
, renameField "IpList" .
defaultField [| [] |] $
simpleField ConstantUtils.confdReqqIplist [t| [String] |]
, renameField "Link" .
optionalField $
simpleField ConstantUtils.confdReqqLink [t| String |]
, renameField "Fields" .
defaultField [| [] |] $
simpleField ConstantUtils.confdReqqFields [t| [ConfdReqField] |]
])
-- | Confd query type. This is complex enough that we can't
-- automatically derive it via THH.
data ConfdQuery = EmptyQuery
| PlainQuery String
| DictQuery ConfdReqQ
deriving (Show, Eq)
instance JSON ConfdQuery where
readJSON o = case o of
JSNull -> return EmptyQuery
JSString s -> return . PlainQuery . fromJSString $ s
JSObject _ -> fmap DictQuery (readJSON o::Result ConfdReqQ)
_ -> fail $ "Cannot deserialise into ConfdQuery\
\ the value '" ++ show o ++ "'"
showJSON cq = case cq of
EmptyQuery -> JSNull
PlainQuery s -> showJSON s
DictQuery drq -> showJSON drq
$(declareILADT "ConfdReplyStatus"
[ ("ReplyStatusOk", 0)
, ("ReplyStatusError", 1)
, ("ReplyStatusNotImpl", 2)
])
$(makeJSONInstance ''ConfdReplyStatus)
$(declareILADT "ConfdNodeRole"
[ ("NodeRoleMaster", 0)
, ("NodeRoleCandidate", 1)
, ("NodeRoleOffline", 2)
, ("NodeRoleDrained", 3)
, ("NodeRoleRegular", 4)
])
$(makeJSONInstance ''ConfdNodeRole)
-- Note that the next item is not a frozenset in Python, but we make
-- it a separate type for safety
$(declareILADT "ConfdErrorType"
[ ("ConfdErrorUnknownEntry", 0)
, ("ConfdErrorInternal", 1)
, ("ConfdErrorArgument", 2)
])
$(makeJSONInstance ''ConfdErrorType)
$(buildObject "ConfdRequest" "confdRq"
[ simpleField "protocol" [t| Int |]
, simpleField "type" [t| ConfdRequestType |]
, defaultField [| EmptyQuery |] $ simpleField "query" [t| ConfdQuery |]
, simpleField "rsalt" [t| String |]
])
-- | Client side helper function for creating requests. It automatically fills
-- in some default values.
newConfdRequest :: ConfdRequestType -> ConfdQuery -> IO ConfdRequest
newConfdRequest reqType query = do
rsalt <- newUUID
return $ ConfdRequest ConstantUtils.confdProtocolVersion reqType query rsalt
$(buildObject "ConfdReply" "confdReply"
[ simpleField "protocol" [t| Int |]
, simpleField "status" [t| ConfdReplyStatus |]
, simpleField "answer" [t| JSValue |]
, simpleField "serial" [t| Int |]
])
$(buildObject "SignedMessage" "signedMsg"
[ simpleField "hmac" [t| String |]
, simpleField "msg" [t| String |]
, simpleField "salt" [t| String |]
])
-- | Data type containing information used by the Confd client.
data ConfdClient = ConfdClient
{ hmacKey :: HashKey -- ^ The hmac used for authentication
, peers :: [String] -- ^ The list of nodes to query
, serverPort :: S.PortNumber -- ^ The port where confd server is listening
}
| kawamuray/ganeti | src/Ganeti/Confd/Types.hs | gpl-2.0 | 5,365 | 0 | 11 | 1,231 | 1,057 | 617 | 440 | 109 | 1 |
module BadIntel.TestData where
import Control.Lens
import BadIntel.Types.Agent
import BadIntel.Types.Agency
{- The UK agency. Will probably be moved to some config file. -}
directorRank = (Rank "Director" Politics Nothing)
ukHierarchy :: Hierarchy
ukHierarchy = Hierarchy directorRank
[Hierarchy (Rank "Political director" Politics Nothing)
[Hierarchy (Rank "Foreign office liaison" Politics Nothing) []
,Hierarchy (Rank "Treasury negociator" Budget Nothing) []
,Hierarchy (Rank "Chief analyst" Intel (Just $ view analyzing))
[Hierarchy (Rank "Allies strategy analyst" Intel Nothing) []
,Hierarchy (Rank "Sovietology analyst" Intel Nothing) []
,Hierarchy (Rank "Third-world analyst" Intel Nothing) []]]
,Hierarchy (Rank "Intelligence director" Raw (Just $ view collecting))
[Hierarchy (Rank "European director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "Soviet block director" Raw (Just$ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "Asia director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "Africa director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "South-America director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "North-America director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]
,Hierarchy (Rank "Middle-East director" Raw (Just $ view collecting))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]]
,Hierarchy (Rank "Operational director" Countering (Just $ view loyalty))
[Hierarchy (Rank "Foreign actions director" OffensiveMissions (Just $ view hiding))
[Hierarchy (Rank "Operative" OffensiveMissions Nothing) []
,Hierarchy (Rank "Operative" OffensiveMissions Nothing) []
,Hierarchy (Rank "Operative" OffensiveMissions Nothing) []]
,Hierarchy (Rank "Domestic actions director" DefensiveMissions (Just $ view fighting))
[Hierarchy (Rank "Operative" DefensiveMissions Nothing) []
,Hierarchy (Rank "Operative" DefensiveMissions Nothing) []
,Hierarchy (Rank "Operative" DefensiveMissions Nothing) []]
,Hierarchy (Rank "Covert operations director" Infiltration (Just $ view convincing))
[Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []
,Hierarchy (Rank "Operative" Raw Nothing) []]]
,Hierarchy (Rank "Counter-intelligence director" Countering (Just $ view loyalty))
[Hierarchy (Rank "Counter-terrorism director" Countering (Just $ view fighting))
[Hierarchy (Rank "Operative" Countering Nothing) []
,Hierarchy (Rank "Operative" Countering Nothing) []
,Hierarchy (Rank "Operative" Countering Nothing) []]
,Hierarchy (Rank "Counter-espionnage director" Countering (Just $ view analyzing))
[Hierarchy (Rank "Operative" Countering Nothing) []
,Hierarchy (Rank "Operative" Countering Nothing) []
,Hierarchy (Rank "Operative" Countering Nothing) []]]]
ukAgency' = buildAgency ukHierarchy
| Raveline/BadIntel | testsuite/BadIntel/TestData.hs | gpl-2.0 | 4,562 | 0 | 14 | 1,379 | 1,348 | 679 | 669 | 66 | 1 |
module Data.Dns.Parser where
import Data.Attoparsec.ByteString
import Data.Bits
{-
What does a DNS record look like?
-}
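-- A minimal sketch answering the question above, assuming the RFC 1035 wire
-- format: a DNS message starts with a 12-byte big-endian header (ID, flags,
-- and four section counts). The names below (DnsHeader, word16be, dnsHeader)
-- are illustrative assumptions, not an established API of this package.
data DnsHeader = DnsHeader
    { dnsId      :: Int
    , dnsFlags   :: Int
    , dnsQdCount :: Int  -- number of questions
    , dnsAnCount :: Int  -- number of answer records
    , dnsNsCount :: Int  -- number of authority records
    , dnsArCount :: Int  -- number of additional records
    } deriving (Show)

-- Parse a big-endian 16-bit field as an Int.
word16be :: Parser Int
word16be = do
    hi <- anyWord8
    lo <- anyWord8
    return (fromIntegral hi `shiftL` 8 .|. fromIntegral lo)

-- Parse the fixed-size DNS message header.
dnsHeader :: Parser DnsHeader
dnsHeader =
    DnsHeader <$> word16be <*> word16be <*> word16be
              <*> word16be <*> word16be <*> word16be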
| cabrera/haskell-parsing-tutorial | src/Data/Dns/Parser.hs | gpl-3.0 | 124 | 0 | 4 | 20 | 19 | 13 | 6 | 3 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
module Language.Verse.Transform.Code where
import Text.Blaze.Html5 as H
import Language.Verse.Renderer
import Language.Verse.Renderer.Html
import System.IO
import System.Process
import Control.Monad.IO.Class
import Control.Monad.State
import Control.Monad.Trans
data CodeTC = CodeTC { pygmentsCommand :: String } deriving (Show)
instance Transform HtmlRenderContext CodeTC where
renderTransform (CodeTC { pygmentsCommand }) s = do
let (language, code) = break (== ':') s
parsedCode <- liftIO $
readProcess pygmentsCommand ["-f", "html", "-l", language, "-O", "noclasses"] (tail code)
return $ H.preEscapedToHtml parsedCode
| sykora/verse | src/Language/Verse/Transform/Code.hs | gpl-3.0 | 740 | 0 | 12 | 125 | 190 | 111 | 79 | 18 | 0 |
module Peer.Handshake.Conduit where
import Peer.Handshake.Parse
import Peer.Handshake.Put
import Torrent.Env
import HTorrentPrelude
import Data.Conduit.Attoparsec
import Data.Conduit.Serialization.Binary
sourceHandshake :: (MonadReader TorrentInfo m, MonadThrow m) =>
Producer m ByteString
sourceHandshake = ask >>= sourcePut . runReaderT putHandshake
sinkHandshake :: MonadThrow m =>
Consumer ByteString m (ByteString, ByteString)
sinkHandshake = sinkParser parseHandshake
| ian-mi/hTorrent | Peer/Handshake/Conduit.hs | gpl-3.0 | 486 | 0 | 7 | 60 | 116 | 66 | 50 | -1 | -1 |
module Problem61 (Tree(..), countLeaves, leaves, leaf, buildTree) where
data Tree a = Empty | Node a (Tree a) (Tree a)
deriving (Show)
countLeaves :: Tree a -> Integer
countLeaves Empty = 0
countLeaves (Node _ Empty Empty) = 1
countLeaves (Node _ left right) = countLeaves left + countLeaves right
leaves :: Tree a -> [a]
leaves Empty = []
leaves (Node a Empty Empty) = [a]
leaves (Node _ left right) = leaves left ++ leaves right
leaf :: Show a => a -> Tree a
leaf x = Node x Empty Empty
internals :: Tree a -> [a]
internals Empty = []
internals (Node _ Empty Empty) = []
internals (Node x left right) = [x] ++ internals left ++ internals right
atLevel :: Tree a -> Integer -> [a]
atLevel Empty _ = []
atLevel (Node x _ _) 1 = [x]
atLevel (Node _ left right) n = recur left ++ recur right
  where recur tree = atLevel tree $ pred n
buildTree :: Integer -> Tree Char
buildTree h = build 1
where build x | x > h = Empty
| otherwise = Node 's' (build (2*x)) (build (2*x+1))
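-- Illustrative usage (values follow from the definitions above):
-- countLeaves (buildTree 7) == 4
-- atLevel (buildTree 7) 3 == "ssss"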
| wando-hs/H-99 | src/Problem61.hs | gpl-3.0 | 987 | 0 | 13 | 228 | 496 | 250 | 246 | 25 | 1 |
-- | Parsec-based parser for Boogie 2
module Boogie.Parser (
program,
type_,
expression,
statement,
decl
) where
import Boogie.AST
import Boogie.Position
import Boogie.Tokens
import Boogie.Util
import Data.List
import Data.Map ((!), elems)
import Text.ParserCombinators.Parsec hiding (token, label)
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Expr
import Control.Monad
import Control.Applicative ((<$>), (<*>), (<*), (*>))
{- Interface -}
-- | Program parser
program :: Parser Program
program = do
whiteSpace
p <- many decl
eof
return $ Program p
{- Lexical analysis -}
opNames :: [String]
opNames = elems unOpTokens ++ (elems binOpTokens \\ keywords) ++ otherOps
opStart :: [Char]
opStart = nub (map head opNames)
opLetter :: [Char]
opLetter = nub (concatMap tail opNames)
boogieDef :: P.LanguageDef st
boogieDef = P.LanguageDef
commentStart
commentEnd
commentLine
False
(letter <|> oneOf identifierChars)
(alphaNum <|> oneOf identifierChars)
(oneOf opStart)
(oneOf opLetter)
keywords
opNames
True
lexer :: P.TokenParser ()
lexer = P.makeTokenParser boogieDef
identifier = P.identifier lexer
reserved = P.reserved lexer
reservedOp = P.reservedOp lexer
charLiteral = P.charLiteral lexer
stringLiteral = P.stringLiteral lexer
natural = P.natural lexer
integer = P.integer lexer
symbol = P.symbol lexer
whiteSpace = P.whiteSpace lexer
angles = P.angles lexer
brackets = P.brackets lexer
parens = P.parens lexer
braces = P.braces lexer
semi = P.semi lexer
comma = P.comma lexer
commaSep = P.commaSep lexer
commaSep1 = P.commaSep1 lexer
{- Types -}
typeAtom :: Parser Type
typeAtom = choice [
reserved "int" >> return IntType,
reserved "bool" >> return BoolType,
-- bit vector: [JEF] Temp fix
reserved "bv" >> integer >>= (\i -> return (IdType ("bv" ++ show i) [])),
parens type_
]
typeArgs :: Parser [Id]
typeArgs = try (angles (commaSep1 identifier)) <|> return []
mapType :: Parser Type
mapType = do
args <- typeArgs
domains <- brackets (commaSep1 type_)
range <- type_
return $ MapType args domains range
typeCtorArgs :: Parser [Type]
typeCtorArgs = choice [
do
x <- typeAtom
xs <- option [] typeCtorArgs
return $ x : xs,
do
x <- identifier
xs <- option [] typeCtorArgs
return $ IdType x [] : xs,
do
x <- mapType
return [x]
]
-- | Type parser
type_ :: Parser Type
type_ = choice [
typeAtom,
mapType,
do
id <- identifier
args <- option [] typeCtorArgs
return $ IdType id args
] <?> "type"
{- Expressions -}
qop :: Parser QOp
qop = (reserved "forall" >> return Forall)
<|> (reserved "exists" >> return Exists)
<|> (reserved "lambda" >> return Lambda)
atom :: Parser BareExpression
atom = choice [
reserved "false" >> return ff,
reserved "true" >> return tt,
numeral <$> natural,
varOrCall,
old,
ifThenElse,
node <$> try (parens expression),
parens quantified
]
where
varOrCall = do
id <- identifier
(parens (commaSep expression) >>= (return . Application id)) <|> (return $ Var id)
old = do
reserved "old"
e <- parens expression
return $ Old e
ifThenElse = do
reserved "if"
cond <- expression
reserved "then"
thenExpr <- expression
reserved "else"
elseExpr <- expression
return $ IfExpr cond thenExpr elseExpr
quantified = do
op <- qop
args <- typeArgs
vars <- case args of
[] -> commaSep1 idsType
_ -> commaSep idsType
reservedOp "::"
case op of
Lambda -> return []
_ -> many trigAttr
-- trig <- case op of
-- _ -> hasTrigger -- [JEF]: This is not fully correct!
-- -- Lambda -> return []
-- -- _ -> many trigAttr
e <- expression
return $ Quantified False op args (ungroup vars) e
arrayExpression :: Parser Expression
arrayExpression = do
e <- attachPosBefore atom
mapOps <- many (brackets (mapOp))
return $ foldr (.) id (reverse mapOps) e
where
mapOp = do
args <- commaSep1 expression
option (inheritPos ((flip MapSelection) args)) (do
reservedOp ":="
e <- expression
return $ inheritPos (flip ((flip MapUpdate) args) e))
coercionExpression :: Parser Expression
coercionExpression = do
e <- arrayExpression
coercedTos <- many coercedTo
return $ foldr (.) id (reverse coercedTos) e
where
coercedTo = do
reservedOp ":"
t <- type_
return $ inheritPos ((flip Coercion) t)
-- | Expression parser
expression :: Parser Expression
expression = buildExpressionParser table coercionExpression <?> "expression"
table = [[unOp Neg, unOp Not],
[binOp Times AssocLeft, binOp Div AssocLeft, binOp Mod AssocLeft],
[binOp Plus AssocLeft, binOp Minus AssocLeft],
--[binOp Concat AssocLeft],
[binOp Eq AssocNone, binOp Neq AssocNone, binOp Ls AssocNone, binOp Leq AssocNone, binOp Gt AssocNone, binOp Geq AssocNone, binOp Lc AssocNone],
[binOp And AssocLeft], -- ToDo: && and || on the same level but do not interassociate
[binOp Or AssocLeft],
[binOp Implies AssocRight, binOp Explies AssocLeft], -- Mixing is prevented by different associativities
[binOp Equiv AssocRight]]
where
binOp op assoc = Infix (reservedOp (binOpTokens ! op) >> return (\e1 e2 -> attachPos (position e1) (BinaryExpression op e1 e2))) assoc
unOp op = Prefix (do
pos <- getPosition
reservedOp (unOpTokens ! op)
return (\e -> attachPos pos (UnaryExpression op e)))
wildcardExpression :: Parser WildcardExpression
wildcardExpression = (expression >>= return . Expr) <|> (reservedOp "*" >> return Wildcard)
{- Statements -}
lhs :: Parser (Id, [[Expression]])
lhs = do
id <- identifier
selects <- many (brackets (commaSep1 expression))
return (id, selects)
assign :: Parser BareStatement
assign = do
lefts <- commaSep1 lhs
reservedOp ":="
rights <- commaSep1 expression
semi
return $ Assign lefts rights
call :: Parser BareStatement
call = do
reserved "call"
void (many attribute)
lefts <- option [] (try lhss)
id <- identifier
args <- parens (commaSep expression)
semi
return $ Call lefts id args
where
lhss = do
ids <- commaSep1 identifier
reservedOp ":="
return ids
callForall :: Parser BareStatement
callForall = do
reserved "call"
reserved "forall"
id <- identifier
args <- parens (commaSep wildcardExpression)
semi
return $ CallForall id args
ifStatement :: Parser BareStatement
ifStatement = do
reserved "if"
cond <- parens wildcardExpression
thenBlock <- block
elseBlock <- optionMaybe (reserved "else" >> (block <|> elseIf))
return $ If cond thenBlock elseBlock
where
elseIf = do
i <- attachPosBefore ifStatement
return $ singletonBlock i
whileStatement :: Parser BareStatement
whileStatement = do
reserved "while"
cond <- parens wildcardExpression
invs <- many loopInvariant
body <- block
return $ While cond invs body
where
loopInvariant = do
free <- hasKeyword "free"
reserved "invariant"
e <- expression
semi
return (SpecClause LoopInvariant free e)
-- | Statement parser
statement :: Parser Statement
statement = attachPosBefore (choice [
do { reserved "assert"; attrs <- many attribute; e <- expression; semi; return $ Predicate attrs (SpecClause Inline False e) },
do { reserved "assume"; attrs <- many attribute; e <- expression; semi; return $ Predicate attrs (SpecClause Inline True e) },
do { reserved "havoc"; ids <- commaSep1 identifier; semi; return $ Havoc ids },
assign,
try call,
callForall,
ifStatement,
whileStatement,
do { reserved "break"; id <- optionMaybe identifier; semi; return $ Break id },
do { reserved "return"; semi; return Return },
do { reserved "goto"; ids <- commaSep1 identifier; semi; return $ Goto ids }
] <?> "statement")
label :: Parser Id
label = do
id <- identifier
reservedOp ":"
return id
<?> "label"
lStatement :: Parser LStatement
lStatement = attachPosBefore $ do
lbs <- many (try label)
s <- statement
return (lbs, s)
statementList :: Parser Block
statementList = do
lstatements <- many (try lStatement)
pos1 <- getPosition
lempty <- many (try label)
pos2 <- getPosition
return $ if null lempty
then lstatements
else lstatements ++ [attachPos pos1 (lempty, attachPos pos2 Skip)]
block :: Parser Block
block = braces statementList
{- Declarations -}
newType :: Parser NewType
newType = do
name <- identifier
args <- many identifier
value <- optionMaybe (reservedOp "=" >> type_ )
return $ NewType name args value
typeDecl :: Parser BareDecl
typeDecl = do
reserved "type"
void (many attribute)
ts <- commaSep newType
semi
return $ TypeDecl ts
parentEdge :: Parser ParentEdge
parentEdge = do
unique <- hasKeyword "unique"
id <- identifier
return (unique, id)
constantDecl :: Parser BareDecl
constantDecl = do
reserved "const"
void (many attribute)
unique <- hasKeyword "unique"
ids <- idsType
orderSpec <- optionMaybe (reserved "extends" >> commaSep parentEdge)
complete <- hasKeyword "complete"
semi
return $ ConstantDecl unique (fst ids) (snd ids) orderSpec complete
functionDecl :: Parser BareDecl
functionDecl = do
reserved "function"
attrs <- many attribute
name <- identifier
tArgs <- typeArgs
args <- parens (option [] (try namedArgs <|> unnamedArgs))
ret <- returns <|> returnType
body <- (semi >> return Nothing) <|> (Just <$> braces expression)
return $ FunctionDecl attrs name tArgs args ret body
where
unnamedArgs = map (\t -> (Nothing, t)) <$> commaSep1 type_
namedArgs = map (\(id, t) -> (Just id, t)) . ungroup <$> commaSep1 idsType
returns = do
reserved "returns"
parens fArg
fArg = do
name <- optionMaybe (try (identifier <* reservedOp ":"))
t <- type_
return (name, t)
returnType = do
reservedOp ":"
t <- type_
return (Nothing, t)
axiomDecl :: Parser BareDecl
axiomDecl = do
reserved "axiom"
void (many attribute)
e <- expression
semi
return $ AxiomDecl e
varList :: Parser [IdTypeWhere]
varList = do
reserved "var"
void (many attribute) -- attributes are voided (not sure if we want to keep them?)
vars <- commaSep1 idsTypeWhere
semi
return $ ungroupWhere vars
varDecl :: Parser BareDecl
varDecl = VarDecl <$> varList
body :: Parser Body
body = braces (do
locals <- many varList
statements <- statementList
return (locals, statements))
procDecl :: Parser BareDecl
procDecl = do
reserved "procedure"
void (many attribute)
name <- identifier
tArgs <- typeArgs
args <- parens (commaSep idsTypeWhere)
rets <- option [] (reserved "returns" >> parens (commaSep idsTypeWhere))
noBody name tArgs args rets <|> withBody name tArgs args rets
where
noBody name tArgs args rets = do
semi
specs <- many spec
return (ProcedureDecl name tArgs (ungroupWhere args) (ungroupWhere rets) specs Nothing)
withBody name tArgs args rets = do
specs <- many spec
b <- body
return (ProcedureDecl name tArgs (ungroupWhere args) (ungroupWhere rets) specs (Just b))
implDecl :: Parser BareDecl
implDecl = do
reserved "implementation"
void (many attribute)
name <- identifier
tArgs <- typeArgs
args <- parens (commaSep idsType)
rets <- option [] (reserved "returns" >> parens (commaSep idsType))
bs <- many body -- [JEF] Why many body here?
return $ ImplementationDecl name tArgs (ungroup args) (ungroup rets) bs
-- | Top-level declaration parser
decl :: Parser Decl
decl = attachPosBefore (choice [
typeDecl,
constantDecl,
functionDecl,
axiomDecl,
varDecl,
procDecl,
implDecl
] <?> "declaration")
{- Contracts -}
spec :: Parser Contract
spec = do
free <- hasKeyword "free"
choice [
do
reserved "requires"
e <- expression
semi
return $ Requires free e,
do
reserved "modifies"
ids <- commaSep identifier
semi
return $ Modifies free ids,
do
reserved "ensures"
e <- expression
semi
return $ Ensures free e]
{- Misc -}
hasTrigger :: Parser Bool
hasTrigger = option False (trigger >> return True)
hasKeyword :: String -> Parser Bool
hasKeyword s = option False (reserved s >> return True)
idsType :: Parser ([Id], Type)
idsType = do
ids <- commaSep1 identifier
reservedOp ":"
t <- type_
return (ids, t)
ungroup :: [([Id], Type)] -> [(IdType)]
ungroup = concatMap (\x -> zip (fst x) (repeat (snd x)))
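-- For illustration (hypothetical input): ungroup [(["x", "y"], IntType)]
-- evaluates to [("x", IntType), ("y", IntType)].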
idsTypeWhere :: Parser ([Id], Type, Expression)
idsTypeWhere = do
ids <- idsType
pos <- getPosition
e <- option (attachPos pos tt) (reserved "where" >> expression)
return ((fst ids), (snd ids), e)
ungroupWhere :: [([Id], Type, Expression)] -> [IdTypeWhere]
ungroupWhere = concatMap ungroupWhereOne
where ungroupWhereOne (ids, t, e) = zipWith3 IdTypeWhere ids (repeat t) (repeat e)
trigAttr :: Parser ()
trigAttr = (try trigger) <|> (void attribute) <?> "attribute or trigger"
trigger :: Parser ()
trigger = (void (braces (commaSep1 expression))) <?> "trigger"
-- | attribute are the "tag-along" msg use, not affecting the overall Boogie program
-- However, it can be the case that the frontend uses attribute to indicate 'skip the proof of this'
-- see This is Boogie2 page 45
attribute :: Parser Attribute
attribute = (braces (do
reservedOp ":"
tag <- identifier
vals <- commaSep ((EAttr <$> expression) <|> (SAttr <$> stringLiteral))
return $ Attribute tag vals
)) <?> "attribute"
| emptylambda/BLT | src/Boogie/Parser.hs | gpl-3.0 | 13,810 | 0 | 20 | 3,239 | 4,762 | 2,314 | 2,448 | 427 | 3 |
{-# LANGUAGE Haskell2010 #-}
module Main where
import Control.Applicative ((<$>))
import Control.Concurrent
import Control.Monad
import Data.Word
import Graphics.Gloss.Interface.IO.Game
import Network
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import GuiChat.Types
import GuiChat.Canvas
import GuiChat.EventHandling
main = withSocketsDo $ do
-- Command line args processing
(host, port) <- getArgs >>= \args -> case args of
[host, port] -> do
let portNumber = read port :: Word16
return (host, PortNumber $ fromIntegral portNumber)
_ -> do
getProgName >>= printf "Usage: %s <host> <port>\n"
exitFailure
-- Internal communication and outside connection
recv <- newEmptyMVar
send <- newEmptyMVar
handle <- connectTo host port
hSetBuffering handle LineBuffering
-- Receive loop
forkIO $ forever $ do
hIsEOF handle >>= \eof -> case eof of
True -> do
putStrLn "Server has gone away."
exitSuccess
_ -> do
str <- hGetLine handle
putMVar recv (read str)
-- Send loop
forkIO $ forever $ do
shape <- takeMVar send
hPutStrLn handle (show shape)
putMVar recv shape
-- Graphics loop
playIO
(InWindow "GUI Chat" (600, 400) (100, 100))
white
100
emptyCanvas
renderCanvas
(handleEvent send)
(updateCanvas recv)
updateCanvas :: MVar Image -> Float -> Canvas -> IO Canvas
updateCanvas recv _ canvas@(Canvas { cPictures = pics }) = do
pics' <- maybe pics (updatePictures pics) <$> tryTakeMVar recv
return (canvas { cPictures = pics' })
where
updatePictures pics image = pics ++ [mkPicture image]
| scravy/GuiChat | src/chat.hs | gpl-3.0 | 1,839 | 0 | 20 | 547 | 501 | 254 | 247 | 52 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.Goals.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing goal. This method supports patch semantics.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.goals.patch@.
module Network.Google.Resource.Analytics.Management.Goals.Patch
(
-- * REST Resource
ManagementGoalsPatchResource
-- * Creating a Request
, managementGoalsPatch
, ManagementGoalsPatch
-- * Request Lenses
, mgpWebPropertyId
, mgpGoalId
, mgpProFileId
, mgpPayload
, mgpAccountId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.goals.patch@ method which the
-- 'ManagementGoalsPatch' request conforms to.
type ManagementGoalsPatchResource =
"analytics" :>
"v3" :>
"management" :>
"accounts" :>
Capture "accountId" Text :>
"webproperties" :>
Capture "webPropertyId" Text :>
"profiles" :>
Capture "profileId" Text :>
"goals" :>
Capture "goalId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Goal :> Patch '[JSON] Goal
-- | Updates an existing goal. This method supports patch semantics.
--
-- /See:/ 'managementGoalsPatch' smart constructor.
data ManagementGoalsPatch = ManagementGoalsPatch'
{ _mgpWebPropertyId :: !Text
, _mgpGoalId :: !Text
, _mgpProFileId :: !Text
, _mgpPayload :: !Goal
, _mgpAccountId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagementGoalsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mgpWebPropertyId'
--
-- * 'mgpGoalId'
--
-- * 'mgpProFileId'
--
-- * 'mgpPayload'
--
-- * 'mgpAccountId'
managementGoalsPatch
:: Text -- ^ 'mgpWebPropertyId'
-> Text -- ^ 'mgpGoalId'
-> Text -- ^ 'mgpProFileId'
-> Goal -- ^ 'mgpPayload'
-> Text -- ^ 'mgpAccountId'
-> ManagementGoalsPatch
managementGoalsPatch pMgpWebPropertyId_ pMgpGoalId_ pMgpProFileId_ pMgpPayload_ pMgpAccountId_ =
ManagementGoalsPatch'
{ _mgpWebPropertyId = pMgpWebPropertyId_
, _mgpGoalId = pMgpGoalId_
, _mgpProFileId = pMgpProFileId_
, _mgpPayload = pMgpPayload_
, _mgpAccountId = pMgpAccountId_
}
-- | Web property ID to update the goal.
mgpWebPropertyId :: Lens' ManagementGoalsPatch Text
mgpWebPropertyId
= lens _mgpWebPropertyId
(\ s a -> s{_mgpWebPropertyId = a})
-- | Index of the goal to be updated.
mgpGoalId :: Lens' ManagementGoalsPatch Text
mgpGoalId
= lens _mgpGoalId (\ s a -> s{_mgpGoalId = a})
-- | View (Profile) ID to update the goal.
mgpProFileId :: Lens' ManagementGoalsPatch Text
mgpProFileId
= lens _mgpProFileId (\ s a -> s{_mgpProFileId = a})
-- | Multipart request metadata.
mgpPayload :: Lens' ManagementGoalsPatch Goal
mgpPayload
= lens _mgpPayload (\ s a -> s{_mgpPayload = a})
-- | Account ID to update the goal.
mgpAccountId :: Lens' ManagementGoalsPatch Text
mgpAccountId
= lens _mgpAccountId (\ s a -> s{_mgpAccountId = a})
instance GoogleRequest ManagementGoalsPatch where
type Rs ManagementGoalsPatch = Goal
type Scopes ManagementGoalsPatch =
'["https://www.googleapis.com/auth/analytics.edit"]
requestClient ManagementGoalsPatch'{..}
= go _mgpAccountId _mgpWebPropertyId _mgpProFileId
_mgpGoalId
(Just AltJSON)
_mgpPayload
analyticsService
where go
= buildClient
(Proxy :: Proxy ManagementGoalsPatchResource)
mempty
| rueshyna/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/Goals/Patch.hs | mpl-2.0 | 4,610 | 0 | 20 | 1,158 | 624 | 368 | 256 | 99 | 1 |
-- Web2RSS - A feed generator, that keeps tabs on web sites
-- Copyright (C) 2015 Daniel Landau
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
module Lib
-- ( makeFeed,
-- createFeedInfo,
-- migration
-- )
where
import MyPrelude
import Network.HTTP.Conduit
import Text.HTML.TagSoup
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as Char8
import Text.StringLike
import Data.Maybe
import Database.Persist.Quasi
import Database.Persist.MySQL
import Database.Persist.TH
import Data.List (sortBy)
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format
import Data.Text as T (Text, pack, unpack)
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Logger
import Control.Monad.Trans.Except
import Control.Monad.Trans
import HTMLEntities.Text as HText
import Text.Feed.Constructor
import Text.Feed.Export
import Text.XML.Light.Output
import Text.Feed.Types
import Data.UUID.V4 as V4
import Data.UUID
import Data.List (sort, find)
import qualified Text.Atom.Feed as AFeed
import qualified Text.Feed.Types as FTypes
import Network.HTTP.Types.Header
import Crypto.Random (getSystemDRG, randomBytesGenerate)
import Crypto.Hash (Digest, MD5, hash)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Algorithm.Diff
import Data.Algorithm.DiffOutput
data Response = Response { body :: B.ByteString, contentType :: B.ByteString }
fetch :: Text -> IO Lib.Response
fetch url = do
request <- parseUrl $ T.unpack url
manager <- newManager tlsManagerSettings
response <- httpLbs request manager
let headers = responseHeaders response
let contentType' = maybe "text/html" snd $ find ((== hContentType ) . fst) headers
let body' = L.toStrict $ responseBody response
return $ Response body' contentType'
parse :: Lib.Response -> B.ByteString
parse response =
if B.isPrefixOf "text/html" (contentType response)
then let tags = parseTags (body response)
bodyTag = getBody tags
withoutScripts = filterScripts bodyTag
texts = filter isTagText withoutScripts
in innerText texts
else body response
getBody :: [Tag B.ByteString] -> [Tag B.ByteString]
getBody tags = dropWhile (~/= body) tags
where body :: String
body = "<body>"
filterScripts :: StringLike str => [Tag str] -> [Tag str]
filterScripts tags = reverse $ filterScripts' False [] tags
filterScripts' :: StringLike str => Bool -> [Tag str] -> [Tag str] -> [Tag str]
filterScripts' _ result [] = result
filterScripts' inScript result (tag : tags)
| inScript && not (isTagCloseName scriptTagName tag) =
filterScripts' True result tags
| inScript && isTagCloseName scriptTagName tag =
filterScripts' False result tags
| not inScript && (isTagOpenName scriptTagName tag) =
filterScripts' True result tags
| otherwise =
filterScripts' False (tag : result) tags
where scriptTagName = Text.StringLike.fromString "script"
share [ mkPersist sqlSettings
, mkMigrate "migrateAll"
]
$(persistFileWith lowerCaseSettings "models")
getSaved :: (MonadBaseControl IO m, MonadIO m) => Text -> SqlPersistT m [Page]
getSaved url = do
pageEntities <- selectList [PageUrl ==. url] [Desc PageFetched]
return $ map entityVal pageEntities
format :: UTCTime -> String
format = formatTime defaultTimeLocale rfc822DateFormat
makeItem :: (Text , UTCTime , Text , String) -> Item
makeItem (url, when, itemId, content) = atomEntryToItem $
item { AFeed.entryContent = Just (AFeed.HTMLContent $ "<pre>" ++ T.unpack (HText.text (T.pack content)) ++ "</pre>"), AFeed.entryLinks = [AFeed.nullLink (T.unpack url)] }
where item = AFeed.nullEntry ("uurn:uuid:" ++ (T.unpack itemId)) (AFeed.TextString (T.unpack url ++ " has changed")) (format when)
prettyPrintFeed :: Feed -> String
prettyPrintFeed = ppElement . xmlFeed
prettyPrintDiff :: B.ByteString -> B.ByteString -> String
prettyPrintDiff old new =
ppDiff diffs
where diffs :: [Diff [String]]
diffs = getGroupedDiff oldLines newLines
oldLines = lines $ Char8.unpack old
newLines = lines $ Char8.unpack new
parseFromSaved :: Page -> B.ByteString
parseFromSaved page = Lib.parse . responseFromPage $ page
throw :: Monad m => Text -> MyMonadStack m a
throw reason = lift . lift . throwE $ reason
hasFeed feedHash = do
feedEntityMaybe <- getFeedInfo feedHash
return $ isJust feedEntityMaybe
type MyMonadStack m = SqlPersistT (LoggingT (ExceptT Text m))
idForHash :: (MonadBaseControl IO m, MonadIO m) => Text -> MyMonadStack m (Key FeedInfo)
idForHash feedHash = do
feedEntityMaybe <- getFeedInfo feedHash
let feedIdMaybe = fmap (\(Entity key _) -> key) feedEntityMaybe
maybe
(throw $ T.pack "The given hash does not match any existing feed")
return feedIdMaybe
addUrlToFeed feedHash url = do
id <- idForHash feedHash
insert_ (Url id url)
modifyUrlInFeed feedHash urlId url = do
id <- idForHash feedHash
updateWhere [UrlFeedId ==. id, UrlId ==. urlId] [UrlUrl =. url]
return ()
deleteUrlFromFeed feedHash urlId = do
id <- idForHash feedHash
deleteWhere [UrlFeedId ==. id, UrlId ==. urlId]
urlsForFeed feedHash = do
id <- idForHash feedHash
urlEntities <- selectList [UrlFeedId ==. id] []
let urlIdPairs = map (\(Entity key (Url _ url)) -> ( url, key )) urlEntities
return urlIdPairs
responseFromPage page = Lib.Response (pageBody page) (pageContentType page)
itemsForUrl feedId url = do
response <- liftIO $ fetch url
let content = Lib.parse response
now <- liftIO $ getCurrentTime
saved <- getSaved url
let latestSaved = listToMaybe saved
let reverseSaved = reverse saved
let contents = map parseFromSaved reverseSaved
let contentPairs = zip ("" : contents) contents
let diffs = map (\(_1, _2) -> prettyPrintDiff _1 _2) contentPairs
let oldItems = reverse $ map (\(page, diff) -> (url, (pageFetched page), (pageUuid page), diff)) (zip reverseSaved diffs)
let isSame = maybe False (== content) (fmap parseFromSaved latestSaved)
if isSame
then return oldItems
else do
itemId <- liftIO $ V4.nextRandom
let oldContent = maybe "" parseFromSaved latestSaved
let diffContent = (prettyPrintDiff oldContent content)
insert_ (Page url (body response) (contentType response) now (toText itemId) feedId)
return ((url, now, (toText itemId), diffContent) : oldItems)
getRandomHash :: IO Text
getRandomHash = do
drg <- getSystemDRG
let (bytes, _) = randomBytesGenerate 16 drg
let byteString = bytes :: B.ByteString
let digest = hash byteString :: Digest MD5
return . T.pack . show $ digest
getFeedInfo :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) => Text -> SqlPersistT m (Maybe (Entity FeedInfo))
getFeedInfo feedHash =
selectFirst [FeedInfoHash ==. feedHash] []
createFeedInfo :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) => SqlPersistT m String
createFeedInfo = do
newUuid <- fmap toText $ liftIO V4.nextRandom
newHash <- liftIO getRandomHash
let newFeedInfo = FeedInfo newUuid newHash
insert_ newFeedInfo
return $ T.unpack newHash
require msg x =
maybe (throw msg) return x
makeFeed feedHash = do
feedEntityMaybe <- getFeedInfo feedHash
(Entity key feedInfo) <- require "feed not found" feedEntityMaybe
urls <- map fst <$> urlsForFeed feedHash
items <- mapM (itemsForUrl key) urls >>= return . concat
let feed = withFeedItems (map makeItem (sortBy (\(_, a, _, _) (_, b, _, _) -> compare b a) items)) $ feedFromAtom $
AFeed.nullFeed ("uurn:uuid:" ++ T.unpack (feedInfoUuid feedInfo))
(AFeed.TextString "Changes in the followed pages")
(maybe "" identity (listToMaybe . reverse . sort . (map (\(FTypes.AtomItem entry) ->
AFeed.entryUpdated entry)) $ (map makeItem items)) )
return $ prettyPrintFeed feed
migration :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) => SqlPersistT m ()
migration = runMigration migrateAll
| daniellandau/web2rss | src/Lib.hs | agpl-3.0 | 9,028 | 0 | 23 | 1,729 | 2,721 | 1,404 | 1,317 | 187 | 2 |
{-#LANGUAGE OverloadedStrings#-}
module Data.P440.XML.Instances.ROO where
import qualified Data.P440.Domain.ROO as ROO
import Data.P440.Domain.ComplexTypes
import Data.P440.XML.XML
import qualified Data.P440.XML.Instances.ComplexTypes as C
instance ToNode ROO.Файл where
toNode (ROO.Файл идЭС версПрог телОтпр
должнОтпр фамОтпр решОтмена) =
complex "Файл"
["ИдЭС" =: идЭС
,"ВерсПрог" =: версПрог
,"ТелОтпр" =: телОтпр
,"ДолжнОтпр" =: должнОтпр
,"ФамОтпр" =: фамОтпр]
[Sequence [решОтмена]]
instance ToNode ROO.РешОтмена where
toNode (ROO.РешОтмена номРешОт датаПодп видРеш номРешВО
датаРешВО номРешПр датаРешПр бик
наимБ номФ свНО свПл
счет кэсп руководитель) =
complex "РешОтмена"
["НомРешОт" =: номРешОт
,"ДатаПодп" =: датаПодп
,"ВидРеш" =: видРеш
,"НомРешВО" =: номРешВО
,"ДатаРешВО" =: датаРешВО
,"НомРешПр" =: номРешПр
,"ДатаРешПр" =: датаРешПр
,"БИК" =: бик
,"НаимБ" =: наимБ
,"НомФ" =: номФ]
[Sequence [C.свНО "СвНО" свНО]
,Sequence [свПл]
,Sequence счет
,Sequence кэсп
,Sequence [C.рукНО "Руководитель" руководитель]]
instance ToNode ROO.СвПл where
toNode (ROO.СвПл плательщик адрПлат) =
complex "СвПл"
[]
[Sequence [плательщик]
,Sequence [адрПлат]]
instance ToNode ROO.ПлЮЛИлиПлИП where
toNode (ROO.ПлЮЛ' плюл) = C.плЮЛ "ПлЮЛ" плюл
toNode (ROO.ПлИП' плип) = C.плФЛ "ПлИП" плип
instance ToNode ROO.Счет where
toNode (ROO.Счет номСч видСч) =
complex_ "Счет"
[ "НомСч" =: номСч
, "ВидСч" =: видСч]
instance ToNode ROO.КЭСП where
toNode (ROO.КЭСП идКЭСП валКЭСП) =
complex_ "КЭСП"
[ "ИдКЭСП" =: идКЭСП
, "ВалКЭСП" =: валКЭСП]
| Macil-dev/440P-old | src/Data/P440/XML/Instances/ROO.hs | unlicense | 2,828 | 0 | 11 | 851 | 1,318 | 668 | 650 | 56 | 0 |
module Main where
-- External Imports
import System.Environment( getArgs )
import System.Console.GetOpt
import Control.Applicative
import System.Exit
import System.IO
-- Project Imports
import Blackbox
main = do
args <- getArgs
case getOpt RequireOrder options args of
(flags, [], []) -> processFlags flags
(_, nonOpts, []) -> error $ "Invalid arguments: " ++ unwords nonOpts
(_, _, msgs) -> error $ concat msgs ++ usageInfo header options
data Flag = Version | Infile String | Markup String | GHCI String deriving (Show, Eq)
options :: [OptDescr Flag]
options = [
Option "v" ["version"] (NoArg Version) "show version number",
Option "f" ["file"] (ReqArg Infile "FILE") "file to be processed, this won't be edited",
Option "m" ["markup"] (ReqArg Markup "FILE") "marked up copy of the file to be processed, this may be edited",
Option "g" ["ghci"] (ReqArg GHCI "FILE") "path to ghci"
]
showVersion = do
putStrLn "Blackbox 0.1 by Darren Mowat"
exitSuccess
die = do
putStrLn header
exitFailure
header = "Usage: blackbox [OPTION...]"
processFlags :: [Flag] -> IO ()
processFlags f = do
let ver = lookupFlag "Version" f
let inf = lookupFlag "Infile" f
let mar = lookupFlag "Markup" f
let ghci = lookupFlag "Ghci" f
case lookupFlag "Version" f of
Just v -> showVersion
Nothing -> case (inf, mar, ghci) of
(Just (Infile ifile), Just (Markup mfile), Just (GHCI ghc)) -> do
resp <- runBlackbox ifile mfile ghc
case resp of
Left err -> do
hPutStrLn stderr err
exitFailure
Right file -> do
putStrLn file
exitSuccess
(_,_,_) -> die
lookupFlag :: String -> [Flag] -> Maybe Flag
lookupFlag _ [] = Nothing
lookupFlag "Version" (Version : fs) = Just Version
lookupFlag "Infile" (Infile f : fs) = Just (Infile f)
lookupFlag "Markup" (Markup f : fs) = Just (Markup f)
lookupFlag "Ghci" (GHCI f : fs) = Just (GHCI f)
lookupFlag x (f:fs) = lookupFlag x fs
| DarrenMowat/blackbox | src/Main.hs | unlicense | 2,145 | 0 | 20 | 615 | 716 | 361 | 355 | 53 | 4 |
-- | Query and update documents
{-# LANGUAGE OverloadedStrings, RecordWildCards, NamedFieldPuns, TupleSections, FlexibleContexts, FlexibleInstances, UndecidableInstances, MultiParamTypeClasses, GeneralizedNewtypeDeriving, StandaloneDeriving, TypeSynonymInstances, TypeFamilies, CPP, DeriveDataTypeable, ScopedTypeVariables, BangPatterns #-}
module Database.MongoDB.Query (
-- * Monad
Action, access, Failure(..), ErrorCode,
AccessMode(..), GetLastError, master, slaveOk, accessMode,
liftDB,
MongoContext(..), HasMongoContext(..),
-- * Database
Database, allDatabases, useDb, thisDatabase,
-- ** Authentication
Username, Password, auth, authMongoCR, authSCRAMSHA1,
-- * Collection
Collection, allCollections,
-- ** Selection
Selection(..), Selector, whereJS,
Select(select),
-- * Write
-- ** Insert
insert, insert_, insertMany, insertMany_, insertAll, insertAll_,
-- ** Update
save, replace, repsert, upsert, Modifier, modify, updateMany, updateAll,
WriteResult(..), UpdateOption(..), Upserted(..),
-- ** Delete
delete, deleteOne, deleteMany, deleteAll, DeleteOption(..),
-- * Read
-- ** Query
Query(..), QueryOption(NoCursorTimeout, TailableCursor, AwaitData, Partial),
Projector, Limit, Order, BatchSize,
explain, find, findOne, fetch,
findAndModify, findAndModifyOpts, FindAndModifyOpts(..), defFamUpdateOpts,
count, distinct,
-- *** Cursor
Cursor, nextBatch, next, nextN, rest, closeCursor, isCursorClosed,
-- ** Aggregate
Pipeline, AggregateConfig(..), aggregate, aggregateCursor,
-- ** Group
Group(..), GroupKey(..), group,
-- ** MapReduce
MapReduce(..), MapFun, ReduceFun, FinalizeFun, MROut(..), MRMerge(..),
MRResult, mapReduce, runMR, runMR',
-- * Command
Command, runCommand, runCommand1,
eval, retrieveServerData, ServerData(..)
) where
import Prelude hiding (lookup)
import Control.Exception (Exception, throwIO)
import Control.Monad (unless, replicateM, liftM, liftM2)
import Data.Default.Class (Default(..))
import Data.Int (Int32, Int64)
import Data.Either (lefts, rights)
import Data.List (foldl1')
import Data.Maybe (listToMaybe, catMaybes, isNothing)
import Data.Word (Word32)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (mappend)
#endif
import Data.Typeable (Typeable)
import System.Mem.Weak (Weak)
import qualified Control.Concurrent.MVar as MV
#if MIN_VERSION_base(4,6,0)
import Control.Concurrent.MVar.Lifted (MVar,
readMVar)
#else
import Control.Concurrent.MVar.Lifted (MVar, addMVarFinalizer,
readMVar)
#endif
import Control.Applicative ((<$>))
import Control.Exception (catch)
import Control.Monad (when, void)
import Control.Monad.Error (Error(..))
import Control.Monad.Reader (MonadReader, ReaderT, runReaderT, ask, asks, local)
import Control.Monad.Trans (MonadIO, liftIO)
import Data.Binary.Put (runPut)
import Data.Bson (Document, Field(..), Label, Val, Value(String, Doc, Bool),
Javascript, at, valueAt, lookup, look, genObjectId, (=:),
(=?), (!?), Val(..), ObjectId, Value(..))
import Data.Bson.Binary (putDocument)
import Data.Text (Text)
import qualified Data.Text as T
import Database.MongoDB.Internal.Protocol (Reply(..), QueryOption(..),
ResponseFlag(..), InsertOption(..),
UpdateOption(..), DeleteOption(..),
CursorId, FullCollection, Username,
Password, Pipe, Notice(..),
Request(GetMore, qOptions, qSkip,
qFullCollection, qBatchSize,
qSelector, qProjector),
pwKey, ServerData(..))
import Database.MongoDB.Internal.Util (loop, liftIOE, true1, (<.>))
import qualified Database.MongoDB.Internal.Protocol as P
import qualified Crypto.Nonce as Nonce
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B
import qualified Data.Either as E
import qualified Crypto.Hash.MD5 as MD5
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Crypto.MAC.HMAC as HMAC
import Data.Bits (xor)
import qualified Data.Map as Map
import Text.Read (readMaybe)
import Data.Maybe (fromMaybe)
-- * Monad
type Action = ReaderT MongoContext
-- ^ A monad on top of m (which must be a MonadIO) that may access the database and may fail with a DB 'Failure'
access :: (MonadIO m) => Pipe -> AccessMode -> Database -> Action m a -> m a
-- ^ Run action against database on server at other end of pipe. Use access mode for any reads and writes.
-- Throw 'Failure' in case of any error.
access mongoPipe mongoAccessMode mongoDatabase action = runReaderT action MongoContext{..}
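-- Added usage sketch (not part of the upstream API): run a read-only action
-- over an already-open 'Pipe'. The database name "baseball", the collection
-- "team", and the selector are made-up examples.
exampleAccess :: Pipe -> IO (Maybe Document)
exampleAccess pipe =
    access pipe slaveOk "baseball" $
        findOne (select ["name" =: ("Yankees" :: Text)] "team")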
-- | A connection failure, or a read or write exception like cursor expired or inserting a duplicate key.
-- Note, unexpected data from the server is not a Failure, rather it is a programming error (you should call 'error' in this case) because the client and server are incompatible and requires a programming change.
data Failure =
ConnectionFailure IOError -- ^ TCP connection ('Pipeline') failed. May work if you try again on the same Mongo 'Connection' which will create a new Pipe.
    | CursorNotFoundFailure CursorId  -- ^ Cursor expired because it wasn't accessed for over 10 minutes, or this cursor came from a different server than the one you are currently connected to (perhaps a failover happened between servers in a replica set)
| QueryFailure ErrorCode String -- ^ Query failed for some reason as described in the string
| WriteFailure Int ErrorCode String -- ^ Error observed by getLastError after a write, error description is in string, index of failed document is the first argument
| WriteConcernFailure Int String -- ^ Write concern error. It's reported only by insert, update, delete commands. Not by wire protocol.
| DocNotFound Selection -- ^ 'fetch' found no document matching selection
| AggregateFailure String -- ^ 'aggregate' returned an error
| CompoundFailure [Failure] -- ^ When we need to aggregate several failures and report them.
| ProtocolFailure Int String -- ^ The structure of the returned documents doesn't match what we expected
deriving (Show, Eq, Typeable)
instance Exception Failure
type ErrorCode = Int
-- ^ Error code from getLastError or query failure
instance Error Failure where strMsg = error
-- ^ 'fail' is treated the same as a programming 'error'. In other words, don't use it.
-- | Type of reads and writes to perform
data AccessMode =
ReadStaleOk -- ^ Read-only action, reading stale data from a slave is OK.
| UnconfirmedWrites -- ^ Read-write action, slave not OK, every write is fire & forget.
| ConfirmWrites GetLastError -- ^ Read-write action, slave not OK, every write is confirmed with getLastError.
deriving Show
type GetLastError = Document
-- ^ Parameters for getLastError command. For example @[\"w\" =: 2]@ tells the server to wait for the write to reach at least two servers in replica set before acknowledging. See <http://www.mongodb.org/display/DOCS/Last+Error+Commands> for more options.
class Result a where
isFailed :: a -> Bool
data WriteResult = WriteResult
{ failed :: Bool
, nMatched :: Int
, nModified :: Maybe Int
, nRemoved :: Int
    -- ^ MongoDB servers before 2.6 do not allow this value to be calculated.
    -- This field is meaningless when the number of modified documents cannot be calculated.
, upserted :: [Upserted]
, writeErrors :: [Failure]
, writeConcernErrors :: [Failure]
} deriving Show
instance Result WriteResult where
isFailed = failed
instance Result (Either a b) where
isFailed (Left _) = True
isFailed _ = False
data Upserted = Upserted
{ upsertedIndex :: Int
, upsertedId :: ObjectId
} deriving Show
master :: AccessMode
-- ^ Same as 'ConfirmWrites' []
master = ConfirmWrites []
slaveOk :: AccessMode
-- ^ Same as 'ReadStaleOk'
slaveOk = ReadStaleOk
accessMode :: (Monad m) => AccessMode -> Action m a -> Action m a
-- ^ Run action with given 'AccessMode'
accessMode mode act = local (\ctx -> ctx {mongoAccessMode = mode}) act
readMode :: AccessMode -> ReadMode
readMode ReadStaleOk = StaleOk
readMode _ = Fresh
writeMode :: AccessMode -> WriteMode
writeMode ReadStaleOk = Confirm []
writeMode UnconfirmedWrites = NoConfirm
writeMode (ConfirmWrites z) = Confirm z
-- | Values needed when executing a db operation
data MongoContext = MongoContext {
mongoPipe :: Pipe, -- ^ operations read/write to this pipelined TCP connection to a MongoDB server
mongoAccessMode :: AccessMode, -- ^ read/write operation will use this access mode
mongoDatabase :: Database } -- ^ operations query/update this database
mongoReadMode :: MongoContext -> ReadMode
mongoReadMode = readMode . mongoAccessMode
mongoWriteMode :: MongoContext -> WriteMode
mongoWriteMode = writeMode . mongoAccessMode
class HasMongoContext env where
mongoContext :: env -> MongoContext
instance HasMongoContext MongoContext where
mongoContext = id
liftDB :: (MonadReader env m, HasMongoContext env, MonadIO m)
=> Action IO a
-> m a
liftDB m = do
env <- ask
liftIO $ runReaderT m (mongoContext env)
-- * Database
type Database = Text
allDatabases :: (MonadIO m) => Action m [Database]
-- ^ List all databases residing on server
allDatabases = (map (at "name") . at "databases") `liftM` useDb "admin" (runCommand1 "listDatabases")
thisDatabase :: (Monad m) => Action m Database
-- ^ Current database in use
thisDatabase = asks mongoDatabase
useDb :: (Monad m) => Database -> Action m a -> Action m a
-- ^ Run action against given database
useDb db act = local (\ctx -> ctx {mongoDatabase = db}) act
-- * Authentication
auth :: MonadIO m => Username -> Password -> Action m Bool
-- ^ Authenticate with the current database (if server is running in secure mode). Return whether authentication was successful or not. Reauthentication is required for every new pipe. SCRAM-SHA-1 will be used for server versions 3.0+, MONGO-CR for lower versions.
auth un pw = do
let serverVersion = liftM (at "version") $ useDb "admin" $ runCommand ["buildinfo" =: (1 :: Int)]
mmv <- liftM (readMaybe . T.unpack . head . T.splitOn ".") $ serverVersion
maybe (return False) performAuth mmv
where
performAuth majorVersion =
case (majorVersion >= (3 :: Int)) of
True -> authSCRAMSHA1 un pw
False -> authMongoCR un pw
authMongoCR :: (MonadIO m) => Username -> Password -> Action m Bool
-- ^ Authenticate with the current database, using the MongoDB-CR authentication mechanism (default in MongoDB server < 3.0)
authMongoCR usr pss = do
n <- at "nonce" `liftM` runCommand ["getnonce" =: (1 :: Int)]
true1 "ok" `liftM` runCommand ["authenticate" =: (1 :: Int), "user" =: usr, "nonce" =: n, "key" =: pwKey n usr pss]
authSCRAMSHA1 :: MonadIO m => Username -> Password -> Action m Bool
-- ^ Authenticate with the current database, using the SCRAM-SHA-1 authentication mechanism (default in MongoDB server >= 3.0)
authSCRAMSHA1 un pw = do
let hmac = HMAC.hmac SHA1.hash 64
nonce <- liftIO (Nonce.withGenerator Nonce.nonce128 >>= return . B64.encode)
let firstBare = B.concat [B.pack $ "n=" ++ (T.unpack un) ++ ",r=", nonce]
let client1 = ["saslStart" =: (1 :: Int), "mechanism" =: ("SCRAM-SHA-1" :: String), "payload" =: (B.unpack . B64.encode $ B.concat [B.pack "n,,", firstBare]), "autoAuthorize" =: (1 :: Int)]
server1 <- runCommand client1
shortcircuit (true1 "ok" server1) $ do
let serverPayload1 = B64.decodeLenient . B.pack . at "payload" $ server1
let serverData1 = parseSCRAM serverPayload1
let iterations = read . B.unpack $ Map.findWithDefault "1" "i" serverData1
let salt = B64.decodeLenient $ Map.findWithDefault "" "s" serverData1
let snonce = Map.findWithDefault "" "r" serverData1
shortcircuit (B.isInfixOf nonce snonce) $ do
let withoutProof = B.concat [B.pack "c=biws,r=", snonce]
let digestS = B.pack $ T.unpack un ++ ":mongo:" ++ T.unpack pw
let digest = B16.encode $ MD5.hash digestS
let saltedPass = scramHI digest salt iterations
let clientKey = hmac saltedPass (B.pack "Client Key")
let storedKey = SHA1.hash clientKey
let authMsg = B.concat [firstBare, B.pack ",", serverPayload1, B.pack ",", withoutProof]
let clientSig = hmac storedKey authMsg
let pval = B64.encode . BS.pack $ BS.zipWith xor clientKey clientSig
let clientFinal = B.concat [withoutProof, B.pack ",p=", pval]
let serverKey = hmac saltedPass (B.pack "Server Key")
let serverSig = B64.encode $ hmac serverKey authMsg
let client2 = ["saslContinue" =: (1 :: Int), "conversationId" =: (at "conversationId" server1 :: Int), "payload" =: (B.unpack $ B64.encode clientFinal)]
server2 <- runCommand client2
shortcircuit (true1 "ok" server2) $ do
let serverPayload2 = B64.decodeLenient . B.pack $ at "payload" server2
let serverData2 = parseSCRAM serverPayload2
let serverSigComp = Map.findWithDefault "" "v" serverData2
shortcircuit (serverSig == serverSigComp) $ do
let done = true1 "done" server2
if done
then return True
else do
let client2Step2 = [ "saslContinue" =: (1 :: Int)
, "conversationId" =: (at "conversationId" server1 :: Int)
, "payload" =: String ""]
server3 <- runCommand client2Step2
shortcircuit (true1 "ok" server3) $ do
return True
where
shortcircuit True f = f
shortcircuit False _ = return False
scramHI :: B.ByteString -> B.ByteString -> Int -> B.ByteString
scramHI digest salt iters = snd $ foldl com (u1, u1) [1..(iters-1)]
where
hmacd = HMAC.hmac SHA1.hash 64 digest
u1 = hmacd (B.concat [salt, BS.pack [0, 0, 0, 1]])
com (u,uc) _ = let u' = hmacd u in (u', BS.pack $ BS.zipWith xor uc u')
parseSCRAM :: B.ByteString -> Map.Map B.ByteString B.ByteString
parseSCRAM = Map.fromList . fmap cleanup . (fmap $ T.breakOn "=") . T.splitOn "," . T.pack . B.unpack
where cleanup (t1, t2) = (B.pack $ T.unpack t1, B.pack . T.unpack $ T.drop 1 t2)
retrieveServerData :: (MonadIO m) => Action m ServerData
retrieveServerData = do
d <- runCommand1 "isMaster"
let newSd = ServerData
{ isMaster = (fromMaybe False $ lookup "ismaster" d)
, minWireVersion = (fromMaybe 0 $ lookup "minWireVersion" d)
, maxWireVersion = (fromMaybe 0 $ lookup "maxWireVersion" d)
, maxMessageSizeBytes = (fromMaybe 48000000 $ lookup "maxMessageSizeBytes" d)
, maxBsonObjectSize = (fromMaybe (16 * 1024 * 1024) $ lookup "maxBsonObjectSize" d)
, maxWriteBatchSize = (fromMaybe 1000 $ lookup "maxWriteBatchSize" d)
}
return newSd
-- * Collection
type Collection = Text
-- ^ Collection name (not prefixed with database)
allCollections :: MonadIO m => Action m [Collection]
-- ^ List all collections in this database
allCollections = do
p <- asks mongoPipe
let sd = P.serverData p
if (maxWireVersion sd <= 2)
then do
db <- thisDatabase
docs <- rest =<< find (query [] "system.namespaces") {sort = ["name" =: (1 :: Int)]}
return . filter (not . isSpecial db) . map dropDbPrefix $ map (at "name") docs
else do
r <- runCommand1 "listCollections"
let curData = do
(Doc curDoc) <- r !? "cursor"
(curId :: Int64) <- curDoc !? "id"
(curNs :: Text) <- curDoc !? "ns"
(firstBatch :: [Value]) <- curDoc !? "firstBatch"
return $ (curId, curNs, ((catMaybes (map cast' firstBatch)) :: [Document]))
case curData of
Nothing -> return []
Just (curId, curNs, firstBatch) -> do
db <- thisDatabase
nc <- newCursor db curNs 0 $ return $ Batch Nothing curId firstBatch
docs <- rest nc
return $ catMaybes $ map (\d -> (d !? "name")) docs
where
dropDbPrefix = T.tail . T.dropWhile (/= '.')
isSpecial db col = T.any (== '$') col && db <.> col /= "local.oplog.$main"
-- * Selection
data Selection = Select {selector :: Selector, coll :: Collection} deriving (Show, Eq)
-- ^ Selects documents in collection that match selector
type Selector = Document
-- ^ Filter for a query, analogous to the where clause in SQL. @[]@ matches all documents in collection. @[\"x\" =: a, \"y\" =: b]@ is analogous to @where x = a and y = b@ in SQL. See <http://www.mongodb.org/display/DOCS/Querying> for full selector syntax.
whereJS :: Selector -> Javascript -> Selector
-- ^ Add Javascript predicate to selector, in which case a document must match both selector and predicate
whereJS sel js = ("$where" =: js) : sel
class Select aQueryOrSelection where
select :: Selector -> Collection -> aQueryOrSelection
-- ^ 'Query' or 'Selection' that selects documents in collection that match selector. The choice of type depends on use, for example, in @'find' (select sel col)@ it is a Query, and in @'delete' (select sel col)@ it is a Selection.
instance Select Selection where
select = Select
instance Select Query where
select = query
-- * Write
data WriteMode =
NoConfirm -- ^ Submit writes without receiving acknowledgments. Fast. Assumes writes succeed even though they may not.
    | Confirm GetLastError  -- ^ Receive an acknowledgment after every write, and raise an exception if one says the write failed. This is accomplished by sending the getLastError command, with the given 'GetLastError' parameters, after every write.
deriving (Show, Eq)
write :: Notice -> Action IO (Maybe Document)
-- ^ Send write to server, and if write-mode is 'Confirm' then also send a getLastError request and raise 'WriteFailure' if it reports an error.
write notice = asks mongoWriteMode >>= \mode -> case mode of
NoConfirm -> do
pipe <- asks mongoPipe
liftIOE ConnectionFailure $ P.send pipe [notice]
return Nothing
Confirm params -> do
let q = query (("getlasterror" =: (1 :: Int)) : params) "$cmd"
pipe <- asks mongoPipe
Batch _ _ [doc] <- do
r <- queryRequest False q {limit = 1}
rr <- liftIO $ request pipe [notice] r
fulfill rr
return $ Just doc
-- ** Insert
insert :: (MonadIO m) => Collection -> Document -> Action m Value
-- ^ Insert document into collection and return its \"_id\" value, which is created automatically if not supplied
insert col doc = do
doc' <- liftIO $ assignId doc
res <- insertBlock [] col (0, [doc'])
case res of
Left failure -> liftIO $ throwIO failure
Right r -> return $ head r
insert_ :: (MonadIO m) => Collection -> Document -> Action m ()
-- ^ Same as 'insert' except don't return _id
insert_ col doc = insert col doc >> return ()
insertMany :: (MonadIO m) => Collection -> [Document] -> Action m [Value]
-- ^ Insert documents into collection and return their \"_id\" values,
-- which are created automatically if not supplied.
-- If a document fails to be inserted (e.g. due to a duplicate key)
-- then the remaining documents are aborted, and LastError is set.
-- An exception will be thrown if any error occurs.
insertMany = insert' []
insertMany_ :: (MonadIO m) => Collection -> [Document] -> Action m ()
-- ^ Same as 'insertMany' except don't return _ids
insertMany_ col docs = insertMany col docs >> return ()
insertAll :: (MonadIO m) => Collection -> [Document] -> Action m [Value]
-- ^ Insert documents into collection and return their \"_id\" values,
-- which are created automatically if not supplied. If a document fails
-- to be inserted (eg. due to duplicate key) then remaining docs
-- are still inserted.
insertAll = insert' [KeepGoing]
insertAll_ :: (MonadIO m) => Collection -> [Document] -> Action m ()
-- ^ Same as 'insertAll' except don't return _ids
insertAll_ col docs = insertAll col docs >> return ()
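-- Added usage sketch: insert one document, then a batch whose generated
-- \"_id\" values are returned. The collection and field names are made-up
-- examples; run it inside 'access'.
exampleInserts :: Action IO [Value]
exampleInserts = do
    _ <- insert "team" ["name" =: ("Yankees" :: Text), "league" =: ("American" :: Text)]
    insertMany "team"
        [ ["name" =: ("Red Sox" :: Text), "league" =: ("American" :: Text)]
        , ["name" =: ("Mets" :: Text), "league" =: ("National" :: Text)]
        ]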
insertCommandDocument :: [InsertOption] -> Collection -> [Document] -> Document -> Document
insertCommandDocument opts col docs writeConcern =
[ "insert" =: col
, "ordered" =: (KeepGoing `notElem` opts)
, "documents" =: docs
, "writeConcern" =: writeConcern
]
takeRightsUpToLeft :: [Either a b] -> [b]
takeRightsUpToLeft l = E.rights $ takeWhile E.isRight l
insert' :: (MonadIO m)
=> [InsertOption] -> Collection -> [Document] -> Action m [Value]
-- ^ Insert documents into collection and return their \"_id\" values, which are created automatically if not supplied
insert' opts col docs = do
p <- asks mongoPipe
let sd = P.serverData p
docs' <- liftIO $ mapM assignId docs
mode <- asks mongoWriteMode
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
let docSize = sizeOfDocument $ insertCommandDocument opts col [] writeConcern
let ordered = (not (KeepGoing `elem` opts))
let preChunks = splitAtLimit
(maxBsonObjectSize sd - docSize)
-- size of auxiliary part of insert
-- document should be subtracted from
-- the overall size
(maxWriteBatchSize sd)
docs'
let chunks =
if ordered
then takeRightsUpToLeft preChunks
else rights preChunks
let lens = map length chunks
let lSums = 0 : (zipWith (+) lSums lens)
chunkResults <- interruptibleFor ordered (zip lSums chunks) $ insertBlock opts col
let lchunks = lefts preChunks
when (not $ null lchunks) $ do
liftIO $ throwIO $ head lchunks
let lresults = lefts chunkResults
when (not $ null lresults) $ liftIO $ throwIO $ head lresults
return $ concat $ rights chunkResults
insertBlock :: (MonadIO m)
=> [InsertOption] -> Collection -> (Int, [Document]) -> Action m (Either Failure [Value])
-- ^ This will fail if the list of documents exceeds the server's size or batch-count restrictions
insertBlock _ _ (_, []) = return $ Right []
insertBlock opts col (prevCount, docs) = do
db <- thisDatabase
p <- asks mongoPipe
let sd = P.serverData p
if (maxWireVersion sd < 2)
then do
res <- liftDB $ write (Insert (db <.> col) opts docs)
let errorMessage = do
jRes <- res
em <- lookup "err" jRes
return $ WriteFailure prevCount (maybe 0 id $ lookup "code" jRes) em
        -- In older versions of the wire protocol we can't really say which document failed.
        -- So we just report the accumulated number of documents in the previous blocks.
case errorMessage of
Just failure -> return $ Left failure
Nothing -> return $ Right $ map (valueAt "_id") docs
else do
mode <- asks mongoWriteMode
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
doc <- runCommand $ insertCommandDocument opts col docs writeConcern
case (look "writeErrors" doc, look "writeConcernError" doc) of
(Nothing, Nothing) -> return $ Right $ map (valueAt "_id") docs
(Just (Array errs), Nothing) -> do
let writeErrors = map (anyToWriteError prevCount) $ errs
let errorsWithFailureIndex = map (addFailureIndex prevCount) writeErrors
return $ Left $ CompoundFailure errorsWithFailureIndex
(Nothing, Just err) -> do
return $ Left $ WriteFailure
prevCount
(maybe 0 id $ lookup "ok" doc)
(show err)
(Just (Array errs), Just writeConcernErr) -> do
let writeErrors = map (anyToWriteError prevCount) $ errs
let errorsWithFailureIndex = map (addFailureIndex prevCount) writeErrors
return $ Left $ CompoundFailure $ (WriteFailure
prevCount
(maybe 0 id $ lookup "ok" doc)
(show writeConcernErr)) : errorsWithFailureIndex
(Just unknownValue, Nothing) -> do
return $ Left $ ProtocolFailure prevCount $ "Expected array of errors. Received: " ++ show unknownValue
(Just unknownValue, Just writeConcernErr) -> do
return $ Left $ CompoundFailure $ [ ProtocolFailure prevCount $ "Expected array of errors. Received: " ++ show unknownValue
, WriteFailure prevCount (maybe 0 id $ lookup "ok" doc) $ show writeConcernErr]
splitAtLimit :: Int -> Int -> [Document] -> [Either Failure [Document]]
splitAtLimit maxSize maxCount list = chop (go 0 0 []) list
where
go :: Int -> Int -> [Document] -> [Document] -> ((Either Failure [Document]), [Document])
go _ _ res [] = (Right $ reverse res, [])
go curSize curCount [] (x:xs) |
((curSize + (sizeOfDocument x) + 2 + curCount) > maxSize) =
(Left $ WriteFailure 0 0 "One document is too big for the message", xs)
go curSize curCount res (x:xs) =
if ( ((curSize + (sizeOfDocument x) + 2 + curCount) > maxSize)
            -- the extra 2 accounts for the enclosing brackets, and curCount
            -- for the commas between documents, which we need to take into
            -- account
|| ((curCount + 1) > maxCount))
then
(Right $ reverse res, x:xs)
else
go (curSize + (sizeOfDocument x)) (curCount + 1) (x:res) xs
chop :: ([a] -> (b, [a])) -> [a] -> [b]
chop _ [] = []
chop f as = let (b, as') = f as in b : chop f as'
sizeOfDocument :: Document -> Int
sizeOfDocument d = fromIntegral $ LBS.length $ runPut $ putDocument d
assignId :: Document -> IO Document
-- ^ Assign a unique value to _id field if missing
assignId doc = if any (("_id" ==) . label) doc
then return doc
else (\oid -> ("_id" =: oid) : doc) `liftM` genObjectId
-- ** Update
save :: (MonadIO m)
=> Collection -> Document -> Action m ()
-- ^ Save document to collection, meaning insert it if it's new (has no \"_id\" field) or upsert it if it's not new (has an \"_id\" field)
save col doc = case look "_id" doc of
Nothing -> insert_ col doc
Just i -> upsert (Select ["_id" := i] col) doc
replace :: (MonadIO m)
=> Selection -> Document -> Action m ()
-- ^ Replace first document in selection with given document
replace = update []
repsert :: (MonadIO m)
=> Selection -> Document -> Action m ()
-- ^ Replace first document in selection with given document, or insert document if selection is empty
repsert = update [Upsert]
{-# DEPRECATED repsert "use upsert instead" #-}
upsert :: (MonadIO m)
=> Selection -> Document -> Action m ()
-- ^ Update first document in selection with given document, or insert document if selection is empty
upsert = update [Upsert]
type Modifier = Document
-- ^ Update operations on fields in a document. See <http://www.mongodb.org/display/DOCS/Updating#Updating-ModifierOperations>
modify :: (MonadIO m)
=> Selection -> Modifier -> Action m ()
-- ^ Update all documents in selection using given modifier
modify = update [MultiUpdate]
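-- Added usage sketch: apply a \"$set\" modifier to every document matching the
-- selection ('modify' uses the MultiUpdate option). Collection, field names and
-- values are made-up examples.
exampleModify :: Action IO ()
exampleModify = modify
    (select ["league" =: ("American" :: Text)] "team")
    ["$set" := Doc ["league" =: ("AL" :: Text)]]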
update :: (MonadIO m)
=> [UpdateOption] -> Selection -> Document -> Action m ()
-- ^ Update first document in selection using updater document, unless 'MultiUpdate' option is supplied then update all documents in selection. If 'Upsert' option is supplied then treat updater as document and insert it if selection is empty.
update opts (Select sel col) up = do
db <- thisDatabase
ctx <- ask
liftIO $ runReaderT (void $ write (Update (db <.> col) opts sel up)) ctx
updateCommandDocument :: Collection -> Bool -> [Document] -> Document -> Document
updateCommandDocument col ordered updates writeConcern =
[ "update" =: col
, "ordered" =: ordered
, "updates" =: updates
, "writeConcern" =: writeConcern
]
{-| Bulk update operation. If one update fails, the remaining documents are not
 - updated. The returned value is currently only a placeholder. With MongoDB
 - servers before 2.6 the updates are sent one by one; to receive error messages
 - on those versions you need to use confirmed writes, otherwise the error list
 - will be empty and the result will look successful even if errors occurred.
 - With 2.6 and later the bulk update feature of MongoDB is used.
 -}
updateMany :: (MonadIO m)
=> Collection
-> [(Selector, Document, [UpdateOption])]
-> Action m WriteResult
updateMany = update' True
{-| Bulk update operation. If one update fails, it proceeds with the remaining
 - documents. With MongoDB servers before 2.6 the updates are sent one by one;
 - to receive error messages on those versions you need to use confirmed
 - writes, otherwise the error list will be empty and the result will look
 - successful even if errors occurred. With 2.6 and later the bulk update
 - feature of MongoDB is used.
 -}
updateAll :: (MonadIO m)
=> Collection
-> [(Selector, Document, [UpdateOption])]
-> Action m WriteResult
updateAll = update' False
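-- Added usage sketch: bulk-update two selectors in one round trip; the third
-- tuple element carries per-update options such as 'Upsert' or 'MultiUpdate'.
-- Collection, selectors and modifiers are made-up examples.
exampleUpdateMany :: Action IO WriteResult
exampleUpdateMany = updateMany "team"
    [ (["name" =: ("Yankees" :: Text)], ["$set" := Doc ["league" =: ("AL" :: Text)]], [])
    , (["name" =: ("Giants" :: Text)], ["$set" := Doc ["league" =: ("NL" :: Text)]], [Upsert])
    ]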
update' :: (MonadIO m)
=> Bool
-> Collection
-> [(Selector, Document, [UpdateOption])]
-> Action m WriteResult
update' ordered col updateDocs = do
p <- asks mongoPipe
let sd = P.serverData p
let updates = map (\(s, d, os) -> [ "q" =: s
, "u" =: d
, "upsert" =: (Upsert `elem` os)
, "multi" =: (MultiUpdate `elem` os)])
updateDocs
mode <- asks mongoWriteMode
ctx <- ask
liftIO $ do
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
let docSize = sizeOfDocument $ updateCommandDocument
col
ordered
[]
writeConcern
let preChunks = splitAtLimit
(maxBsonObjectSize sd - docSize)
-- size of auxiliary part of update
-- document should be subtracted from
-- the overall size
(maxWriteBatchSize sd)
updates
let chunks =
if ordered
then takeRightsUpToLeft preChunks
else rights preChunks
let lens = map length chunks
let lSums = 0 : (zipWith (+) lSums lens)
blocks <- interruptibleFor ordered (zip lSums chunks) $ \b -> do
ur <- runReaderT (updateBlock ordered col b) ctx
return ur
`catch` \(e :: Failure) -> do
return $ WriteResult True 0 Nothing 0 [] [e] []
let failedTotal = or $ map failed blocks
let updatedTotal = sum $ map nMatched blocks
let modifiedTotal =
if all isNothing $ map nModified blocks
then Nothing
else Just $ sum $ catMaybes $ map nModified blocks
let totalWriteErrors = concat $ map writeErrors blocks
let totalWriteConcernErrors = concat $ map writeConcernErrors blocks
let upsertedTotal = concat $ map upserted blocks
return $ WriteResult
failedTotal
updatedTotal
modifiedTotal
0 -- nRemoved
upsertedTotal
totalWriteErrors
totalWriteConcernErrors
`catch` \(e :: Failure) -> return $ WriteResult True 0 Nothing 0 [] [e] []
updateBlock :: (MonadIO m)
=> Bool -> Collection -> (Int, [Document]) -> Action m WriteResult
updateBlock ordered col (prevCount, docs) = do
p <- asks mongoPipe
let sd = P.serverData p
if (maxWireVersion sd < 2)
then liftIO $ ioError $ userError "updateMany doesn't support mongodb older than 2.6"
else do
mode <- asks mongoWriteMode
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
doc <- runCommand $ updateCommandDocument col ordered docs writeConcern
let n = fromMaybe 0 $ doc !? "n"
let writeErrorsResults =
case look "writeErrors" doc of
Nothing -> WriteResult False 0 (Just 0) 0 [] [] []
Just (Array err) -> WriteResult True 0 (Just 0) 0 [] (map (anyToWriteError prevCount) err) []
Just unknownErr -> WriteResult
True
0
(Just 0)
0
[]
[ ProtocolFailure
prevCount
$ "Expected array of error docs, but received: "
++ (show unknownErr)]
[]
let writeConcernResults =
case look "writeConcernError" doc of
Nothing -> WriteResult False 0 (Just 0) 0 [] [] []
Just (Doc err) -> WriteResult
True
0
(Just 0)
0
[]
[]
[ WriteConcernFailure
(fromMaybe (-1) $ err !? "code")
(fromMaybe "" $ err !? "errmsg")
]
Just unknownErr -> WriteResult
True
0
(Just 0)
0
[]
[]
[ ProtocolFailure
prevCount
$ "Expected doc in writeConcernError, but received: "
++ (show unknownErr)]
let upsertedList = map docToUpserted $ fromMaybe [] (doc !? "upserted")
let successResults = WriteResult False n (doc !? "nModified") 0 upsertedList [] []
return $ foldl1' mergeWriteResults [writeErrorsResults, writeConcernResults, successResults]
interruptibleFor :: (Monad m, Result b) => Bool -> [a] -> (a -> m b) -> m [b]
interruptibleFor ordered = go []
where
go !res [] _ = return $ reverse res
go !res (x:xs) f = do
y <- f x
if isFailed y && ordered
then return $ reverse (y:res)
else go (y:res) xs f
mergeWriteResults :: WriteResult -> WriteResult -> WriteResult
mergeWriteResults
(WriteResult failed1 nMatched1 nModified1 nDeleted1 upserted1 writeErrors1 writeConcernErrors1)
(WriteResult failed2 nMatched2 nModified2 nDeleted2 upserted2 writeErrors2 writeConcernErrors2) =
(WriteResult
(failed1 || failed2)
(nMatched1 + nMatched2)
((liftM2 (+)) nModified1 nModified2)
(nDeleted1 + nDeleted2)
-- This function is used in foldl1' function. The first argument is the accumulator.
-- The list in the accumulator is usually longer than the subsequent value which goes in the second argument.
-- So, changing the order of list concatenation allows us to keep linear complexity of the
-- whole list accumulation process.
(upserted2 ++ upserted1)
(writeErrors2 ++ writeErrors1)
(writeConcernErrors2 ++ writeConcernErrors1)
)
docToUpserted :: Document -> Upserted
docToUpserted doc = Upserted ind uid
where
ind = at "index" doc
uid = at "_id" doc
docToWriteError :: Document -> Failure
docToWriteError doc = WriteFailure ind code msg
where
ind = at "index" doc
code = at "code" doc
msg = at "errmsg" doc
-- ** Delete
delete :: (MonadIO m)
=> Selection -> Action m ()
-- ^ Delete all documents in selection
delete = deleteHelper []
deleteOne :: (MonadIO m)
=> Selection -> Action m ()
-- ^ Delete first document in selection
deleteOne = deleteHelper [SingleRemove]
deleteHelper :: (MonadIO m)
=> [DeleteOption] -> Selection -> Action m ()
deleteHelper opts (Select sel col) = do
db <- thisDatabase
ctx <- ask
liftIO $ runReaderT (void $ write (Delete (db <.> col) opts sel)) ctx
{-| Bulk delete operation. If one delete fails, the remaining documents are not
 - deleted. The returned value is currently only a placeholder. With MongoDB
 - servers before 2.6 the deletes are sent one by one; with 2.6 and later the
 - bulk delete feature of MongoDB is used.
 -}
deleteMany :: (MonadIO m)
=> Collection
-> [(Selector, [DeleteOption])]
-> Action m WriteResult
deleteMany = delete' True
{-| Bulk delete operation. If one delete fails, it proceeds with the remaining
 - documents. The returned value is currently only a placeholder. With MongoDB
 - servers before 2.6 the deletes are sent one by one; with 2.6 and later the
 - bulk delete feature of MongoDB is used.
 -}
deleteAll :: (MonadIO m)
=> Collection
-> [(Selector, [DeleteOption])]
-> Action m WriteResult
deleteAll = delete' False
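-- Added usage sketch: bulk-delete with per-selector options; 'SingleRemove'
-- restricts the second selector to one matching document. Collection and
-- selectors are made-up examples.
exampleDeleteMany :: Action IO WriteResult
exampleDeleteMany = deleteMany "team"
    [ (["league" =: ("National" :: Text)], [])
    , (["name" =: ("Yankees" :: Text)], [SingleRemove])
    ]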
deleteCommandDocument :: Collection -> Bool -> [Document] -> Document -> Document
deleteCommandDocument col ordered deletes writeConcern =
[ "delete" =: col
, "ordered" =: ordered
, "deletes" =: deletes
, "writeConcern" =: writeConcern
]
delete' :: (MonadIO m)
=> Bool
-> Collection
-> [(Selector, [DeleteOption])]
-> Action m WriteResult
delete' ordered col deleteDocs = do
p <- asks mongoPipe
let sd = P.serverData p
let deletes = map (\(s, os) -> [ "q" =: s
, "limit" =: if SingleRemove `elem` os
then (1 :: Int) -- Remove only one matching
else (0 :: Int) -- Remove all matching
])
deleteDocs
mode <- asks mongoWriteMode
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
let docSize = sizeOfDocument $ deleteCommandDocument col ordered [] writeConcern
let preChunks = splitAtLimit
(maxBsonObjectSize sd - docSize)
-- size of auxiliary part of delete
-- document should be subtracted from
-- the overall size
(maxWriteBatchSize sd)
deletes
let chunks =
if ordered
then takeRightsUpToLeft preChunks
else rights preChunks
ctx <- ask
let lens = map length chunks
let lSums = 0 : (zipWith (+) lSums lens)
blockResult <- liftIO $ interruptibleFor ordered (zip lSums chunks) $ \b -> do
dr <- runReaderT (deleteBlock ordered col b) ctx
return dr
`catch` \(e :: Failure) -> do
return $ WriteResult True 0 Nothing 0 [] [e] []
return $ foldl1' mergeWriteResults blockResult
addFailureIndex :: Int -> Failure -> Failure
addFailureIndex i (WriteFailure ind code s) = WriteFailure (ind + i) code s
addFailureIndex _ f = f
deleteBlock :: (MonadIO m)
=> Bool -> Collection -> (Int, [Document]) -> Action m WriteResult
deleteBlock ordered col (prevCount, docs) = do
p <- asks mongoPipe
let sd = P.serverData p
if (maxWireVersion sd < 2)
then liftIO $ ioError $ userError "deleteMany doesn't support mongodb older than 2.6"
else do
mode <- asks mongoWriteMode
let writeConcern = case mode of
NoConfirm -> ["w" =: (0 :: Int)]
Confirm params -> params
doc <- runCommand $ deleteCommandDocument col ordered docs writeConcern
let n = fromMaybe 0 $ doc !? "n"
let successResults = WriteResult False 0 Nothing n [] [] []
let writeErrorsResults =
case look "writeErrors" doc of
Nothing -> WriteResult False 0 Nothing 0 [] [] []
Just (Array err) -> WriteResult True 0 Nothing 0 [] (map (anyToWriteError prevCount) err) []
Just unknownErr -> WriteResult
True
0
Nothing
0
[]
[ ProtocolFailure
prevCount
$ "Expected array of error docs, but received: "
++ (show unknownErr)]
[]
let writeConcernResults =
case look "writeConcernError" doc of
Nothing -> WriteResult False 0 Nothing 0 [] [] []
Just (Doc err) -> WriteResult
True
0
Nothing
0
[]
[]
[ WriteConcernFailure
(fromMaybe (-1) $ err !? "code")
(fromMaybe "" $ err !? "errmsg")
]
Just unknownErr -> WriteResult
True
0
Nothing
0
[]
[]
[ ProtocolFailure
prevCount
$ "Expected doc in writeConcernError, but received: "
++ (show unknownErr)]
return $ foldl1' mergeWriteResults [successResults, writeErrorsResults, writeConcernResults]
anyToWriteError :: Int -> Value -> Failure
anyToWriteError _ (Doc d) = docToWriteError d
anyToWriteError ind _ = ProtocolFailure ind "Unknown bson value"
-- * Read
data ReadMode =
Fresh -- ^ read from master only
| StaleOk -- ^ read from slave ok
deriving (Show, Eq)
readModeOption :: ReadMode -> [QueryOption]
readModeOption Fresh = []
readModeOption StaleOk = [SlaveOK]
-- ** Query
-- | Use 'select' to create a basic query with defaults, then modify if desired. For example, @(select sel col) {limit = 10}@
data Query = Query {
options :: [QueryOption], -- ^ Default = []
selection :: Selection,
project :: Projector, -- ^ \[\] = all fields. Default = []
skip :: Word32, -- ^ Number of initial matching documents to skip. Default = 0
limit :: Limit, -- ^ Maximum number of documents to return, 0 = no limit. Default = 0
sort :: Order, -- ^ Sort results by this order, [] = no sort. Default = []
snapshot :: Bool, -- ^ If true assures no duplicates are returned, or objects missed, which were present at both the start and end of the query's execution (even if the object were updated). If an object is new during the query, or deleted during the query, it may or may not be returned, even with snapshot mode. Note that short query responses (less than 1MB) are always effectively snapshotted. Default = False
batchSize :: BatchSize, -- ^ The number of document to return in each batch response from the server. 0 means use Mongo default. Default = 0
hint :: Order -- ^ Force MongoDB to use this index, [] = no hint. Default = []
} deriving (Show, Eq)
type Projector = Document
-- ^ Fields to return, analogous to the select clause in SQL. @[]@ means return whole document (analogous to * in SQL). @[\"x\" =: 1, \"y\" =: 1]@ means return only @x@ and @y@ fields of each document. @[\"x\" =: 0]@ means return all fields except @x@.
type Limit = Word32
-- ^ Maximum number of documents to return, i.e. cursor will close after iterating over this number of documents. 0 means no limit.
type Order = Document
-- ^ Fields to sort by. Each one is associated with 1 or -1. Eg. @[\"x\" =: 1, \"y\" =: -1]@ means sort by @x@ ascending then @y@ descending
type BatchSize = Word32
-- ^ The number of document to return in each batch response from the server. 0 means use Mongo default.
query :: Selector -> Collection -> Query
-- ^ Selects documents in collection that match selector. It uses no query options, projects all fields, does not skip any documents, does not limit result size, uses default batch size, does not sort, does not hint, and does not snapshot.
query sel col = Query [] (Select sel col) [] 0 0 [] False 0 []
find :: MonadIO m => Query -> Action m Cursor
-- ^ Fetch documents satisfying query
find q@Query{selection, batchSize} = do
db <- thisDatabase
pipe <- asks mongoPipe
qr <- queryRequest False q
dBatch <- liftIO $ request pipe [] qr
newCursor db (coll selection) batchSize dBatch
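-- Added usage sketch: build a 'Query' with record-update syntax on top of
-- 'select', then drain the cursor with 'rest'. Collection, field names and the
-- selector are made-up examples.
exampleFindSorted :: Action IO [Document]
exampleFindSorted = do
    let q = (select ["league" =: ("American" :: Text)] "team")
                { sort = ["name" =: (1 :: Int)]
                , limit = 10
                , project = ["name" =: (1 :: Int)]
                }
    rest =<< find q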
findOne :: (MonadIO m) => Query -> Action m (Maybe Document)
-- ^ Fetch first document satisfying query or Nothing if none satisfy it
findOne q = do
pipe <- asks mongoPipe
qr <- queryRequest False q {limit = 1}
rq <- liftIO $ request pipe [] qr
Batch _ _ docs <- liftDB $ fulfill rq
return (listToMaybe docs)
fetch :: (MonadIO m) => Query -> Action m Document
-- ^ Same as 'findOne' except throw 'DocNotFound' if none match
fetch q = findOne q >>= maybe (liftIO $ throwIO $ DocNotFound $ selection q) return
data FindAndModifyOpts = FamRemove Bool
| FamUpdate
{ famUpdate :: Document
, famNew :: Bool
, famUpsert :: Bool
}
deriving Show
defFamUpdateOpts :: Document -> FindAndModifyOpts
defFamUpdateOpts ups = FamUpdate
{ famNew = True
, famUpsert = False
, famUpdate = ups
}
-- | Runs the findAndModify command as an update, without an upsert and with new set to true.
-- Returns the single updated document (the new option is set to true).
--
-- See 'findAndModifyOpts' if you want to use findAndModify in a different way.
findAndModify :: MonadIO m
=> Query
-> Document -- ^ updates
-> Action m (Either String Document)
findAndModify q ups = do
eres <- findAndModifyOpts q (defFamUpdateOpts ups)
return $ case eres of
Left l -> Left l
Right r -> case r of
-- only possible when upsert is True and new is False
Nothing -> Left "findAndModify: impossible null result"
Just doc -> Right doc
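-- Added usage sketch: atomically increment a counter document and get back the
-- updated version (the new document is returned by default here). The
-- collection and field names are made-up examples.
exampleFindAndModify :: Action IO (Either String Document)
exampleFindAndModify = findAndModify
    (select ["_id" =: ("page-hits" :: Text)] "counters")
    ["$inc" := Doc ["n" =: (1 :: Int)]]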
-- | Runs the findAndModify command;
-- allows more options than 'findAndModify'.
findAndModifyOpts :: MonadIO m
=> Query
                  -> FindAndModifyOpts
-> Action m (Either String (Maybe Document))
findAndModifyOpts (Query {
selection = Select sel collection
, project = project
, sort = sort
}) famOpts = do
result <- runCommand
([ "findAndModify" := String collection
, "query" := Doc sel
, "fields" := Doc project
, "sort" := Doc sort
] ++
case famOpts of
FamRemove shouldRemove -> [ "remove" := Bool shouldRemove ]
FamUpdate {..} ->
[ "update" := Doc famUpdate
, "new" := Bool famNew -- return updated document, not original document
, "upsert" := Bool famUpsert -- insert if nothing is found
])
return $ case lookupErr result of
Just e -> leftErr e
Nothing -> case lookup "value" result of
Left err -> leftErr $ "no document found: " `mappend` err
Right mdoc -> case mdoc of
Just doc@(_:_) -> Right (Just doc)
Just [] -> case famOpts of
FamUpdate { famUpsert = True, famNew = False } -> Right Nothing
_ -> leftErr $ show result
_ -> leftErr $ show result
where
leftErr err = Left $ "findAndModify " `mappend` show collection
`mappend` "\nfrom query: " `mappend` show sel
`mappend` "\nerror: " `mappend` err
-- return Nothing means ok, Just is the error message
lookupErr result = case lookup "lastErrorObject" result of
Right errObject -> lookup "err" errObject
Left err -> Just err
explain :: (MonadIO m) => Query -> Action m Document
-- ^ Return performance stats of query execution
explain q = do -- same as findOne but with explain set to true
pipe <- asks mongoPipe
qr <- queryRequest True q {limit = 1}
r <- liftIO $ request pipe [] qr
Batch _ _ docs <- liftDB $ fulfill r
return $ if null docs then error ("no explain: " ++ show q) else head docs
count :: (MonadIO m) => Query -> Action m Int
-- ^ Fetch number of documents satisfying query (including effect of skip and/or limit if present)
count Query{selection = Select sel col, skip, limit} = at "n" `liftM` runCommand
(["count" =: col, "query" =: sel, "skip" =: (fromIntegral skip :: Int32)]
++ ("limit" =? if limit == 0 then Nothing else Just (fromIntegral limit :: Int32)))
distinct :: (MonadIO m) => Label -> Selection -> Action m [Value]
-- ^ Fetch distinct values of field in selected documents
distinct k (Select sel col) = at "values" `liftM` runCommand ["distinct" =: col, "key" =: k, "query" =: sel]
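-- Added usage sketch: count the documents matching a query and list the
-- distinct values of a field within a selection. Collection and field names
-- are made-up examples.
exampleStats :: Action IO (Int, [Value])
exampleStats = do
    n <- count (select ["league" =: ("American" :: Text)] "team")
    vs <- distinct "name" (select [] "team")
    return (n, vs)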
queryRequest :: (Monad m) => Bool -> Query -> Action m (Request, Maybe Limit)
-- ^ Translate Query to Protocol.Query. If first arg is true then add special $explain attribute.
queryRequest isExplain Query{..} = do
ctx <- ask
return $ queryRequest' (mongoReadMode ctx) (mongoDatabase ctx)
where
queryRequest' rm db = (P.Query{..}, remainingLimit) where
qOptions = readModeOption rm ++ options
qFullCollection = db <.> coll selection
qSkip = fromIntegral skip
(qBatchSize, remainingLimit) = batchSizeRemainingLimit batchSize (if limit == 0 then Nothing else Just limit)
qProjector = project
mOrder = if null sort then Nothing else Just ("$orderby" =: sort)
mSnapshot = if snapshot then Just ("$snapshot" =: True) else Nothing
mHint = if null hint then Nothing else Just ("$hint" =: hint)
mExplain = if isExplain then Just ("$explain" =: True) else Nothing
special = catMaybes [mOrder, mSnapshot, mHint, mExplain]
qSelector = if null special then s else ("$query" =: s) : special where s = selector selection
batchSizeRemainingLimit :: BatchSize -> (Maybe Limit) -> (Int32, Maybe Limit)
-- ^ Given batchSize and limit return P.qBatchSize and remaining limit
batchSizeRemainingLimit batchSize mLimit =
let remaining =
case mLimit of
Nothing -> batchSize
Just limit ->
if 0 < batchSize && batchSize < limit
then batchSize
else limit
in (fromIntegral remaining, mLimit)
type DelayedBatch = IO Batch
-- ^ A promised batch which may fail
data Batch = Batch (Maybe Limit) CursorId [Document]
-- ^ CursorId = 0 means cursor is finished. Documents is remaining documents to serve in current batch. Limit is number of documents to return. Nothing means no limit.
request :: Pipe -> [Notice] -> (Request, Maybe Limit) -> IO DelayedBatch
-- ^ Send notices and request and return promised batch
request pipe ns (req, remainingLimit) = do
promise <- liftIOE ConnectionFailure $ P.call pipe ns req
let protectedPromise = liftIOE ConnectionFailure promise
return $ fromReply remainingLimit =<< protectedPromise
fromReply :: Maybe Limit -> Reply -> DelayedBatch
-- ^ Convert Reply to Batch or Failure
fromReply limit Reply{..} = do
mapM_ checkResponseFlag rResponseFlags
return (Batch limit rCursorId rDocuments)
where
-- If response flag indicates failure then throw it, otherwise do nothing
checkResponseFlag flag = case flag of
AwaitCapable -> return ()
CursorNotFound -> throwIO $ CursorNotFoundFailure rCursorId
QueryError -> throwIO $ QueryFailure (at "code" $ head rDocuments) (at "$err" $ head rDocuments)
fulfill :: DelayedBatch -> Action IO Batch
-- ^ Demand and wait for result, raise failure if exception
fulfill = liftIO
-- *** Cursor
data Cursor = Cursor FullCollection BatchSize (MVar DelayedBatch)
-- ^ Iterator over results of a query. Use 'next' to iterate or 'rest' to get all results. A cursor is closed when it is explicitly closed, all results have been read from it, garbage collected, or not used for over 10 minutes (unless 'NoCursorTimeout' option was specified in 'Query'). Reading from a closed cursor raises a 'CursorNotFoundFailure'. Note, a cursor is not closed when the pipe is closed, so you can open another pipe to the same server and continue using the cursor.
newCursor :: MonadIO m => Database -> Collection -> BatchSize -> DelayedBatch -> Action m Cursor
-- ^ Create new cursor. If you don't read all results then close it. Cursor will be closed automatically when all results are read from it or when eventually garbage collected.
newCursor db col batchSize dBatch = do
var <- liftIO $ MV.newMVar dBatch
let cursor = Cursor (db <.> col) batchSize var
_ <- liftDB $ mkWeakMVar var (closeCursor cursor)
return cursor
nextBatch :: MonadIO m => Cursor -> Action m [Document]
-- ^ Return next batch of documents in query result, which will be empty if finished.
nextBatch (Cursor fcol batchSize var) = liftDB $ modifyMVar var $ \dBatch -> do
-- Pre-fetch next batch promise from server and return current batch.
Batch mLimit cid docs <- liftDB $ fulfill' fcol batchSize dBatch
let newLimit = do
limit <- mLimit
return $ limit - (min limit $ fromIntegral $ length docs)
let emptyBatch = return $ Batch (Just 0) 0 []
let getNextBatch = nextBatch' fcol batchSize newLimit cid
let resultDocs = (maybe id (take . fromIntegral) mLimit) docs
case (cid, newLimit) of
(0, _) -> return (emptyBatch, resultDocs)
(_, Just 0) -> do
pipe <- asks mongoPipe
liftIOE ConnectionFailure $ P.send pipe [KillCursors [cid]]
return (emptyBatch, resultDocs)
(_, _) -> (, resultDocs) <$> getNextBatch
fulfill' :: FullCollection -> BatchSize -> DelayedBatch -> Action IO Batch
-- Discard pre-fetched batch if empty with nonzero cid.
fulfill' fcol batchSize dBatch = do
b@(Batch limit cid docs) <- fulfill dBatch
if cid /= 0 && null docs && (limit > (Just 0))
then nextBatch' fcol batchSize limit cid >>= fulfill
else return b
nextBatch' :: (MonadIO m) => FullCollection -> BatchSize -> (Maybe Limit) -> CursorId -> Action m DelayedBatch
nextBatch' fcol batchSize limit cid = do
pipe <- asks mongoPipe
liftIO $ request pipe [] (GetMore fcol batchSize' cid, remLimit)
where (batchSize', remLimit) = batchSizeRemainingLimit batchSize limit
next :: MonadIO m => Cursor -> Action m (Maybe Document)
-- ^ Return next document in query result, or Nothing if finished.
next (Cursor fcol batchSize var) = liftDB $ modifyMVar var nextState where
-- Pre-fetch next batch promise from server when last one in current batch is returned.
-- nextState:: DelayedBatch -> Action m (DelayedBatch, Maybe Document)
nextState dBatch = do
Batch mLimit cid docs <- liftDB $ fulfill' fcol batchSize dBatch
if mLimit == (Just 0)
then return (return $ Batch (Just 0) 0 [], Nothing)
else
case docs of
doc : docs' -> do
let newLimit = do
limit <- mLimit
return $ limit - 1
dBatch' <- if null docs' && cid /= 0 && ((newLimit > (Just 0)) || (isNothing newLimit))
then nextBatch' fcol batchSize newLimit cid
else return $ return (Batch newLimit cid docs')
when (newLimit == (Just 0)) $ unless (cid == 0) $ do
pipe <- asks mongoPipe
liftIOE ConnectionFailure $ P.send pipe [KillCursors [cid]]
return (dBatch', Just doc)
[] -> if cid == 0
then return (return $ Batch (Just 0) 0 [], Nothing) -- finished
else do
nb <- nextBatch' fcol batchSize mLimit cid
return (nb, Nothing)
nextN :: MonadIO m => Int -> Cursor -> Action m [Document]
-- ^ Return next N documents or less if end is reached
nextN n c = catMaybes `liftM` replicateM n (next c)
rest :: MonadIO m => Cursor -> Action m [Document]
-- ^ Return remaining documents in query result
rest c = loop (next c)
closeCursor :: MonadIO m => Cursor -> Action m ()
closeCursor (Cursor _ _ var) = liftDB $ modifyMVar var $ \dBatch -> do
Batch _ cid _ <- fulfill dBatch
unless (cid == 0) $ do
pipe <- asks mongoPipe
liftIOE ConnectionFailure $ P.send pipe [KillCursors [cid]]
return $ (return $ Batch (Just 0) 0 [], ())
isCursorClosed :: MonadIO m => Cursor -> Action m Bool
isCursorClosed (Cursor _ _ var) = do
Batch _ cid docs <- liftDB $ fulfill =<< readMVar var
return (cid == 0 && null docs)
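-- Added usage sketch: fetch a result set in server batches of 100, take only
-- the first ten documents, and close the cursor explicitly instead of draining
-- it. The collection name is a made-up example.
exampleCursor :: Action IO [Document]
exampleCursor = do
    cur <- find (select [] "team") {batchSize = 100}
    docs <- nextN 10 cur
    closeCursor cur
    return docs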
-- ** Aggregate
type Pipeline = [Document]
-- ^ The Aggregate Pipeline
aggregate :: MonadIO m => Collection -> Pipeline -> Action m [Document]
-- ^ Runs an aggregate and unpacks the result. See <http://docs.mongodb.org/manual/core/aggregation/> for details.
aggregate aColl agg = do
aggregateCursor aColl agg def >>= rest
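-- Added usage sketch: a two-stage pipeline that filters documents and then
-- groups them, counting teams per league. Collection and field names are
-- made-up examples.
exampleAggregate :: Action IO [Document]
exampleAggregate = aggregate "team"
    [ ["$match" := Doc ["active" := Bool True]]
    , ["$group" := Doc ["_id" := String "$league", "count" := Doc ["$sum" =: (1 :: Int)]]]
    ]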
data AggregateConfig = AggregateConfig {}
deriving Show
instance Default AggregateConfig where
def = AggregateConfig {}
aggregateCursor :: MonadIO m => Collection -> Pipeline -> AggregateConfig -> Action m Cursor
-- ^ Runs an aggregate and unpacks the result. See <http://docs.mongodb.org/manual/core/aggregation/> for details.
aggregateCursor aColl agg _ = do
response <- runCommand ["aggregate" =: aColl, "pipeline" =: agg, "cursor" =: ([] :: Document)]
case true1 "ok" response of
True -> do
cursor :: Document <- lookup "cursor" response
firstBatch :: [Document] <- lookup "firstBatch" cursor
cursorId :: Int64 <- lookup "id" cursor
db <- thisDatabase
newCursor db aColl 0 $ return $ Batch Nothing cursorId firstBatch
False -> liftIO $ throwIO $ AggregateFailure $ at "errmsg" response
-- ** Group
-- | Groups documents in collection by key then reduces (aggregates) each group
data Group = Group {
gColl :: Collection,
gKey :: GroupKey, -- ^ Fields to group by
gReduce :: Javascript, -- ^ @(doc, agg) -> ()@. The reduce function reduces (aggregates) the objects iterated. Typical operations of a reduce function include summing and counting. It takes two arguments, the current document being iterated over and the aggregation value, and updates the aggregate value.
gInitial :: Document, -- ^ @agg@. Initial aggregation value supplied to reduce
gCond :: Selector, -- ^ Condition that must be true for a row to be considered. [] means always true.
gFinalize :: Maybe Javascript -- ^ @agg -> () | result@. An optional function to be run on each item in the result set just before the item is returned. Can either modify the item (e.g., add an average field given a count and a total) or return a replacement object (returning a new object with just _id and average fields).
} deriving (Show, Eq)
data GroupKey = Key [Label] | KeyF Javascript deriving (Show, Eq)
-- ^ Fields to group by, or function (@doc -> key@) returning a "key object" to be used as the grouping key. Use KeyF instead of Key to specify a key that is not an existing member of the object (or, to access embedded members).
groupDocument :: Group -> Document
-- ^ Translate Group data into expected document form
groupDocument Group{..} =
("finalize" =? gFinalize) ++ [
"ns" =: gColl,
case gKey of Key k -> "key" =: map (=: True) k; KeyF f -> "$keyf" =: f,
"$reduce" =: gReduce,
"initial" =: gInitial,
"cond" =: gCond ]
group :: (MonadIO m) => Group -> Action m [Document]
-- ^ Execute group query and return resulting aggregate value for each distinct key
group g = at "retval" `liftM` runCommand ["group" =: groupDocument g]
-- ** MapReduce
-- | Maps every document in collection to a list of (key, value) pairs, then for each unique key reduces all its associated values to a single result. There are additional parameters that may be set to tweak this basic operation.
-- This implements the latest version of map-reduce that requires MongoDB 1.7.4 or greater. To map-reduce against an older server use runCommand directly as described in http://www.mongodb.org/display/DOCS/MapReduce.
data MapReduce = MapReduce {
rColl :: Collection,
rMap :: MapFun,
rReduce :: ReduceFun,
rSelect :: Selector, -- ^ Operate on only those documents selected. Default is [] meaning all documents.
rSort :: Order, -- ^ Default is [] meaning no sort
rLimit :: Limit, -- ^ Default is 0 meaning no limit
rOut :: MROut, -- ^ Output to a collection with a certain merge policy. Default is no collection ('Inline'). Note, you don't want this default if your result set is large.
rFinalize :: Maybe FinalizeFun, -- ^ Function to apply to all the results when finished. Default is Nothing.
rScope :: Document, -- ^ Variables (environment) that can be accessed from map/reduce/finalize. Default is [].
rVerbose :: Bool -- ^ Provide statistics on job execution time. Default is False.
} deriving (Show, Eq)
type MapFun = Javascript
-- ^ @() -> void@. The map function references the variable @this@ to inspect the current object under consideration. The function must call @emit(key,value)@ at least once, but may be invoked any number of times, as may be appropriate.
type ReduceFun = Javascript
-- ^ @(key, [value]) -> value@. The reduce function receives a key and an array of values and returns an aggregate result value. The MapReduce engine may invoke reduce functions iteratively; thus, these functions must be idempotent. That is, the following must hold for your reduce function: @reduce(k, [reduce(k,vs)]) == reduce(k,vs)@. If you need to perform an operation only once, use a finalize function. The output of emit (the 2nd param) and reduce should be the same format to make iterative reduce possible.
type FinalizeFun = Javascript
-- ^ @(key, value) -> final_value@. A finalize function may be run after reduction. Such a function is optional and is not necessary for many map/reduce cases. The finalize function takes a key and a value, and returns a finalized value.
data MROut =
Inline -- ^ Return results directly instead of writing them to an output collection. Results must fit within 16MB limit of a single document
| Output MRMerge Collection (Maybe Database) -- ^ Write results to given collection, in other database if specified. Follow merge policy when entry already exists
deriving (Show, Eq)
data MRMerge =
Replace -- ^ Clear all old data and replace it with new data
| Merge -- ^ Leave old data but overwrite entries with the same key with new data
| Reduce -- ^ Leave old data but combine entries with the same key via MR's reduce function
deriving (Show, Eq)
type MRResult = Document
-- ^ Result of running a MapReduce has some stats besides the output. See http://www.mongodb.org/display/DOCS/MapReduce#MapReduce-Resultobject
mrDocument :: MapReduce -> Document
-- ^ Translate MapReduce data into expected document form
mrDocument MapReduce{..} =
("mapreduce" =: rColl) :
("out" =: mrOutDoc rOut) :
("finalize" =? rFinalize) ++ [
"map" =: rMap,
"reduce" =: rReduce,
"query" =: rSelect,
"sort" =: rSort,
"limit" =: (fromIntegral rLimit :: Int),
"scope" =: rScope,
"verbose" =: rVerbose ]
mrOutDoc :: MROut -> Document
-- ^ Translate MROut into expected document form
mrOutDoc Inline = ["inline" =: (1 :: Int)]
mrOutDoc (Output mrMerge coll mDB) = (mergeName mrMerge =: coll) : mdb mDB where
mergeName Replace = "replace"
mergeName Merge = "merge"
mergeName Reduce = "reduce"
mdb Nothing = []
mdb (Just db) = ["db" =: db]
mapReduce :: Collection -> MapFun -> ReduceFun -> MapReduce
-- ^ MapReduce on collection with given map and reduce functions. Remaining attributes are set to their defaults, which are stated in their comments.
mapReduce col map' red = MapReduce col map' red [] [] 0 Inline Nothing [] False
runMR :: MonadIO m => MapReduce -> Action m Cursor
-- ^ Run MapReduce and return cursor of results. Error if map/reduce fails (because of bad Javascript)
runMR mr = do
res <- runMR' mr
case look "result" res of
Just (String coll) -> find $ query [] coll
Just (Doc doc) -> useDb (at "db" doc) $ find $ query [] (at "collection" doc)
Just x -> error $ "unexpected map-reduce result field: " ++ show x
Nothing -> newCursor "" "" 0 $ return $ Batch (Just 0) 0 (at "results" res)
runMR' :: (MonadIO m) => MapReduce -> Action m MRResult
-- ^ Run MapReduce and return a MR result document containing stats and the results if Inlined. Error if the map/reduce failed (because of bad Javascript).
runMR' mr = do
doc <- runCommand (mrDocument mr)
return $ if true1 "ok" doc then doc else error $ "mapReduce error:\n" ++ show doc ++ "\nin:\n" ++ show mr
-- * Command
type Command = Document
-- ^ A command is a special query or action against the database. See <http://www.mongodb.org/display/DOCS/Commands> for details.
runCommand :: (MonadIO m) => Command -> Action m Document
-- ^ Run command against the database and return its result
runCommand c = maybe err id `liftM` findOne (query c "$cmd") where
err = error $ "Nothing returned for command: " ++ show c
runCommand1 :: (MonadIO m) => Text -> Action m Document
-- ^ @runCommand1 foo = runCommand [foo =: 1]@
runCommand1 c = runCommand [c =: (1 :: Int)]
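-- Added usage sketch: run a raw command document; here, ask the server for
-- statistics about a single (made-up) collection.
exampleCollStats :: Action IO Document
exampleCollStats = runCommand ["collStats" =: ("team" :: Text)]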
eval :: (MonadIO m, Val v) => Javascript -> Action m v
-- ^ Run code on server
eval code = at "retval" `liftM` runCommand ["$eval" =: code]
modifyMVar :: MVar a -> (a -> Action IO (a, b)) -> Action IO b
modifyMVar v f = do
ctx <- ask
liftIO $ MV.modifyMVar v (\x -> runReaderT (f x) ctx)
mkWeakMVar :: MVar a -> Action IO () -> Action IO (Weak (MVar a))
mkWeakMVar m closing = do
ctx <- ask
#if MIN_VERSION_base(4,6,0)
liftIO $ MV.mkWeakMVar m $ runReaderT closing ctx
#else
liftIO $ MV.addMVarFinalizer m $ runReaderT closing ctx
#endif
{- Authors: Tony Hannan <[email protected]>
Copyright 2011 10gen Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -}
| Yuras/mongodb | Database/MongoDB/Query.hs | apache-2.0 | 69,101 | 2 | 27 | 19,972 | 15,849 | 8,290 | 7,559 | 1,066 | 9 |
{-# LANGUAGE BangPatterns, CPP, MagicHash, Rank2Types,
RecordWildCards, UnboxedTuples, UnliftedFFITypes #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- |
-- Module : Data.Text.Array
-- Copyright : (c) 2009, 2010, 2011 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Portability : portable
--
-- Packed, unboxed, heap-resident arrays. Suitable for performance
-- critical use, both in terms of large data quantities and high
-- speed.
--
-- This module is intended to be imported @qualified@, to avoid name
-- clashes with "Prelude" functions, e.g.
--
-- > import qualified Data.Text.Array as A
--
-- The names in this module resemble those in the 'Data.Array' family
-- of modules, but are shorter due to the assumption of qualified
-- naming.
module Data.Text.Array
(
-- * Types
Array(Array, aBA)
, MArray(MArray, maBA)
-- * Functions
, copyM
, copyI
, empty
, equal
#if defined(ASSERTS)
, length
#endif
, run
, run2
, toList
, unsafeFreeze
, unsafeIndex
, new
, unsafeWrite
) where
#if defined(ASSERTS)
-- This fugly hack is brought by GHC's apparent reluctance to deal
-- with MagicHash and UnboxedTuples when inferring types. Eek!
# define CHECK_BOUNDS(_func_,_len_,_k_) \
if (_k_) < 0 || (_k_) >= (_len_) then error ("Data.Text.Array." ++ (_func_) ++ ": bounds error, offset " ++ show (_k_) ++ ", length " ++ show (_len_)) else
#else
# define CHECK_BOUNDS(_func_,_len_,_k_)
#endif
#include "MachDeps.h"
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
#if MIN_VERSION_base(4,4,0)
import Control.Monad.ST.Unsafe (unsafeIOToST)
#else
import Control.Monad.ST (unsafeIOToST)
#endif
import Data.Bits ((.&.), xor)
import Data.Text.Internal.Unsafe (inlinePerformIO)
import Data.Text.Internal.Unsafe.Shift (shiftL, shiftR)
#if MIN_VERSION_base(4,5,0)
import Foreign.C.Types (CInt(CInt), CSize(CSize))
#else
import Foreign.C.Types (CInt, CSize)
#endif
import GHC.Base (ByteArray#, MutableByteArray#, Int(..),
indexWord16Array#, newByteArray#,
unsafeFreezeByteArray#, writeWord16Array#)
import GHC.ST (ST(..), runST)
import GHC.Word (Word16(..))
import Prelude hiding (length, read)
-- | Immutable array type.
--
-- The 'Array' constructor is exposed since @text-1.1.1.3@
data Array = Array {
aBA :: ByteArray#
#if defined(ASSERTS)
, aLen :: {-# UNPACK #-} !Int -- length (in units of Word16, not bytes)
#endif
}
-- | Mutable array type, for use in the ST monad.
--
-- The 'MArray' constructor is exposed since @text-1.1.1.3@
data MArray s = MArray {
maBA :: MutableByteArray# s
#if defined(ASSERTS)
, maLen :: {-# UNPACK #-} !Int -- length (in units of Word16, not bytes)
#endif
}
#if defined(ASSERTS)
-- | Operations supported by all arrays.
class IArray a where
-- | Return the length of an array.
length :: a -> Int
instance IArray Array where
length = aLen
{-# INLINE length #-}
instance IArray (MArray s) where
length = maLen
{-# INLINE length #-}
#endif
-- | Create an uninitialized mutable array.
new :: forall s. Int -> ST s (MArray s)
new n
| n < 0 || n .&. highBit /= 0 = array_size_error
| otherwise = ST $ \s1# ->
case newByteArray# len# s1# of
(# s2#, marr# #) -> (# s2#, MArray marr#
#if defined(ASSERTS)
n
#endif
#)
where !(I# len#) = bytesInArray n
highBit = maxBound `xor` (maxBound `shiftR` 1)
{-# INLINE new #-}
array_size_error :: a
array_size_error = error "Data.Text.Array.new: size overflow"
-- | Freeze a mutable array. Do not mutate the 'MArray' afterwards!
unsafeFreeze :: MArray s -> ST s Array
unsafeFreeze MArray{..} = ST $ \s1# ->
case unsafeFreezeByteArray# maBA s1# of
(# s2#, ba# #) -> (# s2#, Array ba#
#if defined(ASSERTS)
maLen
#endif
#)
{-# INLINE unsafeFreeze #-}
-- | Indicate how many bytes would be used for an array of the given
-- size.
bytesInArray :: Int -> Int
bytesInArray n = n `shiftL` 1
{-# INLINE bytesInArray #-}
-- | Unchecked read of an immutable array. May return garbage or
-- crash on an out-of-bounds access.
unsafeIndex :: Array -> Int -> Word16
unsafeIndex Array{..} i@(I# i#) =
CHECK_BOUNDS("unsafeIndex",aLen,i)
case indexWord16Array# aBA i# of r# -> (W16# r#)
{-# INLINE unsafeIndex #-}
-- | Unchecked write of a mutable array. May return garbage or crash
-- on an out-of-bounds access.
unsafeWrite :: MArray s -> Int -> Word16 -> ST s ()
unsafeWrite MArray{..} i@(I# i#) (W16# e#) = ST $ \s1# ->
CHECK_BOUNDS("unsafeWrite",maLen,i)
case writeWord16Array# maBA i# e# s1# of
s2# -> (# s2#, () #)
{-# INLINE unsafeWrite #-}
-- | Convert an immutable array to a list.
toList :: Array -> Int -> Int -> [Word16]
toList ary off len = loop 0
where loop i | i < len = unsafeIndex ary (off+i) : loop (i+1)
| otherwise = []
-- | An empty immutable array.
empty :: Array
empty = runST (new 0 >>= unsafeFreeze)
-- | Run an action in the ST monad and return an immutable array of
-- its result.
run :: (forall s. ST s (MArray s)) -> Array
run k = runST (k >>= unsafeFreeze)
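-- A small sketch of building a two-element array with 'run' (the element
-- values are arbitrary):
--
-- > example :: Array
-- > example = run $ do
-- >   marr <- new 2
-- >   unsafeWrite marr 0 72
-- >   unsafeWrite marr 1 105
-- >   return marr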
-- | Run an action in the ST monad and return an immutable array of
-- its result paired with whatever else the action returns.
run2 :: (forall s. ST s (MArray s, a)) -> (Array, a)
run2 k = runST (do
(marr,b) <- k
arr <- unsafeFreeze marr
return (arr,b))
{-# INLINE run2 #-}
-- | Copy some elements of a mutable array.
copyM :: MArray s -- ^ Destination
-> Int -- ^ Destination offset
-> MArray s -- ^ Source
-> Int -- ^ Source offset
-> Int -- ^ Count
-> ST s ()
copyM dest didx src sidx count
| count <= 0 = return ()
| otherwise =
#if defined(ASSERTS)
assert (sidx + count <= length src) .
assert (didx + count <= length dest) .
#endif
unsafeIOToST $ memcpyM (maBA dest) (fromIntegral didx)
(maBA src) (fromIntegral sidx)
(fromIntegral count)
{-# INLINE copyM #-}
-- | Copy some elements of an immutable array.
copyI :: MArray s -- ^ Destination
-> Int -- ^ Destination offset
-> Array -- ^ Source
-> Int -- ^ Source offset
-> Int -- ^ First offset in destination /not/ to
-- copy (i.e. /not/ length)
-> ST s ()
copyI dest i0 src j0 top
| i0 >= top = return ()
| otherwise = unsafeIOToST $
memcpyI (maBA dest) (fromIntegral i0)
(aBA src) (fromIntegral j0)
(fromIntegral (top-i0))
{-# INLINE copyI #-}
-- | Compare portions of two arrays for equality. No bounds checking
-- is performed.
equal :: Array -- ^ First
-> Int -- ^ Offset into first
-> Array -- ^ Second
-> Int -- ^ Offset into second
-> Int -- ^ Count
-> Bool
equal arrA offA arrB offB count = inlinePerformIO $ do
i <- memcmp (aBA arrA) (fromIntegral offA)
(aBA arrB) (fromIntegral offB) (fromIntegral count)
return $! i == 0
{-# INLINE equal #-}
foreign import ccall unsafe "_hs_text_memcpy" memcpyI
:: MutableByteArray# s -> CSize -> ByteArray# -> CSize -> CSize -> IO ()
foreign import ccall unsafe "_hs_text_memcmp" memcmp
:: ByteArray# -> CSize -> ByteArray# -> CSize -> CSize -> IO CInt
foreign import ccall unsafe "_hs_text_memcpy" memcpyM
:: MutableByteArray# s -> CSize -> MutableByteArray# s -> CSize -> CSize
-> IO ()
| bgamari/text | src/Data/Text/Array.hs | bsd-2-clause | 7,903 | 0 | 12 | 2,201 | 1,629 | 914 | 715 | -1 | -1 |
module Compiler.CodeGeneration.CompilationState where
import Control.Monad.State
import Data.Set as Set
import Data.Ix
import Compiler.CodeGeneration.InstructionSet
import Compiler.CodeGeneration.SymbolResolution
import Compiler.CodeGeneration.LabelResolution
import Compiler.CodeGeneration.LabelTable
import Compiler.SymbolTable
data CompilationBlock = CompilationBlock {
symbolTable :: SymbolTable,
labelTable :: LabelTable,
instructions :: [Instruction],
stackPointer :: Int
}
data CompilationState = CompilationState {
usedRegisters :: Set Int,
instCounter :: Int, -- ^Current program address
blocks :: [CompilationBlock]
}
newBlock = do top <- topBlock
return $ top { instructions = [] }
initBlock = CompilationBlock {
symbolTable = initSymbolTable,
labelTable = initLabelTable,
instructions = [],
stackPointer = -3
}
type Compiler a = State CompilationState a
-- |Compiles something within a compilation environment,
-- |returning the resulting instructions
withCompiler x = instructions $ head $ blocks $ execState x init
where init = CompilationState {
usedRegisters = empty,
instCounter = 0,
blocks = [initBlock]
}
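-- For example, a (hypothetical) compilation that only emits a comment and
-- defines a label would be run as:
--
-- > withCompiler (comment "prologue" >> label "main")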
emit i = do b <- topBlock
i' <- return $ (instructions b) ++ [i]
replaceTopBlock $ b { instructions = i' }
unless (isComment i) incrementAddress
-- |Return the address at which the next instruction will be
-- |placed
currentAddress = do st <- get :: Compiler CompilationState
return $ instCounter st
incrementAddress = do st <- get
put $ st { instCounter = (instCounter st) + 1 }
currentStackPointer = do b <- topBlock
return $ stackPointer b
incrementStackPointer i = do b <- topBlock
b' <- return $ b { stackPointer = (stackPointer b) - i }
replaceTopBlock b'
currentSymbolTable = do b <- topBlock
return $ symbolTable b
availableRegisters st = toList $ (all `difference` reserved) `difference` used
where all = fromList $ range (0,8)
reserved = fromList [pc, gp, fp, ac]
used = usedRegisters st
-- |This is the poor man's register allocator. Think of it like a
-- |penny tray at a convenience store: "need a register? take a
-- |register. have a register? leave a register." What if there
-- |are no free registers, you ask? Tough shit, it isn't smart
-- |enough to spill to memory.
claimRegister = do st <- get :: Compiler CompilationState
case availableRegisters st of
[] -> error "ow, my brain! there's no free registers!"
(x:xs) -> do put $ st { usedRegisters = insert x (usedRegisters st) }
return x
freeRegister r = do st <- get :: Compiler CompilationState
put $ st { usedRegisters = delete r (usedRegisters st) }
declareSymbol s = declareSymbols [s]
declareSymbols syms = do b <- topBlock
table' <- return $ insertSymbols (symbolTable b) syms
replaceTopBlock $ b { symbolTable = table' }
-- |Pushes a new compilation block to the stack. All labels within
-- |a block are isolated from their surrounding environment
pushBlock = do st <- get :: Compiler CompilationState
b <- newBlock
blocks' <- return $ b:(blocks st)
put $ st { blocks = blocks' }
return ()
popBlock = do st <- get :: Compiler CompilationState
(x:xs) <- return $ blocks st
put $ st { blocks = xs }
return x
topBlock = do st <- get :: Compiler CompilationState
return $ head $ blocks st
appendInstructions i = do top <- topBlock
top' <- return $ top { instructions = (instructions top) ++ i }
replaceTopBlock top'
defineLabel name address = do b <- topBlock
replaceTopBlock $ b { labelTable = (insertLabel (labelTable b) name address) }
replaceTopBlock x = do st <- get
xs <- return $ tail $ blocks st
put $ st { blocks = (x:xs) }
finalizeBlock b = do i' <- return $ resolveSymbols (symbolTable b) (instructions b)
i'' <- return $ resolveLabels i' (labelTable b)
appendInstructions i''
withBlock x = do pushBlock
x
b <- popBlock
finalizeBlock b
withBlockRet x = do pushBlock
ret <- x
b <- popBlock
finalizeBlock b
return ret
comment x = emit $ COMMENT x
label name = do a <- currentAddress
comment ("label " ++ name ++ " defined at " ++ show a)
defineLabel name a
| michaelmelanson/cminus-compiler | Compiler/CodeGeneration/CompilationState.hs | bsd-2-clause | 5,583 | 0 | 17 | 2,292 | 1,260 | 642 | 618 | 96 | 2 |
module RePack where
import CLaSH.Prelude
topEntity :: (Unsigned 1,Unsigned 1)
topEntity = (unpack (pack True), unpack (pack False))
| ggreif/clash-compiler | tests/shouldwork/BitVector/RePack.hs | bsd-2-clause | 134 | 0 | 8 | 20 | 55 | 30 | 25 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Trustworthy #-}
-- | Provide primitives to communicate among family members. It provides an API for the sequential ('linkMAC') and concurrent ('forkMAC') settings
module MAC.Control
(
-- Defined here
linkMAC -- Secure communication for sequential programs
   , forkMAC     -- Spawning threads
, forkMACMVar -- Returning futures
)
where
import MAC.Lattice
import MAC.Core (MAC(),ioTCB,runMAC)
import MAC.Exception
import MAC.Labeled
import MAC.MVar
import Control.Exception
import Control.Concurrent
{-|
Primitive which allows family members to safely communicate. The function
finishes even if an exception is raised---the exception is rethrown when
the returned value gets inspected
-}
linkMAC :: (Less l l') => MAC l' a -> MAC l (Labeled l' a)
linkMAC m = (ioTCB . runMAC)
(catchMAC m (\(e :: SomeException) -> throwMAC e)) >>= label
-- | Safely spawning new threads
forkMAC :: Less l l' => MAC l' () -> MAC l ()
forkMAC m = (ioTCB . forkIO . runMAC) m >> return ()
{-|
Safely spawning new threads. The function returns a labeled 'MVar' where
the outcome of the thread is stored
-}
forkMACMVar :: (Less l' l', Less l l') => MAC l' a -> MAC l (MACMVar l' a)
forkMACMVar m = do lmv <- newMACEmptyMVar
forkMAC (m >>= putMACMVar lmv)
return lmv
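-- A minimal sketch of spawning a computation at a higher label and keeping a
-- handle to its result; the labels @L@ and @H@ (with an instance @Less L H@)
-- are hypothetical members of the lattice:
--
-- > example :: MAC L (MACMVar H Int)
-- > example = forkMACMVar (return 42 :: MAC H Int)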
| alejandrorusso/mac-privacy | MAC/Control.hs | bsd-3-clause | 1,392 | 0 | 12 | 324 | 308 | 164 | 144 | 23 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Tinfoil.Key(
genSymmetricKey
) where
import P
import System.IO (IO)
import Tinfoil.Data.Key
import Tinfoil.Data.Random
import Tinfoil.Random
-- | Generate a 256-bit symmetric cryptographic key.
genSymmetricKey :: IO SymmetricKey
genSymmetricKey = fmap (SymmetricKey . unEntropy) $ entropy symmetricKeyLength
| ambiata/tinfoil | src/Tinfoil/Key.hs | bsd-3-clause | 445 | 0 | 8 | 105 | 74 | 44 | 30 | 11 | 1 |
{-# LANGUAGE RecordWildCards, ScopedTypeVariables, BangPatterns, CPP #-}
--
-- | Interacting with the interpreter, whether it is running on an
-- external process or in the current process.
--
module GHCi
( -- * High-level interface to the interpreter
evalStmt, EvalStatus_(..), EvalStatus, EvalResult(..), EvalExpr(..)
, resumeStmt
, abandonStmt
, evalIO
, evalString
, evalStringToIOString
, mallocData
, createBCOs
, addSptEntry
, mkCostCentres
, costCentreStackInfo
, newBreakArray
, enableBreakpoint
, breakpointStatus
, getBreakpointVar
, getClosure
, seqHValue
-- * The object-code linker
, initObjLinker
, lookupSymbol
, lookupClosure
, loadDLL
, loadArchive
, loadObj
, unloadObj
, addLibrarySearchPath
, removeLibrarySearchPath
, resolveObjs
, findSystemLibrary
-- * Lower-level API using messages
, iservCmd, Message(..), withIServ, stopIServ
, iservCall, readIServ, writeIServ
, purgeLookupSymbolCache
, freeHValueRefs
, mkFinalizedHValue
, wormhole, wormholeRef
, mkEvalOpts
, fromEvalResult
) where
import GhcPrelude
import GHCi.Message
#if defined(HAVE_INTERNAL_INTERPRETER)
import GHCi.Run
#endif
import GHCi.RemoteTypes
import GHCi.ResolvedBCO
import GHCi.BreakArray (BreakArray)
import Fingerprint
import HscTypes
import UniqFM
import Panic
import DynFlags
import ErrUtils
import Outputable
import Exception
import BasicTypes
import FastString
import Util
import Hooks
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Data.Binary
import Data.Binary.Put
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LB
import Data.IORef
import Foreign hiding (void)
import GHC.Exts.Heap
import GHC.Stack.CCS (CostCentre,CostCentreStack)
import System.Exit
import Data.Maybe
import GHC.IO.Handle.Types (Handle)
#if defined(mingw32_HOST_OS)
import Foreign.C
import GHC.IO.Handle.FD (fdToHandle)
#else
import System.Posix as Posix
#endif
import System.Directory
import System.Process
import GHC.Conc (getNumProcessors, pseq, par)
{- Note [Remote GHCi]
When the flag -fexternal-interpreter is given to GHC, interpreted code
is run in a separate process called iserv, and we communicate with the
external process over a pipe using Binary-encoded messages.
Motivation
~~~~~~~~~~
When the interpreted code is running in a separate process, it can
use a different "way", e.g. profiled or dynamic. This means
- compiling Template Haskell code with -prof does not require
building the code without -prof first
- when GHC itself is profiled, it can interpret unprofiled code,
and the same applies to dynamic linking.
- An unprofiled GHCi can load and run profiled code, which means it
can use the stack-trace functionality provided by profiling without
taking the performance hit on the compiler that profiling would
entail.
For other reasons see remote-GHCi on the wiki.
Implementation Overview
~~~~~~~~~~~~~~~~~~~~~~~
The main pieces are:
- libraries/ghci, containing:
- types for talking about remote values (GHCi.RemoteTypes)
- the message protocol (GHCi.Message),
- implementation of the messages (GHCi.Run)
- implementation of Template Haskell (GHCi.TH)
- a few other things needed to run interpreted code
  - top-level iserv directory, containing the code for the external
server. This is a fairly simple wrapper, most of the functionality
is provided by modules in libraries/ghci.
- This module (GHCi) which provides the interface to the server used
by the rest of GHC.
GHC works with and without -fexternal-interpreter. With the flag, all
interpreted code is run by the iserv binary. Without the flag,
interpreted code is run in the same process as GHC.
Things that do not work with -fexternal-interpreter
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
dynCompileExpr cannot work, because we have no way to run code of an
unknown type in the remote process. This API fails with an error
message if it is used with -fexternal-interpreter.
Other Notes on Remote GHCi
~~~~~~~~~~~~~~~~~~~~~~~~~~
* This wiki page has an implementation overview:
https://gitlab.haskell.org/ghc/ghc/wikis/commentary/compiler/external-interpreter
* Note [External GHCi pointers] in compiler/ghci/GHCi.hs
* Note [Remote Template Haskell] in libraries/ghci/GHCi/TH.hs
-}
#if !defined(HAVE_INTERNAL_INTERPRETER)
needExtInt :: IO a
needExtInt = throwIO
(InstallationError "this operation requires -fexternal-interpreter")
#endif
-- | Run a command in the interpreter's context. With
-- @-fexternal-interpreter@, the command is serialized and sent to an
-- external iserv process, and the response is deserialized (hence the
-- @Binary@ constraint). With @-fno-external-interpreter@ we execute
-- the command directly here.
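--
-- For example, assuming an 'HscEnv' and some bytes are in scope:
--
-- > remotePtr <- iservCmd hsc_env (MallocData bytes)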
iservCmd :: Binary a => HscEnv -> Message a -> IO a
iservCmd hsc_env@HscEnv{..} msg
| gopt Opt_ExternalInterpreter hsc_dflags =
withIServ hsc_env $ \iserv ->
        uninterruptibleMask_ $ do -- Note [uninterruptibleMask_ and iservCmd]
iservCall iserv msg
| otherwise = -- Just run it directly
#if defined(HAVE_INTERNAL_INTERPRETER)
run msg
#else
needExtInt
#endif
-- Note [uninterruptibleMask_ and iservCmd]
--
-- If we receive an async exception, such as ^C, while communicating
-- with the iserv process then we will be out-of-sync and not be able
-- to recover. Thus we use uninterruptibleMask_ during
-- communication. A ^C will be delivered to the iserv process (because
-- signals get sent to the whole process group) which will interrupt
-- the running computation and return an EvalException result.
-- | Grab a lock on the 'IServ' and do something with it.
-- Overloaded because this is used from TcM as well as IO.
withIServ
:: (MonadIO m, ExceptionMonad m)
=> HscEnv -> (IServ -> m a) -> m a
withIServ HscEnv{..} action =
gmask $ \restore -> do
m <- liftIO $ takeMVar hsc_iserv
-- start the iserv process if we haven't done so yet
iserv <- maybe (liftIO $ startIServ hsc_dflags) return m
`gonException` (liftIO $ putMVar hsc_iserv Nothing)
-- free any ForeignHValues that have been garbage collected.
let iserv' = iserv{ iservPendingFrees = [] }
a <- (do
liftIO $ when (not (null (iservPendingFrees iserv))) $
iservCall iserv (FreeHValueRefs (iservPendingFrees iserv))
-- run the inner action
restore $ action iserv)
`gonException` (liftIO $ putMVar hsc_iserv (Just iserv'))
liftIO $ putMVar hsc_iserv (Just iserv')
return a
-- -----------------------------------------------------------------------------
-- Wrappers around messages
-- | Execute an action of type @IO [a]@, returning 'ForeignHValue's for
-- each of the results.
evalStmt
:: HscEnv -> Bool -> EvalExpr ForeignHValue
-> IO (EvalStatus_ [ForeignHValue] [HValueRef])
evalStmt hsc_env step foreign_expr = do
let dflags = hsc_dflags hsc_env
status <- withExpr foreign_expr $ \expr ->
iservCmd hsc_env (EvalStmt (mkEvalOpts dflags step) expr)
handleEvalStatus hsc_env status
where
withExpr :: EvalExpr ForeignHValue -> (EvalExpr HValueRef -> IO a) -> IO a
withExpr (EvalThis fhv) cont =
withForeignRef fhv $ \hvref -> cont (EvalThis hvref)
withExpr (EvalApp fl fr) cont =
withExpr fl $ \fl' ->
withExpr fr $ \fr' ->
cont (EvalApp fl' fr')
resumeStmt
:: HscEnv -> Bool -> ForeignRef (ResumeContext [HValueRef])
-> IO (EvalStatus_ [ForeignHValue] [HValueRef])
resumeStmt hsc_env step resume_ctxt = do
let dflags = hsc_dflags hsc_env
status <- withForeignRef resume_ctxt $ \rhv ->
iservCmd hsc_env (ResumeStmt (mkEvalOpts dflags step) rhv)
handleEvalStatus hsc_env status
abandonStmt :: HscEnv -> ForeignRef (ResumeContext [HValueRef]) -> IO ()
abandonStmt hsc_env resume_ctxt = do
withForeignRef resume_ctxt $ \rhv ->
iservCmd hsc_env (AbandonStmt rhv)
handleEvalStatus
:: HscEnv -> EvalStatus [HValueRef]
-> IO (EvalStatus_ [ForeignHValue] [HValueRef])
handleEvalStatus hsc_env status =
case status of
EvalBreak a b c d e f -> return (EvalBreak a b c d e f)
EvalComplete alloc res ->
EvalComplete alloc <$> addFinalizer res
where
addFinalizer (EvalException e) = return (EvalException e)
addFinalizer (EvalSuccess rs) = do
EvalSuccess <$> mapM (mkFinalizedHValue hsc_env) rs
-- | Execute an action of type @IO ()@
evalIO :: HscEnv -> ForeignHValue -> IO ()
evalIO hsc_env fhv = do
liftIO $ withForeignRef fhv $ \fhv ->
iservCmd hsc_env (EvalIO fhv) >>= fromEvalResult
-- | Execute an action of type @IO String@
evalString :: HscEnv -> ForeignHValue -> IO String
evalString hsc_env fhv = do
liftIO $ withForeignRef fhv $ \fhv ->
iservCmd hsc_env (EvalString fhv) >>= fromEvalResult
-- | Execute an action of type @String -> IO String@
evalStringToIOString :: HscEnv -> ForeignHValue -> String -> IO String
evalStringToIOString hsc_env fhv str = do
liftIO $ withForeignRef fhv $ \fhv ->
iservCmd hsc_env (EvalStringToString fhv str) >>= fromEvalResult
-- | Allocate and store the given bytes in memory, returning a pointer
-- to the memory in the remote process.
mallocData :: HscEnv -> ByteString -> IO (RemotePtr ())
mallocData hsc_env bs = iservCmd hsc_env (MallocData bs)
mkCostCentres
:: HscEnv -> String -> [(String,String)] -> IO [RemotePtr CostCentre]
mkCostCentres hsc_env mod ccs =
iservCmd hsc_env (MkCostCentres mod ccs)
-- | Create a set of BCOs that may be mutually recursive.
createBCOs :: HscEnv -> [ResolvedBCO] -> IO [HValueRef]
createBCOs hsc_env rbcos = do
n_jobs <- case parMakeCount (hsc_dflags hsc_env) of
Nothing -> liftIO getNumProcessors
Just n -> return n
-- Serializing ResolvedBCO is expensive, so if we're in parallel mode
-- (-j<n>) parallelise the serialization.
if (n_jobs == 1)
then
iservCmd hsc_env (CreateBCOs [runPut (put rbcos)])
else do
old_caps <- getNumCapabilities
if old_caps == n_jobs
then void $ evaluate puts
else bracket_ (setNumCapabilities n_jobs)
(setNumCapabilities old_caps)
(void $ evaluate puts)
iservCmd hsc_env (CreateBCOs puts)
where
puts = parMap doChunk (chunkList 100 rbcos)
-- make sure we force the whole lazy ByteString
doChunk c = pseq (LB.length bs) bs
where bs = runPut (put c)
-- We don't have the parallel package, so roll our own simple parMap
parMap _ [] = []
parMap f (x:xs) = fx `par` (fxs `pseq` (fx : fxs))
where fx = f x; fxs = parMap f xs
addSptEntry :: HscEnv -> Fingerprint -> ForeignHValue -> IO ()
addSptEntry hsc_env fpr ref =
withForeignRef ref $ \val ->
iservCmd hsc_env (AddSptEntry fpr val)
costCentreStackInfo :: HscEnv -> RemotePtr CostCentreStack -> IO [String]
costCentreStackInfo hsc_env ccs =
iservCmd hsc_env (CostCentreStackInfo ccs)
newBreakArray :: HscEnv -> Int -> IO (ForeignRef BreakArray)
newBreakArray hsc_env size = do
breakArray <- iservCmd hsc_env (NewBreakArray size)
mkFinalizedHValue hsc_env breakArray
enableBreakpoint :: HscEnv -> ForeignRef BreakArray -> Int -> Bool -> IO ()
enableBreakpoint hsc_env ref ix b = do
withForeignRef ref $ \breakarray ->
iservCmd hsc_env (EnableBreakpoint breakarray ix b)
breakpointStatus :: HscEnv -> ForeignRef BreakArray -> Int -> IO Bool
breakpointStatus hsc_env ref ix = do
withForeignRef ref $ \breakarray ->
iservCmd hsc_env (BreakpointStatus breakarray ix)
getBreakpointVar :: HscEnv -> ForeignHValue -> Int -> IO (Maybe ForeignHValue)
getBreakpointVar hsc_env ref ix =
withForeignRef ref $ \apStack -> do
mb <- iservCmd hsc_env (GetBreakpointVar apStack ix)
mapM (mkFinalizedHValue hsc_env) mb
getClosure :: HscEnv -> ForeignHValue -> IO (GenClosure ForeignHValue)
getClosure hsc_env ref =
withForeignRef ref $ \hval -> do
mb <- iservCmd hsc_env (GetClosure hval)
mapM (mkFinalizedHValue hsc_env) mb
seqHValue :: HscEnv -> ForeignHValue -> IO ()
seqHValue hsc_env ref =
withForeignRef ref $ \hval ->
iservCmd hsc_env (Seq hval) >>= fromEvalResult
-- -----------------------------------------------------------------------------
-- Interface to the object-code linker
initObjLinker :: HscEnv -> IO ()
initObjLinker hsc_env = iservCmd hsc_env InitLinker
lookupSymbol :: HscEnv -> FastString -> IO (Maybe (Ptr ()))
lookupSymbol hsc_env@HscEnv{..} str
| gopt Opt_ExternalInterpreter hsc_dflags =
-- Profiling of GHCi showed a lot of time and allocation spent
-- making cross-process LookupSymbol calls, so I added a GHC-side
-- cache which sped things up quite a lot. We have to be careful
-- to purge this cache when unloading code though.
withIServ hsc_env $ \iserv@IServ{..} -> do
cache <- readIORef iservLookupSymbolCache
case lookupUFM cache str of
Just p -> return (Just p)
Nothing -> do
m <- uninterruptibleMask_ $
iservCall iserv (LookupSymbol (unpackFS str))
case m of
Nothing -> return Nothing
Just r -> do
let p = fromRemotePtr r
writeIORef iservLookupSymbolCache $! addToUFM cache str p
return (Just p)
| otherwise =
#if defined(HAVE_INTERNAL_INTERPRETER)
fmap fromRemotePtr <$> run (LookupSymbol (unpackFS str))
#else
needExtInt
#endif
lookupClosure :: HscEnv -> String -> IO (Maybe HValueRef)
lookupClosure hsc_env str =
iservCmd hsc_env (LookupClosure str)
purgeLookupSymbolCache :: HscEnv -> IO ()
purgeLookupSymbolCache hsc_env@HscEnv{..} =
when (gopt Opt_ExternalInterpreter hsc_dflags) $
withIServ hsc_env $ \IServ{..} ->
writeIORef iservLookupSymbolCache emptyUFM
-- | loadDLL loads a dynamic library using the OS's native linker
-- (i.e. dlopen() on Unix, LoadLibrary() on Windows). It takes either
-- an absolute pathname to the file, or a relative filename
-- (e.g. "libfoo.so" or "foo.dll"). In the latter case, loadDLL
-- searches the standard locations for the appropriate library.
--
-- Returns:
--
-- Nothing => success
-- Just err_msg => failure
loadDLL :: HscEnv -> String -> IO (Maybe String)
loadDLL hsc_env str = iservCmd hsc_env (LoadDLL str)
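-- A small usage sketch (the library name is hypothetical):
--
-- > mbErr <- loadDLL hsc_env "libfoo.so"
-- > case mbErr of
-- >   Nothing  -> return ()                         -- loaded successfully
-- >   Just err -> error ("loadDLL failed: " ++ err)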
loadArchive :: HscEnv -> String -> IO ()
loadArchive hsc_env path = do
path' <- canonicalizePath path -- Note [loadObj and relative paths]
iservCmd hsc_env (LoadArchive path')
loadObj :: HscEnv -> String -> IO ()
loadObj hsc_env path = do
path' <- canonicalizePath path -- Note [loadObj and relative paths]
iservCmd hsc_env (LoadObj path')
unloadObj :: HscEnv -> String -> IO ()
unloadObj hsc_env path = do
path' <- canonicalizePath path -- Note [loadObj and relative paths]
iservCmd hsc_env (UnloadObj path')
-- Note [loadObj and relative paths]
-- the iserv process might have a different current directory from the
-- GHC process, so we must make paths absolute before sending them
-- over.
addLibrarySearchPath :: HscEnv -> String -> IO (Ptr ())
addLibrarySearchPath hsc_env str =
fromRemotePtr <$> iservCmd hsc_env (AddLibrarySearchPath str)
removeLibrarySearchPath :: HscEnv -> Ptr () -> IO Bool
removeLibrarySearchPath hsc_env p =
iservCmd hsc_env (RemoveLibrarySearchPath (toRemotePtr p))
resolveObjs :: HscEnv -> IO SuccessFlag
resolveObjs hsc_env = successIf <$> iservCmd hsc_env ResolveObjs
findSystemLibrary :: HscEnv -> String -> IO (Maybe String)
findSystemLibrary hsc_env str = iservCmd hsc_env (FindSystemLibrary str)
-- -----------------------------------------------------------------------------
-- Raw calls and messages
-- | Send a 'Message' and receive the response from the iserv process
iservCall :: Binary a => IServ -> Message a -> IO a
iservCall iserv@IServ{..} msg =
remoteCall iservPipe msg
`catch` \(e :: SomeException) -> handleIServFailure iserv e
-- | Read a value from the iserv process
readIServ :: IServ -> Get a -> IO a
readIServ iserv@IServ{..} get =
readPipe iservPipe get
`catch` \(e :: SomeException) -> handleIServFailure iserv e
-- | Send a value to the iserv process
writeIServ :: IServ -> Put -> IO ()
writeIServ iserv@IServ{..} put =
writePipe iservPipe put
`catch` \(e :: SomeException) -> handleIServFailure iserv e
handleIServFailure :: IServ -> SomeException -> IO a
handleIServFailure IServ{..} e = do
ex <- getProcessExitCode iservProcess
case ex of
Just (ExitFailure n) ->
throw (InstallationError ("ghc-iserv terminated (" ++ show n ++ ")"))
_ -> do
terminateProcess iservProcess
_ <- waitForProcess iservProcess
throw e
-- -----------------------------------------------------------------------------
-- Starting and stopping the iserv process
startIServ :: DynFlags -> IO IServ
startIServ dflags = do
let flavour
| WayProf `elem` ways dflags = "-prof"
| WayDyn `elem` ways dflags = "-dyn"
| otherwise = ""
prog = pgm_i dflags ++ flavour
opts = getOpts dflags opt_i
debugTraceMsg dflags 3 $ text "Starting " <> text prog
let createProc = lookupHook createIservProcessHook
(\cp -> do { (_,_,_,ph) <- createProcess cp
; return ph })
dflags
(ph, rh, wh) <- runWithPipes createProc prog opts
lo_ref <- newIORef Nothing
cache_ref <- newIORef emptyUFM
return $ IServ
{ iservPipe = Pipe { pipeRead = rh
, pipeWrite = wh
, pipeLeftovers = lo_ref }
, iservProcess = ph
, iservLookupSymbolCache = cache_ref
, iservPendingFrees = []
}
stopIServ :: HscEnv -> IO ()
stopIServ HscEnv{..} =
gmask $ \_restore -> do
m <- takeMVar hsc_iserv
maybe (return ()) stop m
putMVar hsc_iserv Nothing
where
stop iserv = do
ex <- getProcessExitCode (iservProcess iserv)
if isJust ex
then return ()
else iservCall iserv Shutdown
runWithPipes :: (CreateProcess -> IO ProcessHandle)
-> FilePath -> [String] -> IO (ProcessHandle, Handle, Handle)
#if defined(mingw32_HOST_OS)
foreign import ccall "io.h _close"
c__close :: CInt -> IO CInt
foreign import ccall unsafe "io.h _get_osfhandle"
_get_osfhandle :: CInt -> IO CInt
runWithPipes createProc prog opts = do
(rfd1, wfd1) <- createPipeFd -- we read on rfd1
(rfd2, wfd2) <- createPipeFd -- we write on wfd2
wh_client <- _get_osfhandle wfd1
rh_client <- _get_osfhandle rfd2
let args = show wh_client : show rh_client : opts
ph <- createProc (proc prog args)
rh <- mkHandle rfd1
wh <- mkHandle wfd2
return (ph, rh, wh)
where mkHandle :: CInt -> IO Handle
mkHandle fd = (fdToHandle fd) `onException` (c__close fd)
#else
runWithPipes createProc prog opts = do
(rfd1, wfd1) <- Posix.createPipe -- we read on rfd1
(rfd2, wfd2) <- Posix.createPipe -- we write on wfd2
setFdOption rfd1 CloseOnExec True
setFdOption wfd2 CloseOnExec True
let args = show wfd1 : show rfd2 : opts
ph <- createProc (proc prog args)
closeFd wfd1
closeFd rfd2
rh <- fdToHandle rfd1
wh <- fdToHandle wfd2
return (ph, rh, wh)
#endif
-- -----------------------------------------------------------------------------
{- Note [External GHCi pointers]
We have the following ways to reference things in GHCi:
HValue
------
HValue is a direct reference to a value in the local heap. Obviously
we cannot use this to refer to things in the external process.
RemoteRef
---------
RemoteRef is a StablePtr to a heap-resident value. When
-fexternal-interpreter is used, this value resides in the external
process's heap. RemoteRefs are mostly used to send pointers in
messages between GHC and iserv.
A RemoteRef must be explicitly freed when no longer required, using
freeHValueRefs, or by attaching a finalizer with mkForeignHValue.
To get from a RemoteRef to an HValue you can use 'wormholeRef', which
fails with an error message if -fexternal-interpreter is in use.
ForeignRef
----------
A ForeignRef is a RemoteRef with a finalizer that will free the
'RemoteRef' when it is garbage collected. We mostly use ForeignHValue
on the GHC side.
The finalizer adds the RemoteRef to the iservPendingFrees list in the
IServ record. The next call to iservCmd will free any RemoteRefs in
the list. It was done this way rather than calling iservCmd directly,
because I didn't want to have arbitrary threads calling iservCmd. In
principle it would probably be ok, but it seems less hairy this way.
-}
-- | Creates a 'ForeignRef' that will automatically release the
-- 'RemoteRef' when it is no longer referenced.
mkFinalizedHValue :: HscEnv -> RemoteRef a -> IO (ForeignRef a)
mkFinalizedHValue HscEnv{..} rref = mkForeignRef rref free
where
!external = gopt Opt_ExternalInterpreter hsc_dflags
hvref = toHValueRef rref
free :: IO ()
free
| not external = freeRemoteRef hvref
| otherwise =
modifyMVar_ hsc_iserv $ \mb_iserv ->
case mb_iserv of
Nothing -> return Nothing -- already shut down
Just iserv@IServ{..} ->
return (Just iserv{iservPendingFrees = hvref : iservPendingFrees})
freeHValueRefs :: HscEnv -> [HValueRef] -> IO ()
freeHValueRefs _ [] = return ()
freeHValueRefs hsc_env refs = iservCmd hsc_env (FreeHValueRefs refs)
-- | Convert a 'ForeignRef' to the value it references directly. This
-- only works when the interpreter is running in the same process as
-- the compiler, so it fails when @-fexternal-interpreter@ is on.
wormhole :: DynFlags -> ForeignRef a -> IO a
wormhole dflags r = wormholeRef dflags (unsafeForeignRefToRemoteRef r)
-- | Convert an 'RemoteRef' to the value it references directly. This
-- only works when the interpreter is running in the same process as
-- the compiler, so it fails when @-fexternal-interpreter@ is on.
wormholeRef :: DynFlags -> RemoteRef a -> IO a
wormholeRef dflags _r
| gopt Opt_ExternalInterpreter dflags
= throwIO (InstallationError
"this operation requires -fno-external-interpreter")
#if defined(HAVE_INTERNAL_INTERPRETER)
| otherwise
= localRef _r
#else
| otherwise
= throwIO (InstallationError
"can't wormhole a value in a stage1 compiler")
#endif
-- -----------------------------------------------------------------------------
-- Misc utils
mkEvalOpts :: DynFlags -> Bool -> EvalOpts
mkEvalOpts dflags step =
EvalOpts
{ useSandboxThread = gopt Opt_GhciSandbox dflags
, singleStep = step
, breakOnException = gopt Opt_BreakOnException dflags
, breakOnError = gopt Opt_BreakOnError dflags }
fromEvalResult :: EvalResult a -> IO a
fromEvalResult (EvalException e) = throwIO (fromSerializableException e)
fromEvalResult (EvalSuccess a) = return a
| sdiehl/ghc | compiler/ghci/GHCi.hs | bsd-3-clause | 22,786 | 0 | 23 | 4,558 | 4,732 | 2,414 | 2,318 | 369 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module CommitMsgParsers
( loadParsers
, findCategory
, unknownCategory
, ParserDef
, getParserName
) where
import Text.Regex.Posix
import Data.Map as Map
import Data.List as List
import Data.Maybe
import Control.Applicative
import qualified Data.Yaml as Y
import Data.Yaml (FromJSON(..), (.:), (.:?))
import qualified Data.ByteString.Char8 as BS
import Paths_hstats
type RegExp = String
data ParserDef = ParserDef { name :: String
, matcher :: RegExp
} deriving (Show)
instance FromJSON ParserDef where
  parseJSON (Y.Object v) =
    ParserDef <$>
    v .:   "name" <*>
    v .:   "matcher"
  parseJSON _ = fail "ParserDef: expected a YAML object"
unknownCategory = "unknown"
loadParsers :: IO [ParserDef]
loadParsers = do
conf <- getDataFileName "data/commitMsgPrefixes.yaml" >>= BS.readFile
return $ fromMaybe [] $ (Y.decode conf :: Maybe [ParserDef])
findCategory :: [ParserDef] -> String -> String
findCategory defs msg = maybe unknownCategory name . List.find match $ defs
where match p = msg =~ matcher p :: Bool
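-- For example, given a single (hypothetical) parser whose matcher is the
-- regular expression @^fix@:
--
-- > findCategory [ParserDef "fix" "^fix"] "fix: correct typo" == "fix"
-- > findCategory [ParserDef "fix" "^fix"] "add feature"       == unknownCategory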
getParserName :: ParserDef -> String
getParserName = name
| LFDM/hstats | src/lib/CommitMsgParsers.hs | bsd-3-clause | 1,128 | 0 | 10 | 239 | 318 | 183 | 135 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This module includes the machinery necessary to use hint to load
-- action code dynamically. It includes a Template Haskell function
-- to gather the necessary compile-time information about code
-- location, compiler arguments, etc, and bind that information into
-- the calls to the dynamic loader.
module Snap.Loader.Hint where
import Data.List (groupBy, intercalate, isPrefixOf, nub)
import Control.Concurrent (forkIO, myThreadId)
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad (when)
import Control.Monad.Trans (liftIO)
import Data.Maybe (catMaybes)
import Data.Time.Clock
import Language.Haskell.Interpreter hiding (lift, liftIO)
import Language.Haskell.Interpreter.Unsafe
import Language.Haskell.TH
import Prelude hiding (catch)
import System.Environment (getArgs)
------------------------------------------------------------------------------
import Snap.Types
import qualified Snap.Loader.Static as Static
------------------------------------------------------------------------------
-- | This function derives all the information necessary to use the
-- interpreter from the compile-time environment, and compiles it in
-- to the generated code.
--
-- This could be considered a TH wrapper around a function
--
-- > loadSnap :: IO a -> (a -> IO ()) -> (a -> Snap ()) -> IO (IO (), Snap ())
--
-- with a magical implementation.
--
-- The returned IO action does nothing. The returned Snap action does
-- initialization, runs the action, and does the cleanup. This means
-- that the whole application state will be loaded and unloaded for
-- each request. To make this worthwhile, those steps should be made
-- quite fast.
--
-- The upshot is that you shouldn't need to recompile your server
-- during development unless your .cabal file changes, or the code
-- that uses this splice changes.
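--
-- A typical splice site looks something like this (the three quoted names
-- are hypothetical):
--
-- > (cleanup, handler) <- $(loadSnapTH 'appInit 'appCleanup 'site)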
loadSnapTH :: Name -> Name -> Name -> Q Exp
loadSnapTH initialize cleanup action = do
args <- runIO getArgs
let initMod = nameModule initialize
initBase = nameBase initialize
cleanMod = nameModule cleanup
cleanBase = nameBase cleanup
actMod = nameModule action
actBase = nameBase action
modules = catMaybes [initMod, cleanMod, actMod]
opts = getHintOpts args
let static = Static.loadSnapTH initialize cleanup action
-- The let in this block causes the static expression to be
-- pattern-matched, providing an extra check that the types were
-- correct at compile-time, at least.
[| do let _ = $static :: IO (IO (), Snap ())
hint <- hintSnap opts modules initBase cleanBase actBase
return (return (), hint) |]
------------------------------------------------------------------------------
-- | Convert the command-line arguments passed in to options for the
-- hint interpreter. This is somewhat brittle code, based on a few
-- experimental datapoints regarding the structure of the command-line
-- arguments cabal produces.
getHintOpts :: [String] -> [String]
getHintOpts args = removeBad opts
where
bad = ["-threaded", "-O"]
removeBad = filter (\x -> not $ any (`isPrefixOf` x) bad)
hideAll = filter (== "-hide-all-packages") args
srcOpts = filter (\x -> "-i" `isPrefixOf` x
&& not ("-idist" `isPrefixOf` x)) args
toCopy = init' $ dropWhile (not . ("-package" `isPrefixOf`)) args
copy = map (intercalate " ") . groupBy (\_ s -> not $ "-" `isPrefixOf` s)
opts = hideAll ++ srcOpts ++ copy toCopy
init' [] = []
init' xs = init xs
------------------------------------------------------------------------------
-- | This function creates the Snap handler that actually is
-- responsible for doing the dynamic loading of actions via hint,
-- given all of the configuration information that the interpreter
-- needs. It also ensures safe concurrent access to the interpreter,
-- and caches the interpreter results for a short time before allowing
-- it to run again.
--
-- This constructs an expression of type Snap (), that is essentially
--
-- > bracketSnap initialization cleanup handler
--
-- for the values of initialization, cleanup, and handler passed in.
--
-- Generally, this won't be called manually. Instead, loadSnapTH will
-- generate a call to it at compile-time, calculating all the
-- arguments from its environment.
hintSnap :: [String] -- ^ A list of command-line options for the interpreter
-> [String] -- ^ A list of modules that need to be
-- interpreted. This should contain only the
-- modules which contain the initialization,
-- cleanup, and handler actions. Everything else
-- they require will be loaded transitively.
-> String -- ^ The name of the initialization action
-> String -- ^ The name of the cleanup action
-> String -- ^ The name of the handler action
-> IO (Snap ())
hintSnap opts modules initialization cleanup handler = do
let action = intercalate " " [ "bracketSnap"
, initialization
, cleanup
, handler
]
interpreter = do
loadModules . nub $ modules
let imports = "Prelude" : "Snap.Types" : modules
setImports . nub $ imports
interpret action (as :: Snap ())
loadInterpreter = unsafeRunInterpreterWithArgs opts interpreter
-- Protect the interpreter from concurrent and high-speed serial
-- access.
loadAction <- protectedActionEvaluator 3 loadInterpreter
return $ do
interpreterResult <- liftIO loadAction
case interpreterResult of
Left err -> error $ format err
Right handlerAction -> handlerAction
------------------------------------------------------------------------------
-- | Convert an InterpreterError to a String for presentation
format :: InterpreterError -> String
format (UnknownError e) = "Unknown interpreter error:\r\n\r\n" ++ e
format (NotAllowed e) = "Interpreter action not allowed:\r\n\r\n" ++ e
format (GhcException e) = "GHC error:\r\n\r\n" ++ e
format (WontCompile errs) = "Compile errors:\r\n\r\n" ++
(intercalate "\r\n" $ nub $ map errMsg errs)
------------------------------------------------------------------------------
-- | Create a wrapper for an action that protects the action from
-- concurrent or rapid evaluation.
--
-- There will be at least the passed-in 'NominalDiffTime' delay
-- between the finish of one execution of the action the start of the
-- next. Concurrent calls to the wrapper, and calls within the delay
-- period, end up with the same calculated value returned.
--
-- If an exception is raised during the processing of the action, it
-- will be thrown to all waiting threads, and for all requests made
-- before the delay time has expired after the exception was raised.
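--
-- A small usage sketch (@reloadSite@ is a hypothetical, expensive IO action):
--
-- > do cachedReload <- protectedActionEvaluator 3 reloadSite
-- >    r1 <- cachedReload   -- runs reloadSite
-- >    r2 <- cachedReload   -- within 3 seconds: reuses the cached result
-- >    ...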
protectedActionEvaluator :: NominalDiffTime -> IO a -> IO (IO a)
protectedActionEvaluator minReEval action = do
-- The list of requesters waiting for a result. Contains the
-- ThreadId in case of exceptions, and an empty MVar awaiting a
-- successful result.
--
-- type: MVar [(ThreadId, MVar a)]
readerContainer <- newMVar []
-- Contains the previous result, and the time it was stored, if a
-- previous result has been computed. The result stored is either
-- the actual result, or the exception thrown by the calculation.
--
-- type: MVar (Maybe (Either SomeException a, UTCTime))
resultContainer <- newMVar Nothing
-- The model used for the above MVars in the returned action is
-- "keep them full, unless updating them." In every case, when
-- one of those MVars is emptied, the next action is to fill that
-- same MVar. This makes deadlocking on MVar wait impossible.
return $ do
existingResult <- readMVar resultContainer
now <- getCurrentTime
case existingResult of
Just (res, ts) | diffUTCTime now ts < minReEval ->
-- There's an existing result, and it's still valid
case res of
Right val -> return val
Left e -> throwIO e
_ -> do
-- Need to calculate a new result
tid <- myThreadId
reader <- newEmptyMVar
readers <- takeMVar readerContainer
-- Some strictness is employed to ensure the MVar
-- isn't holding on to a chain of unevaluated thunks.
let pair = (tid, reader)
newReaders = readers `seq` pair `seq` (pair : readers)
putMVar readerContainer $! newReaders
-- If this is the first reader, kick off evaluation of
-- the action in a new thread. This is slightly
-- careful to block asynchronous exceptions to that
-- thread except when actually running the action.
when (null readers) $ do
let runAndFill = block $ do
a <- unblock action
clearAndNotify (Right a) (flip putMVar a . snd)
killWaiting :: SomeException -> IO ()
killWaiting e = block $ do
clearAndNotify (Left e) (flip throwTo e . fst)
throwIO e
clearAndNotify r f = do
t <- getCurrentTime
_ <- swapMVar resultContainer $ Just (r, t)
allReaders <- swapMVar readerContainer []
mapM_ f allReaders
_ <- forkIO $ runAndFill `catch` killWaiting
return ()
-- Wait for the evaluation of the action to complete,
-- and return its result.
takeMVar reader
| janrain/snap | src/Snap/Loader/Hint.hs | bsd-3-clause | 10,281 | 0 | 27 | 2,933 | 1,383 | 755 | 628 | 109 | 3 |
module CSPM.Evaluator.Dot (
combineDots, dataTypeInfo,
extensions, extensionsSet, oneFieldExtensions,
productions, productionsSet, splitIntoFields,
compressIntoEnumeratedSet,
) where
import CSPM.Syntax.Names
import {-# SOURCE #-} CSPM.Evaluator.Exceptions
import CSPM.Evaluator.Monad
import CSPM.Evaluator.Values
import CSPM.Evaluator.ValueSet hiding (cartesianProduct)
import qualified CSPM.Evaluator.ValueSet as S
import Data.List (groupBy, sortBy)
import Data.Maybe (catMaybes, isJust)
import Util.Annotated
import Util.List
import Util.Prelude
dataTypeInfo :: Name -> EvaluationMonad (Value, Int, Array Int ValueSet)
dataTypeInfo n = do
VTuple dta <- lookupVar n
let VInt a = dta!1
VTuple fs = dta!2
return $ (dta!0, a, fmap (\(VSet s) -> s) fs)
{-# INLINE dataTypeInfo #-}
-- | The number of fields this datatype or channel has.
arityOfDataTypeClause :: Name -> EvaluationMonad Int
arityOfDataTypeClause n = do
(_, a, _) <- dataTypeInfo n
return a
-- | Returns true if the value is a complete field.
isCompleteField :: Value -> EvaluationMonad Bool
isCompleteField (v@(VDot vs)) =
case maybeNamedDot v of
Nothing -> return True
Just n -> do
arity <- arityOfDataTypeClause n
if arity == length vs -1 then
isCompleteField (last vs)
else return False
isCompleteField _ = return True
-- | Takes two values and dots them together appropriately.
combineDots :: SrcSpan -> Value -> Value -> EvaluationMonad Value
combineDots loc v1 v2 =
let
-- | Dots the given value onto the right of the given base, providing
-- the left hand value is a field.
maybeDotFieldOn :: Value -> Value -> EvaluationMonad (Maybe Value)
maybeDotFieldOn vbase v = do
fields <-
case maybeNamedDot vbase of
Just n -> do
(_, _, fs) <- dataTypeInfo n
let VDot (nd:_) = vbase
return $! S.cartesianProduct CartDot $
fromList [nd] : elems fs
Nothing -> return S.emptySet
dotNamedFieldOn (maybeNamedDot vbase) fields vbase v
dotNamedFieldOn :: Maybe Name -> ValueSet -> Value -> Value ->
EvaluationMonad (Maybe Value)
dotNamedFieldOn (Just n) allowedValues (VDot vs) v = do
let fieldCount = length vs -1
lastField = last vs
getField ix = splitFieldSet ix (VDot vs) allowedValues
b <- isCompleteField lastField
arity <- arityOfDataTypeClause n
if b then
if arity == fieldCount then return Nothing
else do
let newValue = VDot (vs++[v])
fieldSet = getField fieldCount
checkIsValidForField fieldSet newValue fieldCount v $
return $ Just newValue
else do
let fieldSet = getField (fieldCount-1)
vLast <- dotNamedFieldOn (maybeNamedDot lastField) fieldSet
lastField v
case vLast of
Nothing -> return Nothing
Just vLast -> do
let newValue = VDot (replaceLast vs vLast)
checkIsValidForField fieldSet newValue fieldCount vLast $
return $ Just newValue
dotNamedFieldOn Nothing _ _ _ = return Nothing
checkIsValidForField :: ValueSet -> Value -> Int ->
Value -> EvaluationMonad a -> EvaluationMonad a
checkIsValidForField allowedSet overallValue field v result = do
b <- isCompleteField v
if not b then result else do
if member v allowedSet then result
else throwError' $
dotIsNotValidMessage overallValue field v allowedSet loc
splitFieldSet :: Int -> Value -> ValueSet -> ValueSet
splitFieldSet ix v fieldSet =
case fastUnDotCartProduct fieldSet v of
                Just restrictByField -> restrictByField!!(ix+1)
Nothing -> slowMatchDotPrefix (\ _ vs -> vs!!(ix+1)) fieldSet v
-- | Dots the two values together, ensuring that if either the left or
-- the right value is a dot list combines them into one dot list.
-- This function assumes that any data values are not meant to be split
-- apart.
dotAndReduce :: Value -> Value -> Value
-- We don't need to split v2 into fields because whenever we call
-- this function the second value is simply being dotted onto the right
-- and not put into a field of any sort
dotAndReduce v1 v2 = VDot (splitIntoFields v1 ++ [v2])
-- | Given a base value and the value of a field dots the field onto
-- the right of the base. Assumes that the value provided is a field.
dotFieldOn :: Value -> Value -> EvaluationMonad Value
dotFieldOn vBase vField = do
mv <- maybeDotFieldOn vBase vField
case mv of
Just v -> return v
Nothing -> return $ dotAndReduce vBase vField
-- | Split a value up into the values that could be used as fields.
splitIntoFields :: Value -> [Value]
splitIntoFields (v@(VDot (VDataType n:_))) = [v]
splitIntoFields (v@(VDot (VChannel n:_))) = [v]
splitIntoFields (VDot vs) = vs
splitIntoFields v = [v]
-- | Given a base value and a list of many fields dots the fields onto
-- the base. Assumes that the values provided are fields.
dotManyFieldsOn :: Value -> [Value] -> EvaluationMonad Value
dotManyFieldsOn v [] = return v
dotManyFieldsOn vBase (v:vs) = do
vBase' <- dotFieldOn vBase v
dotManyFieldsOn vBase' vs
in
-- Split v2 up into its composite fields and then dot them onto v1.
dotManyFieldsOn v1 (splitIntoFields v2)
-- | Returns an x such that ev.x has been extended by exactly one atomic field.
-- This could be inside a subfield or elsewhere.
oneFieldExtensions :: Value -> EvaluationMonad [Value]
oneFieldExtensions v =
let
exts :: [ValueSet] -> Value -> EvaluationMonad [Value]
exts fieldSets (VDot vs) = do
case maybeNamedDot (VDot vs) of
Nothing -> return [VDot []]
Just n -> do
let fieldCount = length vs -1
b <- isCompleteField (last vs)
if b then return $!
if length fieldSets == fieldCount then [VDot []]
else toList (fieldSets!!fieldCount)
else do
let field = fieldSets!!(fieldCount-1)
case fastUnDotCartProduct field (last vs) of
Just restrictByField ->
exts (tail restrictByField) (last vs)
Nothing -> return $! toList $ slowMatchDotPrefix
(\ i v -> v!!i) field (last vs)
exts _ _ = return [VDot []]
in do
case maybeNamedDot v of
Just n -> do
(_, _, fieldSets) <- dataTypeInfo n
exts (elems fieldSets) v
Nothing -> return [VDot []]
maybeNamedDot :: Value -> Maybe Name
maybeNamedDot (VDot (VChannel n : _)) = Just n
maybeNamedDot (VDot (VDataType n : _)) = Just n
maybeNamedDot _ = Nothing
-- | Takes a datatype or a channel value and then computes all x such that
-- ev.x is a full datatype/event. Each of the returned values is guaranteed
-- to be a VDot.
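--
-- For example, for a hypothetical channel declared as
-- @channel c : {0,1}.Bool@, applying 'extensions' to the partial event @c.0@
-- yields the fields @true@ and @false@, since @c.0.true@ and @c.0.false@ are
-- the complete events.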
extensions :: Value -> EvaluationMonad [Value]
extensions v = extensionsSet v >>= return . toList
extensionsSet :: Value -> EvaluationMonad ValueSet
extensionsSet v = do
case maybeNamedDot v of
Nothing -> return S.emptySet
Just n -> do
b <- isCompleteField v
if b then return $! S.fromList [VDot []] else do
(_, _, fieldSets) <- dataTypeInfo n
sets <- extensionsSets (elems fieldSets) v
return $
case sets of
[s] -> s
sets -> S.cartesianProduct CartDot sets
-- | Takes a value and returns a set of fields such that ev.x is a full thing.
-- Further, the field sets are guaranteed to be representable as a full
-- carteisan product.
extensionsSets :: [ValueSet] -> Value -> EvaluationMonad [ValueSet]
extensionsSets fieldSets (VDot vs) = do
let fieldCount = length vs - 1
maybeWrap [v] = v
maybeWrap vs = VDot vs
-- Firstly, complete the last field in the current value (in case
-- it is only half formed).
exsLast <-
if fieldCount == 0 || not (isJust (maybeNamedDot (last vs))) then
return []
else do
b <- isCompleteField (last vs)
if b then return []
else do
let field = fieldSets!!(fieldCount-1)
case fastUnDotCartProduct field (last vs) of
Just restrictByField ->
extensionsSets (tail restrictByField) (last vs)
Nothing -> -- Need to do a slow scan
return $!
[slowMatchDotPrefix (\ i v -> maybeWrap (drop i v))
field (last vs)]
return $! exsLast ++ drop fieldCount fieldSets
extensionsSets _ _ = return []
-- | Given a set of dotted values, and a dotted value, scans the set of dotted
-- values and calls the specified function for each value that matches.
slowMatchDotPrefix :: (Int -> [Value] -> Value) -> ValueSet -> Value -> ValueSet
slowMatchDotPrefix f set v1 =
let
matches v2 | v2 `isProductionOf` v1 =
let VDot vs' = v2
VDot vs = v1
in [f (length vs) vs']
matches _ = []
in
fromList (concatMap matches (toList set))
-- | Given two dot lists, the second of which may be an incomplete dot-list,
-- returns True if the first is a production of the second.
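-- For example, with a hypothetical channel @c@, the event @c.0.true@ is a
-- production of both @c@ and @c.0@, but not of @c.1@.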
isProductionOf :: Value -> Value -> Bool
isProductionOf (VDot (n1:fs1)) (VDot (n2:fs2)) =
n1 == n2 && length fs1 >= length fs2 && listIsProductionOf fs1 fs2
where
listIsProductionOf _ [] = True
listIsProductionOf [] _ = False
listIsProductionOf (f1:fs1) [f2] = f1 `isProductionOf` f2
listIsProductionOf (f1:fs1) (f2:fs2) =
f1 == f2 && listIsProductionOf fs1 fs2
isProductionOf v1 v2 = v1 == v2
-- | Takes a datatype or a channel value and computes v.x for all x that
-- complete the value.
productions :: Value -> EvaluationMonad [Value]
productions v = productionsSet v >>= return . toList
productionsSet :: Value -> EvaluationMonad ValueSet
productionsSet v = do
case maybeNamedDot v of
Nothing -> return S.emptySet
Just n -> do
b <- isCompleteField v
if b then return $! S.fromList [v] else do
(_, _, fieldSets) <- dataTypeInfo n
sets <- productionsSets (elems fieldSets) v
return $! S.cartesianProduct CartDot sets
productionsSets :: [ValueSet] -> Value -> EvaluationMonad [ValueSet]
productionsSets fieldSets (VDot vs) = do
let fieldCount = length vs - 1
psLast <-
if fieldCount == 0 then return []
else if not (isJust (maybeNamedDot (last vs))) then return []
else do
b <- isCompleteField (last vs)
if b then return []
else do
let field = fieldSets!!(fieldCount-1)
case fastUnDotCartProduct field (last vs) of
Just restrictByField -> do
sets <- productionsSets (tail restrictByField) (last vs)
return [S.cartesianProduct CartDot sets]
Nothing -> return
[slowMatchDotPrefix (\ _ -> VDot) field (last vs)]
let psSets = case psLast of
[] -> map (\v -> fromList [v]) vs
_ ->
-- We cannot express this as a simple cart product, as
-- the resulting item has dots at two levels. Thus,
-- dot together this lot and form an explicit set,
-- then we proceed as before
map (\v -> fromList [v]) (init vs) ++ psLast
return $! psSets ++ drop fieldCount fieldSets
productionsSets _ v = return []
takeFields :: Int -> [Value] -> EvaluationMonad ([Value], [Value])
takeFields 0 vs = return ([], vs)
takeFields 1 vs = do
(f, vs) <- takeFirstField False vs
return ([f], vs)
takeFields n vs = do
(f, vs') <- takeFirstField False vs
(fs, vs'') <- takeFields (n-1) vs'
return (f:fs, vs'')
takeFirstField :: Bool -> [Value] -> EvaluationMonad (Value, [Value])
takeFirstField True (VDataType n : vs) = return (VDataType n, vs)
takeFirstField True (VChannel n : vs) = return (VChannel n, vs)
takeFirstField False (VDataType n : vs) = do
(_, arity, fieldSets) <- dataTypeInfo n
(fs, vs) <- takeFields arity vs
return $ (VDot (VDataType n : fs), vs)
takeFirstField False (VChannel n : vs) = do
(_, arity, fieldSets) <- dataTypeInfo n
(fs, vs) <- takeFields arity vs
return $ (VDot (VChannel n : fs), vs)
takeFirstField forceSplit (v:vs) = return (v, vs)
-- | Takes a set of dotted values (i.e. a set of VDot _) and returns a list of
-- sets such that the cartesian product is equal to the original set.
--
-- This throws an error if the set cannot be decomposed.
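--
-- For example, for a hypothetical channel @c@ carrying @{0,1}.Bool@, the set
-- {c.0.true, c.0.false, c.1.true, c.1.false} splits into the field sets
-- [{c}, {0,1}, {true, false}], whereas {c.0.true, c.1.false} is not
-- rectangular and causes the error to be thrown.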
splitIntoFields :: Bool -> Name -> ValueSet -> EvaluationMonad [ValueSet]
splitIntoFields forceSplit n vs = do
case unDotProduct vs of
Just ss -> return ss
Nothing -> manuallySplitValues forceSplit n vs (toList vs)
isDot :: Value -> Bool
isDot (VDot _ ) = True
isDot _ = False
manuallySplitValues :: Bool -> Name -> ValueSet -> [Value] ->
EvaluationMonad [ValueSet]
manuallySplitValues forceSplit n vs (values@(VDot fs : _)) = do
let extract (VDot vs) = vs
-- | Splits a dot list into the separate fields.
split :: [Value] -> EvaluationMonad [Value]
split [] = return []
split vs = do
(v, vs') <- takeFirstField forceSplit vs
ss <- split vs'
return $ v:ss
splitValues <- mapM (split . extract) (toList vs)
if splitValues == [] then return [] else do
let fieldCount = length (head splitValues)
combine [] = replicate fieldCount []
combine (vs:vss) = zipWith (:) vs (combine vss)
-- | The list of values such that cart producting them together should
-- yield the overall datatype.
cartProductFields :: [[Value]]
cartProductFields = combine splitValues
-- | Given a set, recursively checks that it is ok, and reconstruct the
-- set as a cart product.
recursivelySplit vs = do
if length vs > 0 && isDot (head vs)
&& length (extract (head vs)) > 1 then do
-- We've got a dotted field - check to see if this field is
-- recursively decomposable
sets <- splitIntoFields True n (fromList vs)
if length sets == 1 then return $ head sets
else return $ S.cartesianProduct S.CartDot sets
else return $! fromList vs
if or (map isMixedList cartProductFields) then
if forceSplit || length cartProductFields == 1 then return [vs]
else throwError $ setNotRectangularErrorMessage (nameDefinition n) vs
Nothing
else do
sets <- mapM recursivelySplit cartProductFields
let cartProduct =
if length sets == 1 && isDot (head (toList (head sets))) then do
-- Don't wrap with extra dots if we already have some
head sets
else S.cartesianProduct S.CartDot sets
if cartProduct /= vs then
if forceSplit then
return [vs]
else throwError $
setNotRectangularErrorMessage (nameDefinition n) vs
(Just cartProduct)
else return $ sets
manuallySplitValues _ _ vs _ = return [vs]
isMixedList :: [Value] -> Bool
isMixedList [] = False
isMixedList [x] = False
isMixedList (VInt _ : (xs@(VInt _ : _))) = isMixedList xs
isMixedList (VBool _ : (xs@(VBool _ : _))) = isMixedList xs
isMixedList (VChar _ : (xs@(VChar _ : _))) = isMixedList xs
isMixedList (VTuple t1 : (xs@(VTuple t2 : _))) =
let vs1 = elems t1
vs2 = elems t2
in length vs1 /= length vs2
|| or (zipWith (\ x y -> isMixedList [x,y]) vs1 vs2)
|| isMixedList xs
isMixedList (VDot vs1 : (xs@(VDot vs2 : _))) =
length vs1 /= length vs2
|| or (zipWith (\ x y -> isMixedList [x,y]) vs1 vs2)
|| isMixedList xs
isMixedList (VChannel _ : (xs@(VChannel _ : _))) = isMixedList xs
isMixedList (VDataType _ : (xs@(VDataType _ : _))) = isMixedList xs
isMixedList (VProc _ : (xs@(VProc _ : _))) = isMixedList xs
isMixedList (VList vs1 : (xs@(VList vs2 : _))) =
(length vs1 > 0 && length vs2 > 0 && isMixedList [head vs1, head vs2])
|| isMixedList xs
isMixedList (VSet s1 : (xs@(VSet s2 : _))) =
let vs1 = toList s1
vs2 = toList s2
in (length vs1 > 0 && length vs2 > 0 && isMixedList [head vs1, head vs2])
|| isMixedList xs
isMixedList _ = True
-- | Takes a set and returns a list of values xs such that
-- Union({productions(x) | x <- xs}) == xs. For example, if c is a channel of
-- type {0,1} then {c.0, c.1} would return [c].
--
-- This is primarily used for display purposes.
compressIntoEnumeratedSet :: ValueSet -> EvaluationMonad (Maybe [Value])
compressIntoEnumeratedSet vs =
let
haveAllOfLastField :: [[Value]] -> EvaluationMonad Bool
haveAllOfLastField ys = do
let n = case head (head ys) of
VDataType n -> n
VChannel n -> n
fieldIx = length (head ys) - 2
(_, _, fieldSets) <- dataTypeInfo n
if fromList (map last ys) == fieldSets!fieldIx then
-- All values are used
return True
else return False
splitGroup :: [[Value]] -> EvaluationMonad (Maybe [Value])
splitGroup ([_]:_) = return Nothing
splitGroup vs = do
b <- haveAllOfLastField vs
if b then
-- have everything, and inits are equal, so can compress.
-- Since the inits are equal just take the init of the first
-- item.
return $ Just $ init (head vs)
else return $ Nothing
forceRepeatablyCompress :: [[Value]] -> EvaluationMonad [Value]
forceRepeatablyCompress vs = do
mt <- repeatablyCompress vs
return $! case mt of
Just vs -> vs
Nothing -> map VDot vs
-- | Repeatably compresses the supplied values from the back, returning
-- the compressed set.
repeatablyCompress :: [[Value]] -> EvaluationMonad (Maybe [Value])
repeatablyCompress [] = return Nothing
repeatablyCompress vs = do
let initiallyEqual :: [[[Value]]]
initiallyEqual = groupBy (\ xs ys ->
head xs == head ys && init xs == init ys) $
sortBy (\ xs ys -> compare (head xs) (head ys)
`thenCmp` compare (init xs) (init ys)) vs
-- head is required above (consider [x]).
processNothing Nothing vss = map VDot vss
processNothing (Just _) vss = []
gs <- mapM splitGroup initiallyEqual
let vsDone = zipWith processNothing gs initiallyEqual
-- Now, repeatably compress the prefixes that were equal.
case catMaybes gs of
[] -> return Nothing
xs -> do
vsRecursive <- forceRepeatablyCompress xs
return $! Just (vsRecursive ++ concat vsDone)
in case toList vs of
[] -> return Nothing
(vs @ (VDot ((VChannel _) :_) : _)) ->
repeatablyCompress (map (\ (VDot xs) -> xs) vs)
_ -> return Nothing -- must be a set that we cannot handle
| sashabu/libcspm | src/CSPM/Evaluator/Dot.hs | bsd-3-clause | 20,468 | 2 | 26 | 6,812 | 5,812 | 2,913 | 2,899 | -1 | -1 |
-- | Parsing argument-like things.
module Data.Attoparsec.Args (EscapingMode(..), argsParser) where
import Control.Applicative
import Data.Attoparsec.Text ((<?>))
import qualified Data.Attoparsec.Text as P
import Data.Attoparsec.Types (Parser)
import Data.Text (Text)
-- | Mode for parsing escape characters.
data EscapingMode
= Escaping
| NoEscaping
deriving (Show,Eq,Enum)
-- | A basic argument parser. It supports space-separated text, and
-- string quotation with identity escaping: \x -> x.
argsParser :: EscapingMode -> Parser Text [String]
argsParser mode = many (P.skipSpace *> (quoted <|> unquoted)) <*
P.skipSpace <* (P.endOfInput <?> "unterminated string")
where
unquoted = P.many1 naked
quoted = P.char '"' *> string <* P.char '"'
string = many (case mode of
Escaping -> escaped <|> nonquote
NoEscaping -> nonquote)
escaped = P.char '\\' *> P.anyChar
nonquote = P.satisfy (not . (=='"'))
naked = P.satisfy (not . flip elem ("\" " :: String))
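-- A hypothetical usage sketch (not part of the original module), assuming
-- OverloadedStrings for the input literal and the standard
-- 'Data.Attoparsec.Text.parseOnly' driver:
--
-- >>> P.parseOnly (argsParser Escaping) "foo \"bar baz\" qux"
-- Right ["foo","bar baz","qux"]
--
-- A missing closing quote makes the final 'P.endOfInput' check fail with the
-- "unterminated string" label.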
| hesselink/stack | src/Data/Attoparsec/Args.hs | bsd-3-clause | 1,096 | 0 | 12 | 277 | 290 | 164 | 126 | 21 | 2 |
{-# LANGUAGE ParallelListComp, OverloadedStrings #-}
module Main where
import Web.Scotty
import Graphics.Blank
import Control.Concurrent
import Control.Monad
import Data.Array
import Data.Maybe
-- import Data.Binary (decodeFile)
import System.Environment
import System.FilePath
import BreakthroughGame
import GenericGame
import ThreadLocal
import AgentGeneric
import MinimalNN
import qualified Data.HashMap as HashMap
import Text.Printf
data Fill = FillEmpty | FillP1 | FillP2
data BG = BGLight | BGDark | BGSelected | BGPossible deriving (Eq,Read,Show,Ord)
data DrawingBoard = DrawingBoard { getArrDB :: (Array (Int,Int) Field) }
data Field = Field { fFill :: Fill
, fBG :: BG
, fSuperBG :: Maybe BG
}
-- | Enable feedback on mouse movement. Works poorly over high-latency links.
enableMouseMoveFeedback :: Bool
enableMouseMoveFeedback = False
-- | path where static directory resides
staticDataPath :: FilePath
staticDataPath = "."
-- game params
maxTiles, offset, side :: (Num a) => a
side = 50
maxTiles = 8
offset = 50
drawPointEv :: Event -> Canvas ()
drawPointEv e = do
case e of
Event _ (Just (x,y)) -> drawPoint x y
_ -> return ()
drawPoint :: Int -> Int -> Canvas ()
drawPoint x y = do
font "bold 20pt Mono"
textBaseline "middle"
textAlign "center"
strokeStyle "rgb(240, 124, 50)"
strokeText ("+",fromIntegral x, fromIntegral y)
return ()
bgToStyle BGLight = "rgb(218, 208, 199)"
bgToStyle BGDark = "rgb(134, 113, 92)"
bgToStyle BGSelected = "rgb(102,153,0)"
bgToStyle BGPossible = "rgb(153,204,0)"
drawField :: (Float,Float) -> Bool -> Field -> Canvas ()
drawField baseXY@(x,y) highlight field = do
let s2 = side/2
s4 = side/4
-- background
let actualBG = fromMaybe (fBG field) (fSuperBG field)
fillStyle (bgToStyle actualBG)
fillRect (x,y,side,side)
-- border
strokeStyle "rgb(10,10,10)"
strokeRect (x,y,side,side)
-- fill
let drawFill style1 style2 = do
save
beginPath
-- lineWidth 4
let px = x+s2
py = y+s2
arc (px,py, s4, 0, (2*pi), False)
custom $ unlines $ [
printf "var grd=c.createRadialGradient(%f,%f,3,%f,%f,10); " px py px py
,printf "grd.addColorStop(0,%s); " (show style1)
,printf "grd.addColorStop(1,%s); " (show style2)
,"c.fillStyle=grd; "
]
fill
restore
case (fFill field) of
FillEmpty -> return ()
FillP1 -> drawFill "rgb(250,250,250)" "rgb(240,240,240)"
FillP2 -> drawFill "rgb(50,50,50)" "rgb(40,40,40)"
-- highlight
when highlight $ do
strokeStyle "rgb(120, 210, 30)"
strokeRect (x+side*0.1,y+side*0.1,side*0.8,side*0.8)
return ()
positionToIndex :: (Int,Int) -> Maybe (Int,Int)
positionToIndex (px,py) = do
cx <- toCoord px
cy <- toCoord py
return (cx, cy)
where
toCoord val = case (val-offset) `div` side of
x | x < 0 -> Nothing
| x >= maxTiles -> Nothing
| otherwise -> Just x
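-- A hypothetical usage sketch (not part of the original module): with
-- offset = 50 and side = 50, the pixel (60,170) lands on board square (0,2),
-- while pixels left of (or above) the board map to Nothing.
--
-- >>> positionToIndex (60, 170)
-- Just (0,2)
-- >>> positionToIndex (10, 60)
-- Nothing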
drawFills :: [Fill] -> Canvas ()
drawFills boardFills = do
let pos = zip (zip boardFills (cycle [True,False,False]))
[ (((offset + x*side),(offset + y*side)),bg)
| y <- [0..maxTiles-1], x <- [0..maxTiles-1] | bg <- boardBackgrounds ]
mapM_ (\ ((f,hl),((x,y),bg)) -> drawField (fromIntegral x, fromIntegral y) hl (Field f bg Nothing)) pos
boardBackgrounds = let xs = (take maxTiles $ cycle [BGLight, BGDark]) in cycle (xs ++ reverse xs)
ixToBackground (x,y) = if ((x-y) `mod` 2) == 0 then BGLight else BGDark
newDemoBoard = DrawingBoard $ array ((0,0), ((maxTiles-1),(maxTiles-1)))
[ ((x,y),(Field f bg Nothing)) | y <- [0..maxTiles-1], x <- [0..maxTiles-1]
| bg <- boardBackgrounds
| f <- cycle [FillEmpty, FillP1, FillP2, FillP2, FillP1]
]
drawBoard maybeHighlightPos (DrawingBoard arr) = mapM_ (\ (pos,field) -> drawField (fi pos) (hl pos) field) (assocs arr)
where
hl p = Just p == maybeHighlightPos
fi (x,y) = (offset+side*(fromIntegral x), offset+side*(fromIntegral y))
drawBreakthroughGame :: Breakthrough -> DrawingBoard
drawBreakthroughGame br = let (w,h) = boardSize br
toFill Nothing = FillEmpty
toFill (Just P1) = FillP1
toFill (Just P2) = FillP2
getFill pos = toFill $ HashMap.lookup pos (board br)
arr = array ((0,0), (w-1,h-1))
[ ((x,y),(Field (getFill (x,y)) (ixToBackground (x,y)) Nothing)) | y <- [0..w-1], x <- [0..h-1]]
result = DrawingBoard arr
in result
data CanvasGameState = CanvasGameState { boardDrawn :: DrawingBoard
, lastHighlight :: (Maybe Position)
, boardState :: Breakthrough
, playerNow :: Player2
, allFinished :: Bool
}
makeCGS b p = CanvasGameState (drawBreakthroughGame b) Nothing b p False
drawUpdateCGS ctx cgs getPName = send ctx $ do
drawBoard (lastHighlight cgs) (boardDrawn cgs)
let p = playerNow cgs
drawCurrentPlayer p (getPName p)
let win = winner (boardState cgs)
case win of
Nothing -> return ()
Just w -> drawWinner w
return (cgs { allFinished = (win /= Nothing) })
p2Txt :: Player2 -> String
p2Txt P1 = "Player 1"
p2Txt P2 = "Player 2"
drawWinner w = do
let txt = p2Txt w ++ " wins the game!"
tpx = offset + (maxTiles * side / 2)
tpy = offset + (maxTiles * side / 2)
rpx = offset
rpy = offset
rdimx = maxTiles * side
rdimy = maxTiles * side
globalAlpha 0.75
fillStyle "gray"
fillRect (rpx,rpy,rdimx,rdimy)
globalAlpha 1
textBaseline "middle"
textAlign "center"
font "bold 20pt Sans"
strokeStyle (if w == P1 then "darkred" else "darkgreen")
strokeText (txt, tpx, tpy)
drawCurrentPlayer pl plName = do
-- put text
let txt = printf ("Current move: %s (%s)"::String) (p2Txt pl) plName
font "15pt Sans"
clearRect (0,0,1500,offset*0.9) -- fix for damaging board border
fillStyle "black"
fillText (txt, offset, offset/2)
-- put symbol on the left side
clearRect (0,0,offset*0.9,1500)
let px = offset/2
py = offset + (side * pside)
pside = 0.5 + if pl == P1 then 0 else (maxTiles-1)
pcol = if pl == P1 then "red" else "blue"
save
font "bold 20pt Mono"
textBaseline "middle"
textAlign "center"
strokeStyle "rgb(240, 124, 50)"
custom $ unlines $ [
printf "var grd=c.createRadialGradient(%f,%f,3,%f,%f,10); " px py px py
,printf "grd.addColorStop(0,%s); " (show pcol)
,"grd.addColorStop(1,\"white\"); "
,"c.strokeStyle=grd; "
]
strokeText ("+",px,py)
restore
main :: IO ()
main = do
-- crude command line parsing
args <- getArgs
let port = case args of
(x:_) -> read x
_ -> 3000
print ("port",port)
let dbn = case args of
(_:fn:_) -> fn
_ -> "assets/dbn.bin"
print ("dbn",dbn)
network <- MinimalNN.decodeFile dbn
-- apps
let app1 = ("/pvc", (pvc network))
app2 = ("/pvp", pvp)
apps <- mapM (\ (path, app) -> blankCanvasParamsScotty app staticDataPath False path) [app1, app2]
-- main page
let index = get "/" (file (staticDataPath </> "static" </> "global-index.html"))
-- launch server
scotty port (sequence_ (apps ++ [index]))
pvc :: TNetwork -> Context -> IO ()
pvc network context = do
let agent'0 = runThrLocMainIO (mkTimed "wtf3" network) :: IO (AgentTrace (AgentSimple TNetwork))
agent'1 = runThrLocMainIO (mkTimed "rnd" ()) :: IO (AgentTrace AgentRandom)
agent'2 = runThrLocMainIO (mkTimed "wtf2" 1000) :: IO (AgentTrace AgentMCTS)
-- agent'3 = runThrLocMainIO (mkTimed "tree" (network, 4)) :: IO (AgentTrace AgentGameTree)
agent'4 = runThrLocMainIO (mkTimed "wtf" (2, 50, network)) :: IO (AgentTrace (AgentParMCTS (AgentSimple TNetwork)))
-- let agent'0 = runThrLocMainIO (mkAgent network) :: IO ((AgentSimple TNetwork))
-- agent'1 = runThrLocMainIO (mkAgent ()) :: IO (AgentRandom)
-- agent'2 = runThrLocMainIO (mkAgent 1000) :: IO (AgentMCTS)
-- agent'3 = runThrLocMainIO (mkAgent (network, 4)) :: IO (AgentGameTree)
-- agent'4 = runThrLocMainIO (mkAgent (2, 50, network)) :: IO ((AgentParMCTS (AgentSimple TNetwork)))
agent <- agent'0
let initial = makeCGS br P1
br = freshGame (maxTiles,maxTiles) :: Breakthrough
getPlayerName P1 = "human"
getPlayerName P2 = agentName agent
drawCGS' cgs = drawUpdateCGS context cgs getPlayerName
var <- newMVar =<< drawCGS' initial
let drawMove mPos = modifyMVar_ var $ \ cgs -> if allFinished cgs then return cgs else do
let prevPos = lastHighlight cgs
when (mPos /= prevPos) (send context (drawBoard mPos (boardDrawn cgs)))
return (cgs { lastHighlight = mPos })
autoPlay cgs | allFinished cgs = return cgs
| otherwise = do
let board = boardState cgs
player = playerNow cgs
newBoard <- runThrLocMainIO (applyAgent agent board player)
drawCGS' (makeCGS newBoard (nextPlayer player))
clearSuperBG (Field f bg _) = (Field f bg Nothing)
lastSelect cgs = case filter (\ (pos,(Field _ _ sup)) -> sup == Just BGSelected) (assocs (getArrDB $ boardDrawn cgs)) of
[(pos,_)] -> Just pos
_ -> Nothing -- no matches or more than one match
clickSelect ix cgs = do
let DrawingBoard brd = boardDrawn cgs
brdClean = fmap clearSuperBG brd
brd' = accum (\ (Field f bg _) sup -> (Field f bg sup)) brdClean [(ix,(Just BGSelected))]
send context (drawBoard (Just ix) (DrawingBoard brd'))
return (cgs { boardDrawn = DrawingBoard brd' })
clickClear cgs = do
let DrawingBoard brd = boardDrawn cgs
brd' = fmap clearSuperBG brd
send context (drawBoard (lastHighlight cgs) (DrawingBoard brd'))
return (cgs { boardDrawn = DrawingBoard brd' })
drawClick Nothing = return ()
drawClick mPos@(Just sndPos@(x,y)) = modifyMVar_ var $ \ cgs -> if allFinished cgs then return cgs else do
let valid state = state `elem` moves (boardState cgs) (playerNow cgs)
case lastSelect cgs of
Nothing -> clickSelect sndPos cgs
Just fstPos | fstPos == sndPos -> clickClear cgs
| otherwise -> case applyMove (boardState cgs) (fstPos,sndPos) of
Nothing -> clickSelect sndPos cgs
Just newState | valid newState -> do
newCGS <- drawCGS' (makeCGS newState (nextPlayer (playerNow cgs)))
autoPlay newCGS
| otherwise -> clickSelect sndPos cgs
when enableMouseMoveFeedback $ do
moveQ <- events context MouseMove
void $ forkIO $ forever $ do
evnt <- readEventQueue moveQ
case jsMouse evnt of
Nothing -> return ()
Just xy -> do
drawMove (positionToIndex xy)
downQ <- events context MouseDown
forkIO $ forever $ do
evnt <- readEventQueue downQ
case jsMouse evnt of
Nothing -> return ()
Just xy -> do
drawClick (positionToIndex xy)
return ()
pvp :: Context -> IO ()
pvp context = do
let initial = makeCGS br P1
br = freshGame (maxTiles,maxTiles) :: Breakthrough
drawCGS' cgs = drawUpdateCGS context cgs (const ("human" :: String))
var <- newMVar =<< drawCGS' initial
let drawMove mPos = modifyMVar_ var $ \ cgs -> if allFinished cgs then return cgs else do
let prevPos = lastHighlight cgs
when (mPos /= prevPos) (send context (drawBoard mPos (boardDrawn cgs)))
return (cgs { lastHighlight = mPos })
clearSuperBG (Field f bg _) = (Field f bg Nothing)
lastSelect cgs = case filter (\ (pos,(Field _ _ sup)) -> sup == Just BGSelected) (assocs (getArrDB $ boardDrawn cgs)) of
[(pos,_)] -> Just pos
_ -> Nothing -- no matches or more than one match
clickSelect ix cgs = do
let DrawingBoard brd = boardDrawn cgs
brdClean = fmap clearSuperBG brd
brd' = accum (\ (Field f bg _) sup -> (Field f bg sup)) brdClean [(ix,(Just BGSelected))]
send context (drawBoard (Just ix) (DrawingBoard brd'))
return (cgs { boardDrawn = DrawingBoard brd' })
clickClear cgs = do
let DrawingBoard brd = boardDrawn cgs
brd' = fmap clearSuperBG brd
send context (drawBoard (lastHighlight cgs) (DrawingBoard brd'))
return (cgs { boardDrawn = DrawingBoard brd' })
drawClick Nothing = return ()
drawClick mPos@(Just sndPos@(x,y)) = modifyMVar_ var $ \ cgs -> if allFinished cgs then return cgs else do
let valid state = state `elem` moves (boardState cgs) (playerNow cgs)
case lastSelect cgs of
Nothing -> clickSelect sndPos cgs
Just fstPos | fstPos == sndPos -> clickClear cgs
| otherwise -> case applyMove (boardState cgs) (fstPos,sndPos) of
Nothing -> clickSelect sndPos cgs
Just newState | valid newState -> drawCGS' (makeCGS newState (nextPlayer (playerNow cgs)))
| otherwise -> clickSelect sndPos cgs
-- -- disabled: requires unreleased null-canvas-0.2.8
-- when True $ do
-- clickQ <- events context Click
-- void $ forkIO $ forever $ do
-- evnt <- readEventQueue clickQ
-- print ("click",evnt)
when enableMouseMoveFeedback $ do
moveQ <- events context MouseMove
void $ forkIO $ forever $ do
evnt <- readEventQueue moveQ
case jsMouse evnt of
Nothing -> return ()
Just xy -> drawMove (positionToIndex xy)
downQ <- events context MouseDown
forkIO $ forever $ do
evnt <- readEventQueue downQ
case jsMouse evnt of
Nothing -> return ()
Just xy -> drawClick (positionToIndex xy)
return ()
| Tener/deeplearning-thesis | gui/breakthrough-gui.hs | bsd-3-clause | 14,979 | 3 | 32 | 4,787 | 4,879 | 2,451 | 2,428 | 302 | 10 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Serv.Api.Types
( EntityId(..)
, failure
, failureNoBody
, failureReqBody
, ResponseStatus(..)
, Response(..)
, success
, successNoBody
) where
import Control.Lens (mapped, (&), (?~))
-- import Data.Aeson
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..),
object, withObject, (.!=), (.:), (.:?), (.=))
import qualified Data.Swagger as Swagger
import Data.Text (Text)
import qualified Data.Text.Lazy as LT
import GHC.Generics
import Servant.API
import Servant.Swagger
-- | Entity Identifier
newtype EntityId = EntityId Int
deriving (Show, Eq, Ord, ToHttpApiData, FromHttpApiData, Generic)
instance FromJSON EntityId
instance ToJSON EntityId
instance Swagger.ToParamSchema EntityId
instance Swagger.ToSchema EntityId
type StatusCode = Int
type StatusDetails = Maybe LT.Text
-- | Response status
data ResponseStatus = ResponseStatus
{ code :: StatusCode
, details :: StatusDetails
} deriving (Show, Generic)
instance FromJSON ResponseStatus
instance ToJSON ResponseStatus where
toJSON (ResponseStatus code Nothing) = object [ "code" .= code
]
toJSON (ResponseStatus code details) = object [ "code" .= code
, "details" .= details
]
instance Swagger.ToSchema ResponseStatus where
declareNamedSchema proxy = Swagger.genericDeclareNamedSchema Swagger.defaultSchemaOptions proxy
& mapped.Swagger.schema.Swagger.description ?~ "Response status example"
& mapped.Swagger.schema.Swagger.example ?~ toJSON (ResponseStatus 0 (Just "Success"))
-- | Response body
data Response a = Response
{ body :: Maybe a
, status :: ResponseStatus
} deriving (Show, Generic)
instance (FromJSON a) => FromJSON (Response a) where
parseJSON =
withObject "Response" (\o ->
Response <$> ( o .:? "data")
<*> (o .: "status")
)
instance (ToJSON a) => ToJSON (Response a) where
toJSON (Response Nothing status) = object [ "status" .= status
]
toJSON (Response body status) = object [ "data" .= body
, "status" .= status
]
instance (Swagger.ToSchema a) => Swagger.ToSchema (Response a) where
declareNamedSchema proxy = Swagger.genericDeclareNamedSchema Swagger.defaultSchemaOptions proxy
& mapped.Swagger.schema.Swagger.description ?~ "Response example"
& mapped.Swagger.schema.Swagger.example ?~ toJSON successNoBody
mkResp :: (ToJSON a) => StatusCode -> StatusDetails -> Maybe a -> Response a
mkResp c d b = Response { body = b, status = ResponseStatus c d}
success b = mkResp 0 Nothing (Just b)
successNoBody :: Response LT.Text
successNoBody = mkResp 0 Nothing Nothing
failure b = mkResp 700 Nothing (Just b)
failureNoBody :: Response LT.Text
failureNoBody = mkResp 700 Nothing Nothing
--failureReqBody :: Response LT.Text
failureReqBody d = mkResp 701 (Just d) (Nothing :: Maybe LT.Text)
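-- A hypothetical usage sketch (not part of the original module): 'success'
-- pairs a payload with status code 0, so @encode (success (42 :: Int))@
-- yields JSON equivalent to {"data": 42, "status": {"code": 0}} (field order
-- may vary), while 'failureNoBody' omits "data" and carries code 700.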
| orangefiredragon/bear | src/Serv/Api/Types.hs | bsd-3-clause | 3,478 | 8 | 12 | 1,016 | 890 | 493 | 397 | -1 | -1 |
{-# LANGUAGE Unsafe, CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Metrology.Unsafe
-- Copyright : (C) 2013 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg ([email protected])
-- Stability : experimental
-- Portability : non-portable
--
-- This module exports the constructor of the 'Qu' type. This allows you
-- to write code that takes creates and reads quantities at will,
-- which may lead to dimension unsafety. Use at your peril.
--
-- This module also exports 'UnsafeQu', which is a simple wrapper around
-- 'Qu' that has 'Functor', etc., instances. The reason 'Qu' itself doesn't
-- have a 'Functor' instance is that it would be unit-unsafe, allowing you,
-- say, to add 1 to a quantity.... but 1 what? That's the problem. However,
-- a 'Functor' instance is likely useful, hence 'UnsafeQu'.
-----------------------------------------------------------------------------
module Data.Metrology.Unsafe (
-- * The 'Qu' type
Qu(..),
-- * 'UnsafeQu'
UnsafeQu(..)
) where
import Data.Metrology.Qu
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
import Data.Foldable
import Data.Traversable
#endif
-- | A basic wrapper around 'Qu' that has more instances.
newtype UnsafeQu d l n = UnsafeQu { qu :: Qu d l n }
instance Functor (UnsafeQu d l) where
fmap f (UnsafeQu (Qu x)) = UnsafeQu (Qu (f x))
instance Applicative (UnsafeQu d l) where
pure x = UnsafeQu (Qu x)
UnsafeQu (Qu f) <*> UnsafeQu (Qu x) = UnsafeQu (Qu (f x))
instance Foldable (UnsafeQu d l) where
foldMap f (UnsafeQu (Qu x)) = f x
instance Traversable (UnsafeQu d l) where
traverse f (UnsafeQu (Qu x)) = UnsafeQu . Qu <$> f x
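-- A hypothetical usage sketch (not part of the original module), illustrating
-- the unit-unsafety mentioned above: @qu (fmap (*2) (UnsafeQu q))@ doubles
-- the raw numerical value of @q@ while completely ignoring its dimension and
-- local scale factors.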
| goldfirere/units | units/Data/Metrology/Unsafe.hs | bsd-3-clause | 1,751 | 0 | 10 | 315 | 312 | 175 | 137 | 18 | 0 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DataKinds #-}
module Lucid.Ink.Internal where
import GHC.Exts (IsString(..))
import Data.Text (Text)
import qualified Data.Text as T
import Data.Monoid ((<>))
data Orientation
= Vertical
| Horizontal
deriving (Eq,Ord,Show)
newtype Direction (o :: Orientation) = Direction
{ getDirection :: Text
} deriving (Eq,Ord,Show,IsString,Monoid)
newtype Size = Size
{ getSize :: Text
} deriving (Eq,Ord,Show,IsString,Monoid)
onAll_, onTiny_, onSmall_, onMedium_, onLarge_, onXLarge_ :: Size -> Text
onAll_ = sp . prefix "all" . getSize
onTiny_ = sp . prefix "tiny" . getSize
onSmall_ = sp . prefix "small" . getSize
onMedium_ = sp . prefix "medium" . getSize
onLarge_ = sp . prefix "large" . getSize
onXLarge_ = sp . prefix "xlarge" . getSize
auto_ :: Size
auto_ = "auto"
perc_ :: Int -> Size
perc_ = (<> "%") . gShow
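-- A hypothetical usage sketch (not part of the original module): the helpers
-- assemble the space-padded Ink class fragments, e.g.
--
-- >>> onMedium_ (perc_ 50)
-- " medium-50% "
-- >>> onAll_ auto_
-- " all-auto "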
getOrientation :: Orientation -> Text
getOrientation = \case
Vertical -> "vertical"
Horizontal -> "horizontal"
push_ :: Direction o -> Text
push_ = sp . prefix "push" . getDirection
align_ :: Direction Horizontal -> Text
align_ = sp . prefix "align" . getDirection
top_, middle_, bottom_ :: Direction Vertical
left_, center_, right_ :: Direction Horizontal
top_ = "top"
middle_ = "middle"
bottom_ = "bottom"
left_ = "left"
center_ = "center"
right_ = "right"
padding_ :: Text -> Text
padding_ = sp . postfix "padding"
space_ :: Text -> Text
space_ = sp . postfix "space"
ink_ :: Text -> Text
ink_ = sp . prefix "ink"
over_ :: Text -> Text
over_ = sp . prefix "over"
fw_ :: Int -> Text
fw_ = sp . prefix "fw" . tShow
tShow :: Show a => a -> Text
tShow = T.pack . show
gShow :: (Show a, IsString b) => a -> b
gShow = fromString . show
sp :: Text -> Text
sp t = " " <> t <> " "
prefix :: Text -> Text -> Text
prefix p s = p <> "-" <> s
postfix :: Text -> Text -> Text
postfix = flip prefix
| kylcarte/ink-ui | src/Lucid/Ink/Internal.hs | bsd-3-clause | 2,010 | 0 | 7 | 407 | 691 | 388 | 303 | 67 | 2 |
{-# LANGUAGE CPP #-}
------------------------------------------------------------------------------
-- |
-- Module: Database.PostgreSQL.Simple.Internal.PQResultUtils
-- Copyright: (c) 2011 MailRank, Inc.
-- (c) 2011-2012 Leon P Smith
-- License: BSD3
-- Maintainer: Leon P Smith <[email protected]>
-- Stability: experimental
--
------------------------------------------------------------------------------
module Database.PostgreSQL.Simple.Internal.PQResultUtils
( finishQueryWith
, finishQueryWithV
, finishQueryWithVU
, getRowWith
) where
import Control.Exception as E
import Data.ByteString (ByteString)
import Data.Foldable (for_)
import Database.PostgreSQL.Simple.FromField (ResultError(..))
import Database.PostgreSQL.Simple.Ok
import Database.PostgreSQL.Simple.Types (Query(..))
import Database.PostgreSQL.Simple.Internal as Base hiding (result, row)
import Database.PostgreSQL.Simple.TypeInfo
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Data.ByteString.Char8 as B
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as MVU
import Control.Monad.Trans.Reader
import Control.Monad.Trans.State.Strict
finishQueryWith :: RowParser r -> Connection -> Query -> PQ.Result -> IO [r]
finishQueryWith parser conn q result = finishQueryWith' q result $ do
nrows <- PQ.ntuples result
ncols <- PQ.nfields result
forM' 0 (nrows-1) $ \row ->
getRowWith parser row ncols conn result
finishQueryWithV :: RowParser r -> Connection -> Query -> PQ.Result -> IO (V.Vector r)
finishQueryWithV parser conn q result = finishQueryWith' q result $ do
nrows <- PQ.ntuples result
let PQ.Row nrows' = nrows
ncols <- PQ.nfields result
mv <- MV.unsafeNew (fromIntegral nrows')
for_ [ 0 .. nrows-1 ] $ \row -> do
let PQ.Row row' = row
value <- getRowWith parser row ncols conn result
MV.unsafeWrite mv (fromIntegral row') value
V.unsafeFreeze mv
finishQueryWithVU :: VU.Unbox r => RowParser r -> Connection -> Query -> PQ.Result -> IO (VU.Vector r)
finishQueryWithVU parser conn q result = finishQueryWith' q result $ do
nrows <- PQ.ntuples result
let PQ.Row nrows' = nrows
ncols <- PQ.nfields result
mv <- MVU.unsafeNew (fromIntegral nrows')
for_ [ 0 .. nrows-1 ] $ \row -> do
let PQ.Row row' = row
value <- getRowWith parser row ncols conn result
MVU.unsafeWrite mv (fromIntegral row') value
VU.unsafeFreeze mv
finishQueryWith' :: Query -> PQ.Result -> IO a -> IO a
finishQueryWith' q result k = do
status <- PQ.resultStatus result
case status of
PQ.TuplesOk -> k
PQ.EmptyQuery -> queryErr "query: Empty query"
PQ.CommandOk -> queryErr "query resulted in a command response (did you mean to use `execute` or forget a RETURNING?)"
PQ.CopyOut -> queryErr "query: COPY TO is not supported"
PQ.CopyIn -> queryErr "query: COPY FROM is not supported"
#if MIN_VERSION_postgresql_libpq(0,9,3)
PQ.CopyBoth -> queryErr "query: COPY BOTH is not supported"
#endif
#if MIN_VERSION_postgresql_libpq(0,9,2)
PQ.SingleTuple -> queryErr "query: single-row mode is not supported"
#endif
PQ.BadResponse -> throwResultError "query" result status
PQ.NonfatalError -> throwResultError "query" result status
PQ.FatalError -> throwResultError "query" result status
where
queryErr msg = throwIO $ QueryError msg q
getRowWith :: RowParser r -> PQ.Row -> PQ.Column -> Connection -> PQ.Result -> IO r
getRowWith parser row ncols conn result = do
let rw = Row row result
let unCol (PQ.Col x) = fromIntegral x :: Int
okvc <- runConversion (runStateT (runReaderT (unRP parser) rw) 0) conn
case okvc of
Ok (val,col) | col == ncols -> return val
| otherwise -> do
vals <- forM' 0 (ncols-1) $ \c -> do
tinfo <- getTypeInfo conn =<< PQ.ftype result c
v <- PQ.getvalue result row c
return ( tinfo
, fmap ellipsis v )
throw (ConversionFailed
(show (unCol ncols) ++ " values: " ++ show vals)
Nothing
""
(show (unCol col) ++ " slots in target type")
"mismatch between number of columns to convert and number in target type")
Errors [] -> throwIO $ ConversionFailed "" Nothing "" "" "unknown error"
Errors [x] -> throwIO x
Errors xs -> throwIO $ ManyErrors xs
ellipsis :: ByteString -> ByteString
ellipsis bs
| B.length bs > 15 = B.take 10 bs `B.append` "[...]"
| otherwise = bs
forM' :: (Ord n, Num n) => n -> n -> (n -> IO a) -> IO [a]
forM' lo hi m = loop hi []
where
loop !n !as
| n < lo = return as
| otherwise = do
a <- m n
loop (n-1) (a:as)
{-# INLINE forM' #-}
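-- A hypothetical usage sketch (not part of the original module): the results
-- come back in ascending index order even though the loop counts down.
--
-- >>> forM' 1 3 (return . (*10))
-- [10,20,30]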
| tomjaguarpaw/postgresql-simple | src/Database/PostgreSQL/Simple/Internal/PQResultUtils.hs | bsd-3-clause | 5,167 | 0 | 21 | 1,370 | 1,478 | 740 | 738 | -1 | -1 |
{-# LANGUAGE LambdaCase, TupleSections, RecordWildCards #-}
module Transformations.Optimising.SparseCaseOptimisation where
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Functor.Foldable as Foldable
import Control.Monad.Trans.Except
import Grin.Grin
import Grin.Pretty
import Grin.TypeEnv
import Transformations.Util
sparseCaseOptimisation :: TypeEnv -> Exp -> Either String Exp
sparseCaseOptimisation TypeEnv{..} = runExcept . anaM builder where
builder :: Exp -> Except String (ExpF Exp)
builder = \case
-- TODO: reduce noise and redundancy
ECase scrut@(Var name) alts -> do
scrutType <- lookupExcept (notInTyEnv scrut) name _variable
let alts' = filterAlts scrutType alts
pure $ ECaseF scrut alts'
ECase scrut@(ConstTagNode tag _) alts -> do
let scrutType = T_NodeSet $ Map.singleton tag mempty
alts' = filterAlts scrutType alts
pure $ ECaseF scrut alts'
ECase scrut@(Lit l) alts -> do
let scrutType = typeOfLit l
alts' = filterAlts scrutType alts
pure $ ECaseF scrut alts'
ECase scrut@(Undefined ty) alts -> do
let alts' = filterAlts ty alts
pure $ ECaseF scrut alts'
ECase scrut _ -> throwE $ unsuppScrut scrut
exp -> pure . project $ exp
notInTyEnv v = "SCO: Variable " ++ show (PP v) ++ " not found in type env"
unsuppScrut scrut = "SCO: Unsupported case scrutinee: " ++ show (PP scrut)
filterAlts :: Type -> [Exp] -> [Exp]
filterAlts scrutTy alts =
[ alt
| alt@(Alt cpat body) <- alts
, possible scrutTy allPatTags cpat
] where allPatTags = Set.fromList [tag | Alt (NodePat tag _) _ <- alts]
possible :: Type -> Set Tag -> CPat -> Bool
possible (T_NodeSet nodeSet) allPatTags cpat = case cpat of
NodePat tag _args -> Map.member tag nodeSet
-- HINT: the default case is redundant if normal cases fully cover the domain
DefaultPat -> not $ null (Set.difference (Map.keysSet nodeSet) allPatTags)
_ -> False
possible ty@T_SimpleType{} _ cpat = case cpat of
LitPat lit -> ty == typeOfLit lit
DefaultPat -> True -- HINT: the value domain is unknown, it is not possible to prove if it overlaps or it is fully covered
_ -> False
possible ty _ _ = ty /= dead_t -- bypass everything else
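-- An informal example (added sketch, not in the original source): if the
-- scrutinee's type is a node set containing only the tags CNil and CCons,
-- alternatives for any other tag are dropped, and a DefaultPat alternative
-- is dropped as well once CNil and CCons both have explicit alternatives.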
| andorp/grin | grin/src/Transformations/Optimising/SparseCaseOptimisation.hs | bsd-3-clause | 2,324 | 0 | 17 | 530 | 711 | 359 | 352 | 50 | 12 |
execSymZCFA :: Call -> StateSpace Sym_Delta ZCFA_AAM
execSymZCFA = exec Sym_Delta ZCFA_AAM
| davdar/quals | writeup-old/sections/03AAMByExample/05Recovering0CFA/02Exec.hs | bsd-3-clause | 91 | 0 | 7 | 12 | 30 | 13 | 17 | 2 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Tests.Simple
-- Copyright : (c)2011, Texas Instruments France
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- QuickCheck tests
--
-----------------------------------------------------------------------------
module Tests.Simple(
-- * Validation tests
runSimpleTests
) where
import Test.QuickCheck.Monadic
import Benchmark
import Test.QuickCheck
import TestTools
import Kernels
import Data.Bits(shiftL)
import Data.List(findIndex)
import Data.Int
{-
Properties to test
-}
-- | Test addition of a constant to a list of one float
floatTest :: Options -> Float -> Property
floatTest opts l = monadicIO $ do
run $ gpuRoundingMode opts
let a = l -- 4.384424 --a = -1.565178
c = 10.0
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D 1)) (simpleAdd (CLFloatArray [a]) c 1 (clFloatArrayOO 1))
let result = head . fromFloatResult $ (head r)
assert $ result ~= (a + c)
-- | Test addition of a constant to a list of float4
add4Test :: Options -> [Float4] -> Property
add4Test opts l = (not . null) l ==> monadicIO $ do
run $ gpuRoundingMode opts
let nb = length l
d = (10.0,5.0,1.0,8.0)
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D nb)) (simpleAdd4 (CLFloat4Array l) d nb (clFloat4ArrayOO nb))
assert $ fromFloat4Result (head r) ~= map (vecAdd d) l
where
vecAdd (x0,y0,z0,t0) (x1,y1,z1,t1) = (x0+x1,y0+y1,z0+z1,t0+t1)
-- | Test addition of a constant to a list of float
addTest :: Options -> [Float] -> Property
addTest opts l = (not . null) l ==> monadicIO $ do
run $ gpuRoundingMode opts
let nb = length l
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D nb)) (simpleAdd (CLFloatArray l) 10.0 nb (clFloatArrayOO nb))
assert $ fromFloatResult (head r) ~= map (+ 10.0) l
-- | Test copy of a vector of ints
moveIntTest :: Options -> Property
moveIntTest opts =
forAll (choose (1,60)) $ \num -> do
let randomInt32 = (choose (-200,200) :: Gen Int) >>= return . fromIntegral
forAll (vectorOf (num*4) (randomInt32)) $ \l -> monadicIO $ do
let nb = num*4
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D num)) (simpleIntMove (CLIntArray l) nb (clIntArrayOO nb))
assert $ fromIntResult (head r) == l
-- | Test copy of a vector
moveTest :: Options -> [Float] -> Property
moveTest opts l = (not . null) l ==> monadicIO $ do
let nb = length l
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D nb)) (simpleMove (CLFloatArray l) nb (clFloatArrayOO nb))
assert $ fromFloatResult (head r) ~= l
complexMoveTest :: Options -> Property
complexMoveTest opts = do
forAll (choose (0,5)) $ \num -> do
let copiesInKernel = 1 `shiftL` num
i = 320 `div` copiesInKernel
forAll (vectorOf (copiesInKernel*i) (choose (-200.0,200.0))) $ \l -> do
let a = CLFloatArray l
monadicIO $ do
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D i)) (complexMove a num (clFloatArrayOO (copiesInKernel*i)))
assert $ fromFloatResult (head r) ~= l
-- | Test a tensor product of two vectors
tensorTest :: Options -> [Float] -> Property
tensorTest opts l = (not . null) l ==> monadicIO $ do
run $ gpuRoundingMode opts
let nb = length l
r <- run $ onBoardOnlyData opts (simpleNDRange (size1D nb)) (simpleTensor nb (CLFloatArray l) (CLFloatArray l) (clFloatArrayOO nb))
--reference <- run $ mapM (withGPURounding . (\x -> x * x) . gpuF) $ l
assert $ fromFloatResult (head r) ~= map (\x -> x * x) l
-- | Test addition of a constant to a matrix
-- We generate a random width between 2 and 20
-- A random height between 2 and 20
-- A random list of width*height elements with the elements being random between -200 and 200.
addConstantMatrixTest :: Options -> Property
addConstantMatrixTest opts =
forAll (choose (2,20)) $ \cols ->
forAll (choose (2,20)) $ \rows ->
forAll (vectorOf (cols * rows) (choose (-200.0,200.0))) $ \l ->
classify (cols > rows) "cols > rows" $
classify (cols < rows) "cols < rows" $
classify (cols == rows) "cols == rows" $
monadicIO $ do
run $ gpuRoundingMode opts
let nb = length l
r <- run $ onBoardOnlyData opts (simpleNDRange (size2D cols rows))
(simpleAddConstantToMatrix cols (CLFloatArray l) 10.0 (clFloatArrayOO nb))
assert $ fromFloatResult (head r) ~= map (+ 10.0) l
{-
Test Driver
-}
-- | Test an OpenCL kernel
test :: Testable prop
=> Int -- ^ Max nb of tests
-> (Options -> prop) -- ^ Property to test
-> (Options -> IO ()) -- ^ Test
test n a = \opts -> quickCheckWith (stdArgs {maxSuccess=n}) (a opts)
-- | List of test categories and the tests
tests = [("SIMPLE TEST",[
("floatTest", test 20 floatTest)
, ("simpleIntAddTest",test 20 moveIntTest)
, ("addTest",test 20 addTest)
, ("add4Test",test 20 add4Test)
, ("moveTest",test 20 moveTest)
, ("complexMoveTest",test 20 complexMoveTest)
, ("tensorTest",test 20 tensorTest)
, ("addConstantMatrixTest",test 20 addConstantMatrixTest)
])
]
-- | Run a test with given title and options to connect to the server
runATest opts (title,a) = do
putStrLn title
a opts
putStrLn ""
-- | Run a test category
runACategory opts (title,tests) = do
putStrLn title
putStrLn ""
mapM_ (runATest opts) tests
-- | Run all tests
runSimpleTests opts = mapM_ (runACategory opts) tests
| ChristopheF/OpenCLTestFramework | Client/Tests/Simple.hs | bsd-3-clause | 5,676 | 7 | 28 | 1,290 | 1,895 | 971 | 924 | 98 | 1 |
{-# LANGUAGE ScopedTypeVariables, TypeOperators, OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric, FlexibleInstances, QuasiQuotes #-}
{-# LANGUAGE CPP, FlexibleContexts, UndecidableInstances, RecordWildCards #-}
{-# LANGUAGE DeriveFunctor, LambdaCase #-}
{-# LANGUAGE TupleSections, GeneralizedNewtypeDeriving #-}
#ifndef _SERVER_IS_MAIN_
module Server where
#endif
import Web.Scotty (ScottyM, ActionM, json)
import Control.Concurrent
import Data.Aeson.QQ
import Control.Monad.IO.Class
import Network.Wai.Handler.Warp
( defaultSettings
, openFreePort
, Port
)
import GHC.Generics
import Data.Aeson hiding (json)
import Data.Text (Text)
import qualified Web.Scotty as Scotty
import qualified Data.Text as T
import Data.Monoid
import Wrecker
import Wrecker.Runner
import Control.Concurrent.NextRef (NextRef)
import qualified Control.Concurrent.NextRef as NextRef
import qualified Control.Immortal as Immortal
import qualified Wrecker.Statistics as Wrecker
import qualified Network.Wai.Handler.Warp as Warp
import qualified Network.Wai as Wai
import Network.Socket (Socket)
import qualified Network.Socket as N
import Control.Exception
import Data.Maybe (listToMaybe)
import System.Environment
import Control.Applicative
data Envelope a = Envelope { value :: a }
deriving (Show, Eq, Generic)
instance ToJSON a => ToJSON (Envelope a)
rootRef :: Int -> Text
rootRef port = T.pack $ "http://localhost:" ++ show port
jsonE :: ToJSON a => a -> ActionM ()
jsonE = json . Envelope
data Root a = Root
{ root :: a
, products :: a
, cartsIndex :: a
, cartsIndexItems :: a
, usersIndex :: a
, login :: a
, checkout :: a
} deriving (Show, Eq, Functor)
type RootInt = Root Int
instance Applicative Root where
pure x = Root
{ root = x
, products = x
, login = x
, usersIndex = x
, cartsIndex = x
, cartsIndexItems = x
, checkout = x
}
f <*> x = Root
{ root = root f $ root x
, products = products f $ products x
, login = login f $ login x
, usersIndex = usersIndex f $ usersIndex x
, cartsIndex = cartsIndex f $ cartsIndex x
, cartsIndexItems = cartsIndexItems f $ cartsIndexItems x
, checkout = checkout f $ checkout x
}
app :: RootInt -> Port -> ScottyM ()
app Root {..} port = do
let host = rootRef port
Scotty.get "/root" $ do
liftIO $ threadDelay root
jsonE [aesonQQ|
{ "products" : #{host <> "/products" }
, "carts" : #{host <> "/carts" }
, "users" : #{host <> "/users" }
, "login" : #{host <> "/login" }
, "checkout" : #{host <> "/checkout" }
}
|]
Scotty.get "/products" $ do
liftIO $ threadDelay products
jsonE [aesonQQ|
[ #{host <> "/products/0"}
]
|]
Scotty.get "/product/:id" $ do
liftIO $ threadDelay products
jsonE [aesonQQ|
{ "summary" : "shirt" }
|]
Scotty.get "/carts" $ do
-- sleepDist gen carts
jsonE [aesonQQ|
[ #{host <> "/carts/0"}
]
|]
Scotty.get "/carts/:id" $ do
liftIO $ threadDelay cartsIndex
jsonE [aesonQQ|
{ "items" : #{host <> "/carts/0/items"}
}
|]
Scotty.post "/carts/:id/items" $ do
liftIO $ threadDelay cartsIndexItems
jsonE [aesonQQ|
#{host <> "/carts/0/items"}
|]
Scotty.get "/users" $ do
-- sleepDist gen users
jsonE [aesonQQ|
[ #{host <> "/users/0"}
]
|]
Scotty.get "/users/:id" $ do
liftIO $ threadDelay usersIndex
jsonE [aesonQQ|
{ "cart" : #{host <> "/carts/0"}
, "username" : "example"
}
|]
Scotty.post "/login" $ do
liftIO $ threadDelay login
jsonE [aesonQQ|
#{host <> "/users/0"}
|]
Scotty.post "/checkout" $ do
liftIO $ threadDelay checkout
jsonE ()
run :: RootInt -> IO (Port, Immortal.Thread, ThreadId, NextRef AllStats)
run = start Nothing
stop :: (Port, ThreadId, NextRef AllStats) -> IO AllStats
stop (_, threadId, ref) = do
killThread threadId
NextRef.readLast ref
toKey :: Wai.Request -> String
toKey x = case Wai.pathInfo x of
["root"] -> "/root"
["products"] -> "/products"
"carts" : _ : "items" : _ -> "/carts/0/items"
"carts" : _ : _ -> "/carts/0"
"users" : _ -> "/users/0"
["login"] -> "/login"
["checkout"] -> "/checkout"
_ -> error "FAIL! UNKNOWN REQUEST FOR EXAMPLE!"
recordMiddleware :: Recorder -> Wai.Application -> Wai.Application
recordMiddleware recorder waiApp req sendResponse
= record recorder (toKey req) $! waiApp req $ \res -> sendResponse res
getASocket :: Maybe Port -> IO (Port, Socket)
getASocket = \case
Just port -> do s <- N.socket N.AF_INET N.Stream N.defaultProtocol
localhost <- N.inet_addr "127.0.0.1"
N.bind s (N.SockAddrInet (fromIntegral port) localhost)
N.listen s 1000
return (port, s)
Nothing -> openFreePort
start :: Maybe Port -> RootInt -> IO ( Port
, Immortal.Thread
, ThreadId
, NextRef AllStats
)
start mport dist = do
(port, socket) <- getASocket mport
(ref, recorderThread, recorder) <- newStandaloneRecorder
scottyApp <- Scotty.scottyApp $ app dist port
threadId <- flip forkFinally (\_ -> N.close socket)
$ Warp.runSettingsSocket defaultSettings socket
$ recordMiddleware recorder
$ scottyApp
return (port, recorderThread, threadId, ref)
main :: IO ()
main = do
xs <- getArgs
let delay = maybe 0 read $ listToMaybe xs
(port, socket) <- getASocket $ Just 3000
(ref, recorderThread, recorder) <- newStandaloneRecorder
scottyApp <- Scotty.scottyApp $ app (pure delay) port
(Warp.runSettingsSocket defaultSettings socket
$ recordMiddleware recorder
$ scottyApp) `finally`
( do N.close socket
Immortal.stop recorderThread
allStats <- NextRef.readLast ref
putStrLn $ Wrecker.pprStats Nothing Path allStats
)
| skedgeme/wrecker | examples/Server.hs | bsd-3-clause | 6,724 | 0 | 15 | 2,223 | 1,667 | 875 | 792 | 153 | 8 |
module Chipher where
import Control.Arrow
import Data.Char
import Data.List
type Keyword = String
charPairs :: Keyword -> String -> [(Char, Char)]
charPairs k s =
reverse . snd $
foldl'
(\ (k, acc) s ->
if s == ' ' then (k, (' ', ' ') : acc)
else
let (kh : kt) = k
in (kt, (kh, s) : acc)
)
(concat . repeat $ k, [])
s
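-- A hypothetical usage sketch (not part of the original module): spaces pass
-- through as a (' ',' ') pair without consuming a keyword character.
--
-- >>> charPairs "KEY" "AB C"
-- [('K','A'),('E','B'),(' ',' '),('Y','C')]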
ordPairs :: Keyword -> String -> [(Int, Int)]
ordPairs k s = map ((\k -> ord k - ord 'A') *** ord) $ charPairs k s
encode :: Keyword -> String -> String
encode k s =
let shifted =
map
(\(k, s) ->
let n = if s == 32 then 32 else s + k in
if n > ord 'Z' then n - (ord 'Z' - ord 'A') else n)
(ordPairs k s)
in map chr shifted
--in shifted
main :: IO ()
main = print $ encode "foo" "bar"
| vasily-kirichenko/haskell-book | src/Chipher.hs | bsd-3-clause | 844 | 0 | 18 | 304 | 386 | 209 | 177 | 28 | 3 |
module ProjectEuler.Problem092 (solution092, genericSolution092) where
import Data.Digits
import Util
digitsSquareSum :: Integer -> Integer
digitsSquareSum = sum . map sq . digits 10
chainEndsWith :: Integer -> Integer
chainEndsWith x =
let next = digitsSquareSum x in
if next == 1 || next == 89
then next
else chainEndsWith next
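-- A hypothetical usage sketch (not part of the original module), matching the
-- chains from the problem statement:
--
-- >>> chainEndsWith 44
-- 1
-- >>> chainEndsWith 85
-- 89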
genericSolution092 :: Integer -> Integer
genericSolution092 = toInteger . length . filter (== 89) . map chainEndsWith . enumFromTo 1
solution092 :: Integer
solution092 = genericSolution092 (10 ^ 7)
| guillaume-nargeot/project-euler-haskell | src/ProjectEuler/Problem092.hs | bsd-3-clause | 564 | 0 | 10 | 118 | 160 | 84 | 76 | 15 | 2 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ForeignPtr
-- Copyright : (c) The University of Glasgow, 1992-2003
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- GHC's implementation of the 'ForeignPtr' data type.
--
-----------------------------------------------------------------------------
module GHC.ForeignPtr
(
ForeignPtr(..),
ForeignPtrContents(..),
FinalizerPtr,
FinalizerEnvPtr,
newForeignPtr_,
mallocForeignPtr,
mallocPlainForeignPtr,
mallocForeignPtrBytes,
mallocPlainForeignPtrBytes,
mallocForeignPtrAlignedBytes,
mallocPlainForeignPtrAlignedBytes,
addForeignPtrFinalizer,
addForeignPtrFinalizerEnv,
touchForeignPtr,
unsafeForeignPtrToPtr,
castForeignPtr,
newConcForeignPtr,
addForeignPtrConcFinalizer,
finalizeForeignPtr
) where
import Foreign.Storable
import Data.Foldable ( sequence_ )
import Data.Typeable
import GHC.Show
import GHC.Base
import GHC.IORef
import GHC.STRef ( STRef(..) )
import GHC.Ptr ( Ptr(..), FunPtr(..) )
-- |The type 'ForeignPtr' represents references to objects that are
-- maintained in a foreign language, i.e., that are not part of the
-- data structures usually managed by the Haskell storage manager.
-- The essential difference between 'ForeignPtr's and vanilla memory
-- references of type @Ptr a@ is that the former may be associated
-- with /finalizers/. A finalizer is a routine that is invoked when
-- the Haskell storage manager detects that - within the Haskell heap
-- and stack - there are no more references left that are pointing to
-- the 'ForeignPtr'. Typically, the finalizer will, then, invoke
-- routines in the foreign language that free the resources bound by
-- the foreign object.
--
-- The 'ForeignPtr' is parameterised in the same way as 'Ptr'. The
-- type argument of 'ForeignPtr' should normally be an instance of
-- class 'Storable'.
--
data ForeignPtr a = ForeignPtr Addr# ForeignPtrContents
deriving Typeable
-- we cache the Addr# in the ForeignPtr object, but attach
-- the finalizer to the IORef (or the MutableByteArray# in
-- the case of a MallocPtr). The aim of the representation
-- is to make withForeignPtr efficient; in fact, withForeignPtr
-- should be just as efficient as unpacking a Ptr, and multiple
-- withForeignPtrs can share an unpacked ForeignPtr. Note
-- that touchForeignPtr only has to touch the ForeignPtrContents
-- object, because that ensures that whatever the finalizer is
-- attached to is kept alive.
data Finalizers
= NoFinalizers
| CFinalizers (Weak# ())
| HaskellFinalizers [IO ()]
data ForeignPtrContents
= PlainForeignPtr !(IORef Finalizers)
| MallocPtr (MutableByteArray# RealWorld) !(IORef Finalizers)
| PlainPtr (MutableByteArray# RealWorld)
instance Eq (ForeignPtr a) where
p == q = unsafeForeignPtrToPtr p == unsafeForeignPtrToPtr q
instance Ord (ForeignPtr a) where
compare p q = compare (unsafeForeignPtrToPtr p) (unsafeForeignPtrToPtr q)
instance Show (ForeignPtr a) where
showsPrec p f = showsPrec p (unsafeForeignPtrToPtr f)
-- |A finalizer is represented as a pointer to a foreign function that, at
-- finalisation time, gets as an argument a plain pointer variant of the
-- foreign pointer that the finalizer is associated with.
--
-- Note that the foreign function /must/ use the @ccall@ calling convention.
--
type FinalizerPtr a = FunPtr (Ptr a -> IO ())
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
newConcForeignPtr :: Ptr a -> IO () -> IO (ForeignPtr a)
--
-- ^Turns a plain memory reference into a foreign object by
-- associating a finalizer - given by the monadic operation - with the
-- reference. The storage manager will start the finalizer, in a
-- separate thread, some time after the last reference to the
-- @ForeignPtr@ is dropped. There is no guarantee of promptness, and
-- in fact there is no guarantee that the finalizer will eventually
-- run at all.
--
-- Note that references from a finalizer do not necessarily prevent
-- another object from being finalized. If A's finalizer refers to B
-- (perhaps using 'touchForeignPtr'), then the only guarantee is that
-- B's finalizer will never be started before A's. If both A and B
-- are unreachable, then both finalizers will start together. See
-- 'touchForeignPtr' for more on finalizer ordering.
--
newConcForeignPtr p finalizer
= do fObj <- newForeignPtr_ p
addForeignPtrConcFinalizer fObj finalizer
return fObj
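-- A hypothetical usage sketch (not part of this module), pairing a C-side
-- allocation with a Haskell finalizer; 'mallocBytes' and 'free' come from
-- "Foreign.Marshal.Alloc":
--
-- > do p  <- mallocBytes 1024
-- >    fp <- newConcForeignPtr p (free p)
-- >    -- the buffer is released some time after fp becomes unreachable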
mallocForeignPtr :: Storable a => IO (ForeignPtr a)
-- ^ Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- 'mallocForeignPtr' is equivalent to
--
-- > do { p <- malloc; newForeignPtr finalizerFree p }
--
-- although it may be implemented differently internally: you may not
-- assume that the memory returned by 'mallocForeignPtr' has been
-- allocated with 'Foreign.Marshal.Alloc.malloc'.
--
-- GHC notes: 'mallocForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, so the 'ForeignPtr' does not require a finalizer to
-- free the memory. Use of 'mallocForeignPtr' and associated
-- functions is strongly recommended in preference to 'newForeignPtr'
-- with a finalizer.
--
mallocForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = error "mallocForeignPtr: size must be >= 0"
| otherwise = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtr', except that the
-- size of the memory required is given explicitly as a number of bytes.
mallocForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocForeignPtrBytes size | size < 0 =
error "mallocForeignPtrBytes: size must be >= 0"
mallocForeignPtrBytes (I# size) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | This function is similar to 'mallocForeignPtrBytes', except that the
-- size and alignment of the memory required is given explicitly as numbers of
-- bytes.
mallocForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocForeignPtrAlignedBytes size _align | size < 0 =
error "mallocForeignPtrAlignedBytes: size must be >= 0"
mallocForeignPtrAlignedBytes (I# size) (I# align) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- GHC notes: 'mallocPlainForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, as for mallocForeignPtr. Unlike mallocForeignPtr, a
-- ForeignPtr created with mallocPlainForeignPtr carries no finalizers.
-- It is not possible to add a finalizer to a ForeignPtr created with
-- mallocPlainForeignPtr. This is useful for ForeignPtrs that will live
-- only inside Haskell (such as those created for packed strings).
-- Attempts to add a finalizer to a ForeignPtr created this way, or to
-- finalize such a pointer, will throw an exception.
--
mallocPlainForeignPtr :: Storable a => IO (ForeignPtr a)
mallocPlainForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = error "mallocForeignPtr: size must be >= 0"
| otherwise = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtrBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocPlainForeignPtrBytes size | size < 0 =
error "mallocPlainForeignPtrBytes: size must be >= 0"
mallocPlainForeignPtrBytes (I# size) = IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
-- | This function is similar to 'mallocForeignPtrAlignedBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocPlainForeignPtrAlignedBytes size _align | size < 0 =
error "mallocPlainForeignPtrAlignedBytes: size must be >= 0"
mallocPlainForeignPtrAlignedBytes (I# size) (I# align) = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
-- ^This function adds a finalizer to the given foreign object. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
addForeignPtrFinalizer (FunPtr fp) (ForeignPtr p c) = case c of
PlainForeignPtr r -> f r >> return ()
MallocPtr _ r -> f r >> return ()
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
where
f r = insertCFinalizer r fp 0# nullAddr# p
addForeignPtrFinalizerEnv ::
FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
-- ^ Like 'addForeignPtrFinalizer' but allows the finalizer to be
-- passed an additional environment parameter to be passed to the
-- finalizer. The environment passed to the finalizer is fixed by the
-- second argument to 'addForeignPtrFinalizerEnv'
addForeignPtrFinalizerEnv (FunPtr fp) (Ptr ep) (ForeignPtr p c) = case c of
PlainForeignPtr r -> f r >> return ()
MallocPtr _ r -> f r >> return ()
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
where
f r = insertCFinalizer r fp 1# ep p
addForeignPtrConcFinalizer :: ForeignPtr a -> IO () -> IO ()
-- ^This function adds a finalizer to the given @ForeignPtr@. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
--
-- This is a variant of @addForeignPtrFinalizer@, where the finalizer
-- is an arbitrary @IO@ action. When it is invoked, the finalizer
-- will run in a new thread.
--
-- NB. Be very careful with these finalizers. One common trap is that
-- if a finalizer references another finalized value, it does not
-- prevent that value from being finalized. In particular, 'Handle's
-- are finalized objects, so a finalizer should not refer to a 'Handle'
-- (including @stdout@, @stdin@ or @stderr@).
--
addForeignPtrConcFinalizer (ForeignPtr _ c) finalizer =
addForeignPtrConcFinalizer_ c finalizer
addForeignPtrConcFinalizer_ :: ForeignPtrContents -> IO () -> IO ()
addForeignPtrConcFinalizer_ (PlainForeignPtr r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case r of { IORef (STRef r#) ->
case mkWeak# r# () (foreignPtrFinalizer r) s of { (# s1, _ #) ->
(# s1, () #) }}
else return ()
addForeignPtrConcFinalizer_ f@(MallocPtr fo r) finalizer = do
noFinalizers <- insertHaskellFinalizer r finalizer
if noFinalizers
then IO $ \s ->
case mkWeak# fo () (do foreignPtrFinalizer r; touch f) s of
(# s1, _ #) -> (# s1, () #)
else return ()
addForeignPtrConcFinalizer_ _ _ =
error "GHC.ForeignPtr: attempt to add a finalizer to plain pointer"
insertHaskellFinalizer :: IORef Finalizers -> IO () -> IO Bool
insertHaskellFinalizer r f = do
!wasEmpty <- atomicModifyIORef r $ \finalizers -> case finalizers of
NoFinalizers -> (HaskellFinalizers [f], True)
HaskellFinalizers fs -> (HaskellFinalizers (f:fs), False)
_ -> noMixingError
return wasEmpty
-- | A box around Weak#, private to this module.
data MyWeak = MyWeak (Weak# ())
insertCFinalizer ::
IORef Finalizers -> Addr# -> Int# -> Addr# -> Addr# -> IO ()
insertCFinalizer r fp flag ep p = do
MyWeak w <- ensureCFinalizerWeak r
IO $ \s -> case addCFinalizerToWeak# fp p flag ep w s of
(# s1, 1# #) -> (# s1, () #)
-- Failed to add the finalizer because some other thread
-- has finalized w by calling foreignPtrFinalizer. We retry now.
-- This won't be an infinite loop because that thread must have
-- replaced the content of r before calling finalizeWeak#.
(# s1, _ #) -> unIO (insertCFinalizer r fp flag ep p) s1
ensureCFinalizerWeak :: IORef Finalizers -> IO MyWeak
ensureCFinalizerWeak ref@(IORef (STRef r#)) = do
fin <- readIORef ref
case fin of
CFinalizers weak -> return (MyWeak weak)
HaskellFinalizers{} -> noMixingError
NoFinalizers -> IO $ \s ->
case mkWeakNoFinalizer# r# () s of { (# s1, w #) ->
case atomicModifyMutVar# r# (update w) s1 of
{ (# s2, (weak, needKill ) #) ->
if needKill
then case finalizeWeak# w s2 of { (# s3, _, _ #) ->
(# s3, weak #) }
else (# s2, weak #) }}
where
update _ fin@(CFinalizers w) = (fin, (MyWeak w, True))
update w NoFinalizers = (CFinalizers w, (MyWeak w, False))
update _ _ = noMixingError
noMixingError :: a
noMixingError = error $
"GHC.ForeignPtr: attempt to mix Haskell and C finalizers " ++
"in the same ForeignPtr"
foreignPtrFinalizer :: IORef Finalizers -> IO ()
foreignPtrFinalizer r = do
fs <- atomicModifyIORef r $ \fs -> (NoFinalizers, fs) -- atomic, see #7170
case fs of
NoFinalizers -> return ()
CFinalizers w -> IO $ \s -> case finalizeWeak# w s of
(# s1, 1#, f #) -> f s1
(# s1, _, _ #) -> (# s1, () #)
HaskellFinalizers actions -> sequence_ actions
newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
-- ^Turns a plain memory reference into a foreign pointer that may be
-- associated with finalizers by using 'addForeignPtrFinalizer'.
newForeignPtr_ (Ptr obj) = do
r <- newIORef NoFinalizers
return (ForeignPtr obj (PlainForeignPtr r))
touchForeignPtr :: ForeignPtr a -> IO ()
-- ^This function ensures that the foreign object in
-- question is alive at the given place in the sequence of IO
-- actions. In particular 'Foreign.ForeignPtr.withForeignPtr'
-- does a 'touchForeignPtr' after it
-- executes the user action.
--
-- Note that this function should not be used to express dependencies
-- between finalizers on 'ForeignPtr's. For example, if the finalizer
-- for a 'ForeignPtr' @F1@ calls 'touchForeignPtr' on a second
-- 'ForeignPtr' @F2@, then the only guarantee is that the finalizer
-- for @F2@ is never started before the finalizer for @F1@. They
-- might be started together if for example both @F1@ and @F2@ are
-- otherwise unreachable, and in that case the scheduler might end up
-- running the finalizer for @F2@ first.
--
-- In general, it is not recommended to use finalizers on separate
-- objects with ordering constraints between them. To express the
-- ordering robustly requires explicit synchronisation using @MVar@s
-- between the finalizers, but even then the runtime sometimes runs
-- multiple finalizers sequentially in a single thread (for
-- performance reasons), so synchronisation between finalizers could
-- result in artificial deadlock. Another alternative is to use
-- explicit reference counting.
--
touchForeignPtr (ForeignPtr _ r) = touch r
touch :: ForeignPtrContents -> IO ()
touch r = IO $ \s -> case touch# r s of s' -> (# s', () #)
unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
-- ^This function extracts the pointer component of a foreign
-- pointer. This is a potentially dangerous operation, as if the
-- argument to 'unsafeForeignPtrToPtr' is the last usage
-- occurrence of the given foreign pointer, then its finalizer(s) will
-- be run, which potentially invalidates the plain pointer just
-- obtained. Hence, 'touchForeignPtr' must be used
-- wherever it has to be guaranteed that the pointer lives on - i.e.,
-- has another usage occurrence.
--
-- To avoid subtle coding errors, hand written marshalling code
-- should preferably use 'Foreign.ForeignPtr.withForeignPtr' rather
-- than combinations of 'unsafeForeignPtrToPtr' and
-- 'touchForeignPtr'. However, the latter routines
-- are occasionally preferred in tool generated marshalling code.
unsafeForeignPtrToPtr (ForeignPtr fo _) = Ptr fo
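-- An editorial sketch (not part of the original module): combining
-- 'unsafeForeignPtrToPtr' with 'touchForeignPtr' as described above
-- essentially reconstructs 'Foreign.ForeignPtr.withForeignPtr', which is the
-- form preferred in hand-written marshalling code.
--
-- > withForeignPtrSketch :: ForeignPtr a -> (Ptr a -> IO b) -> IO b
-- > withForeignPtrSketch fp action = do
-- >   r <- action (unsafeForeignPtrToPtr fp)
-- >   touchForeignPtr fp  -- keep fp alive until the action has finished
-- >   return r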
castForeignPtr :: ForeignPtr a -> ForeignPtr b
-- ^This function casts a 'ForeignPtr'
-- parameterised by one type into another type.
castForeignPtr f = unsafeCoerce# f
-- | Causes the finalizers associated with a foreign pointer to be run
-- immediately.
finalizeForeignPtr :: ForeignPtr a -> IO ()
finalizeForeignPtr (ForeignPtr _ (PlainPtr _)) = return () -- no effect
finalizeForeignPtr (ForeignPtr _ foreignPtr) = foreignPtrFinalizer refFinalizers
where
refFinalizers = case foreignPtr of
(PlainForeignPtr ref) -> ref
(MallocPtr _ ref) -> ref
PlainPtr _ ->
error "finalizeForeignPtr PlainPtr"
| spacekitteh/smcghc | libraries/base/GHC/ForeignPtr.hs | bsd-3-clause | 18,737 | 0 | 22 | 4,158 | 3,155 | 1,661 | 1,494 | 218 | 6 |
module Main where
import Control.Applicative
import Evaluate
import Parser.AST
import Parser.RPN
import Pipes
import qualified Pipes.Prelude as P
import System.IO
import Text.Parsec.Error (errorMessages, messageString)
evaluatePrint :: Consumer String IO ()
evaluatePrint = do
lift $ putStr "> " >> hFlush stdout
str <- await
lift $ case evaluate <$> parseRpn str of
Left err -> print err
Right (IntLit i) -> print i
Right (DoubleLit d) -> print d
Right ast -> print ast
evaluatePrint
main :: IO ()
main = runEffect $ P.stdinLn >-> evaluatePrint
| lightquake/sym | REPL.hs | bsd-3-clause | 691 | 0 | 13 | 231 | 201 | 102 | 99 | 21 | 4 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : ./Comorphisms/CASL2CoCASL.hs
Description : embedding from CASL to CoCASL
Copyright : (c) Till Mossakowski and Uni Bremen 2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (imports Logic.Logic)
The embedding comorphism from CASL to CoCASL.
-}
module Comorphisms.CASL2CoCASL where
import Logic.Logic
import Logic.Comorphism
import qualified Data.Set as Set
import Common.ProofTree
-- CASL
import CASL.Logic_CASL
import CASL.Sublogic as SL
import CASL.Sign
import CASL.AS_Basic_CASL
import CASL.Morphism
-- CoCASL
import CoCASL.Logic_CoCASL
import CoCASL.AS_CoCASL
import CoCASL.CoCASLSign
import CoCASL.StatAna (CSign)
import CoCASL.Sublogic
-- | The identity of the comorphism
data CASL2CoCASL = CASL2CoCASL deriving (Show)
instance Language CASL2CoCASL -- default definition is okay
instance Comorphism CASL2CoCASL
CASL CASL_Sublogics
CASLBasicSpec CASLFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
CASLSign
CASLMor
Symbol RawSymbol ProofTree
CoCASL CoCASL_Sublogics
C_BASIC_SPEC CoCASLFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
CSign
CoCASLMor
Symbol RawSymbol () where
sourceLogic CASL2CoCASL = CASL
sourceSublogic CASL2CoCASL = SL.top
targetLogic CASL2CoCASL = CoCASL
mapSublogic CASL2CoCASL s = Just $ s { ext_features = False }
map_theory CASL2CoCASL = return . embedCASLTheory emptyCoCASLSign
map_morphism CASL2CoCASL = return . mapCASLMor emptyCoCASLSign emptyMorExt
map_sentence CASL2CoCASL _ = return . mapFORMULA
map_symbol CASL2CoCASL _ = Set.singleton . id
has_model_expansion CASL2CoCASL = True
is_weakly_amalgamable CASL2CoCASL = True
isInclusionComorphism CASL2CoCASL = True
| spechub/Hets | Comorphisms/CASL2CoCASL.hs | gpl-2.0 | 1,985 | 0 | 8 | 432 | 301 | 165 | 136 | 40 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- Module : Test.AWS.CloudWatch
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.CloudWatch
( tests
, fixtures
) where
import Network.AWS.CloudWatch
import Test.AWS.Gen.CloudWatch
import Test.Tasty
tests :: [TestTree]
tests = []
fixtures :: [TestTree]
fixtures = []
| fmapfmapfmap/amazonka | amazonka-cloudwatch/test/Test/AWS/CloudWatch.hs | mpl-2.0 | 752 | 0 | 5 | 201 | 73 | 50 | 23 | 11 | 1 |
{-# LANGUAGE OverloadedStrings, Rank2Types, PatternGuards #-}
module Haste.Config (
Config (..), AppStart, def, stdJSLibs, startCustom, fastMultiply,
safeMultiply, debugLib) where
import Data.JSTarget
import Control.Shell (replaceExtension, (</>))
import Data.ByteString.Builder
import Data.Monoid
import Haste.Environment
import Outputable (Outputable)
import Data.Default
import Data.List (stripPrefix, nub)
type AppStart = Builder -> Builder
stdJSLibs :: [FilePath]
stdJSLibs = map (jsDir </>) [
"rts.js", "floatdecode.js", "stdlib.js", "jsstring.js", "endian.js",
"MVar.js", "StableName.js", "Integer.js", "Int64.js", "md5.js", "array.js",
"pointers.js", "cheap-unicode.js", "Canvas.js", "Handle.js", "Weak.js",
"Foreign.js"
]
debugLib :: FilePath
debugLib = jsDir </> "debug.js"
-- | Execute the program as soon as it's loaded into memory.
-- Evaluate the result of applying main, as we might get a thunk back if
-- we're doing TCE. This is so cheap, small and non-intrusive we might
-- as well always do it this way, to simplify the config a bit.
startASAP :: AppStart
startASAP mainSym =
mainSym <> "();"
-- | Launch the application using a custom command.
startCustom :: String -> AppStart
startCustom "onload" = startOnLoadComplete
startCustom "asap" = startASAP
startCustom "onexec" = startASAP
startCustom str = insertSym str
-- | Replace the first occurrence of $HASTE_MAIN with Haste's entry point
-- symbol.
insertSym :: String -> AppStart
insertSym [] _ = stringUtf8 ""
insertSym str sym
  | Just r <- stripPrefix "$HASTE_MAIN" str = sym <> stringUtf8 r
    -- A '$' that does not start "$HASTE_MAIN" is emitted verbatim; without
    -- this case the final clause would recurse forever on such input.
  | ('$':r) <- str = stringUtf8 "$" <> insertSym r sym
  | (l,r) <- span (/= '$') str = stringUtf8 l <> insertSym r sym
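-- An editorial example (assuming a hypothetical entry-point symbol
-- @hasteMain@; 'Builder' string literals work because this module enables
-- OverloadedStrings):
--
-- > insertSym "window.addEventListener('load', $HASTE_MAIN);" "hasteMain"
--
-- renders to @window.addEventListener('load', hasteMain);@.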
-- | Execute the program when the document has finished loading.
startOnLoadComplete :: AppStart
startOnLoadComplete mainSym =
"window.onload = " <> mainSym <> ";"
-- | Int op wrapper for strictly 32 bit (|0).
strictly32Bits :: Exp -> Exp
strictly32Bits = flip (binOp BitOr) (litN 0)
-- | Safe Int multiplication.
safeMultiply :: Exp -> Exp -> Exp
safeMultiply a b = callForeign "imul" [a, b]
-- | Fast but unsafe Int multiplication.
fastMultiply :: Exp -> Exp -> Exp
fastMultiply = binOp Mul
-- | Compiler configuration.
data Config = Config {
-- | Runtime files to dump into the JS blob.
rtsLibs :: [FilePath],
-- | Path to directory where system jsmods are located.
libPaths :: [FilePath],
-- | Write all jsmods to this path.
targetLibPath :: FilePath,
-- | A function that takes the main symbol as its input and outputs the
-- code that starts the program.
appStart :: AppStart,
-- | Wrap the program in its own namespace?
wrapProg :: Bool,
-- | Options to the pretty printer.
ppOpts :: PPOpts,
    -- | A function that takes the name of a target as its input and
-- outputs the name of the file its JS blob should be written to.
outFile :: Config -> String -> String,
-- | Link the program?
performLink :: Bool,
-- | A function to call on each Int arithmetic primop.
wrapIntMath :: Exp -> Exp,
-- | Operation to use for Int multiplication.
multiplyIntOp :: Exp -> Exp -> Exp,
-- | Be verbose about warnings, etc.?
verbose :: Bool,
-- | Perform optimizations over the whole program at link time?
wholeProgramOpts :: Bool,
-- | Allow the possibility that some tail recursion may not be optimized
-- in order to gain slightly smaller code?
sloppyTCE :: Bool,
-- | Turn on run-time tracing of primops?
tracePrimops :: Bool,
-- | Run the entire thing through Google Closure when done?
useGoogleClosure :: Maybe FilePath,
-- | Extra flags for Google Closure to take?
useGoogleClosureFlags :: [String],
-- | Any external Javascript to link into the JS bundle.
jsExternals :: [FilePath],
-- | Produce a skeleton HTML file containing the program rather than a
-- JS file.
outputHTML :: Bool,
-- | GHC DynFlags used for STG generation.
-- Currently only used for printing StgSyn values.
showOutputable :: forall a. Outputable a => a -> String,
-- | Which module contains the program's main function?
-- Defaults to Just ("main", "Main")
mainMod :: Maybe (String, String),
-- | Perform optimizations.
-- Defaults to True.
optimize :: Bool,
-- | Emit @"use strict";@ declaration. Does not affect minification, but
-- *does* affect any external JS.
-- Defaults to True.
useStrict :: Bool
}
-- | Default compiler configuration.
defConfig :: Config
defConfig = Config {
rtsLibs = stdJSLibs,
libPaths = nub [jsmodUserDir, jsmodSysDir],
targetLibPath = ".",
appStart = startOnLoadComplete,
wrapProg = False,
ppOpts = def,
outFile = \cfg f -> let ext = if outputHTML cfg
then "html"
else "js"
in replaceExtension f ext,
performLink = True,
wrapIntMath = strictly32Bits,
multiplyIntOp = safeMultiply,
verbose = False,
wholeProgramOpts = False,
sloppyTCE = False,
tracePrimops = False,
useGoogleClosure = Nothing,
useGoogleClosureFlags = [],
jsExternals = [],
outputHTML = False,
showOutputable = const "No showOutputable defined in config!",
mainMod = Just ("main", "Main"),
optimize = True,
useStrict = True
}
instance Default Config where
def = defConfig
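-- An editorial sketch of how the default configuration is typically
-- adjusted: individual fields are overridden with record update syntax.
-- The chosen fields here are only an example.
--
-- > debugConfig :: Config
-- > debugConfig = def { verbose = True, performLink = False }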
| akru/haste-compiler | src/Haste/Config.hs | bsd-3-clause | 5,667 | 0 | 13 | 1,487 | 924 | 560 | 364 | 95 | 2 |
-------------------------------------------------------------------------
--
-- Main.hs
--
-- The main module of the Huffman example
--
-- (c) Addison-Wesley, 1996-2011.
--
-------------------------------------------------------------------------
-- The main module of the Huffman example
module Main (main, codeMessage, decodeMessage, codes, codeTable ) where
import Types ( Tree(Leaf,Node), Bit(L,R), HCode , Table )
import Coding ( codeMessage, decodeMessage )
import MakeCode ( codes, codeTable )
main :: IO ()
main = print decoded
-- Examples
-- ^^^^^^^^
-- The coding table generated from the text "there is a green hill".
tableEx :: Table
tableEx = codeTable (codes "there is a green hill")
-- The Huffman tree generated from the text "there is a green hill",
-- from which tableEx is produced by applying codeTable.
treeEx :: Tree
treeEx = codes "there is a green hill"
-- A message to be coded.
message :: String
message = "there are green hills here"
-- The message in code.
coded :: HCode
coded = codeMessage tableEx message
-- The coded message decoded.
decoded :: String
decoded = decodeMessage treeEx coded
| Numberartificial/workflow | snipets/src/Craft/Chapter15/Main.hs | mit | 1,168 | 0 | 7 | 227 | 170 | 110 | 60 | 15 | 1 |
{- |
Module : $Header$
Description : folder description
Copyright : (c) Christian Maeder and Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
The folder Syntax contains abstract syntax, parsing and printing
for heterogeneous structured and architectural specifications and
specification libraries. Parsing is based on
<http://www.cs.uu.nl/people/daan/parsec.html>,
pretty printing on "Common.Doc".
/Abstract syntax/
"Syntax.AS_Structured"
"Syntax.AS_Architecture"
"Syntax.AS_Library"
/Parsing/
"Syntax.Parse_AS_Structured"
"Syntax.Parse_AS_Architecture"
"Syntax.Parse_AS_Library"
/Pretty printing/
"Syntax.Print_AS_Structured"
"Syntax.Print_AS_Architecture"
"Syntax.Print_AS_Library"
-}
module Syntax where
| nevrenato/HetsAlloy | Syntax.hs | gpl-2.0 | 832 | 0 | 2 | 109 | 5 | 4 | 1 | 1 | 0 |
{-# LANGUAGE TypeFamilies, StandaloneDeriving, FlexibleInstances, FlexibleContexts, UndecidableInstances #-}
-- | This module declares some commonly used functions and instances.
module JVM.Common
(toCharList,
poolSize,
(!),
showListIx,
mapFindIndex,
byteString
) where
import Data.Binary
import Data.Binary.Put
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as M
import Data.Default
import Data.List
import JVM.ClassFile
instance Default B.ByteString where
def = B.empty
toCharList :: B.ByteString -> [Int]
toCharList bstr = map fromIntegral $ B.unpack bstr
poolSize :: Pool stage -> Int
poolSize = M.size
(!) :: (Ord k) => M.Map k a -> k -> a
(!) = (M.!)
showListIx :: (Show i, Show a) => [(i,a)] -> String
showListIx list = unlines $ map s list
where s (i, x) = show i ++ ":\t" ++ show x
byteString :: (Binary t) => t -> B.ByteString
byteString x = runPut (put x)
mapFindIndex :: (Num k) => (v -> Bool) -> M.Map k v -> Maybe k
mapFindIndex check m =
case find (check . snd) (M.assocs m) of
Nothing -> Nothing
Just (k,_) -> Just k
| ledyba/hs-java | JVM/Common.hs | lgpl-3.0 | 1,094 | 0 | 9 | 211 | 399 | 221 | 178 | 33 | 2 |
-- | Response types used by the security feature
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Distribution.Server.Features.Security.ResponseContentTypes (
TUFFile(..)
, mkTUFFile
, IsTUFFile(..)
, Timestamp(..)
, Snapshot(..)
, Root(..)
, Mirrors(..)
) where
-- stdlib
import Happstack.Server
import Control.DeepSeq
import Data.Typeable
import Data.SafeCopy
import qualified Data.ByteString.Lazy as BS.Lazy
-- hackage
import Distribution.Server.Features.Security.FileInfo
import Distribution.Server.Features.Security.Orphans ()
import Distribution.Server.Features.Security.MD5
import Distribution.Server.Features.Security.SHA256
import Distribution.Server.Framework.ResponseContentTypes
import Distribution.Server.Framework.MemSize
import Text.JSON.Canonical (Int54)
-- | Serialized TUF file along with some metadata necessary to serve the file
data TUFFile = TUFFile {
_tufFileContent :: !BS.Lazy.ByteString
, _tufFileLength :: !Int54
, _tufFileHashMD5 :: !MD5Digest
, _tufFileHashSHA256 :: !SHA256Digest
}
deriving (Typeable, Show, Eq)
deriveSafeCopy 0 'base ''TUFFile
instance NFData TUFFile where
rnf (TUFFile a b c d) = rnf (a, b, c, d)
instance MemSize TUFFile where
memSize (TUFFile a b c d) = memSize4 a b c d
instance ToMessage TUFFile where
toResponse file =
mkResponseLen (tufFileContent file) (fromIntegral (tufFileLength file)) [
("Content-Type", "application/json")
, ("Content-MD5", formatMD5Digest (tufFileHashMD5 file))
]
instance HasFileInfo TUFFile where
fileInfo file = FileInfo (tufFileLength file) (tufFileHashSHA256 file) (Just $ tufFileHashMD5 file)
mkTUFFile :: BS.Lazy.ByteString -> TUFFile
mkTUFFile content =
TUFFile {
_tufFileContent = content,
_tufFileLength = fromIntegral $ BS.Lazy.length content,
_tufFileHashMD5 = md5 content,
_tufFileHashSHA256 = sha256 content
}
{-------------------------------------------------------------------------------
Wrappers around TUFFile
(We originally had a phantom type argument here indicating what kind of TUF
  file this is, but that led to problems with 'deriveSafeCopy'; now we use a
bunch of newtype wrappers instead.)
-------------------------------------------------------------------------------}
class IsTUFFile a where
tufFileContent :: a -> BS.Lazy.ByteString
tufFileLength :: a -> Int54
tufFileHashMD5 :: a -> MD5Digest
tufFileHashSHA256 :: a -> SHA256Digest
instance IsTUFFile TUFFile where
tufFileContent TUFFile{..} = _tufFileContent
tufFileLength TUFFile{..} = _tufFileLength
tufFileHashMD5 TUFFile{..} = _tufFileHashMD5
tufFileHashSHA256 TUFFile{..} = _tufFileHashSHA256
newtype Timestamp = Timestamp { timestampFile :: TUFFile }
deriving (Typeable, Show, Eq, NFData, MemSize,
ToMessage, IsTUFFile, HasFileInfo)
newtype Snapshot = Snapshot { snapshotFile :: TUFFile }
deriving (Typeable, Show, Eq, NFData, MemSize,
ToMessage, IsTUFFile, HasFileInfo)
newtype Root = Root { rootFile :: TUFFile }
deriving (Typeable, Show, Eq, NFData, MemSize,
ToMessage, IsTUFFile, HasFileInfo)
newtype Mirrors = Mirrors { mirrorsFile :: TUFFile }
deriving (Typeable, Show, Eq, NFData, MemSize,
ToMessage, IsTUFFile, HasFileInfo)
deriveSafeCopy 0 'base ''Timestamp
deriveSafeCopy 0 'base ''Snapshot
deriveSafeCopy 0 'base ''Root
deriveSafeCopy 0 'base ''Mirrors
| agrafix/hackage-server | Distribution/Server/Features/Security/ResponseContentTypes.hs | bsd-3-clause | 3,578 | 0 | 11 | 628 | 835 | 473 | 362 | 83 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Haddock
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module deals with the @haddock@ and @hscolour@ commands.
-- It uses information about installed packages (from @ghc-pkg@) to find the
-- locations of documentation for dependent packages, so it can create links.
--
-- The @hscolour@ support allows generating HTML versions of the original
-- source, with coloured syntax highlighting.
module Distribution.Simple.Haddock (
haddock, hscolour,
haddockPackagePaths
) where
import Prelude ()
import Distribution.Compat.Prelude
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
-- local
import Distribution.Backpack.DescribeUnitId
import Distribution.Types.ForeignLib
import Distribution.Types.UnqualComponentName
import Distribution.Types.ComponentLocalBuildInfo
import Distribution.Types.ExecutableScope
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD hiding (Flag)
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.Program.GHC
import Distribution.Simple.Program.ResponseFile
import Distribution.Simple.Program
import Distribution.Simple.PreProcess
import Distribution.Simple.Setup
import Distribution.Simple.Build
import Distribution.Simple.InstallDirs
import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate)
import Distribution.Simple.BuildPaths
import qualified Distribution.Simple.PackageIndex as PackageIndex
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
import Distribution.InstalledPackageInfo ( InstalledPackageInfo )
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Text
import Distribution.Utils.NubList
import Distribution.Version
import Distribution.Verbosity
import Language.Haskell.Extension
import Distribution.Compat.Semigroup (All (..), Any (..))
import Data.Either ( rights )
import System.Directory (doesFileExist)
import System.FilePath ( (</>), (<.>), normalise, isAbsolute )
import System.IO (hClose, hPutStrLn, hSetEncoding, utf8)
-- ------------------------------------------------------------------------------
-- Types
-- | A record that represents the arguments to the haddock executable, a product
-- monoid.
data HaddockArgs = HaddockArgs {
argInterfaceFile :: Flag FilePath,
-- ^ Path to the interface file, relative to argOutputDir, required.
argPackageName :: Flag PackageIdentifier,
-- ^ Package name, required.
argHideModules :: (All,[ModuleName.ModuleName]),
-- ^ (Hide modules ?, modules to hide)
argIgnoreExports :: Any,
-- ^ Ignore export lists in modules?
argLinkSource :: Flag (Template,Template,Template),
-- ^ (Template for modules, template for symbols, template for lines).
argCssFile :: Flag FilePath,
-- ^ Optional custom CSS file.
argContents :: Flag String,
-- ^ Optional URL to contents page.
argVerbose :: Any,
argOutput :: Flag [Output],
-- ^ HTML or Hoogle doc or both? Required.
argInterfaces :: [(FilePath, Maybe String)],
-- ^ [(Interface file, URL to the HTML docs for links)].
argOutputDir :: Directory,
-- ^ Where to generate the documentation.
argTitle :: Flag String,
-- ^ Page title, required.
argPrologue :: Flag String,
-- ^ Prologue text, required.
argGhcOptions :: Flag (GhcOptions, Version),
-- ^ Additional flags to pass to GHC.
argGhcLibDir :: Flag FilePath,
-- ^ To find the correct GHC, required.
argTargets :: [FilePath]
-- ^ Modules to process.
} deriving Generic
-- | The FilePath of a directory; it's a monoid under '(</>)'.
newtype Directory = Dir { unDir' :: FilePath } deriving (Read,Show,Eq,Ord)
unDir :: Directory -> FilePath
unDir = normalise . unDir'
type Template = String
data Output = Html | Hoogle
-- ------------------------------------------------------------------------------
-- Haddock support
haddock :: PackageDescription
-> LocalBuildInfo
-> [PPSuffixHandler]
-> HaddockFlags
-> IO ()
haddock pkg_descr _ _ haddockFlags
| not (hasLibs pkg_descr)
&& not (fromFlag $ haddockExecutables haddockFlags)
&& not (fromFlag $ haddockTestSuites haddockFlags)
&& not (fromFlag $ haddockBenchmarks haddockFlags)
&& not (fromFlag $ haddockForeignLibs haddockFlags)
=
warn (fromFlag $ haddockVerbosity haddockFlags) $
"No documentation was generated as this package does not contain "
++ "a library. Perhaps you want to use the --executables, --tests,"
++ " --benchmarks or --foreign-libraries flags."
haddock pkg_descr lbi suffixes flags' = do
let verbosity = flag haddockVerbosity
comp = compiler lbi
platform = hostPlatform lbi
flags = case haddockTarget of
ForDevelopment -> flags'
ForHackage -> flags'
{ haddockHoogle = Flag True
, haddockHtml = Flag True
, haddockHtmlLocation = Flag (pkg_url ++ "/docs")
, haddockContents = Flag (toPathTemplate pkg_url)
, haddockHscolour = Flag True
}
pkg_url = "/package/$pkg-$version"
flag f = fromFlag $ f flags
tmpFileOpts = defaultTempFileOptions
{ optKeepTempFiles = flag haddockKeepTempFiles }
htmlTemplate = fmap toPathTemplate . flagToMaybe . haddockHtmlLocation
$ flags
haddockTarget =
fromFlagOrDefault ForDevelopment (haddockForHackage flags')
(haddockProg, version, _) <-
requireProgramVersion verbosity haddockProgram
(orLaterVersion (mkVersion [2,0])) (withPrograms lbi)
-- various sanity checks
when ( flag haddockHoogle
&& version < mkVersion [2,2]) $
die' verbosity "haddock 2.0 and 2.1 do not support the --hoogle flag."
haddockGhcVersionStr <- getProgramOutput verbosity haddockProg
["--ghc-version"]
case (simpleParse haddockGhcVersionStr, compilerCompatVersion GHC comp) of
(Nothing, _) -> die' verbosity "Could not get GHC version from Haddock"
(_, Nothing) -> die' verbosity "Could not get GHC version from compiler"
(Just haddockGhcVersion, Just ghcVersion)
| haddockGhcVersion == ghcVersion -> return ()
| otherwise -> die' verbosity $
"Haddock's internal GHC version must match the configured "
++ "GHC version.\n"
++ "The GHC version is " ++ display ghcVersion ++ " but "
++ "haddock is using GHC version " ++ display haddockGhcVersion
-- the tools match the requests, we can proceed
when (flag haddockHscolour) $
hscolour' (warn verbosity) haddockTarget pkg_descr lbi suffixes
(defaultHscolourFlags `mappend` haddockToHscolour flags)
libdirArgs <- getGhcLibDir verbosity lbi
let commonArgs = mconcat
[ libdirArgs
, fromFlags (haddockTemplateEnv lbi (packageId pkg_descr)) flags
, fromPackageDescription haddockTarget pkg_descr ]
withAllComponentsInBuildOrder pkg_descr lbi $ \component clbi -> do
componentInitialBuildSteps (flag haddockDistPref) pkg_descr lbi clbi verbosity
preprocessComponent pkg_descr component lbi clbi False verbosity suffixes
let
doExe com = case (compToExe com) of
Just exe -> do
withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $
\tmp -> do
exeArgs <- fromExecutable verbosity tmp lbi clbi htmlTemplate
version exe
let exeArgs' = commonArgs `mappend` exeArgs
runHaddock verbosity tmpFileOpts comp platform
haddockProg exeArgs'
Nothing -> do
warn (fromFlag $ haddockVerbosity flags)
"Unsupported component, skipping..."
return ()
-- We define 'smsg' once and then reuse it inside the case, so that
-- we don't say we are running Haddock when we actually aren't
-- (e.g., Haddock is not run on non-libraries)
smsg :: IO ()
smsg = setupMessage' verbosity "Running Haddock on" (packageId pkg_descr)
(componentLocalName clbi) (maybeComponentInstantiatedWith clbi)
case component of
CLib lib -> do
withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $
\tmp -> do
smsg
libArgs <- fromLibrary verbosity tmp lbi clbi htmlTemplate
version lib
let libArgs' = commonArgs `mappend` libArgs
runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs'
CFLib flib -> when (flag haddockForeignLibs) $ do
withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $
\tmp -> do
smsg
flibArgs <- fromForeignLib verbosity tmp lbi clbi htmlTemplate
version flib
let libArgs' = commonArgs `mappend` flibArgs
runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs'
CExe _ -> when (flag haddockExecutables) $ smsg >> doExe component
CTest _ -> when (flag haddockTestSuites) $ smsg >> doExe component
CBench _ -> when (flag haddockBenchmarks) $ smsg >> doExe component
for_ (extraDocFiles pkg_descr) $ \ fpath -> do
files <- matchFileGlob fpath
for_ files $ copyFileTo verbosity (unDir $ argOutputDir commonArgs)
-- ------------------------------------------------------------------------------
-- Contributions to HaddockArgs (see also Doctest.hs for very similar code).
fromFlags :: PathTemplateEnv -> HaddockFlags -> HaddockArgs
fromFlags env flags =
mempty {
argHideModules = (maybe mempty (All . not)
$ flagToMaybe (haddockInternal flags), mempty),
argLinkSource = if fromFlag (haddockHscolour flags)
then Flag ("src/%{MODULE/./-}.html"
,"src/%{MODULE/./-}.html#%{NAME}"
,"src/%{MODULE/./-}.html#line-%{LINE}")
else NoFlag,
argCssFile = haddockCss flags,
argContents = fmap (fromPathTemplate . substPathTemplate env)
(haddockContents flags),
argVerbose = maybe mempty (Any . (>= deafening))
. flagToMaybe $ haddockVerbosity flags,
argOutput =
Flag $ case [ Html | Flag True <- [haddockHtml flags] ] ++
[ Hoogle | Flag True <- [haddockHoogle flags] ]
of [] -> [ Html ]
os -> os,
argOutputDir = maybe mempty Dir . flagToMaybe $ haddockDistPref flags
}
fromPackageDescription :: HaddockTarget -> PackageDescription -> HaddockArgs
fromPackageDescription haddockTarget pkg_descr =
mempty { argInterfaceFile = Flag $ haddockName pkg_descr,
argPackageName = Flag $ packageId $ pkg_descr,
argOutputDir = Dir $
"doc" </> "html" </> haddockDirName haddockTarget pkg_descr,
argPrologue = Flag $ if null desc then synopsis pkg_descr
else desc,
argTitle = Flag $ showPkg ++ subtitle
}
where
desc = PD.description pkg_descr
showPkg = display (packageId pkg_descr)
subtitle | null (synopsis pkg_descr) = ""
| otherwise = ": " ++ synopsis pkg_descr
componentGhcOptions :: Verbosity -> LocalBuildInfo
-> BuildInfo -> ComponentLocalBuildInfo -> FilePath
-> GhcOptions
componentGhcOptions verbosity lbi bi clbi odir =
let f = case compilerFlavor (compiler lbi) of
GHC -> GHC.componentGhcOptions
GHCJS -> GHCJS.componentGhcOptions
_ -> error $
"Distribution.Simple.Haddock.componentGhcOptions:" ++
"haddock only supports GHC and GHCJS"
in f verbosity lbi bi clbi odir
mkHaddockArgs :: Verbosity
-> FilePath
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate -- ^ template for HTML location
-> Version
-> [FilePath]
-> BuildInfo
-> IO HaddockArgs
mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion inFiles bi = do
ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate
let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) {
-- Noooooooooo!!!!!111
-- haddock stomps on our precious .hi
-- and .o files. Workaround by telling
-- haddock to write them elsewhere.
ghcOptObjDir = toFlag tmp,
ghcOptHiDir = toFlag tmp,
ghcOptStubDir = toFlag tmp
} `mappend` getGhcCppOpts haddockVersion bi
sharedOpts = vanillaOpts {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptFPic = toFlag True,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra =
toNubListR $ hcSharedOptions GHC bi
}
opts <- if withVanillaLib lbi
then return vanillaOpts
else if withSharedLib lbi
then return sharedOpts
else die' verbosity $ "Must have vanilla or shared libraries "
++ "enabled in order to run haddock"
ghcVersion <- maybe (die' verbosity "Compiler has no GHC version")
return
(compilerCompatVersion GHC (compiler lbi))
return ifaceArgs {
argGhcOptions = toFlag (opts, ghcVersion),
argTargets = inFiles
}
fromLibrary :: Verbosity
-> FilePath
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate -- ^ template for HTML location
-> Version
-> Library
-> IO HaddockArgs
fromLibrary verbosity tmp lbi clbi htmlTemplate haddockVersion lib = do
inFiles <- map snd `fmap` getLibSourceFiles verbosity lbi lib clbi
args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion
inFiles (libBuildInfo lib)
return args {
argHideModules = (mempty, otherModules (libBuildInfo lib))
}
fromExecutable :: Verbosity
-> FilePath
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate -- ^ template for HTML location
-> Version
-> Executable
-> IO HaddockArgs
fromExecutable verbosity tmp lbi clbi htmlTemplate haddockVersion exe = do
inFiles <- map snd `fmap` getExeSourceFiles verbosity lbi exe clbi
args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate
haddockVersion inFiles (buildInfo exe)
return args {
argOutputDir = Dir $ unUnqualComponentName $ exeName exe,
argTitle = Flag $ unUnqualComponentName $ exeName exe
}
fromForeignLib :: Verbosity
-> FilePath
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate -- ^ template for HTML location
-> Version
-> ForeignLib
-> IO HaddockArgs
fromForeignLib verbosity tmp lbi clbi htmlTemplate haddockVersion flib = do
inFiles <- map snd `fmap` getFLibSourceFiles verbosity lbi flib clbi
args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate
haddockVersion inFiles (foreignLibBuildInfo flib)
return args {
argOutputDir = Dir $ unUnqualComponentName $ foreignLibName flib,
argTitle = Flag $ unUnqualComponentName $ foreignLibName flib
}
compToExe :: Component -> Maybe Executable
compToExe comp =
case comp of
CTest test@TestSuite { testInterface = TestSuiteExeV10 _ f } ->
Just Executable {
exeName = testName test,
modulePath = f,
exeScope = ExecutablePublic,
buildInfo = testBuildInfo test
}
CBench bench@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } ->
Just Executable {
exeName = benchmarkName bench,
modulePath = f,
exeScope = ExecutablePublic,
buildInfo = benchmarkBuildInfo bench
}
CExe exe -> Just exe
_ -> Nothing
getInterfaces :: Verbosity
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate -- ^ template for HTML location
-> IO HaddockArgs
getInterfaces verbosity lbi clbi htmlTemplate = do
(packageFlags, warnings) <- haddockPackageFlags verbosity lbi clbi htmlTemplate
traverse_ (warn (verboseUnmarkOutput verbosity)) warnings
return $ mempty {
argInterfaces = packageFlags
}
getGhcCppOpts :: Version
-> BuildInfo
-> GhcOptions
getGhcCppOpts haddockVersion bi =
mempty {
ghcOptExtensions = toNubListR [EnableExtension CPP | needsCpp],
ghcOptCppOptions = toNubListR defines
}
where
needsCpp = EnableExtension CPP `elem` usedExtensions bi
defines = [haddockVersionMacro]
haddockVersionMacro = "-D__HADDOCK_VERSION__="
++ show (v1 * 1000 + v2 * 10 + v3)
where
[v1, v2, v3] = take 3 $ versionNumbers haddockVersion ++ [0,0]
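        -- For illustration (an editorial note, not in the original source):
        -- haddock-2.16.2 yields -D__HADDOCK_VERSION__=2162
        -- (2 * 1000 + 16 * 10 + 2).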
getGhcLibDir :: Verbosity -> LocalBuildInfo
-> IO HaddockArgs
getGhcLibDir verbosity lbi = do
l <- case compilerFlavor (compiler lbi) of
GHC -> GHC.getLibDir verbosity lbi
GHCJS -> GHCJS.getLibDir verbosity lbi
_ -> error "haddock only supports GHC and GHCJS"
return $ mempty { argGhcLibDir = Flag l }
-- ------------------------------------------------------------------------------
-- | Call haddock with the specified arguments.
runHaddock :: Verbosity
-> TempFileOptions
-> Compiler
-> Platform
-> ConfiguredProgram
-> HaddockArgs
-> IO ()
runHaddock verbosity tmpFileOpts comp platform haddockProg args = do
let haddockVersion = fromMaybe (error "unable to determine haddock version")
(programVersion haddockProg)
renderArgs verbosity tmpFileOpts haddockVersion comp platform args $
\(flags,result)-> do
runProgram verbosity haddockProg flags
notice verbosity $ "Documentation created: " ++ result
renderArgs :: Verbosity
-> TempFileOptions
-> Version
-> Compiler
-> Platform
-> HaddockArgs
-> (([String], FilePath) -> IO a)
-> IO a
renderArgs verbosity tmpFileOpts version comp platform args k = do
let haddockSupportsUTF8 = version >= mkVersion [2,14,4]
haddockSupportsResponseFiles = version > mkVersion [2,16,2]
createDirectoryIfMissingVerbose verbosity True outputDir
withTempFileEx tmpFileOpts outputDir "haddock-prologue.txt" $
\prologueFileName h -> do
do
when haddockSupportsUTF8 (hSetEncoding h utf8)
hPutStrLn h $ fromFlag $ argPrologue args
hClose h
let pflag = "--prologue=" ++ prologueFileName
renderedArgs = pflag : renderPureArgs version comp platform args
if haddockSupportsResponseFiles
then
withResponseFile
verbosity
tmpFileOpts
outputDir
"haddock-response.txt"
(if haddockSupportsUTF8 then Just utf8 else Nothing)
renderedArgs
(\responseFileName -> k (["@" ++ responseFileName], result))
else
k (renderedArgs, result)
where
outputDir = (unDir $ argOutputDir args)
result = intercalate ", "
. map (\o -> outputDir </>
case o of
Html -> "index.html"
Hoogle -> pkgstr <.> "txt")
$ arg argOutput
where
pkgstr = display $ packageName pkgid
pkgid = arg argPackageName
arg f = fromFlag $ f args
renderPureArgs :: Version -> Compiler -> Platform -> HaddockArgs -> [String]
renderPureArgs version comp platform args = concat
[ (:[]) . (\f -> "--dump-interface="++ unDir (argOutputDir args) </> f)
. fromFlag . argInterfaceFile $ args
, if isVersion 2 16
then (\pkg -> [ "--package-name=" ++ display (pkgName pkg)
, "--package-version="++display (pkgVersion pkg)
])
. fromFlag . argPackageName $ args
else []
, (\(All b,xs) -> bool (map (("--hide=" ++). display) xs) [] b)
. argHideModules $ args
, bool ["--ignore-all-exports"] [] . getAny . argIgnoreExports $ args
, maybe [] (\(m,e,l) ->
["--source-module=" ++ m
,"--source-entity=" ++ e]
++ if isVersion 2 14 then ["--source-entity-line=" ++ l]
else []
) . flagToMaybe . argLinkSource $ args
, maybe [] ((:[]) . ("--css="++)) . flagToMaybe . argCssFile $ args
, maybe [] ((:[]) . ("--use-contents="++)) . flagToMaybe . argContents $ args
, bool [] [verbosityFlag] . getAny . argVerbose $ args
, map (\o -> case o of Hoogle -> "--hoogle"; Html -> "--html")
. fromFlag . argOutput $ args
, renderInterfaces . argInterfaces $ args
, (:[]) . ("--odir="++) . unDir . argOutputDir $ args
, (:[]) . ("--title="++)
. (bool (++" (internal documentation)")
id (getAny $ argIgnoreExports args))
. fromFlag . argTitle $ args
, [ "--optghc=" ++ opt | (opts, _ghcVer) <- flagToList (argGhcOptions args)
, opt <- renderGhcOptions comp platform opts ]
, maybe [] (\l -> ["-B"++l]) $
flagToMaybe (argGhcLibDir args) -- error if Nothing?
, argTargets $ args
]
where
renderInterfaces =
map (\(i,mh) -> "--read-interface=" ++
maybe "" (++",") mh ++ i)
bool a b c = if c then a else b
isVersion major minor = version >= mkVersion [major,minor]
verbosityFlag
| isVersion 2 5 = "--verbosity=1"
| otherwise = "--verbose"
---------------------------------------------------------------------------------
-- | Given a list of 'InstalledPackageInfo's, return a list of interfaces and
-- HTML paths, and an optional warning for packages with missing documentation.
haddockPackagePaths :: [InstalledPackageInfo]
-> Maybe (InstalledPackageInfo -> FilePath)
-> NoCallStackIO ([(FilePath, Maybe FilePath)], Maybe String)
haddockPackagePaths ipkgs mkHtmlPath = do
interfaces <- sequenceA
[ case interfaceAndHtmlPath ipkg of
Nothing -> return (Left (packageId ipkg))
Just (interface, html) -> do
exists <- doesFileExist interface
if exists
then return (Right (interface, html))
else return (Left pkgid)
| ipkg <- ipkgs, let pkgid = packageId ipkg
, pkgName pkgid `notElem` noHaddockWhitelist
]
let missing = [ pkgid | Left pkgid <- interfaces ]
      warning = "The documentation for the following packages is not "
++ "installed. No links will be generated to these packages: "
++ intercalate ", " (map display missing)
flags = rights interfaces
return (flags, if null missing then Nothing else Just warning)
where
-- Don't warn about missing documentation for these packages. See #1231.
noHaddockWhitelist = map mkPackageName [ "rts" ]
-- Actually extract interface and HTML paths from an 'InstalledPackageInfo'.
interfaceAndHtmlPath :: InstalledPackageInfo
-> Maybe (FilePath, Maybe FilePath)
interfaceAndHtmlPath pkg = do
interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg)
html <- case mkHtmlPath of
Nothing -> fmap fixFileUrl
(listToMaybe (InstalledPackageInfo.haddockHTMLs pkg))
Just mkPath -> Just (mkPath pkg)
return (interface, if null html then Nothing else Just html)
where
-- The 'haddock-html' field in the hc-pkg output is often set as a
-- native path, but we need it as a URL. See #1064.
fixFileUrl f | isAbsolute f = "file://" ++ f
| otherwise = f
haddockPackageFlags :: Verbosity
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Maybe PathTemplate
-> IO ([(FilePath, Maybe FilePath)], Maybe String)
haddockPackageFlags verbosity lbi clbi htmlTemplate = do
let allPkgs = installedPkgs lbi
directDeps = map fst (componentPackageDeps clbi)
transitiveDeps <- case PackageIndex.dependencyClosure allPkgs directDeps of
Left x -> return x
Right inf -> die' verbosity $ "internal error when calculating transitive "
++ "package dependencies.\nDebug info: " ++ show inf
haddockPackagePaths (PackageIndex.allPackages transitiveDeps) mkHtmlPath
where
mkHtmlPath = fmap expandTemplateVars htmlTemplate
expandTemplateVars tmpl pkg =
fromPathTemplate . substPathTemplate (env pkg) $ tmpl
env pkg = haddockTemplateEnv lbi (packageId pkg)
haddockTemplateEnv :: LocalBuildInfo -> PackageIdentifier -> PathTemplateEnv
haddockTemplateEnv lbi pkg_id =
(PrefixVar, prefix (installDirTemplates lbi))
-- We want the legacy unit ID here, because it gives us nice paths
-- (Haddock people don't care about the dependencies)
: initialPathTemplateEnv
pkg_id
(mkLegacyUnitId pkg_id)
(compilerInfo (compiler lbi))
(hostPlatform lbi)
-- ------------------------------------------------------------------------------
-- hscolour support.
hscolour :: PackageDescription
-> LocalBuildInfo
-> [PPSuffixHandler]
-> HscolourFlags
-> IO ()
hscolour = hscolour' dieNoVerbosity ForDevelopment
hscolour' :: (String -> IO ()) -- ^ Called when the 'hscolour' exe is not found.
-> HaddockTarget
-> PackageDescription
-> LocalBuildInfo
-> [PPSuffixHandler]
-> HscolourFlags
-> IO ()
hscolour' onNoHsColour haddockTarget pkg_descr lbi suffixes flags =
either onNoHsColour (\(hscolourProg, _, _) -> go hscolourProg) =<<
lookupProgramVersion verbosity hscolourProgram
(orLaterVersion (mkVersion [1,8])) (withPrograms lbi)
where
go :: ConfiguredProgram -> IO ()
go hscolourProg = do
setupMessage verbosity "Running hscolour for" (packageId pkg_descr)
createDirectoryIfMissingVerbose verbosity True $
hscolourPref haddockTarget distPref pkg_descr
withAllComponentsInBuildOrder pkg_descr lbi $ \comp clbi -> do
componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity
preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes
let
doExe com = case (compToExe com) of
Just exe -> do
let outputDir = hscolourPref haddockTarget distPref pkg_descr
</> unUnqualComponentName (exeName exe) </> "src"
runHsColour hscolourProg outputDir =<< getExeSourceFiles verbosity lbi exe clbi
Nothing -> do
warn (fromFlag $ hscolourVerbosity flags)
"Unsupported component, skipping..."
return ()
case comp of
CLib lib -> do
let outputDir = hscolourPref haddockTarget distPref pkg_descr </> "src"
runHsColour hscolourProg outputDir =<< getLibSourceFiles verbosity lbi lib clbi
CFLib flib -> do
let outputDir = hscolourPref haddockTarget distPref pkg_descr
</> unUnqualComponentName (foreignLibName flib) </> "src"
runHsColour hscolourProg outputDir =<< getFLibSourceFiles verbosity lbi flib clbi
CExe _ -> when (fromFlag (hscolourExecutables flags)) $ doExe comp
CTest _ -> when (fromFlag (hscolourTestSuites flags)) $ doExe comp
CBench _ -> when (fromFlag (hscolourBenchmarks flags)) $ doExe comp
stylesheet = flagToMaybe (hscolourCSS flags)
verbosity = fromFlag (hscolourVerbosity flags)
distPref = fromFlag (hscolourDistPref flags)
runHsColour prog outputDir moduleFiles = do
createDirectoryIfMissingVerbose verbosity True outputDir
case stylesheet of -- copy the CSS file
Nothing | programVersion prog >= Just (mkVersion [1,9]) ->
runProgram verbosity prog
["-print-css", "-o" ++ outputDir </> "hscolour.css"]
| otherwise -> return ()
Just s -> copyFileVerbose verbosity s (outputDir </> "hscolour.css")
for_ moduleFiles $ \(m, inFile) ->
runProgram verbosity prog
["-css", "-anchor", "-o" ++ outFile m, inFile]
where
outFile m = outputDir </>
intercalate "-" (ModuleName.components m) <.> "html"
haddockToHscolour :: HaddockFlags -> HscolourFlags
haddockToHscolour flags =
HscolourFlags {
hscolourCSS = haddockHscolourCss flags,
hscolourExecutables = haddockExecutables flags,
hscolourTestSuites = haddockTestSuites flags,
hscolourBenchmarks = haddockBenchmarks flags,
hscolourForeignLibs = haddockForeignLibs flags,
hscolourVerbosity = haddockVerbosity flags,
hscolourDistPref = haddockDistPref flags
}
-- ------------------------------------------------------------------------------
-- Boilerplate Monoid instance.
instance Monoid HaddockArgs where
mempty = gmempty
mappend = (<>)
instance Semigroup HaddockArgs where
(<>) = gmappend
instance Monoid Directory where
mempty = Dir "."
mappend = (<>)
instance Semigroup Directory where
Dir m <> Dir n = Dir $ m </> n
| themoritz/cabal | Cabal/Distribution/Simple/Haddock.hs | bsd-3-clause | 31,077 | 0 | 28 | 9,222 | 6,836 | 3,527 | 3,309 | 576 | 9 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Дополнение Eval Villain</title>
<maps>
<homeID>evalvillain</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/evalvillain/src/main/javahelp/org/zaproxy/addon/evalvillain/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 1,018 | 77 | 66 | 157 | 483 | 242 | 241 | -1 | -1 |
module Main where
import Eval
import Type
import Check
import Parser
import Pretty
import Syntax
import Data.Maybe
import Control.Monad.Trans
import System.Console.Haskeline
eval' :: Expr -> Expr
eval' = fromJust . eval
process :: String -> IO ()
process line = do
let res = parseExpr line
case res of
Left err -> print err
Right ex -> do
let chk = check ex
case chk of
Left err -> print err
Right ty -> putStrLn $ (ppexpr $ eval' ex) ++ " : " ++ (pptype ty)
main :: IO ()
main = runInputT defaultSettings loop
where
loop = do
minput <- getInputLine "Arith> "
case minput of
Nothing -> outputStrLn "Goodbye."
Just input -> (liftIO $ process input) >> loop
| yupferris/write-you-a-haskell | chapter5/calc_typed/Main.hs | mit | 726 | 0 | 20 | 191 | 263 | 131 | 132 | 29 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.AppLauncher
-- Copyright : (C) 2008 Luis Cabellos
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- A module for launching applications that receive parameters on the command
-- line. The launcher calls a prompt to get the parameters.
--
-----------------------------------------------------------------------------
module XMonad.Prompt.AppLauncher ( -- * Usage
-- $usage
launchApp
,module XMonad.Prompt
-- * Use case: launching gimp with file
-- $tip
-- * Types
,Application, AppPrompt,
) where
import XMonad (X(),MonadIO)
import XMonad.Core (spawn)
import XMonad.Prompt (XPrompt(showXPrompt), mkXPrompt, XPConfig())
import XMonad.Prompt.Shell (getShellCompl)
{- $usage
 This module is intended to allow launching the same application
 while changing its parameters based on the user's response. For example,
 when you want to open an image in gimp, you can open gimp and then use
 the File menu to open the image, or you can use this module to select
 the image on the command line.
 We use Prompt to read the command line from the user. This also allows
 auto-expanding file names while the command line is being written.
-}
{- $tip
 First, you need to import the necessary modules: Prompt, which provides the
 prompt configuration, and the AppLauncher module itself.
> import XMonad.Prompt
> import XMonad.Prompt.AppLauncher as AL
Then you can add the bindings to the applications.
> ...
> , ((modm, xK_g), AL.launchApp defaultXPConfig "gimp" )
> , ((modm, xK_g), AL.launchApp defaultXPConfig "evince" )
> ...
-}
-- A customized prompt
data AppPrompt = AppPrompt String
instance XPrompt AppPrompt where
showXPrompt (AppPrompt n) = n ++ " "
type Application = String
type Parameters = String
{- | Given an application and its parameters, launch the application. -}
launch :: MonadIO m => Application -> Parameters -> m ()
launch app params = spawn ( app ++ " " ++ params )
{- | Get the user's response to a prompt and launch an application using the
     input as command parameters of the application. -}
launchApp :: XPConfig -> Application -> X ()
launchApp config app = mkXPrompt (AppPrompt app) config (getShellCompl []) $ launch app
| adinapoli/xmonad-contrib | XMonad/Prompt/AppLauncher.hs | bsd-3-clause | 2,634 | 0 | 9 | 699 | 259 | 155 | 104 | 17 | 1 |
module B1 where
data Data1 a
= C1 a Int Int | C4 Float | C2 Int | C3 Float
addedC4 = error "added C4 Float to Data1"
g (C1 x y z) = y
g (C4 a) = addedC4
g (C2 x) = x
g (C3 x) = 42
| kmate/HaRe | old/testing/addCon/B1AST.hs | bsd-3-clause | 189 | 0 | 7 | 59 | 101 | 54 | 47 | 8 | 1 |
-- | A description of the platform we're compiling for.
--
module Platform (
Platform(..),
Arch(..),
OS(..),
ArmISA(..),
ArmISAExt(..),
ArmABI(..),
PPC_64ABI(..),
target32Bit,
isARM,
osElfTarget,
osMachOTarget,
platformUsesFrameworks,
platformBinariesAreStaticLibs,
)
where
-- | Contains enough information for the native code generator to emit
-- code for this platform.
data Platform
= Platform {
platformArch :: Arch,
platformOS :: OS,
-- Word size in bytes (i.e. normally 4 or 8,
-- for 32bit and 64bit platforms respectively)
platformWordSize :: {-# UNPACK #-} !Int,
platformUnregisterised :: Bool,
platformHasGnuNonexecStack :: Bool,
platformHasIdentDirective :: Bool,
platformHasSubsectionsViaSymbols :: Bool,
platformIsCrossCompiling :: Bool
}
deriving (Read, Show, Eq)
-- | Architectures that the native code generator knows about.
-- TODO: It might be nice to extend these constructors with information
-- about what instruction set extensions an architecture might support.
--
data Arch
= ArchUnknown
| ArchX86
| ArchX86_64
| ArchPPC
| ArchPPC_64
{ ppc_64ABI :: PPC_64ABI
}
| ArchSPARC
| ArchSPARC64
| ArchARM
{ armISA :: ArmISA
, armISAExt :: [ArmISAExt]
, armABI :: ArmABI
}
| ArchARM64
| ArchAlpha
| ArchMipseb
| ArchMipsel
| ArchJavaScript
deriving (Read, Show, Eq)
isARM :: Arch -> Bool
isARM (ArchARM {}) = True
isARM ArchARM64 = True
isARM _ = False
-- | Operating systems that the native code generator knows about.
-- Having OSUnknown should produce a sensible default, but no promises.
data OS
= OSUnknown
| OSLinux
| OSDarwin
| OSiOS
| OSSolaris2
| OSMinGW32
| OSFreeBSD
| OSDragonFly
| OSOpenBSD
| OSNetBSD
| OSKFreeBSD
| OSHaiku
| OSQNXNTO
| OSAndroid
| OSAIX
deriving (Read, Show, Eq)
-- | ARM Instruction Set Architecture, Extensions and ABI
--
data ArmISA
= ARMv5
| ARMv6
| ARMv7
deriving (Read, Show, Eq)
data ArmISAExt
= VFPv2
| VFPv3
| VFPv3D16
| NEON
| IWMMX2
deriving (Read, Show, Eq)
data ArmABI
= SOFT
| SOFTFP
| HARD
deriving (Read, Show, Eq)
-- | PowerPC 64-bit ABI
--
data PPC_64ABI
= ELF_V1
| ELF_V2
deriving (Read, Show, Eq)
-- | This predicate tells us whether the platform is 32-bit.
target32Bit :: Platform -> Bool
target32Bit p = platformWordSize p == 4
-- | This predicate tells us whether the OS supports ELF-like shared libraries.
osElfTarget :: OS -> Bool
osElfTarget OSLinux = True
osElfTarget OSFreeBSD = True
osElfTarget OSDragonFly = True
osElfTarget OSOpenBSD = True
osElfTarget OSNetBSD = True
osElfTarget OSSolaris2 = True
osElfTarget OSDarwin = False
osElfTarget OSiOS = False
osElfTarget OSMinGW32 = False
osElfTarget OSKFreeBSD = True
osElfTarget OSHaiku = True
osElfTarget OSQNXNTO = False
osElfTarget OSAndroid = True
osElfTarget OSAIX = False
osElfTarget OSUnknown = False
-- Defaulting to False is safe; it means don't rely on any
-- ELF-specific functionality. It is important to have a default for
-- portability, otherwise we have to answer this question for every
-- new platform we compile on (even unreg).
-- | This predicate tells us whether the OS support Mach-O shared libraries.
osMachOTarget :: OS -> Bool
osMachOTarget OSDarwin = True
osMachOTarget _ = False
osUsesFrameworks :: OS -> Bool
osUsesFrameworks OSDarwin = True
osUsesFrameworks OSiOS = True
osUsesFrameworks _ = False
platformUsesFrameworks :: Platform -> Bool
platformUsesFrameworks = osUsesFrameworks . platformOS
osBinariesAreStaticLibs :: OS -> Bool
osBinariesAreStaticLibs OSiOS = True
osBinariesAreStaticLibs _ = False
platformBinariesAreStaticLibs :: Platform -> Bool
platformBinariesAreStaticLibs = osBinariesAreStaticLibs . platformOS
| olsner/ghc | compiler/utils/Platform.hs | bsd-3-clause | 4,415 | 0 | 9 | 1,379 | 751 | 444 | 307 | 118 | 1 |
import Test.Cabal.Prelude
-- Test building a vanilla library/executable which uses Template Haskell
main = setupAndCabalTest $ setup_build []
| mydaum/cabal | cabal-testsuite/PackageTests/TemplateHaskell/vanilla/setup.test.hs | bsd-3-clause | 142 | 0 | 7 | 18 | 22 | 12 | 10 | 2 | 1 |
{-# LANGUAGE DeriveFoldable #-}
module Main where
import Data.Semigroup
-- Just a list without any special fusion rules.
data List a = Nil | Cons a (List a) deriving Foldable
instance Semigroup (List a) where
Nil <> ys = ys
Cons x xs <> ys = Cons x (xs <> ys)
replicateList :: Int -> a -> List a
replicateList 0 x = Nil
replicateList n x = Cons x (replicateList (n - 1) x)
newtype ListList a = ListList (List (List a)) deriving Foldable
long :: Int -> Bool
long n = null $ ListList $ replicateList n Nil <> Cons (Cons () Nil) Nil
main :: IO ()
main = print $ long (10^(6 :: Int))
| ezyang/ghc | testsuite/tests/perf/should_run/DeriveNull.hs | bsd-3-clause | 591 | 0 | 9 | 129 | 252 | 132 | 120 | 15 | 1 |
{-# LANGUAGE TypeFamilies, ConstraintKinds, UndecidableInstances #-}
module Ctx where
import GHC.Prim( Constraint )
type family Indirect :: * -> Constraint
type instance Indirect = Show
class Cls a where
f :: a -> String
instance Indirect a => Cls [a] where
f = show
| tibbe/ghc | testsuite/tests/typecheck/should_compile/tc255.hs | bsd-3-clause | 279 | 0 | 7 | 56 | 76 | 43 | 33 | 9 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Foo where
class C a
instance C Int
newtype Foo = Foo Int
deriving C
| wxwxwwxxx/ghc | testsuite/tests/parser/should_compile/read057.hs | bsd-3-clause | 128 | 0 | 5 | 29 | 30 | 17 | 13 | -1 | -1 |
{-# Language CPP, FlexibleContexts, TypeFamilies, KindSignatures, TemplateHaskell, GADTs, ScopedTypeVariables, TypeOperators #-}
#if __GLASGOW_HASKELL__ >= 704
{-# LANGUAGE ConstraintKinds #-}
#endif
#if MIN_VERSION_template_haskell(2,8,0)
{-# Language PolyKinds #-}
#endif
#if __GLASGOW_HASKELL__ >= 800
{-# Language TypeInType #-}
#endif
{-|
Module : Types
Description : Test cases for the th-abstraction package
Copyright : Eric Mertens 2017
License : ISC
Maintainer : [email protected]
This module defines types used for testing features of @th-abstraction@
on various versions of GHC.
-}
module Types where
#if __GLASGOW_HASKELL__ >= 704
import GHC.Exts (Constraint)
#endif
import Language.Haskell.TH hiding (Type)
import Language.Haskell.TH.Datatype
import Language.Haskell.TH.Datatype.TyVarBndr
import Language.Haskell.TH.Lib (starK)
#if __GLASGOW_HASKELL__ >= 800
import Data.Kind
#endif
type Gadt1Int = Gadt1 Int
infixr 6 :**:
data Gadt1 (a :: *) where
Gadtc1 :: Int -> Gadt1Int
Gadtc2 :: (a,a) -> Gadt1 a
(:**:) :: Bool -> Char -> Gadt1 () -- This is declared infix
(:!!:) :: Char -> Bool -> Gadt1 Double -- This is not
data Adt1 (a :: *) (b :: *) = Adtc1 (a,b) | Bool `Adtc2` Int
data Gadtrec1 a where
Gadtrecc1, Gadtrecc2 :: { gadtrec1a :: a, gadtrec1b :: b } -> Gadtrec1 (a,b)
data Equal :: * -> * -> * -> * where
Equalc :: (Read a, Show a) => [a] -> Maybe a -> Equal a a a
data Showable :: * where
Showable :: Show a => a -> Showable
data R = R1 { field1, field2 :: Int }
data Gadt2 :: * -> * -> * where
Gadt2c1 :: Gadt2 a [a]
Gadt2c2 :: Gadt2 [a] a
Gadt2c3 :: Gadt2 [a] [a]
data VoidStoS (f :: * -> *)
data StrictDemo = StrictDemo Int !Int {-# UNPACK #-} !Int
type (:+:) = Either
-- Data families
data family T43Fam
type Id (a :: *) = a
#if MIN_VERSION_template_haskell(2,7,0)
data family DF (a :: *)
data instance DF (Maybe a) = DFMaybe Int [a]
# if MIN_VERSION_template_haskell(2,8,0)
data family DF1 (a :: k)
# else
data family DF1 (a :: *)
# endif
data instance DF1 (b :: *) = DF1 b
data family Quoted (a :: *)
# if MIN_VERSION_template_haskell(2,8,0)
data family Poly (a :: k)
# else
data family Poly (a :: *)
# endif
data instance Poly a = MkPoly
data family GadtFam (a :: *) (b :: *)
data instance GadtFam c d where
MkGadtFam1 :: x -> y -> GadtFam y x
  (:&&:) :: e -> f -> GadtFam [e] f -- This is declared infix
(:^^:) :: Int -> Int -> GadtFam Int Int -- This is not
(:#%:) :: { famRec1, famRec2 :: Bool } -> GadtFam Bool Bool -- Nor is this
MkGadtFam4 :: (Int ~ z) => z -> GadtFam z z
MkGadtFam5 :: (q ~ Char) => q -> GadtFam Bool Bool
infixl 3 :&&:, :#%:
data family FamLocalDec1 a
data family FamLocalDec2 a b c
data family T46 a b c
data instance T46 (f (p :: *)) (f p) q = MkT46 q
data family T73 a b
data instance T73 Int b = MkT73 b
#endif
#if __GLASGOW_HASKELL__ >= 704
type Konst (a :: Constraint) (b :: Constraint) = a
type PredSyn1 a b = Konst (Show a) (Read b)
type PredSyn2 a b = Konst (PredSyn1 a b) (Show a)
type PredSyn3 c = Int ~ c
data PredSynT =
PredSyn1 Int Int => MkPredSynT1 Int
| PredSyn2 Int Int => MkPredSynT2 Int
| PredSyn3 Int => MkPredSynT3 Int
#endif
#if __GLASGOW_HASKELL__ >= 800
data T37a (k :: Type) :: k -> Type where
MkT37a :: T37a Bool a
data T37b (a :: k) where
MkT37b :: forall (a :: Bool). T37b a
data T37c (a :: k) where
MkT37c :: T37c Bool
data Prox (a :: k) = Prox
data T48 :: Type -> Type where
MkT48 :: forall a (x :: a). Prox x -> T48 a
data T75 (k :: Type) where
MkT75 :: forall k (a :: k). Prox a -> T75 k
#endif
-- We must define these here due to Template Haskell staging restrictions
justCI :: ConstructorInfo
justCI =
ConstructorInfo
{ constructorName = 'Just
, constructorVars = []
, constructorContext = []
, constructorFields = [VarT (mkName "a")]
, constructorStrictness = [notStrictAnnot]
, constructorVariant = NormalConstructor
}
gadtRecVanillaCI :: ConstructorInfo
gadtRecVanillaCI =
ConstructorInfo
{ constructorName = 'Gadtrecc1
, constructorVars = [v1K, v2K]
, constructorContext =
[equalPred a (AppT (AppT (TupleT 2) (VarT v1)) (VarT v2))]
, constructorFields = [VarT v1, VarT v2]
, constructorStrictness = [notStrictAnnot, notStrictAnnot]
, constructorVariant = RecordConstructor ['gadtrec1a, 'gadtrec1b] }
where
a = VarT (mkName "a")
names@[v1,v2] = map mkName ["v1","v2"]
[v1K,v2K] = map (\n -> kindedTV n starK) names
#if MIN_VERSION_template_haskell(2,7,0)
gadtRecFamCI :: ConstructorInfo
gadtRecFamCI =
ConstructorInfo
{ constructorName = '(:#%:)
, constructorVars = []
, constructorContext = [ equalPred cTy (ConT ''Bool)
, equalPred dTy (ConT ''Bool)
]
, constructorFields = [ConT ''Bool, ConT ''Bool]
, constructorStrictness = [notStrictAnnot, notStrictAnnot]
, constructorVariant = RecordConstructor ['famRec1, 'famRec2] }
where
[cTy,dTy] = map (VarT . mkName) ["c", "d"]
#endif
| glguy/th-abstraction | test/Types.hs | isc | 5,269 | 2 | 14 | 1,326 | 1,548 | 939 | 609 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Concurrent (forkIO, threadDelay)
import Control.Monad (mapM_)
import Data.Monoid ((<>))
import qualified Data.Text as T
import System.Hworker.SES
main :: IO ()
main = do hw <- create "ses-example"
2
"Daniel Patterson <[email protected]>"
(\e -> print e >> error "BLAH")
forkIO (worker hw)
forkIO (worker hw)
forkIO (worker hw)
forkIO (worker hw)
forkIO (monitor hw)
mapM_ (\n -> queue hw (SESJob "[email protected]"
("Message #" <> T.pack (show n) <> "")
(Just "Hi Daniel, this is the message in text.\n\n")
(Just "<h2>Hi Daniel, this is the message in html.</h2>")
(Right ("hello" :: T.Text, n :: Int))))
[1..3]
putStrLn "Waiting 10 seconds..."
threadDelay 10000000
| dbp/hworker-ses | example/src/Main.hs | isc | 1,162 | 0 | 18 | 502 | 270 | 139 | 131 | 26 | 1 |
module Main where
import qualified Data.ByteString as B
import Language.STL.Lex
import Language.STL.Lex.Normalize
import Language.STL.Parse
import Prelude hiding (lex)
import System.IO
main :: IO ()
main = withFile "main.stl" ReadMode $ \h -> do
m <- fmap lex (B.hGetContents h)
case m of
Success ts -> do
let ns = normalize ts
print $ parse "main.stl" ns
e -> error $ show e
| pikajude/stl | src/stl.hs | mit | 426 | 0 | 17 | 112 | 147 | 78 | 69 | 15 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-------------------------------------------
-- |
-- Module : Web.Stripe.Balance
-- Copyright : (c) David Johnson, 2014
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : POSIX
--
-- < https:/\/\stripe.com/docs/api#balance >
--
-- @
-- {-\# LANGUAGE OverloadedStrings \#-}
-- import Web.Stripe
-- import Web.Stripe.Balance (getBalance)
--
-- main :: IO ()
-- main = do
-- let config = StripeConfig (StripeKey "secret_key")
-- result <- stripe config getBalance
-- case result of
-- Right balance -> print balance
-- Left stripeError -> print stripeError
-- @
module Web.Stripe.Balance
( -- * API
GetBalance
, getBalance
, GetBalanceTransaction
, getBalanceTransaction
, GetBalanceTransactionHistory
, getBalanceTransactionHistory
-- * Types
, AvailableOn (..)
, Balance (..)
, BalanceAmount (..)
, BalanceTransaction (..)
, Created (..)
, Currency (..)
, EndingBefore (..)
, ExpandParams (..)
, Limit (..)
, Source (..)
, StartingAfter (..)
, StripeList (..)
, TimeRange (..)
, TransactionId (..)
, TransactionType (..)
) where
import Web.Stripe.StripeRequest (Method (GET), StripeHasParam,
StripeRequest (..),
StripeReturn, ToStripeParam(..),
mkStripeRequest)
import Web.Stripe.Util ((</>))
import Web.Stripe.Types (AvailableOn(..), Balance (..),
BalanceAmount(..), BalanceTransaction(..),
Created(..), Currency(..),
EndingBefore(..), ExpandParams(..),
Limit(..), Source(..), StartingAfter(..),
StripeList (..), TimeRange(..),
TransferId(..), TransactionId (..),
TransactionType(..))
import Web.Stripe.Types.Util (getTransactionId)
------------------------------------------------------------------------------
-- | Retrieve the current `Balance` for your Stripe account
getBalance :: StripeRequest GetBalance
getBalance = request
where request = mkStripeRequest GET url params
url = "balance"
params = []
data GetBalance
type instance StripeReturn GetBalance = Balance
------------------------------------------------------------------------------
-- | Retrieve a 'BalanceTransaction' by 'TransactionId'
getBalanceTransaction
:: TransactionId -- ^ The `TransactionId` of the `Transaction` to retrieve
-> StripeRequest GetBalanceTransaction
getBalanceTransaction
transactionid = request
where request = mkStripeRequest GET url params
url = "balance" </> "history" </> getTransactionId transactionid
params = []
data GetBalanceTransaction
type instance StripeReturn GetBalanceTransaction = BalanceTransaction
instance StripeHasParam GetBalanceTransaction ExpandParams
------------------------------------------------------------------------------
-- | Retrieve the history of `BalanceTransaction`s
getBalanceTransactionHistory
:: StripeRequest GetBalanceTransactionHistory
getBalanceTransactionHistory
= request
where request = mkStripeRequest GET url params
url = "balance" </> "history"
params = []
data GetBalanceTransactionHistory
type instance StripeReturn GetBalanceTransactionHistory = (StripeList BalanceTransaction)
instance StripeHasParam GetBalanceTransactionHistory AvailableOn
instance StripeHasParam GetBalanceTransactionHistory (TimeRange AvailableOn)
instance StripeHasParam GetBalanceTransactionHistory Created
instance StripeHasParam GetBalanceTransactionHistory (TimeRange Created)
instance StripeHasParam GetBalanceTransactionHistory Currency
instance StripeHasParam GetBalanceTransactionHistory (EndingBefore TransactionId)
instance StripeHasParam GetBalanceTransactionHistory Limit
instance StripeHasParam GetBalanceTransactionHistory (StartingAfter TransactionId)
instance (ToStripeParam a) => StripeHasParam GetBalanceTransactionHistory (Source a)
instance StripeHasParam GetBalanceTransactionHistory TransferId
instance StripeHasParam GetBalanceTransactionHistory TransactionType
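------------------------------------------------------------------------------
-- The sketch below mirrors the module-header example, but for the history
-- endpoint; it assumes the same 'stripe' runner and 'StripeConfig' from
-- "Web.Stripe" and a placeholder secret key.
--
-- @
-- {-\# LANGUAGE OverloadedStrings \#-}
-- import Web.Stripe
-- import Web.Stripe.Balance (getBalanceTransactionHistory)
--
-- main :: IO ()
-- main = do
--   let config = StripeConfig (StripeKey "secret_key")
--   result <- stripe config getBalanceTransactionHistory
--   case result of
--     Right history    -> print history
--     Left stripeError -> print stripeError
-- @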
| dmjio/stripe | stripe-core/src/Web/Stripe/Balance.hs | mit | 4,788 | 0 | 8 | 1,308 | 682 | 425 | 257 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE DeriveGeneric #-}
module FP15.Evaluator.FPValue where
import GHC.Generics(Generic)
import Data.IORef
import Control.DeepSeq
import FP15.Disp
import FP15.Value
type Rev = (Int, Int)
data FPRef a = FPRef Rev (IORef a)
-- | The 'FPValue' type represents all possible values in the FP15 runtime. This
-- includes the well-behaved values and the unserializable values such as
-- functions and the @RealWorld@.
type FPValue = XValue Extended
data Extended = Lambda !(FPValue -> FPValue)
| Ref !(FPRef FPValue)
| RealWorld !RealWorld
deriving (Generic)
instance NFData Extended where rnf x = seq x ()
data RealWorld = RW deriving (Eq, Show, Read, Ord, Generic)
instance NFData RealWorld where rnf x = seq x ()
fromFPValue :: FPValue -> Maybe Value
fromFPValue = convToValue
instance Disp Extended where
disp (Lambda _) = "#<lambda>"
disp (Ref _) = "#<ref>"
disp (RealWorld _) = "#<RealWorld>"
instance Disp FPValue where
pretty = prettyXValue pretty
class FPValueConvertible t where
toFPValue :: t -> FPValue
default (FPValue)
instance Num FPValue where
(+) _ _ = undefined
(*) _ _ = undefined
abs _ = undefined
signum _ = undefined
fromInteger _ = undefined
negate _ = undefined
instance FPValueConvertible RealWorld where
toFPValue = Extended . RealWorld
instance FPValueConvertible Value where
toFPValue = fmap (\_ -> error "toFPValue")
instance FPValueConvertible FPValue where
toFPValue = id
instance FPValueConvertible Bool where
toFPValue = Bool
instance FPValueConvertible Char where
toFPValue = Char
instance FPValueConvertible Integer where
toFPValue = Int
instance FPValueConvertible Int where
toFPValue = Int . fromIntegral
instance FPValueConvertible Double where
toFPValue = Real
instance FPValueConvertible String where
toFPValue = String
instance FPValueConvertible a => FPValueConvertible [a] where
toFPValue = List . map toFPValue
instance (FPValueConvertible a, FPValueConvertible b)
=> FPValueConvertible (a, b) where
toFPValue (a, b) = List [toFPValue a, toFPValue b]
instance (FPValueConvertible a, FPValueConvertible b, FPValueConvertible c)
=> FPValueConvertible (a, b, c) where
toFPValue (a, b, c) = List [toFPValue a, toFPValue b, toFPValue c]
instance (FPValueConvertible a, FPValueConvertible b, FPValueConvertible c,
FPValueConvertible d)
=> FPValueConvertible (a, b, c, d) where
toFPValue (a, b, c, d) = List [toFPValue a, toFPValue b, toFPValue c, toFPValue d]
instance (FPValueConvertible a, FPValueConvertible b, FPValueConvertible c,
FPValueConvertible d, FPValueConvertible e)
=> FPValueConvertible (a, b, c, d, e) where
toFPValue (a, b, c, d, e) = List [toFPValue a, toFPValue b, toFPValue c, toFPValue d
, toFPValue e]
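-- A small illustrative sketch (the particular values are arbitrary): tuples
-- and lists of convertible values collapse into nested 'List's of 'FPValue'.
_exampleFPValue :: FPValue
_exampleFPValue = toFPValue (True, 'x', [1 :: Int, 2, 3])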
| Ming-Tang/FP15 | src/FP15/Evaluator/FPValue.hs | mit | 2,944 | 0 | 9 | 602 | 880 | 474 | 406 | 78 | 1 |
import System.Environment
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.List as List
main = do
[s] <- getArgs
f <- readFile s
putStr ((unlines . map getChecksum . lines) f)
dict s = show(Map.toList $ Map.fromListWith (+) [(c, 1) | c <- s])
process = dict . removeDashes . Text.unpack . (Text.dropEnd 10) . Text.pack
getChecksum = Text.unpack . (Text.takeEnd 5) . (Text.dropEnd 1) . Text.pack
--computeChecksum = map fst . take 5 . List.sortBy (comparing $ (snd))
removeDashes xs = [ x | x <- xs, not (x `elem` "-") ]
--valid s =
| VisionistInc/advent-of-code-2016 | joshgordon/04/part1.hs | mit | 583 | 0 | 13 | 110 | 234 | 127 | 107 | 12 | 1 |
module Test.Time where
import Import
import Test.Hspec
import Station.Types.Card.Time
test :: SpecWith ()
test = do
describe "the TAI implementation" $ do
it "has the correct unixEpoch" $ do
(show unixEpoch :: Text) `shouldBe` "1970-01-01 00:00:00 UTC"
| seagreen/station | test/Test/Time.hs | mit | 312 | 0 | 15 | 96 | 74 | 40 | 34 | 9 | 1 |
module Main where
import Test.QuickCheck
import Prelude hiding (reverse)
reverse :: [a] -> [a]
reverse [] = []
reverse (x:xs) = reverse xs ++ [x]
-- XXX: Now some quick-check property declaration
rev_rev :: [Int] -> [Int] -> Bool
rev_rev x y = reverse (x++y) == reverse y ++ reverse x
main :: IO ()
main = quickCheck rev_rev
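-- Another illustrative property in the same spirit (not wired into 'main'):
-- reversing twice gives back the original list.
prop_rev_involutive :: [Int] -> Bool
prop_rev_involutive xs = reverse (reverse xs) == xs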
| amal029/haskell-tests | qcheck.hs | mit | 333 | 0 | 9 | 67 | 142 | 77 | 65 | 10 | 1 |
module Unison.Note where
import Data.List
import Control.Monad
import Control.Applicative
-- | Hierarchical error message type used throughout Unison
newtype Note = Note [String]
-- | Monad transformer for adding notes
newtype Noted m a = Noted { unnote :: m (Either Note a) }
run :: Monad m => Noted m a -> m a
run (Noted m) = m >>= \e -> case e of
Left (Note stack) -> fail ("\n" ++ intercalate "\n" stack)
Right a -> return a
attemptRun :: Functor m => Noted m a -> m (Either String a)
attemptRun n = collapse <$> unnote n where
collapse (Left (Note stack)) = Left ("\n" ++ intercalate "\n" stack)
collapse (Right a) = Right a
noted :: m (Either Note a) -> Noted m a
noted = Noted
fromEither :: Applicative m => Either Note a -> Noted m a
fromEither = Noted . pure
fromMaybe :: Applicative m => String -> Maybe a -> Noted m a
fromMaybe msg Nothing = failure msg
fromMaybe _ (Just a) = pure a
noted' :: Functor m => String -> m (Maybe a) -> Noted m a
noted' ifNothing moa = noted (fmap (maybe (Left (note ifNothing)) Right) moa)
lift :: Functor m => m a -> Noted m a
lift = Noted . fmap Right
failure :: Applicative m => String -> Noted m a
failure = Noted . pure . Left . note
scoped :: Functor m => String -> Noted m a -> Noted m a
scoped msg inner = Noted $ fmap (scope msg) (unnote inner)
orElse :: Monad m => Noted m a -> Noted m a -> Noted m a
orElse a b = Noted $ unnote a >>= go
where go (Left _) = unnote b
go (Right a) = return (Right a)
instance Monad m => Monad (Noted m) where
return = Noted . return . return
fail s = Noted . return . Left . note $ s
Noted a >>= f = Noted $ a >>= \e -> case e of
Left e -> return $ Left e
Right a -> unnote (f a)
instance Functor m => Functor (Noted m) where
fmap f (Noted a) = Noted $ fmap go a where
go (Left e) = Left e
go (Right a) = Right (f a)
instance Applicative m => Applicative (Noted m) where
pure = Noted . pure . pure
(Noted f) <*> (Noted a) = Noted $ liftA2 (<*>) f a
instance Monad m => MonadPlus (Noted m) where
mzero = Noted (pure (Left (Note [])))
mplus (Noted n1) (Noted n2) = Noted $ do
n1 <- n1
case n1 of
Left _ -> n2
Right a -> pure (Right a)
instance Monad m => Alternative (Noted m) where
empty = mzero
(<|>) = mplus
note :: String -> Note
note s = Note [s]
note' :: String -> Maybe a -> Either Note a
note' s Nothing = Left (note s)
note' _ (Just a) = Right a
scope :: String -> Either Note a -> Either Note a
scope s (Left (Note stack)) = Left (Note (s : stack))
scope _ e = e
scopeM :: Monad m => String -> m (Either Note a) -> m (Either Note a)
scopeM s = liftM (scope s)
scopeF :: Functor f => String -> f (Either Note a) -> f (Either Note a)
scopeF s = fmap (scope s)
instance Show Note where
show (Note stack) = intercalate "\n" stack
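-- An illustrative sketch of how the pieces compose: the inner 'failure' is
-- annotated with both enclosing scopes, and 'orElse' falls back to the
-- second computation, so 'unnote' on this value gives @Just (Right 8080)@.
-- The scope strings and the port number are arbitrary.
_exampleNote :: Noted Maybe Int
_exampleNote =
  scoped "loading config" (scoped "parsing port" (failure "not a number"))
    `orElse` pure 8080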
| nightscape/platform | shared/src/Unison/Note.hs | mit | 2,817 | 0 | 14 | 685 | 1,415 | 690 | 725 | 70 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
module Data.Streaming.Network
( -- * Types
ServerSettings
, ClientSettings
, HostPreference
, Message (..)
, AppData
#if !WINDOWS
, ServerSettingsUnix
, ClientSettingsUnix
, AppDataUnix
#endif
-- ** Smart constructors
, serverSettingsTCP
, serverSettingsTCPSocket
, clientSettingsTCP
, serverSettingsUDP
, clientSettingsUDP
#if !WINDOWS
, serverSettingsUnix
, clientSettingsUnix
#endif
, message
-- ** Classes
, HasPort (..)
, HasAfterBind (..)
, HasReadWrite (..)
, HasReadBufferSize (..)
#if !WINDOWS
, HasPath (..)
#endif
-- ** Setters
, setPort
, setHost
, setAddrFamily
, setAfterBind
, setNeedLocalAddr
, setReadBufferSize
#if !WINDOWS
, setPath
#endif
-- ** Getters
, getPort
, getHost
, getAddrFamily
, getAfterBind
, getNeedLocalAddr
, getReadBufferSize
#if !WINDOWS
, getPath
#endif
, appRead
, appWrite
, appSockAddr
, appLocalAddr
, appCloseConnection
, appRawSocket
-- * Functions
-- ** General
, bindPortGen
, bindPortGenEx
, bindRandomPortGen
, getSocketGen
, getSocketFamilyGen
, acceptSafe
, unassignedPorts
, getUnassignedPort
-- ** TCP
, bindPortTCP
, bindRandomPortTCP
, getSocketTCP
, getSocketFamilyTCP
, safeRecv
, runTCPServer
, runTCPClient
, ConnectionHandle()
, runTCPServerWithHandle
-- ** UDP
, bindPortUDP
, bindRandomPortUDP
, getSocketUDP
#if !WINDOWS
-- ** Unix
, bindPath
, getSocketUnix
, runUnixServer
, runUnixClient
#endif
) where
import qualified Network.Socket as NS
import Data.Streaming.Network.Internal
import Control.Concurrent (threadDelay)
import Control.Exception (IOException, try, SomeException, throwIO, bracketOnError, bracket)
import Network.Socket (Socket, AddrInfo, SocketType)
import Network.Socket.ByteString (recv, sendAll)
import System.IO.Error (isDoesNotExistError)
import qualified Data.ByteString.Char8 as S8
import qualified Control.Exception as E
import Data.ByteString (ByteString)
import System.Directory (removeFile)
import Data.Functor.Constant (Constant (Constant), getConstant)
import Data.Functor.Identity (Identity (Identity), runIdentity)
import Control.Concurrent (forkIO)
import Control.Monad (forever)
import Data.IORef (IORef, newIORef, atomicModifyIORef)
import Data.Array.Unboxed ((!), UArray, listArray)
import System.IO.Unsafe (unsafePerformIO, unsafeDupablePerformIO)
import System.Random (randomRIO)
import System.IO.Error (isFullErrorType, ioeGetErrorType)
#if WINDOWS
import Control.Concurrent.MVar (putMVar, takeMVar, newEmptyMVar)
#endif
getPossibleAddrs :: SocketType -> String -> Int -> NS.Family -> IO [AddrInfo]
getPossibleAddrs sockettype host' port' af =
NS.getAddrInfo (Just hints) (Just host') (Just $ show port')
where
hints = NS.defaultHints {
NS.addrSocketType = sockettype
, NS.addrFamily = af
}
-- | Attempt to connect to the given host/port/address family using given @SocketType@.
--
-- Since 0.1.3
getSocketFamilyGen :: SocketType -> String -> Int -> NS.Family -> IO (Socket, AddrInfo)
getSocketFamilyGen sockettype host' port' af = do
(addr:_) <- getPossibleAddrs sockettype host' port' af
sock <- NS.socket (NS.addrFamily addr) (NS.addrSocketType addr)
(NS.addrProtocol addr)
return (sock, addr)
-- | Attempt to connect to the given host/port using given @SocketType@.
getSocketGen :: SocketType -> String -> Int -> IO (Socket, AddrInfo)
getSocketGen sockettype host port = getSocketFamilyGen sockettype host port NS.AF_UNSPEC
defaultSocketOptions :: SocketType -> [(NS.SocketOption, Int)]
defaultSocketOptions sockettype =
case sockettype of
NS.Datagram -> [(NS.ReuseAddr,1)]
_ -> [(NS.NoDelay,1), (NS.ReuseAddr,1)]
-- | Attempt to bind a listening @Socket@ on the given host/port using given
-- @SocketType@. If no host is given, will use the first address available.
bindPortGen :: SocketType -> Int -> HostPreference -> IO Socket
bindPortGen sockettype = bindPortGenEx (defaultSocketOptions sockettype) sockettype
-- | Attempt to bind a listening @Socket@ on the given host/port using given
-- socket options and @SocketType@. If no host is given, will use the first address available.
--
-- Since 0.1.17
bindPortGenEx :: [(NS.SocketOption, Int)] -> SocketType -> Int -> HostPreference -> IO Socket
bindPortGenEx sockOpts sockettype p s = do
let hints = NS.defaultHints
{ NS.addrFlags = [NS.AI_PASSIVE]
, NS.addrSocketType = sockettype
}
host =
case s of
Host s' -> Just s'
_ -> Nothing
port = Just . show $ p
addrs <- NS.getAddrInfo (Just hints) host port
    -- Choose an IPv6 socket if one exists. This ensures the socket can
-- handle both IPv4 and IPv6 if v6only is false.
let addrs4 = filter (\x -> NS.addrFamily x /= NS.AF_INET6) addrs
addrs6 = filter (\x -> NS.addrFamily x == NS.AF_INET6) addrs
addrs' =
case s of
HostIPv4 -> addrs4 ++ addrs6
HostIPv4Only -> addrs4
HostIPv6 -> addrs6 ++ addrs4
HostIPv6Only -> addrs6
_ -> addrs
tryAddrs (addr1:rest@(_:_)) =
E.catch
(theBody addr1)
(\(_ :: IOException) -> tryAddrs rest)
tryAddrs (addr1:[]) = theBody addr1
tryAddrs _ = error "bindPort: addrs is empty"
theBody addr =
bracketOnError
(NS.socket (NS.addrFamily addr) (NS.addrSocketType addr) (NS.addrProtocol addr))
NS.close
(\sock -> do
mapM_ (\(opt,v) -> NS.setSocketOption sock opt v) sockOpts
NS.bind sock (NS.addrAddress addr)
return sock
)
tryAddrs addrs'
-- | Bind to a random port number. Especially useful for writing network tests.
--
-- Since 0.1.1
bindRandomPortGen :: SocketType -> HostPreference -> IO (Int, Socket)
bindRandomPortGen sockettype s = do
socket <- bindPortGen sockettype 0 s
port <- NS.socketPort socket
return (fromIntegral port, socket)
-- | Top 10 Largest IANA unassigned port ranges with no unauthorized uses known
unassignedPortsList :: [Int]
unassignedPortsList = concat
[ [43124..44320]
, [28120..29166]
, [45967..46997]
, [28241..29117]
, [40001..40840]
, [29170..29998]
, [38866..39680]
, [43442..44122]
, [41122..41793]
, [35358..36000]
]
unassignedPorts :: UArray Int Int
unassignedPorts = listArray (unassignedPortsMin, unassignedPortsMax) unassignedPortsList
unassignedPortsMin, unassignedPortsMax :: Int
unassignedPortsMin = 0
unassignedPortsMax = length unassignedPortsList - 1
nextUnusedPort :: IORef Int
nextUnusedPort = unsafePerformIO
$ randomRIO (unassignedPortsMin, unassignedPortsMax) >>= newIORef
{-# NOINLINE nextUnusedPort #-}
-- | Get a port from the IANA list of unassigned ports.
--
-- Internally, this function uses an @IORef@ to cycle through the list of ports
getUnassignedPort :: IO Int
getUnassignedPort = do
port <- atomicModifyIORef nextUnusedPort go
return $! port
where
go i
| i > unassignedPortsMax = (succ unassignedPortsMin, unassignedPorts ! unassignedPortsMin)
| otherwise = (succ i, unassignedPorts ! i)
-- | Attempt to connect to the given host/port.
getSocketUDP :: String -> Int -> IO (Socket, AddrInfo)
getSocketUDP = getSocketGen NS.Datagram
-- | Attempt to bind a listening @Socket@ on the given host/port. If no host is
-- given, will use the first address available.
bindPortUDP :: Int -> HostPreference -> IO Socket
bindPortUDP = bindPortGen NS.Datagram
-- | Bind a random UDP port.
--
-- See 'bindRandomPortGen'
--
-- Since 0.1.1
bindRandomPortUDP :: HostPreference -> IO (Int, Socket)
bindRandomPortUDP = bindRandomPortGen NS.Datagram
{-# NOINLINE defaultReadBufferSize #-}
defaultReadBufferSize :: Int
defaultReadBufferSize = unsafeDupablePerformIO $
bracket (NS.socket NS.AF_INET NS.Stream 0) NS.close (\sock -> NS.getSocketOption sock NS.RecvBuffer)
#if !WINDOWS
-- | Attempt to connect to the given Unix domain socket path.
getSocketUnix :: FilePath -> IO Socket
getSocketUnix path = do
sock <- NS.socket NS.AF_UNIX NS.Stream 0
ee <- try' $ NS.connect sock (NS.SockAddrUnix path)
case ee of
Left e -> NS.close sock >> throwIO e
Right () -> return sock
where
try' :: IO a -> IO (Either SomeException a)
try' = try
-- | Attempt to bind a listening Unix domain socket at the given path.
bindPath :: FilePath -> IO Socket
bindPath path = do
sock <- bracketOnError
(NS.socket NS.AF_UNIX NS.Stream 0)
NS.close
(\sock -> do
removeFileSafe path -- Cannot bind if the socket file exists.
NS.bind sock (NS.SockAddrUnix path)
return sock)
NS.listen sock (max 2048 NS.maxListenQueue)
return sock
removeFileSafe :: FilePath -> IO ()
removeFileSafe path =
removeFile path `E.catch` handleExists
where
handleExists e
| isDoesNotExistError e = return ()
| otherwise = throwIO e
-- | Smart constructor.
serverSettingsUnix
:: FilePath -- ^ path to bind to
-> ServerSettingsUnix
serverSettingsUnix path = ServerSettingsUnix
{ serverPath = path
, serverAfterBindUnix = const $ return ()
, serverReadBufferSizeUnix = defaultReadBufferSize
}
-- | Smart constructor.
clientSettingsUnix
:: FilePath -- ^ path to connect to
-> ClientSettingsUnix
clientSettingsUnix path = ClientSettingsUnix
{ clientPath = path
, clientReadBufferSizeUnix = defaultReadBufferSize
}
#endif
#if defined(__GLASGOW_HASKELL__) && WINDOWS
-- Socket recv and accept calls on Windows platform cannot be interrupted when compiled with -threaded.
-- See https://ghc.haskell.org/trac/ghc/ticket/5797 for details.
-- The following enables simple workaround
#define SOCKET_ACCEPT_RECV_WORKAROUND
#endif
safeRecv :: Socket -> Int -> IO ByteString
#ifndef SOCKET_ACCEPT_RECV_WORKAROUND
safeRecv = recv
#else
safeRecv s buf = do
var <- newEmptyMVar
forkIO $ recv s buf `E.catch` (\(_::IOException) -> return S8.empty) >>= putMVar var
takeMVar var
#endif
-- | Smart constructor.
serverSettingsUDP
:: Int -- ^ port to bind to
-> HostPreference -- ^ host binding preferences
-> ServerSettings
serverSettingsUDP = serverSettingsTCP
-- | Smart constructor.
serverSettingsTCP
:: Int -- ^ port to bind to
-> HostPreference -- ^ host binding preferences
-> ServerSettings
serverSettingsTCP port host = ServerSettings
{ serverPort = port
, serverHost = host
, serverSocket = Nothing
, serverAfterBind = const $ return ()
, serverNeedLocalAddr = False
, serverReadBufferSize = defaultReadBufferSize
}
-- | Create a server settings that uses an already available listening socket.
-- Any port and host modifications made to this value will be ignored.
--
-- Since 0.1.1
serverSettingsTCPSocket :: Socket -> ServerSettings
serverSettingsTCPSocket lsocket = ServerSettings
{ serverPort = 0
, serverHost = HostAny
, serverSocket = Just lsocket
, serverAfterBind = const $ return ()
, serverNeedLocalAddr = False
, serverReadBufferSize = defaultReadBufferSize
}
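-- A minimal sketch of the intended use: bind a random port yourself (here
-- with 'bindRandomPortTCP', defined below) and wrap the already-listening
-- socket in settings. The returned port is what a test client would dial.
_settingsFromBoundSocketSketch :: IO (Int, ServerSettings)
_settingsFromBoundSocketSketch = do
    (port, lsocket) <- bindRandomPortTCP HostAny
    return (port, serverSettingsTCPSocket lsocket)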
-- | Smart constructor.
clientSettingsUDP
:: Int -- ^ port to connect to
-> ByteString -- ^ host to connect to
-> ClientSettings
clientSettingsUDP = clientSettingsTCP
-- | Smart constructor.
clientSettingsTCP
:: Int -- ^ port to connect to
-> ByteString -- ^ host to connect to
-> ClientSettings
clientSettingsTCP port host = ClientSettings
{ clientPort = port
, clientHost = host
, clientAddrFamily = NS.AF_UNSPEC
, clientReadBufferSize = defaultReadBufferSize
}
-- | Attempt to connect to the given host/port/address family.
--
-- Since 0.1.3
getSocketFamilyTCP :: ByteString -> Int -> NS.Family -> IO (NS.Socket, NS.SockAddr)
getSocketFamilyTCP host' port' addrFamily = do
addrsInfo <- getPossibleAddrs NS.Stream (S8.unpack host') port' addrFamily
firstSuccess addrsInfo
where
firstSuccess [ai] = connect ai
firstSuccess (ai:ais) = connect ai `E.catch` \(_ :: IOException) -> firstSuccess ais
firstSuccess _ = error "getSocketFamilyTCP: can't happen"
createSocket addrInfo = do
sock <- NS.socket (NS.addrFamily addrInfo) (NS.addrSocketType addrInfo)
(NS.addrProtocol addrInfo)
NS.setSocketOption sock NS.NoDelay 1
return sock
connect addrInfo = E.bracketOnError (createSocket addrInfo) NS.close $ \sock -> do
NS.connect sock (NS.addrAddress addrInfo)
return (sock, NS.addrAddress addrInfo)
-- | Attempt to connect to the given host/port.
getSocketTCP :: ByteString -> Int -> IO (NS.Socket, NS.SockAddr)
getSocketTCP host port = getSocketFamilyTCP host port NS.AF_UNSPEC
-- | Attempt to bind a listening @Socket@ on the given host/port. If no host is
-- given, will use the first address available.
-- 'maxListenQueue' is typically 128, which is too short for
-- high-performance servers, so we pass 'max 2048 maxListenQueue' as the
-- listen queue length.
bindPortTCP :: Int -> HostPreference -> IO Socket
bindPortTCP p s = do
sock <- bindPortGen NS.Stream p s
NS.listen sock (max 2048 NS.maxListenQueue)
return sock
-- | Bind a random TCP port.
--
-- See 'bindRandomPortGen'.
--
-- Since 0.1.1
bindRandomPortTCP :: HostPreference -> IO (Int, Socket)
bindRandomPortTCP s = do
(port, sock) <- bindRandomPortGen NS.Stream s
NS.listen sock (max 2048 NS.maxListenQueue)
return (port, sock)
-- | Try to accept a connection, recovering automatically from exceptions.
--
-- As reported by Kazu against Warp, "resource exhausted (Too many open files)"
-- may be thrown by accept(). This function will catch that exception, wait a
-- second, and then try again.
acceptSafe :: Socket -> IO (Socket, NS.SockAddr)
acceptSafe socket =
#ifndef SOCKET_ACCEPT_RECV_WORKAROUND
loop
#else
do var <- newEmptyMVar
forkIO $ loop >>= putMVar var
takeMVar var
#endif
where
loop =
NS.accept socket `E.catch` \e ->
if isFullErrorType (ioeGetErrorType e)
then do
threadDelay 1000000
loop
else E.throwIO e
message :: ByteString -> NS.SockAddr -> Message
message = Message
class HasPort a where
portLens :: Functor f => (Int -> f Int) -> a -> f a
instance HasPort ServerSettings where
portLens f ss = fmap (\p -> ss { serverPort = p }) (f (serverPort ss))
instance HasPort ClientSettings where
portLens f ss = fmap (\p -> ss { clientPort = p }) (f (clientPort ss))
getPort :: HasPort a => a -> Int
getPort = getConstant . portLens Constant
setPort :: HasPort a => Int -> a -> a
setPort p = runIdentity . portLens (const (Identity p))
setHost :: ByteString -> ClientSettings -> ClientSettings
setHost hp ss = ss { clientHost = hp }
getHost :: ClientSettings -> ByteString
getHost = clientHost
-- | Set the address family for the given settings.
--
-- Since 0.1.3
setAddrFamily :: NS.Family -> ClientSettings -> ClientSettings
setAddrFamily af cs = cs { clientAddrFamily = af }
-- | Get the address family for the given settings.
--
-- Since 0.1.3
getAddrFamily :: ClientSettings -> NS.Family
getAddrFamily = clientAddrFamily
#if !WINDOWS
class HasPath a where
pathLens :: Functor f => (FilePath -> f FilePath) -> a -> f a
instance HasPath ServerSettingsUnix where
pathLens f ss = fmap (\p -> ss { serverPath = p }) (f (serverPath ss))
instance HasPath ClientSettingsUnix where
pathLens f ss = fmap (\p -> ss { clientPath = p }) (f (clientPath ss))
getPath :: HasPath a => a -> FilePath
getPath = getConstant . pathLens Constant
setPath :: HasPath a => FilePath -> a -> a
setPath p = runIdentity . pathLens (const (Identity p))
#endif
setNeedLocalAddr :: Bool -> ServerSettings -> ServerSettings
setNeedLocalAddr x y = y { serverNeedLocalAddr = x }
getNeedLocalAddr :: ServerSettings -> Bool
getNeedLocalAddr = serverNeedLocalAddr
class HasAfterBind a where
afterBindLens :: Functor f => ((Socket -> IO ()) -> f (Socket -> IO ())) -> a -> f a
instance HasAfterBind ServerSettings where
afterBindLens f ss = fmap (\p -> ss { serverAfterBind = p }) (f (serverAfterBind ss))
#if !WINDOWS
instance HasAfterBind ServerSettingsUnix where
afterBindLens f ss = fmap (\p -> ss { serverAfterBindUnix = p }) (f (serverAfterBindUnix ss))
#endif
getAfterBind :: HasAfterBind a => a -> (Socket -> IO ())
getAfterBind = getConstant . afterBindLens Constant
setAfterBind :: HasAfterBind a => (Socket -> IO ()) -> a -> a
setAfterBind p = runIdentity . afterBindLens (const (Identity p))
-- | Since 0.1.13
class HasReadBufferSize a where
readBufferSizeLens :: Functor f => (Int -> f Int) -> a -> f a
-- | Since 0.1.13
instance HasReadBufferSize ServerSettings where
readBufferSizeLens f ss = fmap (\p -> ss { serverReadBufferSize = p }) (f (serverReadBufferSize ss))
-- | Since 0.1.13
instance HasReadBufferSize ClientSettings where
readBufferSizeLens f cs = fmap (\p -> cs { clientReadBufferSize = p }) (f (clientReadBufferSize cs))
#if !WINDOWS
-- | Since 0.1.13
instance HasReadBufferSize ServerSettingsUnix where
readBufferSizeLens f ss = fmap (\p -> ss { serverReadBufferSizeUnix = p }) (f (serverReadBufferSizeUnix ss))
-- | Since 0.1.14
instance HasReadBufferSize ClientSettingsUnix where
readBufferSizeLens f ss = fmap (\p -> ss { clientReadBufferSizeUnix = p }) (f (clientReadBufferSizeUnix ss))
#endif
-- | Get buffer size used when reading from socket.
--
-- Since 0.1.13
getReadBufferSize :: HasReadBufferSize a => a -> Int
getReadBufferSize = getConstant . readBufferSizeLens Constant
-- | Set buffer size used when reading from socket.
--
-- Since 0.1.13
setReadBufferSize :: HasReadBufferSize a => Int -> a -> a
setReadBufferSize p = runIdentity . readBufferSizeLens (const (Identity p))
type ConnectionHandle = Socket -> NS.SockAddr -> Maybe NS.SockAddr -> IO ()
runTCPServerWithHandle :: ServerSettings -> ConnectionHandle -> IO a
runTCPServerWithHandle (ServerSettings port host msocket afterBind needLocalAddr _) handle =
case msocket of
Nothing -> E.bracket (bindPortTCP port host) NS.close inner
Just lsocket -> inner lsocket
where
inner lsocket = afterBind lsocket >> forever (serve lsocket)
serve lsocket = E.bracketOnError
(acceptSafe lsocket)
(\(socket, _) -> NS.close socket)
$ \(socket, addr) -> do
mlocal <- if needLocalAddr
then fmap Just $ NS.getSocketName socket
else return Nothing
_ <- E.mask $ \restore -> forkIO
$ restore (handle socket addr mlocal)
`E.finally` NS.close socket
return ()
-- | Run an @Application@ with the given settings. This function will create a
-- new listening socket, accept connections on it, and spawn a new thread for
-- each connection.
runTCPServer :: ServerSettings -> (AppData -> IO ()) -> IO a
runTCPServer settings app = runTCPServerWithHandle settings app'
where app' socket addr mlocal =
let ad = AppData
{ appRead' = safeRecv socket $ getReadBufferSize settings
, appWrite' = sendAll socket
, appSockAddr' = addr
, appLocalAddr' = mlocal
, appCloseConnection' = NS.close socket
, appRawSocket' = Just socket
}
in
app ad
-- | Run an @Application@ by connecting to the specified server.
runTCPClient :: ClientSettings -> (AppData -> IO a) -> IO a
runTCPClient (ClientSettings port host addrFamily readBufferSize) app = E.bracket
(getSocketFamilyTCP host port addrFamily)
(NS.close . fst)
(\(s, address) -> app AppData
{ appRead' = safeRecv s readBufferSize
, appWrite' = sendAll s
, appSockAddr' = address
, appLocalAddr' = Nothing
, appCloseConnection' = NS.close s
, appRawSocket' = Just s
})
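-- The two sketches below are illustrative only: an echo server on an
-- arbitrary port (4000) and a client that sends one message and prints the
-- reply. They use nothing beyond the functions defined in this module plus
-- "Data.ByteString.Char8".
_echoServerSketch :: IO ()
_echoServerSketch =
    runTCPServer (serverSettingsTCP 4000 HostAny) $ \ad ->
        -- Read one chunk from the peer and write it straight back.
        appRead ad >>= appWrite ad

_echoClientSketch :: IO ()
_echoClientSketch =
    runTCPClient (clientSettingsTCP 4000 (S8.pack "localhost")) $ \ad -> do
        appWrite ad (S8.pack "hello")
        appRead ad >>= S8.putStrLn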
appLocalAddr :: AppData -> Maybe NS.SockAddr
appLocalAddr = appLocalAddr'
appSockAddr :: AppData -> NS.SockAddr
appSockAddr = appSockAddr'
-- | Close the underlying connection. One possible use case is simulating
-- connection failures in a test suite.
--
-- Since 0.1.6
appCloseConnection :: AppData -> IO ()
appCloseConnection = appCloseConnection'
-- | Get the raw socket for this @AppData@, if available.
--
-- Since 0.1.12
appRawSocket :: AppData -> Maybe NS.Socket
appRawSocket = appRawSocket'
class HasReadWrite a where
readLens :: Functor f => (IO ByteString -> f (IO ByteString)) -> a -> f a
writeLens :: Functor f => ((ByteString -> IO ()) -> f (ByteString -> IO ())) -> a -> f a
instance HasReadWrite AppData where
readLens f a = fmap (\x -> a { appRead' = x }) (f (appRead' a))
writeLens f a = fmap (\x -> a { appWrite' = x }) (f (appWrite' a))
#if !WINDOWS
instance HasReadWrite AppDataUnix where
readLens f a = fmap (\x -> a { appReadUnix = x }) (f (appReadUnix a))
writeLens f a = fmap (\x -> a { appWriteUnix = x }) (f (appWriteUnix a))
#endif
appRead :: HasReadWrite a => a -> IO ByteString
appRead = getConstant . readLens Constant
appWrite :: HasReadWrite a => a -> ByteString -> IO ()
appWrite = getConstant . writeLens Constant
#if !WINDOWS
-- | Run an @Application@ with the given settings. This function will create a
-- new listening socket, accept connections on it, and spawn a new thread for
-- each connection.
runUnixServer :: ServerSettingsUnix -> (AppDataUnix -> IO ()) -> IO a
runUnixServer (ServerSettingsUnix path afterBind readBufferSize) app = E.bracket
(bindPath path)
NS.close
(\socket -> do
afterBind socket
forever $ serve socket)
where
serve lsocket = E.bracketOnError
(acceptSafe lsocket)
(\(socket, _) -> NS.close socket)
$ \(socket, _) -> do
let ad = AppDataUnix
{ appReadUnix = safeRecv socket readBufferSize
, appWriteUnix = sendAll socket
}
_ <- E.mask $ \restore -> forkIO
$ restore (app ad)
`E.finally` NS.close socket
return ()
-- | Run an @Application@ by connecting to the specified server.
runUnixClient :: ClientSettingsUnix -> (AppDataUnix -> IO a) -> IO a
runUnixClient (ClientSettingsUnix path readBufferSize) app = E.bracket
(getSocketUnix path)
NS.close
(\sock -> app AppDataUnix
{ appReadUnix = safeRecv sock readBufferSize
, appWriteUnix = sendAll sock
})
#endif
| fpco/streaming-commons | Data/Streaming/Network.hs | mit | 23,204 | 0 | 19 | 5,475 | 5,690 | 3,065 | 2,625 | 452 | 8 |
---------------------------------------------------------------------------------------------------
-----------------------------------------------Import----------------------------------------------
---------------------------------------------------------------------------------------------------
import Data.List
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 1----------------------------------------------
---------------------------------------------------------------------------------------------------
largestPower :: Int -> Int -> Int
largestPower n p = (largestPowerHelper n p (0 - n))
largestPowerHelper :: Int -> Int -> Int -> Int
largestPowerHelper n p count
| n > 0 = (largestPowerHelper (n `div` p) p (count + n))
| otherwise = count
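-- Illustrative check: largestPower 10 2 evaluates to 8, which is the
-- exponent of 2 in 10! (floor(10/2) + floor(10/4) + floor(10/8) = 5 + 2 + 1).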
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 2----------------------------------------------
---------------------------------------------------------------------------------------------------
binary :: Int -> [Int]
binary 0 = [0]
binary x = binary (x `div` 2) ++ [x `mod` 2]
nohundred_list = [i | i <- [1,2..], (([1,0,0] `isInfixOf` (binary i)) == False)]
nohundred :: Int -> Int
nohundred n
| n <= 0 = 0
| otherwise = nohundred_list !! (n - 1)
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 3----------------------------------------------
---------------------------------------------------------------------------------------------------
remDup::[Int]->[Int]
remDup = remDupHelper []
where
remDupHelper seen [] = seen
remDupHelper seen (x:xs)
| x `elem` seen = remDupHelper seen xs
| otherwise = remDupHelper (seen ++ [x]) xs
infListCreate :: Int -> Int -> [Int] -> [Int]
infListCreate n i list
| n > i = (infListCreate n (i + 1) (sort (remDup (((list !! i) * 2) : ((list !! i) * 3) : ((list !! i) * 5) : list))))
| otherwise = list
infListElem :: Int -> Int
infListElem n
| n <= 0 = 0
| n == 1 = 1
| otherwise = (infListCreate n 0 [1]) !! (n - 1)
infList :: [Integer]
infList = [toInteger (infListElem i) | i <- [1..]]
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 4----------------------------------------------
---------------------------------------------------------------------------------------------------
priorTo :: String -> String -> Bool
priorTo s1 s2 = length s1 < length s2 || (length s1 == length s2 && s1 < s2)
abundant :: Int -> String
abundant 1 = "abab"
abundant 2 = "aabab"
abundant 5 = "ababb"
abundant 10 = "aababa"
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 5----------------------------------------------
---------------------------------------------------------------------------------------------------
sumDigits :: Int->Int
sumDigits n
| n == 0 = 0
| otherwise = (n `mod` 10) + (sumDigits (n `div` 10))
sumDigitsCheck :: Int -> [Int] -> Int
sumDigitsCheck n (x : xs)
| n >= (sumDigits x) = (sumDigitsCheck n xs)
| otherwise = x
minus (x : xs) (y : ys)
| x < y = x : minus xs (y : ys)
| x == y = minus xs ys
| x > y = minus (x : xs) ys
minus xs _ = xs
primes :: [Int]
primes = sieve [2..]
where
sieve [] = []
sieve (p:xs) = p : sieve (xs `minus` [p * p, p * p + p..])
goodPrime :: Int -> Int
goodPrime n = sumDigitsCheck n primes
---------------------------------------------------------------------------------------------------
-----------------------------------------------Part 6----------------------------------------------
---------------------------------------------------------------------------------------------------
getDigitsList :: Integer -> [Integer]
getDigitsList x
| x == 0 = []
| otherwise = getDigitsList (x `div` 10) ++ [x `mod` 10]
lookAndSay :: Integer -> Integer
lookAndSay n = read (concatMap describe (group (show n)))
where
describe run = show (length run) ++ take 1 run
lookAndSayList = iterate lookAndSay 1
stringLookAndSay :: Int -> String
stringLookAndSay n = show (fromInteger (lookAndSayList !! n))
getDigitsListHelper :: Int -> Int -> Integer
getDigitsListHelper n m = ((getDigitsList (lookAndSayList !! n)) !! m)
las :: Int -> Integer
las n
| n < 0 = 0
| length (stringLookAndSay n) >= 4 =
((getDigitsListHelper n 0) * 1000)
+ ((getDigitsListHelper n 1) * 100)
+ ((getDigitsListHelper n 2) * 10)
+ ((getDigitsListHelper n 3) * 1)
| length (show (fromInteger (lookAndSayList !! n))) == 3 =
((getDigitsListHelper n 0) * 100)
+ ((getDigitsListHelper n 1) * 10)
+ ((getDigitsListHelper n 2) * 1)
| length (show (fromInteger (lookAndSayList !! n))) == 2 =
((getDigitsListHelper n 0) * 10)
+ ((getDigitsListHelper n 1) * 1)
| length (show (fromInteger (lookAndSayList !! n))) == 1 =
((getDigitsListHelper n 0) * 1)
| divayprakash/haskell-course | week7programmingAssignment.hs | mit | 5,390 | 0 | 19 | 948 | 1,631 | 853 | 778 | 87 | 2 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.RTCSessionDescriptionCallback
(newRTCSessionDescriptionCallback,
newRTCSessionDescriptionCallbackSync,
newRTCSessionDescriptionCallbackAsync,
RTCSessionDescriptionCallback)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCSessionDescriptionCallback Mozilla RTCSessionDescriptionCallback documentation>
newRTCSessionDescriptionCallback ::
(MonadIO m) =>
(Maybe RTCSessionDescription -> IO ()) ->
m RTCSessionDescriptionCallback
newRTCSessionDescriptionCallback callback
= liftIO
(syncCallback1 ThrowWouldBlock
(\ sdp -> fromJSRefUnchecked sdp >>= \ sdp' -> callback sdp'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCSessionDescriptionCallback Mozilla RTCSessionDescriptionCallback documentation>
newRTCSessionDescriptionCallbackSync ::
(MonadIO m) =>
(Maybe RTCSessionDescription -> IO ()) ->
m RTCSessionDescriptionCallback
newRTCSessionDescriptionCallbackSync callback
= liftIO
(syncCallback1 ContinueAsync
(\ sdp -> fromJSRefUnchecked sdp >>= \ sdp' -> callback sdp'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCSessionDescriptionCallback Mozilla RTCSessionDescriptionCallback documentation>
newRTCSessionDescriptionCallbackAsync ::
(MonadIO m) =>
(Maybe RTCSessionDescription -> IO ()) ->
m RTCSessionDescriptionCallback
newRTCSessionDescriptionCallbackAsync callback
= liftIO
(asyncCallback1
(\ sdp -> fromJSRefUnchecked sdp >>= \ sdp' -> callback sdp')) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/RTCSessionDescriptionCallback.hs | mit | 2,688 | 0 | 12 | 625 | 521 | 310 | 211 | 44 | 1 |
add :: Integer -> Integer -> Integer
add x y = x + y
inc = add 1
sum' :: (Num a) => [a] -> a
sum' = foldl (+) 0 | joelbirchler/talks | _includes/function-oriented-js/add.hs | mit | 116 | 0 | 7 | 35 | 69 | 37 | 32 | 5 | 1 |
import Control.Monad (unless)
import Test.Hspec (Spec, describe, expectationFailure, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import FoodChain (song)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "song" $ do
-- First we test the input, line by line, to give more
-- useful error messages.
it "matches lines" $ sequence_ lineAssertions
  -- Finally, because testing line by line cannot detect
  -- a missing newline at the end of the lyrics, we test
  -- the full song.
it "matches full song" $ song `shouldBe` lyrics
where
lineAssertions = zipWith checkLine [1 :: Int ..] $ zipMaybe (lines song) (lines lyrics)
checkLine lineno (got, want) =
unless (got == want) $
expectationFailure $ "mismatch at line " ++ show lineno ++ "\nexpected: " ++ show want ++ "\n but got: " ++ show got
zipMaybe [] [] = []
zipMaybe (x:xs) [] = (Just x , Nothing) : zipMaybe xs []
zipMaybe [] (y:ys) = (Nothing, Just y ) : zipMaybe [] ys
zipMaybe (x:xs) (y:ys) = (Just x , Just y ) : zipMaybe xs ys
-- Lyrics extracted from `exercism/problem-specifications` on 2016-09-21.
lyrics :: String
lyrics =
"I know an old lady who swallowed a fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a spider.\n\
\It wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a bird.\n\
\How absurd to swallow a bird!\n\
\She swallowed the bird to catch the spider that wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a cat.\n\
\Imagine that, to swallow a cat!\n\
\She swallowed the cat to catch the bird.\n\
\She swallowed the bird to catch the spider that wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a dog.\n\
\What a hog, to swallow a dog!\n\
\She swallowed the dog to catch the cat.\n\
\She swallowed the cat to catch the bird.\n\
\She swallowed the bird to catch the spider that wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a goat.\n\
\Just opened her throat and swallowed a goat!\n\
\She swallowed the goat to catch the dog.\n\
\She swallowed the dog to catch the cat.\n\
\She swallowed the cat to catch the bird.\n\
\She swallowed the bird to catch the spider that wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a cow.\n\
\I don't know how she swallowed a cow!\n\
\She swallowed the cow to catch the goat.\n\
\She swallowed the goat to catch the dog.\n\
\She swallowed the dog to catch the cat.\n\
\She swallowed the cat to catch the bird.\n\
\She swallowed the bird to catch the spider that wriggled and jiggled and tickled inside her.\n\
\She swallowed the spider to catch the fly.\n\
\I don't know why she swallowed the fly. Perhaps she'll die.\n\
\\n\
\I know an old lady who swallowed a horse.\n\
\She's dead, of course!\n"
-- d814ecca0455613eab66e9c86797ed12e714a2de
| exercism/xhaskell | exercises/practice/food-chain/test/Tests.hs | mit | 3,899 | 0 | 16 | 961 | 391 | 209 | 182 | 21 | 4 |
module Cases
(
-- * Processor
process,
-- ** Case Transformers
CaseTransformer,
lower,
upper,
title,
-- ** Delimiters
Delimiter,
spinal,
snake,
whitespace,
camel,
-- * Default Processors
spinalize,
snakify,
camelize,
)
where
import Cases.Prelude hiding (Word)
import qualified Data.Attoparsec.Text as A
import qualified Data.Text as T
-- * Part
-------------------------
-- | A parsed info and a text of a part.
data Part =
Word Case T.Text |
Digits T.Text
data Case = Title | Upper | Lower
partToText :: Part -> T.Text
partToText = \case
Word _ t -> t
Digits t -> t
-- * Parsers
-------------------------
upperParser :: A.Parser Part
upperParser = Word Upper <$> T.pack <$> A.many1 char where
char = do
c <- A.satisfy isUpper
ok <- maybe True (not . isLower) <$> A.peekChar
if ok
then return c
else empty
lowerParser :: A.Parser Part
lowerParser = Word Lower <$> (A.takeWhile1 isLower)
titleParser :: A.Parser Part
titleParser = Word Title <$> (T.cons <$> headChar <*> remainder) where
headChar = A.satisfy isUpper
remainder = A.takeWhile1 isLower
digitsParser :: A.Parser Part
digitsParser = Digits <$> (A.takeWhile1 isDigit)
partParser :: A.Parser Part
partParser = titleParser <|> upperParser <|> lowerParser <|> digitsParser
-- |
-- A parser, which does in-place processing, using the supplied 'Folder'.
partsParser :: Monoid r => Folder r -> A.Parser r
partsParser fold = loop mempty where
loop r =
(partParser >>= loop . fold r) <|>
(A.anyChar *> loop r) <|>
(A.endOfInput *> pure r)
-- * Folders
-------------------------
type Folder r = r -> Part -> r
type Delimiter = Folder (Maybe T.Text)
spinal :: Delimiter
spinal =
(. partToText) .
fmap Just .
maybe id (\l r -> l <> "-" <> r)
snake :: Delimiter
snake =
(. partToText) .
fmap Just .
maybe id (\l r -> l <> "_" <> r)
whitespace :: Delimiter
whitespace =
(. partToText) .
fmap Just .
maybe id (\l r -> l <> " " <> r)
camel :: Delimiter
camel =
fmap Just .
maybe partToText (\l r -> l <> partToText (title r))
-- * CaseTransformers
-------------------------
type CaseTransformer = Part -> Part
lower :: CaseTransformer
lower = \case
Word c t -> Word Lower t' where
t' = case c of
Title -> T.uncons t |> \case
Nothing -> t
Just (h, t) -> T.cons (toLower h) t
Upper -> T.toLower t
Lower -> t
p -> p
upper :: CaseTransformer
upper = \case
Word c t -> Word Upper t' where
t' = case c of
Title -> T.uncons t |> \case
Nothing -> t
Just (h, t) -> T.cons h (T.toUpper t)
Upper -> t
Lower -> T.toUpper t
p -> p
title :: CaseTransformer
title = \case
Word c t -> Word Title t' where
t' = case c of
Title -> t
Upper -> T.uncons t |> \case
Nothing -> t
Just (h, t) -> T.cons (toUpper h) (T.toLower t)
Lower -> T.uncons t |> \case
Nothing -> t
Just (h, t) -> T.cons (toUpper h) t
p -> p
-- * API
-------------------------
-- |
-- Extract separate words from an arbitrary text using a smart parser and
-- produce a new text using case transformation and delimiter functions.
--
-- Note: to skip case transformation use the 'id' function.
process :: CaseTransformer -> Delimiter -> T.Text -> T.Text
process tr fo =
fromMaybe "" .
either (error . ("Parse failure: " <>)) id .
A.parseOnly (partsParser $ (. tr) . fo)
-- |
-- Transform an arbitrary text into a lower spinal case.
--
-- Same as @('process' 'lower' 'spinal')@.
spinalize :: T.Text -> T.Text
spinalize = process lower spinal
-- |
-- Transform an arbitrary text into a lower snake case.
--
-- Same as @('process' 'lower' 'snake')@.
snakify :: T.Text -> T.Text
snakify = process lower snake
-- |
-- Transform an arbitrary text into a camel case,
-- while preserving the case of the first character.
--
-- Same as @('process' 'id' 'camel')@.
camelize :: T.Text -> T.Text
camelize = process id camel
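-- * Usage sketch
-------------------------
-- Illustrative examples of the three default processors; the inputs are
-- arbitrary, and the expected results follow from the parsers and folders
-- above. The literals rely on the same OverloadedStrings setup this module
-- already uses for its 'T.Text' literals.
_usageSketch :: [T.Text]
_usageSketch =
  [ spinalize "SomeHTMLParser"  -- expected: "some-html-parser"
  , snakify "Got no info"       -- expected: "got_no_info"
  , camelize "some_text_here"   -- expected: "someTextHere"
  ]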
| nikita-volkov/cases | library/Cases.hs | mit | 4,015 | 0 | 20 | 984 | 1,270 | 679 | 591 | -1 | -1 |
module Test.Smoke.Types.Executable where
import Test.Smoke.Paths
import Test.Smoke.Types.Base
data Shell
= Shell (ResolvedPath File) Args
deriving (Eq, Show)
data Executable
= ExecutableProgram (ResolvedPath File) Args
| ExecutableScript Shell Script
deriving (Eq, Show)
| SamirTalwar/Smoke | src/lib/Test/Smoke/Types/Executable.hs | mit | 284 | 0 | 8 | 44 | 84 | 49 | 35 | 10 | 0 |
-- |
-- Extensions to the standard mutable Vector API.
module VectorBuilder.MVector where
import Data.Vector.Generic.Mutable
import qualified VectorBuilder.Core.Builder as A
import qualified VectorBuilder.Core.Update as C
import VectorBuilder.Prelude
-- |
-- Construct a mutable vector from a builder.
--
-- Supports all kinds of vectors: boxed, unboxed, primitive, storable.
{-# INLINEABLE build #-}
build :: MVector vector element => A.Builder element -> ST s (vector s element)
build (A.Builder size (C.Update update)) =
do
vector <- unsafeNew size
update vector 0
return vector
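-- A hedged usage sketch: it assumes the package's public
-- "VectorBuilder.Builder" module (e.g. its 'singleton') for constructing an
-- 'A.Builder', and freezes the result with 'Data.Vector.create':
--
-- > import qualified Data.Vector as V
-- > import qualified VectorBuilder.Builder as B
-- >
-- > chars :: V.Vector Char
-- > chars = V.create (build (B.singleton 'a' <> B.singleton 'b'))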
| nikita-volkov/vector-builder | library/VectorBuilder/MVector.hs | mit | 598 | 0 | 10 | 98 | 129 | 72 | 57 | 11 | 1 |
module Data.Aeson.Schema.Types.Tests
( tests
) where
import Test.Framework
import Test.Framework.Providers.HUnit
import qualified Test.HUnit as HU
import Data.Aeson
import qualified Data.ByteString.Lazy as L
import Data.Foldable (toList)
import qualified Data.HashMap.Strict as H
import Data.Text (Text)
import Data.Aeson.Schema
import Data.Aeson.Schema.Choice
import Paths_aeson_schema (getDataFileName)
data TestFunctor a = TestFunctor Int a
instance Functor TestFunctor where
fmap f (TestFunctor i a) = TestFunctor i (f a)
tests :: [Test]
tests =
[ testCase "parse schema.json" $ do
schemaJson <- getDataFileName "examples/schema.json"
schemaBS <- L.readFile schemaJson
case decode schemaBS :: Maybe Value of
Nothing -> HU.assertFailure "JSON syntax error"
Just val -> case fromJSON val :: Result (Schema Text) of
Error e -> HU.assertFailure e
Success schema -> do
Just "http://json-schema.org/schema#" HU.@=? schemaId schema
0 HU.@=? schemaMinItems schema
, testCase "Foldable instance" $ do
let schemaWithRef ref = empty { schemaDRef = Just ref }
let schema = empty
{ schemaType = [Choice2of2 $ schemaWithRef "a"]
, schemaProperties = H.fromList [("aProperty", schemaWithRef "b")]
, schemaPatternProperties = let Right p = mkPattern "lorem.+" in [(p, schemaWithRef "c")]
, schemaAdditionalProperties = Choice2of2 $ schemaWithRef "d"
, schemaItems = Just $ Choice2of2 [schemaWithRef "e", schemaWithRef "f"]
, schemaAdditionalItems = Choice2of2 $ schemaWithRef "g"
, schemaDependencies = H.fromList [("aProperty", Choice2of2 $ schemaWithRef "h")]
, schemaDisallow = [Choice2of2 $ schemaWithRef "i"]
, schemaExtends = [schemaWithRef "j"]
, schemaDRef = Just "k"
}
map (:[]) ['a'..'k'] HU.@=? toList schema
]
| Fuuzetsu/aeson-schema | test/Data/Aeson/Schema/Types/Tests.hs | mit | 2,128 | 1 | 19 | 646 | 558 | 298 | 260 | 42 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | Hetcons_Transaction is a Monad for constructing transactions in which a message is processed.
-- From within Hetcons_Transaction, you can send messages (1a, 1b, 2a, 2b, proof_of_consensus), and
-- change the State.
-- You can also throw a Hetcons_Exception.
-- If an exception is thrown, it will be thrown in the IO monad, and NO STATE CHANGES WILL OCCUR, NO MESSAGES WILL BE SENT
module Hetcons.Receive_Message
(Hetcons_Transaction
,run_Hetcons_Transaction_IO
,get_state
,put_state
,update_state
,get_my_crypto_id
,get_my_private_key
,get_witness
,Add_Sent
,add_sent
,Receivable
,receive
,Sendable
,send
,Hetcons_Server(Hetcons_Server)
,hetcons_Server_crypto_id
,hetcons_Server_private_key
,hetcons_Server_address_book
,hetcons_Server_state_var
,hetcons_Server_verify_1a
,hetcons_Server_verify_1b
,hetcons_Server_verify_2a
,hetcons_Server_verify_2b
,hetcons_Server_verify_proof
,hetcons_Server_verify_quorums
,hetcons_Server_log_chan
)
where
import Hetcons.Hetcons_Exception ( Hetcons_Exception )
import Hetcons.Hetcons_State ( Hetcons_State, modify_and_read )
import Hetcons.Parsable (Parsable)
import Hetcons.Quorums (Monad_Verify_Quorums, verify_quorums, verify_quorums')
import Hetcons.Send_Message_IO ( send_Message_IO, Address_Book )
import Hetcons.Signed_Message
( Recursive_1b
,Recursive_1a
,Verified
,Recursive_2b
,Recursive_Proof_of_Consensus
,Recursive_2a
,Monad_Verify
,verify
,verify' )
import Hetcons.Value (Value)
import Charlotte_Types ( Crypto_ID, Signed_Message, Proposal_1a, Observers, Value_Witness )
import Control.Concurrent.Chan ( Chan )
import qualified Control.Concurrent.Map as CMap ( Map, lookup )
import Control.Concurrent.Map ( insertIfAbsent )
import Control.Concurrent.MVar ( MVar )
import Control.Exception.Base ( throw, catch )
import Control.Monad.Except ( throwError, catchError, MonadError )
import Control.Monad.IO.Class ( MonadIO(liftIO) )
import Control.Monad.Logger (MonadLogger, LoggingT, runChanLoggingT, unChanLoggingT, Loc, LogSource, LogLevel, LogStr)
import qualified Control.Monad.Parallel as Parallel ( sequence )
import Control.Monad.Reader ( MonadReader(reader, ask, local) )
import Control.Monad.Trans.Reader ( ReaderT, runReaderT )
import Crypto.Random
(drgNew, DRG(randomBytesGenerate), MonadRandom(getRandomBytes), getSystemDRG )
import Data.ByteString.Lazy ( ByteString )
import Data.Hashable ( Hashable )
import Data.HashSet ( HashSet, insert, toList, empty )
import Data.IORef
( IORef, writeIORef, readIORef, newIORef, atomicModifyIORef )
import Data.Serialize (Serialize)
import Data.Tuple ( swap )
-- | Hetcons_Transaction is a Monad for constructing transactions in which a message is processed.
-- From within Hetcons_Transaction, you can send messages (1a, 1b, 2a, 2b, proof_of_consensus), and
-- change the Participant_State.
-- You can also throw a Hetcons_Exception.
-- If an exception is thrown, it will be thrown in the IO monad, and NO STATE CHANGES WILL OCCUR, NO MESSAGES WILL BE SENT
-- It is constructed using the IO Monad, with a Reader Monad, so you can read environment variables,
-- such as the Var referencing the State.
-- As a Newtype, you can't use, say, arbitrary IO stuff in this Monad, but only stuff exported in this submodule.
-- TODO: We may want to make this an adjective, like any Monad m can be instance Hetcons_Transaction s a m where ...
newtype Hetcons_Transaction s v a =
Hetcons_Transaction {unwrap :: (
LoggingT (
ReaderT (Hetcons_Transaction_Environment s v)
IO) a)
} deriving (MonadLogger, Functor, Applicative, Monad, MonadReader (Hetcons_Transaction_Environment s v))
-- | Defines all the data that constitute a server, which maintains some state of type `s`:
-- This includes Memoization Caches
data (Hetcons_State s, Value v) => Hetcons_Server s v = Hetcons_Server {
-- | The Server's Cryptographic ID (public key)
hetcons_Server_crypto_id :: Crypto_ID
-- | The Server's Private Key
,hetcons_Server_private_key :: ByteString
-- | The Cache of the Server's open TCP connection handles to its neighbors
,hetcons_Server_address_book :: Address_Book
-- | References the Server's mutable state
,hetcons_Server_state_var :: MVar s
-- | The Memoization Cache for verifying 1As
,hetcons_Server_verify_1a :: CMap.Map Signed_Message (Verified (Recursive_1a v))
-- | The Memoization Cache for verifying 1Bs
,hetcons_Server_verify_1b :: CMap.Map Signed_Message (Verified (Recursive_1b v))
-- | The Memoization Cache for verifying 2As
,hetcons_Server_verify_2a :: CMap.Map Signed_Message (Verified (Recursive_2a v))
-- | The Memoization Cache for verifying 2Bs
,hetcons_Server_verify_2b :: CMap.Map Signed_Message (Verified (Recursive_2b v))
-- | The Memoization Cache for verifying Proof_of_Consensus
,hetcons_Server_verify_proof :: CMap.Map Signed_Message (Verified (Recursive_Proof_of_Consensus v))
-- | The Memoization Cache for computing Quorums
,hetcons_Server_verify_quorums :: CMap.Map Proposal_1a Observers
-- | The Channel input to the logger
,hetcons_Server_log_chan :: Chan (Loc, LogSource, LogLevel, LogStr)
}
-- | The immutable part of the Hetcons_Transaction Monad's state.
-- For instance, this is how we reference stuff in the Server's definition.
-- You can read this stuff anywhere in the Monad, but never change it.
data (Hetcons_State s, Value v) => Hetcons_Transaction_Environment s v = Hetcons_Transaction_Environment {
-- | The data representing this server instance
hetcons_Transaction_Environment_hetcons_server :: Hetcons_Server s v
-- | A reference to this server's state
,hetcons_Transaction_Environment_transaction_state :: IORef (Hetcons_Transaction_State s v)
-- | The "witness" from over the wire if this is a 1A or a 1B
,hetcons_Transaction_Environment_witness :: Value_Witness
}
-- | This is the internal state maintained by the Hetcons_Transaction Monad.
-- It tracks messages to be sent (recall that this Monad represents a transaction)
-- It also tracks what the new Participant_State should be.
-- This will be reset (except the hetcons_state) in each transaction.
data (Hetcons_State s, Value v) => Hetcons_Transaction_State s v = Hetcons_Transaction_State {
-- | The 1As sent thus far in this transaction
sent_1as :: HashSet (Verified (Recursive_1a v))
-- | The 1Bs sent thus far in this transaction
,sent_1bs :: HashSet (Verified (Recursive_1b v))
-- | The 2As sent thus far in this transaction
,sent_2as :: HashSet (Verified (Recursive_2a v))
-- | The 2Bs sent thus far in this transaction
,sent_2bs :: HashSet (Verified (Recursive_2b v))
                                     -- | The Proof_of_Consensus messages sent thus far in this transaction
,sent_Proof_of_Consensus :: HashSet (Verified (Recursive_Proof_of_Consensus v))
-- | The state (possibly changed over the course of the transaction) of the server within the transaction.
-- | The server's "real" state will be set to this at the end of the transaction, if no Errors are thrown.
,hetcons_state :: s
}
-- | MonadError instantiation, In which we basically catch and throw errors into the IO Monad.
-- TODO: This could probably be done with "deriving," except that for some reason we don't have (MonadIO m) => instance MonadError Hetcons_Exception m
instance (Hetcons_State s, Value v) => MonadError Hetcons_Exception (Hetcons_Transaction s v) where
throwError = Hetcons_Transaction . throw
catchError action handler = do { environment@Hetcons_Transaction_Environment{hetcons_Transaction_Environment_hetcons_server =
Hetcons_Server{hetcons_Server_log_chan = chan}} <- ask
; Hetcons_Transaction $ liftIO $ catch (runReaderT (runChanLoggingT chan (unwrap action)) environment)
(\e -> runReaderT (runChanLoggingT chan $ unwrap (handler e)) environment)}
-- | When we want to getRandomBytes, we just call getSystemDRG down in the IO Monad
-- TODO: This could probably be done with "deriving," except that for some reason we don't have (MonadIO m) => instance MonadRandom m
instance (Hetcons_State s, Value v) => MonadRandom (Hetcons_Transaction s v) where
getRandomBytes i = (Hetcons_Transaction $ liftIO getSystemDRG) >>= return . fst . (randomBytesGenerate i)
-- | Helper function.
-- Creates a monadic, memoized version of the given function, given:
--
-- * a function to memoize
--
-- * a field which pulls the memoization cache from the Hetcons_Server
memoize :: (Eq a, Hashable a, Hetcons_State s, Value v) => (a -> (Hetcons_Transaction s v b)) -> ((Hetcons_Server s v) -> (CMap.Map a b)) -> (a -> (Hetcons_Transaction s v b))
memoize f m x = do { table <- reader (m . hetcons_Transaction_Environment_hetcons_server)
; cached <- Hetcons_Transaction $ liftIO $ CMap.lookup x table
; case cached of
Just y -> return y
Nothing -> do { y <- f x
; Hetcons_Transaction $ liftIO $ insertIfAbsent x y table
; return y}}
-- | Memoization for verifying 1As in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v, Parsable (Hetcons_Transaction s v v)) => Monad_Verify (Recursive_1a v) (Hetcons_Transaction s v) where
verify = memoize verify' hetcons_Server_verify_1a
-- | Memoization for verifying 1Bs in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v, Hashable v, Eq v, Parsable (Hetcons_Transaction s v v)) => Monad_Verify (Recursive_1b v) (Hetcons_Transaction s v) where
verify = memoize verify' hetcons_Server_verify_1b
-- | Memoization for verifying 2As in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v, Hashable v, Eq v, Parsable (Hetcons_Transaction s v v)) => Monad_Verify (Recursive_2a v) (Hetcons_Transaction s v) where
verify = memoize verify' hetcons_Server_verify_2a
-- | Memoization for verifying 2Bs in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v, Hashable v, Eq v, Parsable (Hetcons_Transaction s v v)) => Monad_Verify (Recursive_2b v) (Hetcons_Transaction s v) where
verify = memoize verify' hetcons_Server_verify_2b
-- | Memoization for verifying Proof_of_Consensus in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v, Hashable v, Eq v, Parsable (Hetcons_Transaction s v v)) =>
Monad_Verify (Recursive_Proof_of_Consensus v) (Hetcons_Transaction s v) where
verify = memoize verify' hetcons_Server_verify_proof
-- | Memoization for verifying Quorums in a Hetcons Transaction
instance {-# OVERLAPPING #-} (Hetcons_State s, Value v) => Monad_Verify_Quorums (Hetcons_Transaction s v) where
verify_quorums = memoize verify_quorums' hetcons_Server_verify_quorums
-- | reads the current Hetcons_Transaction_State from the Monad's state
get_Hetcons_Transaction_State :: (Hetcons_State s, Value v) => Hetcons_Transaction s v (Hetcons_Transaction_State s v)
get_Hetcons_Transaction_State = do { transaction_state_ref <- Hetcons_Transaction $ reader hetcons_Transaction_Environment_transaction_state
; Hetcons_Transaction $ liftIO $ readIORef transaction_state_ref}
-- | writes the Hetcons_Transaction_State to the Monad's state
put_Hetcons_Transaction_State :: (Hetcons_State s, Value v) => (Hetcons_Transaction_State s v) -> Hetcons_Transaction s v ()
put_Hetcons_Transaction_State v = do { transaction_state_ref <- Hetcons_Transaction $ reader hetcons_Transaction_Environment_transaction_state
; Hetcons_Transaction $ liftIO $ writeIORef transaction_state_ref v}
-- | changes the current Hetcons_Transaction_State in the Monad's state, returning an extra value as well.
update_Hetcons_Transaction_State :: (Hetcons_State s, Value v) => ((Hetcons_Transaction_State s v) -> (a, (Hetcons_Transaction_State s v))) -> Hetcons_Transaction s v a
update_Hetcons_Transaction_State f = do { transaction_state_ref <- Hetcons_Transaction $ reader hetcons_Transaction_Environment_transaction_state
; Hetcons_Transaction $ liftIO $ atomicModifyIORef transaction_state_ref (swap . f)}
-- | reads the current Participant_State from the Monad's state
get_state :: (Hetcons_State s, Value v) => Hetcons_Transaction s v s
get_state = get_Hetcons_Transaction_State >>= (return . hetcons_state)
-- | writes the Participant_State to the Monad's state
put_state :: (Hetcons_State s, Value v) => s -> Hetcons_Transaction s v ()
put_state s = update_Hetcons_Transaction_State (\x -> ((), x{hetcons_state = s}))
-- | changes the current Participant_State in the Monad's state, returning an extra value as well.
update_state :: (Hetcons_State s, Value v) => (s -> (a, s)) -> Hetcons_Transaction s v a
update_state f = update_Hetcons_Transaction_State (\x -> let (y,z) = f $ hetcons_state x
in (y,x{hetcons_state = z}))
-- | Reads the Witness from the Monad
get_witness :: (Hetcons_State s, Value v) => Hetcons_Transaction s v Value_Witness
get_witness = reader hetcons_Transaction_Environment_witness
-- | Reads the Crypto_ID (public key) from the Monad
get_my_crypto_id :: (Hetcons_State s, Value v) => Hetcons_Transaction s v Crypto_ID
get_my_crypto_id = reader (hetcons_Server_crypto_id . hetcons_Transaction_Environment_hetcons_server)
-- | Reads the private key from the Monad
get_my_private_key :: (Hetcons_State s, Value v) => Hetcons_Transaction s v ByteString
get_my_private_key = reader (hetcons_Server_private_key . hetcons_Transaction_Environment_hetcons_server)
-- | Given a Participant_State_Var which points to the current Participant_State,
-- atomically changes the Hetcons_State by running a Hetcons_Transaction.
-- This will then send messages (1a, 1b, 2a, 2b, proof_of_consensus), and
-- return the returned value into the IO Monad.
-- You can also throw a Hetcons_Exception.
-- If an exception is thrown, it will be thrown in the IO monad, and NO STATE CHANGES WILL OCCUR, NO MESSAGES WILL BE SENT
run_Hetcons_Transaction_IO :: (Hetcons_State s, Value v) =>
(Hetcons_Server s v) -> ((Verified (Recursive_Proof_of_Consensus v)) -> IO ()) -> Value_Witness -> (Hetcons_Transaction s v a) -> IO a
run_Hetcons_Transaction_IO (server@Hetcons_Server{hetcons_Server_log_chan = log_chan}) do_on_consensus witness receive_message =
do { (answer, final_state) <- modify_and_read (hetcons_Server_state_var server)
(\start_state -> do { start_transaction_state <- newIORef (
Hetcons_Transaction_State {
sent_1as = empty
,sent_1bs = empty
,sent_2as = empty
,sent_2bs = empty
,sent_Proof_of_Consensus = empty
,hetcons_state = start_state})
; let env = Hetcons_Transaction_Environment {
hetcons_Transaction_Environment_hetcons_server = server
,hetcons_Transaction_Environment_transaction_state = start_transaction_state
,hetcons_Transaction_Environment_witness = witness}
; runReaderT (runChanLoggingT log_chan $ unwrap (do
{ answer <- receive_message
; final_transaction_state <- get_Hetcons_Transaction_State
; final_state <- get_state
; return (final_state, (answer, final_transaction_state))}))
env})
-- as it is conceivable that sending messages could take arbitrarily long, we do so in parallel.
; Parallel.sequence ((map (send_Message_IO (hetcons_Server_address_book server) witness) $ toList $ sent_1as final_state)++
(map (send_Message_IO (hetcons_Server_address_book server) witness) $ toList $ sent_1bs final_state)++
(map (send_Message_IO (hetcons_Server_address_book server) witness) $ toList $ sent_2as final_state)++
(map (send_Message_IO (hetcons_Server_address_book server) witness) $ toList $ sent_2bs final_state)++
(map (send_Message_IO (hetcons_Server_address_book server) witness) $ toList $ sent_Proof_of_Consensus final_state)++
(map do_on_consensus $ toList $ sent_Proof_of_Consensus final_state))
; return answer}
-- | Class of types which can be sent as messages from within a Hetcons_Transaction monad.
class (Value v) => Add_Sent a v where
-- | Adds a message to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
add_sent :: (Hetcons_State s) => a -> Hetcons_Transaction s v ()
-- | Adds a Proposal_1a to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
instance (Value v) => Add_Sent (Verified (Recursive_1a v)) v where
add_sent p = update_Hetcons_Transaction_State (\x -> ((),x{sent_1as = insert p $ sent_1as x}))
-- | Adds a Phase_1b to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
instance (Value v) => Add_Sent (Verified (Recursive_1b v)) v where
add_sent p = update_Hetcons_Transaction_State (\x -> ((),x{sent_1bs = insert p $ sent_1bs x}))
-- | Adds a Phase_2a to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
instance (Value v) => Add_Sent (Verified (Recursive_2a v)) v where
add_sent p = update_Hetcons_Transaction_State (\x -> ((),x{sent_2as = insert p $ sent_2as x}))
-- | Adds a Phase_2b to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
instance (Value v) => Add_Sent (Verified (Recursive_2b v)) v where
add_sent p = update_Hetcons_Transaction_State (\x -> ((),x{sent_2bs = insert p $ sent_2bs x}))
-- | Adds a Proof_of_Consensus to the set of outgoing messages in this Monadic transaction.
-- This is intended to be used from within the `send` function.
-- Most of the time, you'll want to use `send`, which may have stuff to check to ensure everything's going correctly.
instance (Value v) => Add_Sent (Verified (Recursive_Proof_of_Consensus v)) v where
add_sent p = update_Hetcons_Transaction_State (\x -> ((),x{sent_Proof_of_Consensus = insert p $ sent_Proof_of_Consensus x}))
-- | If you want to be able to receive a message and trigger a Monadic transaction (so, all the messages), this is what you implement.
class (Hetcons_State s, Value v) => Receivable s v a where
-- | Implement receive to dictate what to do when a message comes in.
receive :: a -> Hetcons_Transaction s v ()
-- | Those messages which you can send out from within a Monadic transaction are Sendable
class (Hetcons_State s, Value v) => Sendable s v a where
-- | send a message from within a monadic transaction
send :: a -> Hetcons_Transaction s v ()
| isheff/hetcons | src/Hetcons/Receive_Message.hs | mit | 20,820 | 1 | 22 | 4,681 | 3,564 | 1,979 | 1,585 | 196 | 2 |
{- Copyright (c) 2007 John Goerzen <[email protected]>
Please see the COPYRIGHT file -}
module Metacity where
import System.Gnome.GConf.GConfClient
import Utils
import Data.List
import Data.Maybe(catMaybes)
{- | Gets the currently bound macros. Returns [(Name, Shortcut)]. -}
getMacroBindings :: IO [(String, String)]
getMacroBindings = do
gc <- gconfGetDefault
bindings <- getBindings gc >>= return . filter ((/=) "disabled" . snd)
commands <- getCommands gc >>= return . filter ((/=) "" . snd)
putStrLn $ "GMB: b: " ++ show bindings
putStrLn $ "GMB: c: " ++ show commands
return . (flip combine) bindings .
map (\(x, y) -> (x, drop (length "gmacroplay ") y)) .
filter (isPrefixOf "gmacroplay " . snd) $
commands
-- Next 3 funcs: Convert both commands and bindings to ("command_x", value)
gc2str [] = []
gc2str ((x, GConfValueString s):xs) = (x, s):gc2str xs
gc2str (_:xs) = gc2str xs
getBindings gc =
do bindings <- gconfAllEntries gc dir
return $ map (\(name, val) -> (drop (length dir + 5) name, val)) .
filter (isPrefixOf (dir ++ "/run_command_") . fst) .
gc2str $ bindings
where dir = "/apps/metacity/global_keybindings"
getCommands gc =
do commands <- gconfAllEntries gc dir
return $ map (\(x, y) -> (drop (length dir + 1) x, y)) .
filter (isPrefixOf (dir ++ "/command_") . fst) .
gc2str $ commands
where dir = "/apps/metacity/keybinding_commands"
{- | Binds a new macro. -}
bindMacro :: String -- ^ macro name
-> String -- ^ keyboard shortcut
-> IO ()
bindMacro name shortcut =
do gc <- gconfGetDefault
bindings <- getBindings gc
commands <- getCommands gc
let toremove_cmds = map fst . filter ((==) shortcut . snd) $ bindings
let toremove_names = map (drop (length "gmacroplay ")) .
catMaybes . map (\x -> lookup x commands)
$ toremove_cmds
let command = findCommand bindings commands
gconfSet gc ("/apps/metacity/global_keybindings/run_" ++ command)
shortcut
gconfSet gc ("/apps/metacity/keybinding_commands/" ++ command)
("gmacroplay " ++ name)
mapM_ removeBinding toremove_names
where findCommand [] _ = error "Couldn't find available binding"
findCommand _ [] = error "Couldn't find available command"
findCommand ((bcmd,bshort):xs) commands =
if bshort == "disabled"
then case lookup bcmd commands of
Just "" -> bcmd
_ -> findCommand xs commands
else findCommand xs commands
{- | Remove binding. Takes macro name. -}
removeBinding :: String -> IO ()
removeBinding name =
do gc <- gconfGetDefault
putStrLn $ "removeBinding " ++ name
bindings <- getBindings gc
print 78
commands <- getCommands gc
print 80
let cmd = find ((==) ("gmacroplay " ++ name) . snd) commands
case cmd of
Nothing -> return () -- No command to unbind
Just (x,_) -> do
print 85
gconfSet gc ("/apps/metacity/global_keybindings/run_"
++ x) "disabled"
print 88
gconfSet gc ("/apps/metacity/keybinding_commands/" ++ x) ""
| jgoerzen/gmacro | Metacity.hs | gpl-2.0 | 3,462 | 7 | 18 | 1,096 | 1,000 | 496 | 504 | 73 | 5 |
-- | Formats Haskell source code as HTML with inline CSS.
module Language.Haskell.HsColour.InlineCSS (hscolour,top'n'tail) where
{-@ LIQUID "--totality" @-}
import Language.Haskell.HsColour.Anchors
import Language.Haskell.HsColour.Classify as Classify
import Language.Haskell.HsColour.Colourise
import Language.Haskell.HsColour.HTML (renderAnchors, renderComment,
renderNewLinesAnchors, escape)
import Text.Printf
-- | Formats Haskell source code as a complete HTML document with inline styling
hscolour :: ColourPrefs -- ^ Preferences for styling.
-> Bool -- ^ Whether to include anchors.
-> String -- ^ Haskell source code.
-> String -- ^ An HTML document containing the coloured
-- Haskell source code.
hscolour prefs anchor =
pre
. (if anchor
then renderNewLinesAnchors
. concatMap (renderAnchors (renderToken prefs))
. insertAnchors
else concatMap (renderToken prefs))
. tokenise
top'n'tail :: String -> String -> String
top'n'tail title = (cssPrefix title ++) . (++cssSuffix)
pre :: String -> String
pre = ("<pre style=\"font-family:Consolas, Monaco, Monospace;\">"++)
. (++"</pre>")
renderToken :: ColourPrefs -> (TokenType,String) -> String
renderToken prefs (cls,text) =
stylise (colourise prefs cls) $
if cls == Comment then renderComment text else escape text
stylise :: [Highlight] -> String -> String
stylise hs s = "<span style=\"" ++ concatMap style hs ++ "\">" ++s++ "</span>"
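-- For example, combining two highlights (a sketch; the output follows from
-- 'style' and 'csscolour' below):
--
--   stylise [Bold, Foreground Red] "x"
--     == "<span style=\"font-weight: bold;color: #ff0000;\">x</span>"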
cssPrefix title = unlines
["<?xml version=\"1.0\" encoding=\"UTF-8\">"
,"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"
,"<html>"
,"<head>"
,"<!-- Generated by HsColour, http://code.haskell.org/~malcolm/hscolour/ -->"
,"<title>"++title++"</title>"
,"</head>"
,"<body style=\"background-color: #131313; color: #ffffff;\">"
]
cssSuffix = unlines
["</body>"
,"</html>"
]
style :: Highlight -> String
style Normal = ""
style Bold = "font-weight: bold;"
style Dim = "font-weight: lighter;"
style Underscore = "text-decoration: underline;"
style Blink = "text-decoration: blink;"
style ReverseVideo = ""
style Concealed = "text-decoration: line-through;"
style (Foreground c) = "color: "++csscolour c++";"
style (Background c) = "background-color: "++csscolour c++";"
style Italic = "font-style: italic;"
csscolour :: Colour -> String
csscolour Black = "#000000"
csscolour Red = "#ff0000"
csscolour Green = "#00ff00"
csscolour Yellow = "#ffff00"
csscolour Blue = "#0000ff"
csscolour Magenta = "#ff00ff"
csscolour Cyan = "#00ffff"
csscolour White = "#ffffff"
csscolour (Rgb r g b) = printf "#%02x%02x%02x" r g b
| nikivazou/hscolour | Language/Haskell/HsColour/InlineCSS.hs | gpl-2.0 | 2,858 | 0 | 15 | 618 | 608 | 333 | 275 | 63 | 2 |
{-# LANGUAGE RecordWildCards #-}
module VirMat.PhaseTrans
( PhaseTransformation (..)
, generateANG
, generateTransformation
) where
import DeUni.Types
import File.ANGReader
import Hammer.MicroGraph
import Hammer.VTK (writeMultiVTKfile)
import Linear.Vect
import Texture.Bingham
import Texture.Orientation
import qualified Data.Vector as V
import qualified Data.List as L
import qualified Data.IntMap as IM
import VirMat.Core.FlexMicro
import VirMat.Distributions.Texture.ODFSampling
import VirMat.IO.Export.ANG.RasterEngine
import VirMat.IO.Import.Types
import VirMat.Types
import VirMat.Run2D
import VirMat.Core.Sampling
data PhaseTransformation g
= PhaseTransformation
{ microParent :: FlexMicro Vec2 g
, microProduct :: FlexMicro Vec2 g
}
-- | Generate an ANG file with the rasterization algorithm. Input: step size, simulation in 2D.
-- Output: square mode ANG.
generateANG :: Double -> Simulation Vec2 -> IO ANGdata
generateANG step Simulation{..} = let
fm :: FlexMicro Vec2 ()
fm = mkFlexMicro grainSet
q1 = mkQuaternion $ Vec4 0 0 0 1
q2 = mkQuaternion $ Vec4 0 0 1 0
q3 = mkQuaternion $ Vec4 0 1 0 0
dist = mkBingham (1, q1) (1, q2) (1, q3)
in do
fmTex <- addMicroFlexTexture dist fm
return $ flexmicroToANG 0 step box fmTex
generateTransformation :: JobRequest -> IO (PhaseTransformation GrainID)
generateTransformation job = do
simParent <- runVirMat2D job
simProduct <- runVirMat2D (job {gsDist = modDist 10})
let
microParent = modifyGrainProps (\gid _ -> gid) $ mkFlexMicro (grainSet simParent)
parentProps = getParentProp 0 microParent simProduct
microProduct = applyParentProp simProduct (mkGrainID (-1)) parentProps
return PhaseTransformation
{ microParent = microParent
, microProduct = microProduct
}
applyParentProp :: Simulation Vec2 -> g -> [(Int, g)] -> FlexMicro Vec2 g
applyParentProp simProduct nullprop props = let
table = IM.fromList props
func gid _ = IM.findWithDefault nullprop (unGrainID gid) table
in modifyGrainProps func $ mkFlexMicro (grainSet simProduct)
-- | For each point of the product simulation, find which parent grain (with its boundary meshed at the
-- given subdivision level) contains it. INPUT: level of subdivision, parent microstructure, product
-- simulation. OUTPUT: list of (point index, parent grain property)
getParentProp :: Int -> FlexMicro Vec2 g -> Simulation Vec2 -> [(Int, g)]
getParentProp n microParent simProduct = let
gs = getGrainIDList (flexGraph2D microParent)
cs = V.imap (\i p -> (i, point p)) (pointSet simProduct)
filterInsideGrain acc gid = case getGrainMeshAndProp gid n microParent of
Just (ps, ts, prop) -> V.toList xs ++ acc
where
xs = V.map (\(i,_) -> (i, prop)) (V.filter (func . snd) cs)
func c = V.any (\(p1,p2,p3) -> isInsideTriangle (ps V.! p1, ps V.! p2, ps V.! p3) c) ts
_ -> acc
in L.foldl' filterInsideGrain [] gs
-- ================================================================================
testJob :: JobRequest
testJob = VoronoiJob
{ dimension = Dimension2D
, distrType = RandomDistribution
, gsDist = [CombDist distJob]
, seed = Just 10
, output = Output "" "" []
}
modDist :: Double -> [CombDist]
modDist k = [CombDist $ distJob {normalMean = 5 / k}]
distJob :: Normal
distJob = Normal
{ normalScale = 1
, normalMean = 5
, normalVar = 1
}
runTest :: JobRequest -> IO ()
runTest job = do
sim <- generateTransformation job
let
dir = "/Users/edgar/Desktop/"
showTex = RenderGrainProp ("Value", \_ x -> fmap unGrainID x)
writeMultiVTKfile (dir ++ "virmat-parent.vtu" ) True . renderFlexMicro [showGrainID, showTex] 1 . microParent $ sim
writeMultiVTKfile (dir ++ "virmat-product2.vtu") True . renderFlexMicro [showGrainID, showTex] 1 . microProduct $ sim
| lostbean/VirMat | src/VirMat/PhaseTrans.hs | gpl-3.0 | 3,740 | 0 | 19 | 717 | 1,148 | 617 | 531 | 85 | 2 |
module Main where
import System.Environment (getArgs)
import Data.List (permutations)
import Data.Char (toLower)
isOrderedSquare :: [(Int,Int)] -> Bool
isOrderedSquare [(x1,y1),(x2,y2),(x3,y3),(x4,y4)] =
let l1 = (x1 - x2)^2 + (y1 - y2)^2
l2 = (x2 - x3)^2 + (y2 - y3)^2
l3 = (x3 - x4)^2 + (y3 - y4)^2
l4 = (x4 - x1)^2 + (y4 - y1)^2
l5 = (x4 - x2)^2 + (y4 - y2)^2
l6 = (x3 - x1)^2 + (y3 - y1)^2
in l1 == l2 && l2 == l3 && l3 == l4 && l5 == l6 && l1 > 0
isOrderedSquare _ = False
isSquare :: [(Int,Int)] -> Bool
isSquare = any isOrderedSquare . permutations
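-- For example:
--   isSquare [(0,0),(0,1),(1,1),(1,0)] == True   -- unit square
--   isSquare [(0,0),(0,2),(1,1),(2,0)] == False  -- no ordering gives four equal sides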
processLine :: String -> String
processLine line = map toLower . show . isSquare $ read ("[" ++ line ++ "]")
main :: IO ()
main = do
[inputFile] <- getArgs
input <- readFile inputFile
mapM_ putStrLn $ map processLine $ lines input
| cryptica/CodeEval | Challenges/101_FindASquare/main.hs | gpl-3.0 | 854 | 0 | 15 | 220 | 463 | 251 | 212 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Language.UHIM.Dictionary.Keybind.TcEl where
import Language.UHIM.Dictionary.Keybind.Vim (Mapping(..))
import Data.String
import Data.Map (Map)
import Data.Tuple
import qualified Data.Map as M
data KeyMap = KeyMap { name :: String
, indicator :: String
, mappings :: [Mapping]
}
escapeStringWithQuote :: (IsString s, Monoid s)=> String -> s
escapeStringWithQuote s= mconcat [ "\""
, escapeString s
, "\""
]
escapeString :: IsString s => String -> s
escapeString = fromString . concatMap escapeChar
where
escapeChar '"' = "\\\""
escapeChar '\\' = "\\\\"
escapeChar x = [x]
escapeSymbol :: IsString s => String -> s
escapeSymbol = fromString . concatMap escapeChar
where
escapeChar '"' = "\\\""
escapeChar '\\' = "\\\\"
escapeChar ' ' = "\\ "
escapeChar x = [x]
keyCodeMap :: Map Char Int
keyCodeMap = M.fromList $ zipWith (curry swap) [0..]
[ '1', '2', '3', '4', '5', '6', '7', '8', '9', '0'
, 'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'
, 'a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l', ';'
, 'z', 'x', 'c', 'v', 'b', 'n', 'm', ',', '.', '/'
, ' '
]
convertKeyCodeMap :: String -> Maybe [Int]
convertKeyCodeMap = mapM (`M.lookup` keyCodeMap)
convertKeySequence :: (IsString s) => String -> Maybe s
convertKeySequence str = do
cs <- convertKeyCodeMap str
return . fromString . unwords . map show $ cs
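-- For example, with s ~ String (key codes follow from keyCodeMap above):
--   convertKeySequence "alj" == Just "20 28 26"
--   convertKeySequence "@"   == Nothing   -- '@' is not on the 41-key table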
keyEntry :: (IsString s, Monoid s) => String -> String -> String -> Maybe s
keyEntry k ch com = do
ks <- convertKeySequence k
return $ mconcat [ "(("
, ks
, ") . "
, fromString ch
, ")"
, " ;"
, fromString com
, "\n"
]
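-- A small sketch, with s ~ String; note that the character argument is
-- expected to arrive already quoted, as keyEntryStr does via
-- escapeStringWithQuote:
--
--   keyEntry "al" "\"x\"" "sample" == Just "((20 28) . \"x\") ;sample\n"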
keyEntryStr :: (IsString s, Monoid s) => Mapping -> Maybe s
keyEntryStr m = keyEntry k ch com
where
k = key m
ch = escapeStringWithQuote $ character m
com = comment m
data Config = Config {}
deriving (Show, Read, Eq, Ord)
emitKeytable :: (IsString s, Monoid s) => KeyMap -> s
emitKeytable _ = mconcat [ "(setq tcode-tbl (make-vector 40 (make-string 40 ?□)))"
, "\n"
, "(setq tcode-non-2-stroke-char-list (list (tcode-string-to-char \"□\")))"
, "\n"
]
emit :: (IsString s, Monoid s) => KeyMap -> s
emit m = mconcat [ "(require 'tc)"
, "\n"
, "(setq tcode-input-method '", imSymbol, ")"
, "\n"
, setq "tcode-transparent-mode-indicator" "------"
, "\n"
, setq "tcode-alnum-2byte-tcode-mode-indicator" "T "
, "\n"
, setq "tcode-hiragana-mode-indicator" " ひ"
, "\n"
, setq "tcode-katakana-mode-indicator" " カ"
, "\n"
, setq "tcode-tcode-mode-indicator" imIndicator
, "\n"
, "(setq tcode-stroke-file-name (concat tcode-data-directory "
, imSymbol
, " \".st\"))"
, "\n"
, "(setq tcode-key-num 41 tcode-ext-keys '(32))"
, "\n"
, emitKeytable m
, "\n"
, "(setq tcode-special-commands-alist '(\n"
, defaultKeys
, keys
, "))\n"
, setq "tcode-mode-help-string" . fromString $ defaultMessage imIndicator
]
where
imSymbol :: (IsString s, Monoid s) => s
imSymbol = escapeStringWithQuote $ name m
imIndicator = fromString $ indicator m
Just defaultKeys = mconcat $ map (\(k, v) -> keyEntry k v "") defaultMaps
Just keys = mconcat $ map keyEntryStr $ mappings m
setq :: (IsString s, Monoid s) => String -> String -> s
setq symb str = mconcat [ "(setq "
, escapeSymbol symb
, " "
, escapeStringWithQuote str
, ")"
]
defaultMaps :: [(String, String)]
defaultMaps = [ ("alj", "tcode-mazegaki-begin-conversion")
, ("ala", "tcode-bushu-begin-conversion")
, ("09", "tcode-mazegaki-begin-alternate-conversion")
, ("18", "(lambda () (tcode-mazegaki-convert 1 current-prefix-arg))")
, ("28", "(lambda () (tcode-mazegaki-convert 2 current-prefix-arg))")
, ("38", "(lambda () (tcode-mazegaki-convert 3 current-prefix-arg))")
, ("48", "(lambda () (tcode-mazegaki-convert 4 current-prefix-arg))")
, ("58", "(lambda () (tcode-mazegaki-convert nil t))")
, ("29", "(lambda () (tcode-mazegaki-convert 2 t))")
, ("39", "(lambda () (tcode-mazegaki-convert 3 t))")
, ("49", "(lambda () (tcode-mazegaki-convert 4 t))")
, ("59", "(lambda () (tcode-mazegaki-convert 5 t))")
, (" ", "\" \"")
, ("44", "(lambda () (tcode-display-stroke-sequence tcode-last-help-char-list))")
, ("55", "(lambda () (tcode-query-stroke (point)))")
, ("77", "tcode-bushu-begin-alternate-conversion")
, ("88", "(lambda () (tcode-transpose-strokes nil))")
, ("99", "tcode-clear")
]
defaultMessage :: String -> String
defaultMessage mode = unlines $
[ mode ++ "コードモード中のキー操作は次のとおり。"
, " ala : 部首合成変換モードに入る。alaを打ち続けると再帰的に部首合成変換を行うことができる。"
, " alj : 交ぜ書き変換を行う(see variable `tcode-use-prefix-mazegaki')。"
, " 44 : 直前に表示した打ち方を再表示する。"
, " 55 : ポイント位置にある文字の打ち方を表示する。"
, " 58 : 活用語を優先して交ぜ書き変換を行う。"
, " 77 : ポイント前にある2文字で部首合成変換を行う。"
, " 88 : ポイント位置にある文字を逆ストローク化する(例: 年->の)。"
, " 行末ではポイントの直前の文字を変換する。"
, " 99 : 交ぜ書き変換モードや部首変換モードにいた時に、"
, " それらを全部キャンセルする。また、ヘルプを消す。"
, " [1-4]8, [2-5]9: 文字数を指定して交ぜ書き変換を行う。"
, " \\[toggle-input-method] : " ++ mode ++ "コードモードを抜ける。"
, ""
, "初めて起動された時には,`tcode-ready-hook' を実行する。"
, "また、起動される度に`tcode-toggle-hook'を実行する。"
]
| na4zagin3/uhim-dict | src/Language/UHIM/Dictionary/Keybind/TcEl.hs | gpl-3.0 | 6,925 | 1 | 11 | 2,195 | 1,347 | 769 | 578 | 137 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.CreateApp
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates an app for a specified stack. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-creating.html Creating Apps>.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_CreateApp.html>
module Network.AWS.OpsWorks.CreateApp
(
-- * Request
CreateApp
-- ** Request constructor
, createApp
-- ** Request lenses
, caAppSource
, caAttributes
, caDataSources
, caDescription
, caDomains
, caEnableSsl
, caEnvironment
, caName
, caShortname
, caSslConfiguration
, caStackId
, caType
-- * Response
, CreateAppResponse
-- ** Response constructor
, createAppResponse
-- ** Response lenses
, carAppId
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
data CreateApp = CreateApp
{ _caAppSource :: Maybe Source
, _caAttributes :: Map AppAttributesKeys Text
, _caDataSources :: List "DataSources" DataSource
, _caDescription :: Maybe Text
, _caDomains :: List "Domains" Text
, _caEnableSsl :: Maybe Bool
, _caEnvironment :: List "Environment" EnvironmentVariable
, _caName :: Text
, _caShortname :: Maybe Text
, _caSslConfiguration :: Maybe SslConfiguration
, _caStackId :: Text
, _caType :: AppType
} deriving (Eq, Read, Show)
-- | 'CreateApp' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'caAppSource' @::@ 'Maybe' 'Source'
--
-- * 'caAttributes' @::@ 'HashMap' 'AppAttributesKeys' 'Text'
--
-- * 'caDataSources' @::@ ['DataSource']
--
-- * 'caDescription' @::@ 'Maybe' 'Text'
--
-- * 'caDomains' @::@ ['Text']
--
-- * 'caEnableSsl' @::@ 'Maybe' 'Bool'
--
-- * 'caEnvironment' @::@ ['EnvironmentVariable']
--
-- * 'caName' @::@ 'Text'
--
-- * 'caShortname' @::@ 'Maybe' 'Text'
--
-- * 'caSslConfiguration' @::@ 'Maybe' 'SslConfiguration'
--
-- * 'caStackId' @::@ 'Text'
--
-- * 'caType' @::@ 'AppType'
--
createApp :: Text -- ^ 'caStackId'
-> Text -- ^ 'caName'
-> AppType -- ^ 'caType'
-> CreateApp
createApp p1 p2 p3 = CreateApp
{ _caStackId = p1
, _caName = p2
, _caType = p3
, _caShortname = Nothing
, _caDescription = Nothing
, _caDataSources = mempty
, _caAppSource = Nothing
, _caDomains = mempty
, _caEnableSsl = Nothing
, _caSslConfiguration = Nothing
, _caAttributes = mempty
, _caEnvironment = mempty
}
-- | A 'Source' object that specifies the app repository.
caAppSource :: Lens' CreateApp (Maybe Source)
caAppSource = lens _caAppSource (\s a -> s { _caAppSource = a })
-- | One or more user-defined key/value pairs to be added to the stack attributes.
caAttributes :: Lens' CreateApp (HashMap AppAttributesKeys Text)
caAttributes = lens _caAttributes (\s a -> s { _caAttributes = a }) . _Map
-- | The app's data source.
caDataSources :: Lens' CreateApp [DataSource]
caDataSources = lens _caDataSources (\s a -> s { _caDataSources = a }) . _List
-- | A description of the app.
caDescription :: Lens' CreateApp (Maybe Text)
caDescription = lens _caDescription (\s a -> s { _caDescription = a })
-- | The app virtual host settings, with multiple domains separated by commas. For
-- example: ''www.example.com, example.com''
caDomains :: Lens' CreateApp [Text]
caDomains = lens _caDomains (\s a -> s { _caDomains = a }) . _List
-- | Whether to enable SSL for the app.
caEnableSsl :: Lens' CreateApp (Maybe Bool)
caEnableSsl = lens _caEnableSsl (\s a -> s { _caEnableSsl = a })
-- | An array of 'EnvironmentVariable' objects that specify environment variables to
-- be associated with the app. After you deploy the app, these variables are
-- defined on the associated app server instance. For more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-creating.html#workingapps-creating-environment Environment Variables>.
--
-- There is no specific limit on the number of environment variables. However,
-- the size of the associated data structure - which includes the variables'
-- names, values, and protected flag values - cannot exceed 10 KB (10240 Bytes).
-- This limit should accommodate most if not all use cases. Exceeding it will
-- cause an exception with the message, "Environment: is too large (maximum is
-- 10KB)."
--
-- This parameter is supported only by Chef 11.10 stacks. If you have specified
-- one or more environment variables, you cannot modify the stack's Chef version.
caEnvironment :: Lens' CreateApp [EnvironmentVariable]
caEnvironment = lens _caEnvironment (\s a -> s { _caEnvironment = a }) . _List
-- | The app name.
caName :: Lens' CreateApp Text
caName = lens _caName (\s a -> s { _caName = a })
-- | The app's short name.
caShortname :: Lens' CreateApp (Maybe Text)
caShortname = lens _caShortname (\s a -> s { _caShortname = a })
-- | An 'SslConfiguration' object with the SSL configuration.
caSslConfiguration :: Lens' CreateApp (Maybe SslConfiguration)
caSslConfiguration =
lens _caSslConfiguration (\s a -> s { _caSslConfiguration = a })
-- | The stack ID.
caStackId :: Lens' CreateApp Text
caStackId = lens _caStackId (\s a -> s { _caStackId = a })
-- | The app type. Each supported type is associated with a particular layer. For
-- example, PHP applications are associated with a PHP layer. AWS OpsWorks
-- deploys an application to those instances that are members of the
-- corresponding layer. If your app isn't one of the standard types, or you
-- prefer to implement your own Deploy recipes, specify 'other'.
caType :: Lens' CreateApp AppType
caType = lens _caType (\s a -> s { _caType = a })
newtype CreateAppResponse = CreateAppResponse
{ _carAppId :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'CreateAppResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'carAppId' @::@ 'Maybe' 'Text'
--
createAppResponse :: CreateAppResponse
createAppResponse = CreateAppResponse
{ _carAppId = Nothing
}
-- | The app ID.
carAppId :: Lens' CreateAppResponse (Maybe Text)
carAppId = lens _carAppId (\s a -> s { _carAppId = a })
instance ToPath CreateApp where
toPath = const "/"
instance ToQuery CreateApp where
toQuery = const mempty
instance ToHeaders CreateApp
instance ToJSON CreateApp where
toJSON CreateApp{..} = object
[ "StackId" .= _caStackId
, "Shortname" .= _caShortname
, "Name" .= _caName
, "Description" .= _caDescription
, "DataSources" .= _caDataSources
, "Type" .= _caType
, "AppSource" .= _caAppSource
, "Domains" .= _caDomains
, "EnableSsl" .= _caEnableSsl
, "SslConfiguration" .= _caSslConfiguration
, "Attributes" .= _caAttributes
, "Environment" .= _caEnvironment
]
instance AWSRequest CreateApp where
type Sv CreateApp = OpsWorks
type Rs CreateApp = CreateAppResponse
request = post "CreateApp"
response = jsonResponse
instance FromJSON CreateAppResponse where
parseJSON = withObject "CreateAppResponse" $ \o -> CreateAppResponse
<$> o .:? "AppId"
| romanb/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/CreateApp.hs | mpl-2.0 | 8,840 | 0 | 10 | 2,040 | 1,273 | 764 | 509 | 125 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ResourceViews.ZoneOperations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of operation resources contained within the specified
-- zone.
--
-- /See:/ <https://developers.google.com/compute/ Google Compute Engine Instance Groups API Reference> for @resourceviews.zoneOperations.list@.
module Network.Google.Resource.ResourceViews.ZoneOperations.List
(
-- * REST Resource
ZoneOperationsListResource
-- * Creating a Request
, zoneOperationsList
, ZoneOperationsList
-- * Request Lenses
, zolProject
, zolZone
, zolFilter
, zolPageToken
, zolMaxResults
) where
import Network.Google.Prelude
import Network.Google.ResourceViews.Types
-- | A resource alias for @resourceviews.zoneOperations.list@ method which the
-- 'ZoneOperationsList' request conforms to.
type ZoneOperationsListResource =
"resourceviews" :>
"v1beta2" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"operations" :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] OperationList
-- | Retrieves the list of operation resources contained within the specified
-- zone.
--
-- /See:/ 'zoneOperationsList' smart constructor.
data ZoneOperationsList = ZoneOperationsList'
{ _zolProject :: !Text
, _zolZone :: !Text
, _zolFilter :: !(Maybe Text)
, _zolPageToken :: !(Maybe Text)
, _zolMaxResults :: !(Textual Word32)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ZoneOperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'zolProject'
--
-- * 'zolZone'
--
-- * 'zolFilter'
--
-- * 'zolPageToken'
--
-- * 'zolMaxResults'
zoneOperationsList
:: Text -- ^ 'zolProject'
-> Text -- ^ 'zolZone'
-> ZoneOperationsList
zoneOperationsList pZolProject_ pZolZone_ =
ZoneOperationsList'
{ _zolProject = pZolProject_
, _zolZone = pZolZone_
, _zolFilter = Nothing
, _zolPageToken = Nothing
, _zolMaxResults = 500
}
-- | Name of the project scoping this request.
zolProject :: Lens' ZoneOperationsList Text
zolProject
= lens _zolProject (\ s a -> s{_zolProject = a})
-- | Name of the zone scoping this request.
zolZone :: Lens' ZoneOperationsList Text
zolZone = lens _zolZone (\ s a -> s{_zolZone = a})
-- | Optional. Filter expression for filtering listed resources.
zolFilter :: Lens' ZoneOperationsList (Maybe Text)
zolFilter
= lens _zolFilter (\ s a -> s{_zolFilter = a})
-- | Optional. Tag returned by a previous list request truncated by
-- maxResults. Used to continue a previous list request.
zolPageToken :: Lens' ZoneOperationsList (Maybe Text)
zolPageToken
= lens _zolPageToken (\ s a -> s{_zolPageToken = a})
-- | Optional. Maximum count of results to be returned. Maximum value is 500
-- and default value is 500.
zolMaxResults :: Lens' ZoneOperationsList Word32
zolMaxResults
= lens _zolMaxResults
(\ s a -> s{_zolMaxResults = a})
. _Coerce
instance GoogleRequest ZoneOperationsList where
type Rs ZoneOperationsList = OperationList
type Scopes ZoneOperationsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly",
"https://www.googleapis.com/auth/ndev.cloudman",
"https://www.googleapis.com/auth/ndev.cloudman.readonly"]
requestClient ZoneOperationsList'{..}
= go _zolProject _zolZone _zolFilter _zolPageToken
(Just _zolMaxResults)
(Just AltJSON)
resourceViewsService
where go
= buildClient
(Proxy :: Proxy ZoneOperationsListResource)
mempty
| rueshyna/gogol | gogol-resourceviews/gen/Network/Google/Resource/ResourceViews/ZoneOperations/List.hs | mpl-2.0 | 4,905 | 0 | 18 | 1,177 | 654 | 385 | 269 | 98 | 1 |
{- GPC parser -}
module GPC.Parser(parseSource) where
import Control.Applicative hiding ((<|>), many, optional, empty)
import Text.ParserCombinators.Parsec
import Text.Parsec.Expr
import GPC.AST
import GPC.Lexer
import Control.Arrow
import Data.List
{- Operator Tables -}
-- | Need operators to evaluate ordinary expressions and constant expressions
exprOperators pos = operators pos (\n c -> (Prefix (reservedOp n >> return (ExpUnaryOp c))))
(\n c -> (Infix (reservedOp n >> return (ExpBinOp c)) AssocLeft))
-- |Unary operators have higher precedence than binary ones
--
operators pos un bin = unaryOps pos un ++ binaryOps pos bin
-- |Binary operators from highest to lowest precedence
binaryOps :: SrcPos -> (String -> BinOps SrcPos -> Operator s u m a) -> [[Operator s u m a]]
binaryOps pos binary =
[[binary "*" (Mul pos) ,binary "/" (Div pos), binary "%" (Mod pos)]
,[binary "+" (Add pos), binary "-" (Sub pos)]
,[binary "<<" (ShiftL pos) ,binary ">>" (ShiftR pos)]
,[binary "<" (Less pos),binary "<=" (LessEq pos)]
,[binary ">" (Greater pos) ,binary ">=" (GreaterEq pos)]
,[binary "==" (Equals pos) ,binary "!=" (NEquals pos)]
,[binary "&" (BAnd pos)]
,[binary "^" (BXor pos)]
,[binary "|" (BOr pos)]
,[binary "&&" (And pos)]
,[binary "||" (Or pos)]
]
-- |Unary operators from highest to lowest precedence
unaryOps :: SrcPos -> (String -> UnaryOps SrcPos -> Operator s u m a) -> [[Operator s u m a]]
unaryOps pos unary = [[unary "-" (Neg pos), unary "!" (Not pos), unary "~" (BNot pos)]]
-- | Parse given source file, returns parse error string on
-- | failure, otherwise returns the AST for the source
parseSource :: String -> Either String (Program SrcPos)
parseSource = left show . parse program "" . removeCPPPreprocessor
-- | Parse entire source file
program :: Parser (Program SrcPos)
program = Program <$> (whiteSpace *> topLevels)
-- | Remove C++ preprocessor directives, replacing each with a blank line.
-- This is done here because the lexer doesn't support more than one set of
-- characters to mark comments, and has no way to check the first character
-- of each line.
removeCPPPreprocessor :: String -> String
removeCPPPreprocessor s = unlines $ map (\n -> if "#" `isPrefixOf` n then "" else n) (lines s)
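-- For example:
--   removeCPPPreprocessor "#include <x>\nint a = 1;\n" == "\nint a = 1;\n"
-- The directive line is blanked rather than dropped, so the line count (and
-- hence source positions reported later) is preserved.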
-- | Parse top level statements/definitions
topLevels :: Parser [TopLevel SrcPos]
topLevels = ((:) <$> topLevel <*> topLevels)
<|> (eof >> return [])
-- | Parse Top Level definitions
topLevel :: Parser (TopLevel SrcPos)
topLevel = try function
<|> try (TLAssign <$> assign)
<|> try (TLObjs <$> objs)
<|> (TLConstructObjs <$> constructObjs)
-- | Parse C++ Object definitions
objs :: Parser (Objects SrcPos)
objs = do
ns <- sepBy2 parseIdent $ reservedOp "::"
var <- parseVar
_ <- semi
return $ Objects ns var
where sepBy2 seg sep = do
x <- seg
_ <- sep
xs <- sepBy1 seg sep
return (x:xs)
constructObjs :: Parser (ConstructObjs SrcPos)
constructObjs = do
var <- parseVar
reservedOp "="
ns <- sepBy1 parseIdent $ reservedOp "::"
exprs <- parens $ commaSep expr
_ <- semi
return $ ConstructObjs ns var exprs
-- | Parse Function definition
function :: Parser (TopLevel SrcPos)
function = Func <$> parseType <*> parseIdent <*> fArgs <*> block
where fArgs = parens args
-- | Parse Function arguments
args :: Parser [(Type SrcPos, Ident SrcPos)]
args = commaSep arg
where arg :: Parser (Type SrcPos, Ident SrcPos)
arg = do
aType <- parseType
aName <- parseIdent
return (aType, aName)
-- | Parse a block of statements encased in braces
block :: Parser (BlockStmt SrcPos)
block = BlockStmt <$> braces stmts
-- | Parse multiple statements
stmts :: Parser [Stmt SrcPos]
stmts = many stmt
-- | Parse individual statement
stmt :: Parser (Stmt SrcPos)
stmt = try (Return <$> (reserved "return" *> (expr <* semi)))
<|> try (BStmt <$> block)
<|> try ifStmt
<|> try seqBlock
<|> try parBlock
<|> try forLoop
<|> try (FunCallStmt <$> (funCall <* semi))
<|> try (MethodStmt <$> (methodCall <* semi))
<|> try (AssignStmt <$> assign)
-- | Parse if statement
ifStmt :: Parser (Stmt SrcPos)
ifStmt = try (IfElse <$> parseIf <*> stmt <*> (reserved "else" *> stmt))
<|> If <$> parseIf <*> stmt
where parseIf = reserved "if" *> parens expr
-- | Parse block to be executed sequentially
seqBlock :: Parser (Stmt SrcPos)
seqBlock = Seq <$> (reserved "seq" *> block)
-- | Parse block to be executed in parallel
parBlock :: Parser (Stmt SrcPos)
parBlock = BStmt <$> (reserved "par" *> block)
-- | Parse Expression
expr :: Parser (Expr SrcPos)
expr = do
pos <- getPos
buildExpressionParser (exprOperators pos) expr'
where expr' :: Parser (Expr SrcPos)
expr' = try (ExpFunCall <$> funCall)
<|> try (ExpMethodCall <$> methodCall)
<|> try (ExpIdent <$> parseIdent)
<|> try (ExpLit <$> literal)
<|> parens expr
-- | Parse variable assignment
assign :: Parser (Assign SrcPos)
assign = Assign <$> parseType <*>
parseIdent <* parseCh '=' <*>
(expr <* semi)
-- | Parse literal
literal :: Parser (Literal SrcPos)
literal = Ch <$> getPos <*> ch
<|> Str <$> getPos <*> str
<|> Bl <$> getPos <*> bool
<|> Number <$> getPos <*> num
-- | Parse for loop
forLoop :: Parser (Stmt SrcPos)
forLoop = do
varName <- reserved "for" *> reservedOp "(" *> reserved "int" *> parseIdent -- Identifier to use
start <- reservedOp "=" *> expr
stop <- semi *> expr -- Stop
let getPlus = reservedOp "+=" *> expr
getMinus = reservedOp "-=" *> (ExpUnaryOp <$> (Neg <$> getPos) <*> expr)
step <- semi *> reserved (show varName) *> (try getPlus <|> getMinus) <* reservedOp ")"
inner <- block
return $ ForLoop varName start stop step inner
-- | Parse function call
funCall :: Parser (FunCall SrcPos)
funCall = FunCall <$> parseIdent <*> args'
where args' = parens $ commaSep expr
-- | Parse method call
methodCall :: Parser (MethodCall SrcPos)
methodCall = do
var <- parseVar
reservedOp "."
method <- parseIdent
args'' <- args'
return $ MethodCall var method args''
where args' = parens $ commaSep expr
-- | Parse variable
parseVar :: Parser (Var SrcPos)
parseVar = try (VarArrayElem <$> parseIdent <*> brackets expr)
<|> VarIdent <$> parseIdent
-- | Parse identifier
parseIdent :: Parser (Ident SrcPos)
parseIdent = Ident <$> getPos <*> ident
-- | Parse types
-- types can be either one of the basic types (int, bool, char, etc.)
-- or a pointer to a type
parseType :: Parser (Type SrcPos)
parseType =
try (reserved "__kernel") *> parseType' True
<|> parseType' False
parseType' :: Bool -> Parser (Type SrcPos)
parseType' inKernel = do
baseType <- NormalType <$> getPos <*> pure inKernel <*> typeT
ptrs <- many getPointer
return $ foldr (\ ptr cur -> (ptr cur)) baseType ptrs
where
getPointer :: Parser (Type SrcPos -> Type SrcPos)
getPointer = char '*' >> whiteSpace >> return PointerType
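-- For example (a sketch, assuming typeT yields the base-type name "int"):
--   "int **" parses to PointerType (PointerType (NormalType <pos> False "int"))
-- i.e. each '*' wraps the base type in one more PointerType.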
-- | Parse number
num :: Parser (Either Integer Double)
num = Right <$> try float
<|> Left <$> int
-- | Grab current source position
getPos :: Parser SrcPos
getPos = do
sp <- getPosition
return $ SrcPos (sourceLine sp) (sourceColumn sp)
| RossMeikleham/GPC | src/GPC/Parser.hs | bsd-2-clause | 7,596 | 0 | 19 | 1,896 | 2,414 | 1,227 | 1,187 | 154 | 2 |
-- | Controller library.
module HL.C
(module C
,App(..)
,C
,io)
where
import HL.Foundation (Handler)
import HL.Foundation as C (Route(..))
import HL.Types as C
import Control.Monad.Extra
import Data.Text as C (Text)
import Yesod as C
import Yesod.Blaze as C
-- | Controller type.
type C = Handler
| chrisdone/hl | src/HL/C.hs | bsd-3-clause | 311 | 0 | 6 | 61 | 96 | 66 | 30 | 13 | 0 |
-- Metric Temporal Logic (MTL) operators over a discrete time
-- domain consisting of sampled time values
module Copilot.Library.MTL
( eventually, eventuallyPrev, always, alwaysBeen,
until, release, since, Copilot.Library.MTL.trigger, matchingUntil,
matchingRelease, matchingSince, matchingTrigger ) where
import Copilot.Language
import qualified Prelude as P
import Copilot.Library.Utils
-- It is necessary to provide a positive number of time units
-- dist to each function, where the distance between the times
-- of any two adjacent clock samples is no less than dist
-- Eventually: True at time t iff s is true at some time t',
-- where (t + l) <= t' <= (t + u)
eventually :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool
eventually l u clk dist s = res clk s $ (u `P.div` dist) + 1
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ 0 = false
res c s k =
c <= maxes && ((mins <= c && s) || nextRes c s k)
nextRes c s k = res (drop 1 c) (drop 1 s) (k - 1)
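-- A minimal usage sketch (hypothetical streams; assumes a Word64 clock
-- sampled every 2 time units): require that "alarm" holds at some sample
-- between 4 and 10 time units from now.
--
--   clk :: Stream Word64
--   clk = [0] ++ (clk + 2)
--
--   alarmSoon :: Stream Bool -> Stream Bool
--   alarmSoon alarm = eventually 4 10 clk 2 alarm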
-- EventuallyPrev: True at time t iff s is true at some time t',
-- where (t - u) <= t' <= (t - l)
eventuallyPrev :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool
eventuallyPrev l u clk dist s = res clk s $ (u `P.div` dist) + 1
where
mins = clk - (constant u)
maxes = clk - (constant l)
res _ _ 0 = false
res c s k =
mins <= c && ((c <= maxes && s) || nextRes c s k)
nextRes c s k = res ([0] ++ c) ([False] ++ s) (k - 1)
-- Always: True at time t iff s is true at all times t'
-- where (t + l) <= t' <= (t + u)
always :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool
always l u clk dist s = res clk s $ (u `P.div` dist) + 1
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ 0 = true
res c s k =
c > maxes || ((mins <= c ==> s) && nextRes c s k)
nextRes c s k = res (drop 1 c) (drop 1 s) (k - 1)
-- AlwaysBeen: True at time t iff s is true at all times t'
-- where (t - u) <= t' <= (t - l)
alwaysBeen :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool
alwaysBeen l u clk dist s = res clk s $ (u `P.div` dist) + 1
where
mins = clk - (constant u)
maxes = clk - (constant l)
res _ _ 0 = true
res c s k =
c < mins || ((c <= maxes ==> s) && nextRes c s k)
nextRes c s k = res ([0] ++ c) ([True] ++ s) (k - 1)
-- Until: True at time t iff there exists a d with l <= d <= u
-- such that s1 is true at time (t + d),
-- and for all times t' with t <= t' < t + d, s0 is true
until :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
until l u clk dist s0 s1 = res clk s0 s1 $ (u `P.div` dist) + 1
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ _ 0 = false
res c s s' k =
c <= maxes && ((mins <= c && s') || (s && nextRes c s s' k))
nextRes c s s' k = res (drop 1 c) (drop 1 s) (drop 1 s') (k - 1)
-- Since: True at time t iff there exists a d with l <= d <= u
-- such that s1 is true at time (t - d),
-- and for all times t' with t - d < t' <= t, s0 is true
since :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
since l u clk dist s0 s1 = res clk s0 s1 $ (u `P.div` dist) + 1
where
mins = clk - (constant u)
maxes = clk - (constant l)
res _ _ _ 0 = false
res c s s' k =
mins <= c && ((c <= maxes && s') || (s && nextRes c s s' k))
nextRes c s s' k = res ([0] ++ c) ([True] ++ s) ([False] ++ s') (k - 1)
-- Release: true at time t iff for all d with l <= d <= u where there
-- is a sample at time (t + d), s1 is true at time (t + d),
-- or s0 has a true sample at some time t' with t <= t' < t + d
release :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
release l u clk dist s0 s1 =
(mins > clk || clk > maxes || s1) &&
(res (drop 1 clk) s0 (drop 1 s1) $ u `P.div` dist)
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ _ 0 = true
res c s s' k =
s || ((mins > c || c > maxes || s') && nextRes c s s' k)
nextRes c s s' k = res (drop 1 c) (drop 1 s) (drop 1 s') (k - 1)
-- Trigger: True at time t iff for all d with l <= d <= u where there
-- is a sample at time (t - d), s1 is true at time (t - d),
-- or s0 has a true sample at some time t' with t - d < t' <= t
trigger :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
trigger l u clk dist s0 s1 =
(mins > clk || clk > maxes || s1) &&
(res ([0] ++ clk) s0 ([True] ++ s1) $ u `P.div` dist)
where
mins = clk - (constant u)
maxes = clk - (constant l)
res _ _ _ 0 = true
res c s s' k =
s || ((mins > c || c > maxes || s') && nextRes c s s' k)
nextRes c s s' k = res ([0] ++ c) ([False] ++ s) ([True] ++ s') (k - 1)
-- Matching Variants
-- Matching Until: Same semantics as Until, except with both s1 and s0
-- needing to hold at time (t + d) instead of just s1
matchingUntil :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
matchingUntil l u clk dist s0 s1 = res clk s0 s1 $ (u `P.div` dist) + 1
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ _ 0 = false
res c s s' k =
c <= maxes && s && ((mins <= c && s') || nextRes c s s' k)
nextRes c s s' k = res (drop 1 c) (drop 1 s) (drop 1 s') (k - 1)
-- Matching Since: Same semantics as Since, except with both s1 and s0
-- needing to hold at time (t - d) instead of just s1
matchingSince :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
matchingSince l u clk dist s0 s1 = since l u clk dist s0 (s0 && s1)
-- Matching Release: Same semantics as Release, except with
-- s1 or s0 needing to hold at time (t + d) instead of just s1
matchingRelease :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
matchingRelease l u clk dist s0 s1 = res clk s0 s1 $ (u `P.div` dist) + 1
where
mins = clk + (constant l)
maxes = clk + (constant u)
res _ _ _ 0 = true
res c s s' k =
s || ((mins > c || c > maxes || s') && nextRes c s s' k)
nextRes c s s' k = res (drop 1 c) (drop 1 s) (drop 1 s') (k - 1)
-- Matching Trigger: Same semantics as Trigger, except with
-- s1 or s0 needing to hold at time (t - d) instead of just s1
matchingTrigger :: ( Typed a, Integral a ) =>
a -> a -> Stream a -> a -> Stream Bool -> Stream Bool -> Stream Bool
matchingTrigger l u clk dist s0 s1 =
Copilot.Library.MTL.trigger l u clk dist s0 (s0 || s1)
| fredyr/copilot-libraries | src/Copilot/Library/MTL.hs | bsd-3-clause | 6,655 | 0 | 14 | 1,820 | 2,790 | 1,454 | 1,336 | 108 | 2 |
module Control.Parallel.MPI.Utils (asBool, asInt, asEnum, debugOut) where
import Foreign
import Foreign.C.Types
import System.IO.Unsafe as Unsafe
asBool :: (Ptr CInt -> IO ()) -> IO Bool
asBool f =
alloca $ \ptr -> do
f ptr
res <- peek ptr
return $ res /= 0
asInt :: (Ptr CInt -> IO ()) -> IO Int
asInt f =
alloca $ \ptr -> do
f ptr
res <- peek ptr
return $ fromIntegral res
asEnum :: Enum a => (Ptr CInt -> IO ()) -> IO a
asEnum f =
alloca $ \ptr -> do
f ptr
res <- peek ptr
return $ toEnum $ fromIntegral res
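-- A small usage sketch: these helpers turn C-style out-parameters into plain
-- Haskell values. Assuming a hypothetical binding
-- @c_initialized :: Ptr CInt -> IO ()@ that writes its result through the
-- pointer, one would use:
--
-- > initialized <- asBool c_initialized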
debugOut :: Show a => a -> Bool
debugOut x = Unsafe.unsafePerformIO $ do
print x
return False
| bjpop/haskell-mpi | src/Control/Parallel/MPI/Utils.hs | bsd-3-clause | 658 | 0 | 10 | 176 | 303 | 148 | 155 | 26 | 1 |
{-|
Module : Numeric.ER.Real.Arithmetic.LinearSolver
    Description :  validated solver for systems of linear equations with interval coefficients
Copyright : (c) 2008 Jan Duracz, Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
A simple validated solver for systems of linear equations with
interval coefficients. It uses a naive splitting approach and is
therefore very slow.
-}
module Numeric.ER.Real.Arithmetic.LinearSolver
(
linearSolver
)
where
import qualified Numeric.ER.Real.Approx as RA
import qualified Numeric.ER.BasicTypes.DomainBox as DBox
import Numeric.ER.BasicTypes.DomainBox (VariableID(..), DomainBox, DomainBoxMappable, DomainIntBox)
import Numeric.ER.BasicTypes
import Data.List
import Data.Maybe
--import qualified Data.Map as Map
-- the following is code for unit testing
{-
import Numeric.ER.Real.DefaultRepr
eq1 :: (Box IRA, IRA)
eq1 = (mkBox [1,2,1], 2)
eq2 = (mkBox [2,4,2], 4)
eq3 = (mkBox [2,4,4], 5)
eqs = [eq1,eq2,eq3]
box = mkBox $ replicate 3 $ (-1)RA.\/1
x1 = (-13/16)RA.\/(-3/4) :: IRA
x2 = (5/16)RA.\/(3/8) :: IRA
tol = 2^^(-20) :: IRA
mkBox :: [IRA] -> Box IRA
mkBox iras = Map.fromList $ zip [1..] iras
-}
linearSolver ::
(RA.ERIntApprox ira,
DomainIntBox box varid ira,
DomainBoxMappable box box varid ira ira) =>
[(box, ira)]
{-^ the equations;
each equation has coefficients of linear terms
+ constant term -} ->
box {-^ the domain of the variables -} ->
ira {-^ an upper bound on the size of an acceptable solution box -} ->
Maybe box
{-^
A box containing at least one solution within the domain;
Nothing if there is no solution.
-}
linearSolver eqns domBox tolerance =
linearSolver' eqns [domBox] tolerance
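-- A small usage sketch, reusing the commented-out test data above (@eqs@
-- encodes x + 2y + z = 2, 2x + 4y + 2z = 4 and 2x + 4y + 4z = 5, @box@ is
-- [-1,1]^3 and @tol@ is 2^(-20)):
--
-- > mSolutionBox = linearSolver eqs box tol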
linearSolver' eqns [] tolerance =
Nothing
linearSolver' eqns (b:bs) tolerance
| not $ evalEqns b eqns = -- no solutions in the box
linearSolver' eqns bs tolerance
| belowTolerance =
Just b
| otherwise =
linearSolver' eqns (splitBox b ++ bs) tolerance
where
belowTolerance =
and $ map (\d -> width d `RA.ltSingletons` tolerance) $ DBox.elems b
evalEqns box eqns =
and $ map (evalEqn box) eqns
{-|
returns true iff there exists a solution to the equation in the box
-}
evalEqn box (expr,cons) =
cons `RA.refines` (evalExpr expr box)
where
evalExpr expr box = sum $ DBox.elems $ DBox.intersectionWith (*) expr box
{-|
returns the list of (two) boxes resulting from splitting the widest edge
of the box in half
-}
splitBox box =
[DBox.insert k (iLg RA.\/ iMg) box,
DBox.insert k (iMg RA.\/ iRg) box]
where
iMg = (iLg+iRg)/2
iLg = incrementGranularity iL
iRg = incrementGranularity iR
(iL,iR) = RA.bounds i
i = DBox.lookup "ER: LinearSolver: splitBox: " k box
k = widestVar box
incrementGranularity x =
RA.setMinGranularityOuter (RA.getGranularity x + 1) x
widestVar box =
fst $ DBox.bestSplit box
width i =
snd $ RA.bounds (iR-iL)
where
(iL,iR) = RA.bounds i
| michalkonecny/polypaver | src/Numeric/ER/Real/Arithmetic/LinearSolver.hs | bsd-3-clause | 3,225 | 0 | 13 | 817 | 607 | 328 | 279 | 51 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Spec.ExecuteF64 where
import Spec.Decode
import Spec.Machine
import Spec.ExecuteF (getRoundMode, updateFFlags, isNaN)
import Data.Int
import Data.Word
import Data.Bits
import SoftFloat
import Prelude hiding (isNaN)
execute :: forall p t. (RiscvMachine p t) => InstructionF64 -> p ()
execute (Fcvt_l_s rd rs1 rm) = do
roundMode <- getRoundMode rm
x <- getFPRegister rs1
let Result y flags = f32ToI64 roundMode (fromIntegral x :: Word32)
updateFFlags flags
-- Special case for the softfloat library.
let result | isNaN x || (y == 2^63 && not (testBit x 31)) = 2^63 - 1
| otherwise = y
setRegister rd (fromIntegral result)
execute (Fcvt_lu_s rd rs1 rm) = do
roundMode <- getRoundMode rm
x <- getFPRegister rs1
let Result y flags = f32ToUi64 roundMode (fromIntegral x :: Word32)
updateFFlags flags
-- Another special case for the softfloat library.
let result | not (isNaN x) && testBit x 31 = 0
| otherwise = y
setRegister rd (fromIntegral (fromIntegral result :: Int64))
execute (Fcvt_s_l rd rs1 rm) = do
roundMode <- getRoundMode rm
x <- getRegister rs1
let Result y flags = i64ToF32 roundMode (fromIntegral x :: Int64)
updateFFlags flags
setFPRegister rd (fromIntegral y)
execute (Fcvt_s_lu rd rs1 rm) = do
roundMode <- getRoundMode rm
x <- getRegister rs1
let Result y flags = ui64ToF32 roundMode (fromIntegral x :: Word64)
updateFFlags flags
setFPRegister rd (fromIntegral y)
execute inst = error $ "dispatch bug: " ++ show inst
| mit-plv/riscv-semantics | src/Spec/ExecuteF64.hs | bsd-3-clause | 1,550 | 0 | 18 | 311 | 576 | 271 | 305 | 40 | 1 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
-- Here, we will dump the data contained in happythings_form.txt into
-- a sqlite database.
module SqlTable where
import Control.Exception
import Data.Typeable
import Text.RawString.QQ
import Database.SQLite.Simple
import Database.SQLite.Simple.Types
import Data.Text (Text)
-- import qualified Data.Text as T
import Control.Monad (forM_, forM)
import Data.String.Utils (strip)
import Data.Char (isControl)
-- import Debug.Trace (trace)
import System.Random
fname :: String
fname = "happythings_form.txt"
tblname :: String
tblname = "happy.db"
data Item =
Item {
itemId :: Integer
, content :: Text
}
instance Show Item where
show (Item id_ content_) = unwords ["Item", show id_, show content_]
data DuplicateData =
DuplicateData
deriving (Eq, Show, Typeable)
instance Exception DuplicateData
instance FromRow Item where
fromRow = Item <$> field <*> field
instance ToRow Item where
toRow (Item id_ content_) = toRow (id_, content_)
insertItemQ :: Query
insertItemQ = "INSERT INTO items VALUES (?, ?)"
getItemQ :: Query
getItemQ = "SELECT * FROM items WHERE id = ?"
createItems :: Query
createItems = [r|
CREATE TABLE IF NOT EXISTS items
(id INTEGER PRIMARY KEY AUTOINCREMENT,
content TEXT UNIQUE)
|]
-- Note: opens and closes connection within
createDatabase :: IO ()
createDatabase = do
conn <- open tblname
execute_ conn createItems
close conn
-- Note: opens and closes connection within
fillDatabase :: IO ()
fillDatabase = do
conn <- open tblname
content_ <- readFile fname
let happyLines = lines content_
happyTups = map ((,) Null . stripS) happyLines
forM_ happyTups (execute conn insertItemQ)
close conn
getCount :: Connection -> IO Integer
getCount conn = do
[[c]] <- query_ conn "SELECT COUNT(*) FROM items"
return c
getItem :: Connection -> Integer -> IO (Maybe Item)
getItem conn id_ = do
results <- query conn getItemQ (Only id_)
case results of
[] -> return Nothing
[item] -> return $ Just item
_ -> throwIO DuplicateData -- we don't catch this
getRandItem :: Connection -> IO (Maybe Item)
getRandItem conn = do
count <- getCount conn
randRow <- randomRIO (1, count)
getItem conn randRow
getRandItems :: Connection -> Int -> IO [Maybe Item]
getRandItems conn n = forM [1..n] (const $ getRandItem conn)
-- Inefficient, but doesn't matter
stripS :: String -> String
stripS = takeWhile (not . isControl) . dropWhile isControl . strip
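-- A typical end-to-end run (a sketch; assumes "happythings_form.txt" is present
-- in the working directory):
--
-- > demoRun :: IO ()
-- > demoRun = do
-- >   createDatabase
-- >   fillDatabase
-- >   conn <- open tblname
-- >   items <- getRandItems conn 3
-- >   mapM_ print items
-- >   close conn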
| arcticmatt/happy-site | src/SqlTable.hs | bsd-3-clause | 2,490 | 0 | 14 | 466 | 708 | 369 | 339 | 69 | 3 |
-- | Full compiler tests.
module Language.Java.Paragon.ParacSpec (main, spec) where
import Test.Hspec
import Control.Monad
import System.Directory
import System.FilePath
import Language.Java.Paragon.Error
import Language.Java.Paragon.Interaction.Flags
import Language.Java.Paragon.Parac
-- | To be able to run this module from GHCi.
main :: IO ()
main = hspec spec
testDir :: FilePath
testDir = "test" </> "paractests"
-- | Main specification function.
spec :: Spec
spec = do
describe "Basic tests" $ do
it "The first elementary test" $ do
cf <- getCompFiles $ testDir </> "elementary"
err <- mapM callParac cf
-- With the current phases, there should be no errors. However with the
-- policy constraint solving phase, there should be.
mapM_ (\e -> e `shouldBe` []) err
-- | Given a filepath that contains a .compile file which instructs which files
-- should be compiled and in which order, relatively to that filepath. Runs the
-- paragon compiler on these files and returns the total list of errors.
callParac :: FilePath -> IO [Error]
callParac fp = do
files <- fmap lines $ readFile (fp </> ".compile")
res <- mapM (\file-> parac [PiPath fp, SourcePath fp] file) files
return $ concat res
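-- For example (hypothetical), a @.compile@ file for a test case with two
-- Paragon sources to be compiled in this order would contain just their file
-- names, one per line:
--
-- > Policy.para
-- > Main.para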
-- | Returns all paths that contain .compile files found under the provided
-- path.
getCompFiles :: FilePath -> IO [FilePath]
getCompFiles fp = do
cont <- getDirectoryContents fp
let posDirs = map (fp </>) $ filter (\x -> head x /= '.') cont
dirs <- filterM doesDirectoryExist posDirs -- Filter possible directories
rec <- liftM concat $ mapM getCompFiles dirs -- Search subfolders
if ".compile" `elem` cont -- Possibly add this path
then return $ fp : rec
else return rec
{- Code for calling java compiler:
import System.Cmd (system)
import System.Exit (ExitCode(..))
exits <- mapM (\f -> system $ "javac " ++ (paraToJava (fp </> f))) files
-- Should we clean up class files here?
mapM_ (shouldBe ExitSuccess) exits
-- | Changes file extension from .para to .java
paraToJava :: FilePath -> FilePath
paraToJava file =
let (f,_ext) = splitExtension file
in f <.> "java"
-}
| bvdelft/paragon | test/Language/Java/Paragon/ParacSpec.hs | bsd-3-clause | 2,182 | 1 | 17 | 452 | 412 | 218 | 194 | 33 | 2 |
module Servant.Server.Auth.Token.RocksDB (
RocksDBBackendT
, runRocksDBBackendT
, RocksDBEnv
, newRocksDBEnv
) where
import Control.Monad.Base
import Control.Monad.Catch
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Control.Monad.Trans.Resource
import Servant.Server
import Servant.Server.Auth.Token.Config
import Servant.Server.Auth.Token.RocksDB.Schema (RocksDBEnv, newRocksDBEnv)
import Servant.Server.Auth.Token.Model
import qualified Servant.Server.Auth.Token.RocksDB.Schema as S
-- | Monad transformer that implements storage backend
newtype RocksDBBackendT m a = RocksDBBackendT { unRocksDBBackendT :: ReaderT (AuthConfig, RocksDBEnv) (ExceptT ServantErr (ResourceT m)) a }
deriving (Functor, Applicative, Monad, MonadIO, MonadError ServantErr, MonadReader (AuthConfig, RocksDBEnv), MonadThrow, MonadCatch)
deriving instance MonadBase IO m => MonadBase IO (RocksDBBackendT m)
deriving instance (MonadBase IO m, MonadThrow m, MonadIO m) => MonadResource (RocksDBBackendT m)
instance Monad m => HasAuthConfig (RocksDBBackendT m) where
getAuthConfig = fst <$> RocksDBBackendT ask
newtype StMRocksDBBackendT m a = StMRocksDBBackendT { unStMRocksDBBackendT :: StM (ReaderT (AuthConfig, RocksDBEnv) (ExceptT ServantErr m)) a }
instance MonadBaseControl IO m => MonadBaseControl IO (RocksDBBackendT m) where
type StM (RocksDBBackendT m) a = StMRocksDBBackendT m a
liftBaseWith f = RocksDBBackendT $ liftBaseWith $ \q -> f (fmap StMRocksDBBackendT . q . unRocksDBBackendT)
restoreM = RocksDBBackendT . restoreM . unStMRocksDBBackendT
-- | Execute backend action with the given RocksDB environment.
runRocksDBBackendT :: MonadBaseControl IO m => AuthConfig -> RocksDBEnv -> RocksDBBackendT m a -> m (Either ServantErr a)
runRocksDBBackendT cfg db ma = runResourceT . runExceptT $ runReaderT (unRocksDBBackendT ma) (cfg, db)
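-- A usage sketch (hypothetical): assuming @cfg :: AuthConfig@ and
-- @env :: RocksDBEnv@ have already been constructed and @login@ is some user
-- login, a storage action can be run with:
--
-- > eres <- runRocksDBBackendT cfg env (getUserImplByLogin login)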
-- | Helper to extract RocksDB reference
getEnv :: Monad m => RocksDBBackendT m RocksDBEnv
getEnv = snd <$> RocksDBBackendT ask
-- | Helper to lift low-level RocksDB queries to backend monad
liftEnv :: Monad m => (RocksDBEnv -> ResourceT m a) -> RocksDBBackendT m a
liftEnv f = do
e <- getEnv
RocksDBBackendT . lift . lift $ f e
instance (MonadBase IO m, MonadIO m, MonadThrow m, MonadMask m) => HasStorage (RocksDBBackendT m) where
getUserImpl = liftEnv . flip S.load
getUserImplByLogin = liftEnv . S.getUserImplByLogin
listUsersPaged page size = liftEnv $ S.listUsersPaged page size
getUserImplPermissions = liftEnv . S.getUserImplPermissions
deleteUserPermissions = liftEnv . S.deleteUserPermissions
insertUserPerm = liftEnv . S.insertUserPerm
insertUserImpl = liftEnv . S.insertUserImpl
replaceUserImpl i v = liftEnv $ S.replaceUserImpl i v
deleteUserImpl = liftEnv . S.deleteUserImpl
hasPerm i p = liftEnv $ S.hasPerm i p
getFirstUserByPerm = liftEnv . S.getFirstUserByPerm
selectUserImplGroups = liftEnv . S.selectUserImplGroups
clearUserImplGroups = liftEnv . S.clearUserImplGroups
insertAuthUserGroup = liftEnv . S.insertAuthUserGroup
insertAuthUserGroupUsers = liftEnv . S.insertAuthUserGroupUsers
insertAuthUserGroupPerms = liftEnv . S.insertAuthUserGroupPerms
getAuthUserGroup = liftEnv . flip S.load
listAuthUserGroupPermissions = liftEnv . S.listAuthUserGroupPermissions
listAuthUserGroupUsers = liftEnv . S.listAuthUserGroupUsers
replaceAuthUserGroup i v = liftEnv $ S.replaceAuthUserGroup i v
clearAuthUserGroupUsers = liftEnv . S.clearAuthUserGroupUsers
clearAuthUserGroupPerms = liftEnv . S.clearAuthUserGroupPerms
deleteAuthUserGroup = liftEnv . S.deleteAuthUserGroup
listGroupsPaged page size = liftEnv $ S.listGroupsPaged page size
setAuthUserGroupName i n = liftEnv $ S.setAuthUserGroupName i n
setAuthUserGroupParent i mp = liftEnv $ S.setAuthUserGroupParent i mp
insertSingleUseCode = liftEnv . S.insertSingleUseCode
setSingleUseCodeUsed i mt = liftEnv $ S.setSingleUseCodeUsed i mt
getUnusedCode c i t = liftEnv $ S.getUnusedCode c i t
invalidatePermanentCodes i t = liftEnv $ S.invalidatePermanentCodes i t
selectLastRestoreCode i t = liftEnv $ S.selectLastRestoreCode i t
insertUserRestore = liftEnv . S.insertUserRestore
findRestoreCode i rc t = liftEnv $ S.findRestoreCode i rc t
replaceRestoreCode i v = liftEnv $ S.replaceRestoreCode i v
findAuthToken i t = liftEnv $ S.findAuthToken i t
findAuthTokenByValue t = liftEnv $ S.findAuthTokenByValue t
insertAuthToken = liftEnv . S.insertAuthToken
replaceAuthToken i v = liftEnv $ S.replaceAuthToken i v
{-# INLINE getUserImpl #-}
{-# INLINE getUserImplByLogin #-}
{-# INLINE listUsersPaged #-}
{-# INLINE getUserImplPermissions #-}
{-# INLINE deleteUserPermissions #-}
{-# INLINE insertUserPerm #-}
{-# INLINE insertUserImpl #-}
{-# INLINE replaceUserImpl #-}
{-# INLINE deleteUserImpl #-}
{-# INLINE hasPerm #-}
{-# INLINE getFirstUserByPerm #-}
{-# INLINE selectUserImplGroups #-}
{-# INLINE clearUserImplGroups #-}
{-# INLINE insertAuthUserGroup #-}
{-# INLINE insertAuthUserGroupUsers #-}
{-# INLINE insertAuthUserGroupPerms #-}
{-# INLINE getAuthUserGroup #-}
{-# INLINE listAuthUserGroupPermissions #-}
{-# INLINE listAuthUserGroupUsers #-}
{-# INLINE replaceAuthUserGroup #-}
{-# INLINE clearAuthUserGroupUsers #-}
{-# INLINE clearAuthUserGroupPerms #-}
{-# INLINE deleteAuthUserGroup #-}
{-# INLINE listGroupsPaged #-}
{-# INLINE setAuthUserGroupName #-}
{-# INLINE setAuthUserGroupParent #-}
{-# INLINE insertSingleUseCode #-}
{-# INLINE setSingleUseCodeUsed #-}
{-# INLINE getUnusedCode #-}
{-# INLINE invalidatePermanentCodes #-}
{-# INLINE selectLastRestoreCode #-}
{-# INLINE insertUserRestore #-}
{-# INLINE findRestoreCode #-}
{-# INLINE replaceRestoreCode #-}
{-# INLINE findAuthToken #-}
{-# INLINE findAuthTokenByValue #-}
{-# INLINE insertAuthToken #-}
{-# INLINE replaceAuthToken #-}
| NCrashed/servant-auth-token | servant-auth-token-rocksdb/src/Servant/Server/Auth/Token/RocksDB.hs | bsd-3-clause | 5,943 | 0 | 12 | 897 | 1,280 | 693 | 587 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
----------------------------------------------------------------------------------
-- |
-- Module : Tct.Processor.Bounds.Automata
-- Copyright : (c) Martin Avanzini <[email protected]>,
-- Georg Moser <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- License : LGPL (see COPYING)
-- Maintainer : Martin Avanzini <[email protected]>,
-- Andreas Schnabl <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module implements automata functionality as employed by
-- the bounds processor.
-----------------------------------------------------------------------------------
module Tct.Trs.Encoding.Bounds.Automata where
import Control.Monad.State.Class (MonadState (..))
import qualified Control.Monad.State.Lazy as St
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.List (nub)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Rewriting.Term as R
-- TODO: move utility functions
type MemoAction k a = St.State (M.Map k a)
memo :: Ord k => k -> MemoAction k a a -> MemoAction k a a
memo k m = do
s <- St.get
case M.lookup k s of
Just old -> return old
Nothing -> do { new <- m;
St.modify (M.insert k new);
return new}
runMemoAction :: (Ord k) => MemoAction k a b -> b
runMemoAction ma = fst $ St.runState ma M.empty
liftMemo :: (Ord k) => (k -> a) -> k -> MemoAction k a a
liftMemo f k = memo k (return $ f k)
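-- A small usage sketch (hypothetical, not used elsewhere in this module):
-- a memoised Fibonacci built from 'memo' and 'runMemoAction':
--
-- > fibMemo :: Integer -> Integer
-- > fibMemo n0 = runMemoAction (go n0)
-- >   where go n | n < 2     = return n
-- >              | otherwise = memo n ((+) <$> go (n - 1) <*> go (n - 2))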
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM b t e = do g <- b
if g then t else e
listProduct :: [[a]] -> [[a]]
listProduct [] = [[]]
listProduct (xs:xss) = foldl f [] xs
where f a x = map (\ xs' -> x:xs') (listProduct xss) ++ a
snub :: Ord a => [a] -> [a]
snub = S.toList . S.fromList
data Strictness = StrictRule | WeakRule
-- | This datatype represents the /enrichment/ employed.
data Enrichment =
Match -- ^ Matchbounds.
| Roof -- ^ Roofbounds.
| Top -- ^ Topbounds.
deriving (Enum, Bounded, Eq)
instance Show Enrichment where
show Match = "match"
show Roof = "roof"
show Top = "top"
data WeakBoundedness = WeakMayExceedBound | WeakMayNotExceedBound
-- TODO:MA: which types should be strict?
newtype Symbol = Symbol Int deriving (Eq, Ord, Show, Enum, Read)
type Label = Int
type LSym = (Symbol,Label)
type State = Int
data LTerm
= F LSym [LTerm]
| S State
deriving (Eq, Ord, Read, Show)
data Rule
= Collapse LSym [State] State
| Epsilon State State
deriving (Eq, Ord, Show)
-- TODO:MA: sym -> ... in both automata
type FwdAutomaton = IntMap (IntMap (Map [State] (Set State)))
-- sym -> l -> args -> qs <=> forall q \in qs. sym_l(args) -> q \in A
type BwdAutomaton = IntMap (IntMap (IntMap (Set [State])))
-- sym -> q -> l -> argss <=> forall args \in argss. sym_l(args) -> q \in A
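-- For example (hypothetical), after inserting the single rule
-- @Collapse (Symbol 0, 1) [2,3] 4@ (i.e. f0_1(2,3) -> 4), the two indexes hold:
--
-- > fwd: 0 -> 1 -> [2,3] -> {4}
-- > bwd: 0 -> 4 -> 1 -> {[2,3]}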
data Automaton = Automaton
{ fwd :: FwdAutomaton
, bwd :: BwdAutomaton
, epsilonTransitions :: Set (State,State)
, fresh :: State
, maxlabel :: Label
, finalStates :: [State] }
deriving (Eq, Show)
size :: LTerm -> Int
size (F _ ts) = 1 + sum (map size ts)
size (S _) = 0
isEpsilonRule :: Rule -> Bool
isEpsilonRule Epsilon{} = True
isEpsilonRule Collapse{} = False
lift :: Symbol -> Label -> LSym
lift = (,)
base :: LSym -> Symbol
base = fst
height :: LSym -> Label
height = snd
baseTerm :: LTerm -> R.Term Symbol v
baseTerm (F f ts) = R.Fun (base f) $ map baseTerm ts
baseTerm (S _) = error "Cannot convert a labeled term with Tree automaton states back to a normal term"
toRules :: Automaton -> [Rule]
toRules a = [Collapse (toEnum f,l) args q | (f,m1) <- IM.toList $ fwd a
, (l,m2) <- IM.toList m1
, (args, qs) <- M.toList m2
, q <- S.toList qs]
++ [Epsilon p q | (p,q) <- S.toList (epsilonTransitions a)]
states :: Automaton -> [State]
states = nub . concatMap statesRl . toRules where
statesRl (Epsilon p q) = [p,q]
statesRl (Collapse _ as q) = q : as
fromRules :: [State] -> [Rule] -> Automaton
fromRules fss = foldl (flip insert) empty {finalStates = fss, fresh = maximum (0:fss) + 1}
empty :: Automaton
empty = Automaton IM.empty IM.empty S.empty 0 0 []
freshState :: Automaton -> (State, Automaton)
freshState a = (fr, Automaton (fwd a) (bwd a) (epsilonTransitions a) (fr + 1) (maxlabel a) (finalStates a))
where fr = fresh a
freshStates :: Int -> Automaton -> ([State], Automaton)
freshStates 0 a = ([], a)
freshStates i a = case freshStates (i - 1) a' of (qs, a'') -> (q:qs,a'')
where (q, a') = freshState a
fwdInsert :: LSym -> [State] -> State -> FwdAutomaton -> FwdAutomaton
fwdInsert (f,l) qs q = IM.alter alter1 (fromEnum f)
where default3 = S.singleton q
default2 = M.singleton qs default3
default1 = IM.singleton l default2
alter1 = Just . maybe default1 (IM.alter alter2 l)
alter2 = Just . maybe default2 (M.alter alter3 qs)
alter3 = Just . maybe default3 (S.insert q)
bwdInsert :: LSym -> [State] -> State -> BwdAutomaton -> BwdAutomaton
bwdInsert (f,l) qs q = IM.alter alter1 (fromEnum f)
where default3 = S.singleton qs
default2 = IM.singleton l default3
default1 = IM.singleton q default2
alter1 = Just . maybe default1 (IM.alter alter2 q)
alter2 = Just . maybe default2 (IM.alter alter3 l)
alter3 = Just . maybe default3 (S.insert qs)
-- MA:TODO verify that fresh is always "fresh"
insert :: Rule -> Automaton -> Automaton
insert (Collapse sym args q) (Automaton f b e fr l iss) = Automaton (fwdInsert sym args q f) (bwdInsert sym args q b) e (maximum $ [fr, q + 1] ++ [a + 1 | a <- args]) (max l $ height sym) iss
insert (Epsilon p q) (Automaton f b e fr l iss) = Automaton f' b' (S.insert (p,q) e) (maximum [fr, p + 1, q + 1]) l iss
where
f' = IM.map (IM.map $ M.map addForwardRight) f
addForwardRight ps = if p `S.member` ps then S.insert q ps else ps
b' = IM.map addBackwardRight b
addBackwardRight mp = case IM.lookup p mp of
Just mp' -> addBackwardRight2 mp' mp
Nothing -> mp
addBackwardRight2 = IM.insertWith addBackwardRight3 q
addBackwardRight3 = IM.unionWith S.union
-- f'' = IM.map (IM.map addForwardLeft) f'
-- addForwardLeft mp = foldr addForwardLeft2 mp (M.keys mp)
-- addForwardLeft2 k mp = S.fold (addForwardLeft3 k) mp (modifiedArgs k)
-- addForwardLeft3 k k' mp = M.insertWith S.union k' (fromJust $ M.lookup k mp) mp
-- b'' = IM.map (IM.map $ IM.map $ S.unions . S.toList . S.map modifiedArgs) b'
-- modifiedArgs [] = S.singleton []
-- modifiedArgs (q':qs) | q == q' = let subresult = modifiedArgs qs in S.map ((:) p) subresult `S.union` S.map ((:) q) subresult
-- | otherwise = S.map ((:) q') $ modifiedArgs qs
-- fwd = IM.map (IM.map fixFwd) f
-- where
-- fixFwd m = M.fromList [ (args',concatMapS clState rs) | (args,rs) <- M.toList m, args' <- clArgs args]
-- bwd = IM.map fixBwd b
-- where
-- fixBwd m1 = IM.alter fixQ q m2
-- where
-- m2 = IM.map fixArgs m1
-- fixQ Nothing = IM.lookup p m2
-- fixQ (Just mq) =
-- case IM.lookup p m2 of
-- Nothing -> Just mq
-- Just mp -> Just (mp `union` mq)
-- union = IM.unionWith S.union
-- fixArgs = IM.map (S.fromList . concatMap clArgs . S.toList)
-- concatMapS f = S.fromList . concatMap f . S.toList
-- clState r = if r == p then [r,q] else [r]
-- clArgs [] = return []
-- clArgs (a:as) = do
-- a' <- clState a
-- as' <- clArgs as
-- return (a':as)
mkFreshState :: MonadState Automaton m => m State
mkFreshState = do a <- St.get
let (qi,a') = freshState a
St.put a'
return qi
mkInsertRule :: MonadState Automaton m => Rule -> m ()
mkInsertRule r = St.modify (insert r)
step :: Automaton -> LSym -> [State] -> Set State
-- q \in (step A f_l qs) <=> f_l(qs) -> q
step a (f,l) qs = fromMaybe S.empty look
where look = do m1 <- IM.lookup (fromEnum f) (fwd a)
m2 <- IM.lookup l m1
M.lookup qs m2
bstep :: Automaton -> LSym -> State -> Set [State]
-- qs \in bstep f_l q <=> f_l(qs) -> q
bstep a (f,l) q = fromMaybe S.empty look
where look = do m1 <- IM.lookup (fromEnum f) (bwd a)
m2 <- IM.lookup q m1
IM.lookup l m2
bstepUL :: Automaton -> Symbol -> State -> [(Label,Set [State])]
-- (l,[...,qs,...]) \in bstep f q <=> f_l(qs) -> q
bstepUL a f q = fromMaybe [] look
where look = do m1 <- IM.lookup (fromEnum f) (bwd a)
m2 <- IM.lookup q m1
return $ IM.toList m2
rulesDefiningUL :: Automaton -> Symbol -> [(Label,[State], Set State)]
-- (l,qs,[...,q,...]) \in rulesDefining f <=> f_l(qs) -> q
rulesDefiningUL a f = fromMaybe [] look
where look = do m1 <- IM.lookup (fromEnum f) (fwd a)
return [(l,qs,rs) | (l, m2) <- IM.toList m1
, (qs,rs) <- M.toList m2]
rulesDefining :: Automaton -> LSym -> [([State], Set State)]
-- (qs,[...,q,...]) \in rulesDefining f_l <=> f_l(qs) -> q
rulesDefining a (f,l) = fromMaybe [] look
where look = do m1 <- IM.lookup (fromEnum f) (fwd a)
m2 <- IM.lookup l m1
return $ M.toList m2
symbols :: Automaton -> Set LSym
symbols a = IM.foldrWithKey f S.empty (fwd a)
where f fn m s = S.fromList [(toEnum fn,l) | l <- IM.keys m] `S.union` s
| ComputationWithBoundedResources/tct-trs | src/Tct/Trs/Encoding/Bounds/Automata.hs | bsd-3-clause | 10,266 | 0 | 15 | 3,037 | 3,129 | 1,663 | 1,466 | 166 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module TCReader where
import Database.TokyoCabinet.Storable
import Control.Monad
import Control.Monad.Reader
import Database.TokyoCabinet
(
TCM
, TCDB
, HDB
, BDB
, FDB
, new
, runTCM
, OpenMode(..)
)
import qualified Database.TokyoCabinet as TC
newtype TCReader tc a =
TCReader { runTCR :: ReaderT tc TCM a } deriving (Monad, MonadReader tc)
runTCReader :: TCReader tc a -> tc -> TCM a
runTCReader = runReaderT . runTCR
open :: (TCDB tc) => String -> [OpenMode] -> TCReader tc Bool
open name mode = do tc <- ask
TCReader $ lift (TC.open tc name mode)
close :: (TCDB tc) => TCReader tc Bool
close = ask >>= (TCReader . lift . TC.close)
get :: (Storable k, Storable v, TCDB tc) => k -> TCReader tc (Maybe v)
get key = do tc <- ask
TCReader $ lift (TC.get tc key)
put :: (Storable k, Storable v, TCDB tc) => k -> v -> TCReader tc Bool
put key val = do tc <- ask
TCReader $ lift (TC.put tc key val)
kvstore :: (Storable k, Storable v, TCDB tc) => [(k, v)] -> TCReader tc Bool
kvstore kv = do open "abcd.tch" [OWRITER, OCREAT]
mapM_ (uncurry put) kv
close
main :: IO ()
main = runTCM $ do h <- new :: TCM HDB
let kv =[ ("foo", 112)
, ("bar", 200)
, ("baz", 300) ] :: [(String, Int)]
runTCReader (kvstore kv) h >> return ()
| tom-lpsd/tokyocabinet-haskell | examples/TCReader.hs | bsd-3-clause | 1,495 | 0 | 12 | 470 | 580 | 309 | 271 | 41 | 1 |
module Database.PostgreSQL.PQTypes.Transaction (
Savepoint(..)
, withSavepoint
, withTransaction
, begin
, commit
, rollback
, withTransaction'
, begin'
, commit'
, rollback'
) where
import Control.Monad
import Control.Monad.Catch
import Data.Function
import Data.String
import Data.Typeable
import Data.Monoid.Utils
import Database.PostgreSQL.PQTypes.Class
import Database.PostgreSQL.PQTypes.Internal.Exception
import Database.PostgreSQL.PQTypes.SQL.Raw
import Database.PostgreSQL.PQTypes.Transaction.Settings
import Database.PostgreSQL.PQTypes.Utils
-- | Wrapper that represents savepoint name.
newtype Savepoint = Savepoint (RawSQL ())
instance IsString Savepoint where
fromString = Savepoint . fromString
-- | Create a savepoint and roll back to it if given monadic action throws.
-- This may only be used if a transaction is already active. Note that it
-- provides something like \"nested transaction\".
--
-- See <http://www.postgresql.org/docs/current/static/sql-savepoint.html>
{-# INLINABLE withSavepoint #-}
withSavepoint :: (MonadDB m, MonadMask m) => Savepoint -> m a -> m a
withSavepoint (Savepoint savepoint) m = mask $ \restore -> do
runQuery_ $ "SAVEPOINT" <+> savepoint
res <- restore m `onException` rollbackAndReleaseSavepoint
runQuery_ sqlReleaseSavepoint
return res
where
sqlReleaseSavepoint = "RELEASE SAVEPOINT" <+> savepoint
rollbackAndReleaseSavepoint = do
runQuery_ $ "ROLLBACK TO SAVEPOINT" <+> savepoint
runQuery_ sqlReleaseSavepoint
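-- A usage sketch (the table and values are hypothetical); if the inner action
-- throws, only the work done since the savepoint is rolled back and the
-- exception is rethrown:
--
-- > withTransaction $ do
-- >   runSQL_ "INSERT INTO events VALUES (1)"
-- >   withSavepoint "sp" $ runSQL_ "INSERT INTO events VALUES (2)"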
----------------------------------------
-- | Same as 'withTransaction'' except that it uses current
-- transaction settings instead of custom ones. It is worth
-- noting that changing transaction settings inside supplied
-- monadic action won't have any effect on the final 'commit'
-- / 'rollback' as settings that were in effect during the call
-- to 'withTransaction' will be used.
{-# INLINABLE withTransaction #-}
withTransaction :: (MonadDB m, MonadMask m) => m a -> m a
withTransaction m = getTransactionSettings >>= flip withTransaction' m
-- | Begin transaction using current transaction settings.
{-# INLINABLE begin #-}
begin :: MonadDB m => m ()
begin = getTransactionSettings >>= begin'
-- | Commit active transaction using current transaction settings.
{-# INLINABLE commit #-}
commit :: MonadDB m => m ()
commit = getTransactionSettings >>= commit'
-- | Rollback active transaction using current transaction settings.
{-# INLINABLE rollback #-}
rollback :: MonadDB m => m ()
rollback = getTransactionSettings >>= rollback'
----------------------------------------
-- | Execute monadic action within a transaction using given transaction
-- settings. Note that it won't work as expected if a transaction is already
-- active (in such case 'withSavepoint' should be used instead).
{-# INLINABLE withTransaction' #-}
withTransaction' :: (MonadDB m, MonadMask m)
=> TransactionSettings -> m a -> m a
withTransaction' ts m = mask $ \restore -> (`fix` 1) $ \loop n -> do
-- Optimization for squashing possible space leaks.
-- It looks like GHC doesn't like 'catch' and passes
-- on introducing strictness in some cases.
let maybeRestart = case tsRestartPredicate ts of
Just _ -> handleJust (expred n) (\_ -> loop $ n+1)
Nothing -> id
maybeRestart $ do
begin' ts
res <- restore m `onException` rollback' ts
commit' ts
return res
where
expred :: Integer -> SomeException -> Maybe ()
expred !n e = do
-- check if the predicate exists
RestartPredicate f <- tsRestartPredicate ts
-- cast exception to the type expected by the predicate
err <- msum [
-- either cast the exception itself...
fromException e
-- ...or extract it from DBException
, fromException e >>= \DBException{..} -> cast dbeError
]
-- check if the predicate allows for the restart
guard $ f err n
-- | Begin transaction using given transaction settings.
{-# INLINABLE begin' #-}
begin' :: MonadDB m => TransactionSettings -> m ()
begin' ts = runSQL_ . mintercalate " " $ ["BEGIN", isolationLevel, permissions]
where
isolationLevel = case tsIsolationLevel ts of
DefaultLevel -> ""
ReadCommitted -> "ISOLATION LEVEL READ COMMITTED"
RepeatableRead -> "ISOLATION LEVEL REPEATABLE READ"
Serializable -> "ISOLATION LEVEL SERIALIZABLE"
permissions = case tsPermissions ts of
DefaultPermissions -> ""
ReadOnly -> "READ ONLY"
ReadWrite -> "READ WRITE"
-- | Commit active transaction using given transaction settings.
{-# INLINABLE commit' #-}
commit' :: MonadDB m => TransactionSettings -> m ()
commit' ts = do
runSQL_ "COMMIT"
when (tsAutoTransaction ts) $
begin' ts
-- | Rollback active transaction using given transaction settings.
{-# INLINABLE rollback' #-}
rollback' :: MonadDB m => TransactionSettings -> m ()
rollback' ts = do
runSQL_ "ROLLBACK"
when (tsAutoTransaction ts) $
begin' ts
| scrive/hpqtypes | src/Database/PostgreSQL/PQTypes/Transaction.hs | bsd-3-clause | 5,038 | 0 | 21 | 994 | 936 | 496 | 440 | -1 | -1 |
module Language.Xi.Base where
import Language.Xi.Base.Parser
import Language.Xi.Base.Analysis
import Language.Xi.Base.Interpreter
| fizruk/xi-base | src/Language/Xi/Base.hs | bsd-3-clause | 131 | 0 | 4 | 10 | 28 | 20 | 8 | 4 | 0 |
{-# LANGUAGE RecordWildCards, DeriveGeneric, DeriveDataTypeable #-}
module Data.Liblinear (
Problem(..),
Example(..),
Feature(..),
Parameter(..),
Model,
train,
def,
) where
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.Data
import Data.Default
import Data.Monoid
import Foreign.C
import Foreign.ForeignPtr
import Foreign.Marshal.Array
import Foreign.Marshal.Utils
import Foreign.Ptr
import Foreign.Storable
import Foreign.Storable.Generic
import GHC.Generics
import System.IO
import qualified Data.Vector as V
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import Data.Liblinear.Internal
data Problem
= Problem
{ problemData :: V.Vector Example
, problemBias :: Double
}
data Example
= Example
{ exampleLabel :: {-# UNPACK #-} !CDouble
, exampleFeatures :: {-# UNPACK #-} !(VS.Vector Feature)
}
data Feature
= Feature
{ featureIndex :: {-# UNPACK #-} !CInt
, featureValue :: {-# UNPACK #-} !CDouble
}
deriving (Generic)
data Parameter
= Parameter
{ paramSolverType :: Int
, paramEps :: Double
, paramC :: Double
, paramWeights :: VS.Vector Weight
, paramP :: Double
}
instance Default Parameter where
def = Parameter
{ paramSolverType = c'L2R_L2LOSS_SVC_DUAL
, paramEps = 0.1
, paramC = 1
, paramWeights = VS.empty
, paramP = 0.1
}
data Weight
= Weight
{ weightLabel :: {-# UNPACK #-} !CInt
, weightValue :: {-# UNPACK #-} !CDouble
}
deriving (Generic)
instance Storable Feature where
sizeOf = sizeOfDefault
alignment = alignmentDefault
peek = peekDefault
poke = pokeDefault
instance Storable Weight where
sizeOf = sizeOfDefault
alignment = alignmentDefault
peek = peekDefault
poke = pokeDefault
newtype Model = Model { unModel :: Ptr C'model }
train :: Problem -> Parameter -> IO Model
train Problem {..} Parameter {..} = do
let l = fromIntegral . V.length $ problemData
n = fromIntegral . V.foldl max 0 . V.map (VS.foldl max 0 . VS.map featureIndex . exampleFeatures) $ problemData
VS.unsafeWith (V.convert $ V.map exampleLabel problemData) $ \plabels ->
withManyV VS.unsafeWith (V.map exampleFeatures problemData) $ \pfeatures ->
VS.unsafeWith (VS.map weightLabel paramWeights) $ \pwlabel ->
VS.unsafeWith (VS.map weightValue paramWeights) $ \pwvalue ->
with (C'problem l n plabels (castPtr pfeatures) (realToFrac problemBias)) $ \pprob ->
with (C'parameter (fromIntegral paramSolverType) (realToFrac paramEps) (realToFrac paramC) (fromIntegral $ VS.length paramWeights) pwlabel pwvalue (realToFrac paramP)) $ \pparam ->
Model <$> c'train pprob pparam
predict :: Model -> VS.Vector Feature -> IO Double
predict (Model pmodel) features =
VS.unsafeWith (features <> VS.singleton (Feature (-1) 0)) $ \pfeat ->
realToFrac <$> c'predict pmodel (castPtr pfeat)
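-- An end-to-end sketch (hypothetical data; feature indices are 1-based, as
-- liblinear expects):
--
-- > demo :: IO Double
-- > demo = do
-- >   let ex l fs = Example l (VS.fromList [Feature i v | (i, v) <- fs])
-- >       prob = Problem (V.fromList [ex 1 [(1, 1), (2, 0)], ex (-1) [(1, 0), (2, 1)]]) 1
-- >   m <- train prob def
-- >   predict m (VS.fromList [Feature 1 1, Feature 2 0])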
predictValues :: Model -> VS.Vector Feature -> IO (Double, VS.Vector Double)
predictValues (Model pmodel) features = do
nr_class <- peek $ p'model'nr_class pmodel
solver_type <- peek $ p'parameter'solver_type $ p'model'param pmodel
let nr_w | nr_class == 2 && solver_type /= c'MCSVM_CS = 1
| otherwise = nr_class
ptr <- mallocForeignPtrArray (fromIntegral nr_w)
VS.unsafeWith (features <> VS.singleton (Feature (-1) 0)) $ \pfeat -> do
ret <- withForeignPtr ptr $ c'predict_values pmodel (castPtr pfeat)
let vect = VS.unsafeFromForeignPtr0 (castForeignPtr ptr) $ fromIntegral nr_w
return (realToFrac ret, vect)
predictProbability :: Model -> VS.Vector Feature -> IO (Double, VS.Vector Double)
predictProbability (Model pmodel) features = do
nr_class <- peek $ p'model'nr_class pmodel
ptr <- mallocForeignPtrArray (fromIntegral nr_class)
VS.unsafeWith (features <> VS.singleton (Feature (-1) 0)) $ \pfeat -> do
ret <- withForeignPtr ptr $ c'predict_probability pmodel (castPtr pfeat)
let vect = VS.unsafeFromForeignPtr0 (castForeignPtr ptr) $ fromIntegral nr_class
return (realToFrac ret, vect)
saveModel :: FilePath -> Model -> IO ()
saveModel path (Model pmodel) = do
withCString path $ \ppath -> do
throwIfError "saveModel failed" $ c'save_model ppath pmodel
loadModel :: FilePath -> IO Model
loadModel path = do
withCString path $ \ppath -> do
Model <$> throwIfNull "loadModel failed" (c'load_model ppath)
crossValidation :: Problem -> Parameter -> Int -> IO [Double]
crossValidation Problem {..} Parameter {..} numFold = do
  let l = fromIntegral . V.length $ problemData
      n = fromIntegral . V.foldl max 0 . V.map (VS.foldl max 0 . VS.map featureIndex . exampleFeatures) $ problemData
  VS.unsafeWith (V.convert $ V.map exampleLabel problemData) $ \plabels ->
    withManyV VS.unsafeWith (V.map exampleFeatures problemData) $ \pfeatures ->
    VS.unsafeWith (VS.map weightLabel paramWeights) $ \pwlabel -> VS.unsafeWith (VS.map weightValue paramWeights) $ \pwvalue ->
    with (C'problem l n plabels (castPtr pfeatures) (realToFrac problemBias)) $ \pprob ->
    with (C'parameter (fromIntegral paramSolverType) (realToFrac paramEps) (realToFrac paramC) (fromIntegral $ VS.length paramWeights) pwlabel pwvalue (realToFrac paramP)) $ \pparam ->
    allocaArray numFold $ \ptr -> c'cross_validation pprob pparam (fromIntegral numFold) ptr >> (map realToFrac <$> peekArray numFold ptr)
{-
int get_nr_feature(const struct model *model_);
int get_nr_class(const struct model *model_);
void get_labels(const struct model *model_, int* label);
void free_model_content(struct model *model_ptr);
void free_and_destroy_model(struct model **model_ptr_ptr);
void destroy_param(struct parameter *param);
const char *check_parameter(const struct problem *prob, const struct parameter *param);
int check_probability_model(const struct model *model);
void set_print_string_function(void (*print_func) (const char*));
-}
--
data LiblinearError
= LiblinearError String
deriving (Show, Data, Typeable)
instance Exception LiblinearError
throwIfError :: String -> IO CInt -> IO ()
throwIfError msg m = do
c <- m
when (c /= 0) $ throwIO $ LiblinearError msg
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull msg m = do
ptr <- m
when (ptr == nullPtr) $ throwIO $ LiblinearError msg
return ptr
withManyV :: (Storable e)
=> (VS.Vector e -> (Ptr e -> IO res) -> IO res)
-> V.Vector (VS.Vector e)
-> (Ptr (Ptr e) -> IO res)
-> IO res
withManyV withFoo v f = go 0 =<< VSM.new n where
n = V.length v
go ix w
| ix == n = VSM.unsafeWith w f
| otherwise = do
withFoo (v V.! ix) $ \ptr -> do
VSM.write w ix ptr
go (ix + 1) w
| tanakh/hs-liblinear | Data/Liblinear.hs | bsd-3-clause | 5,981 | 0 | 24 | 1,182 | 1,887 | 964 | 923 | 143 | 1 |
{-# OPTIONS -Wall #-}
import qualified Esge.Run as ERun
main :: IO ()
main = do
_ <- ERun.replRun "story.esge" [
ERun.defaultRepl
]
return ()
| neosam/esge | src/esgeAdvanture.hs | bsd-3-clause | 172 | 0 | 10 | 55 | 52 | 27 | 25 | 7 | 1 |
module Papps.Plugins.SQLExecute (
sqlResults
)
where
#ifdef sql-execute
import Database.HDBC
import Database.HDBC.MySQL
import Data.List (intercalate)
sqlResults :: String -> IO String
sqlResults sql = do
conn <- connectMySQL $ defaultMySQLConnectInfo {
mysqlHost = "localhost"
, mysqlUser = "moverman"
, mysqlPassword = ""
, mysqlDatabase = ""
, mysqlPort = 3306
, mysqlUnixSocket = ""}
stmt <- prepare conn sql
execute stmt []
res <- fetchAllRows stmt
colNames <- getColumnNames stmt
return $
formatSql colNames res
formatSql :: [String] -> [[SqlValue]] -> String
formatSql _ [] = "Query yields zero rows."
formatSql cols res =
    intercalate "\n" (intercalate "\t" cols : map (intercalate "\t" . map f) res)
  where f (SqlString s) = s
        f (SqlInteger s) = show s
        f (SqlDouble s) = show s
        f (SqlBool s) = show s
        f SqlNull = ""
        f t = show t
#else
sqlResults :: String -> IO String
sqlResults _ = return ""
#endif
| teneighty/papps | src/Papps/Plugins/SQLExecute.hs | bsd-3-clause | 1,131 | 0 | 12 | 420 | 320 | 166 | 154 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Render.MSurfaceT where
import Control.Lens (makeLenses)
import qualified Data.ByteString as B
import qualified Data.Vector.Unboxed as UV
import qualified Constants
import Render.MTexInfoT
import Types
makeLenses ''MSurfaceT
newMSurfaceT :: MSurfaceT
newMSurfaceT = MSurfaceT
{ _msVisFrame = 0
, _msPlane = Nothing
, _msFlags = 0
, _msFirstEdge = 0
, _msNumEdges = 0
, _msTextureMins = (-1, -1)
, _msExtents = (0, 0)
, _msLightS = 0
, _msLightT = 0
, _msDLightS = 0
, _msDLightT = 0
, _msPolys = Nothing
, _msTextureChain = Nothing
, _msLightmapChain = Nothing
, _msTexInfo = newMTexInfoT
, _msDLightFrame = 0
, _msDLightBits = 0
, _msLightmapTextureNum = 0
, _msStyles = B.replicate Constants.maxLightMaps 0
, _msCachedLight = UV.replicate Constants.maxLightMaps 0
, _msSamples = Nothing
} | ksaveljev/hake-2 | src/Render/MSurfaceT.hs | bsd-3-clause | 1,161 | 0 | 8 | 457 | 226 | 144 | 82 | 32 | 1 |