| code (string, 5 to 1.03M) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
#!/usr/bin/env runhaskell
import Control.Monad (void)
import System.Libnotify
main :: IO ()
main = void $ withNotifications Nothing $
do new "Same title" "line 1" "" $
do addHint (HintString "append" "allowed")
removeHints
render
new "Same title" "line 2" "" $
do addHint (HintString "append" "allowed")
removeHints
render
| supki/libnotify | tests/remove-hints-test.hs | mit | 442 | 0 | 13 | 166 | 111 | 51 | 60 | 12 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Typecrawl.Platforms (typepad, wpLeMonde, blogger)
where
import Text.HTML.Scalpel ((@:), hasClass, (//))
import Typecrawl.Types
-- Should work with any basic Typepad blog
typepad :: PlatformParseInstructions
typepad = Ppis ("span" @: [hasClass "pager-right"] // "a")
("h3" // "a")
(Pis
("h3" @: [hasClass "entry-header"])
("h2" @: [hasClass "date-header"])
("div" @: [hasClass "entry-body"]))
-- Should work with most WordPress blogs on the French newspaper Le Monde's
-- blogging platform
wpLeMonde :: PlatformParseInstructions
wpLeMonde = Ppis ("div" @: [hasClass "nav-previous"] // "a")
("h2" @: [hasClass "entry-title"] // "a")
(Pis
("h1" @: [hasClass "entry-title"])
("span" @: [hasClass "entry-date"])
("div" @: [hasClass "entry-content"]))
-- Should work with any basic Blogger website
blogger :: PlatformParseInstructions
blogger = Ppis ("a" @: [hasClass "blog-pager-older-link"])
("h3" @: [hasClass "post-title"] // "a")
(Pis
("h3" @: [hasClass "post-title"])
("h2" @: [hasClass "date-header"])
("div" @: [hasClass "entry-content"]))
| Raveline/typecrawl | lib/Typecrawl/Platforms.hs | mit | 1,339 | 0 | 11 | 403 | 334 | 186 | 148 | -1 | -1 |
module DoesItCompile where
-- Question 1
bigNum = (^) 5 $ 10
wahoo = (^) bigNum $ 10
-- Question 2
x = print
y = print "woohoo!"
z = x "hello world"
-- Question 3
a = (+)
b = 5
c = a b 10
d = a c 200
-- Question 4
a4 = 12 + b4
b4 = 10000 * c4
c4 = 10
| rasheedja/HaskellFromFirstPrinciples | Chapter5/doesItCompile.hs | mit | 255 | 0 | 6 | 72 | 112 | 65 | 47 | 13 | 1 |
-- | Type and functions for terms: the objects of first-order logic.
module Faun.Term where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.List (foldl')
import qualified Data.Text as T
import Data.Monoid ((<>), mconcat)
import qualified Faun.Text as FT
import Faun.ShowTxt
-- | A term represents an object. Terms are not atoms, they are found in
-- predicates in first-order logic.
--
-- Warning: for Term String, several algorithms assume that variable names
-- start with a lowercase character, while constant names start with an uppercase
-- character. For example, the parser uses the case of the first character to
-- distinguish variables from constants.
data Term =
-- | Variables range over objects. For example the variable x might be a
-- number, t could be a city, etc.
Variable T.Text
-- | Constants represent actual objects: the number 0, Kyoto, Quebec City,
-- Aristotle could all be constants.
| Constant T.Text
-- | Functions map objects to objects. The function 'Add' maps numbers to
-- a number, the function "CapitalOf" maps a city to a country, etc.
| Function T.Text [Term]
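-- A few illustrative values, following the case convention described in the
-- warning above (lowercase initials for variables, uppercase for constants):
--
-- Variable "x" -- a variable ranging over objects
-- Constant "Kyoto" -- a specific object
-- Function "CapitalOf" [Variable "country"] -- a function applied to a variable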
instance Eq Term where
(Variable t0) == (Variable t1) = t0 == t1
(Constant t0) == (Constant t1) = t0 == t1
(Function n0 ts0) ==
(Function n1 ts1) = n0 == n1 && all (uncurry (==)) (zip ts0 ts1)
_ == _ = False
instance Ord Term where
(Constant t0) `compare` (Constant t1) = t0 `compare` t1
(Constant t0) `compare` (Variable t1) = t0 `compare` t1
(Constant _) `compare` (Function _ _) = LT
(Variable t0) `compare` (Variable t1) = t0 `compare` t1
(Variable t0) `compare` (Constant t1) = t0 `compare` t1
(Variable _) `compare` (Function _ _) = LT
(Function n0 ts0) `compare` (Function n1 ts1) = compareFun n0 ts0 n1 ts1
(Function _ _) `compare` _ = GT
instance Show Term where
show = T.unpack . showTxt
-- show = T.unpack . FT.rmQuotes . textTerm
instance ShowTxt Term where
showTxt t = case t of
Variable x -> x
Constant x -> x
Function n ts -> T.concat [n, "(", if null ts then "" else terms, ")"]
where terms = FT.mkString $ map showTxt ts
-- | Returns the number of variables in the term.
numVars :: (Num n) => Term -> n
numVars t = case t of
Variable _ -> 1
Constant _ -> 0
Function _ ts -> foldl' (\acc trm -> acc + numVars trm) 0 ts
-- | Returns the number of constants in the term.
numCons :: (Num n) => Term -> n
numCons t = case t of
Variable _ -> 0
Constant _ -> 1
Function _ ts -> foldl' (\acc trm -> acc + numCons trm) 0 ts
-- | Returns the number of functions in the term.
numFuns :: (Num n) => Term -> n
numFuns t = case t of
Variable _ -> 0
Constant _ -> 0
Function _ ts -> 1 + foldl' (\acc trm -> acc + numFuns trm) 0 ts
-- | Substitute a term for another.
substitute :: Term -> Term -> Term -> Term
substitute old new (Function n ts) =
if old == Function n ts then new
else Function n $ map (substitute old new) ts
substitute old new t0 = if t0 == old then new else t0
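-- For instance, substituting the variable x by the constant Kyoto rewrites
-- every occurrence of x inside a function term:
--
-- substitute (Variable "x") (Constant "Kyoto") (Function "CapitalOf" [Variable "x"])
-- == Function "CapitalOf" [Constant "Kyoto"]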
-- | Shows the internal structure of the term. This is particularly useful
-- to distinguish variables from constants in Term String, where otherwise
-- it would be impossible to tell them apart.
showStruct :: Term -> T.Text
showStruct t = case t of
Variable x -> T.concat ["Variable (", x, ")"]
Constant x -> T.concat ["Constant (", x, ")"]
Function n ts ->
T.concat ["Function ", n, " [", if null ts then "" else terms, "]"]
where terms = FT.mkString (map showStruct ts)
-- | Get all the constants from a term.
constants :: Term -> Set T.Text
constants = gat
where
gat = gather Set.empty
gather s t' = case t' of
Variable _ -> s
Constant t'' -> Set.insert t'' s
Function _ ts -> foldl' (\a t'' -> Set.union (gat t'') a) s ts
-- | Tests if the term is 'grounded', i.e. if it has no variables.
ground :: Term -> Bool
ground t = case t of
Variable _ -> False
Constant _ -> True
Function _ ts -> all ground ts
-- | Tests if the term has a specific variable.
hasVar :: T.Text -> Term -> Bool
hasVar v t = case t of
Variable x -> v == x
Constant _ -> False
Function _ ts -> any (hasVar v) ts
-- | Used to compare names and arguments for functions and predicate. First
-- look at the name, then the number of arguments, and finally for functions
-- with the same name and argument, look at the first term that differ.
compareFun :: T.Text -> [Term] -> T.Text -> [Term] -> Ordering
compareFun n0 ts0 n1 ts1 =
(n0 `compare` n1)
<> (length ts0 `compare` length ts1)
<> mconcat (zipWith compare ts0 ts1)
| PhDP/Sphinx-AI | Faun/Term.hs | mit | 4,609 | 0 | 15 | 1,068 | 1,427 | 750 | 677 | 84 | 4 |
-- | A generalization of LIO's core components to work for any monad, instead of just IO.
module LMonad (module LMonad) where
import LMonad.TCB as LMonad (
Label (..)
, LMonad (..)
, LMonadT
, runLMonad
, runLMonadWith
, lLift
, getCurrentLabel
, getClearance
, lubCurrentLabel
, canSetLabel
, setLabel
, taintLabel
, taintLabels
, setClearance
, Labeled
, label
, unlabel
, canUnlabel
, labelOf
, ToLabel(..)
, Lattice(..)
, swapBase
)
-- most code should import LMonad
-- trusted code can import LMonad.TCB
--
-- You will also need to import a LMonad.Label.* module or create an instance of Label.
| jprider63/LMonad | src/LMonad.hs | mit | 744 | 0 | 6 | 246 | 106 | 74 | 32 | 24 | 0 |
{- |
Module : $Header$
Description : run hets as server
Copyright : (c) Christian Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (via imports)
-}
module PGIP.Server (hetsServer) where
import PGIP.Query as Query
import Driver.Options
import Driver.ReadFn
import Driver.Version
import Network.Wai.Handler.Warp
import Network.HTTP.Types (Status, status200, status400, status403, status405)
import Control.Monad.Trans (lift, liftIO)
import qualified Data.Text as T
import Network.Wai
import Network.Wai.Parse
import qualified Data.ByteString.Lazy.Char8 as BS
import qualified Data.ByteString.Char8 as B8
import Static.AnalysisLibrary
import Static.ApplyChanges
import Static.ComputeTheory
import Static.DevGraph
import Static.DgUtils
import Static.DotGraph
import Static.FromXml
import Static.GTheory
import Static.History (changeDGH)
import Static.PrintDevGraph
import Static.ToXml as ToXml
import Syntax.ToXml
import Syntax.Print_AS_Structured
import Interfaces.Command
import Interfaces.CmdAction
import Comorphisms.LogicGraph
import Logic.Prover
import Logic.Grothendieck
import Logic.Comorphism
import Logic.Logic
import Proofs.AbstractState
import Proofs.ConsistencyCheck
import Text.XML.Light
import Text.XML.Light.Cursor hiding (findChild)
import Common.AutoProofUtils
import Common.Doc
import Common.DocUtils (pretty, showGlobalDoc, showDoc)
import Common.ExtSign (ExtSign (..))
import Common.GtkGoal
import Common.LibName
import Common.PrintLaTeX
import Common.Result
import Common.ResultT
import Common.ToXml
import Common.Utils
import Common.XUpdate
import Control.Monad
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Char
import Data.IORef
import Data.Function
import Data.List
import Data.Maybe
import Data.Ord
import Data.Graph.Inductive.Graph (lab)
import Data.Time.Clock
import System.Random
import System.Directory
import System.Exit
import System.FilePath
import System.IO
data Session = Session
{ sessLibEnv :: LibEnv
, sessLibName :: LibName
, sessKey :: Int
, _sessStart :: UTCTime }
type SessMap = Map.Map (String, [GlobCmd]) Session
type Cache = IORef (IntMap.IntMap Session, SessMap)
randomKey :: IO Int
randomKey = randomRIO (100000000, 999999999)
sessGraph :: DGQuery -> Session -> Maybe (LibName, DGraph)
sessGraph dgQ (Session le ln _ _) = case dgQ of
DGQuery _ (Just path) ->
find (\ (n, _) -> libToFileName n == path)
$ Map.toList le
_ -> fmap (\ dg -> (ln, dg)) $ Map.lookup ln le
getVal :: [QueryPair] -> String -> Maybe String
getVal qs = fromMaybe Nothing . (`lookup` qs)
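-- getVal collapses the double Maybe coming from lookup on a QueryPair list
-- (a key may be absent, or present without a value), e.g.
--
-- getVal [("format", Just "xml"), ("autoproof", Nothing)] "format" == Just "xml"
-- getVal [("format", Just "xml"), ("autoproof", Nothing)] "session" == Nothing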
hetsServer :: HetcatsOpts -> IO ()
hetsServer opts1 = do
tempDir <- getTemporaryDirectory
let tempHetsLib = tempDir </> "MyHetsLib"
permFile = tempDir </> "empty.txt"
opts = opts1 { libdirs = tempHetsLib : libdirs opts1 }
createDirectoryIfMissing False tempHetsLib
writeFile permFile ""
sessRef <- newIORef (IntMap.empty, Map.empty)
run 8000 $ \ re -> do
let rhost = shows (remoteHost re) "\n"
bots = ["180.76.", "77.75.77.", "66.249.", "141.8.147."]
splitQuery = map (\ (bs, ms) -> (B8.unpack bs, fmap B8.unpack ms))
$ queryString re
pathBits = map T.unpack $ pathInfo re
path = intercalate "/" pathBits
meth = B8.unpack (requestMethod re)
liftIO $ do
time <- getCurrentTime
createDirectoryIfMissing False tempHetsLib
(m, _) <- readIORef sessRef
if isPrefixOf "134.102.204.54" rhost -- nagios-plugins 1.4.15
then appendFile permFile "."
else do
appendFile permFile $ shows time " sessions: "
++ shows (IntMap.size m) "\n"
appendFile permFile rhost
appendFile permFile $ shows (requestHeaders re) "\n"
-- better try to read hosts to exclude from a file
if any (`isInfixOf` rhost) bots then return $ mkResponse status403 ""
-- if path could be a RESTfull request, try to parse it
else if isRESTfull pathBits then liftIO $
parseRESTfull opts sessRef pathBits splitQuery meth
-- only otherwise stick to the old response methods
else case meth of
"GET" -> liftIO $ if isJust $ lookup "menus" splitQuery
then mkMenuResponse else do
dirs@(_ : cs) <- getHetsLibContent opts path splitQuery
if not (null cs) || null path then mkHtmlPage path dirs
-- AUTOMATIC PROOFS (parsing)
else if isJust $ getVal splitQuery "autoproof" then
let qr k = Query (DGQuery k Nothing) $
anaAutoProofQuery splitQuery in do
Result ds ms <- runResultT $ case readMaybe $ head pathBits of
Nothing -> fail "cannot read session id for automatic proofs"
Just k' -> getHetsResult opts [] sessRef (qr k')
return $ case ms of
Nothing -> mkResponse status400 $ showRelDiags 1 ds
Just s -> mkOkResponse s
-- AUTOMATIC PROOFS E.N.D.
else getHetsResponse opts [] sessRef pathBits splitQuery
"POST" -> do
(params, files) <- parseRequestBody lbsBackEnd re
mTmpFile <- liftIO $ case lookup "content"
$ map (\ (a, b) -> (B8.unpack a, b)) params of
Nothing -> return Nothing
Just areatext -> let content = B8.unpack areatext in
if all isSpace content then return Nothing else do
tmpFile <- getTempFile content "temp.het"
copyPermissions permFile tmpFile
return $ Just tmpFile
let res tmpFile =
getHetsResponse opts [] sessRef [tmpFile] splitQuery
mRes = maybe (return $ mkResponse status400 "nothing submitted")
res mTmpFile
liftIO $ case files of
[] -> if isJust $ getVal splitQuery "prove" then
getHetsResponse opts [] sessRef pathBits
$ splitQuery ++ map (\ (a, b)
-> (B8.unpack a, Just $ B8.unpack b)) params
else mRes
[(_, f)] | isNothing $ lookup updateS splitQuery -> do
let fn = takeFileName $ B8.unpack $ fileName f
if any isAlphaNum fn then do
let tmpFile = tempHetsLib </> fn
BS.writeFile tmpFile $ fileContent f
copyPermissions permFile tmpFile
maybe (res tmpFile) res mTmpFile
else mRes
_ -> getHetsResponse
opts (map snd files) sessRef pathBits splitQuery
_ -> return $ mkResponse status405 ""
-- extract what we need to know from an autoproof request
anaAutoProofQuery :: [QueryPair] -> QueryKind
anaAutoProofQuery splitQuery = let
lookup2 = getVal splitQuery
prover = lookup2 "prover"
trans = lookup2 "translation"
timeout = lookup2 "timeout" >>= readMaybe
include = maybe False (== "on") $ lookup2 "includetheorems"
nodeSel = filter (/= "includetheorems")
$ map fst $ filter ((== Just "on") . snd) splitQuery
prOrCons = case lookup2 "autoproof" of
Just "proof" -> GlProofs
Just "cons" -> GlConsistency
err -> error $ "illegal autoproof method: " ++ show err
in GlAutoProve $ ProveCmd prOrCons include prover trans timeout nodeSel False
-- quick approach to whether or not the query can be a RESTfull request
isRESTfull :: [String] -> Bool
isRESTfull pathBits = case pathBits of
[] -> False
h : _ -> elem h listRESTfullIdentifiers
listRESTfullIdentifiers :: [String]
listRESTfullIdentifiers =
[ "libraries", "sessions", "menus", "hets-lib", "dir"]
++ nodeEdgeIdes ++ newRESTIdes
nodeEdgeIdes :: [String]
nodeEdgeIdes = ["nodes", "edges"]
newRESTIdes :: [String]
newRESTIdes =
[ "dg", "translate", "provers", "consistency-checkers", "prove"
, "consistency-check" ]
-- query is analysed and processed in accordance with RESTfull interface
parseRESTfull :: HetcatsOpts -> Cache -> [String] -> [QueryPair]
-> String -> IO Response
parseRESTfull opts sessRef pathBits splitQuery meth = let
{- some parameters from the paths query part might be needed more than once
(when using lookup upon querybits, you need to unpack Maybe twice) -}
lookup2 = getVal splitQuery
session = lookup2 "session" >>= readMaybe
library = lookup2 "library"
format = lookup2 "format"
nodeM = lookup2 "node"
transM = lookup2 "translation"
proverM = lookup2 "prover"
consM = lookup2 "consistency-checker"
inclM = lookup2 "include"
incl = maybe False (\ s ->
notElem (map toLower s) ["f", "false"]) inclM
timeout = lookup2 "timeout" >>= readMaybe
queryFailure = return . mkResponse status400
$ "this query does not comply with RESTfull interface: "
++ intercalate "/" (map encodeForQuery pathBits)
-- since used more often, generate full query out of nodeIRI and nodeCmd
nodeQuery s = NodeQuery $ maybe (Right s) Left (readMaybe s :: Maybe Int)
parseNodeQuery :: Monad m => String -> Int -> m NodeCommand -> m Query
parseNodeQuery p sId ncmd = ncmd >>= let
in return . Query (DGQuery sId (Just p)) . nodeQuery (getFragment p)
-- call getHetsResult with the properly generated query (Final Result)
getResponse qr = do
Result ds ms <- runResultT $ getHetsResult opts [] sessRef qr
return $ case ms of
Nothing -> mkResponse status400 $ showRelDiags 1 ds
Just s -> mkOkResponse s
-- respond depending on request Method
in case meth of
rm | elem rm ["GET", "POST"] -> case pathBits of
-- show all menu options
"menus" : [] -> mkMenuResponse
-- list files from directory
"dir" : r -> let path' = intercalate "/" r in
getHetsLibContent opts path' splitQuery >>= mkHtmlPage path'
-- get dgraph from file
"hets-lib" : r -> let file = intercalate "/" r in
getResponse $ Query (NewDGQuery file []) $ DisplayQuery format
-- get library (complies with get/hets-lib for now)
"libraries" : libIri : "development_graph" : [] ->
getResponse $ Query (NewDGQuery libIri []) $ DisplayQuery format
-- get previously created session
"sessions" : sessId : cmd -> case readMaybe sessId of
Nothing -> fail $ "failed to read session number from " ++ sessId
Just sId ->
(case nodeM of
Just ndIri -> parseNodeQuery ndIri sId $ case cmd of
["provers"] -> return $ NcProvers GlProofs transM
["translations"] -> return $ NcTranslations Nothing
_ -> fail $ "unknown node command for a GET-request: "
++ intercalate "/" cmd
Nothing -> fmap (Query (DGQuery sId Nothing)) $ case cmd of
[] -> return $ DisplayQuery format
["provers"] -> return $ GlProvers GlProofs transM
["translations"] -> return GlTranslations
_ -> fail $ "unknown global command for a GET-request: "
++ intercalate "/" cmd) >>= getResponse
-- get node or edge view
nodeOrEdge : p : c | elem nodeOrEdge nodeEdgeIdes -> let
iriPath = takeWhile (/= '#') p
dgQ = maybe (NewDGQuery (fromMaybe iriPath library) [])
(`DGQuery` library) session
f = getFragment p
in case elemIndex nodeOrEdge nodeEdgeIdes of
Just 0 -> let
i = maybe (Right f) Left $ readMaybe f in
getResponse $ Query dgQ $ NodeQuery i $ case c of
["theory"] -> NcCmd Query.Theory
_ -> NcCmd Query.Info
Just 1 -> case readMaybe f of
Just i -> getResponse $ Query dgQ $ EdgeQuery i "edge"
Nothing -> fail $ "failed to read edgeId from " ++ f
_ -> error $ "PGIP.Server.elemIndex " ++ nodeOrEdge
newIde : libIri : rest -> let cmdList = filter (/= "") rest in
if elem newIde newRESTIdes && all (`elem` globalCommands) cmdList
then getResponse . Query (NewDGQuery libIri cmdList) $ case newIde of
"translate" -> case nodeM of
Nothing -> GlTranslations
Just n -> nodeQuery n $ NcTranslations Nothing
_ | elem newIde ["provers", "consistency-checkers"] ->
let pm = if newIde == "provers" then GlProofs else GlConsistency
in case nodeM of
Nothing -> GlProvers pm transM
Just n -> nodeQuery n $ NcProvers pm transM
_ | elem newIde ["prove", "consistency-check"] ->
let isProve = newIde == "prove"
pm = if isProve then GlProofs else GlConsistency
pc = ProveCmd pm
(not (isProve && isJust inclM) || incl)
(if isProve then proverM else consM) transM timeout [] True
in case nodeM of
Nothing -> GlAutoProve pc
Just n -> nodeQuery n $ ProveNode pc
_ -> DisplayQuery (Just $ fromMaybe "xml" format)
else queryFailure
_ -> queryFailure
"PUT" -> case pathBits of
{- execute global commands
TODO load other library ??? -}
"libraries" : libIri : "proofs" : prId : cmd : [] ->
case readMaybe prId of
Nothing -> fail $ "failed to read sessionId from " ++ prId
Just sessId -> let
dgQ = DGQuery sessId $ Just libIri in
getResponse $ Query dgQ $ GlobCmdQuery cmd
-- execute a proof or calculus request
"sessions" : sessId : cmd : [] -> case readMaybe sessId of
Nothing -> fail $ "failed to read sessionId from " ++ sessId
Just sId -> case cmd of
"prove" ->
let pc = ProveCmd GlProofs incl proverM transM timeout [] False
in case nodeM of
-- prove all nodes if no singleton is selected
Nothing -> return $ Query (DGQuery sId Nothing)
$ GlAutoProve pc
-- otherwise run prover for single node only
Just ndIri -> parseNodeQuery ndIri sId $ return
$ ProveNode pc
>>= getResponse
-- on other cmd look for (optional) specification of node or edge
_ -> case (nodeM, lookup2 "edge") of
-- fail if both are specified
(Just _, Just _) ->
fail "please specify only either node or edge"
-- call command upon a single node
(Just ndIri, Nothing) -> parseNodeQuery ndIri sId
$ case lookup cmd $ map (\ a -> (showNodeCmd a, a)) nodeCmds of
Just nc -> return $ NcCmd nc
_ -> fail $ "unknown node command '" ++ cmd ++ "' "
-- call (the only) command upon a single edge
(Nothing, Just edIri) -> case readMaybe $ getFragOfCode edIri of
Just i -> return $ Query (DGQuery sId Nothing)
$ EdgeQuery i "edge"
Nothing ->
fail $ "failed to read edgeId from edgeIRI: " ++ edIri
-- call of global command
_ -> return $ Query (DGQuery sId Nothing) $ GlobCmdQuery cmd
>>= getResponse
-- fail if request doesn't comply
_ -> queryFailure
{- create failure response if request method is not known
(should never happen) -}
_ -> return $ mkResponse status405 ""
mkMenuResponse :: IO Response
mkMenuResponse = return $ mkOkResponse $ ppTopElement $ unode "menus" mkMenus
mkMenus :: [Element]
mkMenus = menuTriple "" "Get menu triples" "menus"
: menuTriple "/DGraph" updateS updateS
: map (\ (c, _) -> menuTriple "/" (menuTextGlobCmd c) $ cmdlGlobCmd c)
allGlobLibAct
++ map (\ nc -> menuTriple "/DGraph/DGNode" ("Show " ++ nc) nc) nodeCommands
++ [menuTriple "/DGraph/DGLink" "Show edge info" "edge"]
menuTriple :: String -> String -> String -> Element
menuTriple q d c = unode "triple"
[ unode "xquery" q
, unode "displayname" d
, unode "command" c ]
mkHtmlString :: FilePath -> [Element] -> String
mkHtmlString path dirs = htmlHead ++ mkHtmlElem
("Listing of" ++ if null path then " repository" else ": " ++ path)
(unode "h1" ("Hets " ++ hetcats_version) : unode "p"
[ bold "Hompage:"
, aRef "http://www.dfki.de/cps/hets" "dfki.de/cps/hets"
, bold "Contact:"
, aRef "mailto:[email protected]"
"[email protected]" ]
: headElems path ++ [unode "ul" dirs])
mkHtmlElem :: String -> [Element] -> String
mkHtmlElem title body = ppElement $ unode "html"
[ unode "head" $ unode "title" title, unode "body" body ]
-- include a script within page (manual tags to avoid encoding)
mkHtmlElemScript :: String -> String -> [Element] -> String
mkHtmlElemScript title scr body = "<html>\n<head>\n"
++ ppElement (unode "title" title) ++ "\n<script type=text/javascript>"
++ scr ++ "</script>\n</head>\n" ++ ppElement (unode "body" body)
++ "</html>"
mkHtmlPage :: FilePath -> [Element] -> IO Response
mkHtmlPage path = return . mkOkResponse . mkHtmlString path
mkResponse :: Status -> String -> Response
mkResponse st = responseLBS st [] . BS.pack
mkOkResponse :: String -> Response
mkOkResponse = mkResponse status200
addNewSess :: String -> [GlobCmd] -> Cache -> Session -> IO Int
addNewSess file cl sessRef sess = do
k <- randomKey
let s = sess { sessKey = k }
atomicModifyIORef sessRef $ \ (m, lm) ->
((IntMap.insert k s m, Map.insert (file, cl) s lm), k)
nextSess :: Session -> Cache -> LibEnv -> Int -> IO Session
nextSess sess sessRef newLib k = if k <= 0 then return sess else
atomicModifyIORef sessRef
(\ (m, lm) -> case IntMap.lookup k m of
Nothing -> error "nextSess"
Just s -> let newSess = s { sessLibEnv = newLib }
in ((IntMap.insert k newSess m, lm), newSess))
ppDGraph :: DGraph -> Maybe PrettyType -> ResultT IO String
ppDGraph dg mt = let ga = globalAnnos dg in case optLibDefn dg of
Nothing -> fail "parsed LIB-DEFN not avaible"
Just ld ->
let d = prettyLG logicGraph ld
latex = renderLatex Nothing $ toLatex ga d
in case mt of
Just pty -> case pty of
PrettyXml -> return $ ppTopElement $ xmlLibDefn logicGraph ga ld
PrettyAscii _ -> return $ renderText ga d ++ "\n"
PrettyHtml -> return $ htmlHead ++ renderHtml ga d
PrettyLatex _ -> return latex
Nothing -> lift $ do
tmpDir <- getTemporaryDirectory
tmpFile <- writeTempFile (latexHeader ++ latex ++ latexFooter)
tmpDir "temp.tex"
copyPermissions (tmpDir </> "empty.txt") tmpFile
mapM_ (\ s -> do
let sty = (</> "hetcasl.sty")
f = sty s
ex <- doesFileExist f
when ex $ copyFile f $ sty tmpDir)
[ "utils", "Hets/utils"
, "/home/www.informatik.uni-bremen.de/cofi/hets-tmp" ]
withinDirectory tmpDir $ do
(ex1, out1, err1) <- executeProcess "pdflatex" [tmpFile] ""
(ex2, out2, err2) <- executeProcess "pdflatex" [tmpFile] ""
let pdfFile = replaceExtension tmpFile "pdf"
pdf <- doesFileExist pdfFile
if ex1 == ExitSuccess && ex2 == ExitSuccess && pdf then do
pdfHdl <- openBinaryFile pdfFile ReadMode
str <- hGetContents pdfHdl
when (length str < 0) $ putStrLn "pdf file too large"
hClose pdfHdl
return str
else return $ "could not create pdf:\n"
++ unlines [out1, err1, out2, err2]
getDGraph :: HetcatsOpts -> Cache -> DGQuery
-> ResultT IO (Session, Int)
getDGraph opts sessRef dgQ = do
(m, lm) <- lift $ readIORef sessRef
case dgQ of
NewDGQuery file cmdList ->
let cl = map (\ s -> fromJust . find ((== s) . cmdlGlobCmd)
$ map fst allGlobLibAct) cmdList
in case Map.lookup (file, cl) lm of
Just sess -> return (sess, sessKey sess)
Nothing -> do
(ln, le1) <- do
mf <- lift $ getContent opts file
case mf of
Right (f, c) | isDgXmlFile opts f c -> readDGXmlR opts f Map.empty
_ -> anaSourceFile logicGraph opts
{ outputToStdout = False, useLibPos = True }
Set.empty emptyLibEnv emptyDG file
le2 <- foldM (\ e c -> liftR
$ fromJust (lookup c allGlobLibAct) ln e) le1 cl
time <- lift getCurrentTime
let sess = Session le2 ln 0 time
k <- lift $ addNewSess file cl sessRef sess
return (sess, k)
DGQuery k _ -> case IntMap.lookup k m of
Nothing -> fail "unknown development graph"
Just sess -> return (sess, k)
getSVG :: String -> String -> DGraph -> ResultT IO String
getSVG title url dg = do
(exCode, out, err) <- lift $ executeProcess "dot" ["-Tsvg"]
$ dotGraph title False url dg
case exCode of
ExitSuccess -> liftR $ extractSVG dg out
_ -> fail err
enrichSVG :: DGraph -> Element -> Element
enrichSVG dg e = processSVG dg $ fromElement e
processSVG :: DGraph -> Cursor -> Element
processSVG dg c = case nextDF c of
Nothing -> case toTree (root c) of
Elem e -> e
_ -> error "processSVG"
Just c2 -> processSVG dg
$ modifyContent (addSVGAttribs dg) c2
nodeAttrib :: DGNodeLab -> String
nodeAttrib l = let nt = getRealDGNodeType l in
(if isRefType nt then "Ref" else "")
++ (if hasSenKind (const True) l then
(if isProvenNode nt then "P" else "Unp") ++ "roven"
++ if isProvenCons nt then "Cons" else ""
else "LocallyEmpty")
++ (if isInternalSpec nt then "Internal" else "")
++ if labelHasHiding l then "HasIngoingHidingLink" else ""
edgeAttrib :: DGLinkLab -> String
edgeAttrib l = show (pretty $ dgl_type l) ++
if dglPending l then "IncompleteProofChain" else ""
addSVGAttribs :: DGraph -> Content -> Content
addSVGAttribs dg c = case c of
Elem e -> case getAttrVal "id" e of
Just istr | isNat istr -> let i = read istr in
case getAttrVal "class" e of
Just "node" -> case lab (dgBody dg) i of
Nothing -> c
Just l -> Elem $ add_attr (mkAttr "type" $ nodeAttrib l) e
Just "edge" -> case getDGLinksById (EdgeId i) dg of
[(_, _, l)] ->
Elem $ add_attr (mkAttr "type" $ edgeAttrib l) e
_ -> c
_ -> c
_ -> c
_ -> c
extractSVG :: DGraph -> String -> Result String
extractSVG dg str = case parseXMLDoc str of
Nothing -> fail "did not recognize svg element"
Just e -> return $ showTopElement $ enrichSVG dg e
cmpFilePath :: FilePath -> FilePath -> Ordering
cmpFilePath f1 f2 = case comparing hasTrailingPathSeparator f2 f1 of
EQ -> compare f1 f2
c -> c
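-- Because of the flipped comparison on hasTrailingPathSeparator, entries with
-- a trailing slash (directories, as produced by getDirContents below) sort
-- before plain files, e.g. cmpFilePath "Basic/" "Main.het" == LT.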
-- | with the 'old' call of getHetsResponse, anaUri is called upon the path
getHetsResponse :: HetcatsOpts -> [FileInfo BS.ByteString]
-> Cache -> [String] -> [QueryPair] -> IO Response
getHetsResponse opts updates sessRef pathBits query = do
Result ds ms <- runResultT $ case anaUri pathBits query
$ updateS : globalCommands of
Left err -> fail err
Right q -> getHetsResult opts updates sessRef q
return $ case ms of
Just s | not $ hasErrors ds -> mkOkResponse s
_ -> mkResponse status400 $ showRelDiags 1 ds
getHetsResult :: HetcatsOpts -> [FileInfo BS.ByteString]
-> Cache -> Query -> ResultT IO String
getHetsResult opts updates sessRef (Query dgQ qk) = do
sk@(sess, k) <- getDGraph opts sessRef dgQ
let libEnv = sessLibEnv sess
(ln, dg) <- maybe (fail "unknown development graph") return
$ sessGraph dgQ sess
let title = libToFileName ln
svg <- getSVG title ('/' : show k) dg
case qk of
DisplayQuery ms -> case ms of
Just "svg" -> return svg
Just "xml" -> liftR $ return $ ppTopElement
$ ToXml.dGraph opts libEnv ln dg
Just "dot" -> liftR $ return $ dotGraph title False title dg
Just "symbols" -> liftR $ return $ ppTopElement
$ ToXml.dgSymbols dg
Just "session" -> liftR $ return $ ppElement
$ aRef (mkPath sess ln k) (show k)
Just str | elem str ppList
-> ppDGraph dg $ lookup str $ zip ppList prettyList
_ -> liftR $ return $ sessAns ln svg sk
GlProvers mp mt -> return $ getFullProverList mp mt dg
GlTranslations -> return $ getFullComorphList dg
GlShowProverWindow prOrCons -> showAutoProofWindow dg k prOrCons
GlAutoProve (ProveCmd prOrCons incl mp mt tl nds xForm) -> do
(newLib, sens) <-
proveMultiNodes xForm prOrCons libEnv ln dg incl mp mt tl nds
if null sens then return "nothing to prove" else do
lift $ nextSess sess sessRef newLib k
return $ formatResultsMultiple xForm k sens prOrCons
GlobCmdQuery s ->
case find ((s ==) . cmdlGlobCmd . fst) allGlobLibAct of
Nothing -> if s == updateS then
case filter ((== ".xupdate") . takeExtension . B8.unpack
. fileName) updates of
ch : _ -> do
let str = BS.unpack $ fileContent ch
(newLn, newLib) <- dgXUpdate opts str libEnv ln dg
newSess <- lift $ nextSess sess sessRef newLib k
liftR $ return $ sessAns newLn svg (newSess, k)
[] -> liftR $ return $ sessAns ln svg sk
else fail "getHetsResult.GlobCmdQuery"
Just (_, act) -> do
newLib <- liftR $ act ln libEnv
newSess <- lift $ nextSess sess sessRef newLib k
-- calculate updated SVG-view from modified development graph
newSvg <- getSVG title ('/' : show k) $ lookupDGraph ln newLib
liftR $ return $ sessAns ln newSvg (newSess, k)
NodeQuery ein nc -> do
nl@(i, dgnode) <- case ein of
Right n -> case lookupNodeByName n dg of
p : _ -> return p
[] -> fail $ "no node name: " ++ n
Left i -> case lab (dgBody dg) i of
Nothing -> fail $ "no node id: " ++ show i
Just dgnode -> return (i, dgnode)
let fstLine = (if isDGRef dgnode then ("reference " ++) else
if isInternalNode dgnode then ("internal " ++) else id)
"node " ++ getDGNodeName dgnode ++ " (#" ++ show i ++ ")\n"
ins = getImportNames dg i
showN d = showGlobalDoc (globalAnnos dg) d "\n"
case nc of
NcCmd cmd | elem cmd [Query.Node, Info, Symbols]
-> case cmd of
Symbols -> return $ ppTopElement
$ showSymbols ins (globalAnnos dg) dgnode
_ -> return $ fstLine ++ showN dgnode
_ -> case maybeResult $ getGlobalTheory dgnode of
Nothing -> fail $
"cannot compute global theory of:\n" ++ fstLine
Just gTh -> let subL = sublogicOfTh gTh in case nc of
ProveNode (ProveCmd pm incl mp mt tl thms xForm) ->
case pm of
GlProofs -> do
(newLib, sens) <- proveNode libEnv ln dg nl
gTh subL incl mp mt tl thms
if null sens then return "nothing to prove" else do
lift $ nextSess sess sessRef newLib k
return . formatResults xForm k i . unode "results"
$ map (\ (n, e, d) -> unode "goal"
[ unode "name" n
, unode "result" e
, unode "details" d]) sens
GlConsistency -> do
(newLib, [(_, res, txt)]) <- consNode libEnv ln dg nl
subL incl mp mt tl
lift $ nextSess sess sessRef newLib k
return . ppTopElement $ formatConsNode res txt
_ -> return $ case nc of
NcCmd Query.Theory ->
showGlobalTh dg i gTh k fstLine
NcProvers mp mt -> formatProvers mp $ case mp of
GlProofs -> getProversAux mt subL
GlConsistency -> getConsCheckersAux mt subL
NcTranslations mp -> getComorphs mp subL
_ -> error "getHetsResult.NodeQuery."
EdgeQuery i _ ->
case getDGLinksById (EdgeId i) dg of
[e@(_, _, l)] -> return $ showLEdge e ++ "\n" ++ showDoc l ""
[] -> fail $ "no edge found with id: " ++ show i
_ -> fail $ "multiple edges found with id: " ++ show i
formatConsNode :: String -> String -> Element
formatConsNode res txt = add_attr (mkAttr "state" res) $ unode "result" txt
formatResultsMultiple :: Bool -> Int -> [Element] -> ProverMode -> String
formatResultsMultiple xForm sessId rs prOrCons =
if xForm then ppTopElement $ unode "Results" rs else let
goBack1 = case prOrCons of
GlConsistency -> aRef ('/' : show sessId ++ "?consistency") "return"
GlProofs -> aRef ('/' : show sessId ++ "?autoproof") "return"
goBack2 = aRef ('/' : show sessId) "return to DGraph"
in ppElement $ unode "html" ( unode "head"
[ unode "title" "Results", add_attr ( mkAttr "type" "text/css" )
$ unode "style" resultStyles, goBack1, plain " ", goBack2 ]
: foldr (\ el r -> unode "h4" (qName $ elName el) : el : r) [] rs )
-- | display results of proving session (single node)
formatResults :: Bool -> Int -> Int -> Element -> String
formatResults xForm sessId i rs =
if xForm || sessId <= 0 then ppTopElement rs else let
goBack1 = aRef ('/' : show sessId ++ "?theory=" ++ show i) "return to Theory"
goBack2 = aRef ('/' : show sessId) "return to DGraph"
in ppElement $ unode "html" [ unode "head"
[ unode "title" "Results", add_attr ( mkAttr "type" "text/css" )
$ unode "style" resultStyles, goBack1, plain " ", goBack2 ], rs ]
resultStyles :: String
resultStyles = unlines
[ "results { margin: 5px; padding:5px; display:block; }"
, "goal { display:block; margin-left:15px; }"
, "name { display:inline; margin:5px; padding:10px; font-weight:bold; }"
, "result { display:inline; padding:30px; }" ]
showBool :: Bool -> String
showBool = map toLower . show
{- | displays the global theory for a node with the option to prove theorems
and select proving options -}
showGlobalTh :: DGraph -> Int -> G_theory -> Int -> String -> String
showGlobalTh dg i gTh sessId fstLine = case simplifyTh gTh of
sGTh@(G_theory lid _ (ExtSign sig _) _ thsens _) -> let
ga = globalAnnos dg
-- links to translations and provers xml view
transBt = aRef ('/' : show sessId ++ "?translations=" ++ show i)
"translations"
prvsBt = aRef ('/' : show sessId ++ "?provers=" ++ show i) "provers"
headr = unode "h3" fstLine
thShow = renderHtml ga $ vcat $ map (print_named lid) $ toNamedList thsens
sbShow = renderHtml ga $ pretty sig
in case getThGoals sGTh of
-- show simple view if no goals are found
[] -> mkHtmlElem fstLine [ headr, transBt, prvsBt,
unode "h4" "Theory" ] ++ sbShow ++ "\n<br />" ++ thShow
-- else create proving functionality
gs -> let
-- create list of theorems, selectable for proving
thmSl = map (\ (nm, bp) -> let gSt = maybe GOpen basicProofToGStatus bp
in add_attrs
[ mkAttr "type" "checkbox", mkAttr "name" $ escStr nm
, mkAttr "unproven" $ showBool $ elem gSt [GOpen, GTimeout]]
$ unode "input" $ nm ++ " (" ++ showSimple gSt ++ ")" ) gs
-- select unproven, all or none theorems by button
(btUnpr, btAll, btNone, jvScr1) = showSelectionButtons True
-- create prove button and prover/comorphism selection
(prSl, cmrSl, jvScr2) = showProverSelection GlProofs [sublogicOfTh gTh]
(prBt, timeout) = showProveButton True
-- hidden param field
hidStr = add_attrs [ mkAttr "name" "prove"
, mkAttr "type" "hidden", mkAttr "style" "display:none;"
, mkAttr "value" $ show i ]
inputNode
-- combine elements within a form
thmMenu = let br = unode "br " () in add_attrs
[ mkAttr "name" "thmSel", mkAttr "method" "get"]
. unode "form"
$ [hidStr, prSl, cmrSl, br, btUnpr, btAll, btNone, timeout]
++ intersperse br (prBt : thmSl)
-- save dg and return to svg-view
goBack = aRef ('/' : show sessId) "return to DGraph"
in mkHtmlElemScript fstLine (jvScr1 ++ jvScr2)
[ headr, transBt, prvsBt, plain " ", goBack, unode "h4" "Theorems"
, thmMenu, unode "h4" "Theory" ] ++ sbShow ++ "\n<br />" ++ thShow
-- | show window of the autoproof function
showAutoProofWindow :: DGraph -> Int -> ProverMode -> ResultT IO String
showAutoProofWindow dg sessId prOrCons = let
dgnodes = labNodesDG dg
-- some parameters need to be different for consistency and autoproof mode
(prMethod, isProver, title, nodeSel) = case prOrCons of
GlProofs -> ("proof", True, "automatic proofs"
, map (\ fn -> add_attrs [ mkAttr "type" "checkbox"
, mkAttr "unproven" $ showBool $ not $ allProved fn
, mkAttr "name" $ escStr $ name fn ]
$ unode "input" $ showHtml fn) $ initFNodes dgnodes)
-- TODO sort out nodes with no sentences!
GlConsistency -> ("cons", False, "consistency checker"
, map (\ (_, dgn) ->
let cstat = getConsistencyOf dgn
nm = getDGNodeName dgn in add_attrs [ mkAttr "type" "checkbox"
, mkAttr "unproven" $ showBool $ sType cstat == CSUnchecked
, mkAttr "name" nm]
$ unode "input" (cStatusToPrefix cstat ++ nm)) dgnodes)
-- generate param field for the query string, invisible to the user
hidStr = add_attrs [ mkAttr "name" "autoproof"
, mkAttr "type" "hidden", mkAttr "style" "display:none;"
, mkAttr "value" prMethod ] inputNode
-- select unproven, all or no nodes by button
(btUnpr, btAll, btNone, jvScr1) = showSelectionButtons isProver
(prBt, timeout) = showProveButton isProver
include = add_attrs [ mkAttr "type" "checkbox", mkAttr "checked" "true"
, mkAttr "name" "includetheorems"] $ unode "input" "include Theorems"
goBack = aRef ('/' : show sessId) "return to DGraph"
in do
(jvScr2, nodeMenu) <- case dgnodes of
-- return simple feedback if no nodes are present
[] -> return ("", plain "nothing to prove (graph has no nodes)")
-- otherwise
(_, nd) : _ -> case maybeResult $ getGlobalTheory nd of
Nothing -> fail $ "cannot compute global theory of:\n" ++ show nd
Just gTh -> let
br = unode "br " ()
(prSel, cmSel, jvSc) = showProverSelection prOrCons
[sublogicOfTh gTh]
in return (jvSc, add_attrs
[ mkAttr "name" "nodeSel", mkAttr "method" "get" ]
. unode "form" $
[ hidStr, prSel, cmSel, br, btAll, btNone, btUnpr, timeout, include ]
++ intersperse br (prBt : nodeSel))
return $ mkHtmlElemScript title (jvScr1 ++ jvScr2)
[ goBack, plain " ", nodeMenu ]
showProveButton :: Bool -> (Element, Element)
showProveButton isProver = (prBt, timeout) where
prBt = [ mkAttr "type" "submit", mkAttr "value"
$ if isProver then "Prove" else "Check"]
`add_attrs` inputNode
-- create timeout field
timeout = add_attrs [mkAttr "type" "text", mkAttr "name" "timeout"
, mkAttr "value" "1", mkAttr "size" "3"]
$ unode "input" "Sec/Goal "
-- | select unproven, all or none theorems by button
showSelectionButtons :: Bool -> (Element, Element, Element, String)
showSelectionButtons isProver = (selUnPr, selAll, selNone, jvScr)
where prChoice = if isProver then "SPASS" else "darwin"
selUnPr = add_attrs [mkAttr "type" "button"
, mkAttr "value" $ if isProver then "Unproven" else "Unchecked"
, mkAttr "onClick" "chkUnproven()"] inputNode
selAll = add_attrs [mkAttr "type" "button", mkAttr "value" "All"
, mkAttr "onClick" "chkAll(true)"] inputNode
selNone = add_attrs [mkAttr "type" "button", mkAttr "value" "None"
, mkAttr "onClick" "chkAll(false)"] inputNode
-- javascript features
jvScr = unlines
-- select unproven goals by button
[ "\nfunction chkUnproven() {"
, " var e = document.forms[0].elements;"
, " for (i = 0; i < e.length; i++) {"
, " if( e[i].type == 'checkbox'"
, " && e[i].name != 'includetheorems' )"
, " e[i].checked = e[i].getAttribute('unproven') == 'true';"
, " }"
-- select or deselect all theorems by button
, "}\nfunction chkAll(b) {"
, " var e = document.forms[0].elements;"
, " for (i = 0; i < e.length; i++) {"
, " if( e[i].type == 'checkbox'"
, " && e[i].name != 'includetheorems' ) e[i].checked = b;"
, " }"
-- autoselect SPASS if possible
, "}\nwindow.onload = function() {"
, " prSel = document.forms[0].elements.namedItem('prover');"
, " prs = prSel.getElementsByTagName('option');"
, " for ( i=0; i<prs.length; i++ ) {"
, " if( prs[i].value == '" ++ prChoice ++ "' ) {"
, " prs[i].selected = 'selected';"
, " updCmSel('" ++ prChoice ++ "');"
, " return;"
, " }"
, " }"
-- if SPASS is unavailable, select the first one in the list
, " prs[0].selected = 'selected';"
, " updCmSel( prs[0].value );"
, "}" ]
-- | create prover and comorphism menu and combine them using javascript
showProverSelection :: ProverMode -> [G_sublogics]
-> (Element, Element, String)
showProverSelection prOrCons subLs = let
jvScr = unlines
-- the chosen prover is passed as param
[ "\nfunction updCmSel(pr) {"
, " var cmrSl = document.forms[0].elements.namedItem('translation');"
-- then, all selectable comorphisms are gathered and iterated
, " var opts = cmrSl.getElementsByTagName('option');"
-- try to keep current comorph-selection
, " var selAccept = false;"
, " for( var i = opts.length-1; i >= 0; i-- ) {"
, " var cmr = opts.item( i );"
-- the list of supported provers is extracted
, " var prs = cmr.getAttribute('4prover').split(';');"
, " var pFit = false;"
, " for( var j = 0; ! pFit && j < prs.length; j++ ) {"
, " pFit = prs[j] == pr;"
, " }"
-- if prover is supported, remove disabled attribute
, " if( pFit ) {"
, " cmr.removeAttribute('disabled');"
, " selAccept = selAccept || cmr.selected;"
-- else create and append a disabled attribute
, " } else {"
, " var ds = document.createAttribute('disabled');"
, " ds.value = 'disabled';"
, " cmr.setAttributeNode(ds);"
, " }"
, " }"
-- check if selected comorphism fits, and select fst. in list otherwise
, " if( ! selAccept ) {"
, " for( i = 0; i < opts.length; i++ ) {"
, " if( ! opts.item(i).disabled ) {"
, " opts.item(i).selected = 'selected';"
, " return;"
, " }"
, " }"
, " }"
, "}" ]
allPrCm = nub $ concatMap ((case prOrCons of
GlProofs -> getProversAux
GlConsistency -> getConsCheckersAux) Nothing) subLs
-- create prover selection (drop-down)
prs = add_attr (mkAttr "name" "prover") $ unode "select" $ map (\ p ->
add_attrs [mkAttr "value" p, mkAttr "onClick" $ "updCmSel('" ++ p ++ "')"]
$ unode "option" p) $ showProversOnly allPrCm
-- create comorphism selection (drop-down)
cmrs = add_attr (mkAttr "name" "translation") $ unode "select"
$ map (\ (cm, ps) -> let c = showComorph cm in
add_attrs [mkAttr "value" c, mkAttr "4prover" $ intercalate ";" ps]
$ unode "option" c) allPrCm
in (prs, cmrs, jvScr)
showHtml :: FNode -> String
showHtml fn = name fn ++ " " ++ goalsToPrefix (toGtkGoals fn)
getAllAutomaticProvers :: G_sublogics -> [(G_prover, AnyComorphism)]
getAllAutomaticProvers subL = getAllProvers ProveCMDLautomatic subL logicGraph
filterByProver :: Maybe String -> [(G_prover, AnyComorphism)]
-> [(G_prover, AnyComorphism)]
filterByProver mp = case mp of
Nothing -> id
Just p -> filter ((== p) . getWebProverName . fst)
filterByComorph :: Maybe String -> [(a, AnyComorphism)]
-> [(a, AnyComorphism)]
filterByComorph mt = case mt of
Nothing -> id
Just c -> filter ((== c) . showComorph . snd)
getProverAndComorph :: Maybe String -> Maybe String -> G_sublogics
-> [(G_prover, AnyComorphism)]
getProverAndComorph mp mc subL =
let ps = getFilteredProvers mc subL
spps = case filterByProver (Just "SPASS") ps of
[] -> ps
fps -> fps
in case mp of
Nothing -> spps
_ -> filterByProver mp ps
showComorph :: AnyComorphism -> String
showComorph (Comorphism cid) = removeFunnyChars . drop 1 . dropWhile (/= ':')
. map (\ c -> if c == ';' then ':' else c)
$ language_name cid
removeFunnyChars :: String -> String
removeFunnyChars = filter (\ c -> isAlphaNum c || elem c "_.:-")
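-- This keeps only alphanumerics and "_.:-", e.g.
-- removeFunnyChars "CASL -> SoftFOL" == "CASL-SoftFOL"; it is used below to
-- turn prover and comorphism names into query-string-friendly identifiers.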
getWebProverName :: G_prover -> String
getWebProverName = removeFunnyChars . getProverName
getFullProverList :: ProverMode -> Maybe String -> DGraph -> String
getFullProverList mp mt = formatProvers mp . foldr
(\ (_, nd) ls -> maybe ls ((++ ls) . case mp of
GlProofs -> getProversAux mt
GlConsistency -> getConsCheckersAux mt
. sublogicOfTh)
$ maybeResult $ getGlobalTheory nd) [] . labNodesDG
showProversOnly :: [(AnyComorphism, [String])] -> [String]
showProversOnly = nubOrd . concatMap snd
groupOnSnd :: Eq b => (a -> c) -> [(a, b)] -> [(b, [c])]
groupOnSnd f =
map (\ l@((_, b) : _) -> (b, map (f . fst) l)) . groupBy (on (==) snd)
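-- groupOnSnd assumes entries with equal second components are adjacent
-- (here, pairs of prover and comorphism), e.g.
--
-- groupOnSnd show [(1, 'a'), (2, 'a'), (3, 'b')] == [('a', ["1", "2"]), ('b', ["3"])]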
{- | gather provers and comorphisms and re-sort them to
(comorphism, supported provers) pairs without changing the original comorphism order -}
getProversAux :: Maybe String -> G_sublogics -> [(AnyComorphism, [String])]
getProversAux mt = groupOnSnd getWebProverName . getFilteredProvers mt
getFilteredProvers :: Maybe String -> G_sublogics
-> [(G_prover, AnyComorphism)]
getFilteredProvers mt = filterByComorph mt . getAllAutomaticProvers
formatProvers :: ProverMode -> [(AnyComorphism, [String])] -> String
formatProvers pm = let
tag = case pm of
GlProofs -> "prover"
GlConsistency -> "consistency-checker"
in ppTopElement . unode (tag ++ "s") . map (unode tag)
. showProversOnly
-- | retrieve a list of consistency checkers
getConsCheckersAux :: Maybe String -> G_sublogics
-> [(AnyComorphism, [String])]
getConsCheckersAux mt = groupOnSnd getCcName . getFilteredConsCheckers mt
getFilteredConsCheckers :: Maybe String -> G_sublogics
-> [(G_cons_checker, AnyComorphism)]
getFilteredConsCheckers mt = filterByComorph mt . filter (getCcBatch . fst)
. getConsCheckers . findComorphismPaths logicGraph
getComorphs :: Maybe String -> G_sublogics -> String
getComorphs mp subL = formatComorphs . filterByProver mp
$ getAllAutomaticProvers subL
getFullComorphList :: DGraph -> String
getFullComorphList = formatComorphs . foldr
(\ (_, nd) ls -> maybe ls ((++ ls) . getAllAutomaticProvers . sublogicOfTh)
$ maybeResult $ getGlobalTheory nd) [] . labNodesDG
formatComorphs :: [(G_prover, AnyComorphism)] -> String
formatComorphs = ppTopElement . unode "translations"
. map (unode "comorphism") . nubOrd . map (showComorph . snd)
consNode :: LibEnv -> LibName -> DGraph -> (Int, DGNodeLab)
-> G_sublogics -> Bool -> Maybe String -> Maybe String -> Maybe Int
-> ResultT IO (LibEnv, [(String, String, String)])
consNode le ln dg nl@(i, lb) subL useTh mp mt tl = let
consList = getFilteredConsCheckers mt subL
findCC x = filter ((== x ) . getCcName . fst) consList
mcc = maybe (findCC "darwin") findCC mp
in case mcc of
[] -> fail "no cons checker found"
((cc, c) : _) -> lift $ do
cstat <- consistencyCheck useTh cc c ln le dg nl $ fromMaybe 1 tl
-- Consistency Results are stored in LibEnv via DGChange object
let cSt = sType cstat
le'' = if cSt == CSUnchecked then le else
Map.insert ln (changeDGH dg $ SetNodeLab lb
(i, case cSt of
CSInconsistent -> markNodeInconsistent "" lb
CSConsistent -> markNodeConsistent "" lb
_ -> lb)) le
return (le'', [(" ", drop 2 $ show cSt, show cstat)])
proveNode :: LibEnv -> LibName -> DGraph -> (Int, DGNodeLab) -> G_theory
-> G_sublogics -> Bool -> Maybe String -> Maybe String -> Maybe Int
-> [String] -> ResultT IO (LibEnv, [(String, String, String)])
proveNode le ln dg nl gTh subL useTh mp mt tl thms = case
getProverAndComorph mp mt subL of
[] -> fail "no matching translation or prover found"
cp : _ -> do
let ks = map fst $ getThGoals gTh
diffs = Set.difference (Set.fromList thms)
$ Set.fromList ks
unless (Set.null diffs) . fail $ "unknown theorems: " ++ show diffs
when (null thms && null ks) $ fail "no theorems to prove"
((nTh, sens), _) <- autoProofAtNode useTh (maybe 1 (max 1) tl)
(if null thms then ks else thms) gTh cp
return (if null sens then le else
Map.insert ln (updateLabelTheory le dg nl nTh) le, sens)
-- run over multiple dgnodes and prove available goals for each
proveMultiNodes :: Bool -> ProverMode -> LibEnv -> LibName -> DGraph -> Bool
-> Maybe String -> Maybe String -> Maybe Int -> [String]
-> ResultT IO (LibEnv, [Element])
proveMultiNodes xF pm le ln dg useTh mp mt tl nodeSel = let
runProof le' gTh nl = let
subL = sublogicOfTh gTh
dg' = lookupDGraph ln le' in case pm of
GlConsistency -> consNode le' ln dg' nl subL useTh mp mt tl
GlProofs -> proveNode le' ln dg' nl gTh subL useTh mp mt tl
$ map fst $ getThGoals gTh
nodes2check = filter (case nodeSel of
[] -> case pm of
GlConsistency -> const True
GlProofs -> hasOpenGoals . snd
_ -> (`elem` nodeSel) . getDGNodeName . snd) $ labNodesDG dg
in foldM
(\ (le', res) nl@(_, dgn) -> case maybeResult $ getGlobalTheory dgn of
Nothing -> fail $
"cannot compute global theory of:\n" ++ show dgn
Just gTh -> do
(le'', sens) <- runProof le' gTh nl
return (le'', formatResultsAux xF pm (getDGNodeName dgn) sens : res))
(le, []) nodes2check
formatResultsAux :: Bool -> ProverMode -> String -> [(String, String, String)]
-> Element
formatResultsAux xF pm nm sens = unode nm $ case (sens, pm) of
([(_, e, d)], GlConsistency) | xF -> formatConsNode e d
_ -> unode "results" $ map (\ (n, e, d) -> unode "goal"
$ [unode "name" n, unode "result" e]
++ [unode "details" d | xF]) sens
mkPath :: Session -> LibName -> Int -> String
mkPath sess l k =
'/' : concat [ libToFileName l ++ "?session="
| l /= sessLibName sess ]
++ show k
extPath :: Session -> LibName -> Int -> String
extPath sess l k = mkPath sess l k ++
if l /= sessLibName sess then "&" else "?"
globalCommands :: [String]
globalCommands = map (cmdlGlobCmd . fst) allGlobLibAct
sessAns :: LibName -> String -> (Session, Int) -> String
sessAns libName svg (sess, k) =
let libEnv = sessLibEnv sess
ln = libToFileName libName
libref l =
aRef (mkPath sess l k) (libToFileName l) : map (\ d ->
aRef (extPath sess l k ++ d) d) displayTypes
libPath = extPath sess libName k
ref d = aRef (libPath ++ d) d
autoProofBt = aRef ('/' : show k ++ "?autoproof") "automatic proofs"
consBt = aRef ('/' : show k ++ "?consistency") "consistency checker"
-- the html quicklinks to nodes and edges have been removed with R.16827
in htmlHead ++ mkHtmlElem
('(' : shows k ")" ++ ln)
(bold ("library " ++ ln)
: map ref displayTypes
++ menuElement : loadXUpdate (libPath ++ updateS)
: plain "tools:" : mkUnorderedList [autoProofBt, consBt]
: plain "commands:"
: mkUnorderedList (map ref globalCommands)
: plain "imported libraries:"
: [mkUnorderedList $ map libref $ Map.keys libEnv]
) ++ svg
getHetsLibContent :: HetcatsOpts -> String -> [QueryPair] -> IO [Element]
getHetsLibContent opts dir query = do
let hlibs = libdirs opts
ds <- if null dir then return hlibs else
filterM doesDirectoryExist $ map (</> dir) hlibs
fs <- fmap (sortBy cmpFilePath . filter (not . isPrefixOf ".") . concat)
$ mapM getDirContents ds
return $ map (mkHtmlRef query) $ getParent dir : fs
getParent :: String -> String
getParent = addTrailingPathSeparator . ("/" </>) . takeDirectory
. dropTrailingPathSeparator
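-- getParent builds the parent-directory link placed at the top of a listing
-- by getHetsLibContent, e.g. getParent "Basic/Numbers.casl" == "/Basic/".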
-- | a variant of 'getDirectoryContents' that marks directories with a trailing slash
getDirContents :: FilePath -> IO [FilePath]
getDirContents d = do
fs <- getDirectoryContents d
mapM (\ f -> doesDirectoryExist (d </> f) >>= \ b -> return
$ if b then addTrailingPathSeparator f else f) fs
aRef :: String -> String -> Element
aRef lnk txt = add_attr (mkAttr "href" lnk) $ unode "a" txt
mkHtmlRef :: [QueryPair] -> String -> Element
mkHtmlRef query entry = unode "dir" $ aRef
(entry ++ if null query then "" else '?' : intercalate "&"
(map (\ (x, ms) -> x ++ maybe "" ('=' :) ms) query)) entry
mkUnorderedList :: Node t => [t] -> Element
mkUnorderedList = unode "ul" . map (unode "li")
italic :: String -> Element
italic = unode "i"
bold :: String -> Element
bold = unode "b"
plain :: String -> Element
plain = unode "p"
headElems :: String -> [Element]
headElems path = let d = "default" in unode "strong" "Choose a display type:" :
map (\ q -> aRef (if q == d then "/" </> path else '?' : q) q)
(d : displayTypes)
++ [ unode "p"
[ unode "small" "internal command overview as XML:"
, menuElement ]
, plain $ "Select a local file as library or "
++ "enter a HetCASL specification in the text area and press \"submit\""
++ ", or browse through our Hets-lib library below."
, uploadHtml ]
menuElement :: Element
menuElement = aRef "?menus" "menus"
htmlHead :: String
htmlHead =
let dtd = "PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\""
url = "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\""
in concat ["<!DOCTYPE html ", dtd, " ", url, ">\n"]
inputNode :: Element
inputNode = unode "input" ()
loadNode :: String -> Element
loadNode nm = add_attrs
[ mkAttr "type" "file"
, mkAttr "name" nm
, mkAttr "size" "40"]
inputNode
submitNode :: Element
submitNode = add_attrs
[ mkAttr "type" "submit"
, mkAttr "value" "submit"]
inputNode
mkForm :: String -> [Element] -> Element
mkForm a = add_attrs
[ mkAttr "action" a
, mkAttr "enctype" "multipart/form-data"
, mkAttr "method" "post" ]
. unode "form"
uploadHtml :: Element
uploadHtml = mkForm "/"
[ loadNode "file"
, unode "p" $ add_attrs
[ mkAttr "cols" "68"
, mkAttr "rows" "22"
, mkAttr "name" "content" ] $ unode "textarea" ""
, submitNode ]
loadXUpdate :: String -> Element
loadXUpdate a = mkForm a
[ italic xupdateS
, loadNode xupdateS
, italic "impacts"
, loadNode "impacts"
, submitNode ]
| nevrenato/HetsAlloy | PGIP/Server.hs | gpl-2.0 | 52,065 | 1 | 37 | 15,052 | 15,179 | 7,693 | 7,486 | 1,033 | 43 |
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings #-}
{-
Copyright (C) 2006-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.LaTeX
Copyright : Copyright (C) 2006-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of LaTeX to 'Pandoc' document.
-}
module Text.Pandoc.Readers.LaTeX ( readLaTeX,
rawLaTeXInline,
rawLaTeXBlock,
handleIncludes
) where
import Text.Pandoc.Definition
import Text.Pandoc.Walk
import Text.Pandoc.Shared
import Text.Pandoc.Options
import Text.Pandoc.Parsing hiding ((<|>), many, optional, space,
mathDisplay, mathInline)
import Text.Parsec.Prim (ParsecT, runParserT)
import qualified Text.Pandoc.UTF8 as UTF8
import Data.Char ( chr, ord )
import Control.Monad.Trans (lift)
import Control.Monad
import Text.Pandoc.Builder
import Data.Char (isLetter, isAlphaNum)
import Control.Applicative
import Data.Monoid
import Data.Maybe (fromMaybe)
import System.Environment (getEnv)
import System.FilePath (replaceExtension, (</>))
import Data.List (intercalate, intersperse)
import qualified Data.Map as M
import qualified Control.Exception as E
import System.FilePath (takeExtension, addExtension)
import Text.Pandoc.Highlighting (fromListingsLanguage)
-- | Parse LaTeX from string and return 'Pandoc' document.
readLaTeX :: ReaderOptions -- ^ Reader options
-> String -- ^ String to parse (assumes @'\n'@ line endings)
-> Pandoc
readLaTeX opts = readWith parseLaTeX def{ stateOptions = opts }
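-- A minimal usage sketch (readerParseRaw is just one example option; any
-- 'ReaderOptions' value works):
--
-- readLaTeX def{ readerParseRaw = True } "Hello \\emph{world}!\n"
--
-- yields a 'Pandoc' value whose single paragraph contains an Emph inline.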
parseLaTeX :: LP Pandoc
parseLaTeX = do
bs <- blocks
eof
st <- getState
let meta = stateMeta st
let (Pandoc _ bs') = doc bs
return $ Pandoc meta bs'
type LP = Parser [Char] ParserState
anyControlSeq :: LP String
anyControlSeq = do
char '\\'
next <- option '\n' anyChar
name <- case next of
'\n' -> return ""
c | isLetter c -> (c:) <$> (many letter <* optional sp)
| otherwise -> return [c]
return name
controlSeq :: String -> LP String
controlSeq name = try $ do
char '\\'
case name of
"" -> mzero
[c] | not (isLetter c) -> string [c]
cs -> string cs <* notFollowedBy letter <* optional sp
return name
dimenarg :: LP String
dimenarg = try $ do
ch <- option "" $ string "="
num <- many1 digit
dim <- oneOfStrings ["pt","pc","in","bp","cm","mm","dd","cc","sp"]
return $ ch ++ num ++ dim
sp :: LP ()
sp = skipMany1 $ satisfy (\c -> c == ' ' || c == '\t')
<|> (try $ newline >>~ lookAhead anyChar >>~ notFollowedBy blankline)
isLowerHex :: Char -> Bool
isLowerHex x = x >= '0' && x <= '9' || x >= 'a' && x <= 'f'
tildeEscape :: LP Char
tildeEscape = try $ do
string "^^"
c <- satisfy (\x -> x >= '\0' && x <= '\128')
d <- if isLowerHex c
then option "" $ count 1 (satisfy isLowerHex)
else return ""
if null d
then case ord c of
x | x >= 64 && x <= 127 -> return $ chr (x - 64)
| otherwise -> return $ chr (x + 64)
else return $ chr $ read ('0':'x':c:d)
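-- This implements TeX's ^^ notation: two lowercase hex digits give the
-- character with that code, otherwise a single character is shifted by 64
-- (down for codes >= 64, up for smaller ones), e.g. "^^4a" parses to 'J'
-- (0x4a) and "^^M" to the control character chr 13.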
comment :: LP ()
comment = do
char '%'
skipMany (satisfy (/='\n'))
optional newline
return ()
bgroup :: LP ()
bgroup = () <$ char '{'
<|> () <$ controlSeq "bgroup"
<|> () <$ controlSeq "begingroup"
egroup :: LP ()
egroup = () <$ char '}'
<|> () <$ controlSeq "egroup"
<|> () <$ controlSeq "endgroup"
grouped :: Monoid a => LP a -> LP a
grouped parser = try $ bgroup *> (mconcat <$> manyTill parser egroup)
braced :: LP String
braced = bgroup *> (concat <$> manyTill
( many1 (satisfy (\c -> c /= '\\' && c /= '}' && c /= '{'))
<|> try (string "\\}")
<|> try (string "\\{")
<|> try (string "\\\\")
<|> ((\x -> "{" ++ x ++ "}") <$> braced)
<|> count 1 anyChar
) egroup)
bracketed :: Monoid a => LP a -> LP a
bracketed parser = try $ char '[' *> (mconcat <$> manyTill parser (char ']'))
mathDisplay :: LP String -> LP Inlines
mathDisplay p = displayMath <$> (try p >>= applyMacros' . trim)
mathInline :: LP String -> LP Inlines
mathInline p = math <$> (try p >>= applyMacros')
mathChars :: LP String
mathChars = concat <$>
many ( many1 (satisfy (\c -> c /= '$' && c /='\\'))
<|> (\c -> ['\\',c]) <$> (try $ char '\\' *> anyChar)
)
quoted' :: (Inlines -> Inlines) -> LP String -> LP () -> LP Inlines
quoted' f starter ender = do
startchs <- starter
try ((f . mconcat) <$> manyTill inline ender) <|> lit startchs
double_quote :: LP Inlines
double_quote =
( quoted' doubleQuoted (try $ string "``") (void $ try $ string "''")
<|> quoted' doubleQuoted (string "“") (void $ char '”')
-- the following is used by babel for localized quotes:
<|> quoted' doubleQuoted (try $ string "\"`") (void $ try $ string "\"'")
<|> quoted' doubleQuoted (string "\"") (void $ char '"')
)
single_quote :: LP Inlines
single_quote =
( quoted' singleQuoted (string "`") (try $ char '\'' >> notFollowedBy letter)
<|> quoted' singleQuoted (string "‘") (try $ char '’' >> notFollowedBy letter)
)
inline :: LP Inlines
inline = (mempty <$ comment)
<|> (space <$ sp)
<|> inlineText
<|> inlineCommand
<|> inlineGroup
<|> (char '-' *> option (str "-")
((char '-') *> option (str "–") (str "—" <$ char '-')))
<|> double_quote
<|> single_quote
<|> (str "”" <$ try (string "''"))
<|> (str "”" <$ char '”')
<|> (str "’" <$ char '\'')
<|> (str "’" <$ char '’')
<|> (str "\160" <$ char '~')
<|> (mathDisplay $ string "$$" *> mathChars <* string "$$")
<|> (mathInline $ char '$' *> mathChars <* char '$')
<|> (superscript <$> (char '^' *> tok))
<|> (subscript <$> (char '_' *> tok))
<|> (guardEnabled Ext_literate_haskell *> char '|' *> doLHSverb)
<|> (str . (:[]) <$> tildeEscape)
<|> (str . (:[]) <$> oneOf "[]")
<|> (str . (:[]) <$> oneOf "#&") -- TODO print warning?
-- <|> (str <$> count 1 (satisfy (\c -> c /= '\\' && c /='\n' && c /='}' && c /='{'))) -- eat random leftover characters
inlines :: LP Inlines
inlines = mconcat <$> many (notFollowedBy (char '}') *> inline)
inlineGroup :: LP Inlines
inlineGroup = do
ils <- grouped inline
if isNull ils
then return mempty
else return $ spanWith nullAttr ils
-- we need the span so we can detitlecase bibtex entries;
-- we need to know when something is {C}apitalized
block :: LP Blocks
block = (mempty <$ comment)
<|> (mempty <$ ((spaceChar <|> newline) *> spaces))
<|> environment
<|> macro
<|> blockCommand
<|> paragraph
<|> grouped block
<|> (mempty <$ char '&') -- loose & in table environment
blocks :: LP Blocks
blocks = mconcat <$> many block
blockCommand :: LP Blocks
blockCommand = try $ do
name <- anyControlSeq
guard $ name /= "begin" && name /= "end"
star <- option "" (string "*" <* optional sp)
let name' = name ++ star
case M.lookup name' blockCommands of
Just p -> p
Nothing -> case M.lookup name blockCommands of
Just p -> p
Nothing -> mzero
inBrackets :: Inlines -> Inlines
inBrackets x = (str "[") <> x <> (str "]")
-- eat an optional argument and one or more arguments in braces
ignoreInlines :: String -> (String, LP Inlines)
ignoreInlines name = (name, doraw <|> (mempty <$ optargs))
where optargs = skipopts *> skipMany (try $ optional sp *> braced)
contseq = '\\':name
doraw = (rawInline "latex" . (contseq ++) . snd) <$>
(getOption readerParseRaw >>= guard >> (withRaw optargs))
ignoreBlocks :: String -> (String, LP Blocks)
ignoreBlocks name = (name, doraw <|> (mempty <$ optargs))
where optargs = skipopts *> skipMany (try $ optional sp *> braced)
contseq = '\\':name
doraw = (rawBlock "latex" . (contseq ++) . snd) <$>
(getOption readerParseRaw >>= guard >> (withRaw optargs))
blockCommands :: M.Map String (LP Blocks)
blockCommands = M.fromList $
[ ("par", mempty <$ skipopts)
, ("title", mempty <$ (skipopts *> tok >>= addMeta "title"))
, ("subtitle", mempty <$ (skipopts *> tok >>= addMeta "subtitle"))
, ("author", mempty <$ (skipopts *> authors))
  -- in letter class, temporarily store address & signature as title, author
, ("address", mempty <$ (skipopts *> tok >>= addMeta "address"))
, ("signature", mempty <$ (skipopts *> authors))
, ("date", mempty <$ (skipopts *> tok >>= addMeta "date"))
-- sectioning
, ("chapter", updateState (\s -> s{ stateHasChapters = True })
*> section nullAttr 0)
, ("chapter*", updateState (\s -> s{ stateHasChapters = True })
*> section ("",["unnumbered"],[]) 0)
, ("section", section nullAttr 1)
, ("section*", section ("",["unnumbered"],[]) 1)
, ("subsection", section nullAttr 2)
, ("subsection*", section ("",["unnumbered"],[]) 2)
, ("subsubsection", section nullAttr 3)
, ("subsubsection*", section ("",["unnumbered"],[]) 3)
, ("paragraph", section nullAttr 4)
, ("paragraph*", section ("",["unnumbered"],[]) 4)
, ("subparagraph", section nullAttr 5)
, ("subparagraph*", section ("",["unnumbered"],[]) 5)
-- beamer slides
, ("frametitle", section nullAttr 3)
, ("framesubtitle", section nullAttr 4)
-- letters
, ("opening", (para . trimInlines) <$> (skipopts *> tok))
, ("closing", skipopts *> closing)
--
, ("hrule", pure horizontalRule)
, ("rule", skipopts *> tok *> tok *> pure horizontalRule)
, ("item", skipopts *> loose_item)
, ("documentclass", skipopts *> braced *> preamble)
, ("centerline", (para . trimInlines) <$> (skipopts *> tok))
, ("caption", tok >>= setCaption)
, ("PandocStartInclude", startInclude)
, ("PandocEndInclude", endInclude)
, ("bibliography", mempty <$ (skipopts *> braced >>=
addMeta "bibliography" . splitBibs))
, ("addbibresource", mempty <$ (skipopts *> braced >>=
addMeta "bibliography" . splitBibs))
] ++ map ignoreBlocks
-- these commands will be ignored unless --parse-raw is specified,
-- in which case they will appear as raw latex blocks
[ "newcommand", "renewcommand", "newenvironment", "renewenvironment"
-- newcommand, etc. should be parsed by macro, but we need this
-- here so these aren't parsed as inline commands to ignore
, "special", "pdfannot", "pdfstringdef"
, "bibliographystyle"
, "maketitle", "makeindex", "makeglossary"
, "addcontentsline", "addtocontents", "addtocounter"
-- \ignore{} is used conventionally in literate haskell for definitions
-- that are to be processed by the compiler but not printed.
, "ignore"
, "hyperdef"
, "markboth", "markright", "markleft"
, "hspace", "vspace"
]
addMeta :: ToMetaValue a => String -> a -> LP ()
addMeta field val = updateState $ \st ->
st{ stateMeta = addMetaField field val $ stateMeta st }
splitBibs :: String -> [Inlines]
splitBibs = map (str . flip replaceExtension "bib" . trim) . splitBy (==',')
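-- Illustration (assumed argument): splitBibs "refs, extra.bib" yields
-- [str "refs.bib", str "extra.bib"]; each comma-separated name is trimmed and
-- given a .bib extension.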
setCaption :: Inlines -> LP Blocks
setCaption ils = do
updateState $ \st -> st{ stateCaption = Just ils }
return mempty
resetCaption :: LP ()
resetCaption = updateState $ \st -> st{ stateCaption = Nothing }
authors :: LP ()
authors = try $ do
char '{'
let oneAuthor = mconcat <$>
many1 (notFollowedBy' (controlSeq "and") >>
(inline <|> mempty <$ blockCommand))
-- skip e.g. \vspace{10pt}
auths <- sepBy oneAuthor (controlSeq "and")
char '}'
addMeta "author" (map trimInlines auths)
section :: Attr -> Int -> LP Blocks
section (ident, classes, kvs) lvl = do
hasChapters <- stateHasChapters `fmap` getState
let lvl' = if hasChapters then lvl + 1 else lvl
skipopts
contents <- grouped inline
lab <- option ident $ try (spaces >> controlSeq "label" >> spaces >> braced)
attr' <- registerHeader (lab, classes, kvs) contents
return $ headerWith attr' lvl' contents
inlineCommand :: LP Inlines
inlineCommand = try $ do
name <- anyControlSeq
guard $ name /= "begin" && name /= "end"
guard $ not $ isBlockCommand name
parseRaw <- getOption readerParseRaw
star <- option "" (string "*")
let name' = name ++ star
let raw = do
rawargs <- withRaw (skipopts *> option "" dimenarg *> many braced)
let rawcommand = '\\' : name ++ star ++ snd rawargs
transformed <- applyMacros' rawcommand
if transformed /= rawcommand
then parseFromString inlines transformed
else if parseRaw
then return $ rawInline "latex" rawcommand
else return mempty
case M.lookup name' inlineCommands of
Just p -> p <|> raw
Nothing -> case M.lookup name inlineCommands of
Just p -> p <|> raw
Nothing -> raw
unlessParseRaw :: LP ()
unlessParseRaw = getOption readerParseRaw >>= guard . not
isBlockCommand :: String -> Bool
isBlockCommand s = maybe False (const True) $ M.lookup s blockCommands
inlineCommands :: M.Map String (LP Inlines)
inlineCommands = M.fromList $
[ ("emph", emph <$> tok)
, ("textit", emph <$> tok)
, ("textsl", emph <$> tok)
, ("textsc", smallcaps <$> tok)
, ("sout", strikeout <$> tok)
, ("textsuperscript", superscript <$> tok)
, ("textsubscript", subscript <$> tok)
, ("textbackslash", lit "\\")
, ("backslash", lit "\\")
, ("slash", lit "/")
, ("textbf", strong <$> tok)
, ("textnormal", spanWith ("",["nodecor"],[]) <$> tok)
, ("ldots", lit "…")
, ("dots", lit "…")
, ("mdots", lit "…")
, ("sim", lit "~")
, ("label", unlessParseRaw >> (inBrackets <$> tok))
, ("ref", unlessParseRaw >> (inBrackets <$> tok))
, ("(", mathInline $ manyTill anyChar (try $ string "\\)"))
, ("[", mathDisplay $ manyTill anyChar (try $ string "\\]"))
, ("ensuremath", mathInline $ braced)
, ("P", lit "¶")
, ("S", lit "§")
, ("$", lit "$")
, ("%", lit "%")
, ("&", lit "&")
, ("#", lit "#")
, ("_", lit "_")
, ("{", lit "{")
, ("}", lit "}")
-- old TeX commands
, ("em", emph <$> inlines)
, ("it", emph <$> inlines)
, ("sl", emph <$> inlines)
, ("bf", strong <$> inlines)
, ("rm", inlines)
, ("itshape", emph <$> inlines)
, ("slshape", emph <$> inlines)
, ("scshape", smallcaps <$> inlines)
, ("bfseries", strong <$> inlines)
, ("/", pure mempty) -- italic correction
, ("aa", lit "å")
, ("AA", lit "Å")
, ("ss", lit "ß")
, ("o", lit "ø")
, ("O", lit "Ø")
, ("L", lit "Ł")
, ("l", lit "ł")
, ("ae", lit "æ")
, ("AE", lit "Æ")
, ("oe", lit "œ")
, ("OE", lit "Œ")
, ("pounds", lit "£")
, ("euro", lit "€")
, ("copyright", lit "©")
, ("textasciicircum", lit "^")
, ("textasciitilde", lit "~")
, ("`", option (str "`") $ try $ tok >>= accent grave)
, ("'", option (str "'") $ try $ tok >>= accent acute)
, ("^", option (str "^") $ try $ tok >>= accent circ)
, ("~", option (str "~") $ try $ tok >>= accent tilde)
, ("\"", option (str "\"") $ try $ tok >>= accent umlaut)
, (".", option (str ".") $ try $ tok >>= accent dot)
, ("=", option (str "=") $ try $ tok >>= accent macron)
, ("c", option (str "c") $ try $ tok >>= accent cedilla)
, ("v", option (str "v") $ try $ tok >>= accent hacek)
, ("u", option (str "u") $ try $ tok >>= accent breve)
, ("i", lit "i")
, ("\\", linebreak <$ (optional (bracketed inline) *> optional sp))
, (",", pure mempty)
, ("@", pure mempty)
, (" ", lit "\160")
, ("ps", pure $ str "PS." <> space)
, ("TeX", lit "TeX")
, ("LaTeX", lit "LaTeX")
, ("bar", lit "|")
, ("textless", lit "<")
, ("textgreater", lit ">")
, ("thanks", (note . mconcat) <$> (char '{' *> manyTill block (char '}')))
, ("footnote", (note . mconcat) <$> (char '{' *> manyTill block (char '}')))
, ("verb", doverb)
, ("lstinline", doverb)
, ("Verb", doverb)
, ("texttt", (code . stringify . toList) <$> tok)
, ("url", (unescapeURL <$> braced) >>= \url ->
pure (link url "" (str url)))
, ("href", (unescapeURL <$> braced <* optional sp) >>= \url ->
tok >>= \lab ->
pure (link url "" lab))
, ("includegraphics", skipopts *> (unescapeURL <$> braced) >>= mkImage)
, ("enquote", enquote)
, ("cite", citation "cite" AuthorInText False)
, ("citep", citation "citep" NormalCitation False)
, ("citep*", citation "citep*" NormalCitation False)
, ("citeal", citation "citeal" NormalCitation False)
, ("citealp", citation "citealp" NormalCitation False)
, ("citealp*", citation "citealp*" NormalCitation False)
, ("autocite", citation "autocite" NormalCitation False)
, ("footcite", inNote <$> citation "footcite" NormalCitation False)
, ("parencite", citation "parencite" NormalCitation False)
, ("supercite", citation "supercite" NormalCitation False)
, ("footcitetext", inNote <$> citation "footcitetext" NormalCitation False)
, ("citeyearpar", citation "citeyearpar" SuppressAuthor False)
, ("citeyear", citation "citeyear" SuppressAuthor False)
, ("autocite*", citation "autocite*" SuppressAuthor False)
, ("cite*", citation "cite*" SuppressAuthor False)
, ("parencite*", citation "parencite*" SuppressAuthor False)
, ("textcite", citation "textcite" AuthorInText False)
, ("citet", citation "citet" AuthorInText False)
, ("citet*", citation "citet*" AuthorInText False)
, ("citealt", citation "citealt" AuthorInText False)
, ("citealt*", citation "citealt*" AuthorInText False)
, ("textcites", citation "textcites" AuthorInText True)
, ("cites", citation "cites" NormalCitation True)
, ("autocites", citation "autocites" NormalCitation True)
, ("footcites", inNote <$> citation "footcites" NormalCitation True)
, ("parencites", citation "parencites" NormalCitation True)
, ("supercites", citation "supercites" NormalCitation True)
, ("footcitetexts", inNote <$> citation "footcitetexts" NormalCitation True)
, ("Autocite", citation "Autocite" NormalCitation False)
, ("Footcite", citation "Footcite" NormalCitation False)
, ("Parencite", citation "Parencite" NormalCitation False)
, ("Supercite", citation "Supercite" NormalCitation False)
, ("Footcitetext", inNote <$> citation "Footcitetext" NormalCitation False)
, ("Citeyearpar", citation "Citeyearpar" SuppressAuthor False)
, ("Citeyear", citation "Citeyear" SuppressAuthor False)
, ("Autocite*", citation "Autocite*" SuppressAuthor False)
, ("Cite*", citation "Cite*" SuppressAuthor False)
, ("Parencite*", citation "Parencite*" SuppressAuthor False)
, ("Textcite", citation "Textcite" AuthorInText False)
, ("Textcites", citation "Textcites" AuthorInText True)
, ("Cites", citation "Cites" NormalCitation True)
, ("Autocites", citation "Autocites" NormalCitation True)
, ("Footcites", citation "Footcites" NormalCitation True)
, ("Parencites", citation "Parencites" NormalCitation True)
, ("Supercites", citation "Supercites" NormalCitation True)
, ("Footcitetexts", inNote <$> citation "Footcitetexts" NormalCitation True)
, ("citetext", complexNatbibCitation NormalCitation)
, ("citeauthor", (try (tok *> optional sp *> controlSeq "citetext") *>
complexNatbibCitation AuthorInText)
<|> citation "citeauthor" AuthorInText False)
, ("nocite", mempty <$ (citation "nocite" NormalCitation False >>=
addMeta "nocite"))
] ++ map ignoreInlines
-- these commands will be ignored unless --parse-raw is specified,
-- in which case they will appear as raw latex blocks:
[ "noindent", "index" ]
mkImage :: String -> LP Inlines
mkImage src = do
let alt = str "image"
case takeExtension src of
"" -> do
defaultExt <- getOption readerDefaultImageExtension
return $ image (addExtension src defaultExt) "" alt
_ -> return $ image src "" alt
inNote :: Inlines -> Inlines
inNote ils =
note $ para $ ils <> str "."
unescapeURL :: String -> String
unescapeURL ('\\':x:xs) | isEscapable x = x:unescapeURL xs
where isEscapable c = c `elem` "#$%&~_^\\{}"
unescapeURL (x:xs) = x:unescapeURL xs
unescapeURL [] = ""
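-- Illustration (assumed input): applied to "a\_b\%20c" the function drops the
-- protective backslashes before '_' and '%', giving "a_b%20c"; a backslash
-- before a character outside the escapable set above is left untouched.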
enquote :: LP Inlines
enquote = do
skipopts
context <- stateQuoteContext <$> getState
if context == InDoubleQuote
then singleQuoted <$> withQuoteContext InSingleQuote tok
else doubleQuoted <$> withQuoteContext InDoubleQuote tok
doverb :: LP Inlines
doverb = do
marker <- anyChar
code <$> manyTill (satisfy (/='\n')) (char marker)
doLHSverb :: LP Inlines
doLHSverb = codeWith ("",["haskell"],[]) <$> manyTill (satisfy (/='\n')) (char '|')
lit :: String -> LP Inlines
lit = pure . str
accent :: (Char -> String) -> Inlines -> LP Inlines
accent f ils =
case toList ils of
(Str (x:xs) : ys) -> return $ fromList $ (Str (f x ++ xs) : ys)
[] -> mzero
_ -> return ils
grave :: Char -> String
grave 'A' = "À"
grave 'E' = "È"
grave 'I' = "Ì"
grave 'O' = "Ò"
grave 'U' = "Ù"
grave 'a' = "à"
grave 'e' = "è"
grave 'i' = "ì"
grave 'o' = "ò"
grave 'u' = "ù"
grave c = [c]
acute :: Char -> String
acute 'A' = "Á"
acute 'E' = "É"
acute 'I' = "Í"
acute 'O' = "Ó"
acute 'U' = "Ú"
acute 'Y' = "Ý"
acute 'a' = "á"
acute 'e' = "é"
acute 'i' = "í"
acute 'o' = "ó"
acute 'u' = "ú"
acute 'y' = "ý"
acute 'C' = "Ć"
acute 'c' = "ć"
acute 'L' = "Ĺ"
acute 'l' = "ĺ"
acute 'N' = "Ń"
acute 'n' = "ń"
acute 'R' = "Ŕ"
acute 'r' = "ŕ"
acute 'S' = "Ś"
acute 's' = "ś"
acute 'Z' = "Ź"
acute 'z' = "ź"
acute c = [c]
circ :: Char -> String
circ 'A' = "Â"
circ 'E' = "Ê"
circ 'I' = "Î"
circ 'O' = "Ô"
circ 'U' = "Û"
circ 'a' = "â"
circ 'e' = "ê"
circ 'i' = "î"
circ 'o' = "ô"
circ 'u' = "û"
circ 'C' = "Ĉ"
circ 'c' = "ĉ"
circ 'G' = "Ĝ"
circ 'g' = "ĝ"
circ 'H' = "Ĥ"
circ 'h' = "ĥ"
circ 'J' = "Ĵ"
circ 'j' = "ĵ"
circ 'S' = "Ŝ"
circ 's' = "ŝ"
circ 'W' = "Ŵ"
circ 'w' = "ŵ"
circ 'Y' = "Ŷ"
circ 'y' = "ŷ"
circ c = [c]
tilde :: Char -> String
tilde 'A' = "Ã"
tilde 'a' = "ã"
tilde 'O' = "Õ"
tilde 'o' = "õ"
tilde 'I' = "Ĩ"
tilde 'i' = "ĩ"
tilde 'U' = "Ũ"
tilde 'u' = "ũ"
tilde 'N' = "Ñ"
tilde 'n' = "ñ"
tilde c = [c]
umlaut :: Char -> String
umlaut 'A' = "Ä"
umlaut 'E' = "Ë"
umlaut 'I' = "Ï"
umlaut 'O' = "Ö"
umlaut 'U' = "Ü"
umlaut 'a' = "ä"
umlaut 'e' = "ë"
umlaut 'i' = "ï"
umlaut 'o' = "ö"
umlaut 'u' = "ü"
umlaut c = [c]
dot :: Char -> String
dot 'C' = "Ċ"
dot 'c' = "ċ"
dot 'E' = "Ė"
dot 'e' = "ė"
dot 'G' = "Ġ"
dot 'g' = "ġ"
dot 'I' = "İ"
dot 'Z' = "Ż"
dot 'z' = "ż"
dot c = [c]
macron :: Char -> String
macron 'A' = "Ā"
macron 'E' = "Ē"
macron 'I' = "Ī"
macron 'O' = "Ō"
macron 'U' = "Ū"
macron 'a' = "ā"
macron 'e' = "ē"
macron 'i' = "ī"
macron 'o' = "ō"
macron 'u' = "ū"
macron c = [c]
cedilla :: Char -> String
cedilla 'c' = "ç"
cedilla 'C' = "Ç"
cedilla 's' = "ş"
cedilla 'S' = "Ş"
cedilla 't' = "ţ"
cedilla 'T' = "Ţ"
cedilla 'e' = "ȩ"
cedilla 'E' = "Ȩ"
cedilla 'h' = "ḩ"
cedilla 'H' = "Ḩ"
cedilla 'o' = "o̧"
cedilla 'O' = "O̧"
cedilla c = [c]
hacek :: Char -> String
hacek 'A' = "Ǎ"
hacek 'a' = "ǎ"
hacek 'C' = "Č"
hacek 'c' = "č"
hacek 'D' = "Ď"
hacek 'd' = "ď"
hacek 'E' = "Ě"
hacek 'e' = "ě"
hacek 'G' = "Ǧ"
hacek 'g' = "ǧ"
hacek 'H' = "Ȟ"
hacek 'h' = "ȟ"
hacek 'I' = "Ǐ"
hacek 'i' = "ǐ"
hacek 'j' = "ǰ"
hacek 'K' = "Ǩ"
hacek 'k' = "ǩ"
hacek 'L' = "Ľ"
hacek 'l' = "ľ"
hacek 'N' = "Ň"
hacek 'n' = "ň"
hacek 'O' = "Ǒ"
hacek 'o' = "ǒ"
hacek 'R' = "Ř"
hacek 'r' = "ř"
hacek 'S' = "Š"
hacek 's' = "š"
hacek 'T' = "Ť"
hacek 't' = "ť"
hacek 'U' = "Ǔ"
hacek 'u' = "ǔ"
hacek 'Z' = "Ž"
hacek 'z' = "ž"
hacek c = [c]
breve :: Char -> String
breve 'A' = "Ă"
breve 'a' = "ă"
breve 'E' = "Ĕ"
breve 'e' = "ĕ"
breve 'G' = "Ğ"
breve 'g' = "ğ"
breve 'I' = "Ĭ"
breve 'i' = "ĭ"
breve 'O' = "Ŏ"
breve 'o' = "ŏ"
breve 'U' = "Ŭ"
breve 'u' = "ŭ"
breve c = [c]
tok :: LP Inlines
tok = try $ grouped inline <|> inlineCommand <|> str <$> (count 1 $ inlineChar)
opt :: LP Inlines
opt = bracketed inline <* optional sp
skipopts :: LP ()
skipopts = skipMany opt
inlineText :: LP Inlines
inlineText = str <$> many1 inlineChar
inlineChar :: LP Char
inlineChar = noneOf "\\$%^_&~#{}^'`\"‘’“”-[] \t\n"
environment :: LP Blocks
environment = do
controlSeq "begin"
name <- braced
case M.lookup name environments of
Just p -> p <|> rawEnv name
Nothing -> rawEnv name
rawEnv :: String -> LP Blocks
rawEnv name = do
let addBegin x = "\\begin{" ++ name ++ "}" ++ x
parseRaw <- getOption readerParseRaw
if parseRaw
then (rawBlock "latex" . addBegin) <$>
(withRaw (env name blocks) >>= applyMacros' . snd)
else env name blocks
----
type IncludeParser = ParsecT [Char] [String] IO String
-- | Replace "include" commands with file contents.
handleIncludes :: String -> IO String
handleIncludes s = do
res <- runParserT includeParser' [] "input" s
case res of
Right s' -> return s'
Left e -> error $ show e
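-- Usage sketch (editorial addition; the calling code is assumed, not taken
-- from this module): include resolution needs IO, so a caller would run it
-- before the pure parser, e.g.
--
-- > doc <- readLaTeX opts <$> handleIncludes rawInput
--
-- where rawInput is the original source text.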
includeParser' :: IncludeParser
includeParser' =
concat <$> many (comment' <|> escaped' <|> blob' <|> include'
<|> startMarker' <|> endMarker'
<|> verbCmd' <|> verbatimEnv' <|> backslash')
comment' :: IncludeParser
comment' = do
char '%'
xs <- manyTill anyChar newline
return ('%':xs ++ "\n")
escaped' :: IncludeParser
escaped' = try $ string "\\%" <|> string "\\\\"
verbCmd' :: IncludeParser
verbCmd' = fmap snd <$>
withRaw $ try $ do
string "\\verb"
c <- anyChar
manyTill anyChar (char c)
verbatimEnv' :: IncludeParser
verbatimEnv' = fmap snd <$>
withRaw $ try $ do
string "\\begin"
name <- braced'
guard $ name `elem` ["verbatim", "Verbatim", "lstlisting",
"minted", "alltt"]
manyTill anyChar (try $ string $ "\\end{" ++ name ++ "}")
blob' :: IncludeParser
blob' = try $ many1 (noneOf "\\%")
backslash' :: IncludeParser
backslash' = string "\\"
braced' :: IncludeParser
braced' = try $ char '{' *> manyTill (satisfy (/='}')) (char '}')
include' :: IncludeParser
include' = do
fs' <- try $ do
char '\\'
name <- try (string "include")
<|> try (string "input")
<|> string "usepackage"
-- skip options
skipMany $ try $ char '[' *> (manyTill anyChar (char ']'))
fs <- (map trim . splitBy (==',')) <$> braced'
return $ if name == "usepackage"
then map (flip replaceExtension ".sty") fs
else map (flip replaceExtension ".tex") fs
pos <- getPosition
containers <- getState
let fn = case containers of
(f':_) -> f'
[] -> "input"
-- now process each include file in order...
rest <- getInput
results' <- forM fs' (\f -> do
when (f `elem` containers) $
fail "Include file loop!"
contents <- lift $ readTeXFile f
return $ "\\PandocStartInclude{" ++ f ++ "}" ++
contents ++ "\\PandocEndInclude{" ++
fn ++ "}{" ++ show (sourceLine pos) ++ "}{"
++ show (sourceColumn pos) ++ "}")
setInput $ concat results' ++ rest
return ""
startMarker' :: IncludeParser
startMarker' = try $ do
string "\\PandocStartInclude"
fn <- braced'
updateState (fn:)
setPosition $ newPos fn 1 1
return $ "\\PandocStartInclude{" ++ fn ++ "}"
endMarker' :: IncludeParser
endMarker' = try $ do
string "\\PandocEndInclude"
fn <- braced'
ln <- braced'
co <- braced'
updateState tail
setPosition $ newPos fn (fromMaybe 1 $ safeRead ln) (fromMaybe 1 $ safeRead co)
return $ "\\PandocEndInclude{" ++ fn ++ "}{" ++ ln ++ "}{" ++
co ++ "}"
readTeXFile :: FilePath -> IO String
readTeXFile f = do
texinputs <- E.catch (getEnv "TEXINPUTS") $ \(_ :: E.SomeException) ->
return "."
let ds = splitBy (==':') texinputs
readFileFromDirs ds f
readFileFromDirs :: [FilePath] -> FilePath -> IO String
readFileFromDirs [] _ = return ""
readFileFromDirs (d:ds) f =
E.catch (UTF8.readFile $ d </> f) $ \(_ :: E.SomeException) ->
readFileFromDirs ds f
----
keyval :: LP (String, String)
keyval = try $ do
key <- many1 alphaNum
val <- option "" $ char '=' >> many1 alphaNum
skipMany spaceChar
optional (char ',')
skipMany spaceChar
return (key, val)
keyvals :: LP [(String, String)]
keyvals = try $ char '[' *> manyTill keyval (char ']')
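-- Illustration (assumed input): keyvals applied to "[numbers=left,frame]"
-- yields [("numbers","left"),("frame","")]; a key without '=' gets an empty
-- value.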
alltt :: String -> LP Blocks
alltt t = walk strToCode <$> parseFromString blocks
(substitute " " "\\ " $ substitute "%" "\\%" $
concat $ intersperse "\\\\\n" $ lines t)
where strToCode (Str s) = Code nullAttr s
strToCode x = x
rawLaTeXBlock :: Parser [Char] ParserState String
rawLaTeXBlock = snd <$> try (withRaw (environment <|> blockCommand))
rawLaTeXInline :: Parser [Char] ParserState Inline
rawLaTeXInline = do
raw <- (snd <$> withRaw inlineCommand) <|> (snd <$> withRaw blockCommand)
RawInline "latex" <$> applyMacros' raw
addImageCaption :: Blocks -> LP Blocks
addImageCaption = walkM go
where go (Image alt (src,tit)) = do
mbcapt <- stateCaption <$> getState
case mbcapt of
Just ils -> return (Image (toList ils) (src, "fig:"))
Nothing -> return (Image alt (src,tit))
go x = return x
addTableCaption :: Blocks -> LP Blocks
addTableCaption = walkM go
where go (Table c als ws hs rs) = do
mbcapt <- stateCaption <$> getState
case mbcapt of
Just ils -> return (Table (toList ils) als ws hs rs)
Nothing -> return (Table c als ws hs rs)
go x = return x
environments :: M.Map String (LP Blocks)
environments = M.fromList
[ ("document", env "document" blocks <* skipMany anyChar)
, ("letter", env "letter" letter_contents)
, ("figure", env "figure" $
resetCaption *> skipopts *> blocks >>= addImageCaption)
, ("center", env "center" blocks)
, ("table", env "table" $
resetCaption *> skipopts *> blocks >>= addTableCaption)
, ("tabular", env "tabular" simpTable)
, ("quote", blockQuote <$> env "quote" blocks)
, ("quotation", blockQuote <$> env "quotation" blocks)
, ("verse", blockQuote <$> env "verse" blocks)
, ("itemize", bulletList <$> listenv "itemize" (many item))
, ("description", definitionList <$> listenv "description" (many descItem))
, ("enumerate", ordered_list)
, ("alltt", alltt =<< verbEnv "alltt")
, ("code", guardEnabled Ext_literate_haskell *>
(codeBlockWith ("",["sourceCode","literate","haskell"],[]) <$>
verbEnv "code"))
, ("verbatim", codeBlock <$> (verbEnv "verbatim"))
, ("Verbatim", do options <- option [] keyvals
let kvs = [ (if k == "firstnumber"
then "startFrom"
else k, v) | (k,v) <- options ]
let classes = [ "numberLines" |
lookup "numbers" options == Just "left" ]
let attr = ("",classes,kvs)
codeBlockWith attr <$> (verbEnv "Verbatim"))
, ("lstlisting", do options <- option [] keyvals
let kvs = [ (if k == "firstnumber"
then "startFrom"
else k, v) | (k,v) <- options ]
let classes = [ "numberLines" |
lookup "numbers" options == Just "left" ]
++ maybe [] (:[]) (lookup "language" options
>>= fromListingsLanguage)
let attr = (fromMaybe "" (lookup "label" options),classes,kvs)
codeBlockWith attr <$> (verbEnv "lstlisting"))
, ("minted", do options <- option [] keyvals
lang <- grouped (many1 $ satisfy (/='}'))
let kvs = [ (if k == "firstnumber"
then "startFrom"
else k, v) | (k,v) <- options ]
let classes = [ lang | not (null lang) ] ++
[ "numberLines" |
lookup "linenos" options == Just "true" ]
let attr = ("",classes,kvs)
codeBlockWith attr <$> (verbEnv "minted"))
, ("obeylines", parseFromString
(para . trimInlines . mconcat <$> many inline) =<<
intercalate "\\\\\n" . lines <$> verbEnv "obeylines")
, ("displaymath", mathEnv Nothing "displaymath")
, ("equation", mathEnv Nothing "equation")
, ("equation*", mathEnv Nothing "equation*")
, ("gather", mathEnv (Just "gathered") "gather")
, ("gather*", mathEnv (Just "gathered") "gather*")
, ("multline", mathEnv (Just "gathered") "multline")
, ("multline*", mathEnv (Just "gathered") "multline*")
, ("eqnarray", mathEnv (Just "aligned") "eqnarray")
, ("eqnarray*", mathEnv (Just "aligned") "eqnarray*")
, ("align", mathEnv (Just "aligned") "align")
, ("align*", mathEnv (Just "aligned") "align*")
, ("alignat", mathEnv (Just "aligned") "alignat")
, ("alignat*", mathEnv (Just "aligned") "alignat*")
]
letter_contents :: LP Blocks
letter_contents = do
bs <- blocks
st <- getState
-- add signature (author) and address (title)
let addr = case lookupMeta "address" (stateMeta st) of
Just (MetaBlocks [Plain xs]) ->
para $ trimInlines $ fromList xs
_ -> mempty
return $ addr <> bs -- sig added by \closing
closing :: LP Blocks
closing = do
contents <- tok
st <- getState
let extractInlines (MetaBlocks [Plain ys]) = ys
extractInlines (MetaBlocks [Para ys ]) = ys
extractInlines _ = []
let sigs = case lookupMeta "author" (stateMeta st) of
Just (MetaList xs) ->
para $ trimInlines $ fromList $
intercalate [LineBreak] $ map extractInlines xs
_ -> mempty
return $ para (trimInlines contents) <> sigs
item :: LP Blocks
item = blocks *> controlSeq "item" *> skipopts *> blocks
loose_item :: LP Blocks
loose_item = do
ctx <- stateParserContext `fmap` getState
if ctx == ListItemState
then mzero
else return mempty
descItem :: LP (Inlines, [Blocks])
descItem = do
blocks -- skip blocks before item
controlSeq "item"
optional sp
ils <- opt
bs <- blocks
return (ils, [bs])
env :: String -> LP a -> LP a
env name p = p <*
(try (controlSeq "end" *> braced >>= guard . (== name))
<?> ("\\end{" ++ name ++ "}"))
listenv :: String -> LP a -> LP a
listenv name p = try $ do
oldCtx <- stateParserContext `fmap` getState
updateState $ \st -> st{ stateParserContext = ListItemState }
res <- env name p
updateState $ \st -> st{ stateParserContext = oldCtx }
return res
mathEnv :: Maybe String -> String -> LP Blocks
mathEnv innerEnv name = para <$> mathDisplay (inner <$> verbEnv name)
where inner x = case innerEnv of
Nothing -> x
Just y -> "\\begin{" ++ y ++ "}\n" ++ x ++
"\\end{" ++ y ++ "}"
verbEnv :: String -> LP String
verbEnv name = do
skipopts
optional blankline
let endEnv = try $ controlSeq "end" *> braced >>= guard . (== name)
res <- manyTill anyChar endEnv
return $ stripTrailingNewlines res
ordered_list :: LP Blocks
ordered_list = do
optional sp
(_, style, delim) <- option (1, DefaultStyle, DefaultDelim) $
try $ char '[' *> anyOrderedListMarker <* char ']'
spaces
optional $ try $ controlSeq "setlength" *> grouped (controlSeq "itemindent") *> braced
spaces
start <- option 1 $ try $ do controlSeq "setcounter"
grouped (string "enum" *> many1 (oneOf "iv"))
optional sp
num <- grouped (many1 digit)
spaces
return $ (read num + 1 :: Int)
bs <- listenv "enumerate" (many item)
return $ orderedListWith (start, style, delim) bs
paragraph :: LP Blocks
paragraph = do
x <- trimInlines . mconcat <$> many1 inline
if x == mempty
then return mempty
else return $ para x
preamble :: LP Blocks
preamble = mempty <$> manyTill preambleBlock beginDoc
where beginDoc = lookAhead $ controlSeq "begin" *> string "{document}"
preambleBlock = (void comment)
<|> (void sp)
<|> (void blanklines)
<|> (void macro)
<|> (void blockCommand)
<|> (void anyControlSeq)
<|> (void braced)
<|> (void anyChar)
-------
-- citations
addPrefix :: [Inline] -> [Citation] -> [Citation]
addPrefix p (k:ks) = k {citationPrefix = p ++ citationPrefix k} : ks
addPrefix _ _ = []
addSuffix :: [Inline] -> [Citation] -> [Citation]
addSuffix s ks@(_:_) =
let k = last ks
in init ks ++ [k {citationSuffix = citationSuffix k ++ s}]
addSuffix _ _ = []
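-- Both helpers in one assumed example: given citations [a, b], addPrefix p
-- attaches p in front of a's existing prefix only, and addSuffix s appends s
-- to b's existing suffix only; the citation at the other end is untouched.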
simpleCiteArgs :: LP [Citation]
simpleCiteArgs = try $ do
first <- optionMaybe $ toList <$> opt
second <- optionMaybe $ toList <$> opt
char '{'
optional sp
keys <- manyTill citationLabel (char '}')
let (pre, suf) = case (first , second ) of
(Just s , Nothing) -> (mempty, s )
(Just s , Just t ) -> (s , t )
_ -> (mempty, mempty)
conv k = Citation { citationId = k
, citationPrefix = []
, citationSuffix = []
, citationMode = NormalCitation
, citationHash = 0
, citationNoteNum = 0
}
return $ addPrefix pre $ addSuffix suf $ map conv keys
citationLabel :: LP String
citationLabel = optional sp *>
(many1 (satisfy isBibtexKeyChar)
<* optional sp
<* optional (char ',')
<* optional sp)
where isBibtexKeyChar c = isAlphaNum c || c `elem` ".:;?!`'()/*@_+=-[]*"
cites :: CitationMode -> Bool -> LP [Citation]
cites mode multi = try $ do
cits <- if multi
then many1 simpleCiteArgs
else count 1 simpleCiteArgs
let cs = concat cits
return $ case mode of
AuthorInText -> case cs of
(c:rest) -> c {citationMode = mode} : rest
[] -> []
_ -> map (\a -> a {citationMode = mode}) cs
citation :: String -> CitationMode -> Bool -> LP Inlines
citation name mode multi = do
(c,raw) <- withRaw $ cites mode multi
return $ cite c (rawInline "latex" $ "\\" ++ name ++ raw)
complexNatbibCitation :: CitationMode -> LP Inlines
complexNatbibCitation mode = try $ do
let ils = (toList . trimInlines . mconcat) <$>
many (notFollowedBy (oneOf "\\};") >> inline)
let parseOne = try $ do
skipSpaces
pref <- ils
cit' <- inline -- expect a citation
let citlist = toList cit'
cits' <- case citlist of
[Cite cs _] -> return cs
_ -> mzero
suff <- ils
skipSpaces
optional $ char ';'
return $ addPrefix pref $ addSuffix suff $ cits'
(c:cits, raw) <- withRaw $ grouped parseOne
return $ cite (c{ citationMode = mode }:cits)
(rawInline "latex" $ "\\citetext" ++ raw)
-- tables
parseAligns :: LP [Alignment]
parseAligns = try $ do
char '{'
let maybeBar = skipMany $ sp <|> () <$ char '|' <|> () <$ try (string "@{}")
maybeBar
let cAlign = AlignCenter <$ char 'c'
let lAlign = AlignLeft <$ char 'l'
let rAlign = AlignRight <$ char 'r'
let parAlign = AlignLeft <$ (char 'p' >> braced)
let alignChar = cAlign <|> lAlign <|> rAlign <|> parAlign
aligns' <- sepEndBy alignChar maybeBar
spaces
char '}'
spaces
return aligns'
hline :: LP ()
hline = () <$ (try $ spaces >> controlSeq "hline")
lbreak :: LP ()
lbreak = () <$ (try $ spaces *> controlSeq "\\")
amp :: LP ()
amp = () <$ (try $ spaces *> char '&')
parseTableRow :: Int -- ^ number of columns
-> LP [Blocks]
parseTableRow cols = try $ do
let tableCellInline = notFollowedBy (amp <|> lbreak) >> inline
let tableCell = (plain . trimInlines . mconcat) <$> many tableCellInline
cells' <- sepBy tableCell amp
guard $ length cells' == cols
spaces
return cells'
simpTable :: LP Blocks
simpTable = try $ do
spaces
aligns <- parseAligns
let cols = length aligns
optional hline
header' <- option [] $ try (parseTableRow cols <* lbreak <* hline)
rows <- sepEndBy (parseTableRow cols) (lbreak <* optional hline)
spaces
let header'' = if null header'
then replicate cols mempty
else header'
lookAhead $ controlSeq "end" -- make sure we're at end
return $ table mempty (zip aligns (repeat 0)) header'' rows
startInclude :: LP Blocks
startInclude = do
fn <- braced
setPosition $ newPos fn 1 1
return mempty
endInclude :: LP Blocks
endInclude = do
fn <- braced
ln <- braced
co <- braced
setPosition $ newPos fn (fromMaybe 1 $ safeRead ln) (fromMaybe 1 $ safeRead co)
return mempty
| nickbart1980/pandoc | src/Text/Pandoc/Readers/LaTeX.hs | gpl-2.0 | 42,630 | 0 | 29 | 11,494 | 14,871 | 7,654 | 7,217 | 1,099 | 5 |
module Handler.AufgabenAktuell where
import Import
import Data.Set (fromList)
import Control.Types
import Handler.Aufgaben (aufgabenListe)
getAufgabenAktuellR :: VorlesungId -> Handler Html
getAufgabenAktuellR = aufgabenListe $ fromList [Current]
| marcellussiegburg/autotool | yesod/Handler/AufgabenAktuell.hs | gpl-2.0 | 250 | 0 | 7 | 28 | 61 | 35 | 26 | 7 | 1 |
{- |
Module : $Header$
Description : To be replaced by SoftFOL.DFGParser
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module Search.SPASS.DFGParser where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Prim
import qualified Text.ParserCombinators.Parsec.Token as PT
import Search.SPASS.Sign
-- ----------------------------------------------
-- * SPASS Language Definition
-- ----------------------------------------------
spassDef :: PT.LanguageDef st
spassDef
= PT.LanguageDef
    { PT.commentStart   = ""   -- "{*"
    , PT.commentEnd     = ""   -- "*}"
, PT.commentLine = "%"
, PT.nestedComments = False
    , PT.identStart     = letter <|> digit -- digit does not conform to the DFG syntax definition, but is needed for MPTP.
, PT.identLetter = alphaNum <|> oneOf "_'"
    , PT.opStart        = letter -- not needed
    , PT.opLetter       = letter -- not needed
, PT.reservedOpNames= []
, PT.reservedNames = ["forall", "exists", "equal", "true", "false", "or", "and", "not", "implies", "implied", "equiv", "xor"]
, PT.caseSensitive = True
}
-- helpers ----------------------------------------------------------
lexer :: PT.TokenParser st
lexer = PT.makeTokenParser spassDef
comma = PT.comma lexer
dot = PT.dot lexer
commaSep1 = PT.commaSep1 lexer
parens = PT.parens lexer
squares = PT.squares lexer
symbolT = PT.symbol lexer
natural = PT.natural lexer
whiteSpace = PT.whiteSpace lexer
--parensDot :: Text.ParserCombinators.Parsec.Char.CharParser st a -> Text.ParserCombinators.Parsec.Prim.GenParser Char st a
parensDot p = parens p << dot
squaresDot p = squares p << dot
text = string "{*" >> (manyTill anyChar (try (string "*}")))
{-
*SPASS.Parser> run text "{* mein Kommentar *}"
" mein Kommentar "
-}
identifierT = PT.identifier lexer
list_of sort = string "list_of_" >> string sort
list_of_dot sort = list_of sort >> dot
end_of_list = symbolT "end_of_list."
oneOfTokens ls = choice (map (try . symbolT) ls)
{-
*SPASS.Parser> run (oneOfTokens ["ab","cd"]) "abcd"
"ab"
-}
mapTokensToData ls = choice (map (try . tokenToData) ls)
where tokenToData (s,t) = symbolT s >> return t
maybeParser p = option Nothing (do {r <- p; return (Just r)})
parseSPASS = whiteSpace >> problem
-- ** SPASS Problem
problem :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPProblem
problem = do symbolT "begin_problem"
i <- parensDot identifierT
dl <- description_list
lp <- logical_part
-- s <- settings -- not yet supported!
symbolT "end_problem."
return (SPProblem
{identifier = i,
description = dl,
logicalPart = lp,
settings = []})
-- ** SPASS Desciptions
{- |
A description is mandatory for a SPASS problem. It has to specify at least
a 'name', the name of the 'author', the 'status' (see also 'SPLogState' below),
and a (verbose) description.
-}
description_list :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPDescription
description_list = do list_of_dot "descriptions"
n <- symbolT "name" >> parensDot text
a <- symbolT "author" >> parensDot text
v <- maybeParser (symbolT "version" >> parensDot text)
l <- maybeParser (symbolT "logic" >> parensDot text)
s <- symbolT "status" >> parensDot (mapTokensToData
[("satisfiable",SPStateSatisfiable),
("unsatisfiable",SPStateUnsatisfiable),
("unknown",SPStateUnknown)])
de <- symbolT "description" >> parensDot text
da <- maybeParser (symbolT "date" >> parensDot text)
end_of_list
return (SPDescription
{name = n, author = a, version = v, logic = l,
status = s, desc = de, date = da})
{-
*SPASS.Parser> run description_list "list_of_descriptions.name({* Pelletier's Problem No. 57 *}).author({* Christoph Weidenbach *}).status(unsatisfiable).description({* Problem taken in revised form from the Pelletier Collection, Journal of Automated Reasoning, Vol. 2, No. 2, pages 191-216 *}).end_of_list."
SPDescription {name = " Pelletier's Problem No. 57 ", author = " Christoph Weidenbach ", version = Nothing, logic = Nothing, status = SPStateUnsatisfiable, desc = " Problem taken in revised form from the Pelletier Collection, Journal of Automated Reasoning, Vol. 2, No. 2, pages 191-216 ", date = Nothing}
-}
{- |
The state of a SPASS problem can be satisfiable, unsatisfiable, or unknown.
-}
-- ** SPASS Settings
{- |
We only support one of the three types mentioned here:
<http://spass.mpi-sb.mpg.de/webspass/help/options.html>
-}
-- ** SPASS Logical Parts
{- |
A SPASS logical part consists of a symbol list, a declaration list, and a
set of formula lists. Support for clause lists and proof lists hasn't
been implemented yet.
-}
logical_part :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPLogicalPart
logical_part = do sl <- maybeParser symbol_list
                  --dl <- declaration_list -- not needed for MPTP
fs <- many formula_list
                  --cl <- many clause_list -- not needed for MPTP
                  --pl <- many proof_list -- not needed for MPTP
return (SPLogicalPart
{symbolList = sl,
declarationList = [],
formulaLists = fs})
-- clauseLists :: [SPClauseList],
-- proofLists :: [SPProofList]
-- *** Symbol List
{- |
SPASS Symbol List
-}
symbol_list :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPSymbolList
symbol_list = do list_of_dot "symbols"
fs <- option [] (signSymFor "functions")
ps <- option [] (signSymFor "predicates")
ss <- option [] (signSymFor "sorts")
end_of_list
return (SPSymbolList
{functions = fs,
predicates = ps,
sorts = ss,
operators = [], -- not supported in dfg-syntax version 1.5
quantifiers = []}) -- not supported in dfg-syntax version 1.5
{-
*SPASS.Parser> run symbol_list "list_of_symbols.functions[(f,2), (a,0), (b,0), (c,0)].predicates[(F,2)].end_of_list."
SPSymbolList {functions = [SPSignSym {sym = "f", arity = 2},SPSignSym {sym = "a", arity = 0},SPSignSym {sym = "b", arity = 0},SPSignSym {sym = "c", arity = 0}], predicates = [SPSignSym {sym = "F", arity = 2}], sorts = [], operators = [], quantifiers = []}
-}
signSymFor kind = symbolT kind >> squaresDot (commaSep1 $ parens signSym)
signSym = do s <- identifierT
a <- maybeParser (comma >> natural) -- option Nothing ((do {comma; n <- natural; return (Just n)}))
return (case a
of (Just a) -> SPSignSym {sym = s, arity = fromInteger a}
Nothing -> SPSimpleSignSym s)
-- *** Declaration List
{- |
SPASS Declaration List
-}
--declaration_list
-- *** Formula List
{- |
SPASS Formula List
-}
formula_list :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPFormulaList
formula_list = do list_of "formulae"
ot <- parens (mapTokensToData [("axioms",SPOriginAxioms),
("conjectures",SPOriginConjectures)])
dot
fs <- many (formula (case ot of {SPOriginAxioms -> True; _ -> False}))
end_of_list
return (SPFormulaList { originType = ot,
formulae = fs })
{-
*SPASS.Parser> run formula_list "list_of_formulae(axioms).formula(all([a,b],R(a,b)),bla).end_of_list."
SPFormulaList {originType = SPOriginAxioms, formulae = [NamedSen {senName = "bla", isAxiom = True, isDef = False, sentence = SPQuantTerm {quantSym = SPCustomQuantSym "all", variableList = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")], qFormula = SPComplexTerm {symbol = SPCustomSymbol "R", arguments = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")]}}}]}
*SPASS.Parser> run formula_list "list_of_formulae(axioms).formula(forall([a,b],R(a,b)),bla).end_of_list."
SPFormulaList {originType = SPOriginAxioms, formulae = [NamedSen {senName = "bla", isAxiom = True, isDef = False, sentence = SPQuantTerm {quantSym = SPForall, variableList = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")], qFormula = SPComplexTerm {symbol = SPCustomSymbol "R", arguments = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")]}}}]}
*SPASS.Parser> run formula_list "list_of_formulae(axioms).formula(forall([a,b],equiv(a,b)),bla).end_of_list."
SPFormulaList {originType = SPOriginAxioms, formulae = [NamedSen {senName = "bla", isAxiom = True, isDef = False, sentence = SPQuantTerm {quantSym = SPForall, variableList = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")], qFormula = SPComplexTerm {symbol = SPEquiv, arguments = [SPSimpleTerm (SPCustomSymbol "a"),SPSimpleTerm (SPCustomSymbol "b")]}}}]}
-}
formula :: Bool -> Text.ParserCombinators.Parsec.Prim.GenParser Char st (Search.SPASS.Sign.Named Search.SPASS.Sign.SPTerm)
formula bool = do symbolT "formula"
pos <- getPosition
parensDot (do sen <- term
name <- (option "" (comma >> identifierT))
return (NamedSen
{senName = (show $ sourceLine pos), -- (sourceName pos) ++ " line: " ++ (show $ sourceLine pos) ++ " name:" ++ name,
isAxiom = bool, -- propagated from 'origin_type' of 'list_of_formulae'
                                    isDef = False, -- this originType does not exist
sentence = sen}))
-- *** Terms
{- |
A SPASS Term.
-}
quantification :: Search.SPASS.Sign.SPQuantSym -> Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPTerm
quantification s = do (ts',t') <- parens (do ts <- squares (commaSep1 term) -- todo: var binding should allow only simple terms
comma; t <- term
return (ts,t))
return (SPQuantTerm
{quantSym = s,variableList = ts',qFormula = t'})
application :: Search.SPASS.Sign.SPSymbol -> Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPTerm
application s = do ts <- parens (commaSep1 term)
return (SPComplexTerm
{symbol = s, arguments = ts})
constant :: (Monad m) => Search.SPASS.Sign.SPSymbol -> m Search.SPASS.Sign.SPTerm
constant c = return (SPSimpleTerm c)
term :: Text.ParserCombinators.Parsec.Prim.GenParser Char st Search.SPASS.Sign.SPTerm
term = do s <- identifierT
do {try (quantification (SPCustomQuantSym s))
<|> try (application (SPCustomSymbol s))
<|> (constant (SPCustomSymbol s))}
<|>
do q <- mapTokensToData [("forall",SPForall), ("exists",SPExists)]
quantification q
<|>
do a <- mapTokensToData [("equal",SPEqual), ("or",SPOr), ("and",SPAnd),("not",SPNot),
("xor",SPXor),
("implies",SPImplies), ("implied",SPImplied),("equiv",SPEquiv)]
application a
<|>
do c <- mapTokensToData [("true",SPTrue), ("false",SPFalse)]
constant c
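{-
Illustrative behaviour (paraphrased, not verbatim ghci output):
run term "and(equal(a,b),true)" produces an SPComplexTerm with symbol SPAnd
whose arguments are the SPEqual application of the custom symbols a and b and
the constant SPSimpleTerm SPTrue; the reserved words "and", "equal" and
"true" are not accepted as plain identifiers and are handled by the dedicated
alternatives above.
-}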
{-
For testing
-}
-- ----------------------------------------------
-- * Monad and Functor extensions
-- ----------------------------------------------
bind :: (Monad m) => (a -> b -> c) -> m a -> m b -> m c
bind f p q = do { x <- p; y <- q; return (f x y) }
infixl <<
(<<) :: (Monad m) => m a -> m b -> m a
(<<) = bind const
infixr 5 <:>
(<:>) :: (Monad m) => m a -> m [a] -> m [a]
(<:>) = bind (:)
infixr 5 <++>
(<++>) :: (Monad m) => m [a] -> m [a] -> m [a]
(<++>) = bind (++)
run p input = case (parse p "" input)
of Left err -> error (show err)
Right result -> return result | nevrenato/Hets_Fork | Search/SPASS/DFGParser.hs | gpl-2.0 | 11,823 | 151 | 15 | 2,565 | 2,350 | 1,322 | 1,028 | 154 | 2 |
module Text.Pandoc2 ( module Text.Pandoc2.Definition
, module Text.Pandoc2.Builder
, module Text.Pandoc2.Shared
, module Text.Pandoc2.Reader.Markdown
, module Text.Pandoc2.Writer.HTML
)
where
import Text.Pandoc2.Definition
import Text.Pandoc2.Builder
import Text.Pandoc2.Shared
import Text.Pandoc2.Reader.Markdown
import Text.Pandoc2.Writer.HTML
| jgm/pandoc2 | Text/Pandoc2.hs | gpl-2.0 | 441 | 0 | 5 | 124 | 77 | 54 | 23 | 10 | 0 |
{- |
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
module Mtlstats.Control.NewGame (newGameC) where
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.State (get, gets, modify)
import Data.Maybe (fromJust, fromMaybe, isJust)
import Lens.Micro ((^.), (.~))
import qualified UI.NCurses as C
import Mtlstats.Actions
import Mtlstats.Actions.NewGame
import Mtlstats.Config
import Mtlstats.Control.NewGame.GoalieInput
import Mtlstats.Format
import Mtlstats.Handlers
import Mtlstats.Menu
import Mtlstats.Prompt
import Mtlstats.Prompt.NewGame
import Mtlstats.Report
import Mtlstats.Types
import Mtlstats.Util
-- | Dispatcher for a new game
newGameC :: GameState -> Controller
newGameC gs
| null $ gs^.gameYear = gameYearC
| null $ gs^.gameMonth = gameMonthC
| null $ gs^.gameDay = gameDayC
| null $ gs^.gameType = gameTypeC
| null $ gs^.otherTeam = otherTeamC
| null $ gs^.homeScore = homeScoreC
| null $ gs^.awayScore = awayScoreC
| null $ gs^.overtimeFlag = overtimeFlagC
| not $ gs^.dataVerified = verifyDataC
| fromJust (unaccountedPoints gs) = goalInput gs
| isJust $ gs^.gameSelectedPlayer = getPMinsC
| not $ gs^.gamePMinsRecorded = pMinPlayerC
| not $ gs^.gameGoalieAssigned = goalieInputC gs
| otherwise = reportC
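-- Reading note (editorial comment): the guard chain above is re-evaluated
-- whenever the dispatcher is called with the current 'GameState', so as each
-- field is filled in, control falls through to the next prompt (year, month,
-- day, game type, and so on) until the report controller is reached.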
gameYearC :: Controller
gameYearC = promptControllerWith header gameYearPrompt
gameMonthC :: Controller
gameMonthC = promptControllerWith monthHeader gameMonthPrompt
gameDayC :: Controller
gameDayC = promptControllerWith header gameDayPrompt
gameTypeC :: Controller
gameTypeC = menuControllerWith header gameTypeMenu
otherTeamC :: Controller
otherTeamC = promptControllerWith header otherTeamPrompt
homeScoreC :: Controller
homeScoreC = promptControllerWith header homeScorePrompt
awayScoreC :: Controller
awayScoreC = promptControllerWith header awayScorePrompt
overtimeFlagC :: Controller
overtimeFlagC = Controller
{ drawController = \s -> do
header s
C.drawString "Did the game go into overtime? (Y/N)"
return C.CursorInvisible
, handleController = \e -> do
modify $ progMode.gameStateL.overtimeFlag .~ ynHandler e
return True
}
verifyDataC :: Controller
verifyDataC = Controller
{ drawController = \s -> do
let gs = s^.progMode.gameStateL
header s
C.drawString "\n"
C.drawString $ unlines $ labelTable
[ ( "Date", gameDate gs )
, ( "Game type", show $ fromJust $ gs^.gameType )
, ( "Other team", gs^.otherTeam )
, ( "Home score", show $ fromJust $ gs^.homeScore )
, ( "Away score", show $ fromJust $ gs^.awayScore )
, ( "Overtime", show $ fromJust $ gs^.overtimeFlag )
]
C.drawString "\nIs the above information correct? (Y/N)"
return C.CursorInvisible
, handleController = \e -> do
case ynHandler e of
Just True -> modify
$ (progMode.gameStateL.dataVerified .~ True)
. updateGameStats
. awardShutouts
Just False -> modify $ progMode.gameStateL .~ newGameState
Nothing -> return ()
return True
}
goalInput :: GameState -> Controller
goalInput gs
| null (gs^.goalBy ) = recordGoalC
| not (gs^.confirmGoalDataFlag) = recordAssistC
| otherwise = confirmGoalDataC
recordGoalC :: Controller
recordGoalC = Controller
{ drawController = \s -> let
(game, goal) = gameGoal s
in drawPrompt (recordGoalPrompt game goal) s
, handleController = \e -> do
(game, goal) <- gets gameGoal
promptHandler (recordGoalPrompt game goal) e
return True
}
recordAssistC :: Controller
recordAssistC = Controller
{ drawController = \s -> let
(game, goal, assist) = gameGoalAssist s
in drawPrompt (recordAssistPrompt game goal assist) s
, handleController = \e -> do
(game, goal, assist) <- gets gameGoalAssist
promptHandler (recordAssistPrompt game goal assist) e
return True
}
confirmGoalDataC :: Controller
confirmGoalDataC = Controller
{ drawController = \s -> do
let
(game, goal) = gameGoal s
gs = s^.progMode.gameStateL
players = s^.database.dbPlayers
msg = unlines $
[ " Game: " ++ padNum 2 game
, " Goal: " ++ show goal
, "Goal scored by: " ++
playerSummary (fromJust $ gs^.goalBy >>= flip nth players)
] ++
map
(\pid -> " Assisted by: " ++
playerSummary (fromJust $ nth pid players))
(gs^.assistsBy) ++
[ ""
, "Is the above information correct? (Y/N)"
]
C.drawString msg
return C.CursorInvisible
, handleController = \e -> do
case ynHandler e of
Just True -> modify recordGoalAssists
Just False -> modify resetGoalData
Nothing -> return ()
return True
}
pMinPlayerC :: Controller
pMinPlayerC = Controller
{ drawController = \s -> do
header s
drawPrompt pMinPlayerPrompt s
, handleController = \e -> do
promptHandler pMinPlayerPrompt e
return True
}
getPMinsC :: Controller
getPMinsC = Controller
{ drawController = \s -> do
header s
C.drawString $ fromMaybe "" $ do
pid <- s^.progMode.gameStateL.gameSelectedPlayer
player <- nth pid $ s^.database.dbPlayers
Just $ playerSummary player ++ "\n"
drawPrompt assignPMinsPrompt s
, handleController = \e -> do
promptHandler assignPMinsPrompt e
return True
}
reportC :: Controller
reportC = Controller
{ drawController = \s -> do
(rows, cols) <- C.windowSize
C.drawString $ unlines $ slice
(s^.scrollOffset)
(fromInteger $ pred rows)
(displayReport (fromInteger $ pred cols) s)
return C.CursorInvisible
, handleController = \e -> do
case e of
C.EventSpecialKey C.KeyUpArrow -> modify scrollUp
C.EventSpecialKey C.KeyDownArrow -> modify scrollDown
C.EventSpecialKey C.KeyHome -> modify $ scrollOffset .~ 0
C.EventCharacter '\n' -> do
get >>= liftIO . writeFile reportFilename . exportReport reportCols
modify backHome
_ -> return ()
return True
}
header :: ProgState -> C.Update ()
header s = C.drawString $
"*** GAME " ++ padNum 2 (s^.database.dbGames) ++ " ***\n"
monthHeader :: ProgState -> C.Update ()
monthHeader s = do
(_, cols) <- C.windowSize
header s
let
table = labelTable $ zip (map show ([1..] :: [Int]))
[ "JANUARY"
, "FEBRUARY"
, "MARCH"
, "APRIL"
, "MAY"
, "JUNE"
, "JULY"
, "AUGUST"
, "SEPTEMBER"
, "OCTOBER"
, "NOVEMBER"
, "DECEMBER"
]
C.drawString $ unlines $
map (centre $ fromIntegral $ pred cols) $
["MONTH:", ""] ++ table ++ [""]
gameGoal :: ProgState -> (Int, Int)
gameGoal s =
( s^.database.dbGames
, succ $ s^.progMode.gameStateL.pointsAccounted
)
gameGoalAssist :: ProgState -> (Int, Int, Int)
gameGoalAssist s = let
(game, goal) = gameGoal s
assist = succ $ length $ s^.progMode.gameStateL.assistsBy
in (game, goal, assist)
| mtlstats/mtlstats | src/Mtlstats/Control/NewGame.hs | gpl-3.0 | 7,871 | 0 | 22 | 2,005 | 2,168 | 1,112 | 1,056 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.QSemN
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- Quantity semaphores in which each thread may wait for an arbitrary
-- \"amount\".
--
-----------------------------------------------------------------------------
module Control.Concurrent.QSemN
( -- * General Quantity Semaphores
QSemN, -- abstract
newQSemN, -- :: Int -> IO QSemN
waitQSemN, -- :: QSemN -> Int -> IO ()
signalQSemN -- :: QSemN -> Int -> IO ()
) where
import Prelude
import Control.Concurrent.MVar
import Data.Typeable
-- |A 'QSemN' is a quantity semaphore, in which the available
-- \"quantity\" may be signalled or waited for in arbitrary amounts.
newtype QSemN = QSemN (MVar (Int,[(Int,MVar ())]))
qSemNTc = mkTyCon "QSemN"

instance Typeable QSemN where
    typeOf _ = mkTyConApp qSemNTc []
-- |Build a new 'QSemN' with a supplied initial quantity.
newQSemN :: Int -> IO QSemN
newQSemN init = do
sem <- newMVar (init,[])
return (QSemN sem)
-- |Wait for the specified quantity to become available
waitQSemN :: QSemN -> Int -> IO ()
waitQSemN (QSemN sem) sz = do
(avail,blocked) <- takeMVar sem -- gain ex. access
if (avail - sz) >= 0 then
-- discharging 'sz' still leaves the semaphore
-- in an 'unblocked' state.
putMVar sem (avail-sz,blocked)
else do
block <- newEmptyMVar
putMVar sem (avail, blocked++[(sz,block)])
takeMVar block
-- |Signal that a given quantity is now available from the 'QSemN'.
signalQSemN :: QSemN -> Int -> IO ()
signalQSemN (QSemN sem) n = do
(avail,blocked) <- takeMVar sem
(avail',blocked') <- free (avail+n) blocked
putMVar sem (avail',blocked')
where
free avail [] = return (avail,[])
free avail ((req,block):blocked)
| avail >= req = do
putMVar block ()
free (avail-req) blocked
| otherwise = do
(avail',blocked') <- free avail blocked
return (avail',(req,block):blocked')
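-- A small usage sketch (editorial addition, not part of the original module):
-- a pool with three units in which one worker temporarily claims two.
--
-- > example :: IO ()
-- > example = do
-- >   sem <- newQSemN 3
-- >   waitQSemN sem 2     -- claim two units (may block)
-- >   -- ... critical work ...
-- >   signalQSemN sem 2   -- give them back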
| kaoskorobase/mescaline | resources/hugs/packages/base/Control/Concurrent/QSemN.hs | gpl-3.0 | 2,747 | 10 | 14 | 980 | 536 | 293 | 243 | 37 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Copyright : (c) 2010, 2011 Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <[email protected]>
-- Portability : GHC only
--
-- Conversion of the graph part of a sequent to a Graphviz Dot file.
module Theory.Constraint.System.Dot (
nonEmptyGraph
, nonEmptyGraphDiff
, dotSystemLoose
, dotSystemCompact
, compressSystem
, BoringNodeStyle(..)
) where
import Data.Char (isSpace)
import Data.Color
import qualified Data.DAG.Simple as D
import qualified Data.Foldable as F
import Data.List (find,foldl',intersect)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid (Any(..))
import qualified Data.Set as S
import Data.Ratio
import Safe
import Extension.Data.Label
import Extension.Prelude
import Control.Basics
import Control.Monad.Reader
import Control.Monad.State (StateT, evalStateT)
import qualified Text.Dot as D
import Text.PrettyPrint.Class
import Theory.Constraint.System
import Theory.Model
import Theory.Text.Pretty (opAction)
-- | 'True' iff the dotted system will be a non-empty graph.
nonEmptyGraph :: System -> Bool
nonEmptyGraph sys = not $
M.null (get sNodes sys) && null (unsolvedActionAtoms sys) &&
null (unsolvedChains sys) &&
S.null (get sEdges sys) && S.null (get sLessAtoms sys)
-- | 'True' iff the dotted system will be a non-empty graph.
nonEmptyGraphDiff :: DiffSystem -> Bool
nonEmptyGraphDiff diffSys = not $
case (get dsSystem diffSys) of
Nothing -> True
(Just sys) -> M.null (get sNodes sys) && null (unsolvedActionAtoms sys) &&
null (unsolvedChains sys) &&
S.null (get sEdges sys) && S.null (get sLessAtoms sys)
type NodeColorMap = M.Map (RuleInfo ProtoRuleACInstInfo IntrRuleACInfo) (RGB Rational)
type SeDot = ReaderT (System, NodeColorMap) (StateT DotState D.Dot)
-- | State to avoid multiple drawing of the same entity.
data DotState = DotState {
_dsNodes :: M.Map NodeId D.NodeId
, _dsPrems :: M.Map NodePrem D.NodeId
, _dsConcs :: M.Map NodeConc D.NodeId
, _dsSingles :: M.Map (NodeConc, NodePrem) D.NodeId
}
$(mkLabels [''DotState])
-- | Lift a 'D.Dot' action.
liftDot :: D.Dot a -> SeDot a
liftDot = lift . lift
-- | All edges in a bipartite graph that have neither start point nor endpoint
-- in common with any other edge.
singleEdges :: (Ord a, Ord b) => [(a,b)] -> [(a,b)]
singleEdges es =
singles fst es `intersect` singles snd es
where
singles proj = concatMap single . groupOn proj . sortOn proj
single [] = error "impossible"
single [x] = return x
single _ = mzero
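-- Illustration (assumed input): singleEdges [(1,'a'),(1,'b'),(2,'c')] keeps
-- only (2,'c'); the first two edges share a start point and are dropped.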
-- | Ensure that a 'SeDot' action is only executed once by querying and
-- updating the 'DotState' accordingly.
dotOnce :: Ord k
=> (DotState :-> M.Map k D.NodeId) -- ^ Accessor to map storing this type of actions.
-> k -- ^ Action index.
-> SeDot D.NodeId -- ^ Action to execute only once.
-> SeDot D.NodeId
dotOnce mapL k dot = do
i <- join $ (maybe dot return . M.lookup k) `liftM` getM mapL
modM mapL (M.insert k i)
return i
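-- Editorial note: 'dotOnce mapL k act' behaves like a memo table. The first
-- call for a given key runs 'act' and records the produced Graphviz node id;
-- later calls with the same key return the recorded id without drawing again.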
dotNode :: NodeId -> SeDot D.NodeId
dotNode v = dotOnce dsNodes v $ do
(se, colorMap) <- ask
let nodes = get sNodes se
dot info moreStyle facts = do
vId <- liftDot $ D.node $ [("label", show v ++ info),("shape","ellipse")]
++ moreStyle
_ <- facts vId
return vId
case M.lookup v nodes of
Nothing -> do
dot "" [] (const $ return ()) -- \vId -> do
{-
premIds <- mapM dotPrem
[ NodePremFact v fa
| SeRequires v' fa <- S.toList $ get sRequires se
, v == v' ]
sequence_ [ dotIntraRuleEdge premId vId | premId <- premIds ]
-}
Just ru -> do
let
color = M.lookup (get rInfo ru) colorMap
nodeColor = maybe "white" rgbToHex color
dot (label ru) [("fillcolor", nodeColor),("style","filled")] $ \vId -> do
premIds <- mapM dotPrem
[ (v,i) | (i,_) <- enumPrems ru ]
concIds <- mapM dotConc
[ (v,i) | (i,_) <- enumConcs ru ]
sequence_ [ dotIntraRuleEdge premId vId | premId <- premIds ]
sequence_ [ dotIntraRuleEdge vId concId | concId <- concIds ]
where
label ru = " : " ++ render nameAndActs
where
nameAndActs =
ruleInfo (prettyProtoRuleName . get praciName) prettyIntrRuleACInfo (get rInfo ru) <->
brackets (vcat $ punctuate comma $ map prettyLNFact $ filter isNotDiffAnnotation $ get rActs ru)
isNotDiffAnnotation fa = (fa /= (Fact (ProtoFact Linear ("Diff" ++ getRuleNameDiff ru) 0) S.empty []))
-- | An edge from a rule node to its premises or conclusions.
dotIntraRuleEdge :: D.NodeId -> D.NodeId -> SeDot ()
dotIntraRuleEdge from to = liftDot $ D.edge from to [("color","gray")]
{-
-- | An edge from a rule node to some of its premises or conclusions.
dotNonFixedIntraRuleEdge :: D.NodeId -> D.NodeId -> SeDot ()
dotNonFixedIntraRuleEdge from to =
liftDot $ D.edge from to [("color","steelblue")]
-}
-- | The style of a node displaying a fact.
factNodeStyle :: LNFact -> [(String,String)]
factNodeStyle fa
| isJust (kFactView fa) = []
| otherwise = [("fillcolor","gray85"),("style","filled")]
-- | An edge that shares no endpoints with another edge and is therefore
-- contracted.
--
-- FIXME: There may be too many edges being contracted.
dotSingleEdge :: (NodeConc, NodePrem) -> SeDot D.NodeId
dotSingleEdge edge@(_, to) = dotOnce dsSingles edge $ do
se <- asks fst
let fa = nodePremFact to se
label = render $ prettyLNFact fa
liftDot $ D.node $ [("label", label),("shape", "hexagon")]
++ factNodeStyle fa
-- | A compressed edge.
dotTrySingleEdge :: Eq c
=> ((NodeConc, NodePrem) -> c) -> c
-> SeDot D.NodeId -> SeDot D.NodeId
dotTrySingleEdge sel x dot = do
singles <- getM dsSingles
maybe dot (return . snd) $ find ((x ==) . sel . fst) $ M.toList singles
-- | Premises.
dotPrem :: NodePrem -> SeDot D.NodeId
dotPrem prem@(v, i) =
dotOnce dsPrems prem $ dotTrySingleEdge snd prem $ do
nodes <- asks (get sNodes . fst)
let ppPrem = show prem -- FIXME: Use better pretty printing here
(label, moreStyle) = fromMaybe (ppPrem, []) $ do
ru <- M.lookup v nodes
fa <- lookupPrem i ru
return ( render $ prettyLNFact fa
, factNodeStyle fa
)
liftDot $ D.node $ [("label", label),("shape",shape)]
++ moreStyle
where
shape = "invtrapezium"
-- | Conclusions.
dotConc :: NodeConc -> SeDot D.NodeId
dotConc =
dotNodeWithIndex dsConcs fst rConcs (id *** getConcIdx) "trapezium"
where
dotNodeWithIndex stateSel edgeSel ruleSel unwrap shape x0 =
dotOnce stateSel x0 $ dotTrySingleEdge edgeSel x0 $ do
let x = unwrap x0
nodes <- asks (get sNodes . fst)
let (label, moreStyle) = fromMaybe (show x, []) $ do
ru <- M.lookup (fst x) nodes
fa <- (`atMay` snd x) $ get ruleSel ru
return ( render $ prettyLNFact fa
, factNodeStyle fa
)
liftDot $ D.node $ [("label", label),("shape",shape)]
++ moreStyle
-- | Convert the sequent to a 'D.Dot' action representing this sequent as a
-- graph in the GraphViz format. The style is loose in the sense that each
-- premise and conclusion gets its own node.
dotSystemLoose :: System -> D.Dot ()
dotSystemLoose se =
(`evalStateT` DotState M.empty M.empty M.empty M.empty) $
(`runReaderT` (se, nodeColorMap (M.elems $ get sNodes se))) $ do
liftDot $ setDefaultAttributes
-- draw single edges with matching facts.
mapM_ dotSingleEdge $ singleEdges $ do
Edge from to <- S.toList $ get sEdges se
-- FIXME: ensure that conclusion and premise are equal
guard (nodeConcFact from se == nodePremFact to se)
return (from, to)
sequence_ $ do
(v, ru) <- M.toList $ get sNodes se
(i, _) <- enumConcs ru
return (dotConc (v, i))
sequence_ $ do
(v, ru) <- M.toList $ get sNodes se
(i, _) <- enumPrems ru
return (dotPrem (v,i))
-- FIXME: Also dot unsolved actions.
mapM_ dotNode $ M.keys $ get sNodes se
mapM_ dotEdge $ S.toList $ get sEdges se
mapM_ dotChain $ unsolvedChains se
mapM_ dotLess $ S.toList $ get sLessAtoms se
where
dotEdge (Edge src tgt) = do
mayNid <- M.lookup (src,tgt) `liftM` getM dsSingles
maybe (dotGenEdge [] src tgt) (const $ return ()) mayNid
dotChain (src, tgt) =
dotGenEdge [("style","dashed"),("color","green")] src tgt
dotLess (src, tgt) = do
srcId <- dotNode src
tgtId <- dotNode tgt
liftDot $ D.edge srcId tgtId
[("color","black"),("style","dotted")] -- FIXME: Reactivate,("constraint","false")]
-- setting constraint to false ignores less-edges when ranking nodes.
dotGenEdge style src tgt = do
srcId <- dotConc src
tgtId <- dotPrem tgt
liftDot $ D.edge srcId tgtId style
-- | Set default attributes for nodes and edges.
setDefaultAttributes :: D.Dot ()
setDefaultAttributes = do
D.attribute ("nodesep","0.3")
D.attribute ("ranksep","0.3")
D.nodeAttributes [("fontsize","8"),("fontname","Helvetica"),("width","0.3"),("height","0.2")]
D.edgeAttributes [("fontsize","8"),("fontname","Helvetica")]
-- | Compute a color map for nodes labelled with a proof rule info of one of
-- the given rules.
nodeColorMap :: [RuleACInst] -> NodeColorMap
nodeColorMap rules =
M.fromList $
[ (get rInfo ru, case find colorAttr $ ruleAttributes ru of
Just (RuleColor c) -> c
Nothing -> hsvToRGB $ getColor (gIdx, mIdx))
| (gIdx, grp) <- groups, (mIdx, ru) <- zip [0..] grp ]
where
groupIdx ru | isDestrRule ru = 0
| isConstrRule ru = 2
| isFreshRule ru || isISendRule ru = 3
| otherwise = 1
-- groups of rules labeled with their index in the group
groups = [ (gIdx, [ ru | ru <- rules, gIdx == groupIdx ru])
| gIdx <- [0..3]
]
-- color for each member of a group
colors = M.fromList $ lightColorGroups intruderHue (map (length . snd) groups)
getColor idx = fromMaybe (HSV 0 1 1) $ M.lookup idx colors
-- Note: Currently RuleColors are the only Rule Attributes, so the second line is
-- commented out to remove the redundant pattern compiler warning. If more are added,
-- the second line can be uncommented.
colorAttr (RuleColor _) = True
-- colorAttr _ = False
-- The hue of the intruder rules
intruderHue :: Rational
intruderHue = 18 % 360
------------------------------------------------------------------------------
-- Record based dotting
------------------------------------------------------------------------------
-- | The style for nodes of the intruder.
data BoringNodeStyle = FullBoringNodes | CompactBoringNodes
deriving( Eq, Ord, Show )
-- | Dot a node in record based (compact) format.
dotNodeCompact :: BoringNodeStyle -> NodeId -> SeDot D.NodeId
dotNodeCompact boringStyle v = dotOnce dsNodes v $ do
(se, colorMap) <- ask
let hasOutgoingEdge =
or [ v == v' | Edge (v', _) _ <- S.toList $ get sEdges se ]
case M.lookup v $ get sNodes se of
Nothing -> case filter ((v ==) . fst) (unsolvedActionAtoms se) of
[] -> mkSimpleNode (show v) []
as -> let lbl = (fsep $ punctuate comma $ map (prettyLNFact . snd) as)
<-> opAction <-> text (show v)
attrs | any (isKUFact . snd) as = [("color","gray")]
| otherwise = [("color","darkblue")]
in mkSimpleNode (render lbl) attrs
Just ru -> do
let color = M.lookup (get rInfo ru) colorMap
nodeColor = maybe "white" rgbToHex color
attrs = [("fillcolor", nodeColor),("style","filled")
, ("fontcolor", if colorUsesWhiteFont color then "white" else "black")]
ids <- mkNode ru attrs hasOutgoingEdge
let prems = [ ((v, i), nid) | (Just (Left i), nid) <- ids ]
concs = [ ((v, i), nid) | (Just (Right i), nid) <- ids ]
modM dsPrems $ M.union $ M.fromList prems
modM dsConcs $ M.union $ M.fromList concs
return $ fromJust $ lookup Nothing ids
where
    -- True if there's a colour, and it's 'darker' than 0.5 in apparent luminosity
    -- This assumes a linear colourspace, which is what graphviz seems to use
colorUsesWhiteFont (Just (RGB r g b)) = (0.2126*r + 0.7152*g + 0.0722*b) < 0.5
colorUsesWhiteFont _ = False
mkSimpleNode lbl attrs =
liftDot $ D.node $ [("label", lbl),("shape","ellipse")] ++ attrs
mkNode :: RuleACInst -> [(String, String)] -> Bool
-> ReaderT (System, NodeColorMap) (StateT DotState D.Dot)
[(Maybe (Either PremIdx ConcIdx), D.NodeId)]
mkNode ru attrs hasOutgoingEdge
-- single node, share node-id for all premises and conclusions
| boringStyle == CompactBoringNodes &&
(isIntruderRule ru || isFreshRule ru) = do
let lbl | hasOutgoingEdge = show v ++ " : " ++ showRuleCaseName ru
| otherwise = concatMap snd as
nid <- mkSimpleNode lbl []
return [ (key, nid) | (key, _) <- ps ++ as ++ cs ]
-- full record syntax
| otherwise =
fmap snd $ liftDot $ (`D.record` attrs) $
D.vcat $ map D.hcat $ map (map (uncurry D.portField)) $
filter (not . null) [ps, as, cs]
where
ps = renderRow [ (Just (Left i), prettyLNFact p) | (i, p) <- enumPrems ru ]
as = renderRow [ (Nothing, ruleLabel ) ]
cs = renderRow [ (Just (Right i), prettyLNFact c) | (i, c) <- enumConcs ru ]
ruleLabel =
prettyNodeId v <-> colon <-> text (showRuleCaseName ru) <>
(brackets $ vcat $ punctuate comma $
map prettyLNFact $ filter isNotDiffAnnotation $ get rActs ru)
isNotDiffAnnotation fa = (fa /= (Fact (ProtoFact Linear ("Diff" ++ getRuleNameDiff ru) 0) S.empty []))
renderRow annDocs =
zipWith (\(ann, _) lbl -> (ann, lbl)) annDocs $
                     -- magic factor 1.3 compensates for space gained due to
                     -- non-proportional font
renderBalanced 100 (max 30 . round . (* 1.3)) (map snd annDocs)
renderBalanced :: Double -- ^ Total available width
-> (Double -> Int) -- ^ Convert available space to actual line-width.
-> [Doc] -- ^ Initial documents
-> [String] -- ^ Rendered documents
renderBalanced _ _ [] = []
renderBalanced totalWidth conv docs =
zipWith (\w d -> widthRender (conv (ratio * w)) d) usedWidths docs
where
oneLineRender = renderStyle (defaultStyle { mode = OneLineMode })
widthRender w = scaleIndent . renderStyle (defaultStyle { lineLength = w })
usedWidths = map (fromIntegral . length . oneLineRender) docs
ratio = totalWidth / sum usedWidths
scaleIndent line = case span isSpace line of
(spaces, rest) ->
-- spaces are not wide-enough by default => scale them up
let n = (1.5::Double) * fromIntegral (length spaces)
in replicate (round n) ' ' ++ rest
-- | Dot a sequent in compact form (one record per rule), if there is anything
-- to draw.
dotSystemCompact :: BoringNodeStyle -> System -> D.Dot ()
dotSystemCompact boringStyle se =
(`evalStateT` DotState M.empty M.empty M.empty M.empty) $
(`runReaderT` (se, nodeColorMap (M.elems $ get sNodes se))) $ do
liftDot $ setDefaultAttributes
mapM_ (dotNodeCompact boringStyle) $ M.keys $ get sNodes se
mapM_ (dotNodeCompact boringStyle . fst) $ unsolvedActionAtoms se
F.mapM_ dotEdge $ get sEdges se
F.mapM_ dotChain $ unsolvedChains se
F.mapM_ dotLess $ get sLessAtoms se
where
missingNode shape label = liftDot $ D.node $ [("label", render label),("shape",shape)]
dotPremC prem = dotOnce dsPrems prem $ missingNode "invtrapezium" $ prettyNodePrem prem
dotConcC conc = dotOnce dsConcs conc $ missingNode "trapezium" $ prettyNodeConc conc
dotEdge (Edge src tgt) = do
let check p = maybe False p (resolveNodePremFact tgt se) ||
maybe False p (resolveNodeConcFact src se)
attrs | check isProtoFact =
[("style","bold"),("weight","10.0")] ++
(guard (check isPersistentFact) >> [("color","gray50")])
| check isKFact = [("color","orangered2")]
| otherwise = [("color","gray30")]
dotGenEdge attrs src tgt
dotGenEdge style src tgt = do
srcId <- dotConcC src
tgtId <- dotPremC tgt
liftDot $ D.edge srcId tgtId style
dotChain (src, tgt) =
dotGenEdge [("style","dashed"),("color","green")] src tgt
dotLess (src, tgt) = do
srcId <- dotNodeCompact boringStyle src
tgtId <- dotNodeCompact boringStyle tgt
liftDot $ D.edge srcId tgtId
[("color","black"),("style","dotted")] -- FIXME: reactivate ,("constraint","false")]
-- setting constraint to false ignores less-edges when ranking nodes.
------------------------------------------------------------------------------
-- Compressed versions of a sequent
------------------------------------------------------------------------------
-- | Drop 'Less' atoms entailed by the edges of the 'System'.
dropEntailedOrdConstraints :: System -> System
dropEntailedOrdConstraints se =
modify sLessAtoms (S.filter (not . entailed)) se
where
edges = rawEdgeRel se
entailed (from, to) = to `S.member` D.reachableSet [from] edges
-- | Unsound compression of the sequent that drops fully connected learns and
-- knows nodes.
compressSystem :: System -> System
compressSystem se0 =
foldl' (flip tryHideNodeId) se (frees (get sLessAtoms se, get sNodes se))
where
se = dropEntailedOrdConstraints se0
-- | @tryHideNodeId v se@ hides node @v@ in sequent @se@ if it is a
-- transfer node; i.e., a node annotated with a rule that is one of the
-- special intruder rules or a rule with at most one premise and
-- at most one conclusion, and whose premises and conclusions have incoming
-- and outgoing edges, respectively.
--
-- The compression is chosen such that only uninteresting nodes that have
-- no open goal are suppressed.
tryHideNodeId :: NodeId -> System -> System
tryHideNodeId v se = fromMaybe se $ do
guard $ (lvarSort v == LSortNode)
&& notOccursIn unsolvedChains
&& notOccursIn (get sFormulas)
maybe hideAction hideRule (M.lookup v $ get sNodes se)
where
selectPart :: (System :-> S.Set a) -> (a -> Bool) -> [a]
selectPart l p = filter p $ S.toList $ get l se
notOccursIn :: HasFrees a => (System -> a) -> Bool
notOccursIn proj = not $ getAny $ foldFrees (Any . (v ==)) $ proj se
-- hide KU-actions deducing pairs, inverses, and simple terms
hideAction = do
guard $ not (null kuActions)
&& all eligibleTerm kuActions
&& all (\(i, j) -> not (i == j)) lNews
&& notOccursIn (standardActionAtoms)
&& notOccursIn (get sLastAtom)
&& notOccursIn (get sEdges)
return $ modify sLessAtoms ( (`S.union` S.fromList lNews)
. (`S.difference` S.fromList lIns)
. (`S.difference` S.fromList lOuts)
)
$ modify sGoals (\m -> foldl' removeAction m kuActions)
$ se
where
kuActions = [ x | x@(i,_,_) <- kuActionAtoms se, i == v ]
eligibleTerm (_,_,m) =
isPair m || isInverse m || sortOfLNTerm m == LSortPub
removeAction m (i, fa, _) = M.delete (ActionG i fa) m
lIns = selectPart sLessAtoms ((v ==) . snd)
lOuts = selectPart sLessAtoms ((v ==) . fst)
lNews = [ (i, j) | (i, _) <- lIns, (_, j) <- lOuts ]
-- hide a rule, if it is not "too complicated"
hideRule :: RuleACInst -> Maybe System
hideRule ru = do
guard $ eligibleRule
&& ( length eIns == length (get rPrems ru) )
&& ( length eOuts == length (get rConcs ru) )
&& ( all (not . selfEdge) eNews )
&& notOccursIn (get sLastAtom)
&& notOccursIn (get sLessAtoms)
&& notOccursIn (unsolvedActionAtoms)
return $ modify sEdges ( (`S.union` S.fromList eNews)
. (`S.difference` S.fromList eIns)
. (`S.difference` S.fromList eOuts)
)
$ modify sNodes (M.delete v)
$ se
where
eIns = selectPart sEdges ((v ==) . nodePremNode . eTgt)
eOuts = selectPart sEdges ((v ==) . nodeConcNode . eSrc)
eNews = [ Edge cIn pOut | Edge cIn _ <- eIns, Edge _ pOut <- eOuts ]
selfEdge (Edge cIn pOut) = nodeConcNode cIn == nodePremNode pOut
eligibleRule =
any ($ ru) [isISendRule, isIRecvRule, isCoerceRule, isFreshRule]
|| ( null (get rActs ru) &&
all (\l -> length (get l ru) <= 1) [rPrems, rConcs]
)
{-
-- | Try to hide a 'NodeId'. This only works if it has only action and either
-- edge or less constraints associated.
tryHideNodeId :: NodeId -> System -> System
-}
| rsasse/tamarin-prover | lib/theory/src/Theory/Constraint/System/Dot.hs | gpl-3.0 | 22,990 | 0 | 24 | 7,300 | 6,580 | 3,410 | 3,170 | 364 | 6 |
{-# LANGUAGE EmptyDataDecls, FlexibleContexts, GADTs, OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies #-}
module Teatros.Persistent where
import Database.Persist.TH
import Data.Text (Text)
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
FichaTecnica
ubicacion UbicacionId
encabezamientos [EncabezamientoId] Maybe
fecha FechaId
sede SedeId
notas Text Maybe
lugar Text Maybe
UniqueFichaTecnica ubicacion fecha sede
Fecha
ano Int Maybe
mes Int Maybe
dia Int Maybe
Sede
nombre Text
UniqueSede nombre
Encabezamiento
nombre Text
UniqueEncabezamiento nombre
Ubicacion
fisica Text
copias Text
digital Text
Periodico
fichaTecnica FichaTecnicaId
titulo Text
nombre Text
idioma Text
pagina Text
resumen Text
autor Text
UniquePeriodico fichaTecnica nombre
ProgramaMano
fichaTecnica FichaTecnicaId
obra Text
directorObra Text
resumen Text
autor Text
UniqueProgramaMano fichaTecnica obra
Afiche
fichaTecnica FichaTecnicaId
titulo Text
formato FormatoAficheId
agrupacion Text
disenador Text
UniqueAfiche fichaTecnica titulo
Fotografia
fichaTecnica FichaTecnicaId
formato FormatoFotografiaId
tecnologia TecnologiaFotografiaId
fotografo Text
evento Text
disenadorV Text
escenografo Text
UniqueFotografia fichaTecnica fotografo
Audiovisual
fichaTecnica FichaTecnicaId
tecnologia TecnologiaAudiovisualId
titulo Text
edicion Text
tiempo Text
UniqueAudiovisual fichaTecnica titulo
Bibliografia
fichaTecnica FichaTecnicaId
tipoDoc TipoDocumentoId
titulo Text
editorial Text
paginas Int
autor Text
UniqueBibliografia fichaTecnica titulo
Premio
fichaTecnica FichaTecnicaId
titulo Text
institucionO Text
tecnica Text
UniquePremio fichaTecnica titulo
ObraGrafica
fichaTecnica FichaTecnicaId
titulo Text
formato FormatoAficheId
disenadorV Text
escenografo Text
autor Text
tecnica Text
UniqueObraGrafica fichaTecnica titulo
ActividadCultural
fichaTecnica FichaTecnicaId
agrupaciones Text
UniqueActividadCultural fichaTecnica
ProgramaAcademico
fichaTecnica FichaTecnicaId
titulo Text
profesores Text
programa Text
UniqueProgramaAcademico fichaTecnica titulo
TecnologiaFotografia
nombre Text
UniqueTecnologiaFotografia nombre
TecnologiaAudiovisual
nombre Text
UniqueTecnologiaAudiovisual nombre
FormatoFotografia
nombre Text
UniqueFormatoFotografia nombre
FormatoAfiche
nombre Text
UniqueFormatoAfiche nombre
TipoDocumento
nombre Text
UniqueTipoDocumento nombre
|]
| arpunk/jfdb | src/Teatros/Persistent.hs | gpl-3.0 | 2,844 | 0 | 7 | 697 | 48 | 29 | 19 | 6 | 0 |
{-
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
module Helpers.PositionSpec (spec) where
import Lens.Micro ((&), (.~))
import Test.Hspec (Spec, context, describe, it, shouldBe)
import Mtlstats.Helpers.Position
import Mtlstats.Types
spec :: Spec
spec = describe "Position" $ do
posSearchSpec
posSearchExactSpec
getPositionsSpec
posSearchSpec :: Spec
posSearchSpec = describe "posSearch" $ mapM_
(\(sStr, expected) -> context ("search string: " ++ show sStr) $
it ("should be " ++ show expected) $
posSearch sStr db `shouldBe` expected)
[ ( "fOo"
, [ ( 2, "foo" )
]
)
, ( "A"
, [ ( 0, "bar" )
, ( 1, "baz" )
]
)
]
posSearchExactSpec :: Spec
posSearchExactSpec = describe "posSearchExact" $ mapM_
(\(input, expected) -> context ("input: " ++ show input) $
it ("should be " ++ show expected) $
posSearchExact input db `shouldBe` expected)
-- input, expected
[ ( "foo", Just 2 )
, ( "FOO", Nothing )
, ( "bar", Just 0 )
, ( "baz", Just 1 )
, ( "a", Nothing )
, ( "quux", Nothing )
]
getPositionsSpec :: Spec
getPositionsSpec = describe "getPositions" $ let
expected = ["bar", "baz", "foo"]
in it ("should be " ++ show expected) $
getPositions db `shouldBe` expected
db :: Database
db = newDatabase & dbPlayers .~
[ newPlayer 2 "Joe" "foo"
, newPlayer 3 "Bob" "bar"
, newPlayer 5 "Bill" "foo"
, newPlayer 8 "Ed" "baz"
]
| mtlstats/mtlstats | test/Helpers/PositionSpec.hs | gpl-3.0 | 2,116 | 0 | 15 | 472 | 480 | 270 | 210 | 42 | 1 |
module Utils.Xhtml ( niceDashes
, uC) where
import Text.XHtml
-- Very simple at the moment. Not easily extended to handle em-dashes as well.
niceDashes :: String -> String
niceDashes [] = []
niceDashes (x:xs)
| x == '-' = "–" ++ niceDashes xs
| otherwise = x : niceDashes xs
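-- A usage sketch (illustrative): hyphens become en dashes, everything else
-- is left untouched.
--
-- > niceDashes "pp. 5-7" == "pp. 5–7"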
uC :: Char -> Html
uC c = thespan ! [strAttr "style" "text-decoration: underline;"] << [c]
| spwhitton/sariulclocks | src/Utils/Xhtml.hs | gpl-3.0 | 395 | 0 | 8 | 105 | 125 | 65 | 60 | 10 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns, MagicHash #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Bits
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module defines bitwise operations for signed and unsigned
-- integers. Instances of the class 'Bits' for the 'Int' and
-- 'Integer' types are available from this module, and instances for
-- explicitly sized integral types are available from the
-- "Data.Int" and "Data.Word" modules.
--
-----------------------------------------------------------------------------
module Data.Bits (
Bits(
(.&.), (.|.), xor,
complement,
shift,
rotate,
zeroBits,
bit,
setBit,
clearBit,
complementBit,
testBit,
bitSizeMaybe,
bitSize,
isSigned,
shiftL, shiftR,
unsafeShiftL, unsafeShiftR,
rotateL, rotateR,
popCount
),
FiniteBits(finiteBitSize),
bitDefault,
testBitDefault,
popCountDefault
) where
-- Defines the @Bits@ class containing bit-based operations.
-- See library document for details on the semantics of the
-- individual operations.
#include "MachDeps.h"
import Data.Maybe
import GHC.Enum
import GHC.Num
import GHC.Base
infixl 8 `shift`, `rotate`, `shiftL`, `shiftR`, `rotateL`, `rotateR`
infixl 7 .&.
infixl 6 `xor`
infixl 5 .|.
{-# DEPRECATED bitSize "Use 'bitSizeMaybe' or 'finiteBitSize' instead" #-} -- deprecated in 7.8
{-|
The 'Bits' class defines bitwise operations over integral types.
* Bits are numbered from 0 with bit 0 being the least
significant bit.
Minimal complete definition: '.&.', '.|.', 'xor', 'complement',
('shift' or ('shiftL' and 'shiftR')), ('rotate' or ('rotateL' and 'rotateR')),
'bitSize', 'isSigned', 'testBit', 'bit', and 'popCount'. The latter three can
be implemented using 'testBitDefault', 'bitDefault', and 'popCountDefault', if
@a@ is also an instance of 'Num'.
-}
class Eq a => Bits a where
-- | Bitwise \"and\"
(.&.) :: a -> a -> a
-- | Bitwise \"or\"
(.|.) :: a -> a -> a
-- | Bitwise \"xor\"
xor :: a -> a -> a
{-| Reverse all the bits in the argument -}
complement :: a -> a
{-| @'shift' x i@ shifts @x@ left by @i@ bits if @i@ is positive,
or right by @-i@ bits otherwise.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
An instance can define either this unified 'shift' or 'shiftL' and
'shiftR', depending on which is more convenient for the type in
question. -}
shift :: a -> Int -> a
x `shift` i | i<0 = x `shiftR` (-i)
| i>0 = x `shiftL` i
| otherwise = x
{-| @'rotate' x i@ rotates @x@ left by @i@ bits if @i@ is positive,
or right by @-i@ bits otherwise.
For unbounded types like 'Integer', 'rotate' is equivalent to 'shift'.
An instance can define either this unified 'rotate' or 'rotateL' and
'rotateR', depending on which is more convenient for the type in
question. -}
rotate :: a -> Int -> a
x `rotate` i | i<0 = x `rotateR` (-i)
| i>0 = x `rotateL` i
| otherwise = x
{-
-- Rotation can be implemented in terms of two shifts, but care is
-- needed for negative values. This suggested implementation assumes
-- 2's-complement arithmetic. It is commented out because it would
-- require an extra context (Ord a) on the signature of 'rotate'.
x `rotate` i | i<0 && isSigned x && x<0
= let left = i+bitSize x in
((x `shift` i) .&. complement ((-1) `shift` left))
.|. (x `shift` left)
| i<0 = (x `shift` i) .|. (x `shift` (i+bitSize x))
| i==0 = x
| i>0 = (x `shift` i) .|. (x `shift` (i-bitSize x))
-}
-- | 'zeroBits' is the value with all bits unset.
--
-- The following laws ought to hold (for all valid bit indices @/n/@):
--
-- * @'clearBit' 'zeroBits' /n/ == 'zeroBits'@
-- * @'setBit' 'zeroBits' /n/ == 'bit' /n/@
-- * @'testBit' 'zeroBits' /n/ == False@
-- * @'popCount' 'zeroBits' == 0@
--
-- This method uses @'clearBit' ('bit' 0) 0@ as its default
-- implementation (which ought to be equivalent to 'zeroBits' for
-- types which possess a 0th bit).
--
-- /Since: 4.7.0.0/
zeroBits :: a
zeroBits = clearBit (bit 0) 0
-- | @bit /i/@ is a value with the @/i/@th bit set and all other bits clear.
--
-- See also 'zeroBits'.
bit :: Int -> a
-- | @x \`setBit\` i@ is the same as @x .|. bit i@
setBit :: a -> Int -> a
-- | @x \`clearBit\` i@ is the same as @x .&. complement (bit i)@
clearBit :: a -> Int -> a
-- | @x \`complementBit\` i@ is the same as @x \`xor\` bit i@
complementBit :: a -> Int -> a
-- | Return 'True' if the @n@th bit of the argument is 1
testBit :: a -> Int -> Bool
{-| Return the number of bits in the type of the argument. The actual
value of the argument is ignored. Returns Nothing
for types that do not have a fixed bitsize, like 'Integer'.
/Since: 4.7.0.0/
-}
bitSizeMaybe :: a -> Maybe Int
{-| Return the number of bits in the type of the argument. The actual
value of the argument is ignored. The function 'bitSize' is
undefined for types that do not have a fixed bitsize, like 'Integer'.
-}
bitSize :: a -> Int
{-| Return 'True' if the argument is a signed type. The actual
value of the argument is ignored -}
isSigned :: a -> Bool
{-# INLINE setBit #-}
{-# INLINE clearBit #-}
{-# INLINE complementBit #-}
x `setBit` i = x .|. bit i
x `clearBit` i = x .&. complement (bit i)
x `complementBit` i = x `xor` bit i
{-| Shift the argument left by the specified number of bits
(which must be non-negative).
An instance can define either this and 'shiftR' or the unified
'shift', depending on which is more convenient for the type in
question. -}
shiftL :: a -> Int -> a
{-# INLINE shiftL #-}
x `shiftL` i = x `shift` i
{-| Shift the argument left by the specified number of bits. The
result is undefined for negative shift amounts and shift amounts
        greater than or equal to the 'bitSize'.
Defaults to 'shiftL' unless defined explicitly by an instance.
/Since: 4.5.0.0/ -}
unsafeShiftL :: a -> Int -> a
{-# INLINE unsafeShiftL #-}
x `unsafeShiftL` i = x `shiftL` i
{-| Shift the first argument right by the specified number of bits. The
result is undefined for negative shift amounts and shift amounts
        greater than or equal to the 'bitSize'.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
An instance can define either this and 'shiftL' or the unified
'shift', depending on which is more convenient for the type in
question. -}
shiftR :: a -> Int -> a
{-# INLINE shiftR #-}
x `shiftR` i = x `shift` (-i)
{-| Shift the first argument right by the specified number of bits, which
        must be non-negative and smaller than the number of bits in the type.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
Defaults to 'shiftR' unless defined explicitly by an instance.
/Since: 4.5.0.0/ -}
unsafeShiftR :: a -> Int -> a
{-# INLINE unsafeShiftR #-}
x `unsafeShiftR` i = x `shiftR` i
{-| Rotate the argument left by the specified number of bits
(which must be non-negative).
An instance can define either this and 'rotateR' or the unified
'rotate', depending on which is more convenient for the type in
question. -}
rotateL :: a -> Int -> a
{-# INLINE rotateL #-}
x `rotateL` i = x `rotate` i
{-| Rotate the argument right by the specified number of bits
(which must be non-negative).
An instance can define either this and 'rotateL' or the unified
'rotate', depending on which is more convenient for the type in
question. -}
rotateR :: a -> Int -> a
{-# INLINE rotateR #-}
x `rotateR` i = x `rotate` (-i)
{-| Return the number of set bits in the argument. This number is
known as the population count or the Hamming weight.
/Since: 4.5.0.0/ -}
popCount :: a -> Int
{-# MINIMAL (.&.), (.|.), xor, complement,
(shift | (shiftL, shiftR)),
(rotate | (rotateL, rotateR)),
bitSize, bitSizeMaybe, isSigned, testBit, bit, popCount #-}
-- |The 'FiniteBits' class denotes types with a finite, fixed number of bits.
--
-- /Since: 4.7.0.0/
class Bits b => FiniteBits b where
-- | Return the number of bits in the type of the argument.
-- The actual value of the argument is ignored. Moreover, 'finiteBitSize'
-- is total, in contrast to the deprecated 'bitSize' function it replaces.
--
-- @
-- 'finiteBitSize' = 'bitSize'
-- 'bitSizeMaybe' = 'Just' . 'finiteBitSize'
-- @
--
-- /Since: 4.7.0.0/
finiteBitSize :: b -> Int
-- The defaults below are written with lambdas so that e.g.
-- bit = bitDefault
-- is fully applied, so inlining will happen
-- | Default implementation for 'bit'.
--
-- Note that: @bitDefault i = 1 `shiftL` i@
--
-- /Since: 4.6.0.0/
bitDefault :: (Bits a, Num a) => Int -> a
bitDefault = \i -> 1 `shiftL` i
{-# INLINE bitDefault #-}
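-- A quick sanity example (illustrative):
--
-- > bitDefault 3 == (8 :: Int)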
-- | Default implementation for 'testBit'.
--
-- Note that: @testBitDefault x i = (x .&. bit i) /= 0@
--
-- /Since: 4.6.0.0/
testBitDefault :: (Bits a, Num a) => a -> Int -> Bool
testBitDefault = \x i -> (x .&. bit i) /= 0
{-# INLINE testBitDefault #-}
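-- A quick sanity example (illustrative; 5 is 101 in binary):
--
-- > testBitDefault (5 :: Int) 0 == True
-- > testBitDefault (5 :: Int) 1 == False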
-- | Default implementation for 'popCount'.
--
-- This implementation is intentionally naive. Instances are expected to provide
-- an optimized implementation for their size.
--
-- /Since: 4.6.0.0/
popCountDefault :: (Bits a, Num a) => a -> Int
popCountDefault = go 0
where
    go !c 0 = c
    go c w = go (c+1) (w .&. (w - 1)) -- clear the least significant bit
{-# INLINABLE popCountDefault #-}
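-- A quick sanity example (illustrative; 7 is 111 in binary):
--
-- > popCountDefault (7 :: Int) == 3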
-- Interpret 'Bool' as 1-bit bit-field; /Since: 4.7.0.0/
instance Bits Bool where
(.&.) = (&&)
(.|.) = (||)
xor = (/=)
complement = not
shift x 0 = x
shift _ _ = False
rotate x _ = x
bit 0 = True
bit _ = False
testBit x 0 = x
testBit _ _ = False
bitSizeMaybe _ = Just 1
bitSize _ = 1
isSigned _ = False
popCount False = 0
popCount True = 1
instance FiniteBits Bool where
finiteBitSize _ = 1
instance Bits Int where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
zeroBits = 0
bit = bitDefault
testBit = testBitDefault
(I# x#) .&. (I# y#) = I# (x# `andI#` y#)
(I# x#) .|. (I# y#) = I# (x# `orI#` y#)
(I# x#) `xor` (I# y#) = I# (x# `xorI#` y#)
complement (I# x#) = I# (notI# x#)
(I# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I# (x# `iShiftL#` i#)
| otherwise = I# (x# `iShiftRA#` negateInt# i#)
(I# x#) `shiftL` (I# i#) = I# (x# `iShiftL#` i#)
(I# x#) `unsafeShiftL` (I# i#) = I# (x# `uncheckedIShiftL#` i#)
(I# x#) `shiftR` (I# i#) = I# (x# `iShiftRA#` i#)
(I# x#) `unsafeShiftR` (I# i#) = I# (x# `uncheckedIShiftRA#` i#)
{-# INLINE rotate #-} -- See Note [Constant folding for rotate]
(I# x#) `rotate` (I# i#) =
I# ((x# `uncheckedIShiftL#` i'#) `orI#` (x# `uncheckedIShiftRL#` (wsib -# i'#)))
where
!i'# = i# `andI#` (wsib -# 1#)
!wsib = WORD_SIZE_IN_BITS# {- work around preprocessor problem (??) -}
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
popCount (I# x#) = I# (word2Int# (popCnt# (int2Word# x#)))
isSigned _ = True
instance FiniteBits Int where
finiteBitSize _ = WORD_SIZE_IN_BITS
instance Bits Word where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(W# x#) .&. (W# y#) = W# (x# `and#` y#)
(W# x#) .|. (W# y#) = W# (x# `or#` y#)
(W# x#) `xor` (W# y#) = W# (x# `xor#` y#)
complement (W# x#) = W# (x# `xor#` mb#)
where !(W# mb#) = maxBound
(W# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = W# (x# `shiftL#` i#)
| otherwise = W# (x# `shiftRL#` negateInt# i#)
(W# x#) `shiftL` (I# i#) = W# (x# `shiftL#` i#)
(W# x#) `unsafeShiftL` (I# i#) = W# (x# `uncheckedShiftL#` i#)
(W# x#) `shiftR` (I# i#) = W# (x# `shiftRL#` i#)
(W# x#) `unsafeShiftR` (I# i#) = W# (x# `uncheckedShiftRL#` i#)
(W# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#) = W# x#
| otherwise = W# ((x# `uncheckedShiftL#` i'#) `or#` (x# `uncheckedShiftRL#` (wsib -# i'#)))
where
!i'# = i# `andI#` (wsib -# 1#)
!wsib = WORD_SIZE_IN_BITS# {- work around preprocessor problem (??) -}
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = False
popCount (W# x#) = I# (word2Int# (popCnt# x#))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits Word where
finiteBitSize _ = WORD_SIZE_IN_BITS
instance Bits Integer where
(.&.) = andInteger
(.|.) = orInteger
xor = xorInteger
complement = complementInteger
shift x i@(I# i#) | i >= 0 = shiftLInteger x i#
| otherwise = shiftRInteger x (negateInt# i#)
testBit x (I# i) = testBitInteger x i
zeroBits = 0
bit = bitDefault
popCount = popCountDefault
rotate x i = shift x i -- since an Integer never wraps around
bitSizeMaybe _ = Nothing
bitSize _ = error "Data.Bits.bitSize(Integer)"
isSigned _ = True
{- Note [Constant folding for rotate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The INLINE on the Int instance of rotate enables it to be constant
folded. For example:
sumU . mapU (`rotate` 3) . replicateU 10000000 $ (7 :: Int)
goes to:
Main.$wfold =
\ (ww_sO7 :: Int#) (ww1_sOb :: Int#) ->
case ww1_sOb of wild_XM {
__DEFAULT -> Main.$wfold (+# ww_sO7 56) (+# wild_XM 1);
10000000 -> ww_sO7
whereas before it was left as a call to $wrotate.
All other Bits instances seem to inline well enough on their
own to enable constant folding; for example 'shift':
sumU . mapU (`shift` 3) . replicateU 10000000 $ (7 :: Int)
goes to:
Main.$wfold =
\ (ww_sOb :: Int#) (ww1_sOf :: Int#) ->
case ww1_sOf of wild_XM {
__DEFAULT -> Main.$wfold (+# ww_sOb 56) (+# wild_XM 1);
10000000 -> ww_sOb
}
-}
| jwiegley/ghc-release | libraries/base/Data/Bits.hs | gpl-3.0 | 15,595 | 0 | 13 | 4,665 | 2,552 | 1,445 | 1,107 | 195 | 2 |
{-# OPTIONS -XPatternGuards #-}
{-
Generate a distromap, like these:
http://hackage.haskell.org/packages/archive/00-distromap/
Format:
("xmobar","0.8",Just "http://packages.gentoo.org/package/x11-misc/xmobar")
("xmobar","0.9",Just "http://packages.gentoo.org/package/x11-misc/xmobar")
("xmobar","0.9.2",Just "http://packages.gentoo.org/package/x11-misc/xmobar")
("xmonad","0.5",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.6",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.7",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.8",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.8.1",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.9",Just "http://packages.gentoo.org/package/x11-wm/xmonad")
("xmonad","0.9.1",Just "http://en.gentoo-wiki.com/wiki/Haskell/overlay")
Multiple entries for each package is allowed, given that there are different versions.
Setup:
Join all packages from portage and the overlay into a big map;
From Portage.PackageId: PackageName = category/package
PVULine = (packagename, versionstring, url)
Create such a map: Map PackageName DistroLine
Only one PVULine per version, and prefer portage over the overlay.
Algorithm:
1. Take a package from hackage
2. Look for it in the map
a. For each version:
find a match in the list of versions:
yield the PVULine
-}
module DistroMap
( distroMap ) where
import Control.Applicative
import qualified Data.List as List ( nub )
import qualified Data.Map as Map
import Data.Map ( Map )
import System.FilePath ( (</>) )
import Debug.Trace ( trace )
import Data.Maybe ( fromJust )
import Distribution.Verbosity
import Distribution.Text ( display )
import Distribution.Client.Types ( Repo, SourcePackageDb(..), SourcePackage(..) )
import Distribution.Simple.Utils ( info )
import qualified Data.Version as Cabal
import qualified Distribution.Package as Cabal
import qualified Distribution.Client.PackageIndex as CabalInstall
import qualified Distribution.Client.IndexUtils as CabalInstall
import Portage.Overlay ( readOverlayByPackage, getDirectoryTree )
import qualified Portage.PackageId as Portage
import qualified Portage.Version as Portage
type PVU = (Cabal.PackageName, Cabal.Version, Maybe String)
type PVU_Map = Map Portage.PackageName [(Cabal.Version, Maybe String)]
distroMap :: Verbosity -> Repo -> FilePath -> FilePath -> [String] -> IO ()
distroMap verbosity repo portagePath overlayPath args = do
info verbosity "distro map called"
info verbosity ("verbosity: " ++ show verbosity)
info verbosity ("portage: " ++ portagePath)
info verbosity ("overlay: " ++ overlayPath)
info verbosity ("args: " ++ show args)
portage <- readOverlayByPackage <$> getDirectoryTree portagePath
overlay <- readOverlayByPackage <$> getDirectoryTree overlayPath
info verbosity ("portage packages: " ++ show (length portage))
info verbosity ("overlay packages: " ++ show (length overlay))
let portageMap = buildPortageMap portage
overlayMap = buildOverlayMap overlay
completeMap = unionMap portageMap overlayMap
info verbosity ("portage map: " ++ show (Map.size portageMap))
info verbosity ("overlay map: " ++ show (Map.size overlayMap))
info verbosity ("complete map: " ++ show (Map.size completeMap))
SourcePackageDb { packageIndex = packageIndex } <-
CabalInstall.getSourcePackages verbosity [repo]
let pkgs0 = map (map packageInfoId) (CabalInstall.allPackagesByName packageIndex)
hackagePkgs = [ (Cabal.pkgName (head p), map Cabal.pkgVersion p) | p <- pkgs0 ]
info verbosity ("cabal packages: " ++ show (length hackagePkgs))
let pvus = concat $ map (\(p,vs) -> lookupPVU completeMap p vs) hackagePkgs
info verbosity ("found pvus: " ++ show (length pvus))
mapM_ (putStrLn . showPVU) pvus
return ()
showPVU :: PVU -> String
showPVU (p,v,u) = show $ (display p, display v, u)
-- building the PVU_Map
reduceVersion :: Portage.Version -> Portage.Version
reduceVersion (Portage.Version ns _ _ _) = Portage.Version ns Nothing [] 0
reduceVersions :: [Portage.Version] -> [Portage.Version]
reduceVersions = List.nub . map reduceVersion
buildMap :: [(Portage.PackageName, [Portage.Version])]
-> (Portage.PackageName -> Portage.Version -> Maybe String)
-> PVU_Map
buildMap pvs f = Map.mapWithKey (\p vs -> [ (fromJust $ Portage.toCabalVersion v, f p v)
| v <- reduceVersions vs ])
(Map.fromList pvs)
buildPortageMap :: [(Portage.PackageName, [Portage.Version])] -> PVU_Map
buildPortageMap lst = buildMap lst $ \ (Portage.PackageName c p) _v ->
Just $ "http://packages.gentoo.org/package" </> display c </> display p
buildOverlayMap :: [(Portage.PackageName, [Portage.Version])] -> PVU_Map
buildOverlayMap lst = buildMap lst $ \_ _ -> Just "http://en.gentoo-wiki.com/wiki/Haskell/overlay"
unionMap :: PVU_Map -> PVU_Map -> PVU_Map
unionMap = Map.unionWith f
where
f :: [(Cabal.Version, Maybe String)]
-> [(Cabal.Version, Maybe String)]
-> [(Cabal.Version, Maybe String)]
f vas vbs = Map.toList (Map.union (Map.fromList vas) (Map.fromList vbs))
-- resolving Cabal.PackageName to Portage.PackageName
lookupPVU :: PVU_Map -> Cabal.PackageName -> [Cabal.Version] -> [PVU]
lookupPVU pvu_map pn cvs =
case findItems (Portage.normalizeCabalPackageName pn) of
[] -> []
[item] -> ret item
items | [item] <- preferableItem items -> ret item
| otherwise -> trace (noDefaultText items) []
where
noDefaultText is = unlines $ ("no default for package: " ++ display pn)
: [ " * " ++ (display cat)
| (Portage.PackageName cat _, _) <- is]
ret (_, vs) = [ (pn, v, u) | (v, u) <- vs, v `elem` cvs ]
preferableItem items =
[ item
| item@(Portage.PackageName cat _pn, _vs) <- items
, cat == Portage.Category "dev-haskell"]
findItems cpn = Map.toList $ Map.filterWithKey f pvu_map
where
f (Portage.PackageName _cat _pn) _vs = cpn == pn
| Heather/hackport | DistroMap.hs | gpl-3.0 | 6,117 | 0 | 14 | 1,067 | 1,573 | 830 | 743 | 89 | 3 |
module Language.Octopus.Data where
import Import
import qualified Data.Sequence as Seq
import qualified Data.Map as Map
import Control.Monad.Reader
import Control.Monad.State
import Control.Concurrent.MVar (MVar)
type Tag = (Word, Text) -- ^ Integer for comparison, Text for spelling error reports
type Exn = (Word, Val)
type Fallible = Either Exn
data Val = Nm Rational -- ^ Rational number
| By ByteString -- ^ Bytes
| Tx Text -- ^ Text data
| Fp Handle -- ^ Input/output handle
| Sy Symbol -- ^ Symbol, aka. identifier
| Tg Tag -- ^ Unique tag
| Ab Tag Val -- ^ Abstract data
| Sq (Seq Val) -- ^ Sequence, aka. list
| Xn (Map Symbol Val) -- ^ Symbol-value map, aka. object
| Cl Val Val Val -- ^ Operative closure
| Ce (IORef Val) -- ^ Reference cell
| Ar (IOArray Int Val) -- ^ Mutable array
| Pr Primitive -- ^ Primitive operations
| Ks [Control] -- ^ Control stack
--TODO concurrency
deriving (Eq)
data Primitive = Vau | Eval | Match | Ifz | Imp
| Eq | Neq | Lt | Lte | Gt | Gte
| Add | Mul | Sub | Div
| Numer | Denom | NumParts
| ReadFp | WriteFp | FlushFp | CloseFp
| OpenFp --FIXME refac to be a custom primitive
| MkTag | MkAbstype | Wrap Tag | Unwrap Tag | Typeof
| Len | Cat | Cut
| Extends | Delete | Keys | Get
| New | Deref | Assign
| NewArr | Bounds | Index | AssignIx
--TODO Ks data primitives
| Handle | Raise
--TODO conversions
deriving (Eq, Show)
type FileCache = MVar (Fallible Val)
type ImportsCache = MVar (Map FilePath FileCache)
data MConfig = MConfig { importsCache :: ImportsCache
, libdir :: FilePath
, thisFile :: Maybe FilePath
}
data MState = MState { environ :: Val
, control :: [Control]
, nextTag :: Word
}
type Machine = ReaderT MConfig (StateT MState IO)
data Context = Op Val -- ^ Hole must be a combiner, val is the uneval'd argument
| Ap Val -- ^ Hole is the argument, apply held value to it
| Re Val -- ^ Restore an environment before continuing
| Es [Val] [Val] -- ^ Left-to-right sequence evaluation
| Eo Symbol [(Symbol, Val)] [(Symbol, Val)] -- ^ Object evaluation
deriving (Eq, Show)
data Control = NormK [Context]
| HndlK Tag Val
--TODO onEnter/onSuccess/onFail
| ImptK FilePath (MVar (Fallible Val))
deriving (Eq)
instance Show Control where
show (NormK k) = "NormK " ++ show k
show (HndlK i fn) = "HndlK " ++ show i ++ " " ++ show fn
show (ImptK path slot) = "ImptK " ++ show path
push :: Context -> Machine ()
push k = do
(NormK ks):kss <- gets control
modify $ \s -> s { control = (NormK (k:ks)):kss }
--FIXME remember that when I push a handler or a winding protect, I also need to push an environment restore
pushK :: Control -> Machine ()
pushK k@(NormK _) = do
ks <- gets control
modify $ \s -> s { control = k:ks }
pushK k = do
ks <- gets control
modify $ \s -> s { control = (NormK []):k:ks }
pop :: Machine ()
pop = do
stack <- gets control
case stack of
(NormK []):kss -> modify $ \s -> s { control = kss }
(NormK (Re env:ks)):kss -> modify $ \s -> s { environ = env, control = (NormK ks):kss }
(NormK (_:ks)):kss -> modify $ \s -> s { control = (NormK ks):kss }
(HndlK _ _):kss -> modify $ \s -> s { control = kss }
(ImptK _ _):kss -> modify $ \s -> s { control = kss }
replace :: Context -> Machine ()
replace k = pop >> push k
swapEnv :: Val -> Machine ()
swapEnv env' = do
env <- gets environ
(NormK ks):kss <- gets control
case ks of
(Re _):_ -> modify $ \s -> s { environ = env' } --allows tail recursion by not restoring environments that will immediately be thrown away by a second restoration
ks -> modify $ \s -> s { environ = env', control = (NormK ((Re env):ks)):kss }
splitStack :: Word -> Machine ([Control], [Control])
splitStack tg = break isPoint <$> gets control
where
isPoint (HndlK (tg', _) _) = tg == tg'
isPoint (ImptK _ _) = True
isPoint _ = False
mkTag :: Text -> Machine Val
mkTag spelling = do
n <- gets nextTag
modify $ \s -> s { nextTag = n + 1 }
return $ Tg (n, spelling)
mkAbstype :: Text -> Machine (Val, Val, Val)
mkAbstype spelling = do
tag <- mkTag spelling
let (Tg (n, spelling)) = tag
return (tag, Pr (Wrap (n, spelling)), Pr (Unwrap (n, spelling)))
getCurrentFile :: Machine (Maybe FilePath)
getCurrentFile = do
(top, bottom) <- break isImpt <$> gets control
case bottom of
(ImptK path _ : _) -> return $ Just path
[] -> asks thisFile
where
isImpt (ImptK _ _) = True
isImpt _ = False
instance Show Val where
show (Nm nm) | denominator nm == 1 = show $ numerator nm
| otherwise = show (numerator nm) ++ "/" ++ show (denominator nm)
show (By bytes) = "b" ++ show bytes --FIXME show with \x?? for non-ascii printable chars
show (Tx text) = show text --FIXME show using Octopus encoding, not Haskell
show (Fp _) = "#<handle>" --TODO at least show some metadata
show (Sy sy) = show sy
show (Tg (i, spelling)) = "#<tag " ++ show i ++ ": " ++ show spelling ++ ">"
show (Ab tag x) = "#<box " ++ show tag ++ ": " ++ show x ++ ">"
show (Sq xs) = "[" ++ intercalate ", " (show <$> toList xs) ++ "]"
show (Xn m) = case getCombo m of
Nothing -> "{" ++ intercalate ", " (showPair <$> Map.toList m) ++ "}"
Just (f, x) -> "(" ++ show f ++ " " ++ show x ++ ")"
where
showPair (k,v) = show k ++ ": " ++ show v
getCombo xn = case (Map.lookup (intern "__car__") xn, Map.lookup (intern "__cdr__") xn) of
(Just f, Just x) -> if length (Map.keys xn) == 2 then Just (f, x) else Nothing
_ -> Nothing
show (Cl var ast env) = "#<closure>"
show (Ce x) = "<reference cell>" --TODO show contents
show (Ar xs) = "#<mutable array>" --TODO show contents
--show (Eh tag fn) = "#<handler " ++ show tag ++ ": " ++ show fn ++ ">"
show (Pr f) = "#<" ++ show f ++ ">"
| Zankoku-Okuno/octopus | Language/Octopus/Data.hs | gpl-3.0 | 6,450 | 0 | 20 | 2,037 | 2,247 | 1,206 | 1,041 | 134 | 5 |
module Interface.Database where
import Data.Maybe
import Data.Vector (Vector, fromList)
import Database.HDBC
import Database.HDBC.Sqlite3 (connectSqlite3)
queryVar :: String -> String -> FilePath -> IO (Vector Double)
queryVar var cut infile = do
conn <- connectSqlite3 infile
v <- quickQuery' conn ("SELECT " ++ var ++ " from var where " ++ cut) []
let rows = map convRow v
disconnect conn
return . fromList $ map (fromMaybe (-10)) rows
where convRow :: [SqlValue] -> Maybe Double
convRow [sqlVal] = (return . fromSql) sqlVal
convRow _ = Nothing
queryCount :: String -> FilePath -> IO Int
queryCount cut infile = do
conn <- connectSqlite3 infile
c <- quickQuery' conn ("SELECT count(*) from var where " ++ cut) []
let count = case map getCount c of [[x]] -> x
_ -> 0
disconnect conn
return count
where getCount :: [SqlValue] -> [Int]
getCount [sqlVal] = (return . fromSql) sqlVal
getCount _ = []
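-- A usage sketch (hypothetical database file, column name and cut; the
-- schema is assumed, not defined in this module):
--
-- > do vs <- queryVar "met" "njet >= 2" "events.db"
-- >    n  <- queryCount "njet >= 2" "events.db"
-- >    print (n, vs)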
| cbpark/GluinoStopPolarization | lib/Interface/Database.hs | gpl-3.0 | 1,085 | 0 | 14 | 339 | 367 | 184 | 183 | 26 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Organizations.Roles.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a custom Role. When you delete a custom role, the following
-- changes occur immediately: * You cannot bind a member to the custom role
-- in an IAM Policy. * Existing bindings to the custom role are not
-- changed, but they have no effect. * By default, the response from
-- ListRoles does not include the custom role. You have 7 days to undelete
-- the custom role. After 7 days, the following changes occur: * The custom
-- role is permanently deleted and cannot be recovered. * If an IAM policy
-- contains a binding to the custom role, the binding is permanently
-- removed.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.organizations.roles.delete@.
module Network.Google.Resource.IAM.Organizations.Roles.Delete
(
-- * REST Resource
OrganizationsRolesDeleteResource
-- * Creating a Request
, organizationsRolesDelete
, OrganizationsRolesDelete
-- * Request Lenses
, ordXgafv
, ordEtag
, ordUploadProtocol
, ordAccessToken
, ordUploadType
, ordName
, ordCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.organizations.roles.delete@ method which the
-- 'OrganizationsRolesDelete' request conforms to.
type OrganizationsRolesDeleteResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "etag" Bytes :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Role
-- | Deletes a custom Role. When you delete a custom role, the following
-- changes occur immediately: * You cannot bind a member to the custom role
-- in an IAM Policy. * Existing bindings to the custom role are not
-- changed, but they have no effect. * By default, the response from
-- ListRoles does not include the custom role. You have 7 days to undelete
-- the custom role. After 7 days, the following changes occur: * The custom
-- role is permanently deleted and cannot be recovered. * If an IAM policy
-- contains a binding to the custom role, the binding is permanently
-- removed.
--
-- /See:/ 'organizationsRolesDelete' smart constructor.
data OrganizationsRolesDelete =
OrganizationsRolesDelete'
{ _ordXgafv :: !(Maybe Xgafv)
, _ordEtag :: !(Maybe Bytes)
, _ordUploadProtocol :: !(Maybe Text)
, _ordAccessToken :: !(Maybe Text)
, _ordUploadType :: !(Maybe Text)
, _ordName :: !Text
, _ordCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsRolesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ordXgafv'
--
-- * 'ordEtag'
--
-- * 'ordUploadProtocol'
--
-- * 'ordAccessToken'
--
-- * 'ordUploadType'
--
-- * 'ordName'
--
-- * 'ordCallback'
organizationsRolesDelete
:: Text -- ^ 'ordName'
-> OrganizationsRolesDelete
organizationsRolesDelete pOrdName_ =
OrganizationsRolesDelete'
{ _ordXgafv = Nothing
, _ordEtag = Nothing
, _ordUploadProtocol = Nothing
, _ordAccessToken = Nothing
, _ordUploadType = Nothing
, _ordName = pOrdName_
, _ordCallback = Nothing
}
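-- A usage sketch (hypothetical organization and role identifiers, following
-- the resource-name format documented for 'ordName' below):
--
-- > organizationsRolesDelete "organizations/123456789/roles/myCustomRole"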
-- | V1 error format.
ordXgafv :: Lens' OrganizationsRolesDelete (Maybe Xgafv)
ordXgafv = lens _ordXgafv (\ s a -> s{_ordXgafv = a})
-- | Used to perform a consistent read-modify-write.
ordEtag :: Lens' OrganizationsRolesDelete (Maybe ByteString)
ordEtag
= lens _ordEtag (\ s a -> s{_ordEtag = a}) .
mapping _Bytes
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ordUploadProtocol :: Lens' OrganizationsRolesDelete (Maybe Text)
ordUploadProtocol
= lens _ordUploadProtocol
(\ s a -> s{_ordUploadProtocol = a})
-- | OAuth access token.
ordAccessToken :: Lens' OrganizationsRolesDelete (Maybe Text)
ordAccessToken
= lens _ordAccessToken
(\ s a -> s{_ordAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ordUploadType :: Lens' OrganizationsRolesDelete (Maybe Text)
ordUploadType
= lens _ordUploadType
(\ s a -> s{_ordUploadType = a})
-- | The \`name\` parameter\'s value depends on the target resource for the
-- request, namely
-- [\`projects\`](\/iam\/reference\/rest\/v1\/projects.roles) or
-- [\`organizations\`](\/iam\/reference\/rest\/v1\/organizations.roles).
-- Each resource type\'s \`name\` value format is described below: *
-- [\`projects.roles.delete()\`](\/iam\/reference\/rest\/v1\/projects.roles\/delete):
-- \`projects\/{PROJECT_ID}\/roles\/{CUSTOM_ROLE_ID}\`. This method deletes
-- only [custom roles](\/iam\/docs\/understanding-custom-roles) that have
-- been created at the project level. Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/projects\/{PROJECT_ID}\/roles\/{CUSTOM_ROLE_ID}\`
-- *
-- [\`organizations.roles.delete()\`](\/iam\/reference\/rest\/v1\/organizations.roles\/delete):
-- \`organizations\/{ORGANIZATION_ID}\/roles\/{CUSTOM_ROLE_ID}\`. This
-- method deletes only [custom
-- roles](\/iam\/docs\/understanding-custom-roles) that have been created
-- at the organization level. Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/organizations\/{ORGANIZATION_ID}\/roles\/{CUSTOM_ROLE_ID}\`
-- Note: Wildcard (*) values are invalid; you must specify a complete
-- project ID or organization ID.
ordName :: Lens' OrganizationsRolesDelete Text
ordName = lens _ordName (\ s a -> s{_ordName = a})
-- | JSONP
ordCallback :: Lens' OrganizationsRolesDelete (Maybe Text)
ordCallback
= lens _ordCallback (\ s a -> s{_ordCallback = a})
instance GoogleRequest OrganizationsRolesDelete where
type Rs OrganizationsRolesDelete = Role
type Scopes OrganizationsRolesDelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OrganizationsRolesDelete'{..}
= go _ordName _ordXgafv _ordEtag _ordUploadProtocol
_ordAccessToken
_ordUploadType
_ordCallback
(Just AltJSON)
iAMService
where go
= buildClient
(Proxy :: Proxy OrganizationsRolesDeleteResource)
mempty
| brendanhay/gogol | gogol-iam/gen/Network/Google/Resource/IAM/Organizations/Roles/Delete.hs | mpl-2.0 | 7,200 | 0 | 16 | 1,418 | 816 | 488 | 328 | 110 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.TurnBasedMatches.Finish
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Finish a turn-based match. Each player should make this call once, after
-- all results are in. Only the player whose turn it is may make the first
-- call to Finish, and can pass in the final match state.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.turnBasedMatches.finish@.
module Network.Google.Resource.Games.TurnBasedMatches.Finish
(
-- * REST Resource
TurnBasedMatchesFinishResource
-- * Creating a Request
, turnBasedMatchesFinish
, TurnBasedMatchesFinish
-- * Request Lenses
, tbmfConsistencyToken
, tbmfPayload
, tbmfLanguage
, tbmfMatchId
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.turnBasedMatches.finish@ method which the
-- 'TurnBasedMatchesFinish' request conforms to.
type TurnBasedMatchesFinishResource =
"games" :>
"v1" :>
"turnbasedmatches" :>
Capture "matchId" Text :>
"finish" :>
QueryParam "consistencyToken" (Textual Int64) :>
QueryParam "language" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TurnBasedMatchResults :>
Put '[JSON] TurnBasedMatch
-- | Finish a turn-based match. Each player should make this call once, after
-- all results are in. Only the player whose turn it is may make the first
-- call to Finish, and can pass in the final match state.
--
-- /See:/ 'turnBasedMatchesFinish' smart constructor.
data TurnBasedMatchesFinish = TurnBasedMatchesFinish'
{ _tbmfConsistencyToken :: !(Maybe (Textual Int64))
, _tbmfPayload :: !TurnBasedMatchResults
, _tbmfLanguage :: !(Maybe Text)
, _tbmfMatchId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TurnBasedMatchesFinish' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tbmfConsistencyToken'
--
-- * 'tbmfPayload'
--
-- * 'tbmfLanguage'
--
-- * 'tbmfMatchId'
turnBasedMatchesFinish
:: TurnBasedMatchResults -- ^ 'tbmfPayload'
-> Text -- ^ 'tbmfMatchId'
-> TurnBasedMatchesFinish
turnBasedMatchesFinish pTbmfPayload_ pTbmfMatchId_ =
TurnBasedMatchesFinish'
{ _tbmfConsistencyToken = Nothing
, _tbmfPayload = pTbmfPayload_
, _tbmfLanguage = Nothing
, _tbmfMatchId = pTbmfMatchId_
}
-- | The last-seen mutation timestamp.
tbmfConsistencyToken :: Lens' TurnBasedMatchesFinish (Maybe Int64)
tbmfConsistencyToken
= lens _tbmfConsistencyToken
(\ s a -> s{_tbmfConsistencyToken = a})
. mapping _Coerce
-- | Multipart request metadata.
tbmfPayload :: Lens' TurnBasedMatchesFinish TurnBasedMatchResults
tbmfPayload
= lens _tbmfPayload (\ s a -> s{_tbmfPayload = a})
-- | The preferred language to use for strings returned by this method.
tbmfLanguage :: Lens' TurnBasedMatchesFinish (Maybe Text)
tbmfLanguage
= lens _tbmfLanguage (\ s a -> s{_tbmfLanguage = a})
-- | The ID of the match.
tbmfMatchId :: Lens' TurnBasedMatchesFinish Text
tbmfMatchId
= lens _tbmfMatchId (\ s a -> s{_tbmfMatchId = a})
instance GoogleRequest TurnBasedMatchesFinish where
type Rs TurnBasedMatchesFinish = TurnBasedMatch
type Scopes TurnBasedMatchesFinish =
'["https://www.googleapis.com/auth/games",
"https://www.googleapis.com/auth/plus.login"]
requestClient TurnBasedMatchesFinish'{..}
= go _tbmfMatchId _tbmfConsistencyToken _tbmfLanguage
(Just AltJSON)
_tbmfPayload
gamesService
where go
= buildClient
(Proxy :: Proxy TurnBasedMatchesFinishResource)
mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/TurnBasedMatches/Finish.hs | mpl-2.0 | 4,641 | 0 | 16 | 1,068 | 572 | 337 | 235 | 86 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Convenience hooks for writing custom @Setup.hs@ files for
-- bindings.
module Data.GI.CodeGen.CabalHooks
( setupHaskellGIBinding
) where
import qualified Distribution.ModuleName as MN
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Setup
import Distribution.Simple (UserHooks(..), simpleUserHooks,
defaultMainWithHooks, OptimisationLevel(..),
Dependency(..), PackageName(..))
import Distribution.PackageDescription
import Data.GI.CodeGen.API (loadGIRInfo)
import Data.GI.CodeGen.Code (genCode, writeModuleTree, listModuleTree)
import Data.GI.CodeGen.CodeGen (genModule)
import Data.GI.CodeGen.Config (Config(..), CodeGenFlags(..))
import Data.GI.CodeGen.Overrides (parseOverridesFile, girFixups,
filterAPIsAndDeps)
import Data.GI.CodeGen.PkgConfig (tryPkgConfig)
import Data.GI.CodeGen.Util (ucFirst, tshow)
import Control.Monad (when)
import Data.Maybe (fromJust, fromMaybe)
import qualified Data.Map as M
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.Directory (doesFileExist)
import System.FilePath ((</>), (<.>))
type ConfHook = (GenericPackageDescription, HookedBuildInfo) -> ConfigFlags
-> IO LocalBuildInfo
-- | Generate the @PkgInfo@ module, listing the build information for
-- the module. We include in particular the versions for the
-- `pkg-config` dependencies of the module.
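--
-- As a rough sketch, for a binding whose @PkgInfo@ module is named
-- @GI.GLib.PkgInfo@ the generated file would look as follows (the version
-- and flag values are illustrative only):
--
-- > module GI.GLib.PkgInfo (pkgConfigVersions, flags) where
-- >
-- > import Prelude (String, Bool(..))
-- >
-- > pkgConfigVersions :: [(String, String)]
-- > pkgConfigVersions = [("glib-2.0","2.48.0")]
-- >
-- > flags :: [(String, Bool)]
-- > flags = [("overloaded-methods",True)]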
genPkgInfo :: [Dependency] -> [(FlagName, Bool)] -> FilePath -> Text -> IO ()
genPkgInfo deps flags fName modName = do
versions <- mapM findVersion deps
TIO.writeFile fName $ T.unlines
[ "module " <> modName <> " (pkgConfigVersions, flags) where"
, ""
, "import Prelude (String, Bool(..))"
, ""
, "pkgConfigVersions :: [(String, String)]"
, "pkgConfigVersions = " <> tshow versions
, ""
, "flags :: [(String, Bool)]"
, "flags = " <> tshow flags'
]
where findVersion :: Dependency -> IO (Text, Text)
findVersion (Dependency (PackageName n) _) =
tryPkgConfig (T.pack n) >>= \case
Just v -> return v
Nothing -> error ("Could not determine version for required pkg-config module \"" <> n <> "\".")
flags' :: [(String, Bool)]
flags' = map (\(FlagName fn, v) -> (fn, v)) flags
-- | Parse the set of flags given to configure into flags for the code
-- generator.
parseFlags :: [(FlagName, Bool)] -> CodeGenFlags
parseFlags fs = parsed
where parsed :: CodeGenFlags
parsed = CodeGenFlags {
cgOverloadedProperties = check "overloaded-properties"
, cgOverloadedSignals = check "overloaded-signals"
, cgOverloadedMethods = check "overloaded-methods"
}
check :: String -> Bool
check s = fromMaybe True (M.lookup s flags)
flags :: M.Map String Bool
flags = M.fromList (map (\(FlagName fn, v) -> (fn, v)) fs)
-- | A convenience helper for `confHook`, such that bindings for the
-- given module are generated in the @configure@ step of @cabal@.
confCodeGenHook :: Text -- ^ name
-> Text -- ^ version
-> Bool -- ^ verbose
-> Maybe FilePath -- ^ overrides file
-> Maybe FilePath -- ^ output dir
-> ConfHook -- ^ previous `confHook`
-> ConfHook
confCodeGenHook name version verbosity overrides outputDir
defaultConfHook (gpd, hbi) flags = do
ovsData <- case overrides of
Nothing -> return ""
Just fname -> TIO.readFile fname
ovs <- parseOverridesFile (T.lines ovsData) >>= \case
Left err -> error $ "Error when parsing overrides file: "
++ T.unpack err
Right ovs -> return ovs
(gir, girDeps) <- loadGIRInfo verbosity name (Just version) [] (girFixups ovs)
let (apis, deps) = filterAPIsAndDeps ovs gir girDeps
allAPIs = M.union apis deps
cfg = Config {modName = Just name,
verbose = verbosity,
overrides = ovs,
cgFlags = parseFlags (configConfigurationsFlags flags)}
m <- genCode cfg allAPIs ["GI", ucFirst name] (genModule apis)
alreadyDone <- doesFileExist (fromMaybe "" outputDir
</> "GI" </> T.unpack (ucFirst name) <.> "hs")
moduleList <- if not alreadyDone
then writeModuleTree verbosity outputDir m
else return (listModuleTree m)
let pkgInfoMod = "GI." <> ucFirst name <> ".PkgInfo"
em' = map (MN.fromString . T.unpack) (pkgInfoMod : moduleList)
ctd' = ((condTreeData . fromJust . condLibrary) gpd) {exposedModules = em'}
cL' = ((fromJust . condLibrary) gpd) {condTreeData = ctd'}
gpd' = gpd {condLibrary = Just cL'}
when (not alreadyDone) $
genPkgInfo ((pkgconfigDepends . libBuildInfo . condTreeData .
fromJust . condLibrary) gpd)
(configConfigurationsFlags flags)
(fromMaybe "" outputDir
</> "GI" </> T.unpack (ucFirst name) </> "PkgInfo.hs")
pkgInfoMod
lbi <- defaultConfHook (gpd', hbi) flags
return (lbi {withOptimization = NoOptimisation})
-- | The entry point for @Setup.hs@ files in bindings.
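--
-- A minimal @Setup.hs@ sketch (the binding name, version and overrides file
-- below are placeholders, not taken from any particular package):
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Data.GI.CodeGen.CabalHooks (setupHaskellGIBinding)
-- >
-- > main :: IO ()
-- > main = setupHaskellGIBinding "GLib" "2.0" False (Just "GLib.overrides") Nothing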
setupHaskellGIBinding :: Text -- ^ name
-> Text -- ^ version
-> Bool -- ^ verbose
-> Maybe FilePath -- ^ overrides file
-> Maybe FilePath -- ^ output dir
-> IO ()
setupHaskellGIBinding name version verbose overridesFile outputDir =
defaultMainWithHooks (simpleUserHooks {
confHook = confCodeGenHook name version verbose
overridesFile outputDir
(confHook simpleUserHooks)
})
| hamishmack/haskell-gi | lib/Data/GI/CodeGen/CabalHooks.hs | lgpl-2.1 | 6,111 | 0 | 15 | 1,833 | 1,454 | 802 | 652 | -1 | -1 |
{-# language DeriveFunctor, DeriveFoldable #-}
module Data.StrictList where
import Control.Monad (ap)
import Data.Foldable as F
data StrictList a
= !a :! !(StrictList a)
| Empty
deriving (Show, Functor, Foldable)
infixr 5 :!
instance Monoid (StrictList a) where
mempty = Empty
mappend (a :! r) bs = a :! mappend r bs
mappend Empty bs = bs
instance Applicative StrictList where
(<*>) = ap
pure = return
instance Monad StrictList where
m >>= k = F.foldr (mappend . k) Empty m
m >> k = F.foldr (mappend . (\ _ -> k)) Empty m
return x = singleton x
fail _ = Empty
type SL a = StrictList a
fromList :: [a] -> StrictList a
fromList (a : r) = a :! fromList r
fromList [] = Empty
singleton :: a -> StrictList a
singleton = (:! Empty)
(!) :: StrictList a -> Int -> a
l ! n = case (l, n) of
((a :! _), 0) -> a
((_ :! r), i) -> r ! pred i
(Empty, _) -> error "(!): index out of bounds"
slFilter :: (a -> Bool) -> StrictList a -> StrictList a
slFilter p (a :! r) =
(if p a then (a :!) else id) (slFilter p r)
slFilter _ Empty = Empty
-- | PRE: SL a is sorted.
-- Inserts an element into the list. The returned list will be sorted.
-- If the input list contained only unique elements, the output list will
-- too. (This means the element will not be inserted
-- if it is already in the list.)
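--
-- For example (informal, assuming the infix syntax above):
--
-- > insertUnique 2 (1 :! 3 :! Empty)   -- 1 :! 2 :! 3 :! Empty
-- > insertUnique 3 (1 :! 3 :! Empty)   -- unchanged: 1 :! 3 :! Empty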
insertUnique :: Ord a => a -> SL a -> SL a
insertUnique a (b :! r) =
case compare a b of
GT -> b :! insertUnique a r
EQ -> b :! r -- not inserted
LT -> a :! b :! r
insertUnique a Empty = singleton a
delete :: Eq a => a -> SL a -> SL a
delete x (a :! r) =
if x == a
then r
else a :! delete x r
delete _ Empty = Empty
| nikki-and-the-robots/nikki | src/Data/StrictList.hs | lgpl-3.0 | 1,733 | 0 | 10 | 483 | 673 | 355 | 318 | 53 | 3 |
------------------------------------------------------------------------------
-- |
-- Module : Even
-- Copyright : (C) 2011 Aliaksiej Artamonaŭ
-- License : LGPL
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- Example for the TTM package. Decides whether the unary number to the right
-- of the starting position of the machine's head is even or odd. If it is
-- odd then a single One symbol is left on the tape. If it is even then no
-- One symbols are left on the tape.
------------------------------------------------------------------------------
module Even
(
Even.even
)
where
import TTM
-- | Transition table for a Turing machine which decides whether the supplied
-- unary number is even or odd.
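--
-- A worked example (informal): starting on a Zero just left of the input,
-- the tape @... 0 1 1 1 0 ...@ is erased One by One while the state tracks
-- the parity of the erased part, and a single One is written back since 3 is
-- odd; for @... 0 1 1 0 ...@ nothing is written back since 2 is even.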
even =
-- stop on first non-zero item to the right
Rule Sz Zero Sz Zero MRight :+:
Rule Sz One (S1 Sz) One MNoop :+:
-- remove the number keeping the oddity of eliminated part;
-- S1 Sz — erased part is even
-- S2 Sz — erased part is odd
Rule (S1 Sz) One (S2 Sz) Zero MRight :+:
Rule (S2 Sz) One (S1 Sz) Zero MRight :+:
-- write back the result
Rule (S1 Sz) Zero (S9 Sz) Zero MNoop :+:
Rule (S2 Sz) Zero (S9 Sz) One MNoop :+:
Nil
| aartamonau/haskell-ttm | examples/Even.hs | lgpl-3.0 | 1,277 | 0 | 13 | 302 | 185 | 103 | 82 | 12 | 1 |
maximum' :: (Ord a) => [a] -> a
maximum' [] = error "maximum of empty list"
maximum' [x] = x
maximum' (x:xs) = max x (maximum' xs)
| tokyo-jesus/university | src/haskell/recursion/maximumAwesome.hs | unlicense | 131 | 0 | 7 | 27 | 72 | 37 | 35 | 4 | 1 |
module Main where
import Hello
import OpIO
main :: IO ()
main = run hello
| aztecrex/haskell-testing-with-free-monad | app/Main.hs | unlicense | 76 | 0 | 6 | 17 | 28 | 16 | 12 | 5 | 1 |
-- Copyright (c) 2010 - Seweryn Dynerowicz
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- A definition of the AS-PATH policy of BGP
module PathInformations.Paths
( Paths(..)
, Path(..)
) where
import Data.Set
import Algebra.Semiring
data Path = P [Int] | Invalid
deriving (Eq,Ord)
instance Semiring (Path) where
add (P as) (P bs) = P as
add Invalid b = b
add a Invalid = a
zero = P []
mul (P as) (P bs) = P (as ++ bs)
mul Invalid _ = Invalid
mul _ Invalid = Invalid
unit = Invalid
showPath :: (Show a) => [a] -> String
showPath [] = "ε"
showPath (a:[]) = show a
showPath (a:as) = (show a) ++ (showPath as)
instance Show Path where
show (P as) = showPath as
show Invalid = "∅"
data Paths = PS (Set Path) | AllPaths
deriving(Eq)
instance Show Paths where
show (PS as) = show (toList as)
show AllPaths = "ALL"
instance Semiring (Paths) where
add AllPaths _ = AllPaths
add _ AllPaths = AllPaths
add (PS as) (PS bs) = PS (union as bs)
zero = PS empty
mul a AllPaths = a
mul AllPaths b = b
mul (PS as) (PS bs) = PS (fromList [mul a b | a <- asl, b <- bsl])
where asl = toList as
bsl = toList bs
unit = AllPaths
| sdynerow/Semirings-Library | haskell/PathInformations/Paths.hs | apache-2.0 | 1,741 | 0 | 11 | 433 | 543 | 288 | 255 | 39 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Created : 2015 Aug 26 (Wed) 11:56:37 by Harold Carr.
Last Modified : 2015 Sep 17 (Thu) 15:44:26 by Harold Carr.
-}
module Service.UserEndpoint
( ueMain
)
where
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (decode, encode)
import Data.String.Conversions (convertString)
import Data.Text.Lazy (unpack)
import Data.Text.Lazy.Encoding (decodeUtf8)
import Network.HTTP.Types (badRequest400, ok200)
import qualified Service.Interact as I
import Web.Scotty
ueMain :: I.GetUser -> I.PutUser -> (String -> IO a) -> (Int -> IO a) -> IO ()
ueMain getUser putUser displayHandler adminHandler = scotty 3000 $ do
post "/" $ do
b <- body
let d = decodeUtf8 b
case decode (convertString d) of
Nothing -> do liftIO (displayHandler ("-> " ++ unpack d))
status badRequest400
r <- liftIO I.mkInvalidMsgResponse
liftIO (displayHandler ("<- " ++ convertString (encode r)))
json r
(Just m) -> do liftIO (displayHandler (I.showInput m))
r <- liftIO (I.input getUser putUser m)
status ok200
liftIO (displayHandler (I.showOutput r))
json r
matchAny "/dGVzdDp1c2Vy/:id" $ do
id <- param "id"
liftIO (adminHandler ((read id) :: Int))
status ok200
matchAny "/invalid-ok200" $ do
status ok200
r <- liftIO I.mkInvalidMethodOrRoute
json r
matchAny "/:everythingElse" $ do
status badRequest400
r <- liftIO I.mkInvalidMethodOrRoute
json r
| haroldcarr/utah-haskell | future/src/Service/UserEndpoint.hs | apache-2.0 | 1,826 | 0 | 23 | 665 | 499 | 240 | 259 | 39 | 2 |
-- http://www.codewars.com/kata/541c8630095125aba6000c00
module DigitalRoot where
digitalRoot :: Integral a => a -> a
digitalRoot n = if n < 10
then n
else (digitalRoot . sum . map (`mod`10) . takeWhile (>0) . iterate (`div`10)) n | Bodigrim/katas | src/haskell/6-Sum-of-Digits--Digital-Root.hs | bsd-2-clause | 235 | 0 | 12 | 40 | 88 | 50 | 38 | 5 | 2 |
{-# LANGUAGE PackageImports #-}
module GHC.IO.Encoding.UTF16 (module M) where
import "base" GHC.IO.Encoding.UTF16 as M
| silkapp/base-noprelude | src/GHC/IO/Encoding/UTF16.hs | bsd-3-clause | 124 | 0 | 4 | 18 | 25 | 19 | 6 | 3 | 0 |
{-#LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent.HEP as HEP
import Control.Concurrent
import Control.Concurrent.HEP.Syslog
import System.IO
import Control.Monad.Trans
import Control.Monad.Trans.Either
import Control.Monad
import Network.AMQP as AMQP
import Data.Typeable
import System.Posix.Signals
data WBInfoMessage = WBSignalStop
| WBSignalReload
deriving Typeable
instance HEP.Message WBInfoMessage
main = runHEPGlobal $! procWithBracket wbInfoInit wbInfoShutdown $! proc wbInfoWorker
wbInfoInit :: HEPProc
wbInfoInit = do
startSyslogSupervisor "wbinfomq" supervisor
syslogInfo "starting rabbitmq client"
startAMQP
inbox <- selfMBox
liftIO $! do
installHandler sigTERM (Catch ( sendMBox inbox (toMessage WBSignalStop))) Nothing
installHandler sigHUP (Catch ( sendMBox inbox (toMessage WBSignalReload))) Nothing
procRunning
wbInfoShutdown:: HEPProc
wbInfoShutdown = do
syslogInfo "stopping AMQP client"
stopAMQP
syslogInfo "stopping Syslog client"
stopSyslog
procFinished
wbInfoWorker:: HEPProc
wbInfoWorker = do
msg <- receive
case fromMessage msg of
Nothing -> procRunning
Just WBSignalStop -> procFinished
supervisor:: HEP HEPProcState
supervisor = do
me <- self
msg <- receive
let handleChildLinkMessage:: Maybe LinkedMessage -> EitherT HEPProcState HEP HEPProcState
handleChildLinkMessage Nothing = lift procRunning >>= right
handleChildLinkMessage (Just (ProcessFinished pid)) = do
            liftIO $! putStrLn $! "I've been notified about " ++ show pid
subscribed <- lift getSubscribed
case subscribed of
[] -> lift procFinished >>= left
_ -> lift procRunning >>= left
handleServiceMessage:: Maybe SupervisorMessage -> EitherT HEPProcState HEP HEPProcState
handleServiceMessage Nothing = lift procRunning >>= right
handleServiceMessage (Just (ProcWorkerFailure cpid e state outbox)) = do
liftIO $! putStrLn $! "supervisor worker: " ++ show e
lift $! procContinue outbox
lift procRunning >>= right
handleServiceMessage (Just (ProcInitFailure cpid e _ outbox)) = do
liftIO $! putStrLn $! "supervisor init: " ++ show e
lift $! procContinue outbox
lift procFinished >>= left
mreq <- runEitherT $! do
handleChildLinkMessage $! fromMessage msg
handleServiceMessage $! fromMessage msg
case mreq of
Left some -> return some
Right some -> return some
data AMQPState = AMQPState
{ conn:: AMQP.Connection
, chan:: AMQP.Channel
}
deriving Typeable
instance HEPLocalState AMQPState
data AMQPMessage = AMQPPing (MBox AMQPAnswer)
| AMQPStop
deriving Typeable
instance HEP.Message AMQPMessage
data AMQPAnswer = AMQPAnswerOK
amqpProc = "AMQPMain"
amqpSupervisorProc = "AMQPSupervisor"
startAMQP:: HEP Pid
startAMQP = do
spawn $! procRegister amqpProc $!
procWithSupervisor (procRegister amqpSupervisorProc $! proc amqpSupervisor) $!
procWithBracket amqpInit amqpShutdown $! proc amqpWorker
amqpInit:: HEPProc
amqpInit = do
_conn <- liftIO $! openConnection "127.0.0.1" "/" "guest" "guest"
_chan <- liftIO $! openChannel _conn
setLocalState $! Just $! AMQPState
{ conn = _conn
, chan = _chan
}
procRunning
amqpShutdown:: HEPProc
amqpShutdown = do
s <- localState
let shutdownAMQP:: AMQPState -> HEPProc
shutdownAMQP state = do
liftIO $! closeConnection (conn state)
setLocalState (Nothing:: Maybe AMQPState)
procFinished
case s of
Nothing -> procFinished
Just state -> shutdownAMQP state
amqpSupervisor:: HEPProc
amqpSupervisor = do
msg <- receive
let handleChildLinkMessage:: Maybe LinkedMessage -> EitherT HEPProcState HEP HEPProcState
handleChildLinkMessage Nothing = lift procRunning >>= right
handleChildLinkMessage (Just (ProcessFinished pid)) = do
lift $! syslogInfo $! "supervisor: spotted client exit " ++ show pid
subscribed <- lift getSubscribed
case subscribed of
[] -> lift procFinished >>= left
_ -> lift procRunning >>= left
handleServiceMessage:: Maybe SupervisorMessage -> EitherT HEPProcState HEP HEPProcState
handleServiceMessage Nothing = lift procRunning >>= right
handleServiceMessage (Just (ProcWorkerFailure cpid e _ outbox)) = do
lift $! syslogError $! "supervisor: worker " ++ show cpid ++
" failed with: " ++ show e ++ ". It will be recovered"
lift $! procContinue outbox
lift procRunning >>= left
handleServiceMessage (Just (ProcInitFailure cpid e _ outbox)) = do
lift $! syslogError $! "supervisor: init of " ++ show cpid ++
" failed with: " ++ show e ++ ". It will be restarted after 10 seconds"
liftIO $! threadDelay 10000000
lift $! procRestart outbox Nothing
lift procRunning >>= left
mreq <- runEitherT $! do
handleChildLinkMessage $! fromMessage msg
handleServiceMessage $! fromMessage msg
case mreq of
Left some -> return some
Right some -> return some
amqpWorker = procFinished
stopAMQP:: HEP ()
stopAMQP = do
send (toPid amqpProc) $! AMQPStop
| dambaev/wbinfomq | src/wbinfomq.hs | bsd-3-clause | 5,632 | 0 | 17 | 1,537 | 1,409 | 671 | 738 | 139 | 6 |
--------------------------------------------------------------------------------
-- | Wraps pandoc's bibliography handling
--
-- In order to add a bibliography, you will need a bibliography file (e.g.
-- @.bib@) and a CSL file (@.csl@). Both need to be compiled with their
-- respective compilers ('biblioCompiler' and 'cslCompiler'). Then, you can
-- refer to these files when you use 'readPandocBiblio'. This function also
-- takes the reader options for completeness -- you can use
-- 'defaultHakyllReaderOptions' if you're unsure.
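--
-- A rough usage sketch in a site's rules (the file names and the exact
-- pipeline are illustrative, adapt them to your site):
--
-- > match "style.csl" $ compile cslCompiler
-- > match "refs.bib" $ compile biblioCompiler
-- >
-- > match "posts/*" $ do
-- >     route $ setExtension "html"
-- >     compile $ do
-- >         csl <- load "style.csl"
-- >         bib <- load "refs.bib"
-- >         getResourceBody
-- >             >>= readPandocBiblio defaultHakyllReaderOptions (Just csl) bib
-- >             >>= return . writePandoc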
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Hakyll.Web.Pandoc.Biblio
( CSL
, cslCompiler
, Biblio (..)
, biblioCompiler
, readPandocBiblio
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Data.Binary (Binary (..))
import Data.Traversable (traverse)
import Data.Typeable (Typeable)
import qualified Text.CSL as CSL
import Text.Pandoc (Pandoc, ReaderOptions (..))
import Text.Pandoc.Biblio (processBiblio)
--------------------------------------------------------------------------------
import Hakyll.Core.Compiler
import Hakyll.Core.Identifier
import Hakyll.Core.Item
import Hakyll.Core.Writable
import Hakyll.Web.Pandoc
--------------------------------------------------------------------------------
data CSL = CSL
deriving (Show, Typeable)
--------------------------------------------------------------------------------
instance Binary CSL where
put CSL = return ()
get = return CSL
--------------------------------------------------------------------------------
instance Writable CSL where
-- Shouldn't be written.
write _ _ = return ()
--------------------------------------------------------------------------------
cslCompiler :: Compiler (Item CSL)
cslCompiler = makeItem CSL
--------------------------------------------------------------------------------
newtype Biblio = Biblio [CSL.Reference]
deriving (Show, Typeable)
--------------------------------------------------------------------------------
instance Binary Biblio where
-- Ugly.
get = Biblio . read <$> get
put (Biblio rs) = put $ show rs
--------------------------------------------------------------------------------
instance Writable Biblio where
-- Shouldn't be written.
write _ _ = return ()
--------------------------------------------------------------------------------
biblioCompiler :: Compiler (Item Biblio)
biblioCompiler = do
filePath <- toFilePath <$> getUnderlying
makeItem =<< unsafeCompiler (Biblio <$> CSL.readBiblioFile filePath)
--------------------------------------------------------------------------------
readPandocBiblio :: ReaderOptions
-> Maybe (Item CSL)
-> Item Biblio
-> (Item String)
-> Compiler (Item Pandoc)
readPandocBiblio ropt csl biblio item = do
-- Parse CSL file, if given
style <- unsafeCompiler $
traverse (CSL.readCSLFile . toFilePath . itemIdentifier) csl
  -- We need to know the citation keys, add them *before* actually parsing the
-- actual page. If we don't do this, pandoc won't even consider them
-- citations!
let Biblio refs = itemBody biblio
ropt' = ropt {readerReferences = readerReferences ropt ++ refs}
pandoc = itemBody $ readPandocWith ropt' item
pandoc' = processBiblio style refs pandoc
return $ fmap (const pandoc') item
| bergmark/hakyll | src/Hakyll/Web/Pandoc/Biblio.hs | bsd-3-clause | 3,797 | 0 | 13 | 812 | 575 | 322 | 253 | 54 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module Fragment.Tuple.Ast.Error (
ErrExpectedTyTuple(..)
, AsExpectedTyTuple(..)
, expectTyTuple
, ErrTupleOutOfBounds(..)
, AsTupleOutOfBounds(..)
, lookupTuple
) where
import Control.Monad.Except (MonadError)
import Control.Monad.Error.Lens (throwing)
import Control.Lens (preview)
import Control.Lens.Prism (Prism')
import Control.Lens.TH (makePrisms)
import Ast.Type
import Ast.Error
import Fragment.Tuple.Ast.Type
data ErrExpectedTyTuple ki ty a = ErrExpectedTyTuple (Type ki ty a)
deriving (Eq, Ord, Show)
makePrisms ''ErrExpectedTyTuple
class AsExpectedTyTuple e ki ty a where -- | e -> ty, e -> a where
_ExpectedTyTuple :: Prism' e (Type ki ty a)
instance AsExpectedTyTuple (ErrExpectedTyTuple ki ty a) ki ty a where
_ExpectedTyTuple = _ErrExpectedTyTuple
instance {-# OVERLAPPABLE #-} AsExpectedTyTuple (ErrSum xs) ki ty a => AsExpectedTyTuple (ErrSum (x ': xs)) ki ty a where
_ExpectedTyTuple = _ErrNext . _ExpectedTyTuple
instance {-# OVERLAPPING #-} AsExpectedTyTuple (ErrSum (ErrExpectedTyTuple ki ty a ': xs)) ki ty a where
_ExpectedTyTuple = _ErrNow . _ExpectedTyTuple
expectTyTuple :: (MonadError e m, AsExpectedTyTuple e ki ty a, AsTyTuple ki ty) => Type ki ty a -> m [Type ki ty a]
expectTyTuple ty =
case preview _TyTuple ty of
Just tys -> return tys
_ -> throwing _ExpectedTyTuple ty
data ErrTupleOutOfBounds = ErrTupleOutOfBounds Int Int
deriving (Eq, Ord, Show)
makePrisms ''ErrTupleOutOfBounds
class AsTupleOutOfBounds e where
_TupleOutOfBounds :: Prism' e (Int, Int)
instance AsTupleOutOfBounds ErrTupleOutOfBounds where
_TupleOutOfBounds = _ErrTupleOutOfBounds
instance {-# OVERLAPPABLE #-} AsTupleOutOfBounds (ErrSum xs) => AsTupleOutOfBounds (ErrSum (x ': xs)) where
_TupleOutOfBounds = _ErrNext . _TupleOutOfBounds
instance {-# OVERLAPPING #-} AsTupleOutOfBounds (ErrSum (ErrTupleOutOfBounds ': xs)) where
_TupleOutOfBounds = _ErrNow . _TupleOutOfBounds
lookupTuple :: (MonadError e m, AsTupleOutOfBounds e) => [t a] -> Int -> m (t a)
lookupTuple ts i =
let
l = length ts
f x
| x < 0 = throwing _TupleOutOfBounds (x, l)
| x >= l = throwing _TupleOutOfBounds (x, l)
| otherwise = return $ ts !! i
in
f i
| dalaing/type-systems | src/Fragment/Tuple/Ast/Error.hs | bsd-3-clause | 2,653 | 0 | 12 | 457 | 732 | 393 | 339 | 59 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Setup
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Setup
( globalCommand, GlobalFlags(..), globalRepos
, configureCommand, ConfigFlags(..), filterConfigureFlags
, configureExCommand, ConfigExFlags(..), defaultConfigExFlags
, configureExOptions
, installCommand, InstallFlags(..), installOptions, defaultInstallFlags
, listCommand, ListFlags(..)
, updateCommand
, upgradeCommand
, infoCommand, InfoFlags(..)
, fetchCommand, FetchFlags(..)
, checkCommand
, uploadCommand, UploadFlags(..)
, reportCommand, ReportFlags(..)
, unpackCommand, UnpackFlags(..)
, initCommand, IT.InitFlags(..)
, sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..)
, parsePackageArgs
--TODO: stop exporting these:
, showRepo
, parseRepo
) where
import Distribution.Client.Types
( Username(..), Password(..), Repo(..), RemoteRepo(..), LocalRepo(..) )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Dependency.Types
( Solver(..) )
import qualified Distribution.Client.Init.Types as IT
( InitFlags(..), PackageType(..) )
import Distribution.Client.Targets
( UserConstraint, readUserConstraint )
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Command hiding (boolOpt)
import qualified Distribution.Simple.Setup as Cabal
( configureCommand, sdistCommand, haddockCommand )
import Distribution.Simple.Setup
( ConfigFlags(..), SDistFlags(..), HaddockFlags(..) )
import Distribution.Simple.Setup
( Flag(..), toFlag, fromFlag, flagToList
, optionVerbosity, boolOpt, trueArg, falseArg )
import Distribution.Simple.InstallDirs
( PathTemplate, toPathTemplate, fromPathTemplate )
import Distribution.Version
( Version(Version), anyVersion, thisVersion )
import Distribution.Package
( PackageIdentifier, packageName, packageVersion, Dependency(..) )
import Distribution.Text
( Text(parse), display )
import Distribution.ReadE
( ReadE(..), readP_to_E, succeedReadE )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, readP_to_S, readS_to_P, char, munch1, pfail, (+++) )
import Distribution.Verbosity
( Verbosity, normal )
import Distribution.Simple.Utils
( wrapText )
import Data.Char
( isSpace, isAlphaNum )
import Data.List
( intercalate )
import Data.Maybe
( listToMaybe, maybeToList, fromMaybe )
import Data.Monoid
( Monoid(..) )
import Control.Monad
( liftM )
import System.FilePath
( (</>) )
import Network.URI
( parseAbsoluteURI, uriToString )
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool,
globalConfigFile :: Flag FilePath,
globalRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers.
globalCacheDir :: Flag FilePath,
globalLocalRepos :: [FilePath],
globalLogsDir :: Flag FilePath,
globalWorldFile :: Flag FilePath
}
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False,
globalConfigFile = mempty,
globalRemoteRepos = [],
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty
}
globalCommand :: CommandUI GlobalFlags
globalCommand = CommandUI {
commandName = "",
commandSynopsis = "",
commandUsage = \_ ->
"This program is the command line interface "
++ "to the Haskell Cabal infrastructure.\n"
++ "See http://www.haskell.org/cabal/ for more information.\n",
commandDescription = Just $ \pname ->
"For more information about a command use:\n"
++ " " ++ pname ++ " COMMAND --help\n\n"
++ "To install Cabal packages from hackage use:\n"
++ " " ++ pname ++ " install foo [--dry-run]\n\n"
++ "Occasionally you need to update the list of available packages:\n"
++ " " ++ pname ++ " update\n",
commandDefaultFlags = defaultGlobalFlags,
commandOptions = \showOrParseArgs ->
(case showOrParseArgs of ShowArgs -> take 2; ParseArgs -> id)
[option ['V'] ["version"]
"Print version information"
globalVersion (\v flags -> flags { globalVersion = v })
trueArg
,option [] ["numeric-version"]
"Print just the version number"
globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
trueArg
,option [] ["config-file"]
"Set an alternate location for the config file"
globalConfigFile (\v flags -> flags { globalConfigFile = v })
(reqArgFlag "FILE")
,option [] ["remote-repo"]
"The name and url for a remote repository"
globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v })
(reqArg' "NAME:URL" (maybeToList . readRepo) (map showRepo))
,option [] ["remote-repo-cache"]
"The location where downloads from all remote repos are cached"
globalCacheDir (\v flags -> flags { globalCacheDir = v })
(reqArgFlag "DIR")
,option [] ["local-repo"]
"The location of a local repository"
globalLocalRepos (\v flags -> flags { globalLocalRepos = v })
(reqArg' "DIR" (\x -> [x]) id)
,option [] ["logs-dir"]
"The location to put log files"
globalLogsDir (\v flags -> flags { globalLogsDir = v })
(reqArgFlag "DIR")
,option [] ["world-file"]
"The location of the world file"
globalWorldFile (\v flags -> flags { globalWorldFile = v })
(reqArgFlag "FILE")
]
}
instance Monoid GlobalFlags where
mempty = GlobalFlags {
globalVersion = mempty,
globalNumericVersion = mempty,
globalConfigFile = mempty,
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty
}
mappend a b = GlobalFlags {
globalVersion = combine globalVersion,
globalNumericVersion = combine globalNumericVersion,
globalConfigFile = combine globalConfigFile,
globalRemoteRepos = combine globalRemoteRepos,
globalCacheDir = combine globalCacheDir,
globalLocalRepos = combine globalLocalRepos,
globalLogsDir = combine globalLogsDir,
globalWorldFile = combine globalWorldFile
}
where combine field = field a `mappend` field b
globalRepos :: GlobalFlags -> [Repo]
globalRepos globalFlags = remoteRepos ++ localRepos
where
remoteRepos =
[ Repo (Left remote) cacheDir
| remote <- globalRemoteRepos globalFlags
, let cacheDir = fromFlag (globalCacheDir globalFlags)
</> remoteRepoName remote ]
localRepos =
[ Repo (Right LocalRepo) local
| local <- globalLocalRepos globalFlags ]
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
configureCommand :: CommandUI ConfigFlags
configureCommand = (Cabal.configureCommand defaultProgramConfiguration) {
commandDefaultFlags = mempty
}
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions = commandOptions configureCommand
filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags
filterConfigureFlags flags cabalLibVersion
| cabalLibVersion >= Version [1,3,10] [] = flags
-- older Cabal does not grok the constraints flag:
| otherwise = flags { configConstraints = [] }
-- ------------------------------------------------------------
-- * Config extra flags
-- ------------------------------------------------------------
-- | cabal configure takes some extra flags beyond runghc Setup configure
--
data ConfigExFlags = ConfigExFlags {
configCabalVersion :: Flag Version,
configExConstraints:: [UserConstraint],
configPreferences :: [Dependency],
configSolver :: Flag Solver
}
defaultConfigExFlags :: ConfigExFlags
defaultConfigExFlags = mempty { configSolver = Flag defaultSolver }
configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags)
configureExCommand = configureCommand {
commandDefaultFlags = (mempty, defaultConfigExFlags),
commandOptions = \showOrParseArgs ->
liftOptions fst setFst (filter ((/="constraint") . optionName) $
configureOptions showOrParseArgs)
++ liftOptions snd setSnd (configureExOptions showOrParseArgs)
}
where
setFst a (_,b) = (a,b)
setSnd b (a,_) = (a,b)
configureExOptions :: ShowOrParseArgs -> [OptionField ConfigExFlags]
configureExOptions _showOrParseArgs =
[ option [] ["cabal-lib-version"]
("Select which version of the Cabal lib to use to build packages "
++ "(useful for testing).")
configCabalVersion (\v flags -> flags { configCabalVersion = v })
(reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++)
(fmap toFlag parse))
(map display . flagToList))
, option [] ["constraint"]
"Specify constraints on a package (version, installed/source, flags)"
configExConstraints (\v flags -> flags { configExConstraints = v })
(reqArg "CONSTRAINT"
(fmap (\x -> [x]) (ReadE readUserConstraint))
(map display))
, option [] ["preference"]
"Specify preferences (soft constraints) on the version of a package"
configPreferences (\v flags -> flags { configPreferences = v })
(reqArg "CONSTRAINT"
(readP_to_E (const "dependency expected")
(fmap (\x -> [x]) parse))
(map display))
, optionSolver configSolver (\v flags -> flags { configSolver = v })
]
instance Monoid ConfigExFlags where
mempty = ConfigExFlags {
configCabalVersion = mempty,
configExConstraints= mempty,
configPreferences = mempty,
configSolver = mempty
}
mappend a b = ConfigExFlags {
configCabalVersion = combine configCabalVersion,
configExConstraints= combine configExConstraints,
configPreferences = combine configPreferences,
configSolver = combine configSolver
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Fetch command
-- ------------------------------------------------------------
data FetchFlags = FetchFlags {
-- fetchOutput :: Flag FilePath,
fetchDeps :: Flag Bool,
fetchDryRun :: Flag Bool,
fetchSolver :: Flag Solver,
fetchMaxBackjumps :: Flag Int,
fetchReorderGoals :: Flag Bool,
fetchIndependentGoals :: Flag Bool,
fetchVerbosity :: Flag Verbosity
}
defaultFetchFlags :: FetchFlags
defaultFetchFlags = FetchFlags {
-- fetchOutput = mempty,
fetchDeps = toFlag True,
fetchDryRun = toFlag False,
fetchSolver = Flag defaultSolver,
fetchMaxBackjumps = Flag defaultMaxBackjumps,
fetchReorderGoals = Flag False,
fetchIndependentGoals = Flag False,
fetchVerbosity = toFlag normal
}
fetchCommand :: CommandUI FetchFlags
fetchCommand = CommandUI {
commandName = "fetch",
commandSynopsis = "Downloads packages for later installation.",
commandDescription = Nothing,
commandUsage = usagePackages "fetch",
commandDefaultFlags = defaultFetchFlags,
commandOptions = \_ -> [
optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v })
-- , option "o" ["output"]
-- "Put the package(s) somewhere specific rather than the usual cache."
-- fetchOutput (\v flags -> flags { fetchOutput = v })
-- (reqArgFlag "PATH")
, option [] ["dependencies", "deps"]
"Resolve and fetch dependencies (default)"
fetchDeps (\v flags -> flags { fetchDeps = v })
trueArg
, option [] ["no-dependencies", "no-deps"]
"Ignore dependencies"
fetchDeps (\v flags -> flags { fetchDeps = v })
falseArg
, option [] ["dry-run"]
"Do not install anything, only print what would be installed."
fetchDryRun (\v flags -> flags { fetchDryRun = v })
trueArg
] ++
optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) :
optionSolverFlags fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v })
fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v })
fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v })
}
-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------
updateCommand :: CommandUI (Flag Verbosity)
updateCommand = CommandUI {
commandName = "update",
commandSynopsis = "Updates list of known packages",
commandDescription = Nothing,
commandUsage = usagePackages "update",
commandDefaultFlags = toFlag normal,
commandOptions = \_ -> [optionVerbosity id const]
}
upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
upgradeCommand = configureCommand {
commandName = "upgrade",
commandSynopsis = "(command disabled, use install instead)",
commandDescription = Nothing,
commandUsage = usagePackages "upgrade",
commandDefaultFlags = (mempty, mempty, mempty, mempty),
commandOptions = commandOptions installCommand
}
{-
cleanCommand :: CommandUI ()
cleanCommand = makeCommand name shortDesc longDesc emptyFlags options
where
name = "clean"
shortDesc = "Removes downloaded files"
longDesc = Nothing
emptyFlags = ()
options _ = []
-}
checkCommand :: CommandUI (Flag Verbosity)
checkCommand = CommandUI {
commandName = "check",
commandSynopsis = "Check the package for common mistakes",
commandDescription = Nothing,
commandUsage = \pname -> "Usage: " ++ pname ++ " check\n",
commandDefaultFlags = toFlag normal,
commandOptions = \_ -> []
}
-- ------------------------------------------------------------
-- * Report flags
-- ------------------------------------------------------------
data ReportFlags = ReportFlags {
reportUsername :: Flag Username,
reportPassword :: Flag Password,
reportVerbosity :: Flag Verbosity
}
defaultReportFlags :: ReportFlags
defaultReportFlags = ReportFlags {
reportUsername = mempty,
reportPassword = mempty,
reportVerbosity = toFlag normal
}
reportCommand :: CommandUI ReportFlags
reportCommand = CommandUI {
commandName = "report",
commandSynopsis = "Upload build reports to a remote server.",
commandDescription = Just $ \_ ->
"You can store your Hackage login in the ~/.cabal/config file\n",
commandUsage = \pname -> "Usage: " ++ pname ++ " report [FLAGS]\n\n"
++ "Flags for upload:",
commandDefaultFlags = defaultReportFlags,
commandOptions = \_ ->
[optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v })
,option ['u'] ["username"]
"Hackage username."
reportUsername (\v flags -> flags { reportUsername = v })
(reqArg' "USERNAME" (toFlag . Username)
(flagToList . fmap unUsername))
,option ['p'] ["password"]
"Hackage password."
reportPassword (\v flags -> flags { reportPassword = v })
(reqArg' "PASSWORD" (toFlag . Password)
(flagToList . fmap unPassword))
]
}
instance Monoid ReportFlags where
mempty = ReportFlags {
reportUsername = mempty,
reportPassword = mempty,
reportVerbosity = mempty
}
mappend a b = ReportFlags {
reportUsername = combine reportUsername,
reportPassword = combine reportPassword,
reportVerbosity = combine reportVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Unpack flags
-- ------------------------------------------------------------
data UnpackFlags = UnpackFlags {
unpackDestDir :: Flag FilePath,
unpackVerbosity :: Flag Verbosity
}
defaultUnpackFlags :: UnpackFlags
defaultUnpackFlags = UnpackFlags {
unpackDestDir = mempty,
unpackVerbosity = toFlag normal
}
unpackCommand :: CommandUI UnpackFlags
unpackCommand = CommandUI {
commandName = "unpack",
commandSynopsis = "Unpacks packages for user inspection.",
commandDescription = Nothing,
commandUsage = usagePackages "unpack",
commandDefaultFlags = mempty,
commandOptions = \_ -> [
optionVerbosity unpackVerbosity (\v flags -> flags { unpackVerbosity = v })
,option "d" ["destdir"]
"where to unpack the packages, defaults to the current directory."
unpackDestDir (\v flags -> flags { unpackDestDir = v })
(reqArgFlag "PATH")
]
}
instance Monoid UnpackFlags where
mempty = defaultUnpackFlags
mappend a b = UnpackFlags {
unpackDestDir = combine unpackDestDir
,unpackVerbosity = combine unpackVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------
data ListFlags = ListFlags {
listInstalled :: Flag Bool,
listSimpleOutput :: Flag Bool,
listVerbosity :: Flag Verbosity
}
defaultListFlags :: ListFlags
defaultListFlags = ListFlags {
listInstalled = Flag False,
listSimpleOutput = Flag False,
listVerbosity = toFlag normal
}
listCommand :: CommandUI ListFlags
listCommand = CommandUI {
commandName = "list",
commandSynopsis = "List packages matching a search string.",
commandDescription = Nothing,
commandUsage = usagePackages "list",
commandDefaultFlags = defaultListFlags,
commandOptions = \_ -> [
optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v })
, option [] ["installed"]
"Only print installed packages"
listInstalled (\v flags -> flags { listInstalled = v })
trueArg
, option [] ["simple-output"]
"Print in a easy-to-parse format"
listSimpleOutput (\v flags -> flags { listSimpleOutput = v })
trueArg
]
}
instance Monoid ListFlags where
mempty = defaultListFlags
mappend a b = ListFlags {
listInstalled = combine listInstalled,
listSimpleOutput = combine listSimpleOutput,
listVerbosity = combine listVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------
data InfoFlags = InfoFlags {
infoVerbosity :: Flag Verbosity
}
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
infoVerbosity = toFlag normal
}
infoCommand :: CommandUI InfoFlags
infoCommand = CommandUI {
commandName = "info",
commandSynopsis = "Display detailed information about a particular package.",
commandDescription = Nothing,
commandUsage = usagePackages "info",
commandDefaultFlags = defaultInfoFlags,
commandOptions = \_ -> [
optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })
]
}
instance Monoid InfoFlags where
mempty = defaultInfoFlags
mappend a b = InfoFlags {
infoVerbosity = combine infoVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Install takes the same flags as configure along with a few extras.
--
data InstallFlags = InstallFlags {
installDocumentation :: Flag Bool,
installHaddockIndex :: Flag PathTemplate,
installDryRun :: Flag Bool,
installMaxBackjumps :: Flag Int,
installReorderGoals :: Flag Bool,
installIndependentGoals :: Flag Bool,
installReinstall :: Flag Bool,
installAvoidReinstalls :: Flag Bool,
installOverrideReinstall :: Flag Bool,
installUpgradeDeps :: Flag Bool,
installOnly :: Flag Bool,
installOnlyDeps :: Flag Bool,
installRootCmd :: Flag String,
installSummaryFile :: [PathTemplate],
installLogFile :: Flag PathTemplate,
installBuildReports :: Flag ReportLevel,
installSymlinkBinDir :: Flag FilePath,
installOneShot :: Flag Bool
}
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
installDocumentation = Flag False,
installHaddockIndex = Flag docIndexFile,
installDryRun = Flag False,
installMaxBackjumps = Flag defaultMaxBackjumps,
installReorderGoals = Flag False,
installIndependentGoals= Flag False,
installReinstall = Flag False,
installAvoidReinstalls = Flag False,
installOverrideReinstall = Flag False,
installUpgradeDeps = Flag False,
installOnly = Flag False,
installOnlyDeps = Flag False,
installRootCmd = mempty,
installSummaryFile = mempty,
installLogFile = mempty,
installBuildReports = Flag NoReports,
installSymlinkBinDir = mempty,
installOneShot = Flag False
}
where
docIndexFile = toPathTemplate ("$datadir" </> "doc" </> "index.html")
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 200
defaultSolver :: Solver
defaultSolver = TopDown
allSolvers :: String
allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [Solver]))
installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
installCommand = CommandUI {
commandName = "install",
commandSynopsis = "Installs a list of packages.",
commandUsage = usagePackages "install",
commandDescription = Just $ \pname ->
let original = case commandDescription configureCommand of
Just desc -> desc pname ++ "\n"
Nothing -> ""
in original
++ "Examples:\n"
++ " " ++ pname ++ " install "
++ " Package in the current directory\n"
++ " " ++ pname ++ " install foo "
++ " Package from the hackage server\n"
++ " " ++ pname ++ " install foo-1.0 "
++ " Specific version of a package\n"
++ " " ++ pname ++ " install 'foo < 2' "
++ " Constrained package version\n",
commandDefaultFlags = (mempty, mempty, mempty, mempty),
commandOptions = \showOrParseArgs ->
liftOptions get1 set1 (filter ((/="constraint") . optionName) $
configureOptions showOrParseArgs)
++ liftOptions get2 set2 (configureExOptions showOrParseArgs)
++ liftOptions get3 set3 (installOptions showOrParseArgs)
++ liftOptions get4 set4 (haddockOptions showOrParseArgs)
}
where
get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d)
get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d)
get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d)
get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d)
haddockOptions showOrParseArgs
= [ opt { optionName = "haddock-" ++ name,
optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
| descr <- optionDescr opt] }
| opt <- commandOptions Cabal.haddockCommand showOrParseArgs
, let name = optionName opt
, name `elem` ["hoogle", "html", "html-location",
"executables", "internal", "css",
"hyperlink-source", "hscolour-css",
"contents-location"]
]
fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w
fmapOptFlags modify (OptArg d f p r i w) = OptArg d (modify f) p r i w
fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
[ option "" ["documentation"]
"building of documentation"
installDocumentation (\v flags -> flags { installDocumentation = v })
(boolOpt [] [])
, option [] ["doc-index-file"]
"A central index of haddock API documentation (template cannot use $pkgid)"
installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
(reqArg' "TEMPLATE" (toFlag.toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["dry-run"]
"Do not install anything, only print what would be installed."
installDryRun (\v flags -> flags { installDryRun = v })
trueArg
] ++
optionSolverFlags installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v })
installReorderGoals (\v flags -> flags { installReorderGoals = v })
installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) ++
[ option [] ["reinstall"]
"Install even if it means installing the same version again."
installReinstall (\v flags -> flags { installReinstall = v })
trueArg
, option [] ["avoid-reinstalls"]
"Do not select versions that would destructively overwrite installed packages."
installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v })
trueArg
, option [] ["force-reinstalls"]
"Use to override the check that prevents reinstalling already installed versions of package dependencies."
installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v })
trueArg
, option [] ["upgrade-dependencies"]
"Pick the latest version for all dependencies, rather than trying to pick an installed version."
installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v })
trueArg
, option [] ["only-dependencies"]
"Install only the dependencies necessary to build the given packages"
installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
trueArg
, option [] ["root-cmd"]
"Command used to gain root privileges, when installing with --global."
installRootCmd (\v flags -> flags { installRootCmd = v })
(reqArg' "COMMAND" toFlag flagToList)
, option [] ["symlink-bindir"]
"Add symlinks to installed executables into this directory."
installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v })
(reqArgFlag "DIR")
, option [] ["build-summary"]
"Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)"
installSummaryFile (\v flags -> flags { installSummaryFile = v })
(reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) (map fromPathTemplate))
, option [] ["build-log"]
"Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)"
installLogFile (\v flags -> flags { installLogFile = v })
(reqArg' "TEMPLATE" (toFlag.toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["remote-build-reporting"]
"Generate build reports to send to a remote server (none, anonymous or detailed)."
installBuildReports (\v flags -> flags { installBuildReports = v })
(reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', "
++ "'anonymous' or 'detailed'")
(toFlag `fmap` parse))
(flagToList . fmap display))
, option [] ["one-shot"]
"Do not record the packages in the world file."
installOneShot (\v flags -> flags { installOneShot = v })
trueArg
] ++ case showOrParseArgs of -- TODO: remove when "cabal install" avoids
ParseArgs ->
option [] ["only"]
"Only installs the package in the current directory."
installOnly (\v flags -> flags { installOnly = v })
trueArg
: []
_ -> []
instance Monoid InstallFlags where
mempty = InstallFlags {
installDocumentation = mempty,
installHaddockIndex = mempty,
installDryRun = mempty,
installReinstall = mempty,
installAvoidReinstalls = mempty,
installOverrideReinstall = mempty,
installMaxBackjumps = mempty,
installUpgradeDeps = mempty,
installReorderGoals = mempty,
installIndependentGoals= mempty,
installOnly = mempty,
installOnlyDeps = mempty,
installRootCmd = mempty,
installSummaryFile = mempty,
installLogFile = mempty,
installBuildReports = mempty,
installSymlinkBinDir = mempty,
installOneShot = mempty
}
mappend a b = InstallFlags {
installDocumentation = combine installDocumentation,
installHaddockIndex = combine installHaddockIndex,
installDryRun = combine installDryRun,
installReinstall = combine installReinstall,
installAvoidReinstalls = combine installAvoidReinstalls,
installOverrideReinstall = combine installOverrideReinstall,
installMaxBackjumps = combine installMaxBackjumps,
installUpgradeDeps = combine installUpgradeDeps,
installReorderGoals = combine installReorderGoals,
installIndependentGoals= combine installIndependentGoals,
installOnly = combine installOnly,
installOnlyDeps = combine installOnlyDeps,
installRootCmd = combine installRootCmd,
installSummaryFile = combine installSummaryFile,
installLogFile = combine installLogFile,
installBuildReports = combine installBuildReports,
installSymlinkBinDir = combine installSymlinkBinDir,
installOneShot = combine installOneShot
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Upload flags
-- ------------------------------------------------------------
data UploadFlags = UploadFlags {
uploadCheck :: Flag Bool,
uploadUsername :: Flag Username,
uploadPassword :: Flag Password,
uploadVerbosity :: Flag Verbosity
}
defaultUploadFlags :: UploadFlags
defaultUploadFlags = UploadFlags {
uploadCheck = toFlag False,
uploadUsername = mempty,
uploadPassword = mempty,
uploadVerbosity = toFlag normal
}
uploadCommand :: CommandUI UploadFlags
uploadCommand = CommandUI {
commandName = "upload",
commandSynopsis = "Uploads source packages to Hackage",
commandDescription = Just $ \_ ->
"You can store your Hackage login in the ~/.cabal/config file\n",
commandUsage = \pname ->
"Usage: " ++ pname ++ " upload [FLAGS] [TARFILES]\n\n"
++ "Flags for upload:",
commandDefaultFlags = defaultUploadFlags,
commandOptions = \_ ->
[optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v })
,option ['c'] ["check"]
"Do not upload, just do QA checks."
uploadCheck (\v flags -> flags { uploadCheck = v })
trueArg
,option ['u'] ["username"]
"Hackage username."
uploadUsername (\v flags -> flags { uploadUsername = v })
(reqArg' "USERNAME" (toFlag . Username)
(flagToList . fmap unUsername))
,option ['p'] ["password"]
"Hackage password."
uploadPassword (\v flags -> flags { uploadPassword = v })
(reqArg' "PASSWORD" (toFlag . Password)
(flagToList . fmap unPassword))
]
}
instance Monoid UploadFlags where
mempty = UploadFlags {
uploadCheck = mempty,
uploadUsername = mempty,
uploadPassword = mempty,
uploadVerbosity = mempty
}
mappend a b = UploadFlags {
uploadCheck = combine uploadCheck,
uploadUsername = combine uploadUsername,
uploadPassword = combine uploadPassword,
uploadVerbosity = combine uploadVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Init flags
-- ------------------------------------------------------------
emptyInitFlags :: IT.InitFlags
emptyInitFlags = mempty
defaultInitFlags :: IT.InitFlags
defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal }
initCommand :: CommandUI IT.InitFlags
initCommand = CommandUI {
commandName = "init",
commandSynopsis = "Interactively create a .cabal file.",
commandDescription = Just $ \_ -> wrapText $
"Cabalise a project by creating a .cabal, Setup.hs, and "
++ "optionally a LICENSE file.\n\n"
++ "Calling init with no arguments (recommended) uses an "
++ "interactive mode, which will try to guess as much as "
++ "possible and prompt you for the rest. Command-line "
++ "arguments are provided for scripting purposes. "
++ "If you don't want interactive mode, be sure to pass "
++ "the -n flag.\n",
commandUsage = \pname ->
"Usage: " ++ pname ++ " init [FLAGS]\n\n"
++ "Flags for init:",
commandDefaultFlags = defaultInitFlags,
commandOptions = \_ ->
[ option ['n'] ["non-interactive"]
"Non-interactive mode."
IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v })
trueArg
, option ['q'] ["quiet"]
"Do not generate log messages to stdout."
IT.quiet (\v flags -> flags { IT.quiet = v })
trueArg
, option [] ["no-comments"]
"Do not generate explanatory comments in the .cabal file."
IT.noComments (\v flags -> flags { IT.noComments = v })
trueArg
, option ['m'] ["minimal"]
"Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments."
IT.minimal (\v flags -> flags { IT.minimal = v })
trueArg
, option [] ["package-dir"]
"Root directory of the package (default = current directory)."
IT.packageDir (\v flags -> flags { IT.packageDir = v })
(reqArgFlag "DIRECTORY")
, option ['p'] ["package-name"]
"Name of the Cabal package to create."
IT.packageName (\v flags -> flags { IT.packageName = v })
(reqArgFlag "PACKAGE")
, option [] ["version"]
"Initial version of the package."
IT.version (\v flags -> flags { IT.version = v })
(reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++)
(toFlag `fmap` parse))
(flagToList . fmap display))
, option [] ["cabal-version"]
"Required version of the Cabal library."
IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v })
(reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++)
(toFlag `fmap` parse))
(flagToList . fmap display))
, option ['l'] ["license"]
"Project license."
IT.license (\v flags -> flags { IT.license = v })
(reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++)
(toFlag `fmap` parse))
(flagToList . fmap display))
, option ['a'] ["author"]
"Name of the project's author."
IT.author (\v flags -> flags { IT.author = v })
(reqArgFlag "NAME")
, option ['e'] ["email"]
"Email address of the maintainer."
IT.email (\v flags -> flags { IT.email = v })
(reqArgFlag "EMAIL")
, option ['u'] ["homepage"]
"Project homepage and/or repository."
IT.homepage (\v flags -> flags { IT.homepage = v })
(reqArgFlag "URL")
, option ['s'] ["synopsis"]
"Short project synopsis."
IT.synopsis (\v flags -> flags { IT.synopsis = v })
(reqArgFlag "TEXT")
, option ['c'] ["category"]
"Project category."
IT.category (\v flags -> flags { IT.category = v })
(reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s))
(flagToList . fmap (either id show)))
, option [] ["is-library"]
"Build a library."
IT.packageType (\v flags -> flags { IT.packageType = v })
(noArg (Flag IT.Library))
, option [] ["is-executable"]
"Build an executable."
IT.packageType
(\v flags -> flags { IT.packageType = v })
(noArg (Flag IT.Executable))
, option ['o'] ["expose-module"]
"Export a module from the package."
IT.exposedModules
(\v flags -> flags { IT.exposedModules = v })
(reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++)
((Just . (:[])) `fmap` parse))
(fromMaybe [] . fmap (fmap display)))
, option ['d'] ["dependency"]
"Package dependency."
IT.dependencies (\v flags -> flags { IT.dependencies = v })
(reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++)
((Just . (:[])) `fmap` parse))
(fromMaybe [] . fmap (fmap display)))
, option [] ["source-dir"]
"Directory containing package source."
IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v })
(reqArg' "DIR" (Just . (:[]))
(fromMaybe []))
, option [] ["build-tool"]
"Required external build tool."
IT.buildTools (\v flags -> flags { IT.buildTools = v })
(reqArg' "TOOL" (Just . (:[]))
(fromMaybe []))
, optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v })
]
}
where readMaybe s = case reads s of
[(x,"")] -> Just x
_ -> Nothing
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Extra flags to @sdist@ beyond runghc Setup sdist
--
data SDistExFlags = SDistExFlags {
sDistFormat :: Flag ArchiveFormat
}
deriving Show
data ArchiveFormat = TargzFormat | ZipFormat -- | ...
deriving (Show, Eq)
defaultSDistExFlags :: SDistExFlags
defaultSDistExFlags = SDistExFlags {
sDistFormat = Flag TargzFormat
}
sdistCommand :: CommandUI (SDistFlags, SDistExFlags)
sdistCommand = Cabal.sdistCommand {
commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags),
commandOptions = \showOrParseArgs ->
liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs)
++ liftOptions snd setSnd sdistExOptions
}
where
setFst a (_,b) = (a,b)
setSnd b (a,_) = (a,b)
sdistExOptions =
[option [] ["archive-format"] "archive-format"
sDistFormat (\v flags -> flags { sDistFormat = v })
(choiceOpt
[ (Flag TargzFormat, ([], ["targz"]),
"Produce a '.tar.gz' format archive (default and required for uploading to hackage)")
, (Flag ZipFormat, ([], ["zip"]),
"Produce a '.zip' format archive")
])
]
instance Monoid SDistExFlags where
mempty = SDistExFlags {
sDistFormat = mempty
}
mappend a b = SDistExFlags {
sDistFormat = combine sDistFormat
}
where
combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description ->
(b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b
reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList
liftOptions :: (b -> a) -> (a -> b -> b)
-> [OptionField a] -> [OptionField b]
liftOptions get set = map (liftOption get set)
optionSolver :: (flags -> Flag Solver)
-> (Flag Solver -> flags -> flags)
-> OptionField flags
optionSolver get set =
option [] ["solver"]
("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".")
get set
(reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers)
(toFlag `fmap` parse))
(flagToList . fmap display))
optionSolverFlags :: (flags -> Flag Int ) -> (Flag Int -> flags -> flags)
-> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
-> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
-> [OptionField flags]
optionSolverFlags getmbj setmbj getrg setrg getig setig =
[ option [] ["max-backjumps"]
("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.")
getmbj setmbj
(reqArg "NUM" (readP_to_E ("Cannot parse number: "++)
(fmap toFlag (Parse.readS_to_P reads)))
(map show . flagToList))
, option [] ["reorder-goals"]
"Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages."
getrg setrg
trueArg
, option [] ["independent-goals"]
"Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen."
getig setig
trueArg
]
usagePackages :: String -> String -> String
usagePackages name pname =
"Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n"
++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n\n"
++ "Flags for " ++ name ++ ":"
--TODO: do we want to allow per-package flags?
parsePackageArgs :: [String] -> Either String [Dependency]
parsePackageArgs = parsePkgArgs []
where
parsePkgArgs ds [] = Right (reverse ds)
parsePkgArgs ds (arg:args) =
case readPToMaybe parseDependencyOrPackageId arg of
Just dep -> parsePkgArgs (dep:ds) args
Nothing -> Left $
show arg ++ " is not valid syntax for a package name or"
++ " package dependency."
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a
readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str
, all isSpace s ]
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse
where
pkgidToDependency :: PackageIdentifier -> Dependency
pkgidToDependency p = case packageVersion p of
Version [] _ -> Dependency (packageName p) anyVersion
version -> Dependency (packageName p) (thisVersion version)
showRepo :: RemoteRepo -> String
showRepo repo = remoteRepoName repo ++ ":"
++ uriToString id (remoteRepoURI repo) []
readRepo :: String -> Maybe RemoteRepo
readRepo = readPToMaybe parseRepo
parseRepo :: Parse.ReadP r RemoteRepo
parseRepo = do
name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.")
_ <- Parse.char ':'
uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~")
uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr)
return $ RemoteRepo {
remoteRepoName = name,
remoteRepoURI = uri
}
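-- 'showRepo' and 'readRepo' round-trip the "name:uri" syntax used by the
-- remote-repo config field, e.g.
-- "hackage.haskell.org:http://hackage.haskell.org/packages/archive".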
| alphaHeavy/cabal | cabal-install/Distribution/Client/Setup.hs | bsd-3-clause | 45,137 | 0 | 27 | 12,151 | 10,169 | 5,756 | 4,413 | 892 | 5 |
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.BlendingFactor
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling BlendingFactor.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.BlendingFactor (
BlendingFactor(..), marshalBlendingFactor, unmarshalBlendingFactor
) where
import Graphics.Rendering.OpenGL.GL.BasicTypes ( GLenum )
--------------------------------------------------------------------------------
data BlendingFactor =
Zero
| One
| SrcColor
| OneMinusSrcColor
| DstColor
| OneMinusDstColor
| SrcAlpha
| OneMinusSrcAlpha
| DstAlpha
| OneMinusDstAlpha
| ConstantColor
| OneMinusConstantColor
| ConstantAlpha
| OneMinusConstantAlpha
| SrcAlphaSaturate
deriving ( Eq, Ord, Show )
marshalBlendingFactor :: BlendingFactor -> GLenum
marshalBlendingFactor x = case x of
Zero -> 0x0
One -> 0x1
SrcColor -> 0x300
OneMinusSrcColor -> 0x301
DstColor -> 0x306
OneMinusDstColor -> 0x307
SrcAlpha -> 0x302
OneMinusSrcAlpha -> 0x303
DstAlpha -> 0x304
OneMinusDstAlpha -> 0x305
ConstantColor -> 0x8001
OneMinusConstantColor -> 0x8002
ConstantAlpha -> 0x8003
OneMinusConstantAlpha -> 0x8004
SrcAlphaSaturate -> 0x308
unmarshalBlendingFactor :: GLenum -> BlendingFactor
unmarshalBlendingFactor x
| x == 0x0 = Zero
| x == 0x1 = One
| x == 0x300 = SrcColor
| x == 0x301 = OneMinusSrcColor
| x == 0x306 = DstColor
| x == 0x307 = OneMinusDstColor
| x == 0x302 = SrcAlpha
| x == 0x303 = OneMinusSrcAlpha
| x == 0x304 = DstAlpha
| x == 0x305 = OneMinusDstAlpha
| x == 0x8001 = ConstantColor
| x == 0x8002 = OneMinusConstantColor
| x == 0x8003 = ConstantAlpha
| x == 0x8004 = OneMinusConstantAlpha
| x == 0x308 = SrcAlphaSaturate
| otherwise = error ("unmarshalBlendingFactor: illegal value " ++ show x)
| FranklinChen/hugs98-plus-Sep2006 | packages/OpenGL/Graphics/Rendering/OpenGL/GL/BlendingFactor.hs | bsd-3-clause | 2,227 | 0 | 9 | 439 | 458 | 244 | 214 | 55 | 15 |
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
module Graphics.BarChart.Parser.Progression where
import Text.CSV
import System.FilePath
import Graphics.BarChart.Types
import Graphics.BarChart.Parser
import Graphics.BarChart.Rendering
-- | Used by 'writeProgressionChart' to generate a bar chart from
-- progression's @plot.csv@ file.
--
progressionChart :: Bool -> [Label] -> CSV -> BarChart Ratio
progressionChart flip labels csv
= drawMultiBarIntervals
. (if flip then flipMultiBarIntervals else id)
. parseMultiBarIntervals block_labels
$ csv
where block_labels | null labels = replicate (length csv) ""
| otherwise = labels
-- | Reads the @plot.csv@ file generated by progression and creates a
-- corresponding bar chart.
--
writeProgressionChart :: Bool -> Config -> FilePath -> [Label] -> IO ()
writeProgressionChart flip config@Config{..} file block_labels =
do csv <- readCSV file
let chart = progressionChart flip block_labels csv
renderWith config chart
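-- Example (sketch): given some configuration value @cfg :: Config@ from
-- "Graphics.BarChart.Types", progression's @plot.csv@ can be rendered with two
-- labelled groups:
--
-- > writeProgressionChart False cfg "plot.csv" ["before", "after"]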
| sebfisch/haskell-barchart | src/Graphics/BarChart/Parser/Progression.hs | bsd-3-clause | 1,021 | 0 | 10 | 181 | 226 | 120 | 106 | 20 | 2 |
{-# LANGUAGE
ConstraintKinds
, FlexibleContexts
, FlexibleInstances
, MultiParamTypeClasses
, ScopedTypeVariables
, TypeFamilies
, TypeSynonymInstances
#-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Toy.Gtk.Diagrams
-- Copyright : (c) 2013 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <[email protected]>
-- Stability : experimental
-- Portability : GHC only
--
-- toy-diagrams utilities specific to the toy-gtk backend.
--
--------------------------------------------------------------------------------
module Graphics.UI.Toy.Gtk.Diagrams
(
-- * Convenient Type Synonyms
CairoDiagram, CairoDiagrammable, CairoInteractive
, CairoButton
, CairoDraggable, CairoHandle
-- , CairoSlider
-- * Displaying diagrams
, defaultDisplay
, displayDiagram
-- * Widgets
, mkDefaultButton
) where
import Control.Lens hiding ( transform, (#) )
import Diagrams.Backend.Cairo ( Cairo )
import Diagrams.Backend.Cairo.Text ( textLineBounded )
import Diagrams.Backend.Gtk ( renderToGtk )
import Diagrams.Prelude
import Diagrams.Lens
import Graphics.UI.Gtk ( DrawWindow )
import Graphics.UI.Toy.Gtk ( Gtk )
import Graphics.UI.Toy
import Graphics.UI.Toy.Button
import Graphics.UI.Toy.Diagrams
import Graphics.UI.Toy.Draggable
-- import Graphics.UI.Toy.Slider
type CairoDiagram = Diagram Cairo R2
type CairoDiagrammable q a = Diagrammable Cairo R2 q a
type CairoInteractive a = (Diagrammable Cairo R2 Any a, Interactive Gtk a)
type CairoButton = Button Cairo R2
type CairoDraggable a = Draggable a
type CairoHandle = Draggable CairoDiagram
-- type CairoSlider a = Slider Cairo R2 a
-- | Convenience function for implementing the display function of 'GtkDisplay'.
displayDiagram :: (a -> CairoDiagram)
-> DrawWindow -> InputState Gtk -> a -> IO a
displayDiagram f dw _ x = (renderToGtk dw $ f x) >> return x
-- | Simply @'displayDiagram' 'diagram'@, useful for boilerplate implementations
-- of 'GtkDisplay'.
defaultDisplay :: CairoDiagrammable Any a
=> DrawWindow -> InputState Gtk -> a -> IO a
defaultDisplay = displayDiagram diagram
type instance V (InputState Gtk) = R2
instance Transformable (InputState Gtk) where
transform t is = is { mousePos = (wrapped . _P %~ transform (inv t))
$ mousePos is }
-- | Builds a button containing text. The outside border is a rounded
-- rectangle, and when pressed, it's drawn with a black fill and white lines.
mkDefaultButton :: String -> CairoButton
mkDefaultButton txt = mkButton dia
where
dia b = addTint $ case _buttonState b of
NormalState -> blackLined $ label <> border
HoverState -> blackLined $ label <> fc lightgray border
PressState -> fc white label <> (border # fc black # lc white)
where
addTint
| _buttonHit b = flip overlayScaled (square 1 # fcA redTint)
| otherwise = id
redTint = red `withOpacity` 0.5
border = centerXY . lw 2 $ roundedRect (width label + 5) (height label + 5) 3
label = centerXY . pad 1 . reflectY $ textLineBounded monoStyle txt
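-- For example, @mkDefaultButton "Quit"@ yields a rounded-rectangle button
-- labelled "Quit": light grey while hovered, drawn white-on-black while
-- pressed, and overlaid with a translucent red square while it registers a hit.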
-- TODO: reinstate when it's figured out how to not use "UndecidableInstances"
-- TODO: GtkDisplay Button instance
-- TODO: document orphans
-- TODO: can these work for any q?
{-
instance (V a ~ R2, CairoDiagrammable Any a) => GtkDisplay (CairoDraggable a) where
display = displayDiagram diagram
instance CairoDiagrammable Any a => GtkDisplay (CairoSlider a) where
display = displayDiagram diagram
instance (V a ~ R2, CairoDiagrammable Any a) => GtkDisplay (Transformed a) where
display = displayDiagram diagram
-}
| mgsloan/toy-gtk-diagrams | src/Graphics/UI/Toy/Gtk/Diagrams.hs | bsd-3-clause | 3,832 | 0 | 15 | 791 | 675 | 379 | 296 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Spring13.Week7.Scrabble where
import Data.Char
newtype Score = Score Int
deriving (Eq, Read, Show, Ord, Num)
instance Monoid Score where
mempty = Score 0
mappend = (+)
score :: Char -> Score
score c
| c' `elem` "aeilnorstu" = Score 1
| c' `elem` "dg" = Score 2
| c' `elem` "bcmp" = Score 3
| c' `elem` "fhvwy" = Score 4
| c' `elem` "k" = Score 5
| c' `elem` "jx" = Score 8
| c' `elem` "qz" = Score 10
| otherwise = Score 0
where c' = toLower c
scoreString :: String -> Score
scoreString = foldr (\c x -> score c + x) (Score 0)
getScore :: Score -> Int
getScore (Score n) = n
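-- For example, @scoreString "haskell"@ is @Score 14@ (4+1+1+5+1+1+1); characters
-- outside the scoring table (spaces, punctuation) count as @Score 0@.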
| bibaijin/cis194 | src/Spring13/Week7/Scrabble.hs | bsd-3-clause | 657 | 0 | 9 | 155 | 294 | 153 | 141 | 23 | 1 |
{-#LANGUAGE OverloadedStrings, NoMonomorphismRestriction, TupleSections#-}
import Control.Applicative
import Control.Monad
import System.Directory
import System.FilePath
import Data.String
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Conduit.Filesystem
import qualified Filesystem.Path as FS
import qualified Filesystem.Path.Rules as FS
import Codec.Compression.GZip
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Data.ByteString.Lazy as BL
base :: IO FilePath
base = getHomeDirectory >>= \home -> return $ home </> ".cabal/packages/cabal-src/"
gzipOnly :: Monad m => Conduit FS.FilePath m FS.FilePath
gzipOnly = CL.filter (\e -> let es = FS.extensions e
in last es == "gz" && last (init es) == "tar"
)
getCabal :: FilePath -> IO (Maybe BL.ByteString)
getCabal path = BL.readFile path >>= return. Tar.foldEntries folding Nothing (const Nothing). Tar.read. decompress
where folding e a = if FS.extension (fromString $ Tar.entryPath e) == Just "cabal"
then case Tar.entryContent e of
Tar.NormalFile s _ -> Just s
_ -> a
else a
main :: IO ()
main = do fp <- base
es <- Tar.write <$> toEntries fp (FS.encodeString FS.darwin)
BL.writeFile (fp </> "00-index.tar") es
let cache = fp </> "00-index.cache"
doesFileExist cache >>= \e -> when e (removeFile cache)
cabalFileNameAndContent :: String -> (FS.FilePath -> FilePath)
-> Conduit FS.FilePath IO (Maybe (FS.FilePath, BL.ByteString))
cabalFileNameAndContent fp encode = CL.mapM (\e -> (getCabal $ encode e) >>= \r -> case r of
Just c -> return $ (,c) <$> toCabalFN e
Nothing -> return Nothing
)
where toCabalFN = FS.stripPrefix (fromString fp) .
flip FS.replaceExtension "cabal" . FS.dropExtension
toEntries :: String -> (FS.FilePath -> FilePath) -> IO [Tar.Entry]
toEntries fp encode =
traverse False (fromString fp) $$ gzipOnly =$
cabalFileNameAndContent fp encode =$
CL.catMaybes =$
CL.mapM (\a@(e,_) -> putStrLn (encode e) >> return a) =$
CL.map (\(p,e) -> case Tar.toTarPath False $ encode p
of Left _ -> Nothing
Right r -> Just (r, e)
) =$
CL.catMaybes =$
CL.map (uncurry Tar.fileEntry) =$
CL.consume
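-- The pipeline above walks the local cabal-src package directory, keeps only
-- the .tar.gz source archives, extracts the .cabal file from each one and turns
-- it into a tar entry for the rebuilt 00-index.tar.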
| philopon/cabal-src-recache | Main.hs | bsd-3-clause | 2,611 | 0 | 16 | 779 | 824 | 428 | 396 | 53 | 3 |
--
--
--
----------------
-- Exercise 9.2.
----------------
--
--
--
module E'9''2 where
mult :: Integer -> Integer -> Integer
mult 0 _ = 0
mult left right
= right + mult ( left - 1 ) right
{- GHCi>
mult 1 2
mult 2 2
-}
-- 2
-- 4
-- GHCi> mult 0 ( fact ( -2 ) )
-- 0
-- Explanation: the "right" argument is never evaluated because of the order
-- of arguments in the definition and Haskells laziness.
-- GHCi> mult ( fact ( -2 ) ) 0
-- <interactive>: out of memory
-- Explanation: this time the left argument must be evaluated to match it
-- against the pattern 0; "fact ( -2 )" never terminates, so evaluation
-- eventually runs out of memory.
| pascal-knodel/haskell-craft | _/links/E'9''2.hs | mit | 667 | 0 | 8 | 182 | 73 | 48 | 25 | 5 | 1 |
--(Arith,4 + 3,3 + 4,1)
--(Arith,1 + 0,1,1)
--(Language,2*3,Aterm,1)
--(Arith,(2 : 5) + (1 : 6),(4 : 1),1)
-- (Ptruth,(F :| (F :| T)) :| (T :| X),True,1)
1 (Ptaut,P :> ((Neg (Neg P) :> P) :| Q),T,1)
2 done (Ptaut,(Neg P :<> P) :> (Q :<> Q),T,1)
3 done (Ptaut,(P :| P) :> Neg (Neg P),T,1)
5 done (Ptaut,(Neg P :& P) :> Q,T,1)
7 done (Ptaut,P :> (Q :> P),T,1)
9 done (Ptaut,Neg (Neg P :<> P),T,1)
10 (Ptaut,(Neg (P :> Q) :| X) :> Neg Q,T,1)
11 done (Ptaut,Neg (Neg (Neg (P :& Neg P))),T,1)
12 done (Ptaut,(P :> P) :| Q,T,1)
15 done (Ptaut,(P :& P) :| Neg P,T,1)
Examples to solve:
4:4 + 8 5
4:0 + (4 + 8) 7
4:0 + 1:2 7
4+1):2 5
5:2 2
| abdulrahimnizamani/OccamStar | Occam2Examples.hs | gpl-2.0 | 693 | 3 | 14 | 194 | 464 | 241 | 223 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CodePipeline.DisableStageTransition
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Prevents artifacts in a pipeline from transitioning to the next stage in
-- the pipeline.
--
-- /See:/ <http://docs.aws.amazon.com/codepipeline/latest/APIReference/API_DisableStageTransition.html AWS API Reference> for DisableStageTransition.
module Network.AWS.CodePipeline.DisableStageTransition
(
-- * Creating a Request
disableStageTransition
, DisableStageTransition
-- * Request Lenses
, dstPipelineName
, dstStageName
, dstTransitionType
, dstReason
-- * Destructuring the Response
, disableStageTransitionResponse
, DisableStageTransitionResponse
) where
import Network.AWS.CodePipeline.Types
import Network.AWS.CodePipeline.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input of a disable stage transition input action.
--
-- /See:/ 'disableStageTransition' smart constructor.
data DisableStageTransition = DisableStageTransition'
{ _dstPipelineName :: !Text
, _dstStageName :: !Text
, _dstTransitionType :: !StageTransitionType
, _dstReason :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableStageTransition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dstPipelineName'
--
-- * 'dstStageName'
--
-- * 'dstTransitionType'
--
-- * 'dstReason'
disableStageTransition
:: Text -- ^ 'dstPipelineName'
-> Text -- ^ 'dstStageName'
-> StageTransitionType -- ^ 'dstTransitionType'
-> Text -- ^ 'dstReason'
-> DisableStageTransition
disableStageTransition pPipelineName_ pStageName_ pTransitionType_ pReason_ =
DisableStageTransition'
{ _dstPipelineName = pPipelineName_
, _dstStageName = pStageName_
, _dstTransitionType = pTransitionType_
, _dstReason = pReason_
}
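-- Example (sketch): building a request value. The 'Inbound' constructor of
-- 'StageTransitionType' is assumed here and all field values are made up:
--
-- > disableStageTransition "MyPipeline" "Beta" Inbound "Waiting on manual QA"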
-- | The name of the pipeline in which you want to disable the flow of
-- artifacts from one stage to another.
dstPipelineName :: Lens' DisableStageTransition Text
dstPipelineName = lens _dstPipelineName (\ s a -> s{_dstPipelineName = a});
-- | The name of the stage where you want to disable the inbound or outbound
-- transition of artifacts.
dstStageName :: Lens' DisableStageTransition Text
dstStageName = lens _dstStageName (\ s a -> s{_dstStageName = a});
-- | Specifies whether artifacts will be prevented from transitioning into
-- the stage and being processed by the actions in that stage (inbound), or
-- prevented from transitioning from the stage after they have been
-- processed by the actions in that stage (outbound).
dstTransitionType :: Lens' DisableStageTransition StageTransitionType
dstTransitionType = lens _dstTransitionType (\ s a -> s{_dstTransitionType = a});
-- | The reason given to the user why a stage is disabled, such as waiting
-- for manual approval or manual tests. This message is displayed in the
-- pipeline console UI.
dstReason :: Lens' DisableStageTransition Text
dstReason = lens _dstReason (\ s a -> s{_dstReason = a});
instance AWSRequest DisableStageTransition where
type Rs DisableStageTransition =
DisableStageTransitionResponse
request = postJSON codePipeline
response
= receiveNull DisableStageTransitionResponse'
instance ToHeaders DisableStageTransition where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("CodePipeline_20150709.DisableStageTransition" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DisableStageTransition where
toJSON DisableStageTransition'{..}
= object
(catMaybes
[Just ("pipelineName" .= _dstPipelineName),
Just ("stageName" .= _dstStageName),
Just ("transitionType" .= _dstTransitionType),
Just ("reason" .= _dstReason)])
instance ToPath DisableStageTransition where
toPath = const "/"
instance ToQuery DisableStageTransition where
toQuery = const mempty
-- | /See:/ 'disableStageTransitionResponse' smart constructor.
data DisableStageTransitionResponse =
DisableStageTransitionResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableStageTransitionResponse' with the minimum fields required to make a request.
--
disableStageTransitionResponse
:: DisableStageTransitionResponse
disableStageTransitionResponse = DisableStageTransitionResponse'
| fmapfmapfmap/amazonka | amazonka-codepipeline/gen/Network/AWS/CodePipeline/DisableStageTransition.hs | mpl-2.0 | 5,361 | 0 | 12 | 1,119 | 644 | 387 | 257 | 90 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Control.Concurrent.Utils
( Lock()
, Exclusive(..)
, Synchronised(..)
, withLock
) where
import Control.Distributed.Process
( Process
)
import qualified Control.Distributed.Process as Process (catch)
import Control.Exception (SomeException, throw)
import qualified Control.Exception as Exception (catch)
import Control.Concurrent.MVar
( MVar
, tryPutMVar
, newMVar
, takeMVar
)
import Control.Monad.IO.Class (MonadIO, liftIO)
newtype Lock = Lock { mvar :: MVar () }
class Exclusive a where
new :: IO a
acquire :: (MonadIO m) => a -> m ()
release :: (MonadIO m) => a -> m ()
instance Exclusive Lock where
new = return . Lock =<< newMVar ()
acquire = liftIO . takeMVar . mvar
release l = liftIO (tryPutMVar (mvar l) ()) >> return ()
class Synchronised e m where
synchronised :: (Exclusive e, Monad m) => e -> m b -> m b
synchronized :: (Exclusive e, Monad m) => e -> m b -> m b
synchronized = synchronised
instance Synchronised Lock IO where
synchronised = withLock
instance Synchronised Lock Process where
synchronised = withLockP
withLockP :: (Exclusive e) => e -> Process a -> Process a
withLockP excl act = do
Process.catch (do { liftIO $ acquire excl
; result <- act
; liftIO $ release excl
; return result
})
(\(e :: SomeException) -> (liftIO $ release excl) >> throw e)
withLock :: (Exclusive e) => e -> IO a -> IO a
withLock excl act = do
Exception.catch (do { acquire excl
; result <- act
; release excl
; return result
})
(\(e :: SomeException) -> release excl >> throw e)
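-- Example (sketch): serialising access to a shared resource from IO:
--
-- > do lock <- new :: IO Lock
-- >    withLock lock (putStrLn "critical section")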
| qnikst/distributed-process-extras | src/Control/Concurrent/Utils.hs | bsd-3-clause | 1,855 | 0 | 13 | 556 | 617 | 331 | 286 | 52 | 1 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE ScopedTypeVariables #-}
module StoreHouse.VHD.Forest
( VhdInfo(..)
, vhdGetInfo
, VhdChain(..)
, vhdGetChain
, vhdsGraph
) where
import qualified Data.Map as M
import Data.Map (Map)
import Data.Maybe (catMaybes)
import Data.Either
import Data.Word
import qualified Data.Vhd as Vhd
import Control.Applicative
import Control.Monad
import Control.Exception
import System.IO
import System.Process
import System.Directory
import System.Exit
import System.FilePath
import Tools.Uuid
import Tools.Log
data VhdInfo = VhdInfo
{ vhdUuid :: Uuid KindVHD
, vhdPath :: FilePath
, vhdParent :: Maybe (Uuid KindVHD, FilePath)
} deriving (Show,Eq)
data VhdChain =
ChainWithParent FilePath (Uuid KindVHD) VhdChain
| ChainEnd FilePath (Uuid KindVHD)
| ChainError FilePath String
deriving (Show,Eq)
vhdGetInfo :: FilePath -> FilePath -> IO (Either String VhdInfo)
vhdGetInfo dir filepath = do
e <- try $ Vhd.getInfo (dir </> filepath)
case e of
Left (exn :: SomeException) -> return $ Left $ show exn
Right (Left err) -> return $ Left err
Right (Right (header, footer)) -> do
let (Vhd.ParentUnicodeName parentFilepath) = Vhd.headerParentUnicodeName header
return $ Right $ VhdInfo
{ vhdUuid = uuidFromString $ show $ Vhd.footerUniqueId footer
, vhdPath = filepath
, vhdParent = if parentFilepath == ""
then Nothing
else Just (uuidFromString $ show $ Vhd.headerParentUniqueId header, parentFilepath)
}
vhdGetChain :: FilePath -> FilePath -> IO VhdChain
vhdGetChain dir filepath = do
minfo <- vhdGetInfo dir filepath
case minfo of
Left err -> return $ ChainError filepath err
Right vhdinfo -> case vhdParent vhdinfo of
Nothing -> return $ ChainEnd filepath (vhdUuid vhdinfo)
Just (puuid, pfilepath) -> do
parentChain <- vhdGetChain dir pfilepath
return $ ChainWithParent filepath (vhdUuid vhdinfo) parentChain
-- | return all the tree leaves of a vhd forest.
vhdsGraph :: FilePath -> [FilePath] -> IO [VhdChain]
vhdsGraph dir paths = do
infos <- rights <$> mapM (vhdGetInfo dir) paths
mapM (vhdGetChain dir . vhdPath) (loop infos infos) -- inefficient
where
loop infos [] = []
loop infos (x:xs) = if null $ filter (vhdMatched (vhdUuid x)) infos
then x : loop infos xs
else loop infos xs
vhdMatched uuid vhdinfo = case vhdParent vhdinfo of
Nothing -> False
Just (puuid, _) -> puuid == uuid
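-- Note: 'loop' keeps only the VHDs that no other VHD in the directory names as
-- its parent (i.e. the leaves), and 'vhdGetChain' then rebuilds the full parent
-- chain from each leaf.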
| jean-edouard/manager | storehouse/StoreHouse/VHD/Forest.hs | gpl-2.0 | 3,239 | 16 | 20 | 675 | 847 | 450 | 397 | 68 | 4 |
module Main where
import Control.Monad (void)
import Graphics.Vty
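-- Regression test for issue 76: draws a blue bar above a green bar (each 10x1),
-- translated to column 10 / row 5, then waits for a single event before
-- shutting the Vty down.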
main :: IO ()
main = do
cfg <- standardIOConfig
vty <- mkVty cfg
let line1 = charFill (defAttr `withBackColor` blue) ' ' 10 1
line2 = charFill (defAttr `withBackColor` green) ' ' 10 1
img = translate 10 5 (line1 `vertJoin` line2)
pic = picForImage img
update vty pic
void $ nextEvent vty
shutdown vty
| jtdaugherty/vty | test/Issue76.hs | bsd-3-clause | 400 | 0 | 12 | 97 | 154 | 79 | 75 | 14 | 1 |
module Hugs.ForeignPtr
(
ForeignPtr -- abstract, instance of: Eq
, FinalizerPtr
, FinalizerEnvPtr
, newForeignPtr_ -- :: Ptr a -> IO (ForeignPtr a)
, addForeignPtrFinalizer -- :: FinalizerPtr a -> ForeignPtr a -> IO ()
, addForeignPtrFinalizerEnv -- :: FinalizerEnvPtr env a -> Ptr env ->
-- ForeignPtr a -> IO ()
, unsafeForeignPtrToPtr -- :: ForeignPtr a -> Ptr a
, touchForeignPtr -- :: ForeignPtr a -> IO ()
, castForeignPtr -- :: ForeignPtr a -> ForeignPtr b
)
where
import Hugs.Prelude ( ForeignPtr )
import Foreign.Ptr ( Ptr, FunPtr )
-- data ForeignPtr a -- defined in Prelude.hs
type FinalizerPtr a = FunPtr ( Ptr a -> IO ())
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
primitive newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
primitive addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
primitive addForeignPtrFinalizerEnv ::
FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
primitive touchForeignPtr :: ForeignPtr a -> IO ()
primitive unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
primitive castForeignPtr "primUnsafeCoerce" :: ForeignPtr a -> ForeignPtr b
| kaoskorobase/mescaline | resources/hugs/packages/hugsbase/Hugs/ForeignPtr.hs | gpl-3.0 | 1,220 | 23 | 11 | 294 | 263 | 145 | 118 | -1 | -1 |
module IRTS.CodegenC (codegenC) where
import Idris.AbsSyntax
import IRTS.Bytecode
import IRTS.Lang
import IRTS.Simplified
import IRTS.Defunctionalise
import IRTS.System
import IRTS.CodegenCommon
import Idris.Core.TT
import Util.System
import Numeric
import Data.Char
import Data.Bits
import Data.List (intercalate)
import System.Process
import System.Exit
import System.IO
import System.Directory
import System.FilePath ((</>), (<.>))
import Control.Monad
import Debug.Trace
codegenC :: CodeGenerator
codegenC ci = do codegenC' (simpleDecls ci)
(outputFile ci)
(outputType ci)
(includes ci)
(compileObjs ci)
(map mkLib (compileLibs ci) ++
map incdir (importDirs ci))
(compilerFlags ci)
(exportDecls ci)
(interfaces ci)
(debugLevel ci)
when (interfaces ci) $
codegenH (exportDecls ci)
where mkLib l = "-l" ++ l
incdir i = "-I" ++ i
codegenC' :: [(Name, SDecl)] ->
String -> -- output file name
OutputType -> -- generate executable if True, only .o if False
[FilePath] -> -- include files
[String] -> -- extra object files
[String] -> -- extra compiler flags (libraries)
[String] -> -- extra compiler flags (anything)
[ExportIFace] ->
Bool -> -- interfaces too (so make a .o instead)
DbgLevel ->
IO ()
codegenC' defs out exec incs objs libs flags exports iface dbg
= do -- print defs
let bc = map toBC defs
let h = concatMap toDecl (map fst bc)
let cc = concatMap (uncurry toC) bc
let hi = concatMap ifaceC (concatMap getExp exports)
d <- getDataDir
mprog <- readFile (d </> "rts" </> "idris_main" <.> "c")
let cout = headers incs ++ debug dbg ++ h ++ cc ++
(if (exec == Executable) then mprog else hi)
case exec of
MavenProject -> putStrLn ("FAILURE: output type not supported")
Raw -> writeSource out cout
_ -> do
(tmpn, tmph) <- tempfile ".c"
hPutStr tmph cout
hFlush tmph
hClose tmph
comp <- getCC
libFlags <- getLibFlags
incFlags <- getIncFlags
envFlags <- getEnvFlags
let args = [gccDbg dbg] ++
gccFlags iface ++
-- # Any flags defined here which alter the RTS API must also be added to config.mk
["-DHAS_PTHREAD", "-DIDRIS_ENABLE_STATS",
"-I."] ++ objs ++ envFlags ++
(if (exec == Executable) then [] else ["-c"]) ++
[tmpn] ++
(if not iface then concatMap words libFlags else []) ++
concatMap words incFlags ++
(if not iface then concatMap words libs else []) ++
concatMap words flags ++
["-o", out]
-- putStrLn (show args)
exit <- rawSystem comp args
when (exit /= ExitSuccess) $
putStrLn ("FAILURE: " ++ show comp ++ " " ++ show args)
where
getExp (Export _ _ exp) = exp
headers xs =
concatMap
(\h -> "#include \"" ++ h ++ "\"\n")
(xs ++ ["idris_rts.h", "idris_bitstring.h", "idris_stdfgn.h"])
debug TRACE = "#define IDRIS_TRACE\n\n"
debug _ = ""
-- We're using signed integers now. Make sure we get consistent semantics
-- out of them from gcc. See e.g. http://thiemonagel.de/2010/01/signed-integer-overflow/
gccFlags i = if i then ["-fwrapv"]
else ["-fwrapv", "-fno-strict-overflow"]
gccDbg DEBUG = "-g"
gccDbg TRACE = "-O2"
gccDbg _ = "-O2"
cname :: Name -> String
cname n = "_idris_" ++ concatMap cchar (showCG n)
where cchar x | isAlpha x || isDigit x = [x]
| otherwise = "_" ++ show (fromEnum x) ++ "_"
indent :: Int -> String
indent n = replicate (n*4) ' '
creg RVal = "RVAL"
creg (L i) = "LOC(" ++ show i ++ ")"
creg (T i) = "TOP(" ++ show i ++ ")"
creg Tmp = "REG1"
toDecl :: Name -> String
toDecl f = "void " ++ cname f ++ "(VM*, VAL*);\n"
toC :: Name -> [BC] -> String
toC f code
= -- "/* " ++ show code ++ "*/\n\n" ++
"void " ++ cname f ++ "(VM* vm, VAL* oldbase) {\n" ++
indent 1 ++ "INITFRAME;\n" ++
concatMap (bcc 1) code ++ "}\n\n"
showCStr :: String -> String
showCStr s = '"' : foldr ((++) . showChar) "\"" s
where
showChar :: Char -> String
showChar '"' = "\\\""
showChar '\\' = "\\\\"
showChar c
-- Note: we need the double quotes around the codes because otherwise
-- "\n3" would get encoded as "\x0a3", which is incorrect.
-- Instead, we opt for "\x0a""3" and let the C compiler deal with it.
| ord c < 0x10 = "\"\"\\x0" ++ showHex (ord c) "\"\""
| ord c < 0x20 = "\"\"\\x" ++ showHex (ord c) "\"\""
| ord c < 0x7f = [c] -- 0x7f = \DEL
| otherwise = showHexes (utf8bytes (ord c))
utf8bytes :: Int -> [Int]
utf8bytes x = let (h : bytes) = split [] x in
headHex h (length bytes) : map toHex bytes
where
split acc 0 = acc
split acc x = let xbits = x .&. 0x3f
xrest = shiftR x 6 in
split (xbits : acc) xrest
headHex h 1 = h + 0xc0
headHex h 2 = h + 0xe0
headHex h 3 = h + 0xf0
headHex h n = error "Can't happen: Invalid UTF8 character"
toHex i = i + 0x80
showHexes = foldr ((++) . showUTF8) ""
showUTF8 c = "\"\"\\x" ++ showHex c "\"\""
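-- For example, @showCStr "a\nb"@ produces the C literal @"a""\x0a""b"@: the
-- newline becomes a hex escape closed off by an empty string so that a
-- following digit cannot extend the escape.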
bcc :: Int -> BC -> String
bcc i (ASSIGN l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
bcc i (ASSIGNCONST l c)
= indent i ++ creg l ++ " = " ++ mkConst c ++ ";\n"
where
mkConst (I i) = "MKINT(" ++ show i ++ ")"
mkConst (BI i) | i < (2^30) = "MKINT(" ++ show i ++ ")"
| otherwise = "MKBIGC(vm,\"" ++ show i ++ "\")"
mkConst (Fl f) = "MKFLOAT(vm, " ++ show f ++ ")"
mkConst (Ch c) = "MKINT(" ++ show (fromEnum c) ++ ")"
mkConst (Str s) = "MKSTR(vm, " ++ showCStr s ++ ")"
mkConst (B8 x) = "idris_b8const(vm, " ++ show x ++ "U)"
mkConst (B16 x) = "idris_b16const(vm, " ++ show x ++ "U)"
mkConst (B32 x) = "idris_b32const(vm, " ++ show x ++ "UL)"
mkConst (B64 x) = "idris_b64const(vm, " ++ show x ++ "ULL)"
-- if it's a type constant, we won't use it, but equally it shouldn't
-- report an error. These might creep into generated for various reasons
-- (especially if erasure is disabled).
mkConst c | isTypeConst c = "MKINT(42424242)"
mkConst c = error $ "mkConst of (" ++ show c ++ ") not implemented"
bcc i (UPDATE l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
bcc i (MKCON l loc tag []) | tag < 256
= indent i ++ creg l ++ " = NULL_CON(" ++ show tag ++ ");\n"
bcc i (MKCON l loc tag args)
= indent i ++ alloc loc tag ++
indent i ++ setArgs 0 args ++ "\n" ++
indent i ++ creg l ++ " = " ++ creg Tmp ++ ";\n"
-- "MKCON(vm, " ++ creg l ++ ", " ++ show tag ++ ", " ++
-- show (length args) ++ concatMap showArg args ++ ");\n"
where showArg r = ", " ++ creg r
setArgs i [] = ""
setArgs i (x : xs) = "SETARG(" ++ creg Tmp ++ ", " ++ show i ++ ", " ++ creg x ++
"); " ++ setArgs (i + 1) xs
alloc Nothing tag
= "allocCon(" ++ creg Tmp ++ ", vm, " ++ show tag ++ ", " ++
show (length args) ++ ", 0);\n"
alloc (Just old) tag
= "updateCon(" ++ creg Tmp ++ ", " ++ creg old ++ ", " ++ show tag ++ ", " ++
show (length args) ++ ");\n"
bcc i (PROJECT l loc a) = indent i ++ "PROJECT(vm, " ++ creg l ++ ", " ++ show loc ++
", " ++ show a ++ ");\n"
bcc i (PROJECTINTO r t idx)
= indent i ++ creg r ++ " = GETARG(" ++ creg t ++ ", " ++ show idx ++ ");\n"
bcc i (CASE True r code def)
| length code < 4 = showCase i def code
where
showCode :: Int -> [BC] -> String
showCode i bc = "{\n" ++ concatMap (bcc (i + 1)) bc ++
indent i ++ "}\n"
showCase :: Int -> Maybe [BC] -> [(Int, [BC])] -> String
showCase i Nothing [(t, c)] = indent i ++ showCode i c
showCase i (Just def) [] = indent i ++ showCode i def
showCase i def ((t, c) : cs)
= indent i ++ "if (CTAG(" ++ creg r ++ ") == " ++ show t ++ ") " ++ showCode i c
++ indent i ++ "else\n" ++ showCase i def cs
bcc i (CASE safe r code def)
= indent i ++ "switch(" ++ ctag safe ++ "(" ++ creg r ++ ")) {\n" ++
concatMap (showCase i) code ++
showDef i def ++
indent i ++ "}\n"
where
ctag True = "CTAG"
ctag False = "TAG"
showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
++ concatMap (bcc (i+1)) bc ++ indent (i + 1) ++ "break;\n"
showDef i Nothing = ""
showDef i (Just c) = indent i ++ "default:\n"
++ concatMap (bcc (i+1)) c ++ indent (i + 1) ++ "break;\n"
bcc i (CONSTCASE r code def)
| intConsts code
-- = indent i ++ "switch(GETINT(" ++ creg r ++ ")) {\n" ++
-- concatMap (showCase i) code ++
-- showDef i def ++
-- indent i ++ "}\n"
= concatMap (iCase (creg r)) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| strConsts code
= concatMap (strCase ("GETSTR(" ++ creg r ++ ")")) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| bigintConsts code
= concatMap (biCase (creg r)) code ++
indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
| otherwise = error $ "Can't happen: Can't compile const case " ++ show code
where
intConsts ((I _, _ ) : _) = True
intConsts ((Ch _, _ ) : _) = True
intConsts ((B8 _, _ ) : _) = True
intConsts ((B16 _, _ ) : _) = True
intConsts ((B32 _, _ ) : _) = True
intConsts ((B64 _, _ ) : _) = True
intConsts _ = False
bigintConsts ((BI _, _ ) : _) = True
bigintConsts _ = False
strConsts ((Str _, _ ) : _) = True
strConsts _ = False
strCase sv (s, bc) =
indent i ++ "if (strcmp(" ++ sv ++ ", " ++ show s ++ ") == 0) {\n" ++
concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
biCase bv (BI b, bc) =
indent i ++ "if (bigEqConst(" ++ bv ++ ", " ++ show b ++ ")) {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (I b, bc) =
indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show b ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (Ch b, bc) =
indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show (fromEnum b) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (B8 w, bc) =
indent i ++ "if (GETBITS8(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (B16 w, bc) =
indent i ++ "if (GETBITS16(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (B32 w, bc) =
indent i ++ "if (GETBITS32(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
iCase v (B64 w, bc) =
indent i ++ "if (GETBITS64(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
++ concatMap (bcc (i+1)) bc ++
indent (i + 1) ++ "break;\n"
showDef i Nothing = ""
showDef i (Just c) = indent i ++ "default:\n"
++ concatMap (bcc (i+1)) c ++
indent (i + 1) ++ "break;\n"
showDefS i Nothing = ""
showDefS i (Just c) = concatMap (bcc (i+1)) c
bcc i (CALL n) = indent i ++ "CALL(" ++ cname n ++ ");\n"
bcc i (TAILCALL n) = indent i ++ "TAILCALL(" ++ cname n ++ ");\n"
bcc i (SLIDE n) = indent i ++ "SLIDE(vm, " ++ show n ++ ");\n"
bcc i REBASE = indent i ++ "REBASE;\n"
bcc i (RESERVE 0) = ""
bcc i (RESERVE n) = indent i ++ "RESERVE(" ++ show n ++ ");\n"
bcc i (ADDTOP 0) = ""
bcc i (ADDTOP n) = indent i ++ "ADDTOP(" ++ show n ++ ");\n"
bcc i (TOPBASE n) = indent i ++ "TOPBASE(" ++ show n ++ ");\n"
bcc i (BASETOP n) = indent i ++ "BASETOP(" ++ show n ++ ");\n"
bcc i STOREOLD = indent i ++ "STOREOLD;\n"
bcc i (OP l fn args) = indent i ++ doOp (creg l ++ " = ") fn args ++ ";\n"
bcc i (FOREIGNCALL l rty (FStr fn) args)
= indent i ++
c_irts (toFType rty) (creg l ++ " = ")
(fn ++ "(" ++ showSep "," (map fcall args) ++ ")") ++ ";\n"
where fcall (t, arg) = irts_c (toFType t) (creg arg)
bcc i (NULL r) = indent i ++ creg r ++ " = NULL;\n" -- clear, so it'll be GCed
bcc i (ERROR str) = indent i ++ "fprintf(stderr, " ++ show str ++ "); fprintf(stderr, \"\\n\"); exit(-1);\n"
-- bcc i c = error (show c) -- indent i ++ "// not done yet\n"
-- Deconstruct the Foreign type in the defunctionalised expression and build
-- a foreign type description for c_irts and irts_c
toAType (FCon i)
| i == sUN "C_IntChar" = ATInt ITChar
| i == sUN "C_IntNative" = ATInt ITNative
| i == sUN "C_IntBits8" = ATInt (ITFixed IT8)
| i == sUN "C_IntBits16" = ATInt (ITFixed IT16)
| i == sUN "C_IntBits32" = ATInt (ITFixed IT32)
| i == sUN "C_IntBits64" = ATInt (ITFixed IT64)
toAType t = error (show t ++ " not defined in toAType")
toFType (FCon c)
| c == sUN "C_Str" = FString
| c == sUN "C_Float" = FArith ATFloat
| c == sUN "C_Ptr" = FPtr
| c == sUN "C_MPtr" = FManagedPtr
| c == sUN "C_Unit" = FUnit
toFType (FApp c [_,ity])
| c == sUN "C_IntT" = FArith (toAType ity)
toFType (FApp c [_])
| c == sUN "C_Any" = FAny
toFType t = FAny
c_irts (FArith (ATInt ITNative)) l x = l ++ "MKINT((i_int)(" ++ x ++ "))"
c_irts (FArith (ATInt ITChar)) l x = c_irts (FArith (ATInt ITNative)) l x
c_irts (FArith (ATInt (ITFixed ity))) l x
= l ++ "idris_b" ++ show (nativeTyWidth ity) ++ "const(vm, " ++ x ++ ")"
c_irts FString l x = l ++ "MKSTR(vm, " ++ x ++ ")"
c_irts FUnit l x = x
c_irts FPtr l x = l ++ "MKPTR(vm, " ++ x ++ ")"
c_irts FManagedPtr l x = l ++ "MKMPTR(vm, " ++ x ++ ")"
c_irts (FArith ATFloat) l x = l ++ "MKFLOAT(vm, " ++ x ++ ")"
c_irts FAny l x = l ++ x
irts_c (FArith (ATInt ITNative)) x = "GETINT(" ++ x ++ ")"
irts_c (FArith (ATInt ITChar)) x = irts_c (FArith (ATInt ITNative)) x
irts_c (FArith (ATInt (ITFixed ity))) x
= "(" ++ x ++ "->info.bits" ++ show (nativeTyWidth ity) ++ ")"
irts_c FString x = "GETSTR(" ++ x ++ ")"
irts_c FUnit x = x
irts_c FPtr x = "GETPTR(" ++ x ++ ")"
irts_c FManagedPtr x = "GETMPTR(" ++ x ++ ")"
irts_c (FArith ATFloat) x = "GETFLOAT(" ++ x ++ ")"
irts_c FAny x = x
bitOp v op ty args = v ++ "idris_b" ++ show (nativeTyWidth ty) ++ op ++ "(vm, " ++ intercalate ", " (map creg args) ++ ")"
bitCoerce v op input output arg
= v ++ "idris_b" ++ show (nativeTyWidth input) ++ op ++ show (nativeTyWidth output) ++ "(vm, " ++ creg arg ++ ")"
signedTy :: NativeTy -> String
signedTy t = "int" ++ show (nativeTyWidth t) ++ "_t"
doOp v (LPlus (ATInt ITNative)) [l, r] = v ++ "ADD(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITNative)) [l, r] = v ++ "INTOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITNative)) [l, r] = v ++ "MULT(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LUDiv ITNative) [l, r] = v ++ "UINTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITNative)) [l, r] = v ++ "INTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LURem ITNative) [l, r] = v ++ "UINTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITNative)) [l, r] = v ++ "INTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LAnd ITNative) [l, r] = v ++ "INTOP(&," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LOr ITNative) [l, r] = v ++ "INTOP(|," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LXOr ITNative) [l, r] = v ++ "INTOP(^," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSHL ITNative) [l, r] = v ++ "INTOP(<<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLSHR ITNative) [l, r] = v ++ "UINTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LASHR ITNative) [l, r] = v ++ "INTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LCompl ITNative) [x] = v ++ "INTOP(~," ++ creg x ++ ")"
doOp v (LEq (ATInt ITNative)) [l, r] = v ++ "INTOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITNative)) [l, r] = v ++ "INTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITNative)) [l, r] = v ++ "INTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITNative)) [l, r] = v ++ "INTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITNative)) [l, r] = v ++ "INTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLt ITNative) [l, r] = v ++ "UINTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLe ITNative) [l, r] = v ++ "UINTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGt ITNative) [l, r] = v ++ "UINTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGe ITNative) [l, r] = v ++ "UINTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LPlus (ATInt ITChar)) [l, r] = doOp v (LPlus (ATInt ITNative)) [l, r]
doOp v (LMinus (ATInt ITChar)) [l, r] = doOp v (LMinus (ATInt ITNative)) [l, r]
doOp v (LTimes (ATInt ITChar)) [l, r] = doOp v (LTimes (ATInt ITNative)) [l, r]
doOp v (LUDiv ITChar) [l, r] = doOp v (LUDiv ITNative) [l, r]
doOp v (LSDiv (ATInt ITChar)) [l, r] = doOp v (LSDiv (ATInt ITNative)) [l, r]
doOp v (LURem ITChar) [l, r] = doOp v (LURem ITNative) [l, r]
doOp v (LSRem (ATInt ITChar)) [l, r] = doOp v (LSRem (ATInt ITNative)) [l, r]
doOp v (LAnd ITChar) [l, r] = doOp v (LAnd ITNative) [l, r]
doOp v (LOr ITChar) [l, r] = doOp v (LOr ITNative) [l, r]
doOp v (LXOr ITChar) [l, r] = doOp v (LXOr ITNative) [l, r]
doOp v (LSHL ITChar) [l, r] = doOp v (LSHL ITNative) [l, r]
doOp v (LLSHR ITChar) [l, r] = doOp v (LLSHR ITNative) [l, r]
doOp v (LASHR ITChar) [l, r] = doOp v (LASHR ITNative) [l, r]
doOp v (LCompl ITChar) [x] = doOp v (LCompl ITNative) [x]
doOp v (LEq (ATInt ITChar)) [l, r] = doOp v (LEq (ATInt ITNative)) [l, r]
doOp v (LSLt (ATInt ITChar)) [l, r] = doOp v (LSLt (ATInt ITNative)) [l, r]
doOp v (LSLe (ATInt ITChar)) [l, r] = doOp v (LSLe (ATInt ITNative)) [l, r]
doOp v (LSGt (ATInt ITChar)) [l, r] = doOp v (LSGt (ATInt ITNative)) [l, r]
doOp v (LSGe (ATInt ITChar)) [l, r] = doOp v (LSGe (ATInt ITNative)) [l, r]
doOp v (LLt ITChar) [l, r] = doOp v (LLt ITNative) [l, r]
doOp v (LLe ITChar) [l, r] = doOp v (LLe ITNative) [l, r]
doOp v (LGt ITChar) [l, r] = doOp v (LGt ITNative) [l, r]
doOp v (LGe ITChar) [l, r] = doOp v (LGe ITNative) [l, r]
doOp v (LPlus ATFloat) [l, r] = v ++ "FLOATOP(+," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus ATFloat) [l, r] = v ++ "FLOATOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes ATFloat) [l, r] = v ++ "FLOATOP(*," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv ATFloat) [l, r] = v ++ "FLOATOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq ATFloat) [l, r] = v ++ "FLOATBOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt ATFloat) [l, r] = v ++ "FLOATBOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe ATFloat) [l, r] = v ++ "FLOATBOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt ATFloat) [l, r] = v ++ "FLOATBOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe ATFloat) [l, r] = v ++ "FLOATBOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LIntFloat ITBig) [x] = v ++ "idris_castBigFloat(vm, " ++ creg x ++ ")"
doOp v (LFloatInt ITBig) [x] = v ++ "idris_castFloatBig(vm, " ++ creg x ++ ")"
doOp v (LPlus (ATInt ITBig)) [l, r] = v ++ "idris_bigPlus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITBig)) [l, r] = v ++ "idris_bigMinus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITBig)) [l, r] = v ++ "idris_bigTimes(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITBig)) [l, r] = v ++ "idris_bigDivide(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITBig)) [l, r] = v ++ "idris_bigMod(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq (ATInt ITBig)) [l, r] = v ++ "idris_bigEq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITBig)) [l, r] = v ++ "idris_bigLt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITBig)) [l, r] = v ++ "idris_bigLe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITBig)) [l, r] = v ++ "idris_bigGt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITBig)) [l, r] = v ++ "idris_bigGe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LIntFloat ITNative) [x] = v ++ "idris_castIntFloat(" ++ creg x ++ ")"
doOp v (LFloatInt ITNative) [x] = v ++ "idris_castFloatInt(" ++ creg x ++ ")"
doOp v (LSExt ITNative ITBig) [x] = v ++ "idris_castIntBig(vm, " ++ creg x ++ ")"
doOp v (LTrunc ITBig ITNative) [x] = v ++ "idris_castBigInt(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITBig) [x] = v ++ "idris_castStrBig(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITBig) [x] = v ++ "idris_castBigStr(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITNative) [x] = v ++ "idris_castIntStr(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITNative) [x] = v ++ "idris_castStrInt(vm, " ++ creg x ++ ")"
doOp v (LIntStr (ITFixed _)) [x] = v ++ "idris_castBitsStr(vm, " ++ creg x ++ ")"
doOp v LFloatStr [x] = v ++ "idris_castFloatStr(vm, " ++ creg x ++ ")"
doOp v LStrFloat [x] = v ++ "idris_castStrFloat(vm, " ++ creg x ++ ")"
doOp v (LSLt (ATInt (ITFixed ty))) [x, y] = bitOp v "SLt" ty [x, y]
doOp v (LSLe (ATInt (ITFixed ty))) [x, y] = bitOp v "SLte" ty [x, y]
doOp v (LEq (ATInt (ITFixed ty))) [x, y] = bitOp v "Eq" ty [x, y]
doOp v (LSGe (ATInt (ITFixed ty))) [x, y] = bitOp v "SGte" ty [x, y]
doOp v (LSGt (ATInt (ITFixed ty))) [x, y] = bitOp v "SGt" ty [x, y]
doOp v (LLt (ITFixed ty)) [x, y] = bitOp v "Lt" ty [x, y]
doOp v (LLe (ITFixed ty)) [x, y] = bitOp v "Lte" ty [x, y]
doOp v (LGe (ITFixed ty)) [x, y] = bitOp v "Gte" ty [x, y]
doOp v (LGt (ITFixed ty)) [x, y] = bitOp v "Gt" ty [x, y]
doOp v (LSHL (ITFixed ty)) [x, y] = bitOp v "Shl" ty [x, y]
doOp v (LLSHR (ITFixed ty)) [x, y] = bitOp v "LShr" ty [x, y]
doOp v (LASHR (ITFixed ty)) [x, y] = bitOp v "AShr" ty [x, y]
doOp v (LAnd (ITFixed ty)) [x, y] = bitOp v "And" ty [x, y]
doOp v (LOr (ITFixed ty)) [x, y] = bitOp v "Or" ty [x, y]
doOp v (LXOr (ITFixed ty)) [x, y] = bitOp v "Xor" ty [x, y]
doOp v (LCompl (ITFixed ty)) [x] = bitOp v "Compl" ty [x]
doOp v (LPlus (ATInt (ITFixed ty))) [x, y] = bitOp v "Plus" ty [x, y]
doOp v (LMinus (ATInt (ITFixed ty))) [x, y] = bitOp v "Minus" ty [x, y]
doOp v (LTimes (ATInt (ITFixed ty))) [x, y] = bitOp v "Times" ty [x, y]
doOp v (LUDiv (ITFixed ty)) [x, y] = bitOp v "UDiv" ty [x, y]
doOp v (LSDiv (ATInt (ITFixed ty))) [x, y] = bitOp v "SDiv" ty [x, y]
doOp v (LURem (ITFixed ty)) [x, y] = bitOp v "URem" ty [x, y]
doOp v (LSRem (ATInt (ITFixed ty))) [x, y] = bitOp v "SRem" ty [x, y]
doOp v (LSExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGSI(vm, (" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LSExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LSExt ITChar (ITFixed to)) [x]
= doOp v (LSExt ITNative (ITFixed to)) [x]
doOp v (LSExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)((" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ "))"
doOp v (LSExt (ITFixed from) ITChar) [x]
= doOp v (LSExt (ITFixed from) ITNative) [x]
doOp v (LSExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "S" from to x
doOp v (LZExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt ITChar (ITFixed to)) [x]
= doOp v (LZExt ITNative (ITFixed to)) [x]
doOp v (LZExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt (ITFixed from) ITChar) [x]
= doOp v (LZExt (ITFixed from) ITNative) [x]
doOp v (LZExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGUI(vm, " ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt ITNative ITBig) [x]
= v ++ "MKBIGUI(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "Z" from to x
doOp v (LTrunc ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LTrunc ITChar (ITFixed to)) [x]
= doOp v (LTrunc ITNative (ITFixed to)) [x]
doOp v (LTrunc (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LTrunc (ITFixed from) ITChar) [x]
= doOp v (LTrunc (ITFixed from) ITNative) [x]
doOp v (LTrunc ITBig (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, ISINT(" ++ creg x ++ ") ? GETINT(" ++ creg x ++ ") : mpz_get_ui(GETMPZ(" ++ creg x ++ ")))"
doOp v (LTrunc (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from > nativeTyWidth to = bitCoerce v "T" from to x
doOp v LFExp [x] = v ++ flUnOp "exp" (creg x)
doOp v LFLog [x] = v ++ flUnOp "log" (creg x)
doOp v LFSin [x] = v ++ flUnOp "sin" (creg x)
doOp v LFCos [x] = v ++ flUnOp "cos" (creg x)
doOp v LFTan [x] = v ++ flUnOp "tan" (creg x)
doOp v LFASin [x] = v ++ flUnOp "asin" (creg x)
doOp v LFACos [x] = v ++ flUnOp "acos" (creg x)
doOp v LFATan [x] = v ++ flUnOp "atan" (creg x)
doOp v LFSqrt [x] = v ++ flUnOp "sqrt" (creg x)
doOp v LFFloor [x] = v ++ flUnOp "floor" (creg x)
doOp v LFCeil [x] = v ++ flUnOp "ceil" (creg x)
doOp v LFNegate [x] = v ++ "MKFLOAT(vm, -GETFLOAT(" ++ (creg x) ++ "))"
-- String functions which don't need to know we're UTF8
doOp v LStrConcat [l,r] = v ++ "idris_concat(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrLt [l,r] = v ++ "idris_strlt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrEq [l,r] = v ++ "idris_streq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LReadStr [_] = v ++ "idris_readStr(vm, stdin)"
doOp v LWriteStr [_,s]
= v ++ "MKINT((i_int)(idris_writeStr(stdout"
++ ",GETSTR("
++ creg s ++ "))))"
-- String functions which need to know we're UTF8
doOp v LStrHead [x] = v ++ "idris_strHead(vm, " ++ creg x ++ ")"
doOp v LStrTail [x] = v ++ "idris_strTail(vm, " ++ creg x ++ ")"
doOp v LStrCons [x, y] = v ++ "idris_strCons(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrIndex [x, y] = v ++ "idris_strIndex(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrRev [x] = v ++ "idris_strRev(vm, " ++ creg x ++ ")"
doOp v LStrLen [x] = v ++ "idris_strlen(vm, " ++ creg x ++ ")"
doOp v LStrSubstr [x,y,z] = v ++ "idris_substr(vm, " ++ creg x ++ "," ++ creg y ++ "," ++ creg z ++ ")"
doOp v LFork [x] = v ++ "MKPTR(vm, vmThread(vm, " ++ cname (sMN 0 "EVAL") ++ ", " ++ creg x ++ "))"
doOp v LPar [x] = v ++ creg x -- "MKPTR(vm, vmThread(vm, " ++ cname (MN 0 "EVAL") ++ ", " ++ creg x ++ "))"
doOp v (LChInt ITNative) args = v ++ creg (last args)
doOp v (LChInt ITChar) args = doOp v (LChInt ITNative) args
doOp v (LIntCh ITNative) args = v ++ creg (last args)
doOp v (LIntCh ITChar) args = doOp v (LIntCh ITNative) args
doOp v LSystemInfo [x] = v ++ "idris_systemInfo(vm, " ++ creg x ++ ")"
doOp v LNoOp args = v ++ creg (last args)
-- Pointer primitives (declared as %extern in Builtins.idr)
doOp v (LExternal rf) [_,x]
| rf == sUN "prim__readFile"
= v ++ "idris_readStr(vm, GETPTR(" ++ creg x ++ "))"
doOp v (LExternal wf) [_,x,s]
| wf == sUN "prim__writeFile"
= v ++ "MKINT((i_int)(idris_writeStr(GETPTR(" ++ creg x
++ "),GETSTR("
++ creg s ++ "))))"
doOp v (LExternal vm) [] | vm == sUN "prim__vm" = v ++ "MKPTR(vm, vm)"
doOp v (LExternal si) [] | si == sUN "prim__stdin" = v ++ "MKPTR(vm, stdin)"
doOp v (LExternal so) [] | so == sUN "prim__stdout" = v ++ "MKPTR(vm, stdout)"
doOp v (LExternal se) [] | se == sUN "prim__stderr" = v ++ "MKPTR(vm, stderr)"
doOp v (LExternal nul) [] | nul == sUN "prim__null" = v ++ "MKPTR(vm, NULL)"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqPtr"
= v ++ "MKINT((i_int)(GETPTR(" ++ creg x ++ ") == GETPTR(" ++ creg y ++ ")))"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqManagedPtr"
= v ++ "MKINT((i_int)(GETMPTR(" ++ creg x ++ ") == GETMPTR(" ++ creg y ++ ")))"
doOp v (LExternal rp) [p, i] | rp == sUN "prim__registerPtr"
= v ++ "MKMPTR(vm, GETPTR(" ++ creg p ++ "), GETINT(" ++ creg i ++ "))"
doOp _ op args = error $ "doOp not implemented (" ++ show (op, args) ++ ")"
flUnOp :: String -> String -> String
flUnOp name val = "MKFLOAT(vm, " ++ name ++ "(GETFLOAT(" ++ val ++ ")))"
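-- For example, @flUnOp "sqrt" (creg (L 0))@ is
-- @"MKFLOAT(vm, sqrt(GETFLOAT(LOC(0))))"@.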
-------------------- Interface file generation
-- First, the wrappers in the C file
ifaceC :: Export -> String
ifaceC (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
ifaceC (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ") {\n"
++ mkBody n (zip argNames args) ret ++ "}\n\n"
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
argNames = zipWith (++) (repeat "arg") (map show [0..])
mkBody n as t = indent 1 ++ "INITFRAME;\n" ++
indent 1 ++ "RESERVE(" ++ show (max (length as) 3) ++ ");\n" ++
push 0 as ++ call n ++ retval t
where push i [] = ""
push i ((n, t) : ts) = indent 1 ++ c_irts (toFType t)
("TOP(" ++ show i ++ ") = ") n
++ ";\n" ++ push (i + 1) ts
call _ = indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(" ++ show (length as) ++ ");\n" ++
indent 1 ++ "CALL(" ++ cname n ++ ");\n"
retval (FIO t)
= indent 1 ++ "TOP(0) = NULL;\n" ++
indent 1 ++ "TOP(1) = NULL;\n" ++
indent 1 ++ "TOP(2) = RVAL;\n" ++
indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(3);\n" ++
indent 1 ++ "CALL(" ++ cname (sUN "call__IO") ++ ");\n" ++
retval t
retval t = indent 1 ++ "return " ++ irts_c (toFType t) "RVAL" ++ ";\n"
ctype (FCon c)
| c == sUN "C_Str" = "char*"
| c == sUN "C_Float" = "float"
| c == sUN "C_Ptr" = "void*"
| c == sUN "C_MPtr" = "void*"
| c == sUN "C_Unit" = "void"
ctype (FApp c [_,ity])
| c == sUN "C_IntT" = carith ity
ctype (FApp c [_])
| c == sUN "C_Any" = "VAL"
ctype (FStr s) = s
ctype FUnknown = "void*"
ctype (FIO t) = ctype t
ctype t = error ("Can't happen: Not a valid interface type " ++ show t)
carith (FCon i)
| i == sUN "C_IntChar" = "char"
| i == sUN "C_IntNative" = "int"
carith t = error "Can't happen: Not an exportable arithmetic type"
cdesc (FStr s) = s
cdesc s = error "Can't happen: Not a valid C name"
-- Then, the header files
codegenH :: [ExportIFace] -> IO ()
codegenH es = mapM_ writeIFace es
writeIFace :: ExportIFace -> IO ()
writeIFace (Export ffic hdr exps)
| ffic == sNS (sUN "FFI_C") ["FFI_C"]
= do let hfile = "#ifndef " ++ hdr_guard hdr ++ "\n" ++
"#define " ++ hdr_guard hdr ++ "\n\n" ++
"#include <idris_rts.h>\n\n" ++
concatMap hdr_export exps ++ "\n" ++
"#endif\n\n"
writeFile hdr hfile
| otherwise = return ()
hdr_guard x = "__" ++ map hchar x
where hchar x | isAlphaNum x = toUpper x
hchar _ = '_'
hdr_export :: Export -> String
hdr_export (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
hdr_export (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ");\n"
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
argNames = zipWith (++) (repeat "arg") (map show [0..])
| mrmonday/Idris-dev | src/IRTS/CodegenC.hs | bsd-3-clause | 32,663 | 0 | 27 | 9,379 | 14,894 | 7,408 | 7,486 | 587 | 32 |
module Case1 where
fib n
| n <= 1 = 1
| otherwise = case (fib (n-1), fib (n-2)) of
(n1, n2) -> n1 + n2 + 1
| RefactoringTools/HaRe | old/testing/evalMonad/Case1.hs | bsd-3-clause | 152 | 0 | 10 | 73 | 81 | 42 | 39 | 5 | 1 |
import Test.Cabal.Prelude
main = setupAndCabalTest $ do
skipUnless =<< ghcVersionIs (>= mkVersion [8,1])
withPackageDb $ do
withDirectory "mylib" $ setup_install_with_docs ["--ipid", "mylib-0.1.0.0"]
withDirectory "mysql" $ setup_install_with_docs ["--ipid", "mysql-0.1.0.0"]
withDirectory "postgresql" $ setup_install_with_docs ["--ipid", "postgresql-0.1.0.0"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=mysql-0.1.0.0:Database.MySQL"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=postgresql-0.1.0.0:Database.PostgreSQL"]
withDirectory "src" $ setup_install_with_docs []
withDirectory "exe" $ do
setup_install_with_docs []
runExe' "exe" [] >>= assertOutputContains "minemysql minepostgresql"
| mydaum/cabal | cabal-testsuite/PackageTests/Backpack/Includes2/setup-external.test.hs | bsd-3-clause | 948 | 0 | 16 | 196 | 203 | 97 | 106 | 17 | 1 |
euclideanDistance :: [Double] -> [Double] -> Double
euclideanDistance p q = sqrt . sum $ zipWith (\ u v -> (u-v)^2) p q
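-- An illustrative check (added comment): the classic 3-4-5 right triangle.
--
-- >>> euclideanDistance [0, 0] [3, 4]
-- 5.0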
| imanmafi/Algorithm-Implementations | Euclidean_distance/Haskell/jcla1/euclidean_distance.hs | mit | 120 | 0 | 11 | 23 | 66 | 35 | 31 | 2 | 1 |
{-# LANGUAGE PolyKinds, GADTs, KindSignatures, DataKinds, FlexibleInstances #-}
module T7438a where
data Thrist :: k -> k -> * where
Nil :: Thrist a a
| urbanslug/ghc | testsuite/tests/polykinds/T7438a.hs | bsd-3-clause | 157 | 0 | 6 | 31 | 30 | 18 | 12 | 4 | 0 |
{-# htermination fmToList_GE :: (Ord a, Ord k) => FiniteMap (Either a k) b -> (Either a k) -> [((Either a k),b)] #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_fmToList_GE_10.hs | mit | 135 | 0 | 3 | 26 | 5 | 3 | 2 | 1 | 0 |
module Main(main) where
import System.Directory (Permissions)
import System.IO (FilePath)
import System.Time (ClockTime) -- from the old-time package
data Info = Info {
infoPath :: FilePath
, infoPerms :: Maybe Permissions
, infoSize :: Maybe Integer
, infoModTime :: Maybe ClockTime
} deriving (Eq, Ord, Show)
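-- A minimal stub entry point (added so the export list above resolves; a real
-- version of this exercise would put the directory-traversal driver here).
main :: IO ()
main = return ()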
| rockdragon/julia-programming | code/haskell/ControlVisit.hs | mit | 310 | 0 | 9 | 96 | 86 | 50 | 36 | 9 | 0 |
{-# htermination (/=) :: Float -> Float -> Bool #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_SLASHEQ_8.hs | mit | 52 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable, NamedFieldPuns #-}
-- | Just re-exports a few of the types from
-- "Service.Twilio.Types". There are a number of helper functions
-- which are currently exposed by "Service.Twilio.Types" that aren't
-- exposed here.
module Service.Twilio (
-- * Base Twilio types
Price, -- type synonym for 'Int'
Passport (..),
PhoneNumber (..),
SendStatus (..),
APIKind (..),
SMSKind (..),
Id (..), uri,
SMSCore (..),
SMS (..),
-- * Parsing
FromFormUrlencoded (..),
-- * Request signing
requestSignature
) where
import Prelude hiding (id)
import Service.Twilio.Types
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base64 as B64
import Data.Monoid
import Data.List
import Data.Ord
import Crypto.Hash.SHA1 (hash)
import Crypto.MAC.HMAC (hmac)
-- | Given a Passport, a target URL, the raw query string, and a set
-- of body parameters, this function computes the canonical request
-- signature Twilio uses to authenticate itself.
--
-- A more flexible form of 'requestSignature' could be used with the
-- API inconsistencies for HTTP call requests and HTTPS call
-- requests. See the bottom of <http://www.twilio.com/docs/security>
-- for more details.
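--
-- A usage sketch (the passport, URL and parameters below are illustrative
-- placeholders, not values shipped with this package):
--
-- > let sig = requestSignature passport "https://example.com/sms" ""
-- >             [("From", "+15550001111"), ("Body", "Hello")]
-- > -- sig is the Base64-encoded HMAC-SHA1 digest Twilio sends in
-- > -- the X-Twilio-Signature header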
requestSignature :: Passport
-> ByteString -- ^ The full URL
-> ByteString -- ^ The raw query string including the "?"
-> [(ByteString, ByteString)] -- ^ Post parameters in Body
-> ByteString
requestSignature (Passport _ token) url qs headers =
encode $ url <> qs <> canonize headers
where encode = B64.encode . hmac hash 64 token
canonize = mconcat . map (uncurry mappend) . sortBy (comparing fst) | reifyhealth/twill | src/Service/Twilio.hs | mit | 1,748 | 0 | 11 | 381 | 289 | 182 | 107 | 31 | 1 |
module Control.Concurrent.ForkOrDoPool where
import Control.Concurrent
import Control.Concurrent.MVar as MVar
import Data.List as List
import Data.Maybe as Maybe
import Prelude.Extensions as PreludeExt
type ForkOrDoPool = [(ThreadId, MVar (IO ()))]
createPool :: Int -> IO ForkOrDoPool
createPool = \number_of_threads -> do
let work_lock = MVar.newEmptyMVar
work_locks <- (mapM id (replicate number_of_threads work_lock))
thread_ids <- (mapM ((.) forkIO workerThread) work_locks)
(return (zip thread_ids work_locks))
workerThread :: (MVar (IO ())) -> IO ()
workerThread = \work_lock -> do
work <- (MVar.readMVar work_lock)
work
(MVar.takeMVar work_lock)
(workerThread work_lock)
submitTask :: ForkOrDoPool -> IO () -> IO Bool
submitTask = \threadpool task -> do
let {submit_first = do
let (thread_id, work_lock) = (List.head threadpool)
(MVar.tryPutMVar work_lock task)}
let {try_submit = do
success <- submit_first
(ifElse success (return True) (submitTask (List.tail threadpool) task))}
(ifElse (List.null threadpool) (return False) try_submit)
forkOrDo :: ForkOrDoPool -> IO () -> IO ()
forkOrDo = \threadpool task -> do
success <- (submitTask threadpool task)
(doIf (not success) task)
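-- Illustrative usage (added sketch, not part of the original module): build a
-- small pool and hand it one task; when every worker is busy, 'forkOrDo'
-- simply runs the task on the calling thread.
exampleUsage :: IO ()
exampleUsage = do
    pool <- createPool 4
    forkOrDo pool (putStrLn "hello from the pool (or the caller)")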
| stevedonnelly/haskell | code/Control/Concurrent/ForkOrDoPool.hs | mit | 1,282 | 5 | 19 | 248 | 463 | 243 | 220 | 32 | 1 |
module Language.PCPL.Syntax
( Program(..)
, Domino(..)
, Symbol
, syms
, unsyms
) where
import Language.UTM.Syntax
-- | PCPL program
data Program = Program
{ startDomino :: Input -> Domino
, dominos :: [Domino]
, separator :: Symbol
}
data Domino = Domino [Symbol] [Symbol]
deriving (Eq, Show)
| davidlazar/PCPL | src/Language/PCPL/Syntax.hs | mit | 347 | 0 | 9 | 101 | 101 | 64 | 37 | 13 | 0 |
module PostgREST.Parsers where
import Protolude hiding (try, intercalate)
import Control.Monad ((>>))
import Data.Text (intercalate)
import Data.List (init, last)
import Data.Tree
import PostgREST.QueryBuilder (operators)
import PostgREST.Types
import Text.ParserCombinators.Parsec hiding (many, (<|>))
import PostgREST.RangeQuery (NonnegRange,allRange)
pRequestSelect :: Text -> Parser ReadRequest
pRequestSelect rootNodeName = do
fieldTree <- pFieldForest
return $ foldr treeEntry (Node (readQuery, (rootNodeName, Nothing, Nothing)) []) fieldTree
where
readQuery = Select [] [rootNodeName] [] Nothing allRange
treeEntry :: Tree SelectItem -> ReadRequest -> ReadRequest
treeEntry (Node fld@((fn, _),_,alias) fldForest) (Node (q, i) rForest) =
case fldForest of
[] -> Node (q {select=fld:select q}, i) rForest
_ -> Node (q, i) newForest
where
newForest =
foldr treeEntry (Node (Select [] [fn] [] Nothing allRange, (fn, Nothing, alias)) []) fldForest:rForest
pRequestFilter :: (Text, Text) -> Either ParseError (Path, Filter)
pRequestFilter (k, v) = (,) <$> path <*> (Filter <$> fld <*> op <*> val)
where
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
opVal = parse pOpValueExp ("failed to parse filter (" ++ toS v ++ ")") $ toS v
path = fst <$> treePath
fld = snd <$> treePath
op = fst <$> opVal
val = snd <$> opVal
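-- For illustration (added comment; assumes "eq" is among the imported
-- 'operators'): a query-string pair such as ("clients.name", "eq.John")
-- parses to the path ["clients"] and the filter
-- Filter ("name", Nothing) "eq" (VText "John").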
pRequestOrder :: (Text, Text) -> Either ParseError (Path, [OrderTerm])
pRequestOrder (k, v) = (,) <$> path <*> ord'
where
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
path = fst <$> treePath
ord' = parse pOrder ("failed to parse order (" ++ toS v ++ ")") $ toS v
pRequestRange :: (ByteString, NonnegRange) -> Either ParseError (Path, NonnegRange)
pRequestRange (k, v) = (,) <$> path <*> pure v
where
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
path = fst <$> treePath
ws :: Parser Text
ws = toS <$> many (oneOf " \t")
lexeme :: Parser a -> Parser a
lexeme p = ws *> p <* ws
pTreePath :: Parser (Path,Field)
pTreePath = do
p <- pFieldName `sepBy1` pDelimiter
jp <- optionMaybe pJsonPath
return (init p, (last p, jp))
pFieldForest :: Parser [Tree SelectItem]
pFieldForest = pFieldTree `sepBy1` lexeme (char ',')
pFieldTree :: Parser (Tree SelectItem)
pFieldTree = try (Node <$> pSimpleSelect <*> between (char '{') (char '}') pFieldForest)
<|> Node <$> pSelect <*> pure []
pStar :: Parser Text
pStar = toS <$> (string "*" *> pure ("*"::ByteString))
pFieldName :: Parser Text
pFieldName = do
matches <- (many1 (letter <|> digit <|> oneOf "_") `sepBy1` dash) <?> "field name (* or [a..z0..9_])"
return $ intercalate "-" $ map toS matches
where
isDash :: GenParser Char st ()
isDash = try ( char '-' >> notFollowedBy (char '>') )
dash :: Parser Char
dash = isDash *> pure '-'
pJsonPathStep :: Parser Text
pJsonPathStep = toS <$> try (string "->" *> pFieldName)
pJsonPath :: Parser [Text]
pJsonPath = (<>) <$> many pJsonPathStep <*> ( (:[]) <$> (string "->>" *> pFieldName) )
pField :: Parser Field
pField = lexeme $ (,) <$> pFieldName <*> optionMaybe pJsonPath
aliasSeparator :: Parser ()
aliasSeparator = char ':' >> notFollowedBy (char ':')
pSimpleSelect :: Parser SelectItem
pSimpleSelect = lexeme $ try ( do
alias <- optionMaybe ( try(pFieldName <* aliasSeparator) )
fld <- pField
return (fld, Nothing, alias)
)
pSelect :: Parser SelectItem
pSelect = lexeme $
try (
do
alias <- optionMaybe ( try(pFieldName <* aliasSeparator) )
fld <- pField
cast' <- optionMaybe (string "::" *> many letter)
return (fld, toS <$> cast', alias)
)
<|> do
s <- pStar
return ((s, Nothing), Nothing, Nothing)
pOperator :: Parser Operator
pOperator = toS <$> (pOp <?> "operator (eq, gt, ...)")
where pOp = foldl (<|>) empty $ map (try . string . toS . fst) operators
pValue :: Parser FValue
pValue = VText <$> (toS <$> many anyChar)
pDelimiter :: Parser Char
pDelimiter = char '.' <?> "delimiter (.)"
pOperatiorWithNegation :: Parser Operator
pOperatiorWithNegation = try ( (<>) <$> ( toS <$> string "not." ) <*> pOperator) <|> pOperator
pOpValueExp :: Parser (Operator, FValue)
pOpValueExp = (,) <$> pOperatiorWithNegation <*> (pDelimiter *> pValue)
pOrder :: Parser [OrderTerm]
pOrder = lexeme pOrderTerm `sepBy` char ','
pOrderTerm :: Parser OrderTerm
pOrderTerm =
try ( do
c <- pField
d <- optionMaybe (try $ pDelimiter *> (
try(string "asc" *> pure OrderAsc)
<|> try(string "desc" *> pure OrderDesc)
))
nls <- optionMaybe (pDelimiter *> (
try(string "nullslast" *> pure OrderNullsLast)
<|> try(string "nullsfirst" *> pure OrderNullsFirst)
))
return $ OrderTerm c d nls
)
<|> OrderTerm <$> pField <*> pure Nothing <*> pure Nothing
| NotBrianZach/postgrest | src/PostgREST/Parsers.hs | mit | 5,173 | 0 | 22 | 1,286 | 1,893 | 987 | 906 | 112 | 2 |
module System.Flannel.ParamsSpec
( spec
) where
import System.Flannel.Params
import Test.Hspec
spec :: Spec
spec = do
describe "defaultParams" $ do
it "sets every field as empty" $ do
isSet "test" defaultParams `shouldBe` False
getOption "test" defaultParams `shouldBe` Nothing
getArg "test" defaultParams `shouldBe` Nothing
getRemaining defaultParams `shouldBe` []
describe "setFlag" $ do
let params = setFlag "test" defaultParams
it "sets the specified flag" $ do
isSet "test" params `shouldBe` True
describe "setOption" $ do
let params = setOption "test" "alpha" defaultParams
it "sets the specified option" $ do
getOption "test" params `shouldBe` Just "alpha"
describe "setArg" $ do
let params = setArg "test" "beta" defaultParams
it "sets the specified argument" $ do
getArg "test" params `shouldBe` Just "beta"
describe "addRemaining" $ do
let params = addRemaining ["1", "2"] defaultParams
it "adds the arguments" $ do
getRemaining params `shouldBe` ["1", "2"]
| nahiluhmot/flannel | spec/System/Flannel/ParamsSpec.hs | mit | 1,178 | 0 | 14 | 349 | 321 | 152 | 169 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Network.RestClientSpec (main, spec) where
import Helper
import Network.RestClient
main :: IO ()
main = hspec spec
withServer :: (IO Req -> IO a) -> IO a
withServer = withHttpServer status200 [("Content-Type", "text/plain")] "OK"
spec :: Spec
spec = do
describe "get" $ do
it "performs a GET request" $ withServer $ \r -> do
get "http://localhost:3000"
reqMethod <$> r `shouldReturn` "GET"
it "returns server response" $ withServer $ \_ -> do
get "http://localhost:3000" `shouldReturn` "OK"
describe "post" $ do
it "performs a POST request" $ withServer $ \r -> do
post "http://localhost:3000" ""
reqMethod <$> r `shouldReturn` "POST"
it "attaches a body to the request" $ withServer $ \r -> do
post "http://localhost:3000" "foobar"
reqBody <$> r `shouldReturn` "foobar"
it "returns server response" $ withServer $ \_ -> do
post "http://localhost:3000" "" `shouldReturn` "OK"
| sol/rest-client | test/Network/RestClientSpec.hs | mit | 1,063 | 0 | 15 | 244 | 300 | 150 | 150 | 26 | 1 |
module Y2017.M04.D19.Exercise where
import Data.Aeson
-- below imports available from 1HaskellADay git repository
import Wikidata.Query.Aeson
import Wikidata.Query.Endpoint
import Y2017.M04.D18.Exercise
{--
Okay, today we're going to do the same thing as yesterday, but with a different
SPARQL query. Recall that we wanted eye-colors by frequency in wikidata.
Our original query was eyeColors in Y2017.M04.D14.Exercise
Create a value of type SPARQL that gives an eye color query. Query wikidata and
return the results as a list of EyeColor values
--}
eyeColorQuery :: SPARQL
eyeColorQuery = undefined
data EyeColor = Eyes { color :: String, count :: Int }
deriving Eq
instance FromJSON EyeColor where
parseJSON = undefined
eyeColors :: SPARQL -> IO [EyeColor]
eyeColors queryVal = undefined
-- What is the most-mentioned eye-color? What is the least-mentioned one?
mostMentioned, leastMentioned :: [EyeColor] -> EyeColor
mostMentioned = undefined
leastMentioned = undefined
| geophf/1HaskellADay | exercises/HAD/Y2017/M04/D19/Exercise.hs | mit | 987 | 0 | 8 | 156 | 129 | 80 | 49 | 16 | 1 |
module Spear.Math.Vector.Vector3
(
Vector3(..)
, Right3
, Up3
, Forward3
, Position3
-- * Construction
, unitx3
, unity3
, unitz3
, zero3
, vec3
, orbit
-- * Operations
, cross
)
where
import Spear.Math.Vector.Class
import Foreign.C.Types (CFloat)
import Foreign.Storable
type Right3 = Vector3
type Up3 = Vector3
type Forward3 = Vector3
type Position3 = Vector3
-- | Represents a vector in 3D.
data Vector3 = Vector3
{-# UNPACK #-} !Float
{-# UNPACK #-} !Float
{-# UNPACK #-} !Float
deriving (Eq, Show)
instance Num Vector3 where
Vector3 ax ay az + Vector3 bx by bz = Vector3 (ax + bx) (ay + by) (az + bz)
Vector3 ax ay az - Vector3 bx by bz = Vector3 (ax - bx) (ay - by) (az - bz)
Vector3 ax ay az * Vector3 bx by bz = Vector3 (ax * bx) (ay * by) (az * bz)
abs (Vector3 ax ay az) = Vector3 (abs ax) (abs ay) (abs az)
signum (Vector3 ax ay az) = Vector3 (signum ax) (signum ay) (signum az)
fromInteger i = Vector3 i' i' i' where i' = fromInteger i
instance Fractional Vector3 where
Vector3 ax ay az / Vector3 bx by bz = Vector3 (ax / bx) (ay / by) (az / bz)
fromRational r = Vector3 r' r' r' where r' = fromRational r
instance Ord Vector3 where
Vector3 ax ay az <= Vector3 bx by bz
= (ax <= bx)
       || (ax == bx && ay <= by)
|| (ax == bx && ay == by && az <= bz)
Vector3 ax ay az >= Vector3 bx by bz
= (ax >= bx)
|| (ax == bx && ay >= by)
|| (ax == bx && ay == by && az >= bz)
Vector3 ax ay az < Vector3 bx by bz
= (ax < bx)
       || (ax == bx && ay < by)
|| (ax == bx && ay == by && az < bz)
Vector3 ax ay az > Vector3 bx by bz
= (ax > bx)
|| (ax == bx && ay > by)
|| (ax == bx && ay == by && az > bz)
max (Vector3 ax ay az) (Vector3 bx by bz) = Vector3 (Prelude.max ax bx) (Prelude.max ay by) (Prelude.max az bz)
min (Vector3 ax ay az) (Vector3 bx by bz) = Vector3 (Prelude.min ax bx) (Prelude.min ay by) (Prelude.min az bz)
instance VectorClass Vector3 where
{-# INLINABLE fromList #-}
fromList (ax:ay:az:_) = Vector3 ax ay az
{-# INLINABLE x #-}
x (Vector3 ax _ _ ) = ax
{-# INLINABLE y #-}
y (Vector3 _ ay _ ) = ay
{-# INLINABLE z #-}
z (Vector3 _ _ az) = az
{-# INLINABLE (!) #-}
(Vector3 ax _ _) ! 0 = ax
(Vector3 _ ay _) ! 1 = ay
(Vector3 _ _ az) ! 2 = az
_ ! _ = 0
{-# INLINABLE dot #-}
Vector3 ax ay az `dot` Vector3 bx by bz = ax*bx + ay*by + az*bz
{-# INLINABLE normSq #-}
normSq (Vector3 ax ay az) = ax*ax + ay*ay + az*az
{-# INLINABLE norm #-}
norm = sqrt . normSq
{-# INLINABLE scale #-}
scale s (Vector3 ax ay az) = Vector3 (s*ax) (s*ay) (s*az)
{-# INLINABLE neg #-}
neg (Vector3 ax ay az) = Vector3 (-ax) (-ay) (-az)
{-# INLINABLE normalise #-}
normalise v =
let n' = norm v
n = if n' == 0 then 1 else n'
in scale (1.0 / n) v
sizeFloat = sizeOf (undefined :: CFloat)
instance Storable Vector3 where
sizeOf _ = 3*sizeFloat
alignment _ = alignment (undefined :: CFloat)
peek ptr = do
ax <- peekByteOff ptr 0
ay <- peekByteOff ptr $ 1*sizeFloat
az <- peekByteOff ptr $ 2*sizeFloat
return (Vector3 ax ay az)
poke ptr (Vector3 ax ay az) = do
pokeByteOff ptr 0 ax
pokeByteOff ptr (1*sizeFloat) ay
pokeByteOff ptr (2*sizeFloat) az
-- | Unit vector along the X axis.
unitx3 = Vector3 1 0 0
-- | Unit vector along the Y axis.
unity3 = Vector3 0 1 0
-- | Unit vector along the Z axis.
unitz3 = Vector3 0 0 1
-- | Zero vector.
zero3 = Vector3 0 0 0
-- | Create a 3D vector from the given values.
vec3 :: Float -> Float -> Float -> Vector3
vec3 ax ay az = Vector3 ax ay az
-- | Create a 3D vector as a point on a sphere.
orbit :: Vector3 -- ^ Sphere center.
-> Float -- ^ Sphere radius
-> Float -- ^ Azimuth angle.
-> Float -- ^ Zenith angle.
-> Vector3
orbit center radius anglex angley =
let ax = anglex * pi / 180
ay = angley * pi / 180
sx = sin ax
sy = sin ay
cx = cos ax
cy = cos ay
px = x center + radius*cy*sx
py = y center + radius*sy
pz = z center + radius*cx*cy
in
vec3 px py pz
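-- Illustrative use (added comment): a camera placed 10 units from the origin,
-- at 45 degrees of azimuth and 30 degrees of elevation:
--
-- > cameraPos = orbit zero3 10 45 30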
-- | Compute the given vectors' cross product.
cross :: Vector3 -> Vector3 -> Vector3
(Vector3 ax ay az) `cross` (Vector3 bx by bz) =
Vector3 (ay * bz - az * by) (az * bx - ax * bz) (ax * by - ay * bx)
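-- Sanity check (added comment): the basis vectors behave as a right-handed
-- system, e.g. cross unitx3 unity3 == unitz3.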
| jeannekamikaze/Spear | Spear/Math/Vector/Vector3.hs | mit | 4,913 | 0 | 12 | 1,800 | 1,900 | 973 | 927 | 111 | 1 |
import Control.Monad (forM_)
import Data.Array
import qualified Data.Char as Char
import qualified Data.List as List
data Light = On | Off
deriving (Eq, Show)
type Lights = Array Coordinates Light
type Coordinates = (Int, Int)
main = do
lightsList <- List.transpose <$> map (map parseInput) <$> lines <$> getContents
let gridBounds = ((0, 0), (length (head lightsList) - 1, length lightsList - 1))
let lights = listArray gridBounds (concat lightsList) // cornersOn gridBounds
let steps = iterate step lights
print $ countLights (steps !! 100)
trim :: String -> String
trim = takeWhile (not . Char.isSpace) . dropWhile Char.isSpace
parseInput :: Char -> Light
parseInput '.' = Off
parseInput '#' = On
step :: Lights -> Lights
step lights = (array gridBounds $ map stepLight $ assocs lights) // cornersOn gridBounds
where
stepLight this@(coordinates, state) = case switchedOnNeighbors coordinates of
2 -> (coordinates, state)
3 -> (coordinates, On)
_ -> (coordinates, Off)
switchedOnNeighbors coordinates = countSwitchedOn $ map (lights !) $ neighbours coordinates
neighbours (x, y) =
filter
(inRange gridBounds)
[ (x - 1, y - 1),
(x, y - 1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
]
gridBounds = bounds lights
cornersOn :: (Coordinates, Coordinates) -> [(Coordinates, Light)]
cornersOn ((startX, startY), (endX, endY)) =
map (\c -> (c, On)) [(startX, startY), (endX, startY), (startX, endY), (endX, endY)]
countLights :: Lights -> Int
countLights = countSwitchedOn . elems
countSwitchedOn :: [Light] -> Int
countSwitchedOn = length . filter (== On)
printLights :: Lights -> IO ()
printLights lights =
forM_ [snd (fst gridBounds) .. snd (snd gridBounds)] $ \y -> do
forM_ [fst (fst gridBounds) .. fst (snd gridBounds)] $ \x -> do
case lights ! (x, y) of
Off -> putStr "."
On -> putStr "#"
putStrLn ""
where
gridBounds = bounds lights
| SamirTalwar/advent-of-code | 2015/AOC_18_2.hs | mit | 2,075 | 0 | 16 | 517 | 860 | 468 | 392 | 54 | 3 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Database.EventSafe.ConcSpec
( spec
) where
import Control.Concurrent
import Control.Monad
import Database.EventSafe.Conc
import Database.EventSafe.Types
import Test.Hspec
data EventExample = EventExample deriving (Show, Eq, Ord)
data ResourceRefExample = ResourceRefExample
newtype EventCount = EventCount Int deriving (Show, Eq)
type ESTVarExample = ESTVar [] EventExample
instance ResourceRef EventExample ResourceRefExample where
concerns _ _ = True
instance Resource EventExample EventCount where
firstEvent _ = Just $ EventCount 1
applyEvent _ (EventCount c) = Just $ EventCount $ c + 1
buildResource [] = Nothing
buildResource es = Just $ EventCount $ length es
spec :: Spec
spec = do
describe "ESTVar" $ do
describe "emptyPoolM" $ do
it "returns an empty pool" $ do
pool <- emptyPoolM :: IO ESTVarExample
c <- getResourceM pool ResourceRefExample
c `shouldBe` (Nothing :: Maybe EventCount)
describe "addEventM" $ do
it "adds events concurrently to a ESTVar" $ do
pool <- emptyPoolM :: IO ESTVarExample
let n = 1000
mvars <- replicateM n newEmptyMVar
forM_ mvars $ \mvar -> forkIO $ do
addEventM pool EventExample
putMVar mvar True
mapM_ takeMVar mvars
c <- getResourceM pool ResourceRefExample
c `shouldBe` Just (EventCount n)
| thoferon/eventsafe | tests/Database/EventSafe/ConcSpec.hs | mit | 1,492 | 0 | 21 | 398 | 413 | 204 | 209 | 38 | 1 |
module Elm (module X) where
import Elm.Common as X (Options (..), defaultOptions)
import Elm.Decoder as X
import Elm.Encoder as X
import Elm.File as X
import Elm.Record as X
import Elm.Type as X
| InfernalKnight/elm-export | src/Elm.hs | epl-1.0 | 264 | 0 | 6 | 101 | 64 | 45 | 19 | 7 | 0 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module PolymorphismSpec (spec) where
import Test.Hspec
import Language.Mulang.Ast
import Language.Mulang.Inspector.Combiner (scoped)
import Language.Mulang.Inspector.Contextualized (contextualized, decontextualize)
import Language.Mulang.Inspector.ObjectOriented.Polymorphism
import qualified Language.Mulang.Parsers.Java as J (java)
import Data.Text (Text, unpack)
import NeatInterpolation (text)
java :: Text -> Expression
java = J.java . unpack
spec :: Spec
spec = do
describe "usesStaticMethodOverload" $ do
it "is true when two methods with the same name and different arity exist on the same class" $ do
usesStaticMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void deposit(RegularMoney amount, double conversion) {}
}
|]) `shouldBe` True
it "is true when two methods with the same name and different types exist on the same class" $ do
usesStaticMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void deposit(RegularMoney amount) {}
}
|]) `shouldBe` True
it "is false when there are no duplicated methods" $ do
usesStaticMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void withdraw(BitcoinMoney amount) {}
}
|]) `shouldBe` False
describe "usesDynamicMethodOverload" $ do
it "is true when two methods with the same name and different arity exist on the same class" $ do
usesDynamicMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void deposit(RegularMoney amount, double conversion) {}
}
|]) `shouldBe` True
it "is false when two methods with the same name and different types exist on the same class" $ do
usesDynamicMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void deposit(RegularMoney amount) {}
}
|]) `shouldBe` False
it "is false when there are no duplicated methods" $ do
usesDynamicMethodOverload (java [text|
class Wallet {
void deposit(BitcoinMoney amount) {}
void withdraw(BitcoinMoney amount) {}
}
|]) `shouldBe` False
describe "usesTemplateMethod" $ do
it "is true when an abstract method is uses in an abstract class" $ do
usesTemplateMethod (java [text|
abstract class Account {
double balance;
void extract(double amount) {
checkBalance();
balance -= amount;
}
abstract void checkBalance();
}
class NormalAccount {
void checkBalance() {}
}
class PremiumAccount {
void checkBalance() {}
}
|]) `shouldBe` True
it "is false when no abstract method is used" $ do
usesTemplateMethod (java [text|
abstract class Account {
double balance;
void extract(double amount) {
balance -= amount;
}
abstract void checkBalance();
}
class NormalAccount {
void checkBalance() {}
}
class PremiumAccount {
void checkBalance() {}
}
|]) `shouldBe` False
describe "usesObjectComposition" $ do
    it "is true when an interface attribute is declared" $ do
usesObjectComposition (java [text|
interface Light {
void on();
}
class Room {
Light light;
void enter() {
light.on();
}
}|]) `shouldBe` True
    it "is false when no attribute is declared" $ do
usesObjectComposition (java [text|
interface Light {
void on();
}
class Room {
}|]) `shouldBe` False
    it "is false when a primitive attribute is declared" $ do
usesObjectComposition (java [text|
interface Light {
void on();
}
class Room {
int size;
}|]) `shouldBe` False
describe "usesDynamicPolymorphism" $ do
    it "is True when used" $ do
usesDynamicPolymorphism (java [text|
class Bird { void sing() {} }
class Performer { void sing() {} }
class Festival { void run(Object o) { o.sing(); } }|]) `shouldBe` True
it "is False when there is just one implementor" $ do
usesDynamicPolymorphism (java [text|
class Bird { void sing() {} }
class Festival { void run(Object o) { o.sing(); } }|]) `shouldBe` False
it "is False when there is no user" $ do
usesDynamicPolymorphism (java [text|
class Bird { void sing() {} }
class Performer { void sing() {} }|]) `shouldBe` False
    it "is False when not used" $ do
usesDynamicPolymorphism (java [text|
class Sample { void aMethod() { throw new Exception(); } }|]) `shouldBe` False
describe "usesStaticPolymorphism'" $ do
let ast = (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Human {
int rockAge() { return 27; }
}
class Festival {
Singer o;
void run() { o.sing(); }
}|])
it "is False when tested on a class that does not use it" $ do
decontextualize (contextualized (scoped "Human") usesStaticPolymorphism') ast `shouldBe` False
it "is True when tested on a class that uses it" $ do
decontextualize (contextualized (scoped "Festival") usesStaticPolymorphism') ast `shouldBe` True
describe "usesStaticPolymorphism" $ do
it "is False when a scope is used" $ do
let ast = (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Human {
int rockAge() { return 27; }
}
class Festival {
Singer o;
void run() { o.sing(); }
}|])
scoped "Human" usesStaticPolymorphism ast `shouldBe` False
scoped "Festival" usesStaticPolymorphism ast `shouldBe` False
it "is True when there is an usage of an interface in an attribute implemented by two or more classes" $ do
usesStaticPolymorphism (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Festival {
Singer o;
void run() { o.sing(); }
}|]) `shouldBe` True
it "is True when there is an usage of an interface in a parameter implemented by two or more classes" $ do
usesStaticPolymorphism (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Festival {
void run(Singer o) { o.sing(); }
}|]) `shouldBe` True
it "is False when there is an usage of an interface implemented by just one class" $ do
usesStaticPolymorphism (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Festival {
void run(Singer o) { o.sing(); }
}|]) `shouldBe` False
    it "is False when there is no interface" $ do
usesStaticPolymorphism (java [text|
class Bird {
void sing() {}
}
class Performer {
void sing() {}
}
class Festival {
void run(Singer o) { o.sing(); }
}|]) `shouldBe` False
    it "is True even when no message is sent to an attribute" $ do
usesStaticPolymorphism (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Festival {
Singer o = new Bird();
void run() {}
}|]) `shouldBe` True
    it "is True even when no message is sent to a parameter" $ do
usesStaticPolymorphism (java [text|
interface Singer {
void sing();
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Festival {
void run(Singer o) {}
}|]) `shouldBe` True
it "is False when interface declares no method" $ do
usesStaticPolymorphism (java [text|
interface Singer {
}
class Bird implements Singer {
void sing() {}
}
class Performer implements Singer {
void sing() {}
}
class Festival {
Singer o;
void run() { }
}|]) `shouldBe` False
| mumuki/mulang | spec/PolymorphismSpec.hs | gpl-3.0 | 9,768 | 0 | 18 | 3,703 | 1,052 | 559 | 493 | 96 | 1 |
import Test.QuickCheck
import Data.List
myCompress' :: Eq a => [a] -> [a] -> [a]
myCompress' c [] = c
myCompress' [] x = myCompress' [head x] (tail x)
myCompress' c x =
case head x == last c of
True -> myCompress' c (tail x)
False -> myCompress' (c ++ [head x]) (tail x)
myCompress :: Eq a => [a] -> [a]
myCompress x = myCompress' [] x
testMyCompress :: [Char] -> Bool
testMyCompress xs = myCompress xs == [head x | x <- group xs]
main = quickCheck testMyCompress
| CmdrMoozy/haskell99 | 008.hs | gpl-3.0 | 473 | 2 | 12 | 99 | 243 | 122 | 121 | 14 | 2 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : LDLogic
Description : Linear Dynamic Logic
-}
module LDLogic where
import Logic
import Reg
import Prelude hiding (negate)
-- We'll define our own negate.
import Control.Newtype
-- * Ordinary LDLogic (Linear Dynamic Logic)
-- ** Reg type for LDLogic
newtype Reg p = Reg (Reg_T p (LDLogic p))
instance Newtype (Reg p) (Reg_T p (LDLogic p)) where
pack = Reg
unpack (Reg x) = x
--deriving instance Show p => Show (Reg p)
diamond :: Reg p -> LDLogic p -> LDLogic p
diamond = Unary
square :: Reg p -> LDLogic p -> LDLogic p
square reg l = Not $ diamond reg $ Not l
-- ** LDLogic itself
type LDLogic p = PropLogic p (Reg p) Void
ltl2ldl :: LTLogic p -> LDLogic p
ltl2ldl = foldLog PropConst PropVar Not And Or u b where
u Next ld = diamond (Reg trueReg) ld
b Until ld1 ld2 = diamond (Reg (Star (Comp (Test ld1) trueReg))) ld2
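-- For reference (added comment): this is the standard embedding of LTL into
-- LDL: X phi becomes <true>phi, and (phi U psi) becomes <(phi? ; true)*>psi,
-- which is what the two clauses above build with 'diamond', 'Test', 'Comp'
-- and 'Star'.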
ldl_propositions :: LDLogic p -> [p]
ldl_propositions = propositions urr (error "binary modality") where
urr reg ps = reg_propositions reg ++ ps
reg_propositions :: Reg p -> [p]
reg_propositions = (foldReg basic_propositions ldl_propositions (++) (++) id) . op Reg
basic_propositions :: BasicPropLogic p -> [p]
basic_propositions = propositions (error "unary modality") (error "binary modality")
-- * LDLogicNNF (LDL in Negation Normal Form)
-- ** Reg type for LDLogicNNF
newtype RegNNF p = RegNNF (Reg_T p (LDLogicNNF p)) deriving (Eq, Ord, Show)
instance Newtype (RegNNF p) (Reg_T p (LDLogicNNF p)) where
pack = RegNNF
unpack (RegNNF x) = x
-- ** LDLogicNNF itself and accompanying helper functions
-- |Self explanatory, I hope...
class Negatable a where
negate :: a -> a
{-|
To translate LDL to AFAs, we will need negation normal-form (nnf) LDL.
This will have literals rather than propositions and square as a built in operator rather than derived from diamond (DOS).
-}
type LDLogicNNF p = PropLogic (Literal p) (DOS p) Void
instance Negatable (LDLogicNNF p) where
negate = foldLog cr pr nr arr orr ur br where
cr c = PropConst (not c)
pr lit = PropVar (negate lit)
nr = id -- this is the 'double negatives cancel' part
arr log1 log2 = Or log1 log2 -- Ors turn into Ands and vice versa
orr log1 log2 = And log1 log2
ur dos log1 = Unary (negate dos) log1 -- Square modalities turn into Diamonds and vice versa
br b log1 log2 = error "Should not be any binary modalities in negateNNF"
-- |Literal of p
data Literal p = Positive p | Negative p deriving (Eq, Ord)
instance Negatable (Literal p) where
negate (Positive x) = (Negative x)
negate (Negative x) = (Positive x)
instance Show p => Show (Literal p) where
show (Positive x) = show x
show (Negative x) = "!" ++ show x
-- |DOS: Diamond or Square modality.
data DOS p = Diamond (RegNNF p) | Square (RegNNF p) deriving (Eq, Ord, Show)
instance Negatable (DOS p) where
negate (Diamond x) = (Square x)
negate (Square x) = (Diamond x)
-- ** Translation of LDL to NNF (including accompanying Reg to NNF translation)
ldl2nnf :: LDLogic p -> LDLogicNNF p
ldl2nnf = foldLog PropConst (PropVar . Positive) negate And Or ur br where
ur reg log1 = Unary (Diamond (reg2nnf reg)) log1
br b log1 log2 = error "Should not be any binary modalities"
reg2nnf :: Reg p -> RegNNF p
{-
reg2nnf (Reg x) = RegNNF $ (foldReg Base fr Plus Comp Star) x where -- Reg and RegNNF wrapping and unwrapping the newtypes
fr = Test . ldl2nnf
-}
reg2nnf = over Reg (foldReg Base fr Plus Comp Star) where
fr = Test . ldl2nnf
-- do we have to nnf the BasicPropLogic? Not sure. Well I don't think so.
{-
-- ** Eq and Ord derivation
deriving instance Ord p => Ord (LDLogicNNF p)
deriving instance Ord p => Ord (RegNNF p)
deriving instance Ord p => Ord (Literal p)
deriving instance Ord p => Ord (DOS p)
deriving instance Eq p => Eq (LDLogicNNF p)
deriving instance Eq p => Eq (RegNNF p)
deriving instance Eq p => Eq (Literal p)
deriving instance Eq p => Eq (DOS p)
-}
| passionfruit18/ldl_exposition | src/LDLogic.hs | gpl-3.0 | 4,075 | 0 | 15 | 832 | 1,089 | 565 | 524 | 63 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidManagement.Enterprises.EnrollmentTokens.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates an enrollment token for a given enterprise.
--
-- /See:/ <https://developers.google.com/android/management Android Management API Reference> for @androidmanagement.enterprises.enrollmentTokens.create@.
module Network.Google.Resource.AndroidManagement.Enterprises.EnrollmentTokens.Create
(
-- * REST Resource
EnterprisesEnrollmentTokensCreateResource
-- * Creating a Request
, enterprisesEnrollmentTokensCreate
, EnterprisesEnrollmentTokensCreate
-- * Request Lenses
, eetcParent
, eetcXgafv
, eetcUploadProtocol
, eetcAccessToken
, eetcUploadType
, eetcPayload
, eetcCallback
) where
import Network.Google.AndroidManagement.Types
import Network.Google.Prelude
-- | A resource alias for @androidmanagement.enterprises.enrollmentTokens.create@ method which the
-- 'EnterprisesEnrollmentTokensCreate' request conforms to.
type EnterprisesEnrollmentTokensCreateResource =
"v1" :>
Capture "parent" Text :>
"enrollmentTokens" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] EnrollmentToken :>
Post '[JSON] EnrollmentToken
-- | Creates an enrollment token for a given enterprise.
--
-- /See:/ 'enterprisesEnrollmentTokensCreate' smart constructor.
data EnterprisesEnrollmentTokensCreate =
EnterprisesEnrollmentTokensCreate'
{ _eetcParent :: !Text
, _eetcXgafv :: !(Maybe Xgafv)
, _eetcUploadProtocol :: !(Maybe Text)
, _eetcAccessToken :: !(Maybe Text)
, _eetcUploadType :: !(Maybe Text)
, _eetcPayload :: !EnrollmentToken
, _eetcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EnterprisesEnrollmentTokensCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eetcParent'
--
-- * 'eetcXgafv'
--
-- * 'eetcUploadProtocol'
--
-- * 'eetcAccessToken'
--
-- * 'eetcUploadType'
--
-- * 'eetcPayload'
--
-- * 'eetcCallback'
enterprisesEnrollmentTokensCreate
:: Text -- ^ 'eetcParent'
-> EnrollmentToken -- ^ 'eetcPayload'
-> EnterprisesEnrollmentTokensCreate
enterprisesEnrollmentTokensCreate pEetcParent_ pEetcPayload_ =
EnterprisesEnrollmentTokensCreate'
{ _eetcParent = pEetcParent_
, _eetcXgafv = Nothing
, _eetcUploadProtocol = Nothing
, _eetcAccessToken = Nothing
, _eetcUploadType = Nothing
, _eetcPayload = pEetcPayload_
, _eetcCallback = Nothing
}
-- | The name of the enterprise in the form enterprises\/{enterpriseId}.
eetcParent :: Lens' EnterprisesEnrollmentTokensCreate Text
eetcParent
= lens _eetcParent (\ s a -> s{_eetcParent = a})
-- | V1 error format.
eetcXgafv :: Lens' EnterprisesEnrollmentTokensCreate (Maybe Xgafv)
eetcXgafv
= lens _eetcXgafv (\ s a -> s{_eetcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
eetcUploadProtocol :: Lens' EnterprisesEnrollmentTokensCreate (Maybe Text)
eetcUploadProtocol
= lens _eetcUploadProtocol
(\ s a -> s{_eetcUploadProtocol = a})
-- | OAuth access token.
eetcAccessToken :: Lens' EnterprisesEnrollmentTokensCreate (Maybe Text)
eetcAccessToken
= lens _eetcAccessToken
(\ s a -> s{_eetcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
eetcUploadType :: Lens' EnterprisesEnrollmentTokensCreate (Maybe Text)
eetcUploadType
= lens _eetcUploadType
(\ s a -> s{_eetcUploadType = a})
-- | Multipart request metadata.
eetcPayload :: Lens' EnterprisesEnrollmentTokensCreate EnrollmentToken
eetcPayload
= lens _eetcPayload (\ s a -> s{_eetcPayload = a})
-- | JSONP
eetcCallback :: Lens' EnterprisesEnrollmentTokensCreate (Maybe Text)
eetcCallback
= lens _eetcCallback (\ s a -> s{_eetcCallback = a})
instance GoogleRequest
EnterprisesEnrollmentTokensCreate
where
type Rs EnterprisesEnrollmentTokensCreate =
EnrollmentToken
type Scopes EnterprisesEnrollmentTokensCreate =
'["https://www.googleapis.com/auth/androidmanagement"]
requestClient EnterprisesEnrollmentTokensCreate'{..}
= go _eetcParent _eetcXgafv _eetcUploadProtocol
_eetcAccessToken
_eetcUploadType
_eetcCallback
(Just AltJSON)
_eetcPayload
androidManagementService
where go
= buildClient
(Proxy ::
Proxy EnterprisesEnrollmentTokensCreateResource)
mempty
| brendanhay/gogol | gogol-androidmanagement/gen/Network/Google/Resource/AndroidManagement/Enterprises/EnrollmentTokens/Create.hs | mpl-2.0 | 5,644 | 0 | 17 | 1,243 | 779 | 454 | 325 | 118 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE PackageImports #-}
--
-- Copyright (c) 2009-2014 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
module Test.Framework.Preprocessor (
transform, progName, preprocessorTests
) where
-- import Debug.Trace
import Control.Monad
import Data.Char
import "haskell-lexer" Language.Haskell.Lexer
import Language.Preprocessor.Cpphs ( runCpphs,
CpphsOptions(..),
BoolOptions(..),
defaultCpphsOptions)
import System.IO ( hPutStrLn, stderr )
import Test.HUnit hiding (State, Location)
import Control.Monad.State.Strict
import qualified Data.List as List
import Data.Maybe
import Test.Framework.Location
_DEBUG_ :: Bool
_DEBUG_ = False
progName :: String
progName = "htfpp"
htfModule :: String
htfModule = "Test.Framework"
mkName varName fullModuleName =
"htf_" ++
map (\c -> if c == '.' then '_' else c)
(fullModuleName ++ "." ++
(case varName of
'h':'t':'f':'_':s -> s
s -> s))
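-- For example (added comment):
-- mkName "htf_thisModulesTests" "Foo.Bar" == "htf_Foo_Bar_thisModulesTests"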
thisModulesTestsFullName :: String -> String
thisModulesTestsFullName = mkName thisModulesTestsName
importedTestListFullName :: String -> String
importedTestListFullName = mkName importedTestListName
thisModulesTestsName :: String
thisModulesTestsName = "htf_thisModulesTests"
importedTestListName :: String
importedTestListName = "htf_importedTests"
nameDefines :: ModuleInfo -> [(String, String)]
nameDefines info =
[(thisModulesTestsName, thisModulesTestsFullName (mi_moduleNameWithDefault info)),
(importedTestListName, importedTestListFullName (mi_moduleNameWithDefault info))]
allAsserts :: [String]
allAsserts =
withGs ["assertBool"
,"assertEqual"
,"assertEqualPretty"
,"assertEqualNoShow"
,"assertNotEqual"
,"assertNotEqualPretty"
,"assertNotEqualNoShow"
,"assertListsEqualAsSets"
,"assertElem"
,"assertEmpty"
,"assertNotEmpty"
,"assertLeft"
,"assertLeftNoShow"
,"assertRight"
,"assertRightNoShow"
,"assertJust"
,"assertNothing"
,"assertNothingNoShow"
,"subAssert"
,"subAssertVerbose"
] ++ ["assertThrows"
,"assertThrowsSome"
,"assertThrowsIO"
,"assertThrowsSomeIO"
,"assertThrowsM"
,"assertThrowsSomeM"]
where
withGs l =
concatMap (\s -> [s, 'g':s]) l
assertDefines :: Bool -> String -> [(String, String)]
assertDefines hunitBackwardsCompat prefix =
concatMap fun allAsserts ++ [("assertFailure", expansion "assertFailure" "_")]
where
fun a =
if hunitBackwardsCompat
then [(a, expansion a "Verbose_"), (a ++ "HTF", expansion a "_")]
else [(a, expansion a "_"), (a ++ "Verbose", expansion a "Verbose_")]
expansion a suffix = "(" ++ prefix ++ a ++ suffix ++ " (" ++
prefix ++ "makeLoc __FILE__ __LINE__))"
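-- For illustration (added comment): with an empty prefix and
-- hunitBackwardsCompat = False, the entry for "assertEqual" maps to
-- "(assertEqual_ (makeLoc __FILE__ __LINE__))", so cpphs rewrites each
-- occurrence of assertEqual into a call that records its source location.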
data ModuleInfo = ModuleInfo { mi_htfPrefix :: String
, mi_htfImports :: [ImportDecl]
, mi_defs :: [Definition]
, mi_moduleName :: Maybe String }
deriving (Show, Eq)
mi_moduleNameWithDefault :: ModuleInfo -> String
mi_moduleNameWithDefault = fromMaybe "Main" . mi_moduleName
data ImportDecl = ImportDecl { imp_moduleName :: Name
, imp_qualified :: Bool
, imp_alias :: Maybe Name
, imp_loc :: Location }
deriving (Show, Eq)
data Definition = TestDef String Location String
| PropDef String Location String
deriving (Eq, Show)
type Name = String
type PMA a = State ModuleInfo a
setModName :: String -> PMA ()
setModName name =
do oldName <- gets mi_moduleName
when (isNothing oldName) $ modify $ \mi -> mi { mi_moduleName = Just name }
addTestDef :: String -> String -> Location -> PMA ()
addTestDef name fullName loc =
modify $ \mi -> mi { mi_defs = (TestDef name loc fullName) : mi_defs mi }
addPropDef :: String -> String -> Location -> PMA ()
addPropDef name fullName loc =
modify $ \mi -> mi { mi_defs = (PropDef name loc fullName) : mi_defs mi }
addHtfImport :: ImportDecl -> PMA ()
addHtfImport decl =
modify $ \mi -> mi { mi_htfImports = decl : mi_htfImports mi }
setTestFrameworkImport :: String -> PMA ()
setTestFrameworkImport name =
modify $ \mi -> mi { mi_htfPrefix = name }
poorManAnalyzeTokens :: [LocToken] -> ModuleInfo
poorManAnalyzeTokens toks =
-- show toks `trace`
let revRes =
execState (loop toks) $
ModuleInfo { mi_htfPrefix = htfModule ++ "."
, mi_htfImports = []
, mi_defs = []
, mi_moduleName = Nothing }
in ModuleInfo { mi_htfPrefix = mi_htfPrefix revRes
, mi_htfImports = reverse (mi_htfImports revRes)
, mi_defs = reverse $ List.nubBy defEqByName (mi_defs revRes)
, mi_moduleName = mi_moduleName revRes
}
where
defEqByName (TestDef n1 _ _) (TestDef n2 _ _) = n1 == n2
defEqByName (PropDef n1 _ _) (PropDef n2 _ _) = n1 == n2
defEqByName _ _ = False
loop toks =
case toks of
(Reservedid, (_, "module")) : rest ->
case rest of
(Conid, (_, name)):rest2 ->
do setModName name
loop rest2
(Qconid, (_, name)):rest2 ->
do setModName name
loop rest2
_ -> loop rest
(Varid, (loc, name)) : rest
| isStartOfLine loc ->
case name of
't':'e':'s':'t':'_':shortName ->
do addTestDef shortName name (locToLocation loc)
loop rest
'p':'r':'o':'p':'_':shortName ->
do addPropDef shortName name (locToLocation loc)
loop rest
_ -> loop rest
| otherwise -> loop rest
(Special, (loc, "import_HTF_TESTS")) : rest ->
case parseImport loc rest of
Just (imp, rest2) ->
do addHtfImport imp
loop rest2
Nothing -> loop rest
(Reservedid, (loc, "import")) : rest ->
do case parseImport loc rest of
Nothing -> loop rest
Just (imp, rest2) ->
do when (imp_moduleName imp == htfModule) $
let prefix = case (imp_alias imp, imp_qualified imp) of
(Just alias, True) -> alias
(Nothing, True) -> imp_moduleName imp
_ -> ""
in setTestFrameworkImport
(if null prefix then prefix else prefix ++ ".")
loop rest2
_ : rest -> loop rest
[] -> return ()
parseImport loc toks =
do let (qualified, toks2) =
case toks of
(Varid, (_, "qualified")):rest -> (True, rest)
_ -> (False, toks)
(name, toks3) <-
case toks2 of
(Conid, (_, name)):rest -> return (name, rest)
(Qconid, (_, name)):rest -> return (name, rest)
_ -> fail "no import"
let (mAlias, toks4) =
case toks3 of
(Varid, (_, "as")):(Conid, (_, alias)):rest -> (Just alias, rest)
_ -> (Nothing, toks3)
decl = ImportDecl { imp_moduleName = name
, imp_qualified = qualified
, imp_alias = mAlias
, imp_loc = locToLocation loc }
return (decl, toks4)
locToLocation loc =
makeLoc (l_file loc) (l_line loc)
isStartOfLine loc =
l_column loc == 1
cleanupTokens :: [PosToken] -> [PosToken]
cleanupTokens toks =
-- Remove whitespace tokens, remove comments, but replace
-- 'import {-@ HTF_TESTS @-}' with a single
-- token Special with value "import_HTF_TESTS"
case toks of
(Whitespace, _):rest -> cleanupTokens rest
(NestedComment, (loc, "{-@ HTF_TESTS @-}")) : rest ->
(Special, (loc, "import_HTF_TESTS")) :
cleanupTokens rest
tok:rest -> tok : cleanupTokens rest
[] -> []
-- char -> ' (graphic<' | \> | space | escape<\&>) '
-- graphic -> small | large | symbol | digit | special | : | " | '
-- escape -> \ ( charesc | ascii | decimal | o octal | x hexadecimal )
-- charesc -> a | b | f | n | r | t | v | \ | " | ' | &
-- ascii -> ^cntrl | NUL | SOH | STX | ETX | EOT | ENQ | ACK
-- | BEL | BS | HT | LF | VT | FF | CR | SO | SI | DLE
-- | DC1 | DC2 | DC3 | DC4 | NAK | SYN | ETB | CAN
-- | EM | SUB | ESC | FS | GS | RS | US | SP | DEL
-- cntrl -> ascLarge | @ | [ | \ | ] | ^ | _
-- decimal -> digit{digit}
-- octal -> octit{octit}
-- hexadecimal -> hexit{hexit}
-- octit -> 0 | 1 | ... | 7
-- hexit -> digit | A | ... | F | a | ... | f
-- special -> ( | ) | , | ; | [ | ] | `| { | }
-- Purpose of cleanupInputString: filter out template Haskell quotes
cleanupInputString :: String -> String
cleanupInputString s =
case s of
c:'\'':'\'':x:rest
| isSpace c && isUpper x -> -- TH type quote
c:x:cleanupInputString rest
c:'\'':'\'':d:rest
| not (isAlphaNum c) || not (isAlphaNum d)
-> c:'\'':'x':'\'':d:rest
'\'':rest ->
case characterLitRest rest of
Just (restLit, rest') ->
'\'':restLit ++ cleanupInputString rest'
Nothing ->
'\'':cleanupInputString rest
c:'\'':x:rest -- TH name quote
| isSpace c && isNothing (characterLitRest (x:rest)) && isLower x ->
c:x:cleanupInputString rest
c:rest
| not (isSpace c) ->
case span (== '\'') rest of
(quotes, rest') -> c : quotes ++ cleanupInputString rest'
c:rest -> c : cleanupInputString rest
[] -> []
where
characterLitRest s = -- expects that before the s there is a '
case s of
'\\':'\'':'\'':rest -> Just ("\\''", rest) -- '\''
c:'\'':rest -> Just (c:"'", rest) -- regular character lit
'\\':rest ->
case span (/= '\'') rest of
(esc,'\'':rest) ->
Just (('\\':esc) ++ "'", rest)
_ -> Just (s, "") -- should not happen
_ -> Nothing
cleanupInputStringTest =
do flip mapM_ untouched $ \s ->
let cleanedUp = cleanupInputString s
in if s /= cleanedUp
then assertFailure ("Cleanup of " ++ show s ++ " is wrong: " ++ show cleanedUp ++
", expected that input and output are the same")
else return ()
flip mapM_ touched $ \(input, output) ->
let cleanedUp = cleanupInputString input
in if output /= cleanedUp
then assertFailure ("Cleanup of " ++ show input ++ " is wrong: " ++ show cleanedUp
++ ", expected " ++ show output)
else return ()
where
untouched = [" '0'", " '\\''", " ' '", " '\o761'", " '\BEL'", " '\^@' ", "' '", "fixed' ' '"]
touched = [(" 'foo abc", " foo abc"), (" ''T ", " T ")]
type LocToken = (Token,(Loc,String))
data Loc
= Loc
{ l_file :: FilePath
, l_line :: Int
, l_column :: Int
}
deriving (Eq, Show)
-- token stream should not contain whitespace
fixPositions :: FilePath -> [PosToken] -> [LocToken]
fixPositions originalFileName = loop Nothing
where
loop mPragma toks =
case toks of
[] -> []
(Varsym, (pos, "#")) : (Varid, (_, "line")) : (IntLit, (_, lineNo)) : (StringLit,(_, fileName)) : rest
| column pos == 1 ->
map (\(tt, (pos, x)) -> (tt, (fixPos Nothing pos, x))) (take 4 toks) ++
loop (Just (line pos, fileName, read lineNo)) rest
(tt, (pos, x)) : rest ->
(tt, (fixPos mPragma pos, x)) : loop mPragma rest
fixPos mPragma pos =
case mPragma of
Nothing ->
Loc { l_column = column pos
, l_file = originalFileName
, l_line = line pos
}
Just (lineActivated, fileName, lineNo) ->
let offset = line pos - lineActivated - 1
in Loc { l_column = column pos
, l_file = fileName
, l_line = lineNo + offset
}
fixPositionsTest :: IO ()
fixPositionsTest =
let toks = concatMap (\(f, i) -> f i)
(zip [tok, linePragma "bar" 10, tok, tok, linePragma "foo" 99, tok] [1..])
fixedToks = fixPositions origFileName toks
expectedToks = concat $
[tok' origFileName 1
,linePragma' "bar" 10 2
,tok' "bar" 10
,tok' "bar" 11
,linePragma' "foo" 99 5
,tok' "foo" 99]
in assertEqual (show expectedToks ++ "\n\n /= \n\n" ++ show toks) expectedToks fixedToks
where
origFileName = "spam"
tok line = [(Varid, (Pos 0 line 1, "_"))]
linePragma fname line lineHere =
let pos = Pos 0 lineHere 1
in [(Varsym, (pos, "#"))
,(Varid, (pos, "line"))
,(IntLit, (pos, show line))
,(StringLit, (pos, fname))]
tok' fname line =
let loc = Loc fname line 1
in [(Varid, (loc, "_"))]
linePragma' fname line lineHere =
let loc = Loc origFileName lineHere 1
in [(Varsym, (loc, "#"))
,(Varid, (loc, "line"))
,(IntLit, (loc, show line))
,(StringLit,(loc, fname))]
analyze :: FilePath -> String -> ModuleInfo
analyze originalFileName input =
poorManAnalyzeTokens (fixPositions originalFileName (cleanupTokens (lexerPass0 (cleanupInputString input))))
analyzeTests =
[(unlines ["module FOO where"
,"import Test.Framework"
,"import {-@ HTF_TESTS @-} qualified Foo as Bar"
,"import {-@ HTF_TESTS @-} qualified Foo.X as Egg"
,"import {-@ HTF_TESTS @-} Foo.Y as Spam"
,"import {-@ HTF_TESTS @-} Foo.Z"
,"import {-@ HTF_TESTS @-} Baz"
,"deriveSafeCopy 1 'base ''T"
,"$(deriveSafeCopy 2 'extension ''T)"
,"test_blub test_foo = 1"
,"test_blah test_foo = 1"
,"prop_abc prop_foo = 2"
,"prop_xyz = True"]
,ModuleInfo { mi_htfPrefix = ""
, mi_htfImports =
[ImportDecl { imp_moduleName = "Foo"
, imp_qualified = True
, imp_alias = Just "Bar"
, imp_loc = makeLoc "<input>" 3}
,ImportDecl { imp_moduleName = "Foo.X"
, imp_qualified = True
, imp_alias = Just "Egg"
, imp_loc = makeLoc "<input>" 4}
,ImportDecl { imp_moduleName = "Foo.Y"
, imp_qualified = False
, imp_alias = Just "Spam"
, imp_loc = makeLoc "<input>" 5}
,ImportDecl { imp_moduleName = "Foo.Z"
, imp_qualified = False
, imp_alias = Nothing
, imp_loc = makeLoc "<input>" 6}
,ImportDecl { imp_moduleName = "Baz"
, imp_qualified = False
, imp_alias = Nothing
, imp_loc = makeLoc "<input>" 7}]
, mi_moduleName = Just "FOO"
, mi_defs = [TestDef "blub" (makeLoc "<input>" 10) "test_blub"
,TestDef "blah" (makeLoc "<input>" 11) "test_blah"
,PropDef "abc" (makeLoc "<input>" 12) "prop_abc"
,PropDef "xyz" (makeLoc "<input>" 13) "prop_xyz"]
})
,(unlines ["module Foo.Bar where"
,"import Test.Framework as Blub"
,"prop_xyz = True"]
,ModuleInfo { mi_htfPrefix = ""
, mi_htfImports = []
, mi_moduleName = Just "Foo.Bar"
, mi_defs = [PropDef "xyz" (makeLoc "<input>" 3) "prop_xyz"]
})
,(unlines ["module Foo.Bar where"
,"import qualified Test.Framework as Blub"
,"prop_xyz = True"]
,ModuleInfo { mi_htfPrefix = "Blub."
, mi_htfImports = []
, mi_moduleName = Just "Foo.Bar"
, mi_defs = [PropDef "xyz" (makeLoc "<input>" 3) "prop_xyz"]
})
,(unlines ["module Foo.Bar where"
,"import qualified Test.Framework"
,"prop_xyz = True"]
,ModuleInfo { mi_htfPrefix = "Test.Framework."
, mi_htfImports = []
, mi_moduleName = Just "Foo.Bar"
, mi_defs = [PropDef "xyz" (makeLoc "<input>" 3) "prop_xyz"]
})]
testAnalyze =
do mapM_ runTest (zip [1..] analyzeTests)
where
runTest (i, (src, mi)) =
let givenMi = analyze "<input>" src
in if givenMi == mi
then return ()
else assertFailure ("Error in test " ++ show i ++
", expected:\n" ++ show mi ++
"\nGiven:\n" ++ show givenMi ++
"\nSrc:\n" ++ src)
transform :: Bool -> Bool -> FilePath -> String -> IO String
transform hunitBackwardsCompat debug originalFileName input =
let info = analyze originalFileName fixedInput
in preprocess info
where
preprocess :: ModuleInfo -> IO String
preprocess info =
do when debug $ hPutStrLn stderr ("Module info:\n" ++ show info)
preProcessedInput <- runCpphs (cpphsOptions info) originalFileName
fixedInput
return $ preProcessedInput ++ "\n\n" ++ additionalCode info ++ "\n"
-- fixedInput serves two purposes:
-- 1. add a trailing \n
-- 2. turn lines of the form '# <number> "<filename>"' into line directives '#line <number> <filename>'
-- (see http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html#Preprocessor-Output).
fixedInput :: String
fixedInput = (unlines . map fixLine . lines) input
where
fixLine s =
case parseCppLineInfoOut s of
Just (line, fileName) -> "#line " ++ line ++ " " ++ fileName
_ -> s
cpphsOptions :: ModuleInfo -> CpphsOptions
cpphsOptions info =
defaultCpphsOptions { defines =
defines defaultCpphsOptions ++
assertDefines hunitBackwardsCompat (mi_htfPrefix info) ++
nameDefines info
, boolopts = (boolopts defaultCpphsOptions) { lang = True } -- lex as haskell
}
additionalCode :: ModuleInfo -> String
additionalCode info =
thisModulesTestsFullName (mi_moduleNameWithDefault info) ++ " :: " ++
mi_htfPrefix info ++ "TestSuite\n" ++
thisModulesTestsFullName (mi_moduleNameWithDefault info) ++ " = " ++
mi_htfPrefix info ++ "makeTestSuite" ++
" " ++ show (mi_moduleNameWithDefault info) ++
" [\n" ++ List.intercalate ",\n"
(map (codeForDef (mi_htfPrefix info)) (mi_defs info))
++ "\n ]\n" ++ importedTestListCode info
codeForDef :: String -> Definition -> String
codeForDef pref (TestDef s loc name) =
locPragma loc ++ pref ++ "makeUnitTest " ++ (show s) ++ " " ++ codeForLoc pref loc ++
" " ++ name
codeForDef pref (PropDef s loc name) =
locPragma loc ++ pref ++ "makeQuickCheckTest " ++ (show s) ++ " " ++
codeForLoc pref loc ++ " (" ++ pref ++ "qcAssertion " ++ name ++ ")"
locPragma :: Location -> String
locPragma loc =
"{-# LINE " ++ show (lineNumber loc) ++ " " ++ show (fileName loc) ++ " #-}\n "
codeForLoc :: String -> Location -> String
codeForLoc pref loc = "(" ++ pref ++ "makeLoc " ++ show (fileName loc) ++
" " ++ show (lineNumber loc) ++ ")"
importedTestListCode :: ModuleInfo -> String
importedTestListCode info =
let l = mi_htfImports info
in case l of
[] -> ""
_ -> (importedTestListFullName (mi_moduleNameWithDefault info)
++ " :: [" ++ mi_htfPrefix info ++ "TestSuite]\n" ++
importedTestListFullName (mi_moduleNameWithDefault info)
++ " = [\n " ++
List.intercalate ",\n " (map htfTestsInModule l) ++
"\n ]\n")
htfTestsInModule :: ImportDecl -> String
htfTestsInModule imp = qualify imp (thisModulesTestsFullName (imp_moduleName imp))
qualify :: ImportDecl -> String -> String
qualify imp name =
case (imp_qualified imp, imp_alias imp) of
(False, _) -> name
(True, Just alias) -> alias ++ "." ++ name
(True, _) -> imp_moduleName imp ++ "." ++ name
-- Returns for lines of the form '# <number> "<filename>"'
-- (see http://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html#Preprocessor-Output)
-- the value 'Just <number> "<filename>"'
parseCppLineInfoOut :: String -> Maybe (String, String)
parseCppLineInfoOut line =
case line of
'#':' ':c:rest
| isDigit c ->
case List.span isDigit rest of
(restDigits, ' ' : '"' : rest) ->
case dropWhile (/= '"') (reverse rest) of
'"' : fileNameRev ->
let line = (c:restDigits)
file = "\"" ++ reverse fileNameRev ++ "\""
in Just (line, file)
_ -> Nothing
_ -> Nothing
_ -> Nothing
preprocessorTests =
[("testAnalyze", testAnalyze)
,("fixPositionsTest", fixPositionsTest)
,("cleanupInputStringTest", cleanupInputStringTest)]
| ekarayel/HTF | Test/Framework/Preprocessor.hs | lgpl-2.1 | 24,270 | 0 | 26 | 9,227 | 5,802 | 3,122 | 2,680 | 472 | 21 |
{-# LANGUAGE Safe, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.GroupWiths
-- Copyright : (c) Uli Köhler 2014
-- License : Apache License v2.0
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- A collection of grouping utility functions.
-- For a given function that assigns a key to objects,
-- provides functions that group said objects into a multimap
-- by said key.
--
-- This can be used similarly to the SQL GROUP BY statement.
--
-- Provides a more flexible approach to GHC.Exts.groupWith
--
-- > groupWith (take 1) ["a","ab","bc"] == Map.fromList [("a",["a","ab"]), ("b",["bc"])]
--
-- In order to use monadic / applicative functions as key generators,
-- use the A- or M-postfixed variants like 'groupWithA' or 'groupWithMultipleM'
--
--
--
-----------------------------------------------------------------------------
module Control.GroupWith(
MultiMap,
groupWith,
groupWithMultiple,
groupWithUsing,
groupWithA,
groupWithM,
groupWithMultipleM,
groupWithUsingM
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Control.Arrow (first, second)
import Control.Applicative (Applicative, (<$>), liftA2, pure)
import Data.Traversable (sequenceA)
type MultiMap a b = Map a [b]
-- | Group values in a list by a key, generated
-- by a given function. The resulting map contains,
-- for each generated key, the values from the given list
-- that yield that key when the function is applied to them.
groupWith :: (Ord b) =>
(a -> b) -- ^ The function used to map a list value to its key
-> [a] -- ^ The list to be grouped
-> MultiMap b a -- ^ The resulting key --> value multimap
groupWith f xs = Map.fromListWith (++) [(f x, [x]) | x <- xs]
-- | Like 'groupWith', but the identifier-generating function
-- may generate multiple keys for each value (or none at all).
-- The corresponding value from the original list will be placed
-- in the identifier-corresponding map entry for each generated
-- identifier.
-- Note that a value is added to the result once for every key generated from it.
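--
-- An illustrative example (not part of the original documentation):
--
-- > groupWithMultiple (\(a,b) -> [a,b]) [(1,2)] == Map.fromList [(1,[(1,2)]), (2,[(1,2)])]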
groupWithMultiple :: (Ord b) =>
(a -> [b]) -- ^ The function used to map a list value to its keys
-> [a] -- ^ The list to be grouped
-> MultiMap b a -- ^ The resulting map
groupWithMultiple f xs =
let identifiers x = [(val, [x]) | val <- vals] where vals = f x
in Map.fromListWith (++) $ concat [identifiers x | x <- xs]
-- | Like groupWith, but uses a custom combinator function
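--
-- For instance, counting occurrences per key (illustrative example):
--
-- > groupWithUsing (const 1) (+) (take 1) ["a","ab","bc"] == Map.fromList [("a",2),("b",1)]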
groupWithUsing :: (Ord b) =>
(a -> c) -- ^ Transformer function used to map a value to the resulting type
-> (c -> c -> c) -- ^ The combinator used to combine an existing value
-- for a given key with a new value
-> (a -> b) -- ^ The function used to map a list value to its key
-> [a] -- ^ The list to be grouped
-> Map b c -- ^ The resulting key --> transformed value map
groupWithUsing t c f xs = Map.fromListWith c $ map (\v -> (f v, t v)) xs
-- | Fuse the functor from a tuple
fuseT2 :: Applicative f => (f a, f b) -> f (a,b)
fuseT2 = uncurry $ liftA2 (,)
-- | Like 'fuseT2', but only requires the first element to be boxed in the functor
fuseFirst :: Applicative f => (f a, b) -> f (a,b)
fuseFirst = fuseT2 . second pure
-- | Move the applicative functor to the outermost level by first mapping
-- 'fuseFirst' and then applying 'Data.Traversable.sequenceA' to move
-- the functor outside the list
fuseFirstList :: Applicative f => [(f a, b)] -> f [(a,b)]
fuseFirstList = sequenceA . map fuseFirst
-- | Group values in a list by a key, generated by a given applicative function.
-- Applicative version of 'groupWith'. See 'groupWith' for documentation.
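--
-- With 'Maybe' as the applicative, a failing key computation makes the whole
-- result 'Nothing'. An illustrative example:
--
-- > groupWithA (Just . take 1) ["a","bc"] == Just (Map.fromList [("a",["a"]),("b",["bc"])])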
groupWithA :: (Ord b, Applicative f) =>
(a -> f b) -- ^ The function used to map a list value to its key
-> [a] -- ^ The list to be grouped
-> f (MultiMap b a) -- ^ The resulting key --> value multimap
groupWithA f xs =
Map.fromListWith (++) <$> fuseFirstList [(f x, [x]) | x <- xs]
-- | Alias for 'groupWithA', with additional monad constraint
groupWithM :: (Ord b, Monad m, Applicative m) =>
(a -> m b) -- ^ The function used to map a list value to its key
-> [a] -- ^ The list to be grouped
-> m (MultiMap b a) -- ^ The resulting key --> value multimap
groupWithM = groupWithA
-- | Like 'groupWithM', but the identifier-generating function
-- may generate multiple keys for each value (or none at all).
-- See 'groupWithMultiple' for further behavioural details.
--
-- Note that it's impossible to define this for applicatives:
-- See http://stackoverflow.com/a/6032260/2597135
groupWithMultipleM :: (Ord b, Monad m, Applicative m) =>
(a -> m [b]) -- ^ The function used to map a list value to its keys
-> [a] -- ^ The list to be grouped
-> m (MultiMap b a) -- ^ The resulting map
groupWithMultipleM f xs =
let identifiers x = (\vals -> [(val, [x]) | val <- vals]) <$> f x
idMap = concat <$> (mapM identifiers xs)
in Map.fromListWith (++) <$> idMap
-- | Like 'groupWithM', but uses a custom combinator function
groupWithUsingM :: (Ord b, Monad m, Applicative m) =>
(a -> m c) -- ^ Transformer function used to map a value to the resulting type
-> (c -> c -> c) -- ^ The combinator used to combine an existing value
-- for a given key with a new value
-> (a -> m b) -- ^ The function used to map a list value to its key
-> [a] -- ^ The list to be grouped
-> m (Map b c) -- ^ The resulting key --> transformed value map
groupWithUsingM t c f xs =
Map.fromListWith c <$> mapM (\v -> fuseT2 (f v, t v)) xs | kyclark/group-with | Control/GroupWith.hs | apache-2.0 | 5,997 | 0 | 14 | 1,526 | 1,102 | 630 | 472 | 68 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Startups.GameTypes where
import Startups.Base
import Startups.Cards
import Startups.PrettyPrint
import Control.Lens
import qualified Data.Text as T
import qualified Data.Map.Strict as M
import Control.Monad.Operational
import Control.Monad.State.Strict
import Control.Monad.Except
import Data.List.NonEmpty
import Control.Applicative
import System.Random
type PlayerId = T.Text
showPlayerId :: PlayerId -> PrettyDoc
showPlayerId = emph . pe
data GameState = GameState { _playermap :: M.Map PlayerId PlayerState
, _discardpile :: [Card]
, _rnd :: StdGen
}
type Neighborhood = (PlayerId, PlayerId)
data PlayerState = PlayerState { _pCompany :: CompanyProfile
, _pCompanyStage :: CompanyStage
, _pCards :: [Card]
, _pFunds :: Funding
, _pNeighborhood :: Neighborhood
, _pPoachingResults :: [PoachingOutcome]
}
makeLenses ''GameState
makeLenses ''PlayerState
cardEffects :: Traversal' PlayerState Effect
cardEffects = pCards . traverse . cEffect . traverse
playerEffects :: PlayerId -> Traversal' GameState Effect
playerEffects pid = playermap . ix pid . cardEffects
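-- For example, @toListOf (playerEffects pid) gameState@ collects every effect
-- granted by the given player's cards (illustrative usage only).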
neighbor :: Neighbor -> Lens' PlayerState PlayerId
neighbor NLeft = pNeighborhood . _1
neighbor NRight = pNeighborhood . _2
type Message = PrettyDoc
data PlayerAction = PlayerAction ActionType Card
deriving Eq
data ActionType = Play | Drop | BuildCompany
deriving Eq
_NonEmpty :: Prism' [a] (NonEmpty a)
_NonEmpty = prism' toList nonEmpty
-- | This describes the capabilities needed to write the rules, when no
-- interaction with the player is required.
type NonInteractive m = (MonadState GameState m, MonadError Message m, Functor m, Applicative m)
type GameStateOnly m = (MonadState GameState m, Functor m, Applicative m)
data CommunicationType = PlayerCom PlayerId Communication
| BroadcastCom Communication
data Communication = RawMessage PrettyDoc
| ActionRecapMsg Age Turn GameState (M.Map PlayerId (PlayerAction, Exchange))
data GameInstr p a where
PlayerDecision :: Age -> Turn -> PlayerId -> NonEmpty Card -> GameInstr p (p (PlayerAction, Exchange))
AskCard :: Age -> PlayerId -> NonEmpty Card -> Message -> GameInstr p (p Card)
GetPromise :: p a -> GameInstr p a
Message :: CommunicationType -> GameInstr p ()
ThrowError :: Message -> GameInstr p a -- ^ Used for the error instance
CatchError :: GameMonad p a -> (Message -> GameMonad p a) -> GameInstr p a
type GameMonad p = ProgramT (GameInstr p) (State GameState)
-- | Ask the player which card he would like to play.
playerDecision :: Age -> Turn -> PlayerId -> NonEmpty Card -> GameMonad p (p (PlayerAction, Exchange))
playerDecision a t p c = singleton (PlayerDecision a t p c)
-- | Tell some information to a specific player
tellPlayer :: PlayerId -> Message -> GameMonad p ()
tellPlayer p = singleton . Message . PlayerCom p . RawMessage
-- | Broadcast some information
generalMessage :: Message -> GameMonad p ()
generalMessage = singleton . Message . BroadcastCom . RawMessage
-- | Awaits a promise
getPromise :: p a -> GameMonad p a
getPromise = singleton . GetPromise
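-- For example (illustrative only), a rule can request a decision and then
-- block on the resulting promise:
--
--   (action, exchange) <- playerDecision age turn pid cards >>= getPromise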
-- | Gives a quick rundown of all actions
actionRecap :: Age -> Turn -> M.Map PlayerId (PlayerAction, Exchange) -> GameMonad p ()
actionRecap age turn mm = get >>= \s -> singleton . Message . BroadcastCom $ ActionRecapMsg age turn s mm
instance MonadError PrettyDoc (ProgramT (GameInstr p) (State GameState)) where
throwError = singleton . ThrowError
catchError a handler = singleton (CatchError a handler)
-- | Ask the player to choose a card, along with a descriptive message.
-- This is used for the Recycling and CopyCommunity effects.
-- We define a "safe" version of the `askCard` function that makes sure the
-- player doesn't introduce a new card into the game.
askCardSafe :: Age -> PlayerId -> NonEmpty Card -> Message -> GameMonad p Card
askCardSafe a p cl m = do
card <- singleton (AskCard a p cl m) >>= getPromise
when (card `notElem` (cl ^. re _NonEmpty)) (throwError (showPlayerId p <+> "tried to play a non proposed card"))
return card
instance PrettyE PlayerAction where
pe (PlayerAction a c) = a' <+> cardName c
where
a' = case a of
Play -> "played"
Drop -> "dropped"
BuildCompany -> "increase the company stage"
| bitemyapp/7startups | Startups/GameTypes.hs | bsd-3-clause | 4,991 | 0 | 13 | 1,269 | 1,194 | 644 | 550 | 88 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.EN_TT (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("Bhai Dooj",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = -2.5649493574615367, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("integer (numeric)noon|midnight|EOD|end of day",
-0.916290731874155),
("hour", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -8.004270767353637e-2,
unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("hour", -0.7308875085427924),
("integer (numeric)time-of-day (latent)", -0.7308875085427924)],
n = 12}}),
("week",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> timezone",
Classifier{okData =
ClassData{prior = -9.237332013101517e-2,
unseen = -4.23410650459726,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.4277482359480516),
("hhhmm", -1.65455834771457),
("<time-of-day> am|pm", -2.0222831278398874),
("hh:mm", -2.2735975561207935), ("hour", -1.821612432377736),
("minute", -1.128465251817791)],
n = 31},
koData =
ClassData{prior = -2.4277482359480516,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hhhmm", -1.3862943611198906), ("hh:mm", -1.791759469228055),
("minute", -1.0986122886681098)],
n = 3}}),
("Thursday",
Classifier{okData =
ClassData{prior = -7.79615414697118e-2,
unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -2.5902671654458267,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -0.53208562319284, unseen = -6.186208623900494,
likelihoods = HashMap.fromList [("", 0.0)], n = 484},
koData =
ClassData{prior = -0.8852249122992647, unseen = -5.834810737062605,
likelihoods = HashMap.fromList [("", 0.0)], n = 340}}),
("<year> (bc|ad)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the nth <day-of-week> of <month-or-greater>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -0.6931471805599453),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> hence|ago",
Classifier{okData =
ClassData{prior = -5.406722127027582e-2,
unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("week", -1.563975538357343), ("day", -1.8152899666382492),
("year", -2.662587827025453),
("<integer> <unit-of-duration>", -1.0531499145913523),
("a <unit-of-duration>", -2.662587827025453),
("month", -2.662587827025453),
("fortnight", -2.662587827025453)],
n = 18},
koData =
ClassData{prior = -2.9444389791664407,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("year", -1.5040773967762742),
("<integer> <unit-of-duration>", -1.5040773967762742)],
n = 1}}),
("noon|midnight|EOD|end of day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Karva Chauth",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<cycle> after|before <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day (grain)tomorrow", -1.6739764335716716),
("dayday", -1.1631508098056809),
("day (grain)yesterday", -1.6739764335716716)],
n = 4},
koData =
ClassData{prior = -0.6931471805599453, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dayhour", -1.6739764335716716),
("year (grain)Christmas", -2.0794415416798357),
("dayday", -2.0794415416798357),
("day (grain)intersect", -1.6739764335716716),
("day (grain)Easter Sunday", -2.0794415416798357),
("yearday", -2.0794415416798357)],
n = 4}}),
("Easter Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Navaratri",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Martin Luther King's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (20..90)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Shemini Atzeret",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <duration> at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("dayhour", -1.3862943611198906),
("yearhour", -1.3862943611198906),
("<integer> <unit-of-duration><time-of-day> am|pm",
-0.9808292530117262)],
n = 2},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("dayhour", -1.3862943611198906),
("yearhour", -1.3862943611198906),
("<integer> <unit-of-duration>time-of-day (latent)",
-0.9808292530117262)],
n = 2}}),
("Maha Shivaratri",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ramadan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Lazarus Saturday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect 2 numbers",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("compose by multiplicationinteger (0..19)",
-0.2231435513142097)],
n = 3},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("powers of tensinteger (0..19)", -0.2231435513142097)],
n = 3}}),
("mm/yyyy",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("from|since|after <time>",
Classifier{okData =
ClassData{prior = -1.701375407759786, unseen = -4.51085950651685,
likelihoods =
HashMap.fromList
[("<day-of-month> (ordinal or number) <named-month>",
-3.8066624897703196),
("July", -3.8066624897703196),
("intersect", -3.8066624897703196),
("year (latent)", -3.4011973816621555),
("day", -2.4203681286504293),
("the <day-of-month> (ordinal)", -3.4011973816621555),
("the <day-of-month> (number)", -3.8066624897703196),
("time-of-day (latent)", -2.5538995212749516),
("year", -3.4011973816621555),
("<time-of-day> am|pm", -2.5538995212749516),
("hh:mm", -2.890371757896165),
("<day-of-month> (ordinal)", -3.4011973816621555),
("hour", -1.9348603128687285), ("month", -3.4011973816621555),
("minute", -2.890371757896165),
("August", -3.8066624897703196)],
n = 27},
koData =
ClassData{prior = -0.201421728167374, unseen = -5.631211781821365,
likelihoods =
HashMap.fromList
[("<integer> to|till|before <hour-of-day>", -4.018183201256536),
("week", -4.9344739331306915),
("<day-of-month> (ordinal or number) <named-month>",
-4.9344739331306915),
("today", -4.9344739331306915),
("intersect", -2.9885637840753785),
("<time> for <duration>", -4.241326752570746),
("second", -4.9344739331306915), ("now", -3.3250360206965914),
("tomorrow", -4.529008825022527),
("this|last|next <cycle>", -4.9344739331306915),
("day", -1.7774735119805785),
("the <day-of-month> (ordinal)", -4.529008825022527),
("the <day-of-month> (number)", -3.548179572010801),
("time-of-day (latent)", -2.492126897761487),
("<time-of-day> am|pm", -4.241326752570746),
("hh:mm", -4.529008825022527), ("nograin", -3.3250360206965914),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-4.529008825022527),
("<named-month>|<named-day> <day-of-month> (ordinal)",
-4.9344739331306915),
("<day-of-month> (ordinal)", -3.835861644462582),
("Easter Sunday", -4.9344739331306915),
("Christmas", -4.241326752570746),
("hour", -2.3317842476863078), ("month", -4.9344739331306915),
("<datetime> - <datetime> (interval)", -2.7372493557944724),
("<time-of-day> - <time-of-day> (interval)",
-2.9885637840753785),
("<named-month> <day-of-month> (non ordinal)",
-4.529008825022527),
("minute", -1.9900349539642512),
("right now", -4.9344739331306915),
("<month> dd-dd (interval)", -4.241326752570746),
("part of days", -4.9344739331306915),
("dd-dd <month> (interval)", -4.529008825022527)],
n = 121}}),
("integer after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("integer (numeric)noon|midnight|EOD|end of day",
-1.791759469228055),
("hour", -0.8754687373538999),
("integer (numeric)time-of-day (latent)", -1.3862943611198906),
("integer (20..90)time-of-day (latent)", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> last <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)week (grain)year (latent)",
-2.268683541318364),
("daymonth", -2.268683541318364),
("ordinal (digits)day (grain)May", -2.6741486494265287),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.6741486494265287),
("weekmonth", -1.7578579175523736),
("ordinal (digits)week (grain)October", -2.6741486494265287),
("ordinal (digits)week (grain)intersect", -2.6741486494265287),
("ordinal (digits)week (grain)year (latent)",
-2.6741486494265287),
("weekyear", -1.9810014688665833),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.6741486494265287),
("ordinals (first..twentieth,thirtieth,...)day (grain)May",
-2.6741486494265287)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [], n = 0}}),
("Yom HaShoah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal or number) <named-month>",
Classifier{okData =
ClassData{prior = -1.1631508098056809, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("ordinal (digits)December", -1.6094379124341003),
("ordinal (digits)February", -2.3025850929940455),
("integer (numeric)April", -2.3025850929940455),
("month", -1.2039728043259361)],
n = 5},
koData =
ClassData{prior = -0.3746934494414107,
unseen = -3.4965075614664802,
likelihoods =
HashMap.fromList
[("ordinal (digits)October", -2.772588722239781),
("ordinal (digits)July", -2.0794415416798357),
("integer (numeric)September", -2.367123614131617),
("ordinal (digits)August", -2.772588722239781),
("ordinal (digits)April", -2.772588722239781),
("month", -0.9808292530117262),
("integer (numeric)July", -2.0794415416798357)],
n = 11}}),
("<time> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.19105523676270922,
unseen = -4.6443908991413725,
likelihoods =
HashMap.fromList
[("<day-of-month> (ordinal)in|during the <part-of-day>",
-3.9415818076696905),
("dayhour", -1.499234772300486),
("Mondayearly morning", -3.536116699561526),
("time-of-day (latent)tonight", -3.536116699561526),
("hourhour", -2.236833715431265),
("<time-of-day> o'clockin|during the <part-of-day>",
-3.9415818076696905),
("todaypart of days", -3.9415818076696905),
("minutehour", -2.6888188391743224),
("at <time-of-day>in|during the <part-of-day>",
-3.536116699561526),
("time-of-day (latent)this <part-of-day>", -3.9415818076696905),
("Mondayin|during the <part-of-day>", -3.9415818076696905),
("intersectpart of days", -3.0252910757955354),
("Saturdaypart of days", -3.9415818076696905),
("intersectin|during the <part-of-day>", -3.9415818076696905),
("<day-of-month> (ordinal or number) of <named-month>in|during the <part-of-day>",
-3.9415818076696905),
("the <day-of-month> (ordinal)in|during the <part-of-day>",
-3.9415818076696905),
("tomorrowpart of days", -2.33214389523559),
("hh:mmin|during the <part-of-day>", -3.0252910757955354),
("time-of-day (latent)in|during the <part-of-day>",
-3.9415818076696905),
("hhmm (latent)in|during the <part-of-day>",
-3.9415818076696905),
("yesterdaypart of days", -3.536116699561526),
("<day-of-month> (ordinal or number) of <month>in|during the <part-of-day>",
-3.9415818076696905),
("Mondaypart of days", -3.9415818076696905)],
n = 38},
koData =
ClassData{prior = -1.749199854809259, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("dayhour", -3.068052935133617),
("yearhour", -3.068052935133617),
("monthhour", -3.068052935133617),
("hourhour", -1.9694406464655074),
("at <time-of-day>in|during the <part-of-day>",
-3.068052935133617),
("year (latent)in|during the <part-of-day>",
-3.068052935133617),
("Februaryin|during the <part-of-day>", -3.068052935133617),
("tomorrowpart of days", -3.068052935133617),
("time-of-day (latent)in|during the <part-of-day>",
-2.151762203259462)],
n = 8}}),
("dd/mm",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods = HashMap.fromList [("", 0.0)], n = 28}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.22957444164450025,
unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("<time> timezone", -3.917010546939185),
("noon|midnight|EOD|end of day", -4.204692619390966),
("integer after|past <hour-of-day>", -3.917010546939185),
("<time-of-day> o'clock", -4.61015772749913),
("half after|past <hour-of-day>", -4.61015772749913),
("hhhmm", -3.6938669956249752),
("<hour-of-day> <integer>", -3.917010546939185),
("time-of-day (latent)", -1.6397432619294292),
("hhmm (latent)", -3.917010546939185),
("<time-of-day> am|pm", -2.0074680420547466),
("hh:mm", -3.3573947590037623),
("about|exactly <time-of-day>", -4.204692619390966),
("hour", -1.11365016603265),
("<time-of-day> sharp|exactly", -4.61015772749913),
("minute", -1.9360090780726016)],
n = 93},
koData =
ClassData{prior = -1.5841201044498106,
unseen = -4.1588830833596715,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0076405104623831),
("<time-of-day> am|pm", -3.044522437723423),
("hour", -0.924258901523332)],
n = 24}}),
("December",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("Wednesday", -2.8622008809294686),
("Saturday", -2.8622008809294686),
("Monday", -2.456735772821304), ("Friday", -1.6094379124341003),
("day", -0.8472978603872037), ("Sunday", -2.8622008809294686),
("on <day>", -2.169053700369523)],
n = 14},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("September",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tonight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last|past|next <duration>",
Classifier{okData =
ClassData{prior = -3.17486983145803e-2,
unseen = -4.2626798770413155,
likelihoods =
HashMap.fromList
[("week", -2.456735772821304), ("second", -2.639057329615259),
("day", -2.3025850929940455), ("year", -2.639057329615259),
("<integer> <unit-of-duration>", -0.7827593392496325),
("hour", -2.639057329615259), ("month", -2.639057329615259),
("minute", -2.639057329615259)],
n = 31},
koData =
ClassData{prior = -3.4657359027997265,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("<integer> <unit-of-duration>", -1.6094379124341003),
("hour", -1.6094379124341003)],
n = 1}}),
("the ides of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("March", -0.6931471805599453), ("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal or number) of <named-month>",
Classifier{okData =
ClassData{prior = -0.7621400520468967, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)March",
-1.6094379124341003),
("ordinal (digits)February", -1.8971199848858813),
("month", -0.916290731874155),
("ordinal (digits)March", -1.8971199848858813)],
n = 7},
koData =
ClassData{prior = -0.6286086594223742,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.4816045409242156),
("month", -0.8938178760220964),
("integer (numeric)July", -1.4816045409242156)],
n = 8}}),
("integer (0..19)",
Classifier{okData =
ClassData{prior = -1.227009259181436e-2,
unseen = -4.418840607796598,
likelihoods = HashMap.fromList [("", 0.0)], n = 81},
koData =
ClassData{prior = -4.406719247264253, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("in|during <named-month>|year",
Classifier{okData =
ClassData{prior = -0.2744368457017603,
unseen = -3.8066624897703196,
likelihoods =
HashMap.fromList
[("<year> (bc|ad)", -1.9924301646902063),
("October", -3.0910424533583156),
("year (latent)", -1.2992829841302609),
("year", -0.9509762898620451), ("March", -2.6855773452501515),
("month", -2.3978952727983707)],
n = 19},
koData =
ClassData{prior = -1.4271163556401458,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("October", -1.5040773967762742),
("year (latent)", -1.5040773967762742),
("year", -1.5040773967762742), ("month", -1.5040773967762742)],
n = 6}}),
("<part-of-day> at <time-of-day>",
Classifier{okData =
ClassData{prior = -8.223809823697212e-2,
unseen = -4.406719247264253,
likelihoods =
HashMap.fromList
[("this <part-of-day>hh:mm", -3.7013019741124937),
("tonighthh:mm", -3.7013019741124937),
("hourhour", -0.8979415932059586),
("hourminute", -3.0081547935525483),
("in|during the <part-of-day>time-of-day (latent)",
-3.7013019741124937),
("this <part-of-day>time-of-day (latent)", -1.9965538818740682),
("early morningtime-of-day (latent)", -3.7013019741124937),
("tonight<time-of-day> o'clock", -3.7013019741124937),
("tonighttime-of-day (latent)", -2.7850112422383386),
("part of dayshh:mm", -3.7013019741124937),
("part of daystime-of-day (latent)", -1.6218604324326575)],
n = 35},
koData =
ClassData{prior = -2.538973871058276, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("hourhour", -1.4469189829363254),
("this <part-of-day>time-of-day (latent)", -2.1400661634962708),
("tonighttime-of-day (latent)", -2.1400661634962708),
("part of daystime-of-day (latent)", -2.1400661634962708)],
n = 3}}),
("between <time-of-day> and <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("minuteminute", -1.6739764335716716),
("hh:mmhh:mm", -2.0794415416798357),
("<time-of-day> am|pmtime-of-day (latent)",
-2.0794415416798357),
("hhhmmhhhmm", -2.0794415416798357),
("minutehour", -1.6739764335716716),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.0794415416798357)],
n = 4},
koData =
ClassData{prior = -1.0986122886681098,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -1.791759469228055),
("minutehour", -1.3862943611198906),
("hhhmmtime-of-day (latent)", -1.791759469228055)],
n = 2}}),
("Halloween",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Passover",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from <month> dd-dd (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("Julyinteger (numeric)integer (numeric)", -1.6094379124341003),
("Augustordinal (digits)integer (numeric)",
-1.6094379124341003),
("month", -0.916290731874155),
("Augustordinal (digits)ordinal (digits)",
-1.6094379124341003)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("Good Friday",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("October",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods = HashMap.fromList [("", 0.0)], n = 22},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = -0.12136085700426748,
unseen = -3.4965075614664802,
likelihoods = HashMap.fromList [("", 0.0)], n = 31},
koData =
ClassData{prior = -2.169053700369523, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("<integer> more <unit-of-duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (numeric)minute (grain)", -0.6931471805599453),
("minute", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> o'clock",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.0794415416798357),
("<part-of-day> at <time-of-day>", -2.0794415416798357),
("time-of-day (latent)", -1.1631508098056809),
("hour", -0.8266785731844679)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("Vesak",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Earth Hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in|within|after <duration>",
Classifier{okData =
ClassData{prior = -0.12260232209233239,
unseen = -4.709530201312334,
likelihoods =
HashMap.fromList
[("week", -3.3141860046725258),
("<integer> more <unit-of-duration>", -4.007333185232471),
("three-quarters of an hour", -3.0910424533583156),
("<integer> + '\"", -3.3141860046725258),
("number.number hours", -4.007333185232471),
("second", -3.6018680771243066),
("half a <time-grain>", -3.0910424533583156),
("day", -3.3141860046725258), ("year", -4.007333185232471),
("<integer> <unit-of-duration>", -1.6094379124341003),
("a <unit-of-duration>", -3.0910424533583156),
("quarter of an hour", -3.0910424533583156),
("hour", -2.503255788456197),
("about|exactly <duration>", -4.007333185232471),
("half an hour (abbrev).", -3.6018680771243066),
("<integer> and an half hour", -4.007333185232471),
("minute", -1.2992829841302609)],
n = 46},
koData =
ClassData{prior = -2.159484249353372, unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("day", -2.70805020110221), ("quarter", -2.70805020110221),
("year", -1.791759469228055),
("<integer> <unit-of-duration>", -1.791759469228055),
("a <unit-of-duration>", -2.3025850929940455)],
n = 6}}),
("the closest <day> to <time>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("dayday", -1.0986122886681098),
("Christmastoday", -1.791759469228055),
("Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-1.791759469228055),
("Monday<named-month> <day-of-month> (non ordinal)",
-1.791759469228055)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -1.3862943611198906),
("MondayOctober", -1.3862943611198906)],
n = 1}}),
("January",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("three-quarters of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Mattu Pongal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Wednesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> + '\"",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half <integer> (UK style hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Ganesh Chaturthi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("July",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.5553480614894135,
likelihoods = HashMap.fromList [("", 0.0)], n = 33},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.12516314295400605,
unseen = -3.6888794541139363,
likelihoods =
HashMap.fromList
[("time-of-day (latent)tonight", -2.5649493574615367),
("hourhour", -1.3609765531356008),
("<time-of-day> o'clockin|during the <part-of-day>",
-2.9704144655697013),
("minutehour", -1.717651497074333),
("at <time-of-day>in|during the <part-of-day>",
-2.277267285009756),
("time-of-day (latent)this <part-of-day>", -2.9704144655697013),
("hh:mmin|during the <part-of-day>", -2.0541237336955462),
("time-of-day (latent)in|during the <part-of-day>",
-2.277267285009756),
("hhmm (latent)in|during the <part-of-day>",
-2.9704144655697013)],
n = 15},
koData =
ClassData{prior = -2.1400661634962708, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("hourhour", -1.466337068793427),
("time-of-day (latent)in|during the <part-of-day>",
-1.466337068793427)],
n = 2}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = -1.3723081191451507, unseen = -2.995732273553991,
likelihoods = HashMap.fromList [("", 0.0)], n = 18},
koData =
ClassData{prior = -0.2923879634891936, unseen = -4.007333185232471,
likelihoods = HashMap.fromList [("", 0.0)], n = 53}}),
("Parsi New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Shavuot",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day> <duration> hence|ago",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("dayyear", -0.6931471805599453),
("Diwali<integer> <unit-of-duration>", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter",
Classifier{okData =
ClassData{prior = -0.4700036292457356, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -1.1786549963416462),
("ordinals (first..twentieth,thirtieth,...)quarter (grain)",
-1.466337068793427),
("quarter", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -0.9808292530117262,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.8109302162163288),
("quarter", -0.8109302162163288)],
n = 3}}),
("Boss's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Orthodox Easter Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("one twenty two",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("May",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect",
Classifier{okData =
ClassData{prior = -0.4907042205665696, unseen = -7.174724309836376,
likelihoods =
HashMap.fromList
[("Navaratriyear (latent)", -6.480811139196849),
("Karva Chauthyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)in|during the <part-of-day>",
-6.480811139196849),
("Maha Shivaratriyear (latent)", -6.480811139196849),
("Ramadanyear (latent)", -5.228048170701481),
("<datetime> - <datetime> (interval)on <day>",
-5.094516778076958),
("Bhai Doojyear (latent)", -6.480811139196849),
("hourday", -4.465908118654584),
("dayhour", -3.4362887014734254),
("<time-of-day> - <time-of-day> (interval)on <day>",
-5.094516778076958),
("Martin Luther King's Dayyear (latent)", -6.075346031088684),
("Shemini Atzeretyear (latent)", -6.480811139196849),
("daymonth", -4.465908118654584),
("monthday", -6.075346031088684),
("monthyear", -4.689051669968793),
("Yom Ha'atzmautyear (latent)", -6.480811139196849),
("Orthodox Good Fridayyear (latent)", -6.075346031088684),
("Vijayadashamiyear (latent)", -6.480811139196849),
("Thai Pongalyear (latent)", -5.787663958636903),
("Thiru Onamyear (latent)", -5.787663958636903),
("hhhmmabsorption of , after named day", -6.480811139196849),
("Tuesdaythe <day-of-month> (ordinal)", -6.480811139196849),
("from <datetime> - <datetime> (interval)July",
-5.564520407322694),
("<day-of-month> (ordinal)Wednesday", -6.480811139196849),
("Krishna Janmashtamiyear (latent)", -6.075346031088684),
("Guru Gobind Singh Jayantiyear (latent)", -6.480811139196849),
("houryear", -5.564520407322694),
("this|next <day-of-week>hh(:mm) - <time-of-day> am|pm",
-6.480811139196849),
("Christmas<time-of-day> am|pm", -6.480811139196849),
("last <day-of-week> of <time>year (latent)",
-6.480811139196849),
("<time-of-day> am|pmintersect by \",\", \"of\", \"from\", \"'s\"",
-5.787663958636903),
("intersectin|during <named-month>|year", -6.480811139196849),
("<time-of-day> am|pmintersect", -5.228048170701481),
("Earth Houryear (latent)", -6.480811139196849),
("Ganesh Chaturthiyear (latent)", -6.480811139196849),
("Octoberyear (latent)", -4.976733742420574),
("intersect<time-of-day> am|pm", -6.480811139196849),
("Mattu Pongalyear (latent)", -6.480811139196849),
("Saturday<part-of-day> at <time-of-day>", -6.480811139196849),
("Shavuotyear (latent)", -6.480811139196849),
("Parsi New Yearyear (latent)", -5.564520407322694),
("at <time-of-day>in|within|after <duration>",
-6.480811139196849),
("Thursdayhh(:mm) - <time-of-day> am|pm", -6.480811139196849),
("todayin|within|after <duration>", -6.480811139196849),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-6.075346031088684),
("Marchyear (latent)", -6.480811139196849),
("intersect by \",\", \"of\", \"from\", \"'s\"hhhmm",
-6.075346031088684),
("Sukkotyear (latent)", -6.075346031088684),
("hhhmmintersect", -6.075346031088684),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-6.075346031088684),
("Clean Mondayyear (latent)", -6.075346031088684),
("monthhour", -6.075346031088684),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.564520407322694),
("todayat <time-of-day>", -6.480811139196849),
("Thursday<time> timezone", -4.082915866398478),
("tonight<time-of-day> am|pm", -6.480811139196849),
("time-of-day (latent)tonight", -6.075346031088684),
("from|since|after <time>December", -6.480811139196849),
("<time-of-day> am|pmon <day>", -4.465908118654584),
("this <time>hh(:mm) - <time-of-day> am|pm",
-6.480811139196849),
("yyyy-mm-ddhh:mm:ss", -6.075346031088684),
("dayday", -3.7082224169570672),
("<time> <part-of-day>at <time-of-day>", -6.075346031088684),
("tonightat <time-of-day>", -5.382198850528739),
("<time-of-day> am|pmabsorption of , after named day",
-5.787663958636903),
("Dayananda Saraswati Jayantiyear (latent)",
-6.480811139196849),
("today<time-of-day> am|pm", -6.480811139196849),
("Februarythe <day-of-month> (ordinal)", -6.075346031088684),
("at <time-of-day><time> <part-of-day>", -6.480811139196849),
("<day-of-month> (ordinal)intersect", -6.075346031088684),
("hourhour", -3.5103966736271475),
("Mahavir Jayantiyear (latent)", -6.075346031088684),
("Navaratriin|during <named-month>|year", -6.480811139196849),
("Wednesdaythis|last|next <cycle>", -6.480811139196849),
("Lentyear (latent)", -6.480811139196849),
("intersect<named-month> <day-of-month> (non ordinal)",
-4.609008962295257),
("Boghiyear (latent)", -6.480811139196849),
("dayyear", -1.9216848917101639),
("Karva Chauthin|during <named-month>|year",
-6.480811139196849),
("Thursdayfrom|since|after <time>", -6.075346031088684),
("<time-of-day> o'clockin|during the <part-of-day>",
-6.480811139196849),
("Thursdayat <time-of-day>", -5.787663958636903),
("Islamic New Yearyear (latent)", -6.075346031088684),
("Laylat al-Qadryear (latent)", -5.564520407322694),
("part of days<time-of-day> am|pm", -6.480811139196849),
("Shrove Tuesdayyear (latent)", -6.480811139196849),
("intersect by \",\", \"of\", \"from\" for year<time-of-day> am|pm",
-5.787663958636903),
("hourminute", -6.075346031088684),
("<time-of-day> am|pmtomorrow", -5.564520407322694),
("Yom Kippuryear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)Tuesday", -5.787663958636903),
("<part-of-day> of <time>year (latent)", -6.480811139196849),
("minutehour", -5.228048170701481),
("Kaanum Pongalyear (latent)", -6.075346031088684),
("Maha Saptamiyear (latent)", -6.480811139196849),
("at <time-of-day>in|during the <part-of-day>",
-5.787663958636903),
("time-of-day (latent)tomorrow", -5.564520407322694),
("part of daysat <time-of-day>", -4.871373226762748),
("absorption of , after named day<named-month> <day-of-month> (non ordinal)",
-4.465908118654584),
("for <duration> from <time>December", -6.480811139196849),
("tomorrow<time-of-day> sharp|exactly", -6.480811139196849),
("Thursdayfrom <datetime> - <datetime> (interval)",
-5.228048170701481),
("intersect by \",\", \"of\", \"from\" for yearhhhmm",
-5.228048170701481),
("time-of-day (latent)this <part-of-day>", -6.480811139196849),
("Pentecostyear (latent)", -6.480811139196849),
("Thursdayfrom <time-of-day> - <time-of-day> (interval)",
-5.228048170701481),
("<day-of-month> (ordinal)February", -6.480811139196849),
("Eid al-Fitryear (latent)", -5.094516778076958),
("Vasant Panchamiin|during <named-month>|year",
-6.480811139196849),
("Mondayin|during the <part-of-day>", -6.480811139196849),
("Chhathin|during <named-month>|year", -6.480811139196849),
("Diwaliin|during <named-month>|year", -6.480811139196849),
("this <part-of-day><time-of-day> am|pm", -6.480811139196849),
("Vaisakhiin|during <named-month>|year", -6.480811139196849),
("Guru Ravidass Jayantiyear (latent)", -5.228048170701481),
("Raksha Bandhanyear (latent)", -6.480811139196849),
("daysecond", -6.075346031088684),
("tomorrowfrom <time-of-day> - <time-of-day> (interval)",
-6.075346031088684),
("Ratha-Yatrayear (latent)", -6.480811139196849),
("Ashurayear (latent)", -6.480811139196849),
("Tuesdayin|during <named-month>|year", -6.480811139196849),
("Chinese New Yearyear (latent)", -6.480811139196849),
("tomorrowintersect", -6.480811139196849),
("Lag BaOmeryear (latent)", -6.480811139196849),
("last weekend of <named-month>year (latent)",
-6.480811139196849),
("Eid al-Adhayear (latent)", -4.340744975700578),
("intersectin|during the <part-of-day>", -6.480811139196849),
("Palm Sundayyear (latent)", -6.480811139196849),
("Christmasat <time-of-day>", -6.480811139196849),
("Passoveryear (latent)", -6.480811139196849),
("Lazarus Saturdayyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal or number) <named-month>year (latent)",
-6.480811139196849),
("hhhmmon <day>", -5.564520407322694),
("Yom HaShoahyear (latent)", -6.075346031088684),
("Thursday<datetime> - <datetime> (interval)",
-6.075346031088684),
("<day-of-month> (ordinal or number) of <named-month>in|during the <part-of-day>",
-6.480811139196849),
("Septemberyear (latent)", -6.075346031088684),
("Thursday<time-of-day> - <time-of-day> (interval)",
-4.976733742420574),
("Halloweenyear (latent)", -6.480811139196849),
("<ordinal> last <cycle> of <time>year (latent)",
-6.075346031088684),
("from <time-of-day> - <time-of-day> (interval)on <day>",
-5.787663958636903),
("intersect by \",\", \"of\", \"from\", \"'s\"<time-of-day> am|pm",
-6.480811139196849),
("at <time-of-day>intersect", -5.564520407322694),
("Rosh Hashanahyear (latent)", -5.787663958636903),
("Dhanterasyear (latent)", -6.480811139196849),
("Tu BiShvatyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)December", -5.787663958636903),
("Holiyear (latent)", -5.787663958636903),
("<time-of-day> - <time-of-day> (interval)tomorrow",
-6.480811139196849),
("Holika Dahanyear (latent)", -5.787663958636903),
("at <time-of-day>intersect by \",\", \"of\", \"from\", \"'s\"",
-6.075346031088684),
("dayminute", -3.166625134524323),
("Mawlidyear (latent)", -6.480811139196849),
("from <datetime> - <datetime> (interval)on <day>",
-6.075346031088684),
("<datetime> - <datetime> (interval)tomorrow",
-6.480811139196849),
("Jumu'atul-Widayear (latent)", -5.564520407322694),
("minuteday", -2.7313070632664775),
("absorption of , after named dayintersect",
-5.787663958636903),
("intersectyear (latent)", -6.480811139196849),
("Orthodox Easter Sundayyear (latent)", -6.480811139196849),
("time-of-day (latent)in|within|after <duration>",
-6.480811139196849),
("<ordinal> <cycle> of <time>year (latent)",
-6.480811139196849),
("intersecthhhmm", -6.075346031088684),
("the <day-of-month> (ordinal)in|during the <part-of-day>",
-6.480811139196849),
("Boss's Dayyear (latent)", -6.075346031088684),
("hhhmmintersect by \",\", \"of\", \"from\", \"'s\"",
-6.480811139196849),
("Global Youth Service Dayyear (latent)", -6.480811139196849),
("Dhanterasin|during <named-month>|year", -6.480811139196849),
("tonight<time-of-day> o'clock", -6.480811139196849),
("Tisha B'Avyear (latent)", -6.480811139196849),
("Isra and Mi'rajyear (latent)", -5.564520407322694),
("at <time-of-day>on <day>", -4.871373226762748),
("at <time-of-day>absorption of , after named day",
-6.075346031088684),
("time-of-day (latent)<time> <part-of-day>",
-5.787663958636903),
("Christmasyear (latent)", -6.075346031088684),
("Saturdayintersect", -6.480811139196849),
("Naraka Chaturdashiyear (latent)", -6.075346031088684),
("Thai Pongalin|during <named-month>|year", -6.480811139196849),
("dayweek", -6.480811139196849),
("Easter Sundayyear (latent)", -5.787663958636903),
("between <time-of-day> and <time-of-day> (interval)on <day>",
-6.075346031088684),
("weekyear", -5.382198850528739),
("King's Dayyear (latent)", -4.976733742420574),
("hh:mmin|during the <part-of-day>", -5.564520407322694),
("<cycle> after|before <time><time-of-day> am|pm",
-6.075346031088684),
("first|second|third|fourth|fifth <day-of-week> of <time>year (latent)",
-5.787663958636903),
("Hanukkahyear (latent)", -5.787663958636903),
("Rama Navamiyear (latent)", -6.480811139196849),
("February<time> <part-of-day>", -6.480811139196849),
("time-of-day (latent)in|during the <part-of-day>",
-5.787663958636903),
("Great Lentyear (latent)", -6.480811139196849),
("tomorrowat <time-of-day>", -5.787663958636903),
("hhmm (latent)in|during the <part-of-day>",
-6.480811139196849),
("tomorrow<part-of-day> at <time-of-day>", -6.075346031088684),
("Ugadiyear (latent)", -5.094516778076958),
("Vaisakhiyear (latent)", -5.787663958636903),
("absorption of , after named dayintersect by \",\", \"of\", \"from\" for year",
-5.787663958636903),
("last <cycle> of <time>year (latent)", -5.787663958636903),
("at <time-of-day>tomorrow", -6.075346031088684),
("tomorrow<time-of-day> am|pm", -6.480811139196849),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-6.480811139196849),
("Diwaliyear (latent)", -6.480811139196849),
("between <time> and <time>on <day>", -6.075346031088684),
("Black Fridayyear (latent)", -6.075346031088684),
("the <ordinal> last <cycle> of <time>year (latent)",
-6.480811139196849),
("in|during the <part-of-day>at <time-of-day>",
-6.480811139196849),
("Chhathyear (latent)", -6.075346031088684),
("Vasant Panchamiyear (latent)", -6.480811139196849),
("Rabindra Jayantiyear (latent)", -5.228048170701481),
("this <part-of-day>at <time-of-day>", -5.228048170701481),
("St Patrick's Dayyear (latent)", -6.480811139196849),
("Thursday<time> (timezone)", -6.480811139196849),
("<day-of-month> (ordinal or number) of <month>in|during the <part-of-day>",
-6.480811139196849),
("Pargat Diwasyear (latent)", -5.228048170701481),
("<datetime> - <datetime> (interval)July", -6.075346031088684),
("on <day><named-month> <day-of-month> (non ordinal)",
-5.787663958636903),
("Februaryintersect", -6.480811139196849),
("Simchat Torahyear (latent)", -6.480811139196849),
("minuteyear", -6.480811139196849)],
n = 502},
koData =
ClassData{prior = -0.9472529574781219, unseen = -6.843749949006225,
likelihoods =
HashMap.fromList
[("Thursdayhhhmm", -4.645458704902203),
("hourday", -3.546846416234093),
("<hour-of-day> <integer><time-of-day> am|pm",
-6.149536101678477),
("<day-of-month> (ordinal)August", -6.149536101678477),
("dayhour", -3.5845867442169403),
("<time> timezoneyear (latent)", -5.456388921118531),
("<time-of-day> - <time-of-day> (interval)on <day>",
-5.2332453698043215),
("Tuesdayfrom|since|after <time>", -5.456388921118531),
("daymonth", -3.441485900576267),
("hourquarter", -5.2332453698043215),
("monthyear", -4.896773133183109),
("<time-of-day> am|pmyear (latent)", -5.456388921118531),
("Thai Pongalyear (latent)", -5.456388921118531),
("intersecthh:mm", -6.149536101678477),
("from <datetime> - <datetime> (interval)July",
-5.744070993570313),
("<day-of-month> (ordinal)Wednesday", -6.149536101678477),
("houryear", -4.357776632450422),
("from <time-of-day> - <time-of-day> (interval)July",
-6.149536101678477),
("<day-of-month> (ordinal)October", -6.149536101678477),
("<time-of-day> am|pmintersect by \",\", \"of\", \"from\", \"'s\"",
-5.456388921118531),
("hournograin", -4.896773133183109),
("<time-of-day> am|pmintersect", -4.896773133183109),
("Octoberyear (latent)", -6.149536101678477),
("Good Fridayyear (latent)", -5.744070993570313),
("time-of-day (latent)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("early morningat <time-of-day>", -6.149536101678477),
("until <time>on <day>", -5.456388921118531),
("part of days<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("time-of-day (latent)intersect", -5.456388921118531),
("todayin <number> (implicit minutes)", -6.149536101678477),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-5.456388921118531),
("this <part-of-day><time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("about|exactly <time-of-day>year (latent)",
-6.149536101678477),
("hh:mmon <day>", -4.896773133183109),
("hhhmmintersect", -5.744070993570313),
("absorption of , after named dayJuly", -5.2332453698043215),
("from|since|after <time>July", -5.744070993570313),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-5.2332453698043215),
("Clean Mondayyear (latent)", -6.149536101678477),
("monthhour", -6.149536101678477),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("hourmonth", -4.009469938182206),
("todayat <time-of-day>", -6.149536101678477),
("hhhmmyear (latent)", -4.896773133183109),
("from|since|after <time>December", -6.149536101678477),
("from|since|after <time><time-of-day> am|pm",
-6.149536101678477),
("<time-of-day> am|pmon <day>", -4.896773133183109),
("Mondayyear (latent)", -5.456388921118531),
("dayday", -4.009469938182206),
("on <day>September", -5.456388921118531),
("time-of-day (latent)September", -5.744070993570313),
("hourhour", -4.0700945599986404),
("time-of-day (latent)on <day>", -4.645458704902203),
("Thursdaydd/mm", -6.149536101678477),
("time-of-day (latent)<cycle> after|before <time>",
-5.744070993570313),
("dayyear", -3.1291112155341145),
("New Year's Dayyear (latent)", -5.2332453698043215),
("time-of-day (latent)Sunday", -5.744070993570313),
("Thursdayfrom|since|after <time>", -4.444788009440051),
("Thursdayat <time-of-day>", -4.540098189244376),
("<integer> to|till|before <hour-of-day>September",
-6.149536101678477),
("Aprilyear (latent)", -6.149536101678477),
("the <day-of-month> (ordinal)July", -6.149536101678477),
("the <day-of-month> (number)July", -6.149536101678477),
("monthminute", -6.149536101678477),
("<time-of-day> am|pmtomorrow", -5.744070993570313),
("Thursdayhh:mm", -5.2332453698043215),
("<day-of-month> (ordinal)Tuesday", -5.744070993570313),
("minutemonth", -4.009469938182206),
("time-of-day (latent)Friday", -5.744070993570313),
("minutehour", -6.149536101678477),
("part of daysat <time-of-day>", -5.0509238130103675),
("time-of-day (latent)this|last|next <cycle>",
-3.7981608445149995),
("Augustyear (latent)", -5.744070993570313),
("week-endin|during <named-month>|year", -6.149536101678477),
("time-of-day (latent)Tuesday", -5.744070993570313),
("tomorrowfrom <time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("tonight<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("Sundayyear (latent)", -5.2332453698043215),
("hourweek", -5.2332453698043215),
("the <day-of-month> (ordinal)Monday", -5.456388921118531),
("Christmasat <time-of-day>", -6.149536101678477),
("from|since|after <time>year (latent)", -5.0509238130103675),
("hhhmmon <day>", -5.2332453698043215),
("yesterday<time-of-day> am|pm", -6.149536101678477),
("<day-of-month> (ordinal)July", -5.456388921118531),
("intersect by \",\", \"of\", \"from\", \"'s\"hh:mm",
-6.149536101678477),
("Thursday<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("in|during <named-month>|yearyear (latent)",
-6.149536101678477),
("at <time-of-day>intersect", -5.2332453698043215),
("hh:mmyear (latent)", -4.763241740558586),
("Holiyear (latent)", -6.149536101678477),
("until <time><time-of-day> am|pm", -6.149536101678477),
("at <time-of-day>intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("dayminute", -3.316322757622261),
("yyyy-mm-ddhh:mm", -5.744070993570313),
("intersectfrom|since|after <time>", -5.744070993570313),
("intersectSeptember", -4.277733924776886),
("minuteday", -2.9925356805283636),
("absorption of , after named dayintersect",
-6.149536101678477),
("intersectyear (latent)", -6.149536101678477),
("Februaryin|during the <part-of-day>", -6.149536101678477),
("<duration> after|before|from|past <time>December",
-6.149536101678477),
("time-of-day (latent)July", -5.456388921118531),
("Saturdayyear (latent)", -6.149536101678477),
("hhhmmintersect by \",\", \"of\", \"from\", \"'s\"",
-6.149536101678477),
("<day-of-month> (ordinal)Monday", -4.896773133183109),
("at <time-of-day>on <day>", -5.2332453698043215),
("absorption of , after named daySeptember",
-4.896773133183109),
("Naraka Chaturdashiyear (latent)", -6.149536101678477),
("from|since|after <time>on <day>", -5.2332453698043215),
("dayweek", -6.149536101678477),
("Easter Sundayyear (latent)", -5.744070993570313),
("Thursday<time-of-day> am|pm", -4.896773133183109),
("weekyear", -5.744070993570313),
("time-of-day (latent)Thursday", -5.744070993570313),
("<named-month> <day-of-month> (non ordinal)until <time>",
-6.149536101678477),
("<day-of-month> (ordinal)April", -6.149536101678477),
("yyyy-mm-dd<time-of-day> - <time-of-day> (interval)",
-5.744070993570313),
("intersect by \",\", \"of\", \"from\" for yearhh:mm",
-5.456388921118531),
("Sundayfrom|since|after <time>", -6.149536101678477),
("absorption of , after named dayFebruary",
-5.2332453698043215),
("time-of-day (latent)in|during the <part-of-day>",
-5.744070993570313),
("July<integer> to|till|before <hour-of-day>",
-6.149536101678477),
("tomorrowat <time-of-day>", -6.149536101678477),
("daynograin", -5.744070993570313),
("Fridayin|during <named-month>|year", -6.149536101678477),
("<integer> to|till|before <hour-of-day>July",
-5.744070993570313),
("last <cycle> of <time>year (latent)", -5.744070993570313),
("tomorrow<time-of-day> am|pm", -6.149536101678477),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-5.456388921118531),
("Diwaliyear (latent)", -5.744070993570313),
("<time-of-day> - <time-of-day> (interval)July",
-6.149536101678477),
("this <part-of-day>at <time-of-day>", -5.2332453698043215),
("Fridayyear (latent)", -5.2332453698043215),
("time-of-day (latent)April", -6.149536101678477),
("minuteyear", -3.9523115243422575)],
n = 318}}),
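-- Descriptive note (added; the structure below is inferred from the data itself, not from
-- an external spec): each list element pairs a rule name with a Classifier whose okData and
-- koData ClassData records hold a log-scale prior, an "unseen" value, per-feature likelihoods
-- in a HashMap, and the supporting sample count n; a prior of -infinity with n = 0 marks a
-- class that received no observations.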
("one eleven",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("after lunch/work/school",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("early morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <number> (implicit minutes)",
Classifier{okData =
ClassData{prior = -1.329135947279942, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.3184537311185346),
("integer (0..19)", -1.2992829841302609)],
n = 9},
koData =
ClassData{prior = -0.30748469974796055,
unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.20479441264601328),
("integer (0..19)", -1.6863989535702288)],
n = 25}}),
("<ordinal> <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("daymonth", -1.791759469228055),
("ordinal (digits)quarter (grain)year (latent)",
-2.1972245773362196),
("quarteryear", -2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)day (grain)October",
-2.1972245773362196),
("ordinal (digits)day (grain)this|last|next <cycle>",
-2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.1972245773362196),
("weekmonth", -1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.1972245773362196)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("year (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from <datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -1.3737155789130304, unseen = -4.532599493153256,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -3.4231762883809305),
("minuteminute", -2.91235066461494),
("the <day-of-month> (number)the <day-of-month> (ordinal)",
-3.828641396489095),
("<day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.4231762883809305),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.4231762883809305),
("hh:mmhh:mm", -2.91235066461494),
("dayday", -1.9568392195875037),
("the <day-of-month> (ordinal)the <day-of-month> (number)",
-3.828641396489095),
("the <day-of-month> (number)the <day-of-month> (number)",
-3.4231762883809305),
("<named-month>|<named-day> <day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.828641396489095),
("<time-of-day> am|pmtime-of-day (latent)", -3.828641396489095),
("hourhour", -3.4231762883809305),
("minutehour", -3.4231762883809305),
("<day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.828641396489095),
("the <day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.828641396489095),
("<time-of-day> am|pm<time-of-day> am|pm", -3.828641396489095),
("<day-of-month> (ordinal)the <day-of-month> (number)",
-3.828641396489095)],
n = 20},
koData =
ClassData{prior = -0.29191040856130207, unseen = -5.14166355650266,
likelihoods =
HashMap.fromList
[("hourday", -2.570849079588725),
("dayhour", -3.056356895370426),
("<day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("time-of-day (latent)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("time-of-day (latent)<day-of-month> (ordinal)",
-4.442651256490317),
("time-of-day (latent)intersect", -4.442651256490317),
("<day-of-month> (ordinal)time-of-day (latent)",
-3.7495040759303713),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-4.442651256490317),
("hh:mmtime-of-day (latent)", -3.5263605246161616),
("hh:mm<time-of-day> am|pm", -4.037186148382152),
("minuteminute", -3.5263605246161616),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("time-of-day (latent)time-of-day (latent)",
-3.5263605246161616),
("hh:mmhh:mm", -4.442651256490317),
("dayday", -1.916922612182061),
("the <day-of-month> (ordinal)the <day-of-month> (number)",
-4.037186148382152),
("the <day-of-month> (number)the <day-of-month> (number)",
-4.442651256490317),
("time-of-day (latent)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)intersect", -4.442651256490317),
("hourhour", -3.1898882879949486),
("time-of-day (latent)the <day-of-month> (ordinal)",
-4.442651256490317),
("dayyear", -4.442651256490317),
("the <day-of-month> (ordinal)time-of-day (latent)",
-4.442651256490317),
("the <day-of-month> (number)time-of-day (latent)",
-4.442651256490317),
("minutehour", -3.3440389678222067),
("the <day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("the <day-of-month> (number)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("hh:mmintersect", -4.037186148382152),
("time-of-day (latent)<day-of-month> (ordinal or number) of <month>",
-4.037186148382152),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) of <month>",
-4.037186148382152),
("time-of-day (latent)<time-of-day> am|pm", -4.442651256490317),
("time-of-day (latent)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("time-of-day (latent)the <day-of-month> (number)",
-4.037186148382152),
("the <day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("the <day-of-month> (number)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("the <day-of-month> (number)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("time-of-day (latent)<day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)the <day-of-month> (number)",
-4.442651256490317),
("the <day-of-month> (ordinal)intersect", -4.037186148382152),
("<named-month> <day-of-month> (non ordinal)time-of-day (latent)",
-4.442651256490317),
("the <day-of-month> (number)intersect", -4.037186148382152)],
n = 59}}),
("Saturday",
Classifier{okData =
ClassData{prior = -0.11778303565638351,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -2.1972245773362196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("the <cycle> of <time>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("week (grain)<named-month>|<named-day> <day-of-month> (ordinal)",
-1.845826690498331),
("weekmonth", -1.845826690498331),
("week (grain)October", -1.845826690498331),
("week (grain)<named-month> <day-of-month> (non ordinal)",
-1.845826690498331),
("weekday", -1.3350010667323402)],
n = 6},
koData =
ClassData{prior = -1.9459101490553135,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("second (grain) March", -1.5040773967762742),
("secondmonth", -1.5040773967762742)],
n = 1}}),
("number.number hours",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("from <time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -0.7308875085427924,
unseen = -3.6888794541139363,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -2.5649493574615367),
("minuteminute", -1.8718021769015913),
("hh:mmhh:mm", -1.8718021769015913),
("<time-of-day> am|pmtime-of-day (latent)",
-2.5649493574615367),
("hourhour", -2.5649493574615367),
("hourminute", -2.5649493574615367),
("minutehour", -2.0541237336955462),
("time-of-day (latent)<time-of-day> sharp|exactly",
-2.9704144655697013),
("time-of-day (latent)hh:mm", -2.9704144655697013),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.5649493574615367)],
n = 13},
koData =
ClassData{prior = -0.6567795363890705,
unseen = -3.7376696182833684,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -1.9218125974762528),
("hh:mm<time-of-day> am|pm", -2.6149597780361984),
("minuteminute", -2.6149597780361984),
("time-of-day (latent)time-of-day (latent)",
-1.9218125974762528),
("hh:mmhh:mm", -3.0204248861443626),
("hourhour", -1.7676619176489945),
("minutehour", -1.7676619176489945),
("time-of-day (latent)<time-of-day> am|pm",
-3.0204248861443626)],
n = 14}}),
("integer 21..99",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (20..90)integer (0..19)", -0.2876820724517809)],
n = 2},
koData =
ClassData{prior = -0.5108256237659907, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("integer (numeric)integer (numeric)", -0.2231435513142097)],
n = 3}}),
("Global Youth Service Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tisha B'Av",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("year (latent)",
Classifier{okData =
ClassData{prior = -0.14681486833704485,
unseen = -5.393627546352362,
likelihoods =
HashMap.fromList
[("integer (numeric)", -2.7779564107075706e-2),
("intersect 2 numbers", -4.00277736869661)],
n = 215},
koData =
ClassData{prior = -1.9910923718485463,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("integer (numeric)", -1.55814461804655),
("negative numbers", -0.41871033485818493),
("compose by multiplication", -2.2512917986064953)],
n = 34}}),
("<time> for <duration>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.5553480614894135,
likelihoods =
HashMap.fromList
[("minuteminute", -2.833213344056216),
("<time-of-day> am|pm<integer> <unit-of-duration>",
-2.1400661634962708),
("dayday", -1.4469189829363254),
("hourminute", -1.916922612182061),
("hhhmm<integer> <unit-of-duration>", -2.833213344056216),
("intersect<integer> <unit-of-duration>", -2.1400661634962708),
("<day-of-month> (ordinal or number) <named-month><integer> <unit-of-duration>",
-2.4277482359480516),
("from|since|after <time><integer> <unit-of-duration>",
-2.1400661634962708)],
n = 12},
koData =
ClassData{prior = -1.9459101490553135, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("monthday", -1.540445040947149),
("December<integer> <unit-of-duration>", -1.540445040947149)],
n = 2}}),
("hhhmm",
Classifier{okData =
ClassData{prior = -2.247285585205863e-2,
unseen = -3.828641396489095,
likelihoods = HashMap.fromList [("", 0.0)], n = 44},
koData =
ClassData{prior = -3.8066624897703196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("as soon as possible",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Mahavir Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Monday",
Classifier{okData =
ClassData{prior = -0.21357410029805904,
unseen = -3.1354942159291497,
likelihoods = HashMap.fromList [("", 0.0)], n = 21},
koData =
ClassData{prior = -1.6486586255873816,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("dd/mm/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter <year>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)year (latent)",
-0.6931471805599453),
("quarteryear", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Dayananda Saraswati Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm:ss",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Hanukkah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("first|second|third|fourth|fifth <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("daymonth", -0.8649974374866046),
("ordinals (first..twentieth,thirtieth,...)Tuesdaythis|last|next <cycle>",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)TuesdaySeptember",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)WednesdayOctober",
-2.538973871058276),
("ordinals (first..twentieth,thirtieth,...)Wednesdayintersect",
-2.538973871058276),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-1.6916760106710724),
("ordinals (first..twentieth,thirtieth,...)TuesdayOctober",
-2.538973871058276)],
n = 15},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> <integer>",
Classifier{okData =
ClassData{prior = -9.53101798043249e-2,
unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("at <time-of-day>integer (20..90)", -2.5649493574615367),
("time-of-day (latent)integer (numeric)", -2.159484249353372),
("time-of-day (latent)integer 21..99", -2.159484249353372),
("hour", -0.8602012652231115),
("at <time-of-day>integer (numeric)", -2.159484249353372),
("time-of-day (latent)integer (20..90)", -1.8718021769015913)],
n = 10},
koData =
ClassData{prior = -2.3978952727983707,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("hour", -1.3862943611198906),
("time-of-day (latent)integer (20..90)", -1.3862943611198906)],
n = 1}}),
("Rama Navami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> quarter",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 2}}),
("half a <time-grain>",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 5},
koData =
ClassData{prior = -1.791759469228055, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("King's Day",
Classifier{okData =
ClassData{prior = -0.11778303565638351,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -2.1972245773362196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("Valentine's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from the <day-of-month> (ordinal or number) to the <day-of-month> (ordinal or number) of <named-month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("ordinal (digits)ordinal (digits)July", -1.9810014688665833),
("integer (numeric)integer (numeric)July", -1.9810014688665833),
("integer (numeric)ordinal (digits)July", -1.9810014688665833),
("ordinal (digits)integer (numeric)July", -1.9810014688665833),
("month", -0.8023464725249373)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("April",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Maha Saptami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("end of month",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = -4.546237407675729e-2,
unseen = -4.477336814478207,
likelihoods = HashMap.fromList [("", 0.0)], n = 86},
koData =
ClassData{prior = -3.1135153092103742, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("<part-of-day> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("part of daysintersect by \",\", \"of\", \"from\", \"'s\"",
-2.4849066497880004),
("part of daysintersect", -2.4849066497880004),
("hourday", -0.9808292530117262),
("part of daysthe <day-of-month> (ordinal)",
-2.4849066497880004),
("part of daysthe <day-of-month> (number)",
-2.4849066497880004),
("part of daysthis <time>", -2.4849066497880004),
("part of daysthe <day-of-month> (ordinal or number) of <named-month>",
-2.4849066497880004),
("part of daysChristmas", -2.0794415416798357)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("from <time> for <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm<integer> <unit-of-duration>",
-1.7047480922384253),
("dayday", -1.2992829841302609),
("hourminute", -1.7047480922384253),
("intersect<integer> <unit-of-duration>", -1.7047480922384253),
("<day-of-month> (ordinal or number) <named-month><integer> <unit-of-duration>",
-1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <day-of-week> from <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("integer (numeric)Fridaynow", -2.2512917986064953),
("integer (0..19)Tuesdaynow", -2.2512917986064953),
("integer (0..19)Sundaynow", -2.2512917986064953),
("integer (0..19)Fridaynow", -2.2512917986064953),
("daynograin", -0.9985288301111273),
("integer (numeric)Sundaynow", -2.2512917986064953),
("integer (numeric)Tuesdaynow", -2.2512917986064953)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("Shrove Tuesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = -6.899287148695143e-2,
unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14},
koData =
ClassData{prior = -2.70805020110221, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<day-of-month> (ordinal or number) of <named-month>",
Classifier{okData =
ClassData{prior = -0.7339691750802004,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -2.740840023925201),
("ordinals (first..twentieth,thirtieth,...)March",
-1.6422277352570913),
("ordinal (digits)February", -2.740840023925201),
("integer (numeric)February", -2.3353749158170367),
("month", -0.8690378470236094),
("ordinal (digits)March", -2.3353749158170367),
("integer (numeric)July", -2.740840023925201)],
n = 12},
koData =
ClassData{prior = -0.6539264674066639,
unseen = -3.5263605246161616,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.550597412411167),
("ordinal (digits)February", -2.803360380906535),
("month", -0.8574502318512216),
("integer (numeric)July", -1.550597412411167)],
n = 13}}),
("this <part-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("hour", -0.6931471805599453),
("part of days", -0.6931471805599453)],
n = 14},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Kaanum Pongal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Yom Kippur",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> minutes to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (numeric)minute (grain)time-of-day (latent)",
-0.6931471805599453),
("minutehour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Rabindra Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("powers of tens",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Pargat Diwas",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Friday",
Classifier{okData =
ClassData{prior = -0.3184537311185346, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -1.2992829841302609,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("in|during the <part-of-day>",
Classifier{okData =
ClassData{prior = -6.899287148695143e-2,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("early morning", -2.740840023925201),
("hour", -0.7259370033829361),
("part of days", -0.7949298748698876)],
n = 14},
koData =
ClassData{prior = -2.70805020110221, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("hour", -0.916290731874155),
("part of days", -0.916290731874155)],
n = 1}}),
("St Patrick's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods = HashMap.fromList [("", 0.0)], n = 22},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh(:mm) - <time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.10536051565782628,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0116009116784799),
("hh:mm", -1.9924301646902063), ("hour", -1.0116009116784799),
("minute", -1.9924301646902063)],
n = 9},
koData =
ClassData{prior = -2.3025850929940455,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0986122886681098),
("hour", -1.0986122886681098)],
n = 1}}),
("this|last|next <cycle>",
Classifier{okData =
ClassData{prior = -0.42050298547270487,
unseen = -5.241747015059643,
likelihoods =
HashMap.fromList
[("week", -1.2474579162656747),
("month (grain)", -2.3460702049337847),
("year (grain)", -2.528391761727739),
("week (grain)", -1.2474579162656747),
("quarter", -3.6270040503958487), ("year", -2.528391761727739),
("month", -2.3460702049337847),
("quarter (grain)", -3.6270040503958487)],
n = 88},
koData =
ClassData{prior = -1.0691984034618165, unseen = -4.653960350157523,
likelihoods =
HashMap.fromList
[("week", -1.8111775550851565),
("month (grain)", -2.4471663218051534),
("year (grain)", -3.0349529867072724),
("second", -3.0349529867072724),
("week (grain)", -1.8111775550851565),
("day", -2.2464956263430023), ("quarter", -3.0349529867072724),
("year", -3.0349529867072724),
("second (grain) ", -3.0349529867072724),
("month", -2.4471663218051534),
("quarter (grain)", -3.0349529867072724),
("day (grain)", -2.2464956263430023)],
n = 46}}),
("Simchat Torah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Eve",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)year (latent)",
-0.6931471805599453),
("quarteryear", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Raksha Bandhan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ashura",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ratha-Yatra",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Palm Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Eid al-Adha",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("by <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -1.9459101490553135),
("time-of-day (latent)", -1.9459101490553135),
("<time-of-day> am|pm", -1.9459101490553135),
("hh:mm", -1.9459101490553135), ("hour", -1.540445040947149),
("minute", -1.540445040947149)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.4818380868927383,
unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-1.5260563034950494),
("ordinal (digits)", -0.24512245803298496)],
n = 21},
koData =
ClassData{prior = -0.9614111671546247, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-0.6286086594223742),
("ordinal (digits)", -0.7621400520468967)],
n = 13}}),
("last weekend of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("October", -0.9555114450274363), ("July", -1.8718021769015913),
("month", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (number)",
Classifier{okData =
ClassData{prior = -0.8649974374866046,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 8},
koData =
ClassData{prior = -0.5465437063680699,
unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 11}}),
("Lag BaOmer",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14}}),
("Guru Ravidass Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Sunday",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -1.0986122886681098,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Chinese New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("February",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = -7.696104113612832e-2,
unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -2.6026896854443837,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("last|this|next <season>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> quarter",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.6931471805599453),
("quarter", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.6931471805599453),
("quarter", -0.6931471805599453)],
n = 1}}),
("Orthodox Good Friday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("time-of-day (latent)",
Classifier{okData =
ClassData{prior = -0.6970764586998348, unseen = -4.867534450455582,
likelihoods =
HashMap.fromList
[("integer (numeric)", -9.763846956391606e-2),
("integer (0..19)", -2.374905754573672)],
n = 127},
koData =
ClassData{prior = -0.689233281238809, unseen = -4.875197323201151,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.15800424914324832),
("integer (0..19)", -1.923095471289142)],
n = 128}}),
("beginning of year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("daymonth", -0.916290731874155),
("SundayMarch", -2.0149030205422647),
("MondayMarch", -2.0149030205422647),
("FridayOctober", -1.6094379124341003),
("Sundayintersect", -2.0149030205422647)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -0.6164133863285559, unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("week", -2.6642475784438173),
("integer (0..19)year (grain)", -3.22386336637924),
("integer (numeric)day (grain)", -2.6642475784438173),
("integer (0..19)second (grain) ", -3.917010546939185),
("integer (0..19)hour (grain)", -3.1060803307228566),
("second", -3.5115454388310208),
("integer (numeric)second (grain) ", -4.204692619390966),
("integer (numeric)year (grain)", -3.3573947590037623),
("day", -2.412933150162911), ("year", -2.6642475784438173),
("integer (numeric)week (grain)", -3.22386336637924),
("integer (0..19)month (grain)", -3.6938669956249752),
("integer (20..90)minute (grain)", -4.61015772749913),
("hour", -2.738355550597539), ("month", -3.3573947590037623),
("integer (numeric)minute (grain)", -2.5952547069568657),
("integer (0..19)minute (grain)", -3.22386336637924),
("integer (numeric)month (grain)", -4.204692619390966),
("minute", -2.167810692129926),
("integer (numeric)hour (grain)", -3.6938669956249752),
("integer (0..19)day (grain)", -3.6938669956249752),
("integer (0..19)week (grain)", -3.3573947590037623)],
n = 88},
koData =
ClassData{prior = -0.7762620872704519, unseen = -5.170483995038151,
likelihoods =
HashMap.fromList
[("week", -3.5553480614894135),
("integer (0..19)year (grain)", -4.0661736852554045),
("integer (numeric)day (grain)", -4.0661736852554045),
("integer (numeric)quarter (grain)", -4.0661736852554045),
("integer (numeric)year (grain)", -4.0661736852554045),
("day", -3.5553480614894135), ("quarter", -3.5553480614894135),
("year", -3.5553480614894135),
("integer (numeric)week (grain)", -4.0661736852554045),
("integer (0..19)month (grain)", -4.0661736852554045),
("hour", -1.157452788691043), ("month", -3.5553480614894135),
("integer (numeric)minute (grain)", -4.471638793363569),
("integer (numeric)month (grain)", -4.0661736852554045),
("minute", -4.471638793363569),
("integer (numeric)hour (grain)", -1.157452788691043),
("integer (0..19)day (grain)", -4.0661736852554045),
("integer (0..19)week (grain)", -4.0661736852554045),
("integer (0..19)quarter (grain)", -4.0661736852554045)],
n = 75}}),
("from the <day-of-month> (ordinal or number) to the <day-of-month> (ordinal or number) <named-month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("ordinal (digits)ordinal (digits)July", -1.3217558399823195),
("integer (numeric)integer (numeric)July", -1.3217558399823195),
("month", -0.7621400520468967)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Guru Gobind Singh Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hhmm (latent)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Krishna Janmashtami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.2584829177595186, unseen = -5.697093486505405,
likelihoods =
HashMap.fromList
[("from|since|after <time>", -3.7478219897473863),
("integer after|past <hour-of-day>", -4.307437777682809),
("at <time-of-day>", -2.3978952727983707),
("<time-of-day> o'clock", -4.59511985013459),
("half after|past <hour-of-day>", -4.59511985013459),
("second", -5.000584958242754),
("hh:mm:ss", -5.000584958242754),
("<hour-of-day> <integer>", -4.084294226368599),
("<integer> minutes to|till|before <hour-of-day>",
-5.000584958242754),
("time-of-day (latent)", -1.6863989535702288),
("hhmm (latent)", -5.000584958242754),
("hh:mm", -2.3978952727983707),
("quarter after|past <hour-of-day>", -3.4965075614664802),
("until <time>", -4.307437777682809),
("about|exactly <time-of-day>", -5.000584958242754),
("hour", -1.2393848425491918),
("<time-of-day> sharp|exactly", -5.000584958242754),
("minute", -1.7047480922384253)],
n = 139},
koData =
ClassData{prior = -1.4793847841859027, unseen = -4.624972813284271,
likelihoods =
HashMap.fromList
[("<integer> to|till|before <hour-of-day>", -3.228826155721369),
("from|since|after <time>", -3.228826155721369),
("at <time-of-day>", -3.5165082281731497),
("time-of-day (latent)", -1.1811333123561132),
("hh:mm", -3.5165082281731497),
("until <time>", -3.9219733362813143),
("hour", -1.0316015783851495), ("minute", -2.6692103677859462)],
n = 41}}),
("Yom Ha'atzmaut",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("on <day>",
Classifier{okData =
ClassData{prior = -0.2231435513142097, unseen = -4.04305126783455,
likelihoods =
HashMap.fromList
[("Thursday", -1.8281271133989299),
("absorption of , after named day", -2.639057329615259),
("intersect", -2.639057329615259),
("Saturday", -2.639057329615259),
("Friday", -2.9267394020670396), ("day", -0.8064758658669484),
("the <day-of-month> (ordinal)", -2.9267394020670396),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-2.639057329615259)],
n = 24},
koData =
ClassData{prior = -1.6094379124341003, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("intersect", -1.6094379124341003),
("day", -1.0498221244986778),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-1.6094379124341003)],
n = 6}}),
("Thiru Onam",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Thai Pongal",
Classifier{okData =
ClassData{prior = -0.5596157879354228, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.8472978603872037,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("Vijayadashami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("part of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("March", -0.6931471805599453), ("month", -0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("a <unit-of-duration>",
Classifier{okData =
ClassData{prior = -1.0296194171811581,
unseen = -3.5553480614894135,
likelihoods =
HashMap.fromList
[("week", -2.1400661634962708),
("year (grain)", -2.4277482359480516),
("second", -2.833213344056216),
("week (grain)", -2.1400661634962708),
("day", -2.1400661634962708),
("minute (grain)", -2.833213344056216),
("year", -2.4277482359480516),
("second (grain) ", -2.833213344056216),
("minute", -2.833213344056216),
("day (grain)", -2.1400661634962708)],
n = 10},
koData =
ClassData{prior = -0.4418327522790392,
unseen = -3.9318256327243257,
likelihoods =
HashMap.fromList
[("hour (grain)", -2.3025850929940455),
("quarter", -1.3470736479666092),
("minute (grain)", -2.8134107167600364),
("hour", -2.3025850929940455),
("quarter (grain)", -1.3470736479666092),
("minute", -2.8134107167600364)],
n = 18}}),
("at the beginning|end of <year>",
Classifier{okData =
ClassData{prior = -0.13353139262452263,
unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("year (latent)", -1.2237754316221157),
("this|last|next <cycle>", -1.4469189829363254),
("year", -0.7537718023763802)],
n = 7},
koData =
ClassData{prior = -2.0794415416798357, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("this|last|next <cycle>", -0.916290731874155),
("year", -0.916290731874155)],
n = 1}}),
("Dhanteras",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tu BiShvat",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Whit Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm",
Classifier{okData =
ClassData{prior = -0.12260232209233239,
unseen = -4.2626798770413155,
likelihoods = HashMap.fromList [("", 0.0)], n = 69},
koData =
ClassData{prior = -2.159484249353372, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("Holi",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("upcoming <integer> <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..19)year (grain)", -2.908720896564361),
("integer (numeric)day (grain)", -2.908720896564361),
("integer (numeric)quarter (grain)", -2.908720896564361),
("integer (numeric)year (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("quarter", -2.3978952727983707),
("year", -2.3978952727983707),
("integer (numeric)week (grain)", -2.908720896564361),
("integer (0..19)month (grain)", -2.908720896564361),
("month", -2.3978952727983707),
("integer (numeric)month (grain)", -2.908720896564361),
("integer (0..19)day (grain)", -2.908720896564361),
("integer (0..19)week (grain)", -2.908720896564361),
("integer (0..19)quarter (grain)", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("Rosh Hashanah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <named-day> ago|back",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("integer (numeric)Thursday", -0.6931471805599453),
("day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> upcoming <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..19)year (grain)", -2.908720896564361),
("integer (numeric)day (grain)", -2.908720896564361),
("integer (numeric)quarter (grain)", -2.908720896564361),
("integer (numeric)year (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("quarter", -2.3978952727983707),
("year", -2.3978952727983707),
("integer (numeric)week (grain)", -2.908720896564361),
("integer (0..19)month (grain)", -2.908720896564361),
("month", -2.3978952727983707),
("integer (numeric)month (grain)", -2.908720896564361),
("integer (0..19)day (grain)", -2.908720896564361),
("integer (0..19)week (grain)", -2.908720896564361),
("integer (0..19)quarter (grain)", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("Holika Dahan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("second (grain) ",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("ordinals (first..twentieth,thirtieth,...)",
Classifier{okData =
ClassData{prior = -3.077165866675366e-2,
unseen = -3.5263605246161616,
likelihoods = HashMap.fromList [("", 0.0)], n = 32},
koData =
ClassData{prior = -3.4965075614664802,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("Mawlid",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Jumu'atul-Wida",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> after|before|from|past <time>",
Classifier{okData =
ClassData{prior = -0.7691330875378672, unseen = -4.189654742026425,
likelihoods =
HashMap.fromList
[("a <unit-of-duration>now", -3.481240089335692),
("<integer> <unit-of-duration>hhhmm", -3.481240089335692),
("a <unit-of-duration>Christmas", -3.481240089335692),
("minuteminute", -3.481240089335692),
("dayday", -3.481240089335692),
("<integer> <unit-of-duration>today", -3.481240089335692),
("<integer> <unit-of-duration>time-of-day (latent)",
-2.382627800667582),
("minutehour", -1.7764919970972666),
("daysecond", -3.481240089335692),
("a <unit-of-duration>right now", -3.481240089335692),
("minutenograin", -3.481240089335692),
("<integer> <unit-of-duration>Christmas", -3.481240089335692),
("<integer> <unit-of-duration>Easter Sunday",
-3.481240089335692),
("secondnograin", -3.481240089335692),
("<integer> <unit-of-duration><time-of-day> am|pm",
-2.5649493574615367),
("yearday", -2.7880929087757464),
("<integer> <unit-of-duration>noon|midnight|EOD|end of day",
-3.481240089335692),
("daynograin", -3.481240089335692),
("<integer> <unit-of-duration>now", -3.0757749812275272)],
n = 19},
koData =
ClassData{prior = -0.6225296133459919, unseen = -4.276666119016055,
likelihoods =
HashMap.fromList
[("quarterhour", -1.318240897874875),
("dayhour", -3.56953269648137),
("<integer> <unit-of-duration>intersect", -3.56953269648137),
("<integer> <unit-of-duration><day-of-month> (ordinal)",
-3.56953269648137),
("a <unit-of-duration><time-of-day> am|pm", -2.065455299705096),
("a <unit-of-duration>time-of-day (latent)",
-2.065455299705096),
("dayday", -2.8763855159214247),
("<integer> <unit-of-duration>time-of-day (latent)",
-3.56953269648137),
("a <unit-of-duration>noon|midnight|EOD|end of day",
-3.164067588373206),
("<integer> <unit-of-duration><day-of-month> (ordinal or number) <named-month>",
-3.56953269648137)],
n = 22}}),
("nth <day-of-week> of <month-or-greater>",
Classifier{okData =
ClassData{prior = -0.2876820724517809, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("daymonth", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-2.5257286443082556),
("ordinals (first..twentieth,thirtieth,...)Wednesdayintersect",
-2.120263536200091),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-1.6094379124341003),
("ordinals (first..twentieth,thirtieth,...)TuesdayOctober",
-2.120263536200091)],
n = 9},
koData =
ClassData{prior = -1.3862943611198906, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("daymonth", -1.1786549963416462),
("ordinals (first..twentieth,thirtieth,...)TuesdaySeptember",
-1.8718021769015913),
("ordinals (first..twentieth,thirtieth,...)WednesdayOctober",
-1.466337068793427)],
n = 3}}),
("quarter after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -2.03688192726104),
("time-of-day (latent)", -0.9382696385929302),
("hour", -0.7375989431307791)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("until <time>",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("year (latent)", -2.159484249353372),
("time-of-day (latent)", -2.5649493574615367),
("year", -2.159484249353372),
("<time-of-day> am|pm", -1.8718021769015913),
("hh:mm", -2.159484249353372), ("hour", -2.159484249353372),
("minute", -1.6486586255873816)],
n = 8},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -3.9318256327243257,
likelihoods =
HashMap.fromList
[("intersect", -2.3025850929940455),
("yesterday", -2.8134107167600364),
("day", -2.8134107167600364),
("time-of-day (latent)", -1.6094379124341003),
("<time-of-day> am|pm", -3.2188758248682006),
("hh:mm", -2.3025850929940455), ("hour", -1.4271163556401458),
("minute", -1.8325814637483102)],
n = 20}}),
("the <cycle> after|before <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("day (grain)tomorrow", -1.252762968495368),
("dayday", -0.8472978603872037),
("day (grain)yesterday", -1.252762968495368)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("about|exactly <time-of-day>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.6109179126442243,
likelihoods =
HashMap.fromList
[("week", -1.791759469228055),
("hh(:mm) - <time-of-day> am|pm", -2.890371757896165),
("this|last|next <cycle>", -1.791759469228055),
("day", -2.4849066497880004),
("time-of-day (latent)", -2.890371757896165),
("hhmm (latent)", -2.4849066497880004),
("<time-of-day> am|pm", -2.890371757896165),
("hour", -2.1972245773362196),
("next <time>", -2.890371757896165),
("this|next <day-of-week>", -2.890371757896165),
("minute", -2.4849066497880004)],
n = 12},
koData =
ClassData{prior = -1.9459101490553135, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dd/mm", -2.0794415416798357), ("day", -2.0794415416798357),
("time-of-day (latent)", -2.0794415416798357),
("hour", -2.0794415416798357)],
n = 2}}),
("Sukkot",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day> in <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("monthyear", -1.2992829841302609),
("Marcha <unit-of-duration>", -1.7047480922384253),
("March<integer> <unit-of-duration>", -1.7047480922384253),
("Vijayadashami<integer> <unit-of-duration>",
-1.7047480922384253),
("dayyear", -1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("World Vegan Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \",\", \"of\", \"from\", \"'s\"",
Classifier{okData =
ClassData{prior = -0.5596157879354228, unseen = -5.407171771460119,
likelihoods =
HashMap.fromList
[("intersectthis|last|next <cycle>", -4.709530201312334),
("Wednesday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("dayhour", -4.304065093204169),
("daymonth", -2.001480000210124),
("<named-month> <day-of-month> (non ordinal)Friday",
-4.709530201312334),
("Friday<named-month> <day-of-month> (non ordinal)",
-3.4567672328169663),
("Wednesdayintersect", -4.709530201312334),
("from <datetime> - <datetime> (interval)July",
-3.3232358401924436),
("Black Fridaythis|last|next <cycle>", -4.709530201312334),
("<part-of-day> of <time>February", -4.709530201312334),
("Saturday<time-of-day> am|pm", -4.709530201312334),
("Martin Luther King's Daythis|last|next <cycle>",
-4.304065093204169),
("hourmonth", -4.709530201312334),
("Fridayintersect by \",\", \"of\", \"from\" for year",
-4.0163830207523885),
("dayday", -2.458238402705839),
("the <day-of-month> (ordinal)February", -4.304065093204169),
("WednesdayOctober", -4.709530201312334),
("Wednesdaythis|last|next <cycle>", -4.304065093204169),
("intersect<named-month> <day-of-month> (non ordinal)",
-3.4567672328169663),
("dayyear", -4.0163830207523885),
("Saturday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("Thursdayhh:mm", -4.304065093204169),
("TuesdayOctober", -4.709530201312334),
("the <day-of-month> (ordinal)March", -3.6109179126442243),
("Mondaythis|last|next <cycle>", -3.7932394694381792),
("Fridayintersect", -4.0163830207523885),
("Thursday<datetime> - <datetime> (interval)",
-4.304065093204169),
("intersectOctober", -4.304065093204169),
("Thursday<time-of-day> - <time-of-day> (interval)",
-3.7932394694381792),
("Tuesdaythis|last|next <cycle>", -4.304065093204169),
("Sunday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("dayminute", -2.917770732084279),
("minuteday", -3.4567672328169663),
("this|last|next <cycle>Sunday", -4.709530201312334),
("Sundaythis|last|next <cycle>", -4.709530201312334),
("on <day><time-of-day> am|pm", -4.709530201312334),
("intersectintersect", -4.709530201312334),
("weekday", -4.709530201312334),
("dayweek", -3.6109179126442243),
("Monday<named-month> <day-of-month> (non ordinal)",
-4.304065093204169),
("<datetime> - <datetime> (interval)July", -3.7932394694381792),
("on <day><named-month> <day-of-month> (non ordinal)",
-4.0163830207523885)],
n = 76},
koData =
ClassData{prior = -0.8472978603872037, unseen = -5.220355825078324,
likelihoods =
HashMap.fromList
[("week-endJuly", -4.5217885770490405),
("week-endOctober", -3.828641396489095),
("daymonth", -1.7809485531238394),
("TuesdaySeptember", -4.5217885770490405),
("Wednesdayintersect", -4.5217885770490405),
("from <datetime> - <datetime> (interval)July",
-3.4231762883809305),
("from <time-of-day> - <time-of-day> (interval)July",
-4.5217885770490405),
("hournograin", -4.116323468940876),
("from|since|after <time>July", -4.116323468940876),
("hourmonth", -2.575878427993727),
("Fridaythis|last|next <cycle>", -4.5217885770490405),
("SundayFebruary", -4.5217885770490405),
("on <day>September", -3.828641396489095),
("WednesdayOctober", -4.5217885770490405),
("intersectnow", -4.116323468940876),
("week-endintersect", -4.5217885770490405),
("dayyear", -4.5217885770490405),
("FridayJuly", -3.6054978451748854),
("FridaySeptember", -4.116323468940876),
("the <day-of-month> (ordinal)July", -3.6054978451748854),
("WednesdayFebruary", -4.5217885770490405),
("minutemonth", -3.0177111802727663),
("Mondaythis|last|next <cycle>", -4.5217885770490405),
("SundayMarch", -4.5217885770490405),
("MondayFebruary", -4.116323468940876),
("Fridayintersect", -4.5217885770490405),
("intersectOctober", -4.5217885770490405),
("dayminute", -4.5217885770490405),
("SaturdaySeptember", -4.5217885770490405),
("intersectSeptember", -3.1354942159291497),
("Tuesdaynow", -4.116323468940876),
("MondayMarch", -4.5217885770490405),
("FridayOctober", -4.5217885770490405),
("daynograin", -4.116323468940876),
("<integer> to|till|before <hour-of-day>July",
-4.5217885770490405),
("Tuesdayintersect", -4.5217885770490405),
("<time-of-day> - <time-of-day> (interval)July",
-4.5217885770490405),
("<datetime> - <datetime> (interval)July", -4.5217885770490405),
("Sundayintersect", -4.5217885770490405)],
n = 57}}),
("last <time>",
Classifier{okData =
ClassData{prior = -0.7537718023763802, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("Martin Luther King's Day", -2.5649493574615367),
("day", -1.1786549963416462), ("Sunday", -2.5649493574615367),
("Chinese New Year", -1.8718021769015913),
("Easter Sunday", -2.5649493574615367),
("hour", -2.5649493574615367), ("Tuesday", -2.5649493574615367),
("week-end", -2.5649493574615367)],
n = 8},
koData =
ClassData{prior = -0.6359887667199967, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("Monday", -2.639057329615259), ("Friday", -2.2335922215070942),
("day", -1.7227665977411035), ("Sunday", -2.639057329615259),
("hour", -1.540445040947149), ("week-end", -1.540445040947149)],
n = 9}}),
("March",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-month>|<named-day> <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("Octoberordinal (digits)", -2.0149030205422647),
("Thursdayordinal (digits)", -2.70805020110221),
("day", -2.3025850929940455),
("Augustordinal (digits)", -2.70805020110221),
("Marchordinals (first..twentieth,thirtieth,...)",
-2.3025850929940455),
("Tuesdayordinal (digits)", -2.70805020110221),
("Octoberordinals (first..twentieth,thirtieth,...)",
-2.70805020110221),
("month", -1.2039728043259361),
("Marchordinal (digits)", -2.70805020110221)],
n = 10},
koData =
ClassData{prior = -1.791759469228055, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("from|since|after <time>ordinal (digits)",
-1.9459101490553135),
("Augustordinal (digits)", -1.9459101490553135),
("month", -1.540445040947149)],
n = 2}}),
("Clean Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.6325225587435105, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-1.3350010667323402),
("ordinal (digits)", -0.3053816495511819)],
n = 17},
koData =
ClassData{prior = -0.7576857016975165, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-0.5306282510621704),
("ordinal (digits)", -0.8873031950009028)],
n = 15}}),
("<time> (timezone)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Easter Sunday",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("Christmas",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime> - <datetime> (interval) timezone",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("minuteminute", -0.8109302162163288),
("hh:mmhh:mm", -1.5040773967762742),
("hhhmmhhhmm", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Isra and Mi'raj",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal or number) of <month>",
Classifier{okData =
ClassData{prior = -0.5679840376059393, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -3.068052935133617),
("ordinals (first..twentieth,thirtieth,...)March",
-1.9694406464655074),
("ordinal (digits)this|last|next <cycle>", -2.374905754573672),
("integer (numeric)this|last|next <cycle>", -2.662587827025453),
("ordinal (digits)February", -3.068052935133617),
("integer (numeric)February", -2.662587827025453),
("month", -0.8708283577973976),
("ordinal (digits)March", -2.662587827025453),
("integer (numeric)July", -3.068052935133617)],
n = 17},
koData =
ClassData{prior = -0.8362480242006186, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.6094379124341003),
("ordinal (digits)February", -2.8622008809294686),
("month", -0.916290731874155),
("integer (numeric)July", -1.6094379124341003)],
n = 13}}),
("decimal number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<day-of-month>(ordinal or number)/<named-month>/year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)April", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Naraka Chaturdashi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyyqq",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("beginning of month",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("Martin Luther King's Day", -2.0149030205422647),
("Halloween", -2.70805020110221),
("Boss's Day", -2.70805020110221),
("Monday", -2.3025850929940455), ("day", -1.0986122886681098),
("March", -2.70805020110221), ("month", -2.70805020110221),
("Tuesday", -2.3025850929940455)],
n = 10},
koData =
ClassData{prior = -1.791759469228055, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("Wednesday", -1.9459101490553135),
("Saturday", -1.9459101490553135), ("day", -1.540445040947149)],
n = 2}}),
("<time-of-day> sharp|exactly",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.1400661634962708),
("time-of-day (latent)", -2.1400661634962708),
("hhmm (latent)", -2.1400661634962708),
("<time-of-day> am|pm", -2.1400661634962708),
("hh:mm", -2.1400661634962708), ("hour", -1.7346010553881064),
("minute", -1.4469189829363254)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> closest <day> to <time>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)Christmastoday",
-1.791759469228055),
("dayday", -1.0986122886681098),
("ordinal (digits)Christmastoday", -1.791759469228055)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -1.3862943611198906),
("ordinals (first..twentieth,thirtieth,...)MondayOctober",
-1.3862943611198906)],
n = 1}}),
("Islamic New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Lent",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("negative numbers",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.713572066704308,
likelihoods =
HashMap.fromList
[("integer (numeric)", -5.129329438755058e-2),
("integer (0..19)", -2.995732273553991)],
n = 38}}),
("about|exactly <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("half a <time-grain>", -0.6931471805599453),
("minute", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Purim",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<time> before last|after next",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("Wednesday", -1.8718021769015913),
("Friday", -1.466337068793427), ("day", -1.1786549963416462),
("March", -1.8718021769015913), ("month", -1.8718021769015913)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("by the end of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("this|last|next <cycle>", -0.8109302162163288),
("year", -1.5040773967762742), ("month", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("half an hour (abbrev).",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hhmm (military) am|pm",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -1.1631508098056809, unseen = -4.770684624465665,
likelihoods =
HashMap.fromList
[("intersecthh:mm", -3.6635616461296463),
("from|since|after <time>hh:mm", -3.152736022363656),
("minuteminute", -1.5841201044498106),
("<time> timezone<time> timezone", -3.3758795736778655),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.6635616461296463),
("hh:mmhh:mm", -2.5649493574615367),
("dayday", -2.5649493574615367),
("hhhmmhhhmm", -3.152736022363656),
("hourhour", -3.6635616461296463),
("<named-month> <day-of-month> (non ordinal)<named-month> <day-of-month> (non ordinal)",
-3.6635616461296463),
("intersect by \",\", \"of\", \"from\", \"'s\"hh:mm",
-3.6635616461296463),
("<time-of-day> am|pmhh:mm:ss", -4.069026754237811),
("hoursecond", -3.6635616461296463),
("<time-of-day> am|pm<time-of-day> am|pm", -3.3758795736778655),
("from|since|after <time>the <day-of-month> (ordinal)",
-3.152736022363656)],
n = 35},
koData =
ClassData{prior = -0.3746934494414107, unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("hhhmm<time> timezone", -4.204692619390966),
("<time> timezonehhhmm", -4.204692619390966),
("hourday", -3.00071981506503),
("<named-month> <day-of-month> (non ordinal)July",
-4.61015772749913),
("dayhour", -4.61015772749913),
("daymonth", -3.6938669956249752),
("from|since|after <time><day-of-month> (ordinal or number) of <month>",
-3.6938669956249752),
("<time-of-day> am|pmintersect", -4.61015772749913),
("MondayOctober", -4.204692619390966),
("from|since|after <time>hh:mm", -4.61015772749913),
("hh:mm<time-of-day> am|pm", -3.5115454388310208),
("hhhmmintersect", -4.61015772749913),
("minuteminute", -2.167810692129926),
("from|since|after <time><time-of-day> am|pm",
-3.917010546939185),
("hh:mmhh:mm", -3.917010546939185),
("dayday", -1.8067973465925955),
("from|since|after <time><day-of-month> (ordinal or number) <named-month>",
-3.6938669956249752),
("hhhmmhhhmm", -4.204692619390966),
("hourhour", -3.6938669956249752),
("hourminute", -4.61015772749913),
("minutehour", -3.6938669956249752),
("<time> timezonehh:mm", -4.61015772749913),
("hh:mm<time> timezone", -4.61015772749913),
("the <day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.61015772749913),
("from|since|after <time><day-of-month> (ordinal or number) of <named-month>",
-3.6938669956249752),
("hh:mmintersect", -3.6938669956249752),
("<named-month> <day-of-month> (non ordinal)August",
-4.61015772749913),
("Christmastoday", -3.917010546939185),
("about|exactly <time-of-day><time-of-day> am|pm",
-4.61015772749913),
("from|since|after <time>intersect", -3.1060803307228566),
("from|since|after <time>intersect by \",\", \"of\", \"from\", \"'s\"",
-3.6938669956249752),
("Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-4.204692619390966),
("<time-of-day> am|pmhh:mm", -4.61015772749913),
("from|since|after <time>the <day-of-month> (ordinal or number) of <named-month>",
-3.1060803307228566),
("the <day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.61015772749913),
("from|since|after <time>the <day-of-month> (ordinal)",
-4.61015772749913),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.204692619390966),
("Monday<named-month> <day-of-month> (non ordinal)",
-4.61015772749913),
("the <day-of-month> (ordinal)intersect", -4.204692619390966)],
n = 77}}),
("Tuesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Day",
Classifier{okData =
ClassData{prior = -1.8718021769015913,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.1670540846631662,
unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("", 0.0)], n = 11}}),
("fortnight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> and an half hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Laylat al-Qadr",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Boghi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("at the beginning|end of <named-month>",
Classifier{okData =
ClassData{prior = -0.2231435513142097,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("January", -1.3862943611198906),
("April", -1.3862943611198906), ("month", -0.8754687373538999)],
n = 4},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("October", -1.0986122886681098),
("month", -1.0986122886681098)],
n = 1}}),
("<time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -1.116469906068116, unseen = -4.672828834461907,
likelihoods =
HashMap.fromList
[("from|since|after <time>hh:mm", -3.054001181677967),
("hh:mmtime-of-day (latent)", -3.2771447329921766),
("minuteminute", -1.667706820558076),
("<time> timezone<time> timezone", -3.2771447329921766),
("from|since|after <time><time-of-day> am|pm",
-3.970291913552122),
("hh:mmhh:mm", -2.466214516775848),
("<time-of-day> am|pmtime-of-day (latent)",
-3.2771447329921766),
("hhhmmhhhmm", -3.054001181677967),
("hourhour", -2.466214516775848),
("minutehour", -2.466214516775848),
("<time-of-day> am|pmhh:mm:ss", -3.970291913552122),
("hhhmmtime-of-day (latent)", -3.2771447329921766),
("hoursecond", -3.5648268054439574),
("from|since|after <time>time-of-day (latent)",
-3.2771447329921766),
("<time-of-day> am|pm<time-of-day> am|pm", -3.054001181677967)],
n = 37},
koData =
ClassData{prior = -0.3966544784260094, unseen = -5.220355825078324,
likelihoods =
HashMap.fromList
[("hhhmm<time> timezone", -4.116323468940876),
("<time> timezonehhhmm", -4.116323468940876),
("about|exactly <time-of-day>time-of-day (latent)",
-4.5217885770490405),
("until <time>time-of-day (latent)", -3.2690256085536724),
("from|since|after <time>hh:mm", -4.5217885770490405),
("hh:mmtime-of-day (latent)", -2.5068855565067754),
("hh:mm<time-of-day> am|pm", -3.2690256085536724),
("minuteminute", -2.4423470353692043),
("from|since|after <time><time-of-day> am|pm",
-3.6054978451748854),
("hh:mmhh:mm", -3.828641396489095),
("<time-of-day> am|pmtime-of-day (latent)", -4.116323468940876),
("hhhmmhhhmm", -4.116323468940876),
("hourhour", -2.03688192726104),
("from|since|after <time><integer> to|till|before <hour-of-day>",
-3.828641396489095),
("hourminute", -3.4231762883809305),
("minutehour", -1.6885752329928243),
("<time> timezonehh:mm", -4.5217885770490405),
("hh:mm<time> timezone", -4.5217885770490405),
("hhhmmtime-of-day (latent)", -3.828641396489095),
("until <time><time-of-day> am|pm", -4.116323468940876),
("about|exactly <time-of-day><time-of-day> am|pm",
-4.5217885770490405),
("<time-of-day> am|pmhh:mm", -4.5217885770490405),
("<part-of-day> at <time-of-day>time-of-day (latent)",
-3.828641396489095),
("from|since|after <time>time-of-day (latent)",
-3.0177111802727663),
("at <time-of-day>time-of-day (latent)", -3.828641396489095),
("<time> timezonetime-of-day (latent)", -3.828641396489095),
("<integer> to|till|before <hour-of-day><time-of-day> am|pm",
-4.5217885770490405),
("<integer> to|till|before <hour-of-day>time-of-day (latent)",
-3.828641396489095)],
n = 76}}),
("winter",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> after <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("dayday", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("dayday", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)TuesdayChristmas",
-0.916290731874155)],
n = 1}}),
("Ugadi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-month> <day-of-month> (non ordinal)",
Classifier{okData =
ClassData{prior = -0.3646431135879093, unseen = -4.0943445622221,
likelihoods =
HashMap.fromList
[("Augustinteger (numeric)", -2.691243082785829),
("Marchinteger (numeric)", -2.9789251552376097),
("Aprilinteger (numeric)", -3.3843902633457743),
("month", -0.8194409058842375),
("Februaryinteger (numeric)", -2.1316272948504063),
("Septemberinteger (numeric)", -2.691243082785829),
("Octoberinteger (numeric)", -2.691243082785829),
("Julyinteger (numeric)", -1.9980959022258835)],
n = 25},
koData =
ClassData{prior = -1.1856236656577395,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("Augustinteger (numeric)", -2.3353749158170367),
("Marchinteger (numeric)", -2.740840023925201),
("Aprilinteger (numeric)", -2.740840023925201),
("month", -0.9490805546971459),
("from|since|after <time>integer (numeric)",
-2.3353749158170367),
("Julyinteger (numeric)", -1.6422277352570913)],
n = 11}}),
("Diwali",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("last night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this|next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("Thursday", -2.3978952727983707),
("Wednesday", -2.3978952727983707),
("Saturday", -2.3978952727983707),
("Monday", -1.7047480922384253), ("day", -0.8938178760220964),
("Tuesday", -1.9924301646902063)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.143134726391533,
likelihoods = HashMap.fromList [("", 0.0)], n = 61},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter (grain)",
Classifier{okData =
ClassData{prior = -0.4700036292457356,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -0.9808292530117262, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12}}),
("last <cycle> of <time>",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("day (grain)October", -1.9924301646902063),
("daymonth", -1.4816045409242156),
("day (grain)intersect", -1.9924301646902063),
("weekmonth", -1.9924301646902063),
("week (grain)intersect", -2.3978952727983707),
("week (grain)September", -2.3978952727983707)],
n = 6},
koData =
ClassData{prior = -0.5108256237659907, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("daymonth", -2.2335922215070942),
("day (grain)May", -2.2335922215070942),
("week (grain)year (latent)", -1.9459101490553135),
("weekmonth", -1.7227665977411035),
("week (grain)October", -2.2335922215070942),
("weekyear", -1.9459101490553135),
("week (grain)intersect", -2.2335922215070942)],
n = 9}}),
("Chhath",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Vasant Panchami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month> year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)April", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> last <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)week (grain)year (latent)",
-2.1972245773362196),
("daymonth", -2.1972245773362196),
("ordinal (digits)day (grain)May", -2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.1972245773362196),
("weekmonth", -1.791759469228055),
("ordinal (digits)week (grain)year (latent)",
-2.1972245773362196),
("weekyear", -1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.1972245773362196)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("Black Friday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Great Lent",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Maundy Thursday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6888794541139363,
likelihoods = HashMap.fromList [("", 0.0)], n = 38},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Vaisakhi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("right now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("for <duration> from <time>",
Classifier{okData =
ClassData{prior = -0.3364722366212129, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("<integer> <unit-of-duration>intersect", -2.2512917986064953),
("<integer> <unit-of-duration><day-of-month> (ordinal)",
-2.2512917986064953),
("<integer> + '\"from|since|after <time>", -2.2512917986064953),
("dayday", -1.55814461804655),
("minutehour", -1.845826690498331),
("<integer> + '\"<time-of-day> am|pm", -2.2512917986064953),
("<integer> <unit-of-duration><day-of-month> (ordinal or number) <named-month>",
-2.2512917986064953)],
n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("dayhour", -1.8718021769015913),
("<integer> + '\"from|since|after <time>", -1.8718021769015913),
("<integer> <unit-of-duration>time-of-day (latent)",
-1.8718021769015913),
("minutehour", -1.8718021769015913)],
n = 2}}),
("compose by multiplication",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList [("integer (0..19)powers of tens", 0.0)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("end of year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("between <time> and <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("minuteminute", -1.791759469228055),
("hh:mmhh:mm", -2.1972245773362196),
("<time-of-day> am|pmtime-of-day (latent)",
-2.1972245773362196),
("hhhmmhhhmm", -2.1972245773362196),
("minutehour", -1.791759469228055),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.1972245773362196)],
n = 4},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -2.1972245773362196),
("hhhmmintersect", -2.1972245773362196),
("minuteminute", -1.791759469228055),
("minutehour", -1.791759469228055),
("hh:mmintersect", -2.1972245773362196),
("hhhmmtime-of-day (latent)", -2.1972245773362196)],
n = 4}}),
("<month> dd-dd (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("from|since|after <time>integer (numeric)integer (numeric)",
-2.740840023925201),
("from|since|after <time>ordinal (digits)ordinal (digits)",
-2.740840023925201),
("Julyinteger (numeric)integer (numeric)", -1.6422277352570913),
("Augustordinal (digits)integer (numeric)",
-2.3353749158170367),
("from|since|after <time>ordinal (digits)integer (numeric)",
-2.740840023925201),
("month", -0.8690378470236094),
("Augustordinal (digits)ordinal (digits)",
-2.3353749158170367)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \",\", \"of\", \"from\" for year",
Classifier{okData =
ClassData{prior = -7.410797215372185e-2,
unseen = -3.5263605246161616,
likelihoods =
HashMap.fromList
[("Black Fridaythis|last|next <cycle>", -2.803360380906535),
("Martin Luther King's Daythis|last|next <cycle>",
-2.3978952727983707),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-2.1102132003465894),
("dayyear", -0.8574502318512216),
("intersectyear (latent)", -2.1102132003465894),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-1.8870696490323797)],
n = 13},
koData =
ClassData{prior = -2.639057329615259, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("Fridaythis|last|next <cycle>", -1.5040773967762742),
("dayyear", -1.5040773967762742)],
n = 1}}),
("part of days",
Classifier{okData =
ClassData{prior = -4.1672696400568074e-2,
unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("", 0.0)], n = 47},
koData =
ClassData{prior = -3.1986731175506815,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("at the beginning|end of <week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.330733340286331,
likelihoods =
HashMap.fromList
[("week", -0.706570200892086),
("this|last|next <cycle>", -0.8209805520698302),
("about|exactly <time-of-day>", -2.70805020110221)],
n = 36},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Eid al-Fitr",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("summer",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Trinity Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime>/<datetime> (interval)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("secondsecond", -0.916290731874155),
("intersectintersect", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("hh:mm:ssintersect", -0.916290731874155),
("secondsecond", -0.916290731874155)],
n = 1}}),
("Mid-day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("dd-dd <month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("integer (numeric)integer (numeric)September",
-1.9924301646902063),
("ordinal (digits)ordinal (digits)July", -1.9924301646902063),
("ordinal (digits)ordinal (digits)October",
-2.3978952727983707),
("integer (numeric)integer (numeric)July", -1.9924301646902063),
("month", -0.8938178760220964),
("ordinal (digits)ordinal (digits)August",
-2.3978952727983707)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("Pentecost",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("Thursday", -2.5257286443082556),
("Martin Luther King's Day", -2.5257286443082556),
("Monday", -2.5257286443082556), ("day", -1.1394342831883648),
("Christmas", -2.5257286443082556),
("hour", -2.5257286443082556), ("winter", -2.5257286443082556),
("week-end", -2.5257286443082556),
("summer", -2.120263536200091)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [], n = 0}}),
("Shushan Purim",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("August",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}})] | facebookincubator/duckling | Duckling/Ranking/Classifiers/EN_TT.hs | bsd-3-clause | 264,531 | 0 | 15 | 128,796 | 43,570 | 27,184 | 16,386 | 4,117 | 1 |
module Dxedrine.Hlists where
import Control.Monad (forM_, replicateM_)
import Dxedrine.Words
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString.Lazy as BL
import Data.Maybe (fromMaybe)
import Data.Word (Word8(..), Word16(..))
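-- | Byte-level range specification for a field, as interpreted by 'validate':
-- 'IgnoreR' skips a fixed number of bytes, 'OneR' bounds a single 7-bit value
-- (inclusive), 'TwoR' validates the two halves of a 14-bit value separately,
-- 'EnumR' restricts a single value to an explicit set, and 'MultiR' accepts a
-- single value matching either of two ranges.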
data Range =
IgnoreR Int
| OneR Word8 Word8
| TwoR Range Range
| EnumR [Word8]
| MultiR Range Range
deriving (Show, Eq)
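-- | A concrete field value: a run of ignored bytes (by count), a single 7-bit
-- word, or a 14-bit word.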
data Value =
IgnoreV Int
| OneV Word7
| TwoV Word14
deriving (Show, Eq)
data Entry = Entry
{ _entryName :: String
, _entryRange :: Range
, _entryDefault :: Value
} deriving (Show, Eq)
newtype Hlist = Hlist
{ unHlist :: [(String, Value)]
} deriving (Show, Eq)
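-- | Check that a 'Value' is admitted by a 'Range'. For example (illustrative):
--
-- > validate (OneR 0x00 0x7F) (oneV 0x40) == Right ()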
validate :: Range -> Value -> Either String ()
validate r v =
case (r, v) of
    (OneR s e, OneV (Word7 x)) ->
if (x >= s && x <= e)
then return ()
else Left $ show x ++ " outside range [" ++ show s ++ ", " ++ show e ++ "]"
    (TwoR e1 e2, TwoV (Word14 (x, y))) -> do
_ <- validate e1 (OneV x)
_ <- validate e2 (OneV y)
return ()
    (EnumR vals, OneV (Word7 x)) ->
if (x `elem` vals)
then return ()
else Left $ show x ++ " not an element of " ++ show vals
    (MultiR e1 e2, OneV _) -> do
case validate e1 v of
Right _ -> return ()
Left r1 ->
case validate e2 v of
Right _ -> return ()
Left r2 -> Left $ "both " ++ r1 ++ " and " ++ r2
(IgnoreR i, IgnoreV j) ->
if i == j
then return ()
else Left $ "Unmatched ignore lengths: expected " ++ show i ++ " but was " ++ show j
_ -> Left "wrong byte length"
validateHlist :: [Entry] -> Hlist -> Either String ()
validateHlist es (Hlist hs) = go es
where
go [] = return ()
go (e:es) =
let r = _entryRange e
in case r of
IgnoreR _ -> go es
_ -> let n = _entryName e
in case lookup n hs of
Nothing -> Left $ "field \"" ++ n ++ "\" missing"
Just v -> do
case validate (_entryRange e) v of
Left reason -> Left $ "field \"" ++ n ++ "\" invalid: " ++ reason
_ -> go es
addDefaults :: [Entry] -> Hlist -> Hlist
addDefaults es (Hlist hs) = Hlist $ go es hs
where
go [] hs = hs
go (e:es) hs =
case (_entryRange e) of
IgnoreR i -> go es hs
_ -> let n = _entryName e
in (n, fromMaybe (_entryDefault e) (lookup n hs)):(go es hs)
defaultHlist :: [Entry] -> Hlist
defaultHlist es = addDefaults es (Hlist [])
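-- | Read one 'Value' according to an 'Entry': an 'IgnoreR' entry consumes the
-- given number of raw bytes, a 'TwoR' entry reads a 'Word14', and anything
-- else reads a 'Word7'; non-ignored values are validated against the range.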
getValue :: Entry -> Get Value
getValue e =
let r = _entryRange e
in case r of
IgnoreR i -> do
replicateM_ i getWord8
return (IgnoreV i)
TwoR _ _ -> do
w <- getWord14
let v = TwoV w
case validate r v of
Right _ -> return v
Left reason -> fail reason
_ -> do
w <- getWord7
let v = OneV w
case validate r v of
Right _ -> return v
Left reason -> fail reason
putValue :: Value -> Put
putValue v =
case v of
IgnoreV i -> replicateM_ i $ putWord8 0x00
OneV v -> putWord7 v
TwoV v -> putWord14 v
getHlist :: [Entry] -> Get Hlist
getHlist es = Hlist . reverse <$> go [] es
where
go hs [] = return hs
go hs (e:es) = do
h <- getValue e
let n = _entryName e
go ((n, h):hs) es
putHlist :: Hlist -> Put
putHlist (Hlist hs) = forM_ hs (\(_, h) -> putValue h)
packValue :: Entry -> Value -> Either String [Word7]
packValue e v =
let r = _entryRange e
in case validate r v of
Left reason -> Left reason
_ -> Right $ Word7 <$> (BL.unpack $ runPut $ putValue v)
packHlist :: [Entry] -> Hlist -> Either String [Word7]
packHlist entries hlist = do
_ <- validateHlist entries hlist
return $ Word7 <$> (BL.unpack $ runPut $ putHlist hlist)
unpackHlist :: [Entry] -> [Word7] -> Either String (Hlist, [Word7])
unpackHlist es ws =
unpack $ runGetOrFail (getHlist es) (BL.pack $ unWord7 <$> ws)
where
unpack (Left (_, _, e)) = Left e
unpack (Right (left, _, h)) = Right (nonIgnored h, Word7 <$> BL.unpack left)
nonIgnored (Hlist hs) = Hlist $ filter (\(_, h) -> shouldKeep h) hs
shouldKeep (IgnoreV _) = False
shouldKeep _ = True
reserved :: Int -> Entry
reserved i = Entry "reserved" (IgnoreR i) (IgnoreV i)
entry :: Range -> Value -> String -> Entry
entry range value name = Entry name range value
oneV :: Word8 -> Value
oneV = OneV . word7FromIntegral
twoV :: Word16 -> Value
twoV = TwoV . word14FromIntegral
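-- A minimal usage sketch (not part of the original API; the "volume" field
-- name and the concrete ranges are made up for illustration):

-- | Pack a single-field 'Hlist' against a one-entry layout.
_examplePack :: Either String [Word7]
_examplePack =
  packHlist [entry (OneR 0x00 0x7F) (oneV 0x00) "volume"]
            (Hlist [("volume", oneV 0x40)])

-- | Decode two raw words: the first becomes the "volume" field, the second is
-- skipped as reserved and dropped from the resulting 'Hlist'.
_exampleUnpack :: Either String (Hlist, [Word7])
_exampleUnpack =
  unpackHlist [entry (OneR 0x00 0x7F) (oneV 0x00) "volume", reserved 1]
              [Word7 0x40, Word7 0x00]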
| ejconlon/dxedrine | src/Dxedrine/Hlists.hs | bsd-3-clause | 4,539 | 0 | 25 | 1,366 | 1,983 | 997 | 986 | 140 | 11 |
module Tct.Trs.Data.Precedence
( Order (..)
, Precedence (..)
, precedence
, empty
, insert
, eclasses
, recursionDepth
, ranks
) where
import qualified Control.Monad.State.Strict as St
import Data.List (find)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Tct.Core.Common.Pretty as PP
import Tct.Trs.Data.Signature (Signature, Symbols, symbols)
data Order b = b :>: b | b :~: b
deriving (Show, Eq, Ord)
newtype Precedence f = Precedence (Signature f, [Order f]) deriving Show
instance PP.Pretty f => PP.Pretty (Precedence f) where
pretty (Precedence (_, [])) = PP.text "empty"
pretty (Precedence (_,l)) = PP.hsep $ PP.punctuate (PP.text ",") [pp e | e <- l] where
pp (f :>: g) = PP.pretty f PP.<+> PP.text ">" PP.<+> PP.pretty g
pp (f :~: g) = PP.pretty f PP.<+> PP.text "~" PP.<+> PP.pretty g
precedence :: Signature f -> [Order f] -> Precedence f
precedence = curry Precedence
empty :: Signature f -> Precedence f
empty sig = precedence sig []
insert :: Order f -> Precedence f -> Precedence f
insert e (Precedence (sig, l)) = Precedence (sig, e : l)
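-- | Groups of symbols related by the ':~:' equations of the precedence;
-- symbols that appear in no equation are not listed.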
eclasses :: Ord f => Precedence f -> [Symbols f]
eclasses (Precedence (_, l)) = foldr ins [] l
where
ins (g :~: h) [] = [S.fromList [g,h]]
ins eq@(g :~: h) (ec:ecs)
| g `S.member` ec = h `S.insert` ec : ecs
| h `S.member` ec = g `S.insert` ec : ecs
| otherwise = ec : ins eq ecs
ins _ ecs = ecs
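-- | Recursion depth of each symbol of the signature with respect to the given
-- set of recursive symbols: the number of recursive symbols along a longest
-- descending chain of the precedence starting at (and including) the symbol.
-- For example (illustrative), with orders @f :>: g@ and @g :>: h@ and
-- recursive symbols @{f, h}@, the result maps @f@ to 2 and @g@, @h@ to 1.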
recursionDepth :: Ord f => Symbols f -> Precedence f -> M.Map f Int
recursionDepth recursives prec@(Precedence (sig, l)) = St.execState (mapM_ recdepthM syms) M.empty
where
ecss = eclasses prec
eclassOf f = S.singleton f `fromMaybe` find (\ cs -> f `S.member` cs) ecss
syms = S.toList $ symbols sig
below f = S.toList $ S.unions [ eclassOf h | f' :>: h <- l , f == f' ]
recdepthM f = do
m <- St.get
case M.lookup f m of
Just rd -> return rd
Nothing -> do
rds <- mapM recdepthM (below f)
let rd | f `S.member` recursives = 1 + maximum (0:rds)
| otherwise = maximum (0:rds)
St.modify (M.insert f rd)
return rd
-- | ranks of function symbols in precedence, starting at '1'
ranks :: Ord f => Precedence f -> M.Map f Int
ranks prec@(Precedence(sig,_)) = recursionDepth (symbols sig) prec
| ComputationWithBoundedResources/tct-trs | src/Tct/Trs/Data/Precedence.hs | bsd-3-clause | 2,563 | 0 | 21 | 766 | 1,081 | 565 | 516 | 56 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, DeriveFunctor, OverloadedStrings, PatternGuards #-}
-- | Types used to generate the input.
module Input.Item(
Sig(..), Ctx(..), Ty(..),
Item(..), itemName,
Target(..), TargetId(..),
splitIPackage, splitIModule,
hseToSig, hseToItem
) where
import Numeric
import Control.Applicative
import Data.Tuple.Extra
import Language.Haskell.Exts
import Data.List.Extra
import Data.Maybe
import Data.Ix
import Foreign.Storable
import Data.Word
import Control.DeepSeq
import Data.Data
import General.Util
import General.IString
import Prelude
---------------------------------------------------------------------
-- TYPES
-- FIXME: Delete the Read instances
data Sig n = Sig [Ctx n] [Ty n] deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- list of -> types
data Ctx n = Ctx n n deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- context, second will usually be a free variable
data Ty n = TCon n [Ty n] | TVar n [Ty n] deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- type application, vectorised, all symbols may occur at multiple kinds
instance NFData n => NFData (Sig n) where rnf (Sig x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ctx n) where rnf (Ctx x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ty n) where
rnf (TCon x y) = rnf x `seq` rnf y
rnf (TVar x y) = rnf x `seq` rnf y
---------------------------------------------------------------------
-- ITEMS
data Item
= IPackage String
| IModule String
| IName String -- class or newtype
| ISignature String (Sig IString)
| IAlias String [IString] (Sig IString)
| IInstance (Sig IString)
deriving (Show,Eq,Ord,Typeable,Data)
instance NFData Item where
rnf (IPackage x) = rnf x
rnf (IModule x) = rnf x
rnf (IName x) = rnf x
rnf (ISignature a b) = rnf (a,b)
rnf (IAlias a b c) = rnf (a,b,c)
rnf (IInstance a) = rnf a
itemName :: Item -> Maybe String
itemName (IPackage x) = Just x
itemName (IModule x) = Just x
itemName (IName x) = Just x
itemName (ISignature x _) = Just x
itemName (IAlias x _ _) = Just x
itemName (IInstance _) = Nothing
---------------------------------------------------------------------
-- DATABASE
newtype TargetId = TargetId Word32 deriving (Eq,Ord,Storable,NFData,Ix)
instance Show TargetId where
show (TargetId x) = showHex x ""
instance Read TargetId where
readsPrec _ = map (first TargetId) . readHex
data Target = Target
{targetURL :: URL -- URL where this thing is located
,targetPackage :: Maybe (String, URL) -- name and URL of the package it is in (Nothing if it is a package)
,targetModule :: Maybe (String, URL) -- name and URL of the module it is in (Nothing if it is a package or module)
,targetType :: String -- one of package, module or empty string
,targetItem :: String -- HTML span of the item, using <0> for the name and <1> onwards for arguments
,targetDocs :: String -- HTML documentation to show, a sequence of block level elements
} deriving (Show,Eq,Ord)
instance NFData Target where
rnf (Target a b c d e f) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq` rnf f
splitIPackage, splitIModule :: [(a, Item)] -> [(String, [(a, Item)])]
splitIPackage = splitUsing $ \x -> case snd x of IPackage x -> Just x; _ -> Nothing
splitIModule = splitUsing $ \x -> case snd x of IModule x -> Just x; _ -> Nothing
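-- | Split a list into named groups: a new group starts at every element for
-- which the selector returns 'Just', and elements before the first such
-- header are grouped under the empty name.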
splitUsing :: (a -> Maybe String) -> [a] -> [(String, [a])]
splitUsing f = repeatedly $ \(x:xs) ->
let (a,b) = break (isJust . f) xs
in ((fromMaybe "" $ f x, x:a), b)
---------------------------------------------------------------------
-- HSE CONVERSION
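-- | Convert a haskell-src-exts 'Type' into a 'Sig'. For example (illustrative),
-- @Ord a => a -> [a] -> Bool@ becomes roughly
--
-- > Sig [Ctx "Ord" "a"] [TVar "a" [], TCon "[]" [TVar "a" []], TCon "Bool" []]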
hseToSig :: Type -> Sig String
hseToSig = tyForall
where
-- forall at the top is different
tyForall (TyParen x) = tyForall x
tyForall (TyForall _ c t) | Sig cs ts <- tyForall t = Sig (concatMap ctx c ++ cs) ts
tyForall x = Sig [] $ tyFun x
tyFun (TyParen x) = tyFun x
tyFun (TyFun a b) = ty a : tyFun b
tyFun x = [ty x]
ty (TyForall _ _ x) = TCon "\\/" [ty x]
ty x@TyFun{} = TCon "->" $ tyFun x
ty (TyTuple box ts) = TCon (fromQName $ Special $ TupleCon box $ length ts) (map ty ts)
ty (TyList x) = TCon "[]" [ty x]
ty (TyParArray x) = TCon "[::]" [ty x]
ty (TyApp x y) = case ty x of
TCon a b -> TCon a (b ++ [ty y])
TVar a b -> TVar a (b ++ [ty y])
ty (TyVar x) = TVar (fromName x) []
ty (TyCon x) = TCon (fromQName x) []
ty (TyInfix a b c) = ty $ TyCon b `TyApp` a `TyApp` c
ty (TyKind x _) = ty x
ty (TyBang _ x) = ty x
ty (TyParen x) = ty x
ty _ = TVar "_" []
ctx (ParenA x) = ctx x
ctx (InfixA a con b) = ctx $ ClassA con [a,b]
ctx (ClassA con (TyVar var:_)) = [Ctx (fromQName con) (fromName var)]
ctx _ = []
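-- A rough example of the conversion (assuming fromName/fromQName yield the
-- plain identifier text): the HSE type for `Eq a => a -> [a] -> Bool` becomes
--   Sig [Ctx "Eq" "a"] [TVar "a" [], TCon "[]" [TVar "a" []], TCon "Bool" []]
-- i.e. one Ctx per class constraint and one Ty per (->)-separated component.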
hseToItem :: Decl -> Maybe Item
hseToItem (TypeSig _ [name] ty) = Just $ ISignature (fromName name) (toIString <$> hseToSig ty)
hseToItem (TypeDecl _ name bind rhs) = Just $ IAlias (fromName name) (map (toIString . fromName . fromTyVarBind) bind) (toIString <$> hseToSig rhs)
hseToItem (InstDecl _ _ _ ctx name args _) = Just $ IInstance $ fmap toIString $ hseToSig $ TyForall Nothing ctx $ tyApps (TyCon name) args
hseToItem x | [x] <- declNames x = Just $ IName x
hseToItem x = Nothing
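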
| BartAdv/hoogle | src/Input/Item.hs | bsd-3-clause | 5,430 | 0 | 14 | 1,277 | 2,200 | 1,144 | 1,056 | 106 | 21 |
{-# LANGUAGE
TemplateHaskell
, QuasiQuotes
#-}
module Language.Haskell.TH.HDBI
(
deriveToRow
, deriveFromRow
) where
-- import Control.Applicative
import Control.Monad
import Control.Applicative
import Database.HDBI.SqlValue (ToRow(..),
FromRow(..),
FromSql(..),
ToSql(..),
ConvertError(..))
import Language.Haskell.TH
-- | Return the constructor name and its field count (the count is Nothing if
-- the data constructor is infix)
getTParams :: String -> Name -> Q (Name, Maybe Int)
getTParams exc name = do
tcon <- reify name
case tcon of
(TyConI dec) -> do
case dec of
(DataD _ _ vars constrs _) -> do
checkVars vars
case constrs of
[con] -> getTParams' con
_ -> fl $ "data " ++ (show name) ++ " should have exactly one constructor"
(NewtypeD _ _ vars con _) -> do
checkVars vars
getTParams' con
        _ -> fl $ "can only be derived for data types with one constructor or for newtypes"
_ -> fl $ (show name) ++ " must be a type"
where
fl x = fail $ exc ++ x
checkVars [] = return ()
checkVars _ = fl $ "type " ++ show name ++ " should not have type variables"
getTParams' :: Con -> Q (Name, Maybe Int)
getTParams' (NormalC n fields) = return (n, Just $ length fields)
getTParams' (RecC n fields) = return (n, Just $ length fields)
getTParams' (InfixC _ n _) = return (n, Nothing)
getTParams' _ = fl $ "data constructors should not contain typevar boundries for " ++ show name
-- | Derive `ToRow` instance for any data with one constructor or for newtype
deriveToRow :: Name -> Q [Dec]
deriveToRow name = do
(con, fields) <- getTParams "deriveToRow: " name
names <- case fields of
Just fl -> replicateM fl $ newName "val"
Nothing -> replicateM 2 $ newName "val"
return [InstanceD [] (AppT (ConT ''ToRow) (ConT name))
[FunD 'toRow
[Clause [mkPattern con fields names]
(NormalB $ ListE $ map (\nm -> AppE (VarE 'toSql) (VarE nm)) names) [] ]]]
where
mkPattern con Nothing [n1, n2] = InfixP (VarP n1) con (VarP n2)
mkPattern con (Just _) names = ConP con $ map VarP names
deriveFromRow :: Name -> Q [Dec]
deriveFromRow name = do
(con, fields) <- getTParams "deriveFromRow: " name
names <- case fields of
Just fl -> replicateM fl $ newName "val"
Nothing -> replicateM 2 $ newName "val"
xname <- newName "x"
return [InstanceD [] (AppT (ConT ''FromRow) (ConT name))
[FunD 'safeFromRow
[Clause [ListP $ map VarP names]
(NormalB $ UInfixE (mkCon fields con) (VarE '(<$>)) (foldedFromSql names)) []
,Clause [VarP xname]
(NormalB $ AppE (ConE 'Left) (AppE (ConE 'ConvertError)
(UInfixE
(LitE $ StringL $ "Could not construct " ++ show name
++ ": query must return exactly "
++ (show $ length names) ++ " values but not " )
(VarE '(++))
(AppE (VarE 'show) (AppE (VarE 'length) (VarE xname)))))) []]]]
where
foldedFromSql names = foldl1 (\a b -> UInfixE a (VarE '(<*>)) b)
$ map (\n -> AppE (VarE 'safeFromSql) (VarE n)) names
mkCon (Just _) con = ConE con
mkCon Nothing con = ParensE $ ConE con
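-- A usage sketch (the Person type is illustrative, not part of this module):
--
--   data Person = Person String Int
--   $(deriveToRow ''Person)
--   $(deriveFromRow ''Person)
--
-- which is intended to expand to roughly
--
--   instance ToRow Person where
--     toRow (Person v1 v2) = [toSql v1, toSql v2]
--
--   instance FromRow Person where
--     safeFromRow [v1, v2] = Person <$> safeFromSql v1 <*> safeFromSql v2
--     safeFromRow x        = Left $ ConvertError $
--       "Could not construct Person: query must return exactly 2 values but not "
--       ++ show (length x)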
| s9gf4ult/hdbi | Language/Haskell/TH/HDBI.hs | bsd-3-clause | 3,428 | 0 | 27 | 1,069 | 1,202 | 611 | 591 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
{-| This is a dummy backend that doesn't offer any formalism to specify models or verify contracts.
It is only used to provide a backend for testing purporses (Or if you are too lazy to write components). -}
module Language.GTL.Backend.None where
import Language.GTL.Backend
import Data.Map as Map
-- | The none backend data type
data None = None
instance GTLBackend None where
data GTLBackendModel None = NoneData
backendName _ = "none"
initBackend _ _ args = return NoneData
backendGetAliases _ _ = Map.empty
typeCheckInterface _ _ x = return x
cInterface _ _ = CInterface
{ cIFaceIncludes = []
, cIFaceStateType = []
, cIFaceInputType = []
, cIFaceStateInit = const ""
, cIFaceIterate = \_ _ -> ""
, cIFaceGetOutputVar = \_ _ _ -> Just ""
, cIFaceGetInputVar = \_ _ _ -> Just ""
, cIFaceTranslateType = \_ -> ("","",False)
, cIFaceTranslateValue = \_ -> CValue ""
}
backendVerify _ _ _ _ _ _ _ _ _ = return Nothing
| hguenther/gtl | lib/Language/GTL/Backend/None.hs | bsd-3-clause | 1,009 | 0 | 9 | 229 | 255 | 143 | 112 | 22 | 0 |
{-|
Module : Idris.Core.Evaluate
Description : Evaluate Idris expressions.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE BangPatterns, DeriveGeneric, FlexibleInstances,
MultiParamTypeClasses, PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC,
normaliseAll, normaliseBlocking, toValue, quoteTerm,
rt_simplify, simplify, inlineSmall,
specialise, unfold, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Injectivity, Totality(..), TTDecl, PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, next_tvar,
addToCtxt, setAccess, setInjective, setTotal, setRigCount,
setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTyName, lookupTyNameExact, lookupTy, lookupTyExact,
lookupP, lookupP_all, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupDefAccExact, lookupVal,
mapDefCtxt, tcReducible,
lookupTotal, lookupTotalExact, lookupInjectiveExact,
lookupRigCount, lookupRigCountExact,
lookupNameTotal, lookupMetaInformation, lookupTyEnv, isTCDict,
isCanonical, isDConName, canBeDConName, isTConName, isConName, isFnName,
conGuarded,
Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions,
isUniverse, linearCheck, linearCheckArg) where
import Idris.Core.CaseTree
import Idris.Core.TT
import Control.Applicative hiding (Const)
import Control.Monad.State
import Data.Binary hiding (get, put)
import qualified Data.Binary as B
import Data.List
import Data.Maybe (listToMaybe)
import Debug.Trace
import GHC.Generics (Generic)
data EvalState = ES { limited :: [(Name, Int)],
nexthole :: Int,
blocking :: Bool }
deriving Show
type Eval a = State EvalState a
data EvalOpt = Spec
| Simplify Bool -- ^ whether to expand lets or not
| AtREPL
| RunTT
| Unfold
deriving (Show, Eq)
initEval = ES [] 0 False
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value
| VV Int
-- True for Bool indicates safe to reduce
| VBind Bool Name (Binder Value) (Value -> Eval Value)
-- For frozen let bindings when simplifying
| VBLet Int Name Value Value Value
| VApp Value Value
| VType UExp
| VUType Universe
| VErased
| VImpossible
| VConstant Const
| VProj Value Int
-- | VLazy Env [Value] Term
| VTmp Int
canonical :: Value -> Bool
canonical (VP (DCon _ _ _) _ _) = True
canonical (VApp f a) = canonical f
canonical (VConstant _) = True
canonical (VType _) = True
canonical (VUType _) = True
canonical VErased = True
canonical _ = False
instance Show Value where
show x = show $ evalState (quote 100 x) initEval
instance Show (a -> b) where
show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t []
quote 0 val) initEval
-- | Normalise everything, whether abstract, private or public
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t [AtREPL]
quote 0 val) initEval
-- | As normaliseAll, but with an explicit list of names *not* to reduce
normaliseBlocking :: Context -> Env -> [Name] -> TT Name -> TT Name
normaliseBlocking ctxt env blocked t
= evalState (do val <- eval False ctxt (map (\n -> (n, 0)) blocked)
(map finalEntry env) t [AtREPL]
quote 0 val) initEval
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
= evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
quote 0 val) initEval
toValue :: Context -> Env -> TT Name -> Value
toValue ctxt env t
= evalState (eval False ctxt [] (map finalEntry env) t []) initEval
quoteTerm :: Value -> TT Name
quoteTerm val = evalState (quote 0 val) initEval
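-- Note that the normalisation functions above all follow the same
-- evaluate-then-quote scheme: terms are evaluated to HOAS 'Value's and then
-- quoted back to first-order 'TT' terms, i.e. roughly
--   normalise ctxt env t  ~  quoteTerm (toValue ctxt env t)
-- (modulo the evaluation options passed to 'eval').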
-- Return a specialised name, and an updated list of reductions available,
-- so that the caller can tell how much specialisation was achieved.
specialise :: Context -> Env -> [(Name, Int)] -> TT Name ->
(TT Name, [(Name, Int)])
specialise ctxt env limits t
= let (tm, st) =
runState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Spec]
quote 0 val) (initEval { limited = limits }) in
(tm, limited st)
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline, and lets
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "assert_smaller", 0),
(sUN "assert_total", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "fork", 0)]
(map finalEntry env) (finalise t)
[Simplify True]
quote 0 val) initEval
-- | Like simplify, but we only reduce functions that are marked as okay to
-- inline, and don't reduce lets
inlineSmall :: Context -> Env -> TT Name -> TT Name
inlineSmall ctxt env t
= evalState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Simplify False]
quote 0 val) initEval
-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "prim_fork", 0)]
(map finalEntry env) (finalise t)
[RunTT]
quote 0 val) initEval
-- | Unfold the given names in a term, the given number of times in a stack.
-- Preserves 'let'.
-- This is primarily to support inlining of the given names, and can also
-- help with partial evaluation by allowing a recursive definition to be
-- unfolded once only.
-- Specifically used to unfold definitions using interfaces before going to
-- the totality checker (otherwise mutually recursive definitions in
-- implementations will not work...)
unfold :: Context -> Env -> [(Name, Int)] -> TT Name -> TT Name
unfold ctxt env ns t
= evalState (do val <- eval False ctxt ns
(map finalEntry env) (finalise t)
[Unfold]
quote 0 val) initEval
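-- For instance (sketch): unfold ctxt env [(n, 1)] tm is intended to expand
-- applications of n in tm at most once along an evaluation path, leaving
-- deeper recursive occurrences of n untouched; names not in the list are not
-- reduced at all under the Unfold option.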
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
finalEntry :: (Name, RigCount, Binder (TT Name)) -> (Name, RigCount, Binder (TT Name))
finalEntry (n, r, b) = (n, r, fmap finalise b)
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, r, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, r, b):bs) tm = Bind n b (bindEnv bs tm)
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
unbindEnv env tm = error "Impossible case occurred: couldn't unbind env."
usable :: Bool -- specialising
-> Bool -- unfolding only
-> Int -- Reduction depth limit (when simplifying/at REPL)
-> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False uf depthlimit n [] = return (True, [])
usable True uf depthlimit n ns
= do ES ls num b <- get
if b then return (False, ns)
else case lookup n ls of
Just 0 -> return (False, ns)
Just i -> return (True, ns)
_ -> return (False, ns)
usable False uf depthlimit n ns
= case lookup n ns of
Just 0 -> return (False, ns)
Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
_ -> return $ if uf
then (False, ns)
else (True, (n, depthlimit) : filter (\ (n', _) -> n/=n') ns)
fnCount :: Int -> Name -> Eval ()
fnCount inc n
= do ES ls num b <- get
case lookup n ls of
Just i -> do put $ ES ((n, (i - inc)) :
filter (\ (n', _) -> n/=n') ls) num b
_ -> return ()
setBlock :: Bool -> Eval ()
setBlock b = do ES ls num _ <- get
put (ES ls num b)
deduct = fnCount 1
reinstate = fnCount (-1)
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify True `elem` opts || Simplify False `elem` opts
simpl_inline = Simplify False `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
unfold = Unfold `elem` opts
noFree = all canonical . map snd
-- returns 'True' if the function should block
-- normal evaluation should return false
blockSimplify (CaseInfo inl always dict) n stk
| runtime
= if always then False
else not (inl || dict) || elem n stk
| simpl
= (not inl || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_compiletime cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookupBinder n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec unfold limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || (atRepl && noFree env)
|| runtime || unfold
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAccExact n (spec || unfold || (atRepl && noFree env) || runtime) ctxt
case val of
Just (Function _ tm, Public) ->
ev ntimes (n:stk) True env tm
Just (TyDecl nt ty, _) -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
Just (CaseOp ci _ _ _ _ cd, acc)
| (acc == Public || acc == Hidden) &&
-- || sUN "assert_total" `elem` stk) &&
null (fst (cases_compiletime cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
| (not (runtime || simpl_inline || unfold)) || occurrences n sc < 2
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
| otherwise
= do t' <- ev ntimes stk top env t
v' <- ev ntimes stk top env v --(finalise v)
-- use Tmp as a placeholder, then make it a variable reference
-- again when evaluation finished
hs <- get
let vd = nexthole hs
put (hs { nexthole = vd + 1 })
sc' <- ev ntimes stk top ((n, VP Bound (sMN vd "vlet") VErased) : env) sc
return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fstEnv genv ++ map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n', x):env) sc)
where vbind env t
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
-- block reduction immediately under codata (and not forced)
ev ntimes stk top env
(App _ (App _ (App _ d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg)
| dly == txt "Delay" && lco == txt "Infinite" && not (unfold || simpl)
= do let (f, _) = unApply arg
let ntimes' = case f of
P _ fn _ -> (fn, 0) : ntimes
_ -> ntimes
when spec $ setBlock True
d' <- ev ntimes' stk False env d
l' <- ev ntimes' stk False env l
t' <- ev ntimes' stk False env t
arg' <- ev ntimes' stk False env arg
when spec $ setBlock False
evApply ntimes' stk top env [l',t',arg'] d'
-- Treat "assert_total" specially, as long as it's defined!
ev ntimes stk top env (App _ (App _ (P _ n@(UN at) _) _) arg)
| Just (CaseOp _ _ _ _ _ _, _) <- lookupDefAccExact n (spec || (atRepl && noFree env)|| runtime) ctxt,
at == txt "assert_total" && not (simpl || unfold)
= ev ntimes (n : stk) top env arg
ev ntimes stk top env (App _ f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (Inferred tm) = ev ntimes stk top env tm
ev ntimes stk top env (TType i) = return $ VType i
ev ntimes stk top env (UType u) = return $ VUType u
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAccExact n (spec || (atRepl && noFree env) || runtime) ctxt in
case val of
Just (CaseOp ci _ _ _ _ cd, acc) ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam _ t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV 1 app
apply ntimes_in stk top env f@(VP Ref n ty) args
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec unfold limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || (atRepl && noFree env)
|| unfold || runtime
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAccExact n (spec || unfold || (atRepl && noFree env) || runtime) ctxt
case val of
Just (CaseOp ci _ _ _ _ cd, acc)
| acc == Public || acc == Hidden ->
-- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
Just (Operator _ i op, _) ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n
t <- evTree ntimes stk top env (zip ns args') tree
when spec $ case t of
Nothing -> reinstate n -- Blocked, count n again
Just _ -> return ()
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case _ n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _ _)
| (P (DCon _ _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a _) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi _ i s k) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
instance Eq Value where
(==) x y = getTT x == getTT y
where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 (App MaybeHoles) (quote i f) (quote i a)
quote i (VType u) = return (TType u)
quote i (VUType u) = return (UType u)
quote i VErased = return Erased
quote i VImpossible = return Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
wknV :: Int -> Value -> Eval Value
wknV i (VV x) | x >= i = return $ VV (x - 1)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
return $ VBind red n b' (\x -> do x' <- sc x
wknV (i + 1) x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
isUniverse :: Term -> Bool
isUniverse (TType _) = True
isUniverse (UType _) = True
isUniverse _ = False
isUsableUniverse :: Term -> Bool
isUsableUniverse (UType NullType) = False
isUsableUniverse x = isUniverse x
convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, [])
convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs TC Bool
convEq ctxt holes topx topy = ceq [] topx topy where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs TC Bool
ceq ps (P xt x _) (P yt y _)
| x `elem` holes || y `elem` holes = return True
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam _ t) (App _ y (V 0)))
= ceq ps x (substV (P Bound n t) y)
ceq ps (Bind n (Lam _ t) (App _ x (V 0))) y
= ceq ps (substV (P Bound n t) x) y
ceq ps x (Bind n (Lam _ t) (App _ y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam _ t) (App _ x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (Bind n (PVar _ t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVar _ t) sc) = ceq ps x sc
ceq ps (Bind n (PVTy t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVTy t) sc) = ceq ps x sc
ceq ps (V x) (V y) = return (x == y)
ceq ps (V x) (P _ y _)
| x >= 0 && length ps > x = return (fst (ps!!x) == y)
| otherwise = return False
ceq ps (P _ x _) (V y)
| y >= 0 && length ps > y = return (x == snd (ps!!y))
| otherwise = return False
ceq ps (Bind n xb xs) (Bind n' yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Pi r i v t) (Pi r' i' v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
-- Special case for 'case' blocks - size of scope causes complications,
-- we only want to check the blocks themselves are valid and identical
-- in the current scope. So, just check the bodies, and the additional
-- arguments the case blocks are applied to.
ceq ps x@(App _ _ _) y@(App _ _ _)
| (P _ cx _, xargs) <- unApply x,
(P _ cy _, yargs) <- unApply y,
caseName cx && caseName cy = sameCase ps cx cy xargs yargs
ceq ps (App _ fx ax) (App _ fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) | x == y = return True
ceq ps (TType (UVal 0)) (TType y) = return True
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps (UType AllTypes) x = return (isUsableUniverse x)
ceq ps x (UType AllTypes) = return (isUsableUniverse x)
ceq ps (UType u) (UType v) = return (u == v)
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps x y = return False
caseeq ps (Case _ n cs) (Case _ n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
sameCase :: [(Name, Name)] -> Name -> Name -> [Term] -> [Term] ->
StateT UCs TC Bool
sameCase ps x y xargs yargs
= case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (xin, xdef) = cases_compiletime xd
(yin, ydef) = cases_compiletime yd in
do liftM2 (&&)
(do ok <- zipWithM (ceq ps)
(drop (length xin) xargs)
(drop (length yin) yargs)
return (and ok))
(caseeq ((x,y):ps) xdef ydef)
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
a constant, which could be a data or type constructor, an axiom or as an
yet undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![(Type, Bool)] -- argument types, whether canonical
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
deriving Generic
-- [Name] SC -- Compile time case definition
-- [Name] SC -- Run time case definitions
data CaseDefs = CaseDefs {
cases_compiletime :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
deriving Generic
data CaseInfo = CaseInfo {
case_inlinable :: Bool, -- decided by machine
case_alwaysinline :: Bool, -- decided by %inline flag
tc_dictionary :: Bool
}
deriving Generic
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
instance Show Def where
show (Function ty tm) = "Function: " ++ show (ty, tm)
show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
show (Operator ty _ _) = "Operator: " ++ show ty
show (CaseOp (CaseInfo inlc inla inlr) ty atys ps_in ps cd)
= let (ns, sc) = cases_compiletime cd
(ns', sc') = cases_runtime cd in
"Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
"COMPILE TIME:\n\n" ++
show ns ++ " " ++ show sc ++ "\n\n" ++
"RUN TIME:\n\n" ++
show ns' ++ " " ++ show sc' ++ "\n\n" ++
          (if inlc then "Inlinable" else "Not inlinable") ++
          (if inla then " Aggressively\n" else "\n")
-------
-- Hidden => Programs can't access the name at all
-- Public => Programs can access the name and use at will
-- Frozen => Programs can access the name, which doesn't reduce
-- Private => Programs can't access the name, doesn't reduce internally
data Accessibility = Hidden | Public | Frozen | Private
deriving (Eq, Ord, Generic)
instance Show Accessibility where
show Public = "public export"
show Frozen = "export"
show Private = "private"
show Hidden = "hidden"
type Injectivity = Bool
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
| Generated
deriving (Eq, Generic)
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| ExternalIO | BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq, Generic)
instance Show Totality where
   show (Total args) = "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial ExternalIO) = "an external IO primitive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
show (Partial (UseUndef n)) = "possibly not total because it uses the undefined name " ++ show n
show Generated = "auto-generated"
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show, Generic)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
-- Also store maximum RigCount of the name (can't bind a name at multiplicity
-- 1 in a RigW, for example)
data Context = MkContext {
next_tvar :: Int,
definitions :: Ctxt TTDecl
} deriving (Show, Generic)
type TTDecl = (Def, RigCount, Injectivity, Accessibility, Totality, MetaInformation)
-- | The initial empty context
initContext = MkContext 0 emptyContext
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext t !defs) = MkContext t (mapCtxt f' defs)
   where f' (!d, r, i, a, t, m) = (f d, r, i, a, t, m)
-- | Get the definitions from a context
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt = map (\(n, (d, r, i, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt)
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (Function ty tm, RigW, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, _, t, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setInjective :: Name -> Injectivity -> Context -> Context
setInjective n i uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, _, a, t, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, a, _, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setRigCount :: Name -> RigCount -> Context -> Context
setRigCount n rc uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, _, i, a, t, m) -> (d, rc, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, a, t, _) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
!ctxt' = addDef n (d, RigW, False, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (TyDecl nt ty, RigW, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty unique cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
!ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, RigW, True, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty') unique) ty, RigW, True, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
--
-- Issue #1724 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1724
addCasedef :: Name -> ErasureInfo -> CaseInfo ->
Bool -> SC -> -- default case
Bool -> Bool ->
[(Type, Bool)] -> -- argument types, whether canonical
[Int] -> -- inaccessible arguments
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- run time
Type -> Context -> TC Context
addCasedef n ei ci@(CaseInfo inline alwaysInline tcdict)
tcase covering reflect asserted argtys inacc
ps_in ps_ct ps_rt ty uctxt
= do let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
compileTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct ei
runtime <- simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt ei
ctxt' <- case (compileTime, runtime) of
( CaseDef args_ct sc_ct _,
CaseDef args_rt sc_rt _) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_ct, sc_ct)
(args_rt, sc_rt)
op = (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_ct cdef,
RigW, False, access, Unchecked, EmptyMI)
in return $ addDef n op ctxt
-- other -> tfail (Msg $ "Error adding case def: " ++ show other)
return uctxt { definitions = ctxt' }
-- simplify a definition by unfolding interface methods
-- We need this for totality checking, because functions which use interfaces
-- in an implementation definition themselves need to have the implementation
-- inlined or it'll be treated as a higher order function that will potentially
-- loop.
simplifyCasedef :: Name -> [Name] -> [[Name]] -> ErasureInfo -> Context -> TC Context
simplifyCasedef n ufnames umethss ei uctxt
= do let ctxt = definitions uctxt
ctxt' <- case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, rc, inj, acc, tot, metainf)] ->
return ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, rc, inj, acc, tot, metainf)] ->
do let ps_in' = map simpl ps_in
pdef = map debind ps_in'
CaseDef args sc _ <- simpleCase False (STerm Erased) False CompileTime emptyFC [] atys pdef ei
return $ addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_compiletime = (args, sc) }),
rc, inj, acc, tot, metainf) ctxt
_ -> return ctxt
return uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar _ t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y))
= if null ufnames then Right (x, y)
else Right (x, unfold uctxt [] (map (\n -> (n, 1)) (uns y)) y)
simpl t = t
-- Unfold the given name, interface methods, and any function which uses it as
-- an argument directly. This is specifically for finding applications of
-- interface dictionaries and inlining them both for totality checking and for
-- a small performance gain.
uns tm = getNamesToUnfold ufnames umethss tm
getNamesToUnfold :: [Name] -> [[Name]] -> Term -> [Name]
getNamesToUnfold inames ms tm = nub $ inames ++ getNames Nothing tm ++ concat ms
where
getNames under fn@(App _ _ _)
| (f, args) <- unApply fn
= let under' = case f of
P _ fn _ -> Just fn
_ -> Nothing
in
getNames under f ++ concatMap (getNames under') args
getNames (Just under) (P _ ref _)
= if ref `elem` inames then [under] else []
getNames under (Bind n (Let t v) sc)
= getNames Nothing t ++
getNames Nothing v ++
getNames Nothing sc
getNames under (Bind n b sc) = getNames Nothing (binderTy b) ++
getNames Nothing sc
getNames _ _ = []
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
Context -> Context
addOperator n ty a op uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Operator ty a op, RigW, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
tfst (a, _, _, _, _, _) = a
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt
= let ns = lookupCtxtName n (definitions ctxt) in
map fst ns
-- | Get the list of pairs of fully-qualified names and their types that match some name
lookupTyName :: Name -> Context -> [(Name, Type)]
lookupTyName n ctxt = do
(name, def) <- lookupCtxtName n (definitions ctxt)
ty <- case tfst def of
(Function ty _) -> return ty
(TyDecl _ ty) -> return ty
(Operator ty _ _) -> return ty
(CaseOp _ ty _ _ _ _) -> return ty
return (name, ty)
-- | Get the pair of a fully-qualified name and its type, if there is a unique one matching the name used as a key.
lookupTyNameExact :: Name -> Context -> Maybe (Name, Type)
lookupTyNameExact n ctxt = listToMaybe [ (nm, v) | (nm, v) <- lookupTyName n ctxt, nm == n ]
-- | Get the types that match some name
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt = map snd (lookupTyName n ctxt)
-- | Get the single type that matches some name precisely
lookupTyExact :: Name -> Context -> Maybe Type
lookupTyExact n ctxt = fmap snd (lookupTyNameExact n ctxt)
-- | Return True if the given type is a concrete type family or primitive,
-- False if it's a function to compute a type or a variable
isCanonical :: Type -> Context -> Bool
isCanonical t ctxt
= case unApply t of
(P _ n _, _) -> isConName n ctxt
(Constant _, _) -> True
_ -> False
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
isTConName :: Name -> Context -> Bool
isTConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (TCon _ _) _) -> True
_ -> False
-- | Check whether a resolved name is certainly a data constructor
isDConName :: Name -> Context -> Bool
isDConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (DCon _ _ _) _) -> True
_ -> False
-- | Check whether any overloading of a name is a data constructor
canBeDConName :: Name -> Context -> Bool
canBeDConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (DCon _ _ _) _) -> return True
_ -> return False
isFnName :: Name -> Context -> Bool
isFnName n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> True
Just (Operator _ _ _) -> True
Just (CaseOp _ _ _ _ _ _) -> True
_ -> False
isTCDict :: Name -> Context -> Bool
isTCDict n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> False
Just (Operator _ _ _) -> False
Just (CaseOp ci _ _ _ _ _) -> tc_dictionary ci
_ -> False
-- Is the name guarded by constructors in the term?
-- We assume the term is normalised, so no looking under 'let' for example.
conGuarded :: Context -> Name -> Term -> Bool
conGuarded ctxt n tm = guarded n tm
where
guarded n (P _ n' _) = n == n'
guarded n ap@(App _ _ _)
| (P _ f _, as) <- unApply ap,
isConName f ctxt = any (guarded n) as
guarded _ _ = False
lookupP :: Name -> Context -> [Term]
lookupP = lookupP_all False False
lookupP_all :: Bool -> Bool -> Name -> Context -> [Term]
lookupP_all all exact n ctxt
= do (n', def) <- names
p <- case def of
(Function ty tm, _, inj, a, _, _) -> return (P Ref n' ty, a)
(TyDecl nt ty, _, _, a, _, _) -> return (P nt n' ty, a)
(CaseOp _ ty _ _ _ _, _, inj, a, _, _) -> return (P Ref n' ty, a)
(Operator ty _ _, _, inj, a, _, _) -> return (P Ref n' ty, a)
case snd p of
Hidden -> if all then return (fst p) else []
Private -> if all then return (fst p) else []
_ -> return (fst p)
where
names = let ns = lookupCtxtName n (definitions ctxt) in
if exact
then filter (\ (n', d) -> n' == n) ns
else ns
lookupDefExact :: Name -> Context -> Maybe Def
lookupDefExact n ctxt = tfst <$> lookupCtxtExact n (definitions ctxt)
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = tfst <$> lookupCtxt n (definitions ctxt)
lookupNameDef :: Name -> Context -> [(Name, Def)]
lookupNameDef n ctxt = mapSnd tfst $ lookupCtxtName n (definitions ctxt)
where mapSnd f [] = []
mapSnd f ((x,y):xys) = (x, f y) : mapSnd f xys
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, _, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
lookupDefAccExact :: Name -> Bool -> Context ->
Maybe (Def, Accessibility)
lookupDefAccExact n mkpublic ctxt
= fmap mkp $ lookupCtxtExact n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, _, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
where mkt (d, _, inj, a, t, m) = t
lookupTotalExact :: Name -> Context -> Maybe Totality
lookupTotalExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt)
where mkt (d, _, inj, a, t, m) = t
lookupRigCount :: Name -> Context -> [RigCount]
lookupRigCount n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
    where mkt (d, rc, inj, a, t, m) = rc
lookupRigCountExact :: Name -> Context -> Maybe RigCount
lookupRigCountExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt)
where mkt (d, rc, inj, a, t, m) = rc
lookupInjectiveExact :: Name -> Context -> Maybe Injectivity
lookupInjectiveExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt)
where mkt (d, _, inj, a, t, m) = inj
-- Assume type is at least in whnfArgs form
linearCheck :: Context -> Type -> TC ()
linearCheck ctxt t = checkArgs t
where
checkArgs (Bind n (Pi RigW _ ty _) sc)
= do linearCheckArg ctxt ty
checkArgs (substV (P Bound n Erased) sc)
checkArgs (Bind n (Pi _ _ _ _) sc)
= checkArgs (substV (P Bound n Erased) sc)
checkArgs _ = return ()
linearCheckArg :: Context -> Type -> TC ()
linearCheckArg ctxt ty = mapM_ checkNameOK (allTTNames ty)
where
checkNameOK f
= case lookupRigCountExact f ctxt of
Just Rig1 ->
tfail $ Msg $ show f ++ " can only appear in a linear binding"
_ -> return ()
checkArgs (Bind n (Pi RigW _ ty _) sc)
= do mapM_ checkNameOK (allTTNames ty)
checkArgs (substV (P Bound n Erased) sc)
checkArgs (Bind n (Pi _ _ _ _) sc)
= checkArgs (substV (P Bound n Erased) sc)
checkArgs _ = return ()
-- Check if a name is reducible in the type checker. Partial definitions
-- are not reducible (so treated as a constant)
tcReducible :: Name -> Context -> Bool
tcReducible n ctxt = case lookupTotalExact n ctxt of
Nothing -> True
Just (Partial _) -> False
_ -> True
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt)
where mkm (d, _, inj, a, t, m) = m
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n = map (\(n, (_, _, _, _, t, _)) -> (n, t)) . lookupCtxtName n . definitions
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
_ -> []
lookupTyEnv :: Name -> Env -> Maybe (Int, RigCount, Type)
lookupTyEnv n env = li n 0 env where
li n i [] = Nothing
li n i ((x, r, b): xs)
| n == x = Just (i, r, binderTy b)
| otherwise = li n (i+1) xs
-- | Create a unique name given context and other existing names
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
| n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs
| [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs
| otherwise = n
uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name
uniqueBindersCtxt ctxt ns (Bind n b sc)
= let n' = uniqueNameCtxt ctxt n ns in
Bind n' (fmap (uniqueBindersCtxt ctxt (n':ns)) b) (uniqueBindersCtxt ctxt ns sc)
uniqueBindersCtxt ctxt ns (App s f a) = App s (uniqueBindersCtxt ctxt ns f) (uniqueBindersCtxt ctxt ns a)
uniqueBindersCtxt ctxt ns t = t
| jmitchell/Idris-dev | src/Idris/Core/Evaluate.hs | bsd-3-clause | 56,761 | 0 | 27 | 20,577 | 19,696 | 10,079 | 9,617 | 991 | 80 |
{-# LANGUAGE TemplateHaskell #-}
module Main (
main
) where
import Test.Tasty
import Test.Tasty.QuickCheck
import System.Exit
import qualified Data.Schema.Sql.ScriptParseTest as SPT (testGroup)
main = defaultMain tests
tests :: TestTree
tests =
testGroup "All Tests" [
SPT.testGroup
]
| proegssilb/git-sql | test/MainTestSuite.hs | bsd-3-clause | 329 | 0 | 7 | 81 | 69 | 43 | 26 | 12 | 1 |
-- |
module X12.Tokens where
import Data.Text
data ElementToken = SimpleElementToken Text
| ComponentElementToken Text
| CompositeElementToken [ElementToken]
| RepeatedElementToken [ElementToken]
deriving (Eq, Show)
data SegmentToken = SegmentToken { segmentTokenId :: Text
, elementTokens :: [ElementToken]
}
deriving (Eq, Show)
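-- A sketch of how a raw X12 segment such as "NM1*IL*1*DOE*JOHN" might be
-- represented with these tokens (values illustrative):
--
--   SegmentToken { segmentTokenId = "NM1"
--                , elementTokens  = [ SimpleElementToken "IL"
--                                   , SimpleElementToken "1"
--                                   , SimpleElementToken "DOE"
--                                   , SimpleElementToken "JOHN" ] }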
| alexkyllo/xtwelve | src/X12/Tokens.hs | bsd-3-clause | 490 | 0 | 9 | 200 | 88 | 53 | 35 | 10 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module GhcUtilsSpec (main, spec) where
import Test.Hspec
import TestUtils
import qualified GHC as GHC
import qualified Data.Generics as SYB
import qualified GHC.SYB.Utils as SYB
import Language.Haskell.GHC.ExactPrint.Utils
import Language.Haskell.Refact.Utils.Binds
import Language.Haskell.Refact.Utils.GhcUtils
import Language.Haskell.Refact.Utils.GhcVersionSpecific
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.MonadFunctions
import Language.Haskell.Refact.Utils.TypeUtils
import Language.Haskell.Refact.Utils.Variables
-- import TestUtils
-- ---------------------------------------------------------------------
main :: IO ()
main = do
hspec spec
spec :: Spec
spec = do
describe "onelayerStaged" $ do
it "only descends one layer into a structure" $ do
-- let s = ([2,1,3,4,5],[6,7,8]) :: ([Int],[Int])
let s' = (2,[3,4],5) :: (Int,[Int],Int)
let -- worker (i :: Int)
-- | i == 2 = ["f"]
-- worker _ = []
worker' (i::Int) = [i]
-- worker'' (i::[Int]) = [head i]
let g = onelayerStaged SYB.Renamer [] ([] `SYB.mkQ` worker') s'
let g1 = SYB.gmapQ ([] `SYB.mkQ` worker') s'
let g2 = SYB.gmapQl (++) [] ([] `SYB.mkQ` worker') s'
(show g) `shouldBe` "[[2],[],[5]]"
(show g1) `shouldBe` "[[2],[],[5]]"
(show g2) `shouldBe` "[2,5]"
-- ---------------------------------
it "Finds a GHC.Name at top level only" $ do
(t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs"
let
comp = do
-- (t, toks) <- parseSourceFileTest "./test/testdata/DupDef/Dd1.hs"
-- putParsedModule t toks
renamed <- getRefactRenamed
let mn = locToName (4,1) renamed
let (Just (ln@(GHC.L _ n))) = mn
let mx = locToName (4,10) renamed
let (Just (lx@(GHC.L _ x))) = mx
let declsr = hsBinds renamed
duplicatedDecls = definingDeclsNames [n] declsr True False
res = findEntity ln duplicatedDecls
res2 = findEntity n duplicatedDecls
resx = findEntity lx duplicatedDecls
resx2 = findEntity x duplicatedDecls
worker (nn::GHC.Name) = [showGhc nn]
g = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker) duplicatedDecls
worker2 ((GHC.L _ (GHC.FunBind (GHC.L _ n') _ _ _ _ _))::GHC.Located (GHC.HsBind GHC.Name))
| n == n' = ["found"]
worker2 _ = []
g2 = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker2) duplicatedDecls
return (res,res2,resx,resx2,duplicatedDecls,g,g2,ln,lx)
-- ((r,r2,rx,rx2,d,gg,gg2,_l,_x),_s) <- runRefactGhcState comp
((r,r2,rx,rx2,d,gg,gg2,_l,_x),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions
-- (SYB.showData SYB.Renamer 0 d) `shouldBe` ""
(showGhcQual d) `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]"
(showGhcQual _l) `shouldBe` "DupDef.Dd1.toplevel"
(showGhc _x) `shouldBe` "x"
(show gg) `shouldBe` "[[\"-10\"],[\"-10\"]]"
(show gg2) `shouldBe` "[[\"found\"],[\"-10\"]]"
r `shouldBe` True
r2 `shouldBe` True
rx `shouldBe` False
rx2 `shouldBe` True
-- ---------------------------------------------------------------------
| mpickering/HaRe | test/GhcUtilsSpec.hs | bsd-3-clause | 3,442 | 0 | 30 | 851 | 988 | 550 | 438 | 63 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.INTEL.ParallelArrays
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/INTEL/parallel_arrays.txt INTEL_parallel_arrays> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.INTEL.ParallelArrays (
-- * Enums
gl_COLOR_ARRAY_PARALLEL_POINTERS_INTEL,
gl_NORMAL_ARRAY_PARALLEL_POINTERS_INTEL,
gl_PARALLEL_ARRAYS_INTEL,
gl_TEXTURE_COORD_ARRAY_PARALLEL_POINTERS_INTEL,
gl_VERTEX_ARRAY_PARALLEL_POINTERS_INTEL,
-- * Functions
glColorPointervINTEL,
glNormalPointervINTEL,
glTexCoordPointervINTEL,
glVertexPointervINTEL
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/INTEL/ParallelArrays.hs | bsd-3-clause | 1,012 | 0 | 4 | 109 | 70 | 54 | 16 | 12 | 0 |
module Board.MoveGen where
import Data.Bits
import Data.Char
import Data.Word
import Numeric
import Utils
import Text.Printf
import qualified Data.Vector.Unboxed as V
{-
noWe nort noEa
+7 +8 +9
\ | /
west -1 <- 0 -> +1 east
/ | \
-9 -8 -7
soWe sout soEa
-}
{-
Board indexing:
y (row/rank)
8 | 56 57 58 59 60 61 62 63
7 | 48 49 50 51 52 53 54 55
6 | 40 41 42 43 44 45 46 47
5 | 32 33 34 35 36 37 38 39
4 | 24 25 26 27 28 29 30 31
3 | 16 17 18 19 20 21 22 23
2 | 8 9 10 11 12 13 14 15
1 | 0 1 2 3 4 5 6 7
----------------------------
| 1 2 3 4 5 6 7 8 -- x (col/file)
| A B C D E F G H
,
0x02824222120a0700
-}
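{-
With this layout the index of a square is 8*(rank-1) + (file-1);
for example a1 = 0, e4 = 8*3 + 4 = 28, and h8 = 63.
-}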
index64 :: V.Vector Int
index64 = V.fromList [
0, 47, 1, 56, 48, 27, 2, 60,
57, 49, 41, 37, 28, 16, 3, 61,
54, 58, 35, 52, 50, 42, 21, 44,
38, 32, 29, 23, 17, 11, 4, 62,
46, 55, 26, 59, 40, 36, 15, 53,
34, 51, 20, 43, 31, 22, 10, 45,
25, 39, 14, 33, 19, 30, 9, 24,
13, 18, 8, 12, 7, 6, 5, 63
]
-- /**
-- * bitScanForward
-- * @author Kim Walisch (2012)
-- * @param bb bitboard to scan
-- * @precondition bb != 0
-- * @return index (0..63) of least significant one bit
-- */
-- int bitScanForward(U64 bb) {
-- const U64 debruijn64 = C64(0x03f79d71b4cb0a89);
-- assert (bb != 0);
-- return index64[((bb ^ (bb-1)) * debruijn64) >> 58];
-- }
bitScanForward :: Word64 -> Int
bitScanForward bb =
let debruijn64 = 0x03f79d71b4cb0a89
ix = ((bb `xor` (bb-1)) * debruijn64) `shiftR` 58
in V.unsafeIndex index64 (fromIntegral ix)
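-- Illustrative sanity check (assumed values, not part of the original
-- module): bit 4 is the lowest set bit of 0x10, and bit 24 is the lowest
-- set bit of the sample occupancy defined below.
bitScanForwardDemo :: Bool
bitScanForwardDemo = bitScanForward 0x0000000000000010 == 4
                  && bitScanForward occupancy == 24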
moves :: Word64 -> Square -> Dir -> Word64
moves occ s d =
let aix = attackIndex s d
attack = V.unsafeIndex attacks aix
obstacles = occ .&. attack
firstObstacle = bitScanForward obstacles
aix2 = attackIndex firstObstacle d
attack2 = V.unsafeIndex attacks aix2
in attack `xor` attack2
occupancy :: Word64
occupancy = 0x00000000FF000000
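-- Illustrative use of 'moves' (assumed setup, not from the original module):
-- sliding moves in the positive direction from square 0 (a1 in the diagram
-- above), with the sample occupancy on rank 4 acting as blockers.
movesDemo :: Word64
movesDemo = moves occupancy 0 Pos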
ls1b :: Word64 -> Word64
ls1b x = x .&. (-x)
attacks :: V.Vector Word64
attacks = V.fromList
[0x8141211109050300,
0x02824222120a0700,
0x0404844424150e00,
0x08080888492a1c00,
0x1010101192543800,
0x2020212224a87000,
0x404142444850e000,
0x8182848890a0c000,
0x4121110905030000,
0x824222120a070000,
0x04844424150e0000,
0x080888492a1c0000,
0x1010119254380000,
0x20212224a8700000,
0x4142444850e00000,
0x82848890a0c00000,
0x2111090503000000,
0x4222120a07000000,
0x844424150e000000,
0x0888492a1c000000,
0x1011925438000000,
0x212224a870000000,
0x42444850e0000000,
0x848890a0c0000000,
0x1109050300000000,
0x22120a0700000000,
0x4424150e00000000,
0x88492a1c00000000,
0x1192543800000000,
0x2224a87000000000,
0x444850e000000000,
0x8890a0c000000000,
0x0905030000000000,
0x120a070000000000,
0x24150e0000000000,
0x492a1c0000000000,
0x9254380000000000,
0x24a8700000000000,
0x4850e00000000000,
0x90a0c00000000000,
0x0503000000000000,
0x0a07000000000000,
0x150e000000000000,
0x2a1c000000000000,
0x5438000000000000,
0xa870000000000000,
0x50e0000000000000,
0xa0c0000000000000,
0x0300000000000000,
0x0700000000000000,
0x0e00000000000000,
0x1c00000000000000,
0x3800000000000000,
0x7000000000000000,
0xe000000000000000,
0xc000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x0000000000000003,
0x0000000000000007,
0x000000000000000e,
0x000000000000001c,
0x0000000000000038,
0x0000000000000070,
0x00000000000000e0,
0x00000000000000c0,
0x0000000000000305,
0x000000000000070a,
0x0000000000000e15,
0x0000000000001c2a,
0x0000000000003854,
0x00000000000070a8,
0x000000000000e050,
0x000000000000c0a0,
0x0000000000030509,
0x0000000000070a12,
0x00000000000e1524,
0x00000000001c2a49,
0x0000000000385492,
0x000000000070a824,
0x0000000000e05048,
0x0000000000c0a090,
0x0000000003050911,
0x00000000070a1222,
0x000000000e152444,
0x000000001c2a4988,
0x0000000038549211,
0x0000000070a82422,
0x00000000e0504844,
0x00000000c0a09088,
0x0000000305091121,
0x000000070a122242,
0x0000000e15244484,
0x0000001c2a498808,
0x0000003854921110,
0x00000070a8242221,
0x000000e050484442,
0x000000c0a0908884,
0x0000030509112141,
0x0000070a12224282,
0x00000e1524448404,
0x00001c2a49880808,
0x0000385492111010,
0x000070a824222120,
0x0000e05048444241,
0x0000c0a090888482,
0x0003050911214181,
0x00070a1222428202,
0x000e152444840404,
0x001c2a4988080808,
0x0038549211101010,
0x0070a82422212020,
0x00e0504844424140,
0x00c0a09088848281]
display :: Word64 -> IO ()
display x =
mapM_ (putStrLn . reverse) $
groupIn 8 $
printf "%064s" $
showIntAtBase 2 intToDigit x ""
attackIndex :: Square -> Dir -> Int
attackIndex s d = if d == Pos then s else 64+s
printTable = mapM_ putStrLn
[ f
| d <- [Pos, Neg]
, s <- [0..63]
, let w64 = t s d
, let ix = attackIndex s d
, let f = printf "(%d, 0x%016x)," ix w64
]
ix :: (Int, Int) -> Int
ix (x,y) = (y-1) * 8 + (x-1)
cix :: Int -> (Int, Int)
cix i = let (q,r) = i `quotRem` 8
in (r+1, q+1)
data Dir = Pos | Neg deriving (Eq)
type Square = Int
t :: Square -> Dir -> Word64
t s d = ray2word64 $ attackRay s d
ray2word64 :: [Square] -> Word64
ray2word64 = foldl setBit 0
-- True = up
attackRay :: Square -> Dir -> [Square]
attackRay z p =
let (x,y) = cix z
in map ix $ case p of
Pos ->
[ (x-i,y+i) | i <- [1 .. min (x-1) (8-y)] ] -- left up
++ [ (x, y+i) | i <- [1 .. 8-y] ] -- straight up
++ [ (x+i,y+i) | i <- [1 .. min (8-x) (8-y)] ] -- right up
Neg ->
[ (x-i,y-i) | i <- [1 .. min (x-1) (y-1)] ] -- left down
++ [ (x, y-i) | i <- [1 .. y-1] ] -- straight down
++ [ (x+i,y-i) | i <- [1 .. min (8-x) (y-1)] ] -- right down
ray :: (Int -> Int) -> Bool -> Int -> [Int]
ray dir increasing square =
let inside = if increasing then (<64) else (>=0)
in case takeWhile inside $ iterate dir square of
[] -> []
squares -> tail squares
north = ray (+8) True
nw = ray (+7) True
ne = ray (+9) True
south = ray (\x -> x - 8) False
sw = ray (\x -> x - 9) False
se = ray (\x -> x - 7) False
| sphynx/hamisado | Board/MoveGen.hs | bsd-3-clause | 6,543 | 0 | 19 | 1,523 | 1,822 | 1,067 | 755 | 216 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{- |
Module : Kiosk.Backend.Data.ReportTemplate
Description : Render a Report Template from a Form and a list of DataTemplates
Copyright : Plow Technologies LLC
License : MIT License
Maintainer : Scott Murphy
Stability : experimental
Portability : portable
Data Templates and Form Helpers for making ReportTemplates
-}
module Kiosk.Backend.Data.ReportTemplate where
import Codec.Xlsx (Cell(..), CellMap, CellValue(..), def, cellValue, wsCells, Worksheet(..))
import Control.Applicative ((<$>), (<*>))
import Control.Lens
import Data.Map (Map)
import qualified Data.Map.Lazy as M
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Kiosk.Backend.Data.DataTemplate
-- import Kiosk.Backend.Data.DataTemplateEntry
import Kiosk.Backend.Form
import ReportTemplate.Report
type KioskReportTemplate context preOut rowOut= ReportTemplate context Form preOut DataTemplate rowOut
makeLenses ''Company
makeLenses ''Report
makePrisms ''ReportTable
makeLenses ''ReportTableRowStyle
makeLenses ''ReportPreamble
makePrisms ''InputType
makeLenses ''InputText
makeLenses ''InputDouble
makeLenses ''InputDate
-- | Kiosk Specific
type KioskPreambleTemplateList context preOut= [(ReportPreambleLabel, context -> Form -> preOut)]
type KioskRowTemplateList context rowOut = [(ReportRowLabel, context -> DataTemplate -> rowOut)]
type KioskPreambleTemplate context preOut= ReportPreambleTemplate context Form preOut
type KioskRowTemplate context rowOut = ReportRowTemplate context DataTemplate rowOut
-- | Spreadsheet specific
data XlsxContext = XlsxContext {
_xlsxCurrentTime :: String}
type XlsxReportTemplate = KioskReportTemplate XlsxContext CellMap Cell
type XlsxPreambleTemplateList = KioskPreambleTemplateList XlsxContext CellMap
type XlsxRowTemplateList = KioskRowTemplateList XlsxContext Cell
type XlsxReport = Report CellMap Cell
type XlsxPreamble = ReportPreamble CellMap
type XlsxTable = ReportTable Cell
-- | Excel form rendering helper functions.
-- The preamble renderers return a full 'CellMap' because the Excel preamble
-- spans several cells rather than a single one.
getCompanyName :: (Int,Int) -> Form -> CellMap
getCompanyName key form = makeCellMapFromText key companyName
where
companyName = form ^. getCompany.getCompanyText
makeCellMapFromText :: (Int,Int) -> Text -> CellMap
makeCellMapFromText key t = M.insert key cellText M.empty
where
cellText = def & cellValue .~ (Just . CellText $ t)
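-- Illustrative use (assumed coordinates and text, not part of the original
-- module): place a single text cell at row 1, column 1 of the preamble.
exampleCompanyCell :: CellMap
exampleCompanyCell = makeCellMapFromText (1, 1) "Acme Widgets"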
makeCellMapFromUTCTime :: String -> (Int, Int) -> UTCTime -> CellMap
makeCellMapFromUTCTime timeFormatString key = makeCellMapFromText key .
T.pack .
formatTime defaultTimeLocale
timeFormatString
-- | Row Rendering Helper Functions
-- | Retrieve Cell Data
makeCellDoubleFromInputDouble :: Text -> DataTemplate -> Cell
makeCellDoubleFromInputDouble = makeCellValueFromDataTemplate CellDouble inputDoubleLens
where
inputDoubleLens = _InputTypeDouble.getInputDouble
makeCellTextWithCellTemplate :: ([Text] -> Text )
-> [Text] -> DataTemplate -> Cell
makeCellTextWithCellTemplate templateFcn txts dte = def & cellValue ?~ cellVal
where
cellVal = CellText . templateFcn $ targetTextList
inputTextLens = _InputTypeText.getInputText
targetTextList :: [Text]
targetTextList = fromMaybe "" <$> (getInputTypeByLabel inputTextLens
<$> txts
<*> [dte])
makeCellDoubleWithCellTemplate :: ([Text] -> Either Text Double )
-> [Text] -> DataTemplate -> Cell
makeCellDoubleWithCellTemplate templateFcn txts dte = def & cellValue ?~ cellVal
where
cellVal = either CellText CellDouble $ templateFcn $ targetTextList
inputTextLens = _InputTypeText.getInputText
targetTextList :: [Text]
targetTextList = fromMaybe "" <$> (getInputTypeByLabel inputTextLens
<$> txts
<*> [dte])
makeCellTextFromInputText :: Text -> DataTemplate -> Cell
makeCellTextFromInputText = makeCellValueFromDataTemplate CellText inputTextLens
where
inputTextLens = _InputTypeText.getInputText
makeCellTextFromInputDate :: Text -> DataTemplate -> Cell
makeCellTextFromInputDate l dte = def & cellValue .~ maybeCellValue
where
maybeInputDate = getInputTypeByLabel inputLens l$ dte
maybeCellValue = CellText <$> maybeInputDate
inputLens = _InputTypeDate . getInputDate
makeCellValueFromDataTemplate ::
(s -> CellValue)
-> Getting (First s) InputType s -> Text -> DataTemplate -> Cell
makeCellValueFromDataTemplate cellConstructor lensDt l dt = outputCell
where
maybeCellValue :: Maybe CellValue
maybeCellValue = cellConstructor <$> (getInputTypeByLabel lensDt l $ dt)
outputCell :: Cell
outputCell = def & cellValue .~ maybeCellValue
getInputTypeByLabel ::
Getting (First a) InputType a -> Text -> DataTemplate -> Maybe a
getInputTypeByLabel lensDt l dt = outputCell
where
singletonInput = catMaybes.
fmap (getItemMatchingLabel l lensDt) .
templateItems $ dt
outputCell = case singletonInput of
[] -> Nothing
(x:_) -> Just x
getItemMatchingLabel
:: Text
-> Getting (First a) InputType a
-> TemplateItem
-> Maybe a
getItemMatchingLabel l dtLens (TemplateItem lbl inVal)
|l == lbl = inVal ^? dtLens
|otherwise = Nothing
-- | Build 'Report' from 'ReportTemplate'
buildXlsxReport :: XlsxReportTemplate -> XlsxContext ->
Form ->
[DataTemplate] -> XlsxReport
buildXlsxReport xlsxReportTemplate xlsxContext form dataTemplates = renderedReport
where
renderedReport = renderReport xlsxReportTemplate xlsxContext form dataTemplates
-- | Create Excel Spreadsheet
-- | Render Spreadsheet from report
renderSpreadsheet :: XlsxReport -> Worksheet
renderSpreadsheet report = def & wsCells .~ combinedMap
where
combinedMap :: CellMap
combinedMap = M.unions (preambleMapList ++ [labelCellMap] ++ rowMapList)
preambleOffset = 10
preambleMapList :: [CellMap]
preambleMapList = toListOf (reportPreamble.preambleValue.folded._2) report
labelToIntMap :: Map ReportRowLabel Int
labelToIntMap = M.fromList . zip (report ^. (reportRows . _ReportTableRowIndex . _1 ) ) $ [1..]
rowMapList :: [CellMap]
rowMapList = foldrTableByRowWithIndex transformPositionAndMap M.empty <$>
(toListOf (reportRows._ReportTableRowIndex._2) report)
transformPositionAndMap :: (Int,String) -> Cell -> CellMap -> CellMap
transformPositionAndMap (rowInt,label') rowVal rowMap' = case M.lookup label' labelToIntMap of
Nothing -> rowMap'
(Just i) -> M.insert (rowInt + preambleOffset , i) rowVal rowMap'
labelCellMap = M.foldrWithKey (\label' idx m -> M.insert (preambleOffset,idx) (convertText label') m )
M.empty
labelToIntMap
convertText label' = def & cellValue .~ (Just . CellText . T.pack $ label')
| plow-technologies/cobalt-kiosk-data-template | src/Kiosk/Backend/Data/ReportTemplate.hs | bsd-3-clause | 7,767 | 0 | 14 | 2,097 | 1,614 | 873 | 741 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import CNC.FanucMacro
import CNC.HCode
import CNC.GInterpreter
import CNC.GParser
import CNC.AwePrelude
--import Prelude(Num(..), Fractional(..), Floating(..), Int, ($), id, putStrLn, (++), Just)
import System.Environment
evaluateIsoFile :: FilePath -> IO ProgramStatistics
evaluateIsoFile file = do
parsed <- parseIsoFile file
-- print parsed
case parsed of
Right iso -> do
prog_trace <- gcodeToMoves iso
return $ gcode_stats prog_trace
Left err -> do putStrLn $ "Error parsing: " ++ show err
fail err
main = do
[file] <- getArgs
stats <- evaluateIsoFile file
print stats
| akamaus/gcodec | src/CNC/GEmulator.hs | bsd-3-clause | 680 | 0 | 13 | 141 | 163 | 79 | 84 | 21 | 2 |
{-# LANGUAGE OverloadedStrings, DoAndIfThenElse #-}
-- |
-- Command line utility; this is not meant to be used as a library.
--
-- To use as a library see the README or use this as an example of how
-- to combine the caching backend, request system, and parser/rule
-- checker.
module Main where
import Control.Applicative
import Control.Monad.Error
import Control.Exception.Base (bracket)
import Data.Grob.Types
import Data.Grob.Acid
import Data.Grob.Attoparsec
import Data.ConfigFile as C
import Data.Acid (AcidState, openLocalStateFrom)
import Data.Acid.Local (createCheckpointAndClose)
import Data.Acid.Advanced (update', query')
import Data.ByteString.Char8 as CB hiding (filter, null, any)
import Data.Time (UTCTime, getCurrentTime)
import Data.Maybe
import Data.List (stripPrefix)
import Data.List.Utils (endswith)
import Network.HTTP.Robots
import Network.URI as U hiding (path)
import System.Log.Handler.Color
import System.Log.Logger
import System.Console.CmdArgs
import System.Exit
import System.Posix.Env (getEnv)
import OpenSSL
rargs :: Grob
rargs = Args
{
argrobot = def &= argPos 0 &= typ "ROBOT.TXT",
argagent = def &= argPos 1 &= typ "USERAGENT",
argresource = def &= argPos 2 &= typ "URI",
argnocache = def &= explicit &= name "n" &= help "Override HTTP cache headers to not cache robots.txt",
argallowed = def &= explicit &= name "a" &= help "Explicitly check if allowed",
config = "~/.grobrc" &= typFile &= groupname "Options" &= help "Specify config file to use"
} &=
verbosity &=
help "Parser and rule checker for robots.txt's" &=
helpArg [name "h"] &=
summary "grob v0.1.0" &=
noAtExpand &=
details ["grob is a robots.txt request, parser, and rule checker library and binary.",
"",
"Default configuration file is in \"${HOME}/.grobrc\", use --config=[File] to specify a custom one."]
-- | @main@ begins every invocation in a `withOpenSSL` computation in
-- the event we try to request a robots.txt resource behind
-- HTTPS. Parse the cmdargs and pass them into `parseConfig` to generate
-- the initial application state inside of `grob/1`.
main :: IO Bool
main = withOpenSSL $ cmdArgs rargs >>= grob . parseConfig
-- | @grob@ builds and executes a given cmdarg/config session.
--
-- This function is responsible for resolving (if we can) the user's
-- HOME for the datadir (unless it's been set otherwise).
grob :: IO Settings -> IO Bool
grob settings = do
sets <- settings
home <- getEnv "HOME"
debugM "Console" "Initializing AcidState"
let ddir = homePath (datadir sets) home
returnCode <- if endswith "robots.txt" (robot sets) then runWithCache (nocache sets) sets ddir else return ExitSuccess
-- proper exit code termination
exitWith returnCode
-- | @runWithCache@ runs with the cache backend (False) or raw (True).
runWithCache :: Bool -> Settings -> String -> IO ExitCode
runWithCache True sets _ = do
(status, (_hcache, _hexp), body) <- open (CB.pack $ robot sets)
return ExitSuccess
runWithCache False sets ddir =
bracket (openLocalStateFrom ddir initialBotState)
createCheckpointAndClose
(middleState sets)
middleState :: Settings -> AcidState Bots -> IO ExitCode
middleState sets db = do
curt <- liftIO getCurrentTime
let uri = robot sets
robotUri = pack uri
directive = if allowed sets then "allow" else "disallow"
qBot <- query' db (RobotById $ RobotId (sha1 robotUri))
-- need to send Last-Modified so server can either tell us if it's
-- modified or not
resp <- open robotUri
tree <- dbOrParse db qBot curt resp robotUri
quiet <- isNormal
print tree
let f = directiveUA directive (agent sets) (resource sets) tree
v = if allowed sets then not f else f
if v
then return (ExitFailure 1)
else do
formatUri quiet (fromJust $ U.parseURI uri) uri (resource sets)
return ExitSuccess
formatUri :: Bool -> U.URI -> String -> String -> IO ()
formatUri False _ r _ = return ()
formatUri True puri _ pth = Prelude.putStrLn $ Prelude.concat [s, "//", n, p, pth]
where s = uriScheme puri
a = fromJust $ uriAuthority puri
n = uriRegName a
p = uriPort a
-- | @filterUA@ finds the rule set for the given user agent, falling back
-- to the wildcard "*"
filterUA :: String -> [RuleSet] -> Maybe RuleSet
filterUA ua ruleset = filt ua ruleset <|> filt "*" ruleset
where filt u = listToMaybe . filter (\x-> unUA (userAgent x) == pack u)
-- | @directiveUA@ returns False when no matching user agent is found;
-- otherwise checks the path against that agent's rule set for the directive
directiveUA :: ByteString -> String -> String -> [RuleSet] -> Bool
directiveUA dr ua path ruleset = maybe False (nany . rules) (filterUA ua ruleset)
where nany ps = any (\(_,y) -> checkPrefix dr path y) (filter (\(x,_) -> x==dr) ps)
-- | @checkPrefix@
checkPrefix :: ByteString -> String -> ByteString -> Bool
checkPrefix "disallow" path res = ("/"==y) || (path == y) || isJust (stripPrefix y path)
where y = CB.unpack res
checkPrefix "allow" path res = path == y
where y = CB.unpack res
checkPrefix _ _ _ = True
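-- Illustrative behaviour of 'checkPrefix' (assumed paths, not part of the
-- original module): a disallow rule matches by prefix, an allow rule only
-- matches the exact path.
checkPrefixDemo :: Bool
checkPrefixDemo = checkPrefix "disallow" "/private/data" "/private"
               && not (checkPrefix "disallow" "/public" "/private")
               && checkPrefix "allow" "/private" "/private"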
-- | If it's a 404 we can't cache it and we should just assume it's a
-- free for all; if it's a 304 then we just want to return the parse
-- tree.
dbOrParse _ _ _ (404,_,_) _ = return []
dbOrParse _ qBot _ (304,_,_) _ = return . parseTree $ fromJust qBot
dbOrParse db qBot curt (stat, (hcache, hexp), body) uri =
case qBot of
Nothing -> do
-- do initial parsing in here
let tree = doParse body
nBot <- update' db (NewRobot Robot {
robotId = RobotId $ sha1 uri,
url = unpack uri,
ttl = curt,
date = curt,
parseTree = tree
})
return tree
Just p ->
return (parseTree p)
-- | @homePath@ given a path and a value from getEnv/1, determines whether
-- the user's home directory is wanted and, if so, replaces the leading ~
-- with the user's HOME environment variable. If HOME is Nothing OR the path
-- has no leading ~ then simply return the path.
homePath :: String -> Maybe String -> String
homePath ('~':xs) home = fromMaybe "~" home ++ xs
homePath p _ = p
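-- Illustrative behaviour (assumed values):
-- homePath "~/.grobrc" (Just "/home/alice") == "/home/alice/.grobrc"
-- homePath "~/.grobrc" Nothing == "~/.grobrc"
-- homePath "/etc/grobrc" (Just "/home/alice") == "/etc/grobrc"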
-- | @parseConfig@ given a cmdarg record, tries to parse a config file
-- defined either by default or by the user into a settings record.
parseConfig :: Grob -> IO Settings
parseConfig argvs = do
-- these are being "overridden" by the verbosity/quiet arg
whenNormal $ updateGlobalLogger "Console" (setLevel ERROR)
whenLoud $ updateGlobalLogger "Console" (setLevel DEBUG)
-- activate color logging
updateGlobalLogger rootLoggerName (addHandler colorHandler)
debugM "Console" $ "Running Grob with " ++ config argvs
home <- getEnv "HOME"
-- parse the config file
cnf <- runErrorT $ do
cp <- join $ liftIO $ readfile emptyCP (homePath (config argvs) home)
datadirv <- C.get cp "CACHING" "datadir"
loglevelv <- C.get cp "LOGGING" "loglevel"
-- build and return a settings record
return Settings {
robot = argrobot argvs,
agent = argagent argvs,
resource = argresource argvs,
nocache = argnocache argvs,
allowed = argallowed argvs,
datadir = datadirv,
loglevel = loglevelv
}
handleConfig cnf
-- | @handleConfig@ logs and exits on any parsing errors, or returns the
-- new settings record.
handleConfig :: Either (CPErrorData, String) Settings -> IO Settings
handleConfig (Left err) = do
-- log the error and exit the program
criticalM "Console" $ show err
criticalM "Console" "exiting..."
exitWith (ExitFailure 1)
handleConfig (Right conf) = do
quiet <- isNormal
setLL quiet conf
debugM "Console" "Configuration parsed"
debugM "Console" $ "Setting loglevel to " ++ show (loglevel conf)
return conf
setLL :: Bool -> Settings -> IO ()
setLL False _ = return ()
setLL True conf = updateGlobalLogger "Console" (setLevel $ loglevel conf)
| ixmatus/grob | src/Grob.hs | bsd-3-clause | 8,593 | 0 | 17 | 2,457 | 2,080 | 1,072 | 1,008 | 149 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Description : Shrimp error types and pretty-printing
Copyright : (c) Galois, Inc.
Shrimp error types and pretty-printing
-}
module SCD.M4.Errors(Error(..), nubError, occurrences, flatten, distribute,
errorsByType) where
import SCD.M4.Syntax(IfdefId, M4Id, LayerModule, ModuleId)
import SCD.SELinux.Syntax(Identifier, ClassId, CommonId, PermissionId,
SignedId, CondExpr, pos)
import SCD.M4.PShow(PShow, pShow, pShowLayerModule, showPos)
import SCD.M4.Kind(Kind, ParameterMap, ParameterInfo, ParameterKind,
ppParameters)
import Text.PrettyPrint.HughesPJ(text, (<+>), (<>), colon, ($+$),
quotes)
import Text.PrettyPrint.Pp(Pp, pp, pnest, above)
import Data.Set(Set)
import Data.Map(assocs, empty, insertWith, Map)
import Data.Foldable(toList)
import Data.List (intersperse, nub)
import Data.List.GroupSort (groupSort)
import Data.Generics(Data, Typeable, toConstr, constrIndex, ConIndex)
import Prelude hiding(FilePath)
import qualified Prelude
data Occ a = Occ ConIndex a
instance Eq (Occ a) where
a == b = compare a b == EQ
instance Ord (Occ a) where
Occ a _ `compare` Occ b _ = a `compare` b
occurrences :: Data a => [a] -> [(Integer,a)]
occurrences as =
[(i,a) | (Occ _ a, i) <- assocs (foldr occ empty [Occ (constrIndex (toConstr a)) a | a <- as])]
where occ :: (Eq a, Ord a) => a -> Map a Integer -> Map a Integer
occ a = insertWith (+) a 1
data Error =
DuplicateAccessVectorDefinition ClassId ClassId
| DuplicateClassPermission PermissionId ClassId
| DuplicateCommonDef CommonId CommonId
| DuplicateCommonPermission PermissionId CommonId
| DuplicateDefinitions [M4Id]
| DuplicateIdentifier Identifier Identifier
| DuplicateMacroDef Identifier Identifier
| DuplicateOccurrences M4Id Identifier
| DuplicateSymbolDeclaration Identifier Kind Identifier
| ErrorsIn Error [Error]
| FragmentKindError [SignedId Identifier] (Set ParameterKind)
| IllegalFragment String Identifier
| IllegalMacroUse Identifier Identifier
| IllegalParameterUse ParameterMap
| IllegalSymbolDeclarations (Set Identifier)
| IllegalSymbolReference Identifier [Identifier]
| InconsistentMacroDefinitions [Identifier]
| InconsistentSymbolDefinitions [(Identifier, Kind)]
| InDefinition M4Id
| InImplementation LayerModule
| MissingAccessVectorDefinition ClassId
| MissingModuleConfig ModuleId
| ModuleIdMismatch ModuleId Prelude.FilePath
| MutuallyRecursive [[M4Id]]
| RefPolicyWarnCall Identifier [String]
| KindMismatch Kind (Set Kind) Identifier
| UndefinedCall M4Id
| UndefinedCommonId CommonId
| UndefinedIdentifier Identifier
| UndefinedIds [(Identifier, Kind)]
| UndocumentedParameters M4Id [ParameterInfo]
| UnknownIfdefId IfdefId
| UnknownModuleConfigNames (Set ModuleId)
| UnusedArguments M4Id [ParameterInfo]
| WhenTunableTrue CondExpr
| WhenTunableFalse CondExpr
| WrongNumberOfArguments M4Id [ParameterInfo] Identifier
deriving (Eq, Ord, Show, Typeable, Data)
flatten :: [Error] -> [Error]
flatten = flip flat [] where
flat (ErrorsIn _ es:l) r = flat es (flat l r)
flat (e:l) r = e:flat l r
flat [] r = r
nubError :: [Error] -> [Error]
nubError es = nub $ map nubErrorsIn es
nubErrorsIn :: Error -> Error
nubErrorsIn (ErrorsIn x es) = ErrorsIn x $ nubError es
nubErrorsIn x = x
distribute :: [Error] -> [Error]
distribute = concatMap dist where
dist :: Error -> [Error]
dist (ErrorsIn e es) = map (ErrorsIn e . (:[])) (distribute es)
dist e = [e]
unite :: [Error] -> [Error]
unite (ErrorsIn e es:ErrorsIn e' es':es'') | e == e' = unite (ErrorsIn e (es++es'):es'')
unite (ErrorsIn e es:es') = ErrorsIn e (unite es):unite es'
unite (e:es) = e:unite es
unite [] = []
deepIndex :: Error -> ConIndex
deepIndex (ErrorsIn _ [e]) = deepIndex e
deepIndex (ErrorsIn _ _) = error "deepIndex"
deepIndex e = constrIndex (toConstr e)
errorsByType :: [Error] -> [[Error]]
errorsByType = map unite . map snd . groupSort deepIndex . distribute
instance Pp Error where
pp (DuplicateAccessVectorDefinition c oc) = text "Duplicate access-vector definition:" <+> pShow c
$+$ pnest (text "defined at" <+> pShow oc)
pp (DuplicateClassPermission p _c) = text "Duplicate permission:" <+> pShow p
pp (DuplicateCommonDef c oc) = text "Duplicate common definition:" <+> pShow c
$+$ pnest (text "defined at" <+> pShow oc)
pp (DuplicateCommonPermission p _c) = text "Duplicate permission:" <+> pShow p
pp (DuplicateDefinitions mds) = text "Duplicate definitions:" <+> pShow mds
pp (DuplicateIdentifier i oi) = text "Duplicate identifier:" <+> pShow i
$+$ pnest (text "defined at" <+> pShow oi)
pp (DuplicateMacroDef i oi) = text "Duplicate definition of macro:" <+> pShow i
$+$ pnest (text "defined at" <+> pShow oi)
pp (DuplicateOccurrences _i p) = text "Duplicate occurrences of identifier: " <+> pShow p
pp (DuplicateSymbolDeclaration i k oi) = text "Duplicate symbol declaration:" <+> pShow (i,k)
$+$ pnest (text "defined at" <+> pShow oi)
pp (ErrorsIn e es) = pp e <> colon $+$ pnest (above (intersperse (text "") (map pp es)))
pp (FragmentKindError a fk) = text "Kind error: expected fragment parameter of kind" <+> pShow fk <> text "but saw complex parameter:" <+> pShow a
pp (IllegalFragment s i) = text "Fragment" <+> quotes (text s) <+> text "is defined as a macro:" <+> pShow i
pp (IllegalMacroUse i oi) = text "Illegal use of macro:" <+> pShow i $+$ pnest (text "defined at" <+> pShow oi)
pp (IllegalParameterUse ps) = text "Illegal use of parameters in implementation:" <+> text (show ps)
pp (IllegalSymbolDeclarations is) = text "Illegal symbol declarations in interface:" <+> pShow (toList is)
pp (IllegalSymbolReference i is) = text "Illegal symbol reference across modules:" <+> pShow i
$+$ pnest (if null is then text "is undefined." else text "is defined at" <+> pShow is)
pp (InconsistentMacroDefinitions ms) = text "Inconsistent macro definitions (not defined in both branches of an ifdef):" <+> pShow ms
pp (InconsistentSymbolDefinitions is) = text "Inconsistent symbol declarations (not defined in both branches of an ifdef):" <+> pShow is
pp (InDefinition i) = text "In definition of" <+> pShow i
pp (InImplementation lm) = text "In implementation of" <+> pShowLayerModule lm
pp (MissingAccessVectorDefinition c) = text "Missing access-vector definition for" <+> pShow c
pp (MissingModuleConfig mi) = text "Missing module configuration for" <+> pShow mi
pp (ModuleIdMismatch mi m) = text "File base name" <+> text m <+> text "doesn't match module name:" <+> pShow mi
pp (MutuallyRecursive mcs) = text "Mutually recursive definitions:" <+> pShow mcs
pp (RefPolicyWarnCall i ws) = text "Call to macro with refpolicywarnings:" <+> pShow i $+$ pnest (above (map text ws))
pp (KindMismatch k ks i) = text "Kind mismatch: expected" <+> pShow k <> text ", got" <+> pShow ks <> colon <+> pShow i
pp (UndefinedCall i) = text "Call to undefined macro:" <+> pShow i
pp (UndefinedCommonId c) = text "Undefined commonId:" <+> pShow c
pp (UndefinedIdentifier p) = text "Undefined identifier:" <+> pShow p
pp (UndefinedIds is) = text "Undefined identifiers (need to be declared or put in require block):" <+> pShow is
pp (UndocumentedParameters _i p) = text "Undocumented parameters (missing parameter names) in" <+> ppParameters p
pp (UnknownIfdefId i) = text "Unknown identifier in ifdef:" <+> pShow i
pp (UnknownModuleConfigNames is) = text "Unknown module identifiers:" <+> pShow (toList is)
pp (UnusedArguments _i u) = text "Unused parameters (with kind {any}) in" <+> ppParameters u
pp (WhenTunableTrue c) = text "When" <+> pp c
pp (WhenTunableFalse c) = text "When not(" <> pp c <> text ")"
pp (WrongNumberOfArguments i pks oi) = text "Wrong number of arguments:" <+> pp i <> ppParameters pks <+> showPos (pos i)
$+$ pnest (text "defined at" <+> pShow oi)
| GaloisInc/sk-dev-platform | libs/SCD/src/SCD/M4/Errors.hs | bsd-3-clause | 8,929 | 0 | 16 | 2,374 | 2,680 | 1,369 | 1,311 | 140 | 3 |
import Control.Concurrent
import Control.Concurrent.STM.TVar
import Control.Monad
import Control.Monad.STM
import Data.IORef
import Data.Map
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import Graphics.Rendering.OpenGL
import Graphics.UI.GLFW as GLFW
import System.Exit (exitWith, ExitCode (..))
import System.FlyCap
import System.IO.Unsafe
import Prelude hiding (lookup)
data LayoutState = LayoutState {projectMatrix :: GLmatrix GLdouble, dictionary :: Map Int (GLmatrix GLdouble)}
main = do
cs <- createContexts
let n = length cs -- n is the number of cameras we have
print n
zipWithM_ cameraInit cs [0..]
hStartSCapture n cs
ind <- newMVar 0
GLFW.init
GLFW.defaultWindowHints
Just win <- GLFW.createWindow 640 (480 + 480 `div` n) "testing images" Nothing Nothing
GLFW.makeContextCurrent (Just win)
(texs, pMatrix) <- initGL n
layout <- newTVarIO LayoutState {projectMatrix = pMatrix ,dictionary = empty}
GLFW.setKeyCallback win (Just (keyPressed cs))
GLFW.setFramebufferSizeCallback win (Just (resize layout n))
GLFW.setWindowCloseCallback win (Just (shutdown cs))
GLFW.setMouseButtonCallback win (Just (mouseClick layout n ind))
resize layout n win 640 (480 + 480 `div` n)
forever $ do
GLFW.pollEvents
zipWithM_ loadT cs texs
i <-takeMVar ind
putMVar ind i
display layout texs i
GLFW.swapBuffers win
loadT :: Context -> TextureObject -> IO()
loadT c tex = do
i <- hRetBuff c
_ <- loadTex i tex
destroyImage i
getT :: CImage -> IO ()
getT (CImage r c str pData dS f bF iI) = do
(texImage2D Nothing NoProxy 0 Luminance8 (TextureSize2D (fromIntegral c) (fromIntegral r)) 0 (PixelData Luminance UnsignedByte pData))
display :: TVar LayoutState -> [TextureObject] -> Int -> IO ()
display tvar texs i = do
let num = length texs
n = fromIntegral num
clear [ColorBuffer]
loadIdentity
LayoutState pMatrix dictionary <- readTVarIO tvar
zipWithM_ (displayCam num dictionary ) texs [0..]
loadIdentity
translate (Vector3 0 (240/n) (0:: GLfloat))
drawTex (-320, 320, -240, 240::GLfloat) (texs !! i)
flush
displayCam :: Int -> Map Int (GLmatrix GLdouble) -> TextureObject -> Int -> IO ()
displayCam n dictionary tex i = do
let (Just mMatrix) = lookup i dictionary
matrix (Just $ Modelview 0) $= mMatrix
drawTex (-320, 320, -240, 240::GLfloat) tex
drawTex :: (GLfloat, GLfloat, GLfloat, GLfloat) -> TextureObject -> IO ()
drawTex (xl, xh, yl, yh) tex = do
textureBinding Texture2D $= Just tex
renderPrimitive Quads $ do -- render/draw the image
texCoord (TexCoord2 0 (0::GLfloat))
vertex (Vertex3 (xl) (yh) (0::GLfloat))
texCoord (TexCoord2 1 (0::GLfloat))
vertex (Vertex3 (xh) (yh) (0::GLfloat))
texCoord (TexCoord2 1 (1::GLfloat))
vertex (Vertex3 (xh) (yl) (0::GLfloat))
texCoord(TexCoord2 0 (1::GLfloat))
vertex (Vertex3 (xl) (yl) (0::GLfloat))
loadTex :: CImage -> TextureObject -> IO TextureObject
loadTex im tex = do
textureBinding Texture2D $= Just (tex)
textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
getT im -- put the image into the texture
return $ tex
cameraInit ::Context -> Int -> IO()
cameraInit c i = do
pgr <- hGetCamIndex c i
hConnect c pgr
hSetVMandFR c VM800x600_Y8 Fr_30
cameraStop ::Context -> IO()
cameraStop c = do
hStopCapture c
hDisconnect c
hDestroyContext c
resize :: TVar LayoutState -> Int -> GLFW.WindowSizeCallback
resize tvar n win width height =
let (compositeWidth, compositeHeight) = (640, 480 + 480/(realToFrac n))
w = (fromIntegral width :: GLdouble)
h = (fromIntegral height :: GLdouble)
compositeAspect = compositeWidth / compositeHeight
winAspect = w/h
(coordMinX, coordMaxX, coordMinY, coordMaxY)
| winAspect > compositeAspect = (-compositeHeight/2 *winAspect, compositeHeight/2 *winAspect,(-compositeHeight)/2, compositeHeight/2) --wide
| winAspect < compositeAspect = (-compositeWidth/2,compositeWidth/2, (-compositeWidth)/2 /winAspect, compositeWidth/winAspect/2) --tall
| otherwise = ((-compositeWidth)/2,compositeWidth/2,(-compositeHeight)/2, compositeHeight/2)
in do
loadIdentity
dictionary' <- foldM (makeDictionary n) empty [0..n-1]
viewport $= ((Position 0 0), (Size (fromIntegral width)((fromIntegral height) :: GLsizei)))
matrixMode $= Projection
loadIdentity
ortho coordMinX coordMaxX coordMinY coordMaxY (-1) (1 :: GLdouble)
pMatrix <- (get $ matrix (Just Projection) :: IO (GLmatrix GLdouble))
matrixMode $= Modelview 0
loadIdentity
atomically $ writeTVar tvar (LayoutState {projectMatrix = pMatrix, dictionary = dictionary'})
flush
makeDictionary :: Int -> Map Int (GLmatrix GLdouble) -> Int -> IO (Map Int (GLmatrix GLdouble))
makeDictionary num dictionary i = do
let n = realToFrac num
translate (Vector3 (-320 + 320/n + 640/n*(fromIntegral i)) (-240) (0::GLfloat))
scale (1/n) (1/n) (1::GLfloat)
matrix <- (get $ matrix (Just $ Modelview 0) :: IO (GLmatrix GLdouble))
loadIdentity
return $ insert i matrix dictionary
initGL :: Int -> IO ([TextureObject], GLmatrix GLdouble)
initGL num = do
GLFW.windowHint $ WindowHint'RedBits 8
GLFW.windowHint $ WindowHint'GreenBits 8
GLFW.windowHint $ WindowHint'BlueBits 8
texture Texture2D $= Enabled
matrixMode $= Projection
loadIdentity
matrixMode $= Modelview 0
texs <- genObjectNames (num)
pMatrix <- (get $ matrix (Just Projection) :: IO (GLmatrix GLdouble))
flush
return (texs, pMatrix)
mouseClick :: (TVar LayoutState) -> Int -> MVar Int -> GLFW.MouseButtonCallback
mouseClick tvar num ind win (MouseButton'1) (MouseButtonState'Released) (ModifierKeys False False False False) = do
(x,y) <- getWindowSize win
(mX, mY) <- getCursorPos win
LayoutState pMatrix dictionary' <- readTVarIO tvar
vport <- get viewport
let newDictionary = (Data.Map.map (\mV -> unProject (Vertex3 (realToFrac mX) (realToFrac mY) (0.0::GLdouble)) mV pMatrix vport) dictionary' :: Map Int (IO (Vertex3 GLdouble)))
mapM_ (\(k,v) -> setCurrent k ind v) (assocs newDictionary)
flush
mouseClick _ _ _ _ _ _ _ = return ()
setCurrent :: Int -> MVar Int -> IO (Vertex3 GLdouble) -> IO ()
setCurrent i mvar v3 = do
(Vertex3 x y z) <- v3
print $ unwords [show x, show y, show z]
  if x <= 320 && x >= -320 && y <= 1200 && y >= 725
    then do takeMVar mvar
            putMVar mvar i
    else return ()
keyPressed :: [Context] -> GLFW.KeyCallback
keyPressed cs win GLFW.Key'Escape _ GLFW.KeyState'Pressed _ = shutdown cs win
keyPressed _ _ _ _ _ _ = return ()
shutdown :: [Context] -> GLFW.WindowCloseCallback
shutdown cs win = do
mapM_ cameraStop cs
GLFW.destroyWindow win
GLFW.terminate
exitWith ExitSuccess
--fix y values from mouseclick | imalsogreg/flycap | tests/multCameras.hs | bsd-3-clause | 6,866 | 0 | 18 | 1,373 | 2,771 | 1,376 | 1,395 | 164 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Service.User where
import Base
import Model
import qualified Data.Text as T
import Database.Persist
import Database.Persist.Sql
import Servant
data RegisterRequest = RegisterRequest
{ username :: Text
, password :: Text
} deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
data LoginRequest = LoginRequest
{ signature :: Text
, tokenType :: Maybe TokenType
} deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
data UserResponse = UserResponse
{ userId :: ID
, createTime :: UTCTime
, username :: Text
, roleType :: RoleType
} deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
-- Normal API
type UserApi = CheckUserToken :> Get '[JSON] UserResponse
:<|> ReqBody '[JSON] RegisterRequest :> PostCreated '[JSON] NoContent
:<|> "tokens" :> ReqBody '[JSON] LoginRequest :> Post '[JSON] TokenResponse
:<|> CheckUserToken :> "tokens" :> QueryParam "key" Text :> Put '[JSON] TokenResponse
:<|> CheckUserToken :> "tokens" :> DeleteNoContent '[JSON] NoContent
:<|> CheckUserToken :> "tickets" :> QueryParams "type" TicketType :> Post '[JSON] TokenResponse
userService :: ServerT UserApi App
userService = getUser
:<|> register
:<|> login
:<|> refresh
:<|> revokeToken
:<|> checkoutTicket
-- Administration API
type UserAdminApi = CheckAdminToken :> Capture "userId" ID :> Get '[JSON] UserResponse
:<|> CheckAdminToken :> GetPage '[JSON] UserResponse
userAdminService :: ServerT UserAdminApi App
userAdminService = getUserById
:<|> getUsers
-- Public API
type UserOpenApi = CheckTicket :> "user-info" :> Get '[JSON] UserResponse
userOpenService :: ServerT UserOpenApi App
userOpenService = getUser
getUser :: Token -> App UserResponse
getUser token = getUserById token (tokenUser token)
getUserById :: Token -> ID -> App UserResponse
getUserById _ id = let go (Just u) = return $ toUserResponse id u
go Nothing = throwM $ User_NotFound id
in withLogName "user" $ runTrans (get $ toSqlKey id) >>= go
toUserResponse :: ID -> EntityUser -> UserResponse
toUserResponse id u = UserResponse id (entityUserCreateTime u) (entityUserName u) (entityUserType u)
getUsers :: Token -> PageableApp UserResponse
getUsers _ p s = fmap to <$> runTrans (selectPageList [] p s)
where to entity = toUserResponse (fromSqlKey $ entityKey entity) (entityVal entity)
register :: RegisterRequest -> App NoContent
register (RegisterRequest username password) = do
checkName username
checkPassword password
salt <- liftIO $ randomHex 16
id <- runTrans $ do -- Create User
now <- getNow
let user = EntityUser username salt (hmacSha256 salt password) RoleUser now Nothing
entityId <- insert user `catchAll` throwM
when (fromSqlKey entityId == 1) $
update entityId [EntityUserType =. RoleAdmin, EntityUserUpdateTime =. Just now]
return $ fromSqlKey entityId
return NoContent `thenNotify` UserEvent UserRegister id (Just username)
login :: LoginRequest -> App TokenResponse
login (LoginRequest s t) = do
checkBasicAuth s
let (username,password) = T.breakOn ":" s
getUser = runTrans $ getBy (EntityUserUniqueName username) >>= convert
convert Nothing = throwM AuthenticationException
convert (Just user) = return (fromSqlKey $ entityKey user, entityVal user)
tokenType = fromMaybe TokenUser t
toToken (id, user) | checkPassword user && canSign tokenType (entityUserType user)
= signToken id tokenType [] `thenNotify` UserEvent UserLogin id (Just username)
| otherwise = throwM AuthenticationException
checkPassword user = hmacSha256 (entityUserSalt user) (T.tail password) == entityUserPassword user
getUser >>= toToken
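-- Example for the login signature above (illustrative only, values assumed):
-- a LoginRequest with signature "alice:secret" is split by breakOn into
-- ("alice", ":secret"); T.tail drops the leading ':' before the password is
-- hashed with the stored salt and compared.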
toTokenType :: Token -> TokenType
toTokenType = tokenType
-- A single-use token may only be refreshed with its matching refresh key;
-- comparing against 'Just' also rejects a missing key without the partial
-- 'fromJust'.
refresh :: Token -> Maybe Text -> App TokenResponse
refresh token mayK = if isOnce (toTokenType token)
                        && mayK /= Just (tokenRefreshSecret token)
  then throwM AuthenticationException
  else do revokeToken token
          signToken (tokenUser token) (toTokenType token) (tokenClaims token)
            `thenNotify` UserEvent UserRefresh (tokenUser token) Nothing
checkoutTicket :: Token -> [TicketType] -> App TokenResponse
checkoutTicket token = signToken (tokenUser token) Ticket
| leptonyu/mint | corn-server/src/Service/User.hs | bsd-3-clause | 5,253 | 0 | 24 | 1,516 | 1,372 | 691 | 681 | 100 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.TypeLits.Singletons.Internal
-- Copyright : (C) 2014 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Ryan Scott
-- Stability : experimental
-- Portability : non-portable
--
-- Defines and exports singletons useful for the 'Natural', 'Symbol', and
-- 'Char' kinds. This exports the internal, unsafe constructors. Use import
-- "GHC.TypeLits.Singletons" for a safe interface.
--
----------------------------------------------------------------------------
module GHC.TypeLits.Singletons.Internal (
Sing,
Natural, Symbol, Char,
SNat(..), SSymbol(..), SChar(..),
withKnownNat, withKnownSymbol, withKnownChar,
Error, sError,
ErrorWithoutStackTrace, sErrorWithoutStackTrace,
Undefined, sUndefined,
KnownNat, TN.natVal, KnownSymbol, symbolVal, KnownChar, charVal,
type (^), (%^),
type (<=?), (%<=?),
-- * Defunctionalization symbols
ErrorSym0, ErrorSym1,
ErrorWithoutStackTraceSym0, ErrorWithoutStackTraceSym1,
UndefinedSym0,
type (^@#@$), type (^@#@$$), type (^@#@$$$),
type (<=?@#@$), type (<=?@#@$$), type (<=?@#@$$$)
) where
import Data.Bool.Singletons
import Data.Eq.Singletons
import Data.Kind
import Data.Ord.Singletons as O
import Data.Singletons
import Data.Singletons.Decide
import Data.Singletons.TH
import GHC.Show (appPrec, appPrec1)
import GHC.Stack (HasCallStack)
import GHC.TypeLits as TL
import qualified GHC.TypeNats as TN
import Unsafe.Coerce
import qualified Data.Text as T
import Data.Text ( Text )
----------------------------------------------------------------------
---- TypeLits singletons ---------------------------------------------
----------------------------------------------------------------------
type SNat :: Natural -> Type
data SNat (n :: Natural) = KnownNat n => SNat
type instance Sing = SNat
instance KnownNat n => SingI n where
sing = SNat
instance SingKind Natural where
type Demote Natural = Natural
fromSing (SNat :: Sing n) = TN.natVal (Proxy :: Proxy n)
toSing n = case TN.someNatVal n of
SomeNat (_ :: Proxy n) -> SomeSing (SNat :: Sing n)
type SSymbol :: Symbol -> Type
data SSymbol (n :: Symbol) = KnownSymbol n => SSym
type instance Sing = SSymbol
instance KnownSymbol n => SingI n where
sing = SSym
instance SingKind Symbol where
type Demote Symbol = Text
fromSing (SSym :: Sing n) = T.pack (symbolVal (Proxy :: Proxy n))
toSing s = case someSymbolVal (T.unpack s) of
SomeSymbol (_ :: Proxy n) -> SomeSing (SSym :: Sing n)
type SChar :: Char -> Type
data SChar (c :: Char) = KnownChar c => SChar
type instance Sing = SChar
instance KnownChar c => SingI c where
sing = SChar
instance SingKind Char where
type Demote Char = Char
fromSing (SChar :: Sing c) = charVal (Proxy :: Proxy c)
toSing sc = case someCharVal sc of
SomeChar (_ :: Proxy c) -> SomeSing (SChar :: Sing c)
-- SDecide instances:
instance SDecide Natural where
(SNat :: Sing n) %~ (SNat :: Sing m)
| Just r <- TN.sameNat (Proxy :: Proxy n) (Proxy :: Proxy m)
= Proved r
| otherwise
= Disproved (\Refl -> error errStr)
where errStr = "Broken Natural singletons"
instance SDecide Symbol where
(SSym :: Sing n) %~ (SSym :: Sing m)
| Just r <- sameSymbol (Proxy :: Proxy n) (Proxy :: Proxy m)
= Proved r
| otherwise
= Disproved (\Refl -> error errStr)
where errStr = "Broken Symbol singletons"
instance SDecide Char where
(SChar :: Sing n) %~ (SChar :: Sing m)
| Just r <- sameChar (Proxy :: Proxy n) (Proxy :: Proxy m)
= Proved r
| otherwise
= Disproved (\Refl -> error errStr)
where errStr = "Broken Char singletons"
-- PEq instances
instance PEq Natural where
type x == y = DefaultEq x y
instance PEq Symbol where
type x == y = DefaultEq x y
instance PEq Char where
type x == y = DefaultEq x y
-- need SEq instances for TypeLits kinds
instance SEq Natural where
(SNat :: Sing n) %== (SNat :: Sing m)
= case sameNat (Proxy :: Proxy n) (Proxy :: Proxy m) of
Just Refl -> STrue
Nothing -> unsafeCoerce SFalse
instance SEq Symbol where
(SSym :: Sing n) %== (SSym :: Sing m)
= case sameSymbol (Proxy :: Proxy n) (Proxy :: Proxy m) of
Just Refl -> STrue
Nothing -> unsafeCoerce SFalse
instance SEq Char where
(SChar :: Sing n) %== (SChar :: Sing m)
= case sameChar (Proxy :: Proxy n) (Proxy :: Proxy m) of
Just Refl -> STrue
Nothing -> unsafeCoerce SFalse
-- POrd instances
instance POrd Natural where
type (a :: Natural) `Compare` (b :: Natural) = a `TN.CmpNat` b
instance POrd Symbol where
type (a :: Symbol) `Compare` (b :: Symbol) = a `TL.CmpSymbol` b
instance POrd Char where
type (a :: Char) `Compare` (b :: Char) = a `TL.CmpChar` b
-- SOrd instances
instance SOrd Natural where
a `sCompare` b = case fromSing a `compare` fromSing b of
LT -> unsafeCoerce SLT
EQ -> unsafeCoerce SEQ
GT -> unsafeCoerce SGT
instance SOrd Symbol where
a `sCompare` b = case fromSing a `compare` fromSing b of
LT -> unsafeCoerce SLT
EQ -> unsafeCoerce SEQ
GT -> unsafeCoerce SGT
instance SOrd Char where
a `sCompare` b = case fromSing a `compare` fromSing b of
LT -> unsafeCoerce SLT
EQ -> unsafeCoerce SEQ
GT -> unsafeCoerce SGT
-- Show instances
-- These are a bit special because the singleton constructor does not uniquely
-- determine the type being used in the constructor's return type (e.g., all Naturals
-- have the same singleton constructor, SNat). To compensate for this, we display
-- the type being used using visible type application. (Thanks to @cumber on #179
-- for suggesting this implementation.)
instance Show (SNat n) where
showsPrec p n@SNat
= showParen (p > appPrec)
( showString "SNat @"
. showsPrec appPrec1 (TN.natVal n)
)
instance Show (SSymbol s) where
showsPrec p s@SSym
= showParen (p > appPrec)
( showString "SSym @"
. showsPrec appPrec1 (symbolVal s)
)
instance Show (SChar c) where
showsPrec p s@SChar
= showParen (p > appPrec)
( showString "SChar @"
. showsPrec appPrec1 (charVal s)
)
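-- For example (illustrative only):
--
-- show (SNat :: Sing 5) == "SNat @5"
-- show (SSym :: Sing "hi") == "SSym @\"hi\""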
-- Convenience functions
-- | Given a singleton for @Nat@, call something requiring a
-- @KnownNat@ instance.
withKnownNat :: Sing n -> (KnownNat n => r) -> r
withKnownNat SNat f = f
-- | Given a singleton for @Symbol@, call something requiring
-- a @KnownSymbol@ instance.
withKnownSymbol :: Sing n -> (KnownSymbol n => r) -> r
withKnownSymbol SSym f = f
-- | Given a singleton for @Char@, call something requiring
-- a @KnownChar@ instance.
withKnownChar :: Sing n -> (KnownChar n => r) -> r
withKnownChar SChar f = f
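-- For instance (illustrative only, not part of the original module):
--
-- > natToInteger :: forall n. Sing (n :: Natural) -> Integer
-- > natToInteger sn = withKnownNat sn (toInteger (TN.natVal (Proxy :: Proxy n)))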
-- | The promotion of 'error'. This version is more poly-kinded for
-- easier use.
type Error :: k0 -> k
type family Error (str :: k0) :: k where {}
$(genDefunSymbols [''Error])
instance SingI (ErrorSym0 :: Symbol ~> a) where
sing = singFun1 sError
-- | The singleton for 'error'
sError :: HasCallStack => Sing (str :: Symbol) -> a
sError sstr = error (T.unpack (fromSing sstr))
-- | The promotion of 'errorWithoutStackTrace'. This version is more
-- poly-kinded for easier use.
type ErrorWithoutStackTrace :: k0 -> k
type family ErrorWithoutStackTrace (str :: k0) :: k where {}
$(genDefunSymbols [''ErrorWithoutStackTrace])
instance SingI (ErrorWithoutStackTraceSym0 :: Symbol ~> a) where
sing = singFun1 sErrorWithoutStackTrace
-- | The singleton for 'errorWithoutStackTrace'.
sErrorWithoutStackTrace :: Sing (str :: Symbol) -> a
sErrorWithoutStackTrace sstr = errorWithoutStackTrace (T.unpack (fromSing sstr))
-- | The promotion of 'undefined'.
type Undefined :: k
type family Undefined :: k where {}
$(genDefunSymbols [''Undefined])
-- | The singleton for 'undefined'.
sUndefined :: HasCallStack => a
sUndefined = undefined
-- | The singleton analogue of '(TN.^)' for 'Natural's.
(%^) :: Sing a -> Sing b -> Sing (a ^ b)
sa %^ sb =
let a = fromSing sa
b = fromSing sb
ex = TN.someNatVal (a ^ b)
in
case ex of
SomeNat (_ :: Proxy ab) -> unsafeCoerce (SNat :: Sing ab)
infixr 8 %^
-- Defunctionalization symbols for type-level (^)
$(genDefunSymbols [''(^)])
instance SingI (^@#@$) where
sing = singFun2 (%^)
instance SingI x => SingI ((^@#@$$) x) where
sing = singFun1 (sing @x %^)
instance SingI1 (^@#@$$) where
liftSing s = singFun1 (s %^)
-- | The singleton analogue of 'TN.<=?'
--
-- Note that, because of historical reasons in GHC's 'Natural' API, 'TN.<=?'
-- is incompatible (unification-wise) with 'O.<=' and the 'PEq', 'SEq',
-- 'POrd', and 'SOrd' instances for 'Natural'. @(a '<=?' b) ~ 'True@ does not
-- imply anything about @a 'O.<=' b@ or any other 'PEq' / 'POrd'
-- relationships.
--
-- (Be aware that 'O.<=' in the paragraph above refers to 'O.<=' from the
-- 'POrd' typeclass, exported from "Data.Ord.Singletons", and /not/
-- the 'TN.<=' from "GHC.TypeNats". The latter is simply a type alias for
-- @(a 'TN.<=?' b) ~ 'True@.)
--
-- This is provided here for the sake of completeness and for compatibility
-- with libraries with APIs built around '<=?'. New code should use
-- 'CmpNat', exposed through this library through the 'POrd' and 'SOrd'
-- instances for 'Natural'.
(%<=?) :: forall (a :: Natural) (b :: Natural). Sing a -> Sing b -> Sing (a <=? b)
sa %<=? sb = unsafeCoerce (sa %<= sb)
infix 4 %<=?
-- Defunctionalization symbols for (<=?)
$(genDefunSymbols [''(<=?)])
instance SingI ((<=?@#@$) @Natural) where
sing = singFun2 (%<=?)
instance SingI x => SingI ((<=?@#@$$) @Natural x) where
sing = singFun1 (sing @x %<=?)
instance SingI1 ((<=?@#@$$) @Natural) where
liftSing s = singFun1 (s %<=?)
| goldfirere/singletons | singletons-base/src/GHC/TypeLits/Singletons/Internal.hs | bsd-3-clause | 10,075 | 11 | 13 | 2,152 | 2,801 | 1,508 | 1,293 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable,OverloadedStrings #-}
module XMonad.Actions.XHints.Translate where
import Data.Typeable
import XMonad.Actions.XHints.State
import qualified Language.Bing as B
import Language.Bing (BingLanguage,BingContext,ClientId,ClientSecret,getAccessToken,execBing)
import Control.Monad.IO.Class (MonadIO,liftIO)
import Data.Text (Text)
import Control.Monad.State.Strict
import qualified Data.Text as T
import XMonad.Actions.XHints.Helpers (newTextHint)
data Translator deriving Typeable
translateHint :: ClientId -> ClientSecret -> BingLanguage -> BingLanguage -> Text -> XHint Translator BingContext (Either Text Text)
translateHint clientId clientSecret from to text = do
s <- get
res <- B.runExceptT $ do
token <- case s of
Nothing -> getAccessToken clientId clientSecret
Just ctx -> return ctx
flip B.runBing token $ do
trans <- B.translateM text from to
ctx <- B.getBingCtx
return (trans,ctx)
case res of
Right (trans,token) -> put (Just token) >> return (Right $ T.pack $ show trans)
_ -> return $ Left "Error translating text"
translate clientId clientSecret from to = newTextHint $ translateHint clientId clientSecret from to
| netogallo/XHints | src/XMonad/Actions/XHints/Translate.hs | bsd-3-clause | 1,214 | 0 | 16 | 202 | 367 | 196 | 171 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Air.Cli.Parser where
import Control.Applicative ((<$>), (<*>), (<*))
import Data.Char (chr)
import Data.ByteString hiding (map, elem)
import Data.Attoparsec hiding (satisfy)
import Data.Attoparsec.Combinator
import Data.Attoparsec.ByteString.Char8
import Air.Cli (Command(..))
import qualified Air.Domain as D
import Air.Test
parseCommand :: ByteString -> Either String Command
parseCommand = parseOnly command
command :: Parser Command
command = choice [
createFlatmate
, deactivateFlatmate
, activateFlatmate
, createBill
, attendBill
, flatmateInformation
, payBill
, flatmateBalance
, flatmateDeposit
, payment
, quit
, help
]
-- eg parses the "quit"
quit :: Parser Command
quit = string "quit" >> return Quit
-- eg parses the "help"
help :: Parser Command
help = string "help" >> return Help
-- eg parses the "create flatmate username Bob Marley"
createFlatmate :: Parser Command
createFlatmate = do
cmd "create" "flatmate"
CreateFlatmate
<$> (username <* spaces)
<*> (many1 (letter <|> space))
-- eg parses the "deactivate flatmate username"
deactivateFlatmate :: Parser Command
deactivateFlatmate = do
cmd "deactivate" "flatmate"
DeactivateFlatmate <$> username
-- eg parses the "activate flatmate username"
activateFlatmate :: Parser Command
activateFlatmate = do
cmd "activate" "flatmate"
ActivateFlatmate <$> username
-- eg parses the "create bill billname [usera,userb]"
createBill :: Parser Command
createBill = do
cmd "create" "bill"
CreateBill
<$> (billname <* spaces)
<*> (listOf username)
-- eg parses the "attend bill user billname"
attendBill :: Parser Command
attendBill = do
cmd "attend" "bill"
AttendBill
<$> (username <* spaces)
<*> billname
-- eg parses the "information username"
flatmateInformation :: Parser Command
flatmateInformation = do
spacesAfter $ string "information"
FlatmateInformation <$> username
-- eg parses the "pay bill billname {number}"
payBill :: Parser Command
payBill = do
string "pay"
spaces
PayBill
<$> (billname <* spaces)
<*> decimal
-- eg parses the "flatmate balance username"
flatmateBalance :: Parser Command
flatmateBalance = do
cmd "flatmate" "balance"
FlatmateBalance <$> username
-- eg parses the "flatmate deposit username {number}"
flatmateDeposit :: Parser Command
flatmateDeposit =
FlatmateDeposit
<$> (username <* spaces <* string "deposits" <* spaces)
<*> decimal
-- eg parses the "payment [usera, userb] {number} description"
payment :: Parser Command
payment = do
spacesAfter $ string "payment"
Payment
<$> ((listOf username) <* spaces)
<*> (decimal <* spaces)
<*> (many1 (letter <|> space))
username = many1 letter
billname = many1 letter
cmd f s = do
f
spaces
s
spaces
-- Tools
spacesAfter parser = do
x <- parser
spaces
return x
brackets open parser close = do
open
x <- parser
close
return x
listOf p = brackets (char '[') (p `sepBy` (char ',' >> spaces)) (char ']')
spaces = many1 space
letter = accented_letter <|> letter_ascii
accented_letter = do
satisfy (flip elem (map chr [
225, 237, 369, 337, 252, 246, 250, 243, 233
, 193, 205, 368, 336, 220, 214, 218, 211, 201
]))
a <|> b = choice [a,b]
{-# INLINE (<|>) #-}
-- Test
parserTests = [
Equals (parseCommand "create flatmate user Bob User")
(Right (CreateFlatmate "user" "Bob User"))
"Parser Test: Create Flatmate"
, Equals (parseCommand "deactivate flatmate user")
(Right (DeactivateFlatmate "user"))
"Parser Test: Deactivate Flatmate"
, Equals (parseCommand "activate flatmate user")
(Right (ActivateFlatmate "user"))
"Parser Test: Activate Flatmate"
, Equals (parseCommand "create bill billname [usera, userb, userc]")
(Right (CreateBill "billname" ["usera", "userb", "userc"]))
"Parser Test: Create Bill"
, Equals (parseCommand "attend bill user bill")
(Right (AttendBill "user" "bill"))
"Parser Test: Attend Bill"
, Equals (parseCommand "information user")
(Right (FlatmateInformation "user"))
"Parser Test: Flatmate Information"
, Equals (parseCommand "pay common 10000")
(Right (PayBill "common" 10000))
"Parser Test: Pay Bill"
, Equals (parseCommand "flatmate balance user")
(Right (FlatmateBalance "user"))
"Parser Test: Flatmate Balance"
, Equals (parseCommand "user deposits 10000")
(Right (FlatmateDeposit "user" 10000))
"Parser Test: Flatmate Deposit"
, Equals (parseCommand "payment [usera, userb] 100 This is a description")
(Right (Payment ["usera", "userb"] 100 "This is a description"))
"Parser Test: Payment"
, Equals (parseCommand "quit") (Right Quit) "Parses Test: Quit"
, Equals (parseCommand "help") (Right Help) "Parses Test: Help"
]
| andorp/air | src/Air/Cli/Parser.hs | bsd-3-clause | 4,974 | 0 | 13 | 1,086 | 1,255 | 660 | 595 | 142 | 1 |
module ReplaceExperiment where
replaceWithP :: b -> Char
replaceWithP = const 'p'
lms :: [Maybe [Char]]
lms = [Just "Ave", Nothing, Just "woohoo"]
replaceWithP' :: [Maybe [Char]] -> Char
replaceWithP' = replaceWithP
liftedReplace :: Functor f => f a -> f Char
liftedReplace = fmap replaceWithP
liftedReplace' :: [Maybe [Char]] -> [Char]
liftedReplace' = liftedReplace
twiceLifted :: (Functor f1, Functor f) => f (f1 a) -> f (f1 Char)
twiceLifted = (fmap . fmap) replaceWithP
twiceLifted' :: [Maybe [Char]] -> [Maybe Char]
twiceLifted' = twiceLifted
thriceLifted :: (Functor f2, Functor f1, Functor f) => f (f1 (f2 a)) -> f (f1 (f2 Char))
thriceLifted = (fmap . fmap . fmap) replaceWithP
thriceLifted' :: [Maybe [Char]] -> [Maybe [Char]]
thriceLifted' = thriceLifted
main :: IO ()
main = do
putStr "replaceWithP' lms: "
print $ replaceWithP' lms
putStr "liftedReplace lms: "
print $ liftedReplace lms
putStr "liftedReplace' lms: "
print $ liftedReplace' lms
putStr "twiceLifted lms: "
print $ twiceLifted lms
putStr "twiceLifted' lms: "
print $ twiceLifted' lms
putStr "thriceLifted lms: "
print $ thriceLifted lms
putStr "thriceLifted' lms: "
print $ thriceLifted' lms
| chengzh2008/hpffp | src/ch16-Functor/oneMoreList.hs | bsd-3-clause | 1,231 | 0 | 11 | 246 | 458 | 226 | 232 | 35 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_AMD_shader_explicit_vertex_parameter - device extension
--
-- == VK_AMD_shader_explicit_vertex_parameter
--
-- [__Name String__]
-- @VK_AMD_shader_explicit_vertex_parameter@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 22
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - Qun Lin
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_AMD_shader_explicit_vertex_parameter] @linqun%0A<<Here describe the issue or question you have about the VK_AMD_shader_explicit_vertex_parameter extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-05-10
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/AMD/SPV_AMD_shader_explicit_vertex_parameter.html SPV_AMD_shader_explicit_vertex_parameter>
--
-- - This extension provides API support for
-- <https://www.khronos.org/registry/OpenGL/extensions/AMD/AMD_shader_explicit_vertex_parameter.txt GL_AMD_shader_explicit_vertex_parameter>
--
-- [__Contributors__]
--
-- - Matthaeus G. Chajdas, AMD
--
-- - Qun Lin, AMD
--
-- - Daniel Rakos, AMD
--
-- - Graham Sellers, AMD
--
-- - Rex Xu, AMD
--
-- == Description
--
-- This extension adds support for the following SPIR-V extension in
-- Vulkan:
--
-- - <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/AMD/SPV_AMD_shader_explicit_vertex_parameter.html SPV_AMD_shader_explicit_vertex_parameter>
--
-- == New Enum Constants
--
-- - 'AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME'
--
-- - 'AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION'
--
-- == Version History
--
-- - Revision 1, 2016-05-10 (Daniel Rakos)
--
-- - Initial draft
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_AMD_shader_explicit_vertex_parameter Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_AMD_shader_explicit_vertex_parameter ( AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION
, pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION
, AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME
, pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME
) where
import Data.String (IsString)
type AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION"
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION :: forall a . Integral a => a
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION = 1
type AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME = "VK_AMD_shader_explicit_vertex_parameter"
-- No documentation found for TopLevel "VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME"
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME = "VK_AMD_shader_explicit_vertex_parameter"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_AMD_shader_explicit_vertex_parameter.hs | bsd-3-clause | 3,769 | 0 | 8 | 721 | 197 | 151 | 46 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1999
Analysis functions over data types. Specifically, detecting recursive types.
This stuff is only used for source-code decls; it's recorded in interface
files for imported data types.
-}
{-# LANGUAGE CPP #-}
module TcTyDecls(
calcRecFlags, RecTyInfo(..),
calcSynCycles,
checkClassCycles,
-- * Roles
RoleAnnots, extractRoleAnnots, emptyRoleAnnots, lookupRoleAnnots,
-- * Implicits
tcAddImplicits,
-- * Record selectors
mkRecSelBinds, mkOneRecordSelector
) where
#include "HsVersions.h"
import TcRnMonad
import TcEnv
import TcTypeable( mkTypeableBinds )
import TcBinds( tcRecSelBinds )
import TyCoRep( Type(..), TyBinder(..), delBinderVar )
import TcType
import TysWiredIn( unitTy )
import MkCore( rEC_SEL_ERROR_ID )
import HsSyn
import Class
import Type
import HscTypes
import TyCon
import ConLike
import DataCon
import Name
import NameEnv
import RdrName ( mkVarUnqual )
import Id
import IdInfo
import VarEnv
import VarSet
import NameSet
import Coercion ( ltRole )
import Digraph
import BasicTypes
import SrcLoc
import Unique ( mkBuiltinUnique )
import Outputable
import Util
import Maybes
import Data.List
import Bag
import FastString
import Control.Monad
{-
************************************************************************
* *
Cycles in type synonym declarations
* *
************************************************************************
Checking for class-decl loops is easy, because we don't allow class decls
in interface files.
We allow type synonyms in hi-boot files, but we *trust* hi-boot files,
so we don't check for loops that involve them. So we only look for synonym
loops in the module being compiled.
We check for type synonym and class cycles on the *source* code.
Main reasons:
a) Otherwise we'd need a special function to extract type-synonym tycons
from a type, whereas we already have the free vars pinned on the decl
b) If we checked for type synonym loops after building the TyCon, we
can't do a hoistForAllTys on the type synonym rhs, (else we fall into
a black hole) which seems unclean. Apart from anything else, it'd mean
that a type-synonym rhs could have for-alls to the right of an arrow,
which means adding new cases to the validity checker
Indeed, in general, checking for cycles beforehand means we need to
be less careful about black holes through synonym cycles.
The main disadvantage is that a cycle that goes via a type synonym in an
.hi-boot file can lead the compiler into a loop, because it assumes that cycles
only occur entirely within the source code of the module being compiled.
But hi-boot files are trusted anyway, so this isn't much worse than (say)
a kind error.
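For orientation, the sort of loop the source-code check catches is simply
(hypothetical declarations, not from this module):
    type Rose a = (a, Forest a)
    type Forest a = [Rose a]
calcSynCycles puts both declarations into one CyclicSCC, which the caller
reports as an error; an acyclic chain of synonyms instead comes back as
AcyclicSCCs in dependency order, ready to be checked one at a time.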
[ NOTE ----------------------------------------------
If we reverse this decision, this comment came from tcTyDecl1, and should
go back there
-- dsHsType, not tcHsKindedType, to avoid a loop. tcHsKindedType does hoisting,
-- which requires looking through synonyms... and therefore goes into a loop
-- on (erroneously) recursive synonyms.
-- Solution: do not hoist synonyms, because they'll be hoisted soon enough
-- when they are substituted
We'd also need to add back in this definition
synonymTyConsOfType :: Type -> [TyCon]
-- Does not look through type synonyms at all
-- Return a list of synonym tycons
synonymTyConsOfType ty
= nameEnvElts (go ty)
where
go :: Type -> NameEnv TyCon -- The NameEnv does duplicate elim
go (TyVarTy v) = emptyNameEnv
go (TyConApp tc tys) = go_tc tc tys
go (AppTy a b) = go a `plusNameEnv` go b
go (FunTy a b) = go a `plusNameEnv` go b
go (ForAllTy _ ty) = go ty
go_tc tc tys | isTypeSynonymTyCon tc = extendNameEnv (go_s tys)
(tyConName tc) tc
| otherwise = go_s tys
go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys
---------------------------------------- END NOTE ]
-}
mkSynEdges :: [LTyClDecl Name] -> [(LTyClDecl Name, Name, [Name])]
mkSynEdges syn_decls = [ (ldecl, name, nameSetElems fvs)
| ldecl@(L _ (SynDecl { tcdLName = L _ name
, tcdFVs = fvs })) <- syn_decls ]
calcSynCycles :: [LTyClDecl Name] -> [SCC (LTyClDecl Name)]
calcSynCycles = stronglyConnCompFromEdgedVertices . mkSynEdges
{- Note [Superclass cycle check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C's superclass cycles transitively is
guaranteed to terminate. This is a Haskell98 requirement,
but one that we lift with -XUndecidableSuperClasses.
The worry is that a superclass cycle could make the type checker loop.
More precisely, with a constraint (Given or Wanted)
C ty1 .. tyn
one approach is to instantiate all of C's superclasses, transitively.
We can only do so if that set is finite.
This potential loop occurs only through superclasses. This, for
example, is fine
class C a where
op :: C b => a -> b -> b
even though C's full definition uses C.
Making the check static also makes it conservative. Eg
type family F a
class F a => C a
Here an instance of (F a) might mention C:
type instance F [a] = C a
and now we'd have a loop.
The static check works like this, starting with C
* Look at C's superclass predicates
* If any is a type-function application,
or is headed by a type variable, fail
* If any has C at the head, fail
* If any has a type class D at the head,
make the same test with D
A tricky point is: what if there is a type variable at the head?
Consider this:
class f (C f) => C f
class c => Id c
and now expand superclasses for constraint (C Id):
C Id
--> Id (C Id)
--> C Id
--> ....
Each step expands superclasses one layer, and clearly does not terminate.
-}
checkClassCycles :: Class -> Maybe SDoc
-- Nothing <=> ok
-- Just err <=> possible cycle error
checkClassCycles cls
= do { (definite_cycle, err) <- go (unitNameSet (getName cls))
cls (mkTyVarTys (classTyVars cls))
; let herald | definite_cycle = text "Superclass cycle for"
| otherwise = text "Potential superclass cycle for"
; return (vcat [ herald <+> quotes (ppr cls)
, nest 2 err, hint]) }
where
hint = text "Use UndecidableSuperClasses to accept this"
-- Expand superclasses starting with (C a b), complaining
-- if you find the same class a second time, or a type function
-- or predicate headed by a type variable
--
-- NB: this code duplicates TcType.transSuperClasses, but
-- with more error message generation clobber
-- Make sure the two stay in sync.
go :: NameSet -> Class -> [Type] -> Maybe (Bool, SDoc)
go so_far cls tys = firstJusts $
map (go_pred so_far) $
immSuperClasses cls tys
go_pred :: NameSet -> PredType -> Maybe (Bool, SDoc)
-- Nothing <=> ok
-- Just (True, err) <=> definite cycle
-- Just (False, err) <=> possible cycle
go_pred so_far pred -- NB: tcSplitTyConApp looks through synonyms
| Just (tc, tys) <- tcSplitTyConApp_maybe pred
= go_tc so_far pred tc tys
| hasTyVarHead pred
= Just (False, hang (text "one of whose superclass constraints is headed by a type variable:")
2 (quotes (ppr pred)))
| otherwise
= Nothing
go_tc :: NameSet -> PredType -> TyCon -> [Type] -> Maybe (Bool, SDoc)
go_tc so_far pred tc tys
| isFamilyTyCon tc
= Just (False, hang (text "one of whose superclass constraints is headed by a type family:")
2 (quotes (ppr pred)))
| Just cls <- tyConClass_maybe tc
= go_cls so_far cls tys
| otherwise -- Equality predicate, for example
= Nothing
go_cls :: NameSet -> Class -> [Type] -> Maybe (Bool, SDoc)
go_cls so_far cls tys
| cls_nm `elemNameSet` so_far
= Just (True, text "one of whose superclasses is" <+> quotes (ppr cls))
| isCTupleClass cls
= go so_far cls tys
| otherwise
= do { (b,err) <- go (so_far `extendNameSet` cls_nm) cls tys
; return (b, text "one of whose superclasses is" <+> quotes (ppr cls)
$$ err) }
where
cls_nm = getName cls
{-
************************************************************************
* *
Deciding which type constructors are recursive
* *
************************************************************************
Identification of recursive TyCons
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The knot-tying parameters: @rec_details_list@ is an alist mapping @Name@s to
@TyThing@s.
Identifying a TyCon as recursive serves two purposes
1. Avoid infinite types. Non-recursive newtypes are treated as
"transparent", like type synonyms, after the type checker. If we did
this for all newtypes, we'd get infinite types. So we figure out for
each newtype whether it is "recursive", and add a coercion if so. In
effect, we are trying to "cut the loops" by identifying a loop-breaker.
2. Avoid infinite unboxing. This has nothing to do with newtypes.
Suppose we have
data T = MkT Int T
f (MkT x t) = f t
Well, this function diverges, but we don't want the strictness analyser
to diverge. But the strictness analyser will diverge because it looks
deeper and deeper into the structure of T. (I believe there are
examples where the function does something sane, and the strictness
analyser still diverges, but I can't see one now.)
Now, concerning (1), the FC2 branch currently adds a coercion for ALL
newtypes. I did this as an experiment, to try to expose cases in which
the coercions got in the way of optimisations. If it turns out that we
can indeed always use a coercion, then we don't risk recursive types,
and don't need to figure out what the loop breakers are.
For newtype *families* though, we will always have a coercion, so they
are always loop breakers! So you can easily adjust the current
algorithm by simply treating all newtype families as loop breakers (and
indeed type families). I think.
For newtypes, we label some as "recursive" such that
INVARIANT: there is no cycle of non-recursive newtypes
In any loop, only one newtype need be marked as recursive; it is
a "loop breaker". Labelling more than necessary as recursive is OK,
provided the invariant is maintained.
A newtype M.T is defined to be "recursive" iff
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from T's rhs to T via type
synonyms, or non-recursive newtypes *in M*
e.g. newtype T = MkT (T -> Int)
(a) is conservative; declarations in hi-boot files are always
made loop breakers. That's why in (b) we can restrict attention
to tycons in M, because any loops through newtypes outside M
will be broken by those newtypes
(b) ensures that a newtype is not treated as a loop breaker in one place
and later as a non-loop-breaker. This matters in GHCi particularly, when
a newtype T might be embedded in many types in the environment, and then
T's source module is compiled. We don't want T's recursiveness to change.
The "recursive" flag for algebraic data types is irrelevant (never consulted)
for types with more than one constructor.
An algebraic data type M.T is "recursive" iff
it has just one constructor, and
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from its arg types to T via type synonyms,
or by non-recursive newtypes or non-recursive product types in M
e.g. data T = MkT (T -> Int) Bool
Just like newtype in fact
A type synonym is recursive if one can get from its
right hand side back to it via type synonyms. (This is
reported as an error.)
A class is recursive if one can get from its superclasses
back to it. (This is an error too.)
Hi-boot types
~~~~~~~~~~~~~
A data type read from an hi-boot file will have an AbstractTyCon as its AlgTyConRhs
and will respond True to isAbstractTyCon. The idea is that we treat these as if one
could get from these types to anywhere. So when we see
module Baz where
import {-# SOURCE #-} Foo( T )
newtype S = MkS T
then we mark S as recursive, just in case. What that means is that if we see
import Baz( S )
newtype R = MkR S
then we don't need to look inside S to compute R's recursiveness. Since S is imported
(not from an hi-boot file), one cannot get from R back to S except via an hi-boot file,
and that means that some data type will be marked recursive along the way. So R is
unconditionally non-recursive (i.e. there'll be a loop breaker elsewhere if necessary)
This in turn means that we grovel through fewer interface files when computing
recursiveness, because we need only look at the type decls in the module being
compiled, plus the outer structure of directly-mentioned types.
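A tiny, made-up illustration of the loop-breaker choice for newtypes:
    newtype A = MkA B
    newtype B = MkB A
Only one of A or B has to be marked Recursive: findLoopBreakers takes one
vertex from each cycle of nt_edges, and the other newtype can still be
treated as transparent, which is the whole point of keeping the set of
loop breakers small.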
-}
data RecTyInfo = RTI { rti_roles :: Name -> [Role]
, rti_is_rec :: Name -> RecFlag }
calcRecFlags :: SelfBootInfo -> Bool -- hs-boot file?
-> RoleAnnots -> [TyCon] -> RecTyInfo
-- The 'boot_names' are the things declared in M.hi-boot, if M is the current module.
-- Any type constructors in boot_names are automatically considered loop breakers
-- Recursion of newtypes/data types can happen via
-- the class TyCon, so all_tycons includes the class tycons
calcRecFlags boot_details is_boot mrole_env all_tycons
= RTI { rti_roles = roles
, rti_is_rec = is_rec }
where
roles = inferRoles is_boot mrole_env all_tycons
----------------- Recursion calculation ----------------
is_rec n | n `elemNameSet` rec_names = Recursive
| otherwise = NonRecursive
boot_name_set = case boot_details of
NoSelfBoot -> emptyNameSet
SelfBoot { sb_tcs = tcs } -> tcs
rec_names = boot_name_set `unionNameSet`
nt_loop_breakers `unionNameSet`
prod_loop_breakers
-------------------------------------------------
-- NOTE
-- These edge-construction loops rely on
-- every loop going via tyclss, the types and classes
-- in the module being compiled. Stuff in interface
-- files should be correctly marked. If not (e.g. a
-- type synonym in a hi-boot file) we can get an infinite
-- loop. We could program round this, but it'd make the code
-- rather less nice, so I'm not going to do that yet.
single_con_tycons = [ tc | tc <- all_tycons
, not (tyConName tc `elemNameSet` boot_name_set)
-- Remove the boot_name_set because they are
-- going to be loop breakers regardless.
, isSingleton (tyConDataCons tc) ]
-- Both newtypes and data types, with exactly one data constructor
(new_tycons, prod_tycons) = partition isNewTyCon single_con_tycons
-- NB: we do *not* call isProductTyCon because that checks
-- for vanilla-ness of data constructors; and that depends
-- on empty existential type variables; and that is figured
-- out by tcResultType; which uses tcMatchTy; which uses
-- coreView; which calls expandSynTyCon_maybe; which uses
-- the recursiveness of the TyCon. Result... a black hole.
-- YUK YUK YUK
--------------- Newtypes ----------------------
nt_loop_breakers = mkNameSet (findLoopBreakers nt_edges)
is_rec_nt tc = tyConName tc `elemNameSet` nt_loop_breakers
-- is_rec_nt is a locally-used helper function
nt_edges = [(t, mk_nt_edges t) | t <- new_tycons]
mk_nt_edges nt -- Invariant: nt is a newtype
= [ tc | tc <- nameEnvElts (tyConsOfType (new_tc_rhs nt))
-- tyConsOfType looks through synonyms
, tc `elem` new_tycons ]
-- If not (tc `elem` new_tycons) we know that either it's a local *data* type,
-- or it's imported. Either way, it can't form part of a newtype cycle
--------------- Product types ----------------------
prod_loop_breakers = mkNameSet (findLoopBreakers prod_edges)
prod_edges = [(tc, mk_prod_edges tc) | tc <- prod_tycons]
mk_prod_edges tc -- Invariant: tc is a product tycon
= concatMap (mk_prod_edges1 tc) (dataConOrigArgTys (head (tyConDataCons tc)))
mk_prod_edges1 ptc ty = concatMap (mk_prod_edges2 ptc) (nameEnvElts (tyConsOfType ty))
mk_prod_edges2 ptc tc
| tc `elem` prod_tycons = [tc] -- Local product
| tc `elem` new_tycons = if is_rec_nt tc -- Local newtype
then []
else mk_prod_edges1 ptc (new_tc_rhs tc)
-- At this point we know that either it's a local non-product data type,
-- or it's imported. Either way, it can't form part of a cycle
| otherwise = []
new_tc_rhs :: TyCon -> Type
new_tc_rhs tc = snd (newTyConRhs tc) -- Ignore the type variables
findLoopBreakers :: [(TyCon, [TyCon])] -> [Name]
-- Finds a set of tycons that cut all loops
findLoopBreakers deps
= go [(tc,tc,ds) | (tc,ds) <- deps]
where
go edges = [ name
| CyclicSCC ((tc,_,_) : edges') <- stronglyConnCompFromEdgedVerticesR edges,
name <- tyConName tc : go edges']
{-
************************************************************************
* *
Role annotations
* *
************************************************************************
-}
type RoleAnnots = NameEnv (LRoleAnnotDecl Name)
extractRoleAnnots :: TyClGroup Name -> RoleAnnots
extractRoleAnnots (TyClGroup { group_roles = roles })
= mkNameEnv [ (tycon, role_annot)
| role_annot@(L _ (RoleAnnotDecl (L _ tycon) _)) <- roles ]
emptyRoleAnnots :: RoleAnnots
emptyRoleAnnots = emptyNameEnv
lookupRoleAnnots :: RoleAnnots -> Name -> Maybe (LRoleAnnotDecl Name)
lookupRoleAnnots = lookupNameEnv
{-
************************************************************************
* *
Role inference
* *
************************************************************************
Note [Role inference]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm uses datatype definitions to infer the roles on the
parameters. Although these roles are stored in the tycons, we can perform this
algorithm on the built tycons, as long as we don't peek at an as-yet-unknown
roles field! Ah, the magic of laziness.
First, we choose appropriate initial roles. For families and classes, roles
(including initial roles) are N. For datatypes, we start with the role in the
role annotation (if any), or otherwise use Phantom. This is done in
initialRoleEnv1.
The function irGroup then propagates role information until it reaches a
fixpoint, preferring N over (R or P) and R over P. To aid in this, we have a
monad RoleM, which is a combination reader and state monad. In its state are
the current RoleEnv, which gets updated by role propagation, and an update
bit, which we use to know whether or not we've reached the fixpoint. The
environment of RoleM contains the tycon whose parameters we are inferring, and
a VarEnv from parameters to their positions, so we can update the RoleEnv.
Between tycons, this reader information is missing; it is added by
addRoleInferenceInfo.
There are two kinds of tycons to consider: algebraic ones (excluding classes)
and type synonyms. (Remember, families don't participate -- all their parameters
are N.) An algebraic tycon processes each of its datacons, in turn. Note that
a datacon's universally quantified parameters might be different from the parent
tycon's parameters, so we use the datacon's univ parameters in the mapping from
vars to positions. Note also that we don't want to infer roles for existentials
(they're all at N, too), so we put them in the set of local variables. As an
optimisation, we skip any tycons whose roles are already all Nominal, as there is
nowhere else for them to go. For synonyms, we just analyse their right-hand sides.
irType walks through a type, looking for uses of a variable of interest and
propagating role information. Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal, the
irType function can assume that anything it sees is at representational. (The
other possibilities are pruned when they're encountered.)
The rest of the code is just plumbing.
How do we know that this algorithm is correct? It should meet the following
specification:
Let Z be a role context -- a mapping from variables to roles. The following
rules define the property (Z |- t : r), where t is a type and r is a role:
Z(a) = r' r' <= r
------------------------- RCVar
Z |- a : r
---------- RCConst
Z |- T : r -- T is a type constructor
Z |- t1 : r
Z |- t2 : N
-------------- RCApp
Z |- t1 t2 : r
forall i<=n. (r_i is R or N) implies Z |- t_i : r_i
roles(T) = r_1 .. r_n
---------------------------------------------------- RCDApp
Z |- T t_1 .. t_n : R
Z, a:N |- t : r
---------------------- RCAll
Z |- forall a:k.t : r
We also have the following rules:
For all datacon_i in type T, where a_1 .. a_n are universally quantified
and b_1 .. b_m are existentially quantified, and the arguments are t_1 .. t_p,
then if forall j<=p, a_1 : r_1 .. a_n : r_n, b_1 : N .. b_m : N |- t_j : R,
then roles(T) = r_1 .. r_n
roles(->) = R, R
roles(~#) = N, N
With -dcore-lint on, the output of this algorithm is checked in checkValidRoles,
called from checkValidTycon.
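A small worked example (hypothetical datatypes, shown only to fix ideas):
    data Phant a = MkPhant            -- 'a' never used: stays Phantom
    data Wrap a  = MkWrap a           -- datacon argument: Representational
    data NomEq a b where
      MkEq :: (a ~ b) => NomEq a b    -- the (~) constraint forces Nominal
Starting roles come from initialRoleEnv1 (Phantom for ordinary datatypes,
unless a role annotation says otherwise), and irGroup then walks the datacon
argument types, upgrading roles until a fixpoint is reached.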
Note [Role-checking data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT :: Eq b => F a -> (a->a) -> T (G a)
Then we want to check the roles at which 'a' is used
in MkT's type. We want to work on the user-written type,
so we need to take into account
* the arguments: (F a) and (a->a)
* the context: C a b
* the result type: (G a) -- this is in the eq_spec
Note [Coercions in role inference]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is (t |> co1) representationally equal to (t |> co2)? Of course they are! Changing
the kind of a type is totally irrelevant to the representation of that type. So,
we want to totally ignore coercions when doing role inference. This includes omitting
any type variables that appear in nominal positions but only within coercions.
-}
type RoleEnv = NameEnv [Role] -- from tycon names to roles
-- This, and any of the functions it calls, must *not* look at the roles
-- field of a tycon we are inferring roles about!
-- See Note [Role inference]
inferRoles :: Bool -> RoleAnnots -> [TyCon] -> Name -> [Role]
inferRoles is_boot annots tycons
= let role_env = initialRoleEnv is_boot annots tycons
role_env' = irGroup role_env tycons in
\name -> case lookupNameEnv role_env' name of
Just roles -> roles
Nothing -> pprPanic "inferRoles" (ppr name)
initialRoleEnv :: Bool -> RoleAnnots -> [TyCon] -> RoleEnv
initialRoleEnv is_boot annots = extendNameEnvList emptyNameEnv .
map (initialRoleEnv1 is_boot annots)
initialRoleEnv1 :: Bool -> RoleAnnots -> TyCon -> (Name, [Role])
initialRoleEnv1 is_boot annots_env tc
| isFamilyTyCon tc = (name, map (const Nominal) bndrs)
| isAlgTyCon tc = (name, default_roles)
| isTypeSynonymTyCon tc = (name, default_roles)
| otherwise = pprPanic "initialRoleEnv1" (ppr tc)
where name = tyConName tc
bndrs = tyConBinders tc
visflags = map binderVisibility $ take (tyConArity tc) bndrs
num_exps = count (== Visible) visflags
-- if the number of annotations in the role annotation decl
-- is wrong, just ignore it. We check this in the validity check.
role_annots
= case lookupNameEnv annots_env name of
Just (L _ (RoleAnnotDecl _ annots))
| annots `lengthIs` num_exps -> map unLoc annots
_ -> replicate num_exps Nothing
default_roles = build_default_roles visflags role_annots
build_default_roles (Visible : viss) (m_annot : ras)
= (m_annot `orElse` default_role) : build_default_roles viss ras
build_default_roles (_inv : viss) ras
= Nominal : build_default_roles viss ras
build_default_roles [] [] = []
build_default_roles _ _ = pprPanic "initialRoleEnv1 (2)"
(vcat [ppr tc, ppr role_annots])
default_role
| isClassTyCon tc = Nominal
| is_boot && isAbstractTyCon tc = Representational
| otherwise = Phantom
irGroup :: RoleEnv -> [TyCon] -> RoleEnv
irGroup env tcs
= let (env', update) = runRoleM env $ mapM_ irTyCon tcs in
if update
then irGroup env' tcs
else env'
irTyCon :: TyCon -> RoleM ()
irTyCon tc
| isAlgTyCon tc
= do { old_roles <- lookupRoles tc
; unless (all (== Nominal) old_roles) $ -- also catches data families,
-- which don't want or need role inference
irTcTyVars tc $
do { mapM_ (irType emptyVarSet) (tyConStupidTheta tc) -- See #8958
; whenIsJust (tyConClass_maybe tc) irClass
; mapM_ irDataCon (visibleDataCons $ algTyConRhs tc) }}
| Just ty <- synTyConRhs_maybe tc
= irTcTyVars tc $
irType emptyVarSet ty
| otherwise
= return ()
-- any type variable used in an associated type must be Nominal
irClass :: Class -> RoleM ()
irClass cls
= mapM_ ir_at (classATs cls)
where
cls_tvs = classTyVars cls
cls_tv_set = mkVarSet cls_tvs
ir_at at_tc
= mapM_ (updateRole Nominal) (varSetElems nvars)
where nvars = (mkVarSet $ tyConTyVars at_tc) `intersectVarSet` cls_tv_set
-- See Note [Role inference]
irDataCon :: DataCon -> RoleM ()
irDataCon datacon
= setRoleInferenceVars univ_tvs $
irExTyVars ex_tvs $ \ ex_var_set ->
mapM_ (irType ex_var_set)
(map tyVarKind ex_tvs ++ eqSpecPreds eq_spec ++ theta ++ arg_tys)
-- See Note [Role-checking data constructor arguments]
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty)
= dataConFullSig datacon
irType :: VarSet -> Type -> RoleM ()
irType = go
where
go lcls (TyVarTy tv) = unless (tv `elemVarSet` lcls) $
updateRole Representational tv
go lcls (AppTy t1 t2) = go lcls t1 >> markNominal lcls t2
go lcls (TyConApp tc tys) = do { roles <- lookupRolesX tc
; zipWithM_ (go_app lcls) roles tys }
go lcls (ForAllTy (Named tv _) ty)
= let lcls' = extendVarSet lcls tv in
markNominal lcls (tyVarKind tv) >> go lcls' ty
go lcls (ForAllTy (Anon arg) res)
= go lcls arg >> go lcls res
go _ (LitTy {}) = return ()
-- See Note [Coercions in role inference]
go lcls (CastTy ty _) = go lcls ty
go _ (CoercionTy _) = return ()
go_app _ Phantom _ = return () -- nothing to do here
go_app lcls Nominal ty = markNominal lcls ty -- all vars below here are N
go_app lcls Representational ty = go lcls ty
irTcTyVars :: TyCon -> RoleM a -> RoleM a
irTcTyVars tc thing
= setRoleInferenceTc (tyConName tc) $ go (tyConTyVars tc)
where
go [] = thing
go (tv:tvs) = do { markNominal emptyVarSet (tyVarKind tv)
; addRoleInferenceVar tv $ go tvs }
irExTyVars :: [TyVar] -> (TyVarSet -> RoleM a) -> RoleM a
irExTyVars orig_tvs thing = go emptyVarSet orig_tvs
where
go lcls [] = thing lcls
go lcls (tv:tvs) = do { markNominal lcls (tyVarKind tv)
; go (extendVarSet lcls tv) tvs }
markNominal :: TyVarSet -- local variables
-> Type -> RoleM ()
markNominal lcls ty = let nvars = get_ty_vars ty `minusVarSet` lcls in
mapM_ (updateRole Nominal) (varSetElems nvars)
where
-- get_ty_vars gets all the tyvars (no covars!) from a type *without*
-- recurring into coercions. Recall: coercions are totally ignored during
-- role inference. See [Coercions in role inference]
get_ty_vars (TyVarTy tv) = unitVarSet tv
get_ty_vars (AppTy t1 t2) = get_ty_vars t1 `unionVarSet` get_ty_vars t2
get_ty_vars (TyConApp _ tys) = foldr (unionVarSet . get_ty_vars) emptyVarSet tys
get_ty_vars (ForAllTy bndr ty)
= get_ty_vars ty `delBinderVar` bndr
`unionVarSet` (tyCoVarsOfType $ binderType bndr)
get_ty_vars (LitTy {}) = emptyVarSet
get_ty_vars (CastTy ty _) = get_ty_vars ty
get_ty_vars (CoercionTy _) = emptyVarSet
-- like lookupRoles, but with Nominal tags at the end for oversaturated TyConApps
lookupRolesX :: TyCon -> RoleM [Role]
lookupRolesX tc
= do { roles <- lookupRoles tc
; return $ roles ++ repeat Nominal }
-- gets the roles either from the environment or the tycon
lookupRoles :: TyCon -> RoleM [Role]
lookupRoles tc
= do { env <- getRoleEnv
; case lookupNameEnv env (tyConName tc) of
Just roles -> return roles
Nothing -> return $ tyConRoles tc }
-- tries to update a role; won't ever update a role "downwards"
updateRole :: Role -> TyVar -> RoleM ()
updateRole role tv
= do { var_ns <- getVarNs
; name <- getTyConName
; case lookupVarEnv var_ns tv of
Nothing -> pprPanic "updateRole" (ppr name $$ ppr tv $$ ppr var_ns)
Just n -> updateRoleEnv name n role }
-- the state in the RoleM monad
data RoleInferenceState = RIS { role_env :: RoleEnv
, update :: Bool }
-- the environment in the RoleM monad
type VarPositions = VarEnv Int
-- See [Role inference]
newtype RoleM a = RM { unRM :: Maybe Name -- of the tycon
-> VarPositions
-> Int -- size of VarPositions
-> RoleInferenceState
-> (a, RoleInferenceState) }
instance Functor RoleM where
fmap = liftM
instance Applicative RoleM where
pure x = RM $ \_ _ _ state -> (x, state)
(<*>) = ap
instance Monad RoleM where
a >>= f = RM $ \m_info vps nvps state ->
let (a', state') = unRM a m_info vps nvps state in
unRM (f a') m_info vps nvps state'
runRoleM :: RoleEnv -> RoleM () -> (RoleEnv, Bool)
runRoleM env thing = (env', update)
where RIS { role_env = env', update = update }
= snd $ unRM thing Nothing emptyVarEnv 0 state
state = RIS { role_env = env
, update = False }
setRoleInferenceTc :: Name -> RoleM a -> RoleM a
setRoleInferenceTc name thing = RM $ \m_name vps nvps state ->
ASSERT( isNothing m_name )
ASSERT( isEmptyVarEnv vps )
ASSERT( nvps == 0 )
unRM thing (Just name) vps nvps state
addRoleInferenceVar :: TyVar -> RoleM a -> RoleM a
addRoleInferenceVar tv thing
= RM $ \m_name vps nvps state ->
ASSERT( isJust m_name )
unRM thing m_name (extendVarEnv vps tv nvps) (nvps+1) state
setRoleInferenceVars :: [TyVar] -> RoleM a -> RoleM a
setRoleInferenceVars tvs thing
= RM $ \m_name _vps _nvps state ->
ASSERT( isJust m_name )
unRM thing m_name (mkVarEnv (zip tvs [0..])) (panic "setRoleInferenceVars")
state
getRoleEnv :: RoleM RoleEnv
getRoleEnv = RM $ \_ _ _ state@(RIS { role_env = env }) -> (env, state)
getVarNs :: RoleM VarPositions
getVarNs = RM $ \_ vps _ state -> (vps, state)
getTyConName :: RoleM Name
getTyConName = RM $ \m_name _ _ state ->
case m_name of
Nothing -> panic "getTyConName"
Just name -> (name, state)
updateRoleEnv :: Name -> Int -> Role -> RoleM ()
updateRoleEnv name n role
= RM $ \_ _ _ state@(RIS { role_env = role_env }) -> ((),
case lookupNameEnv role_env name of
Nothing -> pprPanic "updateRoleEnv" (ppr name)
Just roles -> let (before, old_role : after) = splitAt n roles in
if role `ltRole` old_role
then let roles' = before ++ role : after
role_env' = extendNameEnv role_env name roles' in
RIS { role_env = role_env', update = True }
else state )
{- *********************************************************************
* *
Building implicits
* *
********************************************************************* -}
tcAddImplicits :: [TyCon] -> TcM TcGblEnv
-- Given a [TyCon], add to the TcGblEnv
-- * extend the TypeEnv with their implicitTyThings
-- * extend the TypeEnv with any default method Ids
-- * add bindings for record selectors
-- * add bindings for type representations for the TyThings
tcAddImplicits tycons
= discardWarnings $
tcExtendGlobalEnvImplicit implicit_things $
tcExtendGlobalValEnv def_meth_ids $
do { traceTc "tcAddImplicits" $ vcat
[ text "tycons" <+> ppr tycons
, text "implicits" <+> ppr implicit_things ]
; gbl_env <- mkTypeableBinds tycons
; gbl_env <- setGblEnv gbl_env $
tcRecSelBinds (mkRecSelBinds tycons)
; return gbl_env }
where
implicit_things = concatMap implicitTyConThings tycons
def_meth_ids = mkDefaultMethodIds tycons
mkDefaultMethodIds :: [TyCon] -> [Id]
-- We want to put the default-method Ids (both vanilla and generic)
-- into the type environment so that they are found when we typecheck
-- the filled-in default methods of each instance declaration
-- See Note [Default method Ids and Template Haskell]
mkDefaultMethodIds tycons
= [ mkExportedVanillaId dm_name (mk_dm_ty cls sel_id dm_spec)
| tc <- tycons
, Just cls <- [tyConClass_maybe tc]
, (sel_id, Just (dm_name, dm_spec)) <- classOpItems cls ]
where
mk_dm_ty :: Class -> Id -> DefMethSpec Type -> Type
mk_dm_ty _ sel_id VanillaDM = idType sel_id
mk_dm_ty cls _ (GenericDM dm_ty) = mkSpecSigmaTy cls_tvs [pred] dm_ty
where
cls_tvs = classTyVars cls
pred = mkClassPred cls (mkTyVarTys cls_tvs)
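-- For orientation, a hedged example (class and method names are invented):
--   class C a where
--     op :: a -> a
--     default op :: Show a => a -> a
-- gives 'op' a GenericDM whose stored type is (Show a => a -> a); mk_dm_ty
-- then wraps it in the class's own variables and constraint, so the
-- default-method Id gets the type
--   $dmop :: forall a. C a => Show a => a -> a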
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
{-
Note [Default method Ids and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #4169):
class Numeric a where
fromIntegerNum :: a
fromIntegerNum = ...
ast :: Q [Dec]
ast = [d| instance Numeric Int |]
When we typecheck 'ast' we have done the first pass over the class decl
(in tcTyClDecls), but we have not yet typechecked the default-method
declarations (because they can mention value declarations). So we
must bring the default method Ids into scope first (so they can be seen
when typechecking the [d| .. |] quote), and typecheck them later.
-}
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
mkRecSelBinds :: [TyCon] -> HsValBinds Name
-- NB We produce *un-typechecked* bindings, rather like 'deriving'
-- This makes life easier, because the later type checking will add
-- all necessary type abstractions and applications
mkRecSelBinds tycons
= ValBindsOut binds sigs
where
(sigs, binds) = unzip rec_sels
rec_sels = map mkRecSelBind [ (tc,fld)
| tc <- tycons
, fld <- tyConFieldLabels tc ]
mkRecSelBind :: (TyCon, FieldLabel) -> (LSig Name, (RecFlag, LHsBinds Name))
mkRecSelBind (tycon, fl)
= mkOneRecordSelector all_cons (RecSelData tycon) fl
where
all_cons = map RealDataCon (tyConDataCons tycon)
mkOneRecordSelector :: [ConLike] -> RecSelParent -> FieldLabel
-> (LSig Name, (RecFlag, LHsBinds Name))
mkOneRecordSelector all_cons idDetails fl
= (L loc (IdSig sel_id), (NonRecursive, unitBag (L loc sel_bind)))
where
loc = getSrcSpan sel_name
lbl = flLabel fl
sel_name = flSelector fl
sel_id = mkExportedLocalId rec_details sel_name sel_ty
rec_details = RecSelId { sel_tycon = idDetails, sel_naughty = is_naughty }
-- Find a representative constructor, con1
cons_w_field = conLikesWithFields all_cons [lbl]
con1 = ASSERT( not (null cons_w_field) ) head cons_w_field
-- Selector type; Note [Polymorphic selectors]
field_ty = conLikeFieldType con1 lbl
data_tvs = tyCoVarsOfTypeWellScoped data_ty
    data_tv_set = mkVarSet data_tvs
is_naughty = not (tyCoVarsOfType field_ty `subVarSet` data_tv_set)
(field_tvs, field_theta, field_tau) = tcSplitSigmaTy field_ty
sel_ty | is_naughty = unitTy -- See Note [Naughty record selectors]
| otherwise = mkSpecForAllTys data_tvs $
mkPhiTy (conLikeStupidTheta con1) $ -- Urgh!
mkFunTy data_ty $
mkSpecForAllTys field_tvs $
mkPhiTy field_theta $
-- req_theta is empty for normal DataCon
mkPhiTy req_theta $
field_tau
-- Make the binding: sel (C2 { fld = x }) = x
-- sel (C7 { fld = x }) = x
-- where cons_w_field = [C2,C7]
sel_bind = mkTopFunBind Generated sel_lname alts
where
alts | is_naughty = [mkSimpleMatch [] unit_rhs]
| otherwise = map mk_match cons_w_field ++ deflt
mk_match con = mkSimpleMatch [L loc (mk_sel_pat con)]
(L loc (HsVar (L loc field_var)))
mk_sel_pat con = ConPatIn (L loc (getName con)) (RecCon rec_fields)
rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing }
rec_field = noLoc (HsRecField
{ hsRecFieldLbl
= L loc (FieldOcc (L loc $ mkVarUnqual lbl) sel_name)
, hsRecFieldArg = L loc (VarPat (L loc field_var))
, hsRecPun = False })
sel_lname = L loc sel_name
field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc
-- Add catch-all default case unless the case is exhaustive
-- We do this explicitly so that we get a nice error message that
-- mentions this particular record selector
deflt | all dealt_with all_cons = []
| otherwise = [mkSimpleMatch [L loc (WildPat placeHolderType)]
(mkHsApp (L loc (HsVar
(L loc (getName rEC_SEL_ERROR_ID))))
(L loc (HsLit msg_lit)))]
-- Do not add a default case unless there are unmatched
-- constructors. We must take account of GADTs, else we
-- get overlap warning messages from the pattern-match checker
-- NB: we need to pass type args for the *representation* TyCon
-- to dataConCannotMatch, hence the calculation of inst_tys
-- This matters in data families
-- data instance T Int a where
-- A :: { fld :: Int } -> T Int Bool
-- B :: { fld :: Int } -> T Int Char
dealt_with :: ConLike -> Bool
dealt_with (PatSynCon _) = False -- We can't predict overlap
dealt_with con@(RealDataCon dc) =
con `elem` cons_w_field || dataConCannotMatch inst_tys dc
(univ_tvs, _, eq_spec, _, req_theta, _, data_ty) = conLikeFullSig con1
eq_subst = mkTvSubstPrs (map eqSpecPair eq_spec)
inst_tys = substTyVars eq_subst univ_tvs
unit_rhs = mkLHsTupleExpr []
msg_lit = HsStringPrim "" (fastStringToByteString lbl)
{-
Note [Polymorphic selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order, so that visible type application works.
data Ord a => T a = MkT { field :: forall b. (Num a, Show b) => (a, b) }
We want
field :: forall a. Ord a => T a -> forall b. (Num a, Show b) => (a, b)
Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape. For example:
data T = forall a. MkT { x,y::a }
We obviously can't define
x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.
In general, a field is "naughty" if its type mentions a type variable that
isn't in the result type of the constructor. Note that this *allows*
GADT record selectors (Note [GADT record selectors]) whose types may look
like sel :: T [a] -> a
For naughty selectors we make a dummy binding
sel = ()
so that the later type-check will add them to the environment, and they'll be
exported. The function is never called, because the typechecker spots the
sel_naughty field.
Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion). [Checked in TcTyClsDecls.checkValidTyCon]
E.g.
data T where
T1 { f :: Maybe a } :: T [a]
T2 { f :: Maybe a, y :: b } :: T [a]
T3 :: T Int
and now the selector takes that result type as its argument:
f :: forall a. T [a] -> Maybe a
Details: the "real" types of T1,T2 are:
T1 :: forall r a. (r~[a]) => a -> T r
T2 :: forall r a b. (r~[a]) => a -> b -> T r
So the selector looks like this:
f :: forall a. T [a] -> Maybe a
f (a:*) (t:T [a])
= case t of
T1 c (g:[a]~[c]) (v:Maybe c) -> v `cast` Maybe (right (sym g))
T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
T3 -> error "T3 does not have field f"
Note the forall'd tyvars of the selector are just the free tyvars
of the result type; there may be other tyvars in the constructor's
type (e.g. 'b' in T2).
Note the need for casts in the result!
Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families. Here's a running example:
data instance T [a] where
T1 { fld :: b } :: T [Maybe b]
The representation type looks like this
data :R7T a where
T1 { fld :: b } :: :R7T (Maybe b)
and there's a coercion from the family type to the representation type
:CoR7T a :: T [a] ~ :R7T a
The selector we want for fld looks like this:
fld :: forall b. T [Maybe b] -> b
fld = /\b. \(d::T [Maybe b]).
case d `cast` :CoR7T (Maybe b) of
T1 (x::b) -> x
The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
-}
| nushio3/ghc | compiler/typecheck/TcTyDecls.hs | bsd-3-clause | 45,303 | 3 | 19 | 12,879 | 6,142 | 3,219 | 2,923 | 426 | 10 |
{-# LANGUAGE ViewPatterns, TemplateHaskell #-}
module Data.TrieMap.Representation.TH.ReprMonad (
ReprMonad,
liftQuasi,
recurse,
getInstance,
outputInstance,
mustBreak,
execReprMonad,
forceDefaultListRep) where
import Data.TrieMap.Representation.Class
import Data.TrieMap.Representation.TH.Utils
import Control.Monad
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.ExpandSyns
type Instances = [(Name, ([Name], Type))]
newtype ReprMonad a = ReprMonad {runReprMonad ::
Bool -- whether to force default list reps
-> Instances -- tycons of known instances
-> [Name] -- tycons of instances in progress (breakpoints of recursive loopies)
-> Q ([Dec], Instances, a) -- output decs, new known instances
}
instance Monad ReprMonad where
return x = ReprMonad $ \ _ knowns _ -> return ([], knowns, x)
m >>= k = ReprMonad $ \ def knowns breaks -> do
(outDecs, knowns', a) <- runReprMonad m def knowns breaks
(outDecs', knowns'', b) <- runReprMonad (k a) def knowns' breaks
return (outDecs ++ outDecs', knowns'', b)
fail err = ReprMonad $ \ _ _ _ -> fail err
instance Functor ReprMonad where
fmap = liftM
liftQuasi :: Q a -> ReprMonad a
liftQuasi q = ReprMonad $ \ _ knowns _ -> do
a <- q
return ([], knowns, a)
instance Quasi ReprMonad where
qNewName = liftQuasi . qNewName
qReport b str = liftQuasi (qReport b str)
qRecover m k = ReprMonad $ \ def knowns breaks -> qRecover (runReprMonad m def knowns breaks) (runReprMonad k def knowns breaks)
qReify = liftQuasi . qReify
qClassInstances name typs = liftQuasi (qClassInstances name typs)
qLocation = liftQuasi qLocation
qRunIO = liftQuasi . qRunIO
insNub :: Eq a => a -> [a] -> [a]
insNub x ys0@(y:ys)
| x == y = ys0
| otherwise = y:insNub x ys
insNub x [] = [x]
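-- | Mark @breakTy@ as "in progress" while running the sub-computation, so
-- that a recursive occurrence of the same type constructor is noticed by
-- 'mustBreak' instead of sending the derivation into an infinite loop.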
recurse :: Name -> ReprMonad a -> ReprMonad a
recurse breakTy m = ReprMonad $ \ def knowns breaks -> runReprMonad m def knowns (breakTy `insNub` breaks)
outputInstance :: Type -> Type -> [Dec] -> ReprMonad ()
outputInstance ty tyRep decs = ReprMonad $ \ _ knowns _ -> case decompose' ty of
Just (tyCon, tyArgs)
-> return (decs, (tyCon, (tyArgs, tyRep)):knowns, ())
_ -> return (decs, knowns, ())
getInstance :: Type -> ReprMonad (Maybe Type)
getInstance typ = case decompose typ of
(ConT tyCon, tyArgs) -> ReprMonad $ \ _ knowns _ -> case lookup tyCon knowns of
Nothing -> return ([], knowns, Nothing)
Just (tyArgs', tyRep) -> return ([], knowns, Just $ foldr substInType tyRep (zip tyArgs' tyArgs))
_ -> return Nothing
mustBreak :: Name -> ReprMonad Bool
mustBreak tyCon = ReprMonad $ \ _ knowns breaks -> return ([], knowns, tyCon `elem` breaks)
execReprMonad :: Bool -> ReprMonad a -> Q [Dec]
execReprMonad def m = do
ClassI _ instances <- reify ''Repr
let instanceHeads = [(tyConName, (tyArgs, ConT ''Rep `AppT` compose tyConName tyArgs))
| ClassInstance{ci_tys = [decompose' -> Just (tyConName, tyArgs)]} <- instances]
(decs, _, _) <- runReprMonad m def instanceHeads []
return decs
forceDefaultListRep :: ReprMonad Bool
forceDefaultListRep = ReprMonad $ \ def known _ -> return ([], known, def) | lowasser/TrieMap | Data/TrieMap/Representation/TH/ReprMonad.hs | bsd-3-clause | 3,101 | 88 | 19 | 570 | 1,209 | 660 | 549 | 71 | 3 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[StgSyn]{Shared term graph (STG) syntax for spineless-tagless code generation}
This data type represents programs just before code generation (conversion to
@Cmm@): basically, what we have is a stylised form of @CoreSyntax@, the style
being one that happens to be ideally suited to spineless tagless code
generation.
-}
{-# LANGUAGE CPP #-}
module StgSyn (
GenStgArg(..),
GenStgLiveVars,
GenStgBinding(..), GenStgExpr(..), GenStgRhs(..),
GenStgAlt, AltType(..),
UpdateFlag(..), isUpdatable,
StgBinderInfo,
noBinderInfo, stgSatOcc, stgUnsatOcc, satCallsOnly,
combineStgBinderInfo,
-- a set of synonyms for the most common (only :-) parameterisation
StgArg, StgLiveVars,
StgBinding, StgExpr, StgRhs, StgAlt,
-- StgOp
StgOp(..),
-- utils
topStgBindHasCafRefs, stgArgHasCafRefs, stgRhsArity,
isDllConApp,
stgArgType,
stripStgTicksTop,
pprStgBinding, pprStgBindings,
pprStgLVs
) where
#include "HsVersions.h"
import CoreSyn ( AltCon, Tickish )
import CostCentre ( CostCentreStack )
import Data.List ( intersperse )
import DataCon
import DynFlags
import FastString
import ForeignCall ( ForeignCall )
import Id
import IdInfo ( mayHaveCafRefs )
import Literal ( Literal, literalType )
import Module ( Module )
import Outputable
import Packages ( isDllName )
import Platform
import PprCore ( {- instances -} )
import PrimOp ( PrimOp, PrimCall )
import TyCon ( PrimRep(..) )
import TyCon ( TyCon )
import Type ( Type )
import Type ( typePrimRep )
import UniqSet
import Unique ( Unique )
import UniqFM
import Util
{-
************************************************************************
* *
\subsection{@GenStgBinding@}
* *
************************************************************************
As usual, expressions are interesting; other things are boring. Here
are the boring things [except note the @GenStgRhs@], parameterised
with respect to binder and occurrence information (just as in
@CoreSyn@):
-}
data GenStgBinding bndr occ
= StgNonRec bndr (GenStgRhs bndr occ)
| StgRec [(bndr, GenStgRhs bndr occ)]
{-
************************************************************************
* *
\subsection{@GenStgArg@}
* *
************************************************************************
-}
data GenStgArg occ
= StgVarArg occ
| StgLitArg Literal
-- | Does this constructor application refer to
-- anything in a different *Windows* DLL?
-- If so, we can't allocate it statically
isDllConApp :: DynFlags -> Module -> DataCon -> [StgArg] -> Bool
isDllConApp dflags this_mod con args
| platformOS (targetPlatform dflags) == OSMinGW32
= isDllName dflags this_pkg this_mod (dataConName con) || any is_dll_arg args
| otherwise = False
where
-- NB: typePrimRep is legit because any free variables won't have
-- unlifted type (there are no unlifted things at top level)
is_dll_arg :: StgArg -> Bool
is_dll_arg (StgVarArg v) = isAddrRep (typePrimRep (idType v))
&& isDllName dflags this_pkg this_mod (idName v)
is_dll_arg _ = False
this_pkg = thisPackage dflags
-- True of machine addresses; these are the things that don't
-- work across DLLs. The key point here is that VoidRep comes
-- out False, so that a top level nullary GADT constructor is
-- False for isDllConApp
-- data T a where
-- T1 :: T Int
-- gives
-- T1 :: forall a. (a~Int) -> T a
-- and hence the top-level binding
-- $WT1 :: T Int
-- $WT1 = T1 Int (Coercion (Refl Int))
-- The coercion argument here gets VoidRep
isAddrRep :: PrimRep -> Bool
isAddrRep AddrRep = True
isAddrRep PtrRep = True
isAddrRep _ = False
-- | Type of an @StgArg@
--
-- Very half-baked because we have lost the type arguments.
stgArgType :: StgArg -> Type
stgArgType (StgVarArg v) = idType v
stgArgType (StgLitArg lit) = literalType lit
-- | Strip ticks of a given type from an STG expression
stripStgTicksTop :: (Tickish Id -> Bool) -> StgExpr -> ([Tickish Id], StgExpr)
stripStgTicksTop p = go []
where go ts (StgTick t e) | p t = go (t:ts) e
go ts other = (reverse ts, other)
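-- A quick informal example of the traversal: assuming p accepts both ticks,
--   stripStgTicksTop p (StgTick t1 (StgTick t2 e)) == ([t1,t2], e)
-- and the first tick rejected by p stops the walk and stays on the expression.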
{-
************************************************************************
* *
\subsection{STG expressions}
* *
************************************************************************
The @GenStgExpr@ data type is parameterised on binder and occurrence
info, as before.
************************************************************************
* *
\subsubsection{@GenStgExpr@ application}
* *
************************************************************************
An application is of a function to a list of atoms [not expressions].
Operationally, we want to push the arguments on the stack and call the
function. (If the arguments were expressions, we would have to build
their closures first.)
There is no constructor for a lone variable; it would appear as
@StgApp var []@.
-}
type GenStgLiveVars occ = UniqSet occ
data GenStgExpr bndr occ
= StgApp
occ -- function
[GenStgArg occ] -- arguments; may be empty
{-
************************************************************************
* *
\subsubsection{@StgConApp@ and @StgPrimApp@---saturated applications}
* *
************************************************************************
There are specialised forms of application, for constructors,
primitives, and literals.
-}
| StgLit Literal
-- StgConApp is vital for returning unboxed tuples
-- which can't be let-bound first
| StgConApp DataCon
[GenStgArg occ] -- Saturated
| StgOpApp StgOp -- Primitive op or foreign call
[GenStgArg occ] -- Saturated
Type -- Result type
-- We need to know this so that we can
-- assign result registers
{-
************************************************************************
* *
\subsubsection{@StgLam@}
* *
************************************************************************
StgLam is used *only* during CoreToStg's work. Before CoreToStg has
finished it encodes (\x -> e) as (let f = \x -> e in f)
-}
| StgLam
[bndr]
StgExpr -- Body of lambda
{-
************************************************************************
* *
\subsubsection{@GenStgExpr@: case-expressions}
* *
************************************************************************
This has the same boxed/unboxed business as Core case expressions.
-}
| StgCase
(GenStgExpr bndr occ)
-- the thing to examine
bndr -- binds the result of evaluating the scrutinee
AltType
[GenStgAlt bndr occ]
-- The DEFAULT case is always *first*
-- if it is there at all
{-
************************************************************************
* *
\subsubsection{@GenStgExpr@: @let(rec)@-expressions}
* *
************************************************************************
The various forms of let(rec)-expression encode most of the
interesting things we want to do.
\begin{enumerate}
\item
\begin{verbatim}
let-closure x = [free-vars] [args] expr
in e
\end{verbatim}
is equivalent to
\begin{verbatim}
let x = (\free-vars -> \args -> expr) free-vars
\end{verbatim}
\tr{args} may be empty (and is for most closures). It isn't under
circumstances like this:
\begin{verbatim}
let x = (\y -> y+z)
\end{verbatim}
This gets mangled to
\begin{verbatim}
let-closure x = [z] [y] (y+z)
\end{verbatim}
The idea is that we compile code for @(y+z)@ in an environment in which
@z@ is bound to an offset from \tr{Node}, and @y@ is bound to an
offset from the stack pointer.
(A let-closure is an @StgLet@ with a @StgRhsClosure@ RHS.)
\item
\begin{verbatim}
let-constructor x = Constructor [args]
in e
\end{verbatim}
(A let-constructor is an @StgLet@ with a @StgRhsCon@ RHS.)
\item
Letrec-expressions are essentially the same deal as
let-closure/let-constructor, so we use a common structure and
distinguish between them with an @is_recursive@ boolean flag.
\item
\begin{verbatim}
let-unboxed u = an arbitrary arithmetic expression in unboxed values
in e
\end{verbatim}
All the stuff on the RHS must be fully evaluated.
No function calls either!
(We've backed away from this toward case-expressions with
suitably-magical alts ...)
\item
~[Advanced stuff here! Not to start with, but makes pattern matching
generate more efficient code.]
\begin{verbatim}
let-escapes-not fail = expr
in e'
\end{verbatim}
Here the idea is that @e'@ guarantees not to put @fail@ in a data structure,
or pass it to another function. All @e'@ will ever do is tail-call @fail@.
Rather than build a closure for @fail@, all we need do is to record the stack
level at the moment of the @let-escapes-not@; then entering @fail@ is just
a matter of adjusting the stack pointer back down to that point and entering
the code for it.
Another example:
\begin{verbatim}
f x y = let z = huge-expression in
if y==1 then z else
if y==2 then z else
1
\end{verbatim}
(A let-escapes-not is an @StgLetNoEscape@.)
\item
We may eventually want:
\begin{verbatim}
let-literal x = Literal
in e
\end{verbatim}
\end{enumerate}
And so the code for let(rec)-things:
-}
| StgLet
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
| StgLetNoEscape
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
{-
%************************************************************************
%* *
\subsubsection{@GenStgExpr@: @hpc@, @scc@ and other debug annotations}
%* *
%************************************************************************
Finally for @hpc@ expressions we introduce a new STG construct.
-}
| StgTick
(Tickish bndr)
(GenStgExpr bndr occ) -- sub expression
-- END of GenStgExpr
{-
************************************************************************
* *
\subsection{STG right-hand sides}
* *
************************************************************************
Here's the rest of the interesting stuff for @StgLet@s; the first
flavour is for closures:
-}
data GenStgRhs bndr occ
= StgRhsClosure
CostCentreStack -- CCS to be attached (default is CurrentCCS)
StgBinderInfo -- Info about how this binder is used (see below)
[occ] -- non-global free vars; a list, rather than
-- a set, because order is important
!UpdateFlag -- ReEntrant | Updatable | SingleEntry
[bndr] -- arguments; if empty, then not a function;
-- as above, order is important.
(GenStgExpr bndr occ) -- body
{-
An example may be in order. Consider:
\begin{verbatim}
let t = \x -> \y -> ... x ... y ... p ... q in e
\end{verbatim}
Pulling out the free vars and stylising somewhat, we get the equivalent:
\begin{verbatim}
let t = (\[p,q] -> \[x,y] -> ... x ... y ... p ...q) p q
\end{verbatim}
Stg-operationally, the @[x,y]@ are on the stack, the @[p,q]@ are
offsets from @Node@ into the closure, and the code ptr for the closure
will be exactly that in parentheses above.
The second flavour of right-hand-side is for constructors (simple but important):
-}
| StgRhsCon
CostCentreStack -- CCS to be attached (default is CurrentCCS).
-- Top-level (static) ones will end up with
-- DontCareCCS, because we don't count static
-- data in heap profiles, and we don't set CCCS
-- from static closure.
DataCon -- constructor
[GenStgArg occ] -- args
stgRhsArity :: StgRhs -> Int
stgRhsArity (StgRhsClosure _ _ _ _ bndrs _)
= ASSERT( all isId bndrs ) length bndrs
-- The arity never includes type parameters, but they should have gone by now
stgRhsArity (StgRhsCon _ _ _) = 0
-- Note [CAF consistency]
-- ~~~~~~~~~~~~~~~~~~~~~~
--
-- `topStgBindHasCafRefs` is only used by an assert (`consistentCafInfo` in
-- `CoreToStg`) to make sure CAF-ness predicted by `TidyPgm` is consistent with
-- reality.
--
-- Specifically, if the RHS mentions any Id that itself is marked
-- `MayHaveCafRefs`; or if the binding is a top-level updateable thunk; then the
-- `Id` for the binding should be marked `MayHaveCafRefs`. The potential trouble
-- is that `TidyPgm` computed the CAF info on the `Id` but some transformations
-- have taken place since then.
topStgBindHasCafRefs :: GenStgBinding bndr Id -> Bool
topStgBindHasCafRefs (StgNonRec _ rhs)
= topRhsHasCafRefs rhs
topStgBindHasCafRefs (StgRec binds)
= any topRhsHasCafRefs (map snd binds)
topRhsHasCafRefs :: GenStgRhs bndr Id -> Bool
topRhsHasCafRefs (StgRhsClosure _ _ _ upd _ body)
= -- See Note [CAF consistency]
isUpdatable upd || exprHasCafRefs body
topRhsHasCafRefs (StgRhsCon _ _ args)
= any stgArgHasCafRefs args
exprHasCafRefs :: GenStgExpr bndr Id -> Bool
exprHasCafRefs (StgApp f args)
= stgIdHasCafRefs f || any stgArgHasCafRefs args
exprHasCafRefs StgLit{}
= False
exprHasCafRefs (StgConApp _ args)
= any stgArgHasCafRefs args
exprHasCafRefs (StgOpApp _ args _)
= any stgArgHasCafRefs args
exprHasCafRefs (StgLam _ body)
= exprHasCafRefs body
exprHasCafRefs (StgCase scrt _ _ alts)
= exprHasCafRefs scrt || any altHasCafRefs alts
exprHasCafRefs (StgLet bind body)
= bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgLetNoEscape bind body)
= bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgTick _ expr)
= exprHasCafRefs expr
bindHasCafRefs :: GenStgBinding bndr Id -> Bool
bindHasCafRefs (StgNonRec _ rhs)
= rhsHasCafRefs rhs
bindHasCafRefs (StgRec binds)
= any rhsHasCafRefs (map snd binds)
rhsHasCafRefs :: GenStgRhs bndr Id -> Bool
rhsHasCafRefs (StgRhsClosure _ _ _ _ _ body)
= exprHasCafRefs body
rhsHasCafRefs (StgRhsCon _ _ args)
= any stgArgHasCafRefs args
altHasCafRefs :: GenStgAlt bndr Id -> Bool
altHasCafRefs (_, _, rhs) = exprHasCafRefs rhs
stgArgHasCafRefs :: GenStgArg Id -> Bool
stgArgHasCafRefs (StgVarArg id)
= stgIdHasCafRefs id
stgArgHasCafRefs _
= False
stgIdHasCafRefs :: Id -> Bool
stgIdHasCafRefs id =
-- We are looking for occurrences of an Id that is bound at top level, and may
-- have CAF refs. At this point (after TidyPgm) top-level Ids (whether
-- imported or defined in this module) are GlobalIds, so the test is easy.
isGlobalId id && mayHaveCafRefs (idCafInfo id)
-- Here's the @StgBinderInfo@ type, and its combining op:
data StgBinderInfo
= NoStgBinderInfo
| SatCallsOnly -- All occurrences are *saturated* *function* calls
-- This means we don't need to build an info table and
-- slow entry code for the thing
-- Thunks never get this value
noBinderInfo, stgUnsatOcc, stgSatOcc :: StgBinderInfo
noBinderInfo = NoStgBinderInfo
stgUnsatOcc = NoStgBinderInfo
stgSatOcc = SatCallsOnly
satCallsOnly :: StgBinderInfo -> Bool
satCallsOnly SatCallsOnly = True
satCallsOnly NoStgBinderInfo = False
combineStgBinderInfo :: StgBinderInfo -> StgBinderInfo -> StgBinderInfo
combineStgBinderInfo SatCallsOnly SatCallsOnly = SatCallsOnly
combineStgBinderInfo _ _ = NoStgBinderInfo
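-- Note (added for clarity, not in the original file): the combination is
-- conjunctive -- a binder keeps SatCallsOnly only if every occurrence it is
-- combined over reports SatCallsOnly; any other evidence collapses the
-- result to NoStgBinderInfo.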
--------------
pp_binder_info :: StgBinderInfo -> SDoc
pp_binder_info NoStgBinderInfo = empty
pp_binder_info SatCallsOnly = text "sat-only"
{-
************************************************************************
* *
\subsection[Stg-case-alternatives]{STG case alternatives}
* *
************************************************************************
Very like in @CoreSyntax@ (except no type-world stuff).
The type constructor is guaranteed not to be abstract; that is, we can
see its representation. This is important because the code generator
uses it to determine return conventions etc. But it's not trivial
where there's a module loop involved, because some versions of a type
constructor might not have all the constructors visible. So
mkStgAlgAlts (in CoreToStg) ensures that it gets the TyCon from the
constructors or literals (which are guaranteed to have the Real McCoy)
rather than from the scrutinee type.
-}
type GenStgAlt bndr occ
= (AltCon, -- alts: data constructor,
[bndr], -- constructor's parameters,
GenStgExpr bndr occ) -- ...right-hand side.
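-- For illustration only (not part of the original file): an alternative such
-- as `Just x -> rhs` would be represented as the triple
--
--   (DataAlt justCon, [x], rhs)
--
-- where `justCon` is assumed to be the DataCon for `Just`.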
data AltType
= PolyAlt -- Polymorphic (a type variable)
| UbxTupAlt Int -- Unboxed tuple of this arity
| AlgAlt TyCon -- Algebraic data type; the AltCons will be DataAlts
| PrimAlt TyCon -- Primitive data type; the AltCons will be LitAlts
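-- Illustrative mapping (added, hedged): a case whose scrutinee is a boxed
-- algebraic value such as a list gets `AlgAlt listTyCon`, a case on an
-- `Int#` scrutinee gets `PrimAlt intPrimTyCon`, a case whose scrutinee is an
-- unboxed pair gets `UbxTupAlt 2`, and a scrutinee whose type is a type
-- variable gets `PolyAlt`. The TyCon names here are only meant as examples.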
{-
************************************************************************
* *
\subsection[Stg]{The Plain STG parameterisation}
* *
************************************************************************
This happens to be the only one we use at the moment.
-}
type StgBinding = GenStgBinding Id Id
type StgArg = GenStgArg Id
type StgLiveVars = GenStgLiveVars Id
type StgExpr = GenStgExpr Id Id
type StgRhs = GenStgRhs Id Id
type StgAlt = GenStgAlt Id Id
{-
************************************************************************
* *
\subsubsection[UpdateFlag-datatype]{@UpdateFlag@}
* *
************************************************************************
This is also used in @LambdaFormInfo@ in the @ClosureInfo@ module.
A @ReEntrant@ closure may be entered multiple times, but should not be
updated or blackholed. An @Updatable@ closure should be updated after
evaluation (and may be blackholed during evaluation). A @SingleEntry@
closure will only be entered once, and so need not be updated but may
safely be blackholed.
-}
data UpdateFlag = ReEntrant | Updatable | SingleEntry
instance Outputable UpdateFlag where
ppr u = char $ case u of
ReEntrant -> 'r'
Updatable -> 'u'
SingleEntry -> 's'
isUpdatable :: UpdateFlag -> Bool
isUpdatable ReEntrant = False
isUpdatable SingleEntry = False
isUpdatable Updatable = True
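-- Illustrative sketch (not part of the original module): during code
-- generation the flag typically feeds a decision of the shape
--
--   enter upd_flag
--     | isUpdatable upd_flag = pushUpdateFrame >> enterClosure
--     | otherwise            = enterClosure
--
-- where `pushUpdateFrame` and `enterClosure` are hypothetical stand-ins for
-- the real code-generator actions.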
{-
************************************************************************
* *
\subsubsection{StgOp}
* *
************************************************************************
An StgOp allows us to group together PrimOps and ForeignCalls.
It's quite useful to move these around together, notably
in StgOpApp and COpStmt.
-}
data StgOp
= StgPrimOp PrimOp
| StgPrimCallOp PrimCall
| StgFCallOp ForeignCall Unique
-- The Unique is occasionally needed by the C pretty-printer
-- (which lacks a unique supply), notably when generating a
-- typedef for foreign-export-dynamic
{-
************************************************************************
* *
\subsection[Stg-pretty-printing]{Pretty-printing}
* *
************************************************************************
Robin Popplestone asked for semi-colon separators on STG binds; here's
hoping he likes terminators instead... Ditto for case alternatives.
-}
pprGenStgBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgBinding bndr bdee -> SDoc
pprGenStgBinding (StgNonRec bndr rhs)
= hang (hsep [pprBndr LetBind bndr, equals])
4 (ppr rhs <> semi)
pprGenStgBinding (StgRec pairs)
= vcat $ ifPprDebug (text "{- StgRec (begin) -}") :
map (ppr_bind) pairs ++ [ifPprDebug (text "{- StgRec (end) -}")]
where
ppr_bind (bndr, expr)
= hang (hsep [pprBndr LetBind bndr, equals])
4 (ppr expr <> semi)
pprStgBinding :: StgBinding -> SDoc
pprStgBinding bind = pprGenStgBinding bind
pprStgBindings :: [StgBinding] -> SDoc
pprStgBindings binds = vcat $ intersperse blankLine (map pprGenStgBinding binds)
instance (Outputable bdee) => Outputable (GenStgArg bdee) where
ppr = pprStgArg
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgBinding bndr bdee) where
ppr = pprGenStgBinding
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgExpr bndr bdee) where
ppr = pprStgExpr
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgRhs bndr bdee) where
ppr rhs = pprStgRhs rhs
pprStgArg :: (Outputable bdee) => GenStgArg bdee -> SDoc
pprStgArg (StgVarArg var) = ppr var
pprStgArg (StgLitArg con) = ppr con
pprStgExpr :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgExpr bndr bdee -> SDoc
-- special case
pprStgExpr (StgLit lit) = ppr lit
-- general case
pprStgExpr (StgApp func args)
= hang (ppr func) 4 (sep (map (ppr) args))
pprStgExpr (StgConApp con args)
= hsep [ ppr con, brackets (interppSP args)]
pprStgExpr (StgOpApp op args _)
= hsep [ pprStgOp op, brackets (interppSP args)]
pprStgExpr (StgLam bndrs body)
= sep [ char '\\' <+> ppr_list (map (pprBndr LambdaBind) bndrs)
<+> text "->",
pprStgExpr body ]
where ppr_list = brackets . fsep . punctuate comma
-- special case: let v = <very specific thing>
-- in
-- let ...
-- in
-- ...
--
-- Very special! Suspicious! (SLPJ)
{-
pprStgExpr (StgLet srt (StgNonRec bndr (StgRhsClosure cc bi free_vars upd_flag args rhs))
expr@(StgLet _ _))
= ($$)
(hang (hcat [text "let { ", ppr bndr, ptext (sLit " = "),
ppr cc,
pp_binder_info bi,
text " [", ifPprDebug (interppSP free_vars), ptext (sLit "] \\"),
ppr upd_flag, text " [",
interppSP args, char ']'])
8 (sep [hsep [ppr rhs, text "} in"]]))
(ppr expr)
-}
-- special case: let ... in let ...
pprStgExpr (StgLet bind expr@(StgLet _ _))
= ($$)
(sep [hang (text "let {")
2 (hsep [pprGenStgBinding bind, text "} in"])])
(ppr expr)
-- general case
pprStgExpr (StgLet bind expr)
= sep [hang (text "let {") 2 (pprGenStgBinding bind),
hang (text "} in ") 2 (ppr expr)]
pprStgExpr (StgLetNoEscape bind expr)
= sep [hang (text "let-no-escape {")
2 (pprGenStgBinding bind),
hang (text "} in ")
2 (ppr expr)]
pprStgExpr (StgTick tickish expr)
= sdocWithDynFlags $ \dflags ->
if gopt Opt_PprShowTicks dflags
then sep [ ppr tickish, pprStgExpr expr ]
else pprStgExpr expr
pprStgExpr (StgCase expr bndr alt_type alts)
= sep [sep [text "case",
nest 4 (hsep [pprStgExpr expr,
ifPprDebug (dcolon <+> ppr alt_type)]),
text "of", pprBndr CaseBind bndr, char '{'],
nest 2 (vcat (map pprStgAlt alts)),
char '}']
pprStgAlt :: (OutputableBndr bndr, Outputable occ, Ord occ)
=> GenStgAlt bndr occ -> SDoc
pprStgAlt (con, params, expr)
= hang (hsep [ppr con, sep (map (pprBndr CasePatBind) params), text "->"])
4 (ppr expr <> semi)
pprStgOp :: StgOp -> SDoc
pprStgOp (StgPrimOp op) = ppr op
pprStgOp (StgPrimCallOp op) = ppr op
pprStgOp (StgFCallOp op _) = ppr op
instance Outputable AltType where
ppr PolyAlt = text "Polymorphic"
ppr (UbxTupAlt n) = text "UbxTup" <+> ppr n
ppr (AlgAlt tc) = text "Alg" <+> ppr tc
ppr (PrimAlt tc) = text "Prim" <+> ppr tc
pprStgLVs :: Outputable occ => GenStgLiveVars occ -> SDoc
pprStgLVs lvs
= getPprStyle $ \ sty ->
if userStyle sty || isEmptyUniqSet lvs then
empty
else
hcat [text "{-lvs:", pprUFM lvs interpp'SP, text "-}"]
pprStgRhs :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgRhs bndr bdee -> SDoc
-- special case
pprStgRhs (StgRhsClosure cc bi [free_var] upd_flag [{-no args-}] (StgApp func []))
= hcat [ ppr cc,
pp_binder_info bi,
brackets (ifPprDebug (ppr free_var)),
text " \\", ppr upd_flag, ptext (sLit " [] "), ppr func ]
-- general case
pprStgRhs (StgRhsClosure cc bi free_vars upd_flag args body)
= sdocWithDynFlags $ \dflags ->
hang (hsep [if gopt Opt_SccProfilingOn dflags then ppr cc else empty,
pp_binder_info bi,
ifPprDebug (brackets (interppSP free_vars)),
char '\\' <> ppr upd_flag, brackets (interppSP args)])
4 (ppr body)
pprStgRhs (StgRhsCon cc con args)
= hcat [ ppr cc,
space, ppr con, text "! ", brackets (interppSP args)]
| vTurbine/ghc | compiler/stgSyn/StgSyn.hs | bsd-3-clause | 27,004 | 0 | 16 | 7,706 | 3,760 | 2,006 | 1,754 | 316 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
module Oracle.DiffOracleOld where
import qualified Data.IntMap as M
import Data.Maybe
import Data.List
import Debug.Trace
import Oracle.Internal
import Language.Clojure.Lang
import Language.Clojure.AST
import Util.UnixDiff
type DiffOp = Path
data DiffOracle = DiffOracle DelInsMap
type DelInsMap = (M.IntMap DiffOp, M.IntMap DiffOp)
unionDelInsMap :: DelInsMap -> DelInsMap -> DelInsMap
unionDelInsMap (s1, d1) (s2, d2) = (M.union s1 s2, M.union d1 d2)
buildOracle :: [DiffAction] -> DelInsMap
buildOracle [] = (M.empty, M.empty)
buildOracle (first:rest) = (process first) `unionDelInsMap` (buildOracle rest)
where
process (Copy _) = (M.empty, M.empty)
process (Ins i) = (M.empty, M.singleton i I)
process (Del i) = (M.singleton i D, M.empty)
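-- Worked example (added for illustration, not in the original file):
--
--   buildOracle [Copy (1,1), Del 2, Ins 3]
--     == (M.fromList [(2, D)], M.fromList [(3, I)])
--
-- deletions populate the first map (keyed by source line) and insertions
-- populate the second (keyed by destination line).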
askOracle :: DiffOracle -> LineRange -> LineRange -> [Path]
askOracle (DiffOracle (delMap, insMap)) srcRange dstRange
  | containsRange delMap srcRange
      && containsRange insMap dstRange
      && inSync srcRange dstRange = []
  | containsRange delMap srcRange
      && not (inSync srcRange dstRange) = [ D ]
  | containsRange insMap dstRange
      && not (inSync srcRange dstRange) = [ I ]
  | otherwise = [ M ]
-- dstSpan = findSpan insMap dstRange
-- srcSpan = findSpan delMap srcRange
-- dstOffset = calculateOffset (delMap, insMap) dstStart
-- srcOffset = calculateOffset (delMap, insMap) srcStart
-- dstSpan = (Range (dstStart + dstOffset) (dstEnd + dstOffset))
-- srcSpan = (Range (srcStart - srcOffset) (srcEnd - srcOffset))
inSync :: LineRange -> LineRange -> Bool
inSync (Range s1 _) (Range s2 _) = s1 == s2
findSpan :: M.IntMap DiffOp -> LineRange -> LineRange
findSpan m (Range start end) = go m start end
where
go m s e | isJust (M.lookup s m) = go m (s-1) e
go m s e | isJust (M.lookup e m) = go m s (e + 1)
go m s e | otherwise = Range (s+1) (e-1)
calculateOffset :: DelInsMap -> Int -> Int
calculateOffset (del, ins) i = process (M.elems splitIns ++ M.elems splitDel)
where
(splitIns, _) = M.split (i+1) ins
(splitDel, _) = M.split (i+1) del
process [] = 0
process (I:xs) = (- 1) + process xs
process (D:xs) = 1 + process xs
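-- Worked example (added for illustration): with insertions recorded at lines
-- 2 and 3 and a deletion at line 5,
--
--   calculateOffset (M.fromList [(5, D)], M.fromList [(2, I), (3, I)]) 6
--     == (-1) + (-1) + 1 == -1
--
-- since every insertion at or before the given line contributes -1 and every
-- deletion contributes +1.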
intersectsRange :: M.IntMap DiffOp -> LineRange -> Bool
intersectsRange m (Range start end) = go m start
where
go m i | i <= end =
if isJust (M.lookup i m)
then True
else go m (i+1)
go m i | otherwise = False
containsRange :: M.IntMap DiffOp -> LineRange -> Bool
containsRange m (Range start end) = go m start
where
go m i | i <= end =
if isJust (M.lookup i m)
then go m (i+1)
else False
go m i | otherwise = True
instance (Monad m) => OracleF DiffOracle m where
callF o s d = do
-- traceM ("src[" ++ show (fromJust $ extractRange s) ++ "]: " ++ show s)
-- traceM ("dst[" ++ show (fromJust $ extractRange d) ++ "]: " ++ show d)
let ans = askOracle o (fromJust $ extractRange s) (fromJust $ extractRange d)
-- traceM ("ans: " ++ show ans)
return ans
instance (Monad m) => OracleP DiffOracle m where
callP _ An An = return []
callP _ An (_ `Ac` _) = return [ I ]
callP _ (_ `Ac` _) An = return [ D ]
callP o (s `Ac` _) (d `Ac` _) = do
case (extractRange s, extractRange d) of
(Nothing, Nothing) -> do
-- traceM "ans: M"
return [ M ]
(Just sRange, Nothing) -> do
-- traceM "ans: D"
return [ D ]
(Nothing, Just dRange) -> do
-- traceM "ans: I"
return [ I ]
(Just sRange, Just dRange) -> do
let ans = askOracle o sRange dRange
-- traceM ("ans: " ++ show ans)
return ans
instance Show DiffOracle where
show (DiffOracle m) = show m
| nazrhom/vcs-clojure | src/Oracle/DiffOracleOld.hs | bsd-3-clause | 3,890 | 0 | 16 | 1,014 | 1,403 | 729 | 674 | 85 | 4 |